# SPDX-FileCopyrightText: 2006-2012 assimp team
# SPDX-FileCopyrightText: 2013 Blender Foundation
#
# SPDX-License-Identifier: GPL-2.0-or-later

"""
Usage
=====

   fbx2json [FILES]...

This script will write a JSON file for each FBX argument given.

The JSON data is formatted into a list of nested lists of 4 items:

   ``[id, [data, ...], "data_types", [subtree, ...]]``

Where each list may be empty, and the items in
the subtree are formatted the same way.

data_types is a string, aligned with data, that specifies a type
for each property.

The types are as follows:

* 'Z': - INT8
* 'Y': - INT16
* 'B': - BOOL
* 'C': - CHAR
* 'I': - INT32
* 'F': - FLOAT32
* 'D': - FLOAT64
* 'L': - INT64
* 'R': - BYTES
* 'S': - STRING
* 'f': - FLOAT32_ARRAY
* 'i': - INT32_ARRAY
* 'd': - FLOAT64_ARRAY
* 'l': - INT64_ARRAY
* 'b': - BOOL_ARRAY
* 'c': - BYTE_ARRAY
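
For example, an element with a single INT32 property and no children could
look like this (the id and value are only illustrative):

   ``["GeometryVersion", [124], "I", []]``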

Note that key:value pairs aren't used since the ids are not
ensured to be unique.
"""

# ----------------------------------------------------------------------------
from struct import unpack
import array
import zlib

# at the end of each nested block, there is a NUL record to indicate
# that the sub-scope exists (i.e. to distinguish between P: and P : {})
_BLOCK_SENTINEL_LENGTH = ...
_BLOCK_SENTINEL_DATA = ...
read_fbx_elem_uint = ...
_IS_BIG_ENDIAN = (__import__("sys").byteorder != 'little')
_HEAD_MAGIC = b'Kaydara FBX Binary\x20\x20\x00\x1a\x00'

from collections import namedtuple
FBXElem = namedtuple("FBXElem", ("id", "props", "props_type", "elems"))


def read_uint(read):
    return unpack(b'<I', read(4))[0]


def read_uint64(read):
    return unpack(b'<Q', read(8))[0]


def read_ubyte(read):
    return unpack(b'B', read(1))[0]


def read_string_ubyte(read):
    size = read_ubyte(read)
    data = read(size)
    return data


def unpack_array(read, array_type, array_stride, array_byteswap):
    length = read_uint(read)
    encoding = read_uint(read)
    comp_len = read_uint(read)

    data = read(comp_len)

    # an encoding of 1 means the array payload is zlib-compressed
    if encoding == 1:
        data = zlib.decompress(data)

    assert(length * array_stride == len(data))

    data_array = array.array(array_type, data)
    if array_byteswap and _IS_BIG_ENDIAN:
        data_array.byteswap()
    return data_array


read_data_dict = {
    b'Z'[0]: lambda read: unpack(b'<b', read(1))[0],  # 8 bit int
    b'Y'[0]: lambda read: unpack(b'<h', read(2))[0],  # 16 bit int
    b'B'[0]: lambda read: unpack(b'?', read(1))[0],   # 1 bit bool (yes/no)
    b'C'[0]: lambda read: unpack(b'<c', read(1))[0],  # char
    b'I'[0]: lambda read: unpack(b'<i', read(4))[0],  # 32 bit int
    b'F'[0]: lambda read: unpack(b'<f', read(4))[0],  # 32 bit float
    b'D'[0]: lambda read: unpack(b'<d', read(8))[0],  # 64 bit float
    b'L'[0]: lambda read: unpack(b'<q', read(8))[0],  # 64 bit int
    b'R'[0]: lambda read: read(read_uint(read)),      # binary data
    b'S'[0]: lambda read: read(read_uint(read)),      # string data
    b'f'[0]: lambda read: unpack_array(read, 'f', 4, False),  # array (float)
    b'i'[0]: lambda read: unpack_array(read, 'i', 4, True),   # array (int)
    b'd'[0]: lambda read: unpack_array(read, 'd', 8, False),  # array (double)
    b'l'[0]: lambda read: unpack_array(read, 'q', 8, True),   # array (long)
    b'b'[0]: lambda read: unpack_array(read, 'b', 1, False),  # array (bool)
    b'c'[0]: lambda read: unpack_array(read, 'B', 1, False),  # array (ubyte)
}


# FBX 7500 (aka FBX2016) introduces incompatible changes at binary level:
#   * The NULL block marking end of nested stuff switches from 13 bytes long to 25 bytes long.
#   * The FBX element metadata (end_offset, prop_count and prop_length) switch from uint32 to uint64.
def init_version(fbx_version):
    global _BLOCK_SENTINEL_LENGTH, _BLOCK_SENTINEL_DATA, read_fbx_elem_uint

    assert(_BLOCK_SENTINEL_LENGTH == ...)
    assert(_BLOCK_SENTINEL_DATA == ...)

    if fbx_version < 7500:
        _BLOCK_SENTINEL_LENGTH = 13
        read_fbx_elem_uint = read_uint
    else:
        _BLOCK_SENTINEL_LENGTH = 25
        read_fbx_elem_uint = read_uint64

    _BLOCK_SENTINEL_DATA = (b'\0' * _BLOCK_SENTINEL_LENGTH)


def read_elem(read, tell, use_namedtuple):
    # [0] the offset at which this block ends
    # [1] the number of properties in the scope
    # [2] the length of the property list
    end_offset = read_fbx_elem_uint(read)
    if end_offset == 0:
        return None

    prop_count = read_fbx_elem_uint(read)
    prop_length = read_fbx_elem_uint(read)

    elem_id = read_string_ubyte(read)        # elem name of the scope/key
    elem_props_type = bytearray(prop_count)  # elem property types
    elem_props_data = [None] * prop_count    # elem properties (if any)
    elem_subtree = []                        # elem children (if any)

    for i in range(prop_count):
        data_type = read(1)[0]
        elem_props_data[i] = read_data_dict[data_type](read)
        elem_props_type[i] = data_type

    if tell() < end_offset:
        while tell() < (end_offset - _BLOCK_SENTINEL_LENGTH):
            elem_subtree.append(read_elem(read, tell, use_namedtuple))

        if read(_BLOCK_SENTINEL_LENGTH) != _BLOCK_SENTINEL_DATA:
            raise IOError("failed to read nested block sentinel, "
                          "expected all bytes to be 0")

    if tell() != end_offset:
        raise IOError("scope length not reached, something is wrong")

    args = (elem_id, elem_props_data, elem_props_type, elem_subtree)
    return FBXElem(*args) if use_namedtuple else args


def parse_version(fn):
    """
    Return the FBX version,
    if the file isn't a binary FBX return zero.
    """
    with open(fn, 'rb') as f:
        read = f.read

        if read(len(_HEAD_MAGIC)) != _HEAD_MAGIC:
            return 0

        return read_uint(read)


def parse(fn, use_namedtuple=True):
    root_elems = []

    with open(fn, 'rb') as f:
        read = f.read
        tell = f.tell

        if read(len(_HEAD_MAGIC)) != _HEAD_MAGIC:
            raise IOError("Invalid header")

        fbx_version = read_uint(read)
        init_version(fbx_version)

        # root level elements are read until the terminating NUL record
        while True:
            elem = read_elem(read, tell, use_namedtuple)
            if elem is None:
                break
            root_elems.append(elem)

    args = (b'', [], bytearray(0), root_elems)
    return FBXElem(*args) if use_namedtuple else args, fbx_version
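

# Example usage of the parser above (a sketch; "cube.fbx" is only a
# placeholder path for any binary FBX file):
#
#   fbx_root_elem, fbx_version = parse("cube.fbx", use_namedtuple=True)
#   for fbx_elem in fbx_root_elem.elems:
#       print(fbx_elem.id, len(fbx_elem.props))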


# ----------------------------------------------------------------------------

# Stand-in for the pyfbx.data_types module: readable names for the type codes.
data_types = type(array)("data_types")
data_types.__dict__.update(
    dict(
        INT8=b'Z'[0],
        INT16=b'Y'[0],
        BOOL=b'B'[0],
        CHAR=b'C'[0],
        INT32=b'I'[0],
        FLOAT32=b'F'[0],
        FLOAT64=b'D'[0],
        INT64=b'L'[0],
        BYTES=b'R'[0],
        STRING=b'S'[0],
        FLOAT32_ARRAY=b'f'[0],
        INT32_ARRAY=b'i'[0],
        FLOAT64_ARRAY=b'd'[0],
        INT64_ARRAY=b'l'[0],
        BOOL_ARRAY=b'b'[0],
        BYTE_ARRAY=b'c'[0],
    ))

# Stand-in for the pyfbx.parse_bin module.
parse_bin = type(array)("parse_bin")
parse_bin.__dict__.update(
    dict(
        parse=parse,
    ))


# ----------------------------------------------------------------------------
# from pyfbx import parse_bin, data_types
import json
import os


def fbx2json_property_as_string(prop, prop_type):
    if prop_type == data_types.STRING:
        prop_str = prop.decode('utf-8')
        prop_str = prop_str.replace('\x00\x01', '::')
        return json.dumps(prop_str)

    prop_py_type = type(prop)
    if prop_py_type == bytes:
        return json.dumps(repr(prop)[2:-1])
    elif prop_py_type == bool:
        return json.dumps(prop)
    elif prop_py_type == array.array:
        return repr(list(prop))

    # remaining types (ints and floats) already have a JSON compatible repr
    return repr(prop)


def fbx2json_properties_as_string(fbx_elem):
    return ", ".join(fbx2json_property_as_string(*prop_item)
                     for prop_item in zip(fbx_elem.props,
                                          fbx_elem.props_type))


def fbx2json_recurse(fw, fbx_elem, ident, is_last):
    fbx_elem_id = fbx_elem.id.decode('utf-8')
    fw('%s["%s", ' % (ident, fbx_elem_id))
    fw('[%s], ' % fbx2json_properties_as_string(fbx_elem))
    fw('"%s", ' % (fbx_elem.props_type.decode('ascii')))

    fw('[')
    if fbx_elem.elems:
        fw('\n')
        ident_sub = ident + "    "
        for fbx_elem_sub in fbx_elem.elems:
            fbx2json_recurse(fw, fbx_elem_sub, ident_sub,
                             fbx_elem_sub is fbx_elem.elems[-1])
    fw(']')

    fw(']%s' % ('' if is_last else ',\n'))


def fbx2json(fn):
    fn_json = "%s.json" % os.path.splitext(fn)[0]
    print("Writing: %r " % fn_json, end="")
    fbx_root_elem, fbx_version = parse(fn, use_namedtuple=True)
    print("(Version %d) ..." % fbx_version)

    with open(fn_json, 'w', encoding="ascii", errors='xmlcharrefreplace') as f:
        fw = f.write
        fw('[\n')
        ident_sub = "    "
        for fbx_elem_sub in fbx_root_elem.elems:
            fbx2json_recurse(f.write, fbx_elem_sub, ident_sub,
                             fbx_elem_sub is fbx_root_elem.elems[-1])
        fw(']\n')


# ----------------------------------------------------------------------------

def main():
    import sys

    if "--help" in sys.argv:
        print(__doc__)
        return

    for arg in sys.argv[1:]:
        try:
            fbx2json(arg)
        except Exception:
            print("Failed to convert %r, error:" % arg)

            import traceback
            traceback.print_exc()


if __name__ == "__main__":
    main()