# SPDX-FileCopyrightText: 2013-2023 Blender Foundation
# SPDX-License-Identifier: GPL-2.0-or-later

# FBX 7.1.0 -> 7.4.0 loader for Blender

# Not totally pep8 compliant.
#   pep8 import_fbx.py --ignore=E501,E123,E702,E125

import importlib

if "parse_fbx" in locals():
    importlib.reload(parse_fbx)
if "fbx_utils" in locals():
    importlib.reload(fbx_utils)

import bpy
from bpy.app.translations import pgettext_tip as tip_
from mathutils import Matrix, Euler, Vector, Quaternion

# Also imported in .fbx_utils, so importing here is unlikely to further affect Blender startup time.
import numpy as np

from . import parse_fbx, fbx_utils

from .parse_fbx import (
    data_types,
    FBXElem,
)
from .fbx_utils import (
    units_blender_to_fbx_factor,
    units_convertor_iter,
    parray_as_ndarray,
    astype_view_signedness,
    MESH_ATTRIBUTE_MATERIAL_INDEX,
    MESH_ATTRIBUTE_POSITION,
    MESH_ATTRIBUTE_EDGE_VERTS,
    MESH_ATTRIBUTE_CORNER_VERT,
    MESH_ATTRIBUTE_SHARP_FACE,
    MESH_ATTRIBUTE_SHARP_EDGE,
    expand_shape_key_range,
    FBX_TIMECODE_DEFINITION_TO_KTIME_PER_SECOND,
)

LINEAR_INTERPOLATION_VALUE = bpy.types.Keyframe.bl_rna.properties['interpolation'].enum_items['LINEAR'].value

# global singleton, assign on execution

convert_deg_to_rad_iter = units_convertor_iter("degree", "radian")

MAT_CONVERT_BONE = fbx_utils.MAT_CONVERT_BONE.inverted()
MAT_CONVERT_LIGHT = fbx_utils.MAT_CONVERT_LIGHT.inverted()
MAT_CONVERT_CAMERA = fbx_utils.MAT_CONVERT_CAMERA.inverted()


def validate_blend_names(name):
    assert(type(name) == bytes)
    # Blender typically does not accept names over 63 bytes...
    if len(name) > 63:
        import hashlib
        h = hashlib.sha1(name).hexdigest()
        n = 55
        name_utf8 = name[:n].decode('utf-8', 'replace') + "_" + h[:7]
        while len(name_utf8.encode()) > 63:
            n -= 1
            name_utf8 = name[:n].decode('utf-8', 'replace') + "_" + h[:7]
        return name_utf8
    else:
        # We use 'replace' even though FBX 'specs' say it should always be utf8, see T53841.
        return name.decode('utf-8', 'replace')

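
# Illustrative sketch (not part of the importer): a name longer than 63 bytes is truncated and suffixed with the
# first 7 hex digits of its SHA1, so distinct long FBX names stay distinct in Blender, e.g.
#   validate_blend_names(b"A" * 100) -> "AAA...AAA_" + hashlib.sha1(b"A" * 100).hexdigest()[:7]
# while short names are simply decoded as UTF-8 with 'replace' error handling.
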
def elem_find_first(elem, id_search, default=None):
    for fbx_item in elem.elems:
        if fbx_item.id == id_search:
            return fbx_item
    return default

def elem_find_iter(elem, id_search):
    for fbx_item in elem.elems:
        if fbx_item.id == id_search:
            yield fbx_item

def elem_find_first_string(elem, id_search):
    fbx_item = elem_find_first(elem, id_search)
    if fbx_item is not None and fbx_item.props:  # Do not error on completely empty properties (see T45291).
        assert(len(fbx_item.props) == 1)
        assert(fbx_item.props_type[0] == data_types.STRING)
        return fbx_item.props[0].decode('utf-8', 'replace')
    return None

def elem_find_first_string_as_bytes(elem, id_search):
    fbx_item = elem_find_first(elem, id_search)
    if fbx_item is not None and fbx_item.props:  # Do not error on completely empty properties (see T45291).
        assert(len(fbx_item.props) == 1)
        assert(fbx_item.props_type[0] == data_types.STRING)
        return fbx_item.props[0]  # Keep it as bytes as requested...
    return None

def elem_find_first_bytes(elem, id_search, decode=True):
    fbx_item = elem_find_first(elem, id_search)
    if fbx_item is not None and fbx_item.props:  # Do not error on completely empty properties (see T45291).
        assert(len(fbx_item.props) == 1)
        assert(fbx_item.props_type[0] == data_types.BYTES)
        return fbx_item.props[0]
    return None

127 return "%s: props[%d=%r], elems=(%r)" % (
130 ", ".join([repr(p
) for p
in elem
.props
]),
132 b
", ".join([e
.id for e
in elem
.elems
]),
def elem_split_name_class(elem):
    assert(elem.props_type[-2] == data_types.STRING)
    elem_name, elem_class = elem.props[-2].split(b'\x00\x01')
    return elem_name, elem_class

def elem_name_ensure_class(elem, clss=...):
    elem_name, elem_class = elem_split_name_class(elem)
    if clss is not ...:
        assert(elem_class == clss)
    return validate_blend_names(elem_name)

def elem_name_ensure_classes(elem, clss=...):
    elem_name, elem_class = elem_split_name_class(elem)
    if clss is not ...:
        assert(elem_class in clss)
    return validate_blend_names(elem_name)

def elem_split_name_class_nodeattr(elem):
    assert(elem.props_type[-2] == data_types.STRING)
    elem_name, elem_class = elem.props[-2].split(b'\x00\x01')
    assert(elem_class == b'NodeAttribute')
    assert(elem.props_type[-1] == data_types.STRING)
    elem_class = elem.props[-1]
    return elem_name, elem_class

def elem_uuid(elem):
    assert(elem.props_type[0] == data_types.INT64)
    return elem.props[0]

def elem_prop_first(elem, default=None):
    return elem.props[0] if (elem is not None) and elem.props else default

# Properties70: { ... P:
# Custom properties ("user properties" in FBX) are ignored here and get handled separately (see #104773).
def elem_props_find_first(elem, elem_prop_id):
    if elem is None:
        # When properties are not found... Should never happen, but happens - as usual.
        return None
    # support for templates (tuple of elems)
    if type(elem) is not FBXElem:
        assert(type(elem) is tuple)
        for e in elem:
            result = elem_props_find_first(e, elem_prop_id)
            if result is not None:
                return result
        assert(len(elem) > 0)
        return None

    for subelem in elem.elems:
        assert(subelem.id == b'P')
        # 'U' flag indicates that the property has been defined by the user.
        if subelem.props[0] == elem_prop_id and b'U' not in subelem.props[3]:
            return subelem
    return None

def elem_props_get_color_rgb(elem, elem_prop_id, default=None):
    elem_prop = elem_props_find_first(elem, elem_prop_id)
    if elem_prop is not None:
        assert(elem_prop.props[0] == elem_prop_id)
        if elem_prop.props[1] == b'Color':
            assert(elem_prop.props[1] == b'Color')
            assert(elem_prop.props[2] == b'')
        else:
            assert(elem_prop.props[1] == b'ColorRGB')
            assert(elem_prop.props[2] == b'Color')
        assert(elem_prop.props_type[4:7] == bytes((data_types.FLOAT64,)) * 3)
        return elem_prop.props[4:7]
    return default

def elem_props_get_vector_3d(elem, elem_prop_id, default=None):
    elem_prop = elem_props_find_first(elem, elem_prop_id)
    if elem_prop is not None:
        assert(elem_prop.props_type[4:7] == bytes((data_types.FLOAT64,)) * 3)
        return elem_prop.props[4:7]
    return default

def elem_props_get_number(elem, elem_prop_id, default=None):
    elem_prop = elem_props_find_first(elem, elem_prop_id)
    if elem_prop is not None:
        assert(elem_prop.props[0] == elem_prop_id)
        if elem_prop.props[1] == b'double':
            assert(elem_prop.props[1] == b'double')
            assert(elem_prop.props[2] == b'Number')
        else:
            assert(elem_prop.props[1] == b'Number')
            assert(elem_prop.props[2] == b'')

        # we could allow other number types
        assert(elem_prop.props_type[4] == data_types.FLOAT64)

        return elem_prop.props[4]
    return default

def elem_props_get_integer(elem, elem_prop_id, default=None):
    elem_prop = elem_props_find_first(elem, elem_prop_id)
    if elem_prop is not None:
        assert(elem_prop.props[0] == elem_prop_id)
        if elem_prop.props[1] == b'int':
            assert(elem_prop.props[1] == b'int')
            assert(elem_prop.props[2] == b'Integer')
        elif elem_prop.props[1] == b'ULongLong':
            assert(elem_prop.props[1] == b'ULongLong')
            assert(elem_prop.props[2] == b'')

        # we could allow other number types
        assert(elem_prop.props_type[4] in {data_types.INT32, data_types.INT64})

        return elem_prop.props[4]
    return default

def elem_props_get_bool(elem, elem_prop_id, default=None):
    elem_prop = elem_props_find_first(elem, elem_prop_id)
    if elem_prop is not None:
        assert(elem_prop.props[0] == elem_prop_id)
        # b'Bool' with a capital seems to be used for animated property... go figure...
        assert(elem_prop.props[1] in {b'bool', b'Bool'})
        assert(elem_prop.props[2] == b'')

        # we could allow other number types
        assert(elem_prop.props_type[4] == data_types.INT32)
        assert(elem_prop.props[4] in {0, 1})

        return bool(elem_prop.props[4])
    return default

def elem_props_get_enum(elem, elem_prop_id, default=None):
    elem_prop = elem_props_find_first(elem, elem_prop_id)
    if elem_prop is not None:
        assert(elem_prop.props[0] == elem_prop_id)
        assert(elem_prop.props[1] == b'enum')
        assert(elem_prop.props[2] == b'')
        assert(elem_prop.props[3] == b'')

        # we could allow other number types
        assert(elem_prop.props_type[4] == data_types.INT32)

        return elem_prop.props[4]
    return default

def elem_props_get_visibility(elem, elem_prop_id, default=None):
    elem_prop = elem_props_find_first(elem, elem_prop_id)
    if elem_prop is not None:
        assert(elem_prop.props[0] == elem_prop_id)
        assert(elem_prop.props[1] == b'Visibility')
        assert(elem_prop.props[2] == b'')

        # we could allow other number types
        assert(elem_prop.props_type[4] == data_types.FLOAT64)

        return elem_prop.props[4]
    return default

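
# Illustrative sketch (not part of the importer): the elem_props_get_* helpers above are typically called with the
# element found under b'Properties70' (optionally together with a template element, passed as a tuple, which
# elem_props_find_first also accepts) and they fall back to the supplied default when the property is absent.
# Names below are only an example:
#   fbx_props = (elem_find_first(fbx_obj, b'Properties70'), fbx_template_props)
#   color = elem_props_get_color_rgb(fbx_props, b'Color', (0.8, 0.8, 0.8))
#   visibility = elem_props_get_visibility(fbx_props, b'Visibility', 1.0)
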
# ----------------------------------------------------------------------------

from collections import namedtuple

FBXTransformData = namedtuple("FBXTransformData", (
    "loc", "geom_loc",
    "rot", "rot_ofs", "rot_piv", "pre_rot", "pst_rot", "rot_ord", "rot_alt_mat", "geom_rot",
    "sca", "sca_ofs", "sca_piv", "geom_sca",
))

def blen_read_custom_properties(fbx_obj, blen_obj, settings):
    # There doesn't seem to be a way to put user properties into templates, so this only gets the object properties:
    fbx_obj_props = elem_find_first(fbx_obj, b'Properties70')
    if fbx_obj_props:
        for fbx_prop in fbx_obj_props.elems:
            assert(fbx_prop.id == b'P')

            if b'U' in fbx_prop.props[3]:
                if fbx_prop.props[0] == b'UDP3DSMAX':
                    # Special case for 3DS Max user properties:
                    try:
                        assert(fbx_prop.props[1] == b'KString')
                    except AssertionError as exc:
                        print(exc)
                    assert(fbx_prop.props_type[4] == data_types.STRING)
                    items = fbx_prop.props[4].decode('utf-8', 'replace')
                    for item in items.split('\r\n'):
                        if item:
                            split_item = item.split('=', 1)
                            if len(split_item) != 2:
                                split_item = item.split(':', 1)
                            if len(split_item) != 2:
                                print("cannot parse UDP3DSMAX custom property '%s', ignoring..." % item)
                                continue
                            prop_name, prop_value = split_item
                            prop_name = validate_blend_names(prop_name.strip().encode('utf-8'))
                            blen_obj[prop_name] = prop_value.strip()
                else:
                    prop_name = validate_blend_names(fbx_prop.props[0])
                    prop_type = fbx_prop.props[1]
                    if prop_type in {b'Vector', b'Vector3D', b'Color', b'ColorRGB'}:
                        assert(fbx_prop.props_type[4:7] == bytes((data_types.FLOAT64,)) * 3)
                        blen_obj[prop_name] = fbx_prop.props[4:7]
                    elif prop_type in {b'Vector4', b'ColorRGBA'}:
                        assert(fbx_prop.props_type[4:8] == bytes((data_types.FLOAT64,)) * 4)
                        blen_obj[prop_name] = fbx_prop.props[4:8]
                    elif prop_type == b'Vector2D':
                        assert(fbx_prop.props_type[4:6] == bytes((data_types.FLOAT64,)) * 2)
                        blen_obj[prop_name] = fbx_prop.props[4:6]
                    elif prop_type in {b'Integer', b'int'}:
                        assert(fbx_prop.props_type[4] == data_types.INT32)
                        blen_obj[prop_name] = fbx_prop.props[4]
                    elif prop_type == b'KString':
                        assert(fbx_prop.props_type[4] == data_types.STRING)
                        blen_obj[prop_name] = fbx_prop.props[4].decode('utf-8', 'replace')
                    elif prop_type in {b'Number', b'double', b'Double'}:
                        assert(fbx_prop.props_type[4] == data_types.FLOAT64)
                        blen_obj[prop_name] = fbx_prop.props[4]
                    elif prop_type in {b'Float', b'float'}:
                        assert(fbx_prop.props_type[4] == data_types.FLOAT32)
                        blen_obj[prop_name] = fbx_prop.props[4]
                    elif prop_type in {b'Bool', b'bool'}:
                        assert(fbx_prop.props_type[4] == data_types.INT32)
                        blen_obj[prop_name] = fbx_prop.props[4] != 0
                    elif prop_type in {b'Enum', b'enum'}:
                        assert(fbx_prop.props_type[4:6] == bytes((data_types.INT32, data_types.STRING)))
                        val = fbx_prop.props[4]
                        if settings.use_custom_props_enum_as_string and fbx_prop.props[5]:
                            enum_items = fbx_prop.props[5].decode('utf-8', 'replace').split('~')
                            if val >= 0 and val < len(enum_items):
                                blen_obj[prop_name] = enum_items[val]
                            else:
                                print("WARNING: User property '%s' has wrong enum value, skipped" % prop_name)
                        else:
                            blen_obj[prop_name] = val
                    else:
                        print("WARNING: User property type '%s' is not supported" % prop_type.decode('utf-8', 'replace'))

def blen_read_object_transform_do(transform_data):
    # This is a nightmare. FBX SDK uses Maya way to compute the transformation matrix of a node - utterly simple:
    #
    #     WorldTransform = ParentWorldTransform @ T @ Roff @ Rp @ Rpre @ R @ Rpost-1 @ Rp-1 @ Soff @ Sp @ S @ Sp-1
    #
    # Where all those terms are 4 x 4 matrices that contain:
    #     WorldTransform: Transformation matrix of the node in global space.
    #     ParentWorldTransform: Transformation matrix of the parent node in global space.
    #     T: Translation
    #     Roff: Rotation offset
    #     Rp: Rotation pivot
    #     Rpre: Pre-rotation
    #     R: Rotation
    #     Rpost-1: Inverse of the post-rotation (FBX 2011 documentation incorrectly specifies this without inversion)
    #     Rp-1: Inverse of the rotation pivot
    #     Soff: Scaling offset
    #     Sp: Scaling pivot
    #     S: Scaling
    #     Sp-1: Inverse of the scaling pivot
    #
    # But it was still too simple, and FBX notion of compatibility is... quite specific. So we also have to
    # support 3DSMax way:
    #
    #     WorldTransform = ParentWorldTransform @ T @ R @ S @ OT @ OR @ OS
    #
    # Where all those terms are 4 x 4 matrices that contain:
    #     WorldTransform: Transformation matrix of the node in global space
    #     ParentWorldTransform: Transformation matrix of the parent node in global space
    #     T: Translation
    #     R: Rotation
    #     S: Scaling
    #     OT: Geometric transform translation
    #     OR: Geometric transform rotation
    #     OS: Geometric transform scale
    #
    # Geometric transformations ***are not inherited***: ParentWorldTransform does not contain the OT, OR, OS
    # of WorldTransform's parent node.
    # The R matrix takes into account the rotation order. Other rotation matrices are always 'XYZ' order.
    #
    # Taken from https://help.autodesk.com/view/FBX/2020/ENU/
    #            ?guid=FBX_Developer_Help_nodes_and_scene_graph_fbx_nodes_computing_transformation_matrix_html

    lcl_translation = Matrix.Translation(transform_data.loc)
    geom_loc = Matrix.Translation(transform_data.geom_loc)

    # rotation
    to_rot = lambda rot, rot_ord: Euler(convert_deg_to_rad_iter(rot), rot_ord).to_matrix().to_4x4()
    lcl_rot = to_rot(transform_data.rot, transform_data.rot_ord) @ transform_data.rot_alt_mat
    pre_rot = to_rot(transform_data.pre_rot, 'XYZ')
    pst_rot = to_rot(transform_data.pst_rot, 'XYZ')
    geom_rot = to_rot(transform_data.geom_rot, 'XYZ')

    rot_ofs = Matrix.Translation(transform_data.rot_ofs)
    rot_piv = Matrix.Translation(transform_data.rot_piv)
    sca_ofs = Matrix.Translation(transform_data.sca_ofs)
    sca_piv = Matrix.Translation(transform_data.sca_piv)

    # scale
    lcl_scale = Matrix()
    lcl_scale[0][0], lcl_scale[1][1], lcl_scale[2][2] = transform_data.sca
    geom_scale = Matrix()
    geom_scale[0][0], geom_scale[1][1], geom_scale[2][2] = transform_data.geom_sca

    base_mat = (
        lcl_translation @
        rot_ofs @
        rot_piv @
        pre_rot @
        lcl_rot @
        pst_rot.inverted_safe() @
        rot_piv.inverted_safe() @
        sca_ofs @
        sca_piv @
        lcl_scale @
        sca_piv.inverted_safe()
    )
    geom_mat = geom_loc @ geom_rot @ geom_scale
    # We return mat without 'geometric transforms' too, because it is to be used for children, sigh...
    return (base_mat @ geom_mat, base_mat, geom_mat)

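
# Illustrative sketch (not part of the importer): callers get three matrices from the function above; the first
# (with the geometric transform applied) is meant for the node's own data, while children should be parented under
# base_mat only, because FBX geometric transforms are not inherited, e.g. (hypothetical usage):
#   mat_with_geom, base_mat, geom_mat = blen_read_object_transform_do(transform_data)
#   obj.matrix_basis = mat_with_geom     # the node itself, including OT/OR/OS
#   child_parent_matrix = base_mat       # children ignore geom_mat
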
# XXX This might be weak, now that we can add vgroups from both bones and shapes, name collisions become
#     more likely, will have to make this more robust!!!
def add_vgroup_to_objects(vg_indices, vg_weights, vg_name, objects):
    assert(len(vg_indices) == len(vg_weights))
    if vg_indices:
        for obj in objects:
            # We replace/override here...
            vg = obj.vertex_groups.get(vg_name)
            if vg is None:
                vg = obj.vertex_groups.new(name=vg_name)
            vg_add = vg.add
            for i, w in zip(vg_indices, vg_weights):
                vg_add((i,), w, 'REPLACE')

def blen_read_object_transform_preprocess(fbx_props, fbx_obj, rot_alt_mat, use_prepost_rot):
    # This is quite involved, 'fbxRNode.cpp' from openscenegraph used as a reference
    const_vector_zero_3d = 0.0, 0.0, 0.0
    const_vector_one_3d = 1.0, 1.0, 1.0

    loc = list(elem_props_get_vector_3d(fbx_props, b'Lcl Translation', const_vector_zero_3d))
    rot = list(elem_props_get_vector_3d(fbx_props, b'Lcl Rotation', const_vector_zero_3d))
    sca = list(elem_props_get_vector_3d(fbx_props, b'Lcl Scaling', const_vector_one_3d))

    geom_loc = list(elem_props_get_vector_3d(fbx_props, b'GeometricTranslation', const_vector_zero_3d))
    geom_rot = list(elem_props_get_vector_3d(fbx_props, b'GeometricRotation', const_vector_zero_3d))
    geom_sca = list(elem_props_get_vector_3d(fbx_props, b'GeometricScaling', const_vector_one_3d))

    rot_ofs = elem_props_get_vector_3d(fbx_props, b'RotationOffset', const_vector_zero_3d)
    rot_piv = elem_props_get_vector_3d(fbx_props, b'RotationPivot', const_vector_zero_3d)
    sca_ofs = elem_props_get_vector_3d(fbx_props, b'ScalingOffset', const_vector_zero_3d)
    sca_piv = elem_props_get_vector_3d(fbx_props, b'ScalingPivot', const_vector_zero_3d)

    is_rot_act = elem_props_get_bool(fbx_props, b'RotationActive', False)

    if is_rot_act:
        if use_prepost_rot:
            pre_rot = elem_props_get_vector_3d(fbx_props, b'PreRotation', const_vector_zero_3d)
            pst_rot = elem_props_get_vector_3d(fbx_props, b'PostRotation', const_vector_zero_3d)
        else:
            pre_rot = const_vector_zero_3d
            pst_rot = const_vector_zero_3d
        rot_ord = {
            0: 'XYZ',
            1: 'XZY',
            2: 'YZX',
            3: 'YXZ',
            4: 'ZXY',
            5: 'ZYX',
            6: 'XYZ',  # XXX eSphericXYZ, not really supported...
        }.get(elem_props_get_enum(fbx_props, b'RotationOrder', 0))
    else:
        pre_rot = const_vector_zero_3d
        pst_rot = const_vector_zero_3d
        rot_ord = 'XYZ'

    return FBXTransformData(loc, geom_loc,
                            rot, rot_ofs, rot_piv, pre_rot, pst_rot, rot_ord, rot_alt_mat, geom_rot,
                            sca, sca_ofs, sca_piv, geom_sca)

def _blen_read_object_transform_do_anim(transform_data, lcl_translation_mat, lcl_rot_euler, lcl_scale_mat,
                                        extra_pre_matrix, extra_post_matrix):
    """Specialized version of blen_read_object_transform_do for animation that pre-calculates the non-animated matrices
    and returns a function that calculates (base_mat @ geom_mat). See the comments in blen_read_object_transform_do for
    a full description of what this function is doing.

    The lcl_translation_mat, lcl_rot_euler and lcl_scale_mat arguments should have their values updated each frame and
    then calling the returned function will calculate the matrix for the current frame.

    extra_pre_matrix and extra_post_matrix are any extra matrices to multiply first/last."""
    # Translation.
    geom_loc = Matrix.Translation(transform_data.geom_loc)

    # Rotation.
    def to_rot_xyz(rot):
        # All the rotations that can be precalculated have a fixed XYZ order.
        return Euler(convert_deg_to_rad_iter(rot), 'XYZ').to_matrix().to_4x4()
    pre_rot = to_rot_xyz(transform_data.pre_rot)
    pst_rot_inv = to_rot_xyz(transform_data.pst_rot).inverted_safe()
    geom_rot = to_rot_xyz(transform_data.geom_rot)

    # Offsets and pivots.
    rot_ofs = Matrix.Translation(transform_data.rot_ofs)
    rot_piv = Matrix.Translation(transform_data.rot_piv)
    rot_piv_inv = rot_piv.inverted_safe()
    sca_ofs = Matrix.Translation(transform_data.sca_ofs)
    sca_piv = Matrix.Translation(transform_data.sca_piv)
    sca_piv_inv = sca_piv.inverted_safe()

    # Scale.
    geom_scale = Matrix()
    geom_scale[0][0], geom_scale[1][1], geom_scale[2][2] = transform_data.geom_sca

    # Some matrices can be combined in advance, using the associative property of matrix multiplication, so that less
    # matrix multiplication is required each frame.
    geom_mat = geom_loc @ geom_rot @ geom_scale
    post_lcl_translation = rot_ofs @ rot_piv @ pre_rot
    post_lcl_rotation = transform_data.rot_alt_mat @ pst_rot_inv @ rot_piv_inv @ sca_ofs @ sca_piv
    post_lcl_scaling = sca_piv_inv @ geom_mat @ extra_post_matrix

    # Get the bound to_matrix method to avoid re-binding it on each call.
    lcl_rot_euler_to_matrix_3x3 = lcl_rot_euler.to_matrix
    # Get the unbound Matrix.to_4x4 method to avoid having to look it up again on each call.
    matrix_to_4x4 = Matrix.to_4x4

    if extra_pre_matrix == Matrix():
        # There aren't any other matrices that must be multiplied before lcl_translation_mat that extra_pre_matrix can
        # be combined with, so skip extra_pre_matrix when it's the identity matrix.
        return lambda: (lcl_translation_mat @
                        post_lcl_translation @
                        matrix_to_4x4(lcl_rot_euler_to_matrix_3x3()) @
                        post_lcl_rotation @
                        lcl_scale_mat @
                        post_lcl_scaling)
    else:
        return lambda: (extra_pre_matrix @
                        lcl_translation_mat @
                        post_lcl_translation @
                        matrix_to_4x4(lcl_rot_euler_to_matrix_3x3()) @
                        post_lcl_rotation @
                        lcl_scale_mat @
                        post_lcl_scaling)

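
# Illustrative sketch (not part of the importer): the function above returns a closure over the three mutable "lcl"
# inputs; per-frame evaluation only mutates those in place and re-calls the closure, e.g. (hypothetical names):
#   calc_mat = _blen_read_object_transform_do_anim(td, lcl_loc_mat, lcl_rot_eul, lcl_sca_mat, Matrix(), Matrix())
#   lcl_loc_mat.translation[0] = x_value_at_frame   # update animated channels in-place
#   mat_at_frame = calc_mat()
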
def _transformation_curves_gen(item, values_arrays, channel_keys):
    """Yields flattened location/rotation/scaling values for imported PoseBone/Object Lcl Translation/Rotation/Scaling
    animation curve values.

    The value arrays must have the same lengths, where each index of each array corresponds to a single keyframe.

    Each value array must have a corresponding channel key tuple that identifies the fbx property
    (b'Lcl Translation'/b'Lcl Rotation'/b'Lcl Scaling') and the channel (x/y/z as 0/1/2) of that property."""
    from operator import setitem
    from functools import partial

    if item.is_bone:
        bl_obj = item.bl_obj.pose.bones[item.bl_bone]
    else:
        bl_obj = item.bl_obj

    rot_mode = bl_obj.rotation_mode
    transform_data = item.fbx_transform_data
    rot_eul_prev = bl_obj.rotation_euler.copy()
    rot_quat_prev = bl_obj.rotation_quaternion.copy()

    # Pre-compute combined pre-matrix.
    # Remove that rest pose matrix from current matrix (also in parent space) by computing the inverted local rest
    # matrix of the bone, if relevant.
    combined_pre_matrix = item.get_bind_matrix().inverted_safe() if item.is_bone else Matrix()
    # item.pre_matrix will contain any correction for a parent's correction matrix or the global matrix
    if item.pre_matrix:
        combined_pre_matrix @= item.pre_matrix

    # Pre-compute combined post-matrix.
    # Compensate for changes in the local matrix during processing.
    combined_post_matrix = item.anim_compensation_matrix.copy() if item.anim_compensation_matrix else Matrix()
    # item.post_matrix will contain any correction for lights, camera and bone orientation
    if item.post_matrix:
        combined_post_matrix @= item.post_matrix

    # Create matrices/euler from the initial transformation values of this item.
    # These variables will be updated in-place as we iterate through each frame.
    lcl_translation_mat = Matrix.Translation(transform_data.loc)
    lcl_rotation_eul = Euler(convert_deg_to_rad_iter(transform_data.rot), transform_data.rot_ord)
    lcl_scaling_mat = Matrix()
    lcl_scaling_mat[0][0], lcl_scaling_mat[1][1], lcl_scaling_mat[2][2] = transform_data.sca

    # Create setters into lcl_translation_mat, lcl_rotation_eul and lcl_scaling_mat for each values_array and convert
    # any rotation values into radians.
    lcl_setters = []
    values_arrays_converted = []
    for values_array, (fbx_prop, channel) in zip(values_arrays, channel_keys):
        if fbx_prop == b'Lcl Translation':
            # lcl_translation_mat.translation[channel] = value
            setter = partial(setitem, lcl_translation_mat.translation, channel)
        elif fbx_prop == b'Lcl Rotation':
            # FBX rotations are in degrees, but Blender uses radians, so convert all rotation values in advance.
            values_array = np.deg2rad(values_array)
            # lcl_rotation_eul[channel] = value
            setter = partial(setitem, lcl_rotation_eul, channel)
        else:
            assert(fbx_prop == b'Lcl Scaling')
            # lcl_scaling_mat[channel][channel] = value
            setter = partial(setitem, lcl_scaling_mat[channel], channel)
        lcl_setters.append(setter)
        values_arrays_converted.append(values_array)

    # Create an iterator that gets one value from each array. Each iterated tuple will be all the imported
    # Lcl Translation/Lcl Rotation/Lcl Scaling values for a single frame, in that order.
    # Note that an FBX animation does not have to animate all the channels, so only the animated channels of each
    # property will be present.
    # .data, the memoryview of an np.ndarray, is faster to iterate than the ndarray itself.
    frame_values_it = zip(*(arr.data for arr in values_arrays_converted))

    # Getting the unbound methods in advance avoids having to look them up again on each call within the loop.
    mat_decompose = Matrix.decompose
    quat_to_axis_angle = Quaternion.to_axis_angle
    quat_to_euler = Quaternion.to_euler
    quat_dot = Quaternion.dot

    calc_mat = _blen_read_object_transform_do_anim(transform_data,
                                                   lcl_translation_mat, lcl_rotation_eul, lcl_scaling_mat,
                                                   combined_pre_matrix, combined_post_matrix)

    # Iterate through the values for each frame.
    for frame_values in frame_values_it:
        # Set each value into its corresponding lcl matrix/euler.
        for lcl_setter, value in zip(lcl_setters, frame_values):
            lcl_setter(value)

        # Calculate the updated matrix for this frame.
        mat = calc_mat()

        # Now we have a virtual matrix of transform from AnimCurves, we can yield keyframe values!
        loc, rot, sca = mat_decompose(mat)
        if rot_mode == 'QUATERNION':
            if quat_dot(rot_quat_prev, rot) < 0.0:
                rot = -rot
            rot_quat_prev = rot
        elif rot_mode == 'AXIS_ANGLE':
            vec, ang = quat_to_axis_angle(rot)
            rot = ang, vec.x, vec.y, vec.z
        else:  # Euler
            rot = quat_to_euler(rot, rot_mode, rot_eul_prev)
            rot_eul_prev = rot

        # Yield order matches the order that the location/rotation/scale FCurves are created in.
        yield from loc
        yield from rot
        yield from sca

def _combine_curve_keyframe_times(times_and_values_tuples, initial_values):
    """Combine multiple parsed animation curves, that affect different channels, such that every animation curve
    contains the keyframes from every other curve, interpolating the values for the newly inserted keyframes in each
    curve.

    Currently, linear interpolation is assumed, but FBX does store how keyframes should be interpolated, so correctly
    interpolating the keyframe values is a TODO."""
    if len(times_and_values_tuples) == 1:
        # Nothing to do when there is only a single curve.
        times, values = times_and_values_tuples[0]
        return times, [values]

    all_times = [t[0] for t in times_and_values_tuples]

    # Get the combined sorted unique times of all the curves.
    sorted_all_times = np.unique(np.concatenate(all_times))

    values_arrays = []
    for (times, values), initial_value in zip(times_and_values_tuples, initial_values):
        if sorted_all_times.size == times.size:
            # `sorted_all_times` will always contain all values in `times` and both `times` and `sorted_all_times` must
            # be strictly increasing, so if both arrays have the same size, they must be identical.
            extended_values = values
        else:
            # For now, linear interpolation is assumed. NumPy conveniently has a fast C-compiled function for this.
            # Efficiently implementing other FBX supported interpolation will most likely be much more complicated.
            extended_values = np.interp(sorted_all_times, times, values, left=initial_value)
        values_arrays.append(extended_values)
    return sorted_all_times, values_arrays

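
# Illustrative sketch (not part of the importer): combining two curves with different key times interpolates each
# curve over the union of times. With curve A at times [0, 10] / values [1.0, 2.0] and curve B at times [5] /
# values [7.0], the combined times are [0, 5, 10]; A extends to [1.0, 1.5, 2.0] and B extends to
# [initial_value, 7.0, 7.0], since np.interp holds the last value past a curve's range and uses `left=initial_value`
# before its first keyframe.
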
def blen_read_invalid_animation_curve(key_times, key_values):
    """FBX will parse animation curves even when their keyframe times are invalid (not strictly increasing). It's
    unclear exactly how FBX handles invalid curves, but this matches in some cases and is how the FBX IO addon has been
    handling invalid keyframe times for a long time.

    Notably, this function will also correctly parse valid animation curves, though is much slower than the trivial,
    vectorized parsing of valid curves.

    The returned keyframe times are guaranteed to be strictly increasing."""
    sorted_unique_times = np.unique(key_times)

    # Unsure if this can be vectorized with numpy, so using iteration for now.
    def index_gen():
        idx = 0
        key_times_data = key_times.data
        key_times_len = len(key_times)
        # Iterating .data, the memoryview of the array, is faster than iterating the array directly.
        for curr_fbxktime in sorted_unique_times.data:
            if key_times_data[idx] < curr_fbxktime:
                if idx >= 0:
                    idx += 1
                    if idx >= key_times_len:
                        # We have reached our last element for this curve, stay on it from now on...
                        idx = -1
            yield idx

    indices = np.fromiter(index_gen(), dtype=np.int64, count=len(sorted_unique_times))

    indexed_times = key_times[indices]
    indexed_values = key_values[indices]

    # Linear interpolate the value for each time in sorted_unique_times according to the times and values at each index
    # and the previous index.
    interpolated_values = np.empty_like(indexed_values)

    # Where the index is 0, there's no previous value to interpolate from, so we set the value without interpolating.
    # Because the indices are in increasing order, all zeroes must be at the start, so we can find the index of the last
    # zero and use that to index with a slice instead of a boolean array for performance.
    # Equivalent to, but as a slice:
    # idx_zero_mask = indices == 0
    # idx_nonzero_mask = ~idx_zero_mask
    first_nonzero_idx = np.searchsorted(indices, 0, side='right')
    idx_zero_slice = slice(0, first_nonzero_idx)  # [:first_nonzero_idx]
    idx_nonzero_slice = slice(first_nonzero_idx, None)  # [first_nonzero_idx:]

    interpolated_values[idx_zero_slice] = indexed_values[idx_zero_slice]

    indexed_times_nonzero_idx = indexed_times[idx_nonzero_slice]
    indexed_values_nonzero_idx = indexed_values[idx_nonzero_slice]
    indices_nonzero = indices[idx_nonzero_slice]
    prev_indices_nonzero = indices_nonzero - 1
    prev_indexed_times_nonzero_idx = key_times[prev_indices_nonzero]
    prev_indexed_values_nonzero_idx = key_values[prev_indices_nonzero]

    ifac_a = sorted_unique_times[idx_nonzero_slice] - prev_indexed_times_nonzero_idx
    ifac_b = indexed_times_nonzero_idx - prev_indexed_times_nonzero_idx
    # If key_times contains two (or more) duplicate times in a row, then values in `ifac_b` can be zero which would
    # result in division by zero.
    # Use the `np.errstate` context manager to suppress printing the RuntimeWarning to the system console.
    with np.errstate(divide='ignore'):
        ifac = ifac_a / ifac_b
    interpolated_values[idx_nonzero_slice] = ((indexed_values_nonzero_idx - prev_indexed_values_nonzero_idx) * ifac
                                              + prev_indexed_values_nonzero_idx)

    # If the time to interpolate at is larger than the time in indexed_times, then the value has been extrapolated.
    # Extrapolated values are excluded.
    valid_mask = indexed_times >= sorted_unique_times

    key_times = sorted_unique_times[valid_mask]
    key_values = interpolated_values[valid_mask]

    return key_times, key_values

def _convert_fbx_time_to_blender_time(key_times, blen_start_offset, fbx_start_offset, fps, fbx_ktime):
    timefac = fps / fbx_ktime

    # Convert from FBX timing to Blender timing.
    # Cannot subtract in-place because key_times could be read directly from FBX and could be used by multiple Actions.
    key_times = key_times - fbx_start_offset
    # FBX times are integers and timefac is a Python float, so the new array will be a np.float64 array.
    key_times = key_times * timefac

    key_times += blen_start_offset

    return key_times

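
# Illustrative sketch (not part of the importer): FBX stores times in KTime ticks, so with an example fbx_ktime of
# 46186158000 ticks per second and a 25 fps scene, an FBX key at 46186158000 ticks maps to
# (46186158000 - fbx_start_offset) * 25 / 46186158000 = 25.0 frames after blen_start_offset (with
# fbx_start_offset == 0).
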
def blen_read_animation_curve(fbx_curve):
    """Read an animation curve from FBX data.

    The parsed keyframe times are guaranteed to be strictly increasing."""
    key_times = parray_as_ndarray(elem_prop_first(elem_find_first(fbx_curve, b'KeyTime')))
    key_values = parray_as_ndarray(elem_prop_first(elem_find_first(fbx_curve, b'KeyValueFloat')))

    assert(len(key_values) == len(key_times))

    # The FBX SDK specifies that only one key per time is allowed and that the keys are sorted in time order.
    # https://help.autodesk.com/view/FBX/2020/ENU/?guid=FBX_Developer_Help_cpp_ref_class_fbx_anim_curve_html
    all_times_strictly_increasing = (key_times[1:] > key_times[:-1]).all()

    if all_times_strictly_increasing:
        return key_times, key_values
    else:
        # FBX will still read animation curves even if they are invalid.
        return blen_read_invalid_animation_curve(key_times, key_values)

def blen_store_keyframes(fbx_key_times, blen_fcurve, key_values, blen_start_offset, fps, fbx_ktime, fbx_start_offset=0):
    """Set all keyframe times and values for a newly created FCurve.
    Linear interpolation is currently assumed.

    This is a convenience function for calling blen_store_keyframes_multi with only a single fcurve and values array."""
    blen_store_keyframes_multi(fbx_key_times, [(blen_fcurve, key_values)], blen_start_offset, fps, fbx_ktime,
                               fbx_start_offset=fbx_start_offset)

def blen_store_keyframes_multi(fbx_key_times, fcurve_and_key_values_pairs, blen_start_offset, fps, fbx_ktime,
                               fbx_start_offset=0):
    """Set all keyframe times and values for multiple pairs of newly created FCurves and keyframe values arrays, where
    each pair has the same keyframe times.
    Linear interpolation is currently assumed."""
    bl_key_times = _convert_fbx_time_to_blender_time(fbx_key_times, blen_start_offset, fbx_start_offset, fps, fbx_ktime)
    num_keys = len(bl_key_times)

    # Compatible with C float type
    bl_keyframe_dtype = np.single
    # Compatible with C char type
    bl_enum_dtype = np.ubyte

    # The keyframe_points 'co' are accessed as flattened pairs of (time, value).
    # The key times are the same for each (blen_fcurve, key_values) pair, so only the values need to be updated for each
    # pair.
    keyframe_points_co = np.empty(len(bl_key_times) * 2, dtype=bl_keyframe_dtype)
    # Even indices are times.
    keyframe_points_co[0::2] = bl_key_times

    interpolation_array = np.full(num_keys, LINEAR_INTERPOLATION_VALUE, dtype=bl_enum_dtype)

    for blen_fcurve, key_values in fcurve_and_key_values_pairs:
        # The fcurve must be newly created and thus have no keyframe_points.
        assert(len(blen_fcurve.keyframe_points) == 0)

        # Odd indices are values.
        keyframe_points_co[1::2] = key_values

        # Add the keyframe points to the FCurve and then set the 'co' and 'interpolation' of each point.
        blen_fcurve.keyframe_points.add(num_keys)
        blen_fcurve.keyframe_points.foreach_set('co', keyframe_points_co)
        blen_fcurve.keyframe_points.foreach_set('interpolation', interpolation_array)

        # Since we inserted our keyframes in 'ultra-fast' mode, we have to update the fcurves now.
        blen_fcurve.update()

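
# Illustrative sketch (not part of the importer): `keyframe_points.foreach_set('co', ...)` expects the flattened
# (time, value) pairs, so for key times [1.0, 2.0] and values [0.5, 0.75] the buffer written above is laid out as
#   [1.0, 0.5, 2.0, 0.75]
# with times at even indices and values at odd indices.
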
def blen_read_animations_action_item(action, item, cnodes, fps, anim_offset, global_scale, shape_key_deforms,
                                     fbx_ktime):
    """
    'Bake' loc/rot/scale into the action,
    taking any pre_ and post_ matrix into account to transform from fbx into blender space.
    """
    from bpy.types import Object, PoseBone, ShapeKey, Material, Camera

    fbx_curves: dict[bytes, dict[int, FBXElem]] = {}
    for curves, fbxprop in cnodes.values():
        channels_dict = fbx_curves.setdefault(fbxprop, {})
        for (fbx_acdata, _blen_data), channel in curves.values():
            if channel in channels_dict:
                # Ignore extra curves when one has already been found for this channel because FBX's default animation
                # system implementation only uses the first curve assigned to a channel.
                # Additional curves per channel are allowed by the FBX specification, but the handling of these curves
                # is considered the responsibility of the application that created them. Note that each curve node is
                # expected to have a unique set of channels, so these additional curves with the same channel would have
                # to belong to separate curve nodes. See the FBX SDK documentation for FbxAnimCurveNode.
                continue
            channels_dict[channel] = fbx_acdata

    # Leave if no curves are attached (if a blender curve is attached to scale but without keys it defaults to 0).
    if len(fbx_curves) == 0:
        return

    if isinstance(item, Material):
        grpname = item.name
        props = [("diffuse_color", 3, grpname or "Diffuse Color")]
    elif isinstance(item, ShapeKey):
        props = [(item.path_from_id("value"), 1, "Key")]
    elif isinstance(item, Camera):
        props = [(item.path_from_id("lens"), 1, "Camera"), (item.dof.path_from_id("focus_distance"), 1, "Camera")]
    else:  # Object or PoseBone:
        if item.is_bone:
            bl_obj = item.bl_obj.pose.bones[item.bl_bone]
        else:
            bl_obj = item.bl_obj

        # We want to create actions for objects, but for bones we 'reuse' armatures' actions!
        grpname = bl_obj.name

        # Since we might get other channels animated in the end, due to all FBX transform magic,
        # we need to add curves for whole loc/rot/scale in any case.
        props = [(bl_obj.path_from_id("location"), 3, grpname or "Location"),
                 None,
                 (bl_obj.path_from_id("scale"), 3, grpname or "Scale")]
        rot_mode = bl_obj.rotation_mode
        if rot_mode == 'QUATERNION':
            props[1] = (bl_obj.path_from_id("rotation_quaternion"), 4, grpname or "Quaternion Rotation")
        elif rot_mode == 'AXIS_ANGLE':
            props[1] = (bl_obj.path_from_id("rotation_axis_angle"), 4, grpname or "Axis Angle Rotation")
        else:  # Euler
            props[1] = (bl_obj.path_from_id("rotation_euler"), 3, grpname or "Euler Rotation")

    blen_curves = [action.fcurves.new(prop, index=channel, action_group=grpname)
                   for prop, nbr_channels, grpname in props for channel in range(nbr_channels)]

    if isinstance(item, Material):
        for fbxprop, channel_to_curve in fbx_curves.items():
            assert(fbxprop == b'DiffuseColor')
            for channel, curve in channel_to_curve.items():
                assert(channel in {0, 1, 2})
                blen_curve = blen_curves[channel]
                fbx_key_times, values = blen_read_animation_curve(curve)
                blen_store_keyframes(fbx_key_times, blen_curve, values, anim_offset, fps, fbx_ktime)

    elif isinstance(item, ShapeKey):
        for fbxprop, channel_to_curve in fbx_curves.items():
            assert(fbxprop == b'DeformPercent')
            for channel, curve in channel_to_curve.items():
                assert(channel == 0)
                blen_curve = blen_curves[channel]

                fbx_key_times, values = blen_read_animation_curve(curve)
                # A fully activated shape key in FBX DeformPercent is 100.0 whereas it is 1.0 in Blender.
                values = values / 100.0
                blen_store_keyframes(fbx_key_times, blen_curve, values, anim_offset, fps, fbx_ktime)

                # Store the minimum and maximum shape key values, so that the shape key's slider range can be expanded
                # if necessary after reading all animations.
                deform_values = shape_key_deforms.setdefault(item, [])
                deform_values.append(values.min())
                deform_values.append(values.max())

    elif isinstance(item, Camera):
        for fbxprop, channel_to_curve in fbx_curves.items():
            is_focus_distance = fbxprop == b'FocusDistance'
            assert(fbxprop == b'FocalLength' or is_focus_distance)
            for channel, curve in channel_to_curve.items():
                assert(channel == 0)
                # The indices are determined by the creation of the `props` list above.
                blen_curve = blen_curves[1 if is_focus_distance else 0]

                fbx_key_times, values = blen_read_animation_curve(curve)
                if is_focus_distance:
                    # Remap the imported values from FBX to Blender.
                    values = values / 1000.0
                    values *= global_scale
                blen_store_keyframes(fbx_key_times, blen_curve, values, anim_offset, fps, fbx_ktime)

    else:  # Object or PoseBone:
        transform_data = item.fbx_transform_data

        # Each transformation curve needs to have keyframes at the times of every other transformation curve
        # (interpolating missing values), so that we can construct a matrix at every keyframe.
        transform_prop_to_attr = {
            b'Lcl Translation': transform_data.loc,
            b'Lcl Rotation': transform_data.rot,
            b'Lcl Scaling': transform_data.sca,
        }

        times_and_values_tuples = []
        initial_values = []
        channel_keys = []
        for fbxprop, channel_to_curve in fbx_curves.items():
            if fbxprop not in transform_prop_to_attr:
                # Currently, we only care about transformation curves.
                continue
            for channel, curve in channel_to_curve.items():
                assert(channel in {0, 1, 2})
                fbx_key_times, values = blen_read_animation_curve(curve)

                channel_keys.append((fbxprop, channel))

                initial_values.append(transform_prop_to_attr[fbxprop][channel])

                times_and_values_tuples.append((fbx_key_times, values))
        if not times_and_values_tuples:
            # If `times_and_values_tuples` is empty, all the imported animation curves are for properties other than
            # transformation (e.g. animated custom properties), so there is nothing to do until support for those other
            # properties is added.
            return

        # Combine the keyframe times of all the transformation curves so that each curve has a value at every time.
        combined_fbx_times, values_arrays = _combine_curve_keyframe_times(times_and_values_tuples, initial_values)

        # Convert from FBX Lcl Translation/Lcl Rotation/Lcl Scaling to the Blender location/rotation/scaling properties
        # of this Object/PoseBone.
        # The number of fcurves for the Blender properties varies depending on the rotation mode.
        num_loc_channels = 3
        num_rot_channels = 4 if rot_mode in {'QUATERNION', 'AXIS_ANGLE'} else 3  # Variations of EULER are all 3
        num_sca_channels = 3
        num_channels = num_loc_channels + num_rot_channels + num_sca_channels
        num_frames = len(combined_fbx_times)
        full_length = num_channels * num_frames

        # Do the conversion.
        flattened_channel_values_gen = _transformation_curves_gen(item, values_arrays, channel_keys)
        flattened_channel_values = np.fromiter(flattened_channel_values_gen, dtype=np.single, count=full_length)

        # Reshape to one row per frame and then view the transpose so that each row corresponds to a single channel.
        # e.g.
        # loc_channels = channel_values[:num_loc_channels]
        # rot_channels = channel_values[num_loc_channels:num_loc_channels + num_rot_channels]
        # sca_channels = channel_values[num_loc_channels + num_rot_channels:]
        channel_values = flattened_channel_values.reshape(num_frames, num_channels).T

        # Each channel has the same keyframe times, so the combined times can be passed once along with all the curves
        # and values arrays.
        blen_store_keyframes_multi(combined_fbx_times, zip(blen_curves, channel_values), anim_offset, fps, fbx_ktime)

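
# Illustrative sketch (not part of the importer): with an Euler rotation mode there are 3 + 3 + 3 channels, so for
# num_frames == 2 the flattened generator output
#   [lx0, ly0, lz0, rx0, ry0, rz0, sx0, sy0, sz0,  lx1, ly1, lz1, rx1, ry1, rz1, sx1, sy1, sz1]
# is reshaped to (2, 9) and transposed so that channel_values[0] holds all location-x keys, matching the order in
# which the FCurves were created.
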
def blen_read_animations(fbx_tmpl_astack, fbx_tmpl_alayer, stacks, scene, anim_offset, global_scale, fbx_ktime):
    """
    Recreate an action per stack/layer/object combinations.
    Only the first found action is linked to objects, more complex setups are not handled,
    it's up to user to reproduce them!
    """
    from bpy.types import ShapeKey, Material, Camera

    actions = {}
    shape_key_values = {}
    for as_uuid, ((fbx_asdata, _blen_data), alayers) in stacks.items():
        stack_name = elem_name_ensure_class(fbx_asdata, b'AnimStack')
        for al_uuid, ((fbx_aldata, _blen_data), items) in alayers.items():
            layer_name = elem_name_ensure_class(fbx_aldata, b'AnimLayer')
            for item, cnodes in items.items():
                if isinstance(item, Material):
                    id_data = item
                elif isinstance(item, ShapeKey):
                    id_data = item.id_data
                elif isinstance(item, Camera):
                    id_data = item
                else:
                    id_data = item.bl_obj
                    # XXX Ignore rigged mesh animations - those are a nightmare to handle, see note about it in
                    #     FbxImportHelperNode class definition.
                    if id_data and id_data.type == 'MESH' and id_data.parent and id_data.parent.type == 'ARMATURE':
                        continue
                if id_data is None:
                    continue

                # Create new action if needed (should always be needed, except for keyblocks from shapekeys cases).
                key = (as_uuid, al_uuid, id_data)
                action = actions.get(key)
                if action is None:
                    if stack_name == layer_name:
                        action_name = "|".join((id_data.name, stack_name))
                    else:
                        action_name = "|".join((id_data.name, stack_name, layer_name))
                    actions[key] = action = bpy.data.actions.new(action_name)
                    action.use_fake_user = True
                # If none yet assigned, assign this action to id_data.
                if not id_data.animation_data:
                    id_data.animation_data_create()
                if not id_data.animation_data.action:
                    id_data.animation_data.action = action
                # And actually populate the action!
                blen_read_animations_action_item(action, item, cnodes, scene.render.fps, anim_offset, global_scale,
                                                 shape_key_values, fbx_ktime)

    # If the minimum/maximum animated value is outside the slider range of the shape key, attempt to expand the slider
    # range until the animated range fits and has extra room to be decreased or increased further.
    # Shape key slider_min and slider_max have hard min/max values, if an imported animation uses a value outside that
    # range, a warning message will be printed to the console and the slider_min/slider_max values will end up clamped.
    shape_key_values_in_range = True
    for shape_key, deform_values in shape_key_values.items():
        min_animated_deform = min(deform_values)
        max_animated_deform = max(deform_values)
        shape_key_values_in_range &= expand_shape_key_range(shape_key, min_animated_deform)
        shape_key_values_in_range &= expand_shape_key_range(shape_key, max_animated_deform)
    if not shape_key_values_in_range:
        print("WARNING: The imported animated Value of a Shape Key is beyond the minimum/maximum allowed and will be"
              " clamped during playback.")

def blen_read_geom_layerinfo(fbx_layer):
    return (
        validate_blend_names(elem_find_first_string_as_bytes(fbx_layer, b'Name')),
        elem_find_first_string_as_bytes(fbx_layer, b'MappingInformationType'),
        elem_find_first_string_as_bytes(fbx_layer, b'ReferenceInformationType'),
    )

, blen_dtype
, item_size
):
1133 """Validate blen_data when it's not a bpy_prop_collection.
1134 Returns whether blen_data is a bpy_prop_collection"""
1135 blen_data_is_collection
= isinstance(blen_data
, bpy
.types
.bpy_prop_collection
)
1136 if not blen_data_is_collection
:
1138 assert(len(blen_data
.shape
) == 2)
1139 assert(blen_data
.shape
[1] == item_size
)
1140 assert(blen_data
.dtype
== blen_dtype
)
1141 return blen_data_is_collection
def blen_read_geom_parse_fbx_data(fbx_data, stride, item_size):
    """Parse fbx_data as an array.array into a 2d np.ndarray that shares the same memory, where each row is a single
    item."""
    # Technically stride < item_size could be supported, but there's probably not a use case for it since it would
    # result in a view of the data with self-overlapping memory.
    assert(stride >= item_size)

    # View the array.array as an np.ndarray.
    fbx_data_np = parray_as_ndarray(fbx_data)

    if stride == item_size:
        if item_size > 1:
            # Need to make sure fbx_data_np has a whole number of items to be able to view item_size elements per row.
            items_remainder = len(fbx_data_np) % item_size
            if items_remainder:
                print("ERROR: not a whole number of items in this FBX layer, skipping the partial item!")
                fbx_data_np = fbx_data_np[:-items_remainder]
            fbx_data_np = fbx_data_np.reshape(-1, item_size)
    else:
        # Create a view of fbx_data_np that is only the first item_size elements of each stride. Note that the view will
        # not be C-contiguous.
        stride_remainder = len(fbx_data_np) % stride
        if stride_remainder:
            if stride_remainder < item_size:
                print("ERROR: not a whole number of items in this FBX layer, skipping the partial item!")
                # Not enough in the remainder for a full item, so cut off the partial stride
                fbx_data_np = fbx_data_np[:-stride_remainder]
                # Reshape to one stride per row and then create a view that includes only the first item_size elements
                # of each stride.
                fbx_data_np = fbx_data_np.reshape(-1, stride)[:, :item_size]
            else:
                print("ERROR: not a whole number of strides in this FBX layer! There are a whole number of items, but"
                      " this could indicate an error!")
                # There is not a whole number of strides, but there is a whole number of items.
                # This is a pain to deal with because fbx_data_np.reshape(-1, stride) is not possible.
                # A view of just the items can be created using stride_tricks.as_strided by specifying the shape and
                # strides of the view manually.
                # Extreme care must be taken when using stride_tricks.as_strided because improper usage can result in
                # a view that gives access to memory outside the array.
                from numpy.lib import stride_tricks

                # fbx_data_np should always start off as flat and C-contiguous.
                assert(fbx_data_np.strides == (fbx_data_np.itemsize,))

                num_whole_strides = len(fbx_data_np) // stride
                # Plus the one partial stride that is enough elements for a complete item.
                num_items = num_whole_strides + 1
                shape = (num_items, item_size)

                # strides are the number of bytes to step to get to the next element, for each axis.
                step_per_item = fbx_data_np.itemsize * stride
                step_per_item_element = fbx_data_np.itemsize
                strides = (step_per_item, step_per_item_element)

                fbx_data_np = stride_tricks.as_strided(fbx_data_np, shape, strides)
        else:
            # There's a whole number of strides, so first reshape to one stride per row and then create a view that
            # includes only the first item_size elements of each stride.
            fbx_data_np = fbx_data_np.reshape(-1, stride)[:, :item_size]

    return fbx_data_np

def blen_read_geom_check_fbx_data_length(blen_data, fbx_data_np, is_indices=False):
    """Check that there are the same number of items in blen_data and fbx_data_np.

    Returns a tuple of two elements:
        0: fbx_data_np or, if fbx_data_np contains more items than blen_data, a view of fbx_data_np with the excess
           items removed
        1: Whether the returned fbx_data_np contains enough items to completely fill blen_data"""
    bl_num_items = len(blen_data)
    fbx_num_items = len(fbx_data_np)

    enough_data = fbx_num_items >= bl_num_items
    if not enough_data:
        if is_indices:
            print("ERROR: not enough indices in this FBX layer, missing data will be left as default!")
        else:
            print("ERROR: not enough data in this FBX layer, missing data will be left as default!")
    elif fbx_num_items > bl_num_items:
        if is_indices:
            print("ERROR: too many indices in this FBX layer, skipping excess!")
        else:
            print("ERROR: too much data in this FBX layer, skipping excess!")
        fbx_data_np = fbx_data_np[:bl_num_items]

    return fbx_data_np, enough_data

def blen_read_geom_xform(fbx_data_np, xform):
    """xform is either None, or a function that takes fbx_data_np as its only positional argument and returns an
    np.ndarray with the same total number of elements as fbx_data_np.
    It is acceptable for xform to return an array with a different dtype to fbx_data_np.

    Returns xform(fbx_data_np) when xform is not None and ensures the result of xform(fbx_data_np) has the same shape as
    fbx_data_np before returning it.
    When xform is None, fbx_data_np is returned as is."""
    if xform is not None:
        item_size = fbx_data_np.shape[1]
        fbx_total_data = fbx_data_np.size
        fbx_data_np = xform(fbx_data_np)
        # The amount of data should not be changed by xform
        assert(fbx_data_np.size == fbx_total_data)
        # Ensure fbx_data_np is still item_size elements per row
        if len(fbx_data_np.shape) != 2 or fbx_data_np.shape[1] != item_size:
            fbx_data_np = fbx_data_np.reshape(-1, item_size)
    return fbx_data_np

def blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size, descr,
                                            xform):
    """Generic fbx_layer to blen_data foreach setter for Direct layers.
    blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
    fbx_data must be an array.array."""
    fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
    fbx_data_np, enough_data = blen_read_geom_check_fbx_data_length(blen_data, fbx_data_np)
    fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)

    blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)

    if blen_data_is_collection:
        if not enough_data:
            blen_total_data = len(blen_data) * item_size
            buffer = np.empty(blen_total_data, dtype=blen_dtype)
            # It's not clear what values should be used for the missing data, so read the current values into a buffer.
            blen_data.foreach_get(blen_attr, buffer)

            # Change the buffer shape to one item per row
            buffer.shape = (-1, item_size)

            # Copy the fbx data into the start of the buffer
            buffer[:len(fbx_data_np)] = fbx_data_np
        else:
            # Convert the buffer to the Blender C type of blen_attr
            buffer = astype_view_signedness(fbx_data_np, blen_dtype)

        # Set blen_attr of blen_data. The buffer must be flat and C-contiguous, which ravel() ensures
        blen_data.foreach_set(blen_attr, buffer.ravel())
    else:
        assert(blen_data.size % item_size == 0)
        blen_data = blen_data.view()
        blen_data.shape = (-1, item_size)
        blen_data[:len(fbx_data_np)] = fbx_data_np

def blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_data, fbx_layer_index, stride,
                                             item_size, descr, xform):
    """Generic fbx_layer to blen_data foreach setter for IndexToDirect layers.
    blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
    fbx_data must be an array.array or a 1d np.ndarray."""
    fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
    fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)

    # fbx_layer_index is allowed to be a 1d np.ndarray for use with blen_read_geom_array_foreach_set_looptovert.
    if not isinstance(fbx_layer_index, np.ndarray):
        fbx_layer_index = parray_as_ndarray(fbx_layer_index)

    fbx_layer_index, enough_indices = blen_read_geom_check_fbx_data_length(blen_data, fbx_layer_index, is_indices=True)

    blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)

    blen_data_items_len = len(blen_data)
    blen_data_len = blen_data_items_len * item_size
    fbx_num_items = len(fbx_data_np)

    # Find all indices that are out of bounds of fbx_data_np.
    min_index_inclusive = -fbx_num_items
    max_index_inclusive = fbx_num_items - 1
    valid_index_mask = np.equal(fbx_layer_index, fbx_layer_index.clip(min_index_inclusive, max_index_inclusive))
    indices_invalid = not valid_index_mask.all()

    fbx_data_items = fbx_data_np.reshape(-1, item_size)

    if indices_invalid or not enough_indices:
        if blen_data_is_collection:
            buffer = np.empty(blen_data_len, dtype=blen_dtype)
            buffer_item_view = buffer.view()
            buffer_item_view.shape = (-1, item_size)
            # Since we don't know what the default values should be for the missing data, read the current values into a
            # buffer.
            blen_data.foreach_get(blen_attr, buffer)
        else:
            buffer_item_view = blen_data

        if not enough_indices:
            # Reduce the length of the view to the same length as the number of indices.
            buffer_item_view = buffer_item_view[:len(fbx_layer_index)]

        # Copy the result of indexing fbx_data_items by each element in fbx_layer_index into the buffer.
        if indices_invalid:
            print("ERROR: indices in this FBX layer out of bounds of the FBX data, skipping invalid indices!")
            buffer_item_view[valid_index_mask] = fbx_data_items[fbx_layer_index[valid_index_mask]]
        else:
            buffer_item_view[:] = fbx_data_items[fbx_layer_index]

        if blen_data_is_collection:
            blen_data.foreach_set(blen_attr, buffer.ravel())
    else:
        if blen_data_is_collection:
            # Cast the buffer to the Blender C type of blen_attr
            fbx_data_items = astype_view_signedness(fbx_data_items, blen_dtype)
            buffer_items = fbx_data_items[fbx_layer_index]
            blen_data.foreach_set(blen_attr, buffer_items.ravel())
        else:
            blen_data[:] = fbx_data_items[fbx_layer_index]

def blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size, descr,
                                             xform):
    """Generic fbx_layer to blen_data foreach setter for AllSame layers.
    blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
    fbx_data must be an array.array."""
    fbx_data_np = blen_read_geom_parse_fbx_data(fbx_data, stride, item_size)
    fbx_data_np = blen_read_geom_xform(fbx_data_np, xform)
    blen_data_is_collection = blen_read_geom_validate_blen_data(blen_data, blen_dtype, item_size)
    fbx_items_len = len(fbx_data_np)
    blen_items_len = len(blen_data)

    if fbx_items_len < 1:
        print("ERROR: not enough data in this FBX layer, skipping!")
        return

    if blen_data_is_collection:
        # Create an array filled with the value from fbx_data_np
        buffer = np.full((blen_items_len, item_size), fbx_data_np[0], dtype=blen_dtype)

        blen_data.foreach_set(blen_attr, buffer.ravel())
    else:
        blen_data[:] = fbx_data_np[0]

def blen_read_geom_array_foreach_set_looptovert(mesh, blen_data, blen_attr, blen_dtype, fbx_data, stride, item_size,
                                                descr, xform):
    """Generic fbx_layer to blen_data foreach setter for face corner ByVertice layers.
    blen_data must be a bpy_prop_collection or 2d np.ndarray whose second axis length is item_size.
    fbx_data must be an array.array"""
    # The fbx_data is mapped to vertices. To expand fbx_data to face corners, get an array of the vertex index of each
    # face corner that will then be used to index fbx_data.
    corner_vertex_indices = MESH_ATTRIBUTE_CORNER_VERT.to_ndarray(mesh.attributes)
    blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_data, corner_vertex_indices, stride,
                                             item_size, descr, xform)
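# For example, per-vertex data [a, b, c, d] on a mesh whose face corners reference vertices [2, 0, 1, 3, 2] is
# expanded to per-corner data [c, a, b, d, c] simply by indexing the vertex-mapped array with the corner
# vertex indices gathered above.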
# generic error printers.
def blen_read_geom_array_error_mapping(descr, fbx_layer_mapping, quiet=False):
    if not quiet:
        print("warning layer %r mapping type unsupported: %r" % (descr, fbx_layer_mapping))


def blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet=False):
    if not quiet:
        print("warning layer %r ref type unsupported: %r" % (descr, fbx_layer_ref))
def blen_read_geom_array_mapped_vert(
        mesh, blen_data, blen_attr, blen_dtype,
        fbx_layer_data, fbx_layer_index,
        fbx_layer_mapping, fbx_layer_ref,
        stride, item_size, descr,
        xform=None, quiet=False,
):
    if fbx_layer_mapping == b'ByVertice':
        if fbx_layer_ref == b'IndexToDirect':
            # XXX Looks like we often get no fbx_layer_index in this case, shall not happen but happens...
            # We fallback to 'Direct' mapping in this case.
            #~ assert(fbx_layer_index is not None)
            if fbx_layer_index is None:
                blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
                                                        item_size, descr, xform)
            else:
                blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_layer_data,
                                                         fbx_layer_index, stride, item_size, descr, xform)
            return True
        elif fbx_layer_ref == b'Direct':
            assert(fbx_layer_index is None)
            blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
                                                    descr, xform)
            return True
        blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
    elif fbx_layer_mapping == b'AllSame':
        if fbx_layer_ref == b'IndexToDirect':
            assert(fbx_layer_index is None)
            blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
                                                     item_size, descr, xform)
            return True
        blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
    else:
        blen_read_geom_array_error_mapping(descr, fbx_layer_mapping, quiet)

    return False
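# Each FBX layer element pairs a MappingInformationType (ByVertice, ByEdge, ByPolygon, ByPolygonVertex, AllSame)
# with a ReferenceInformationType (Direct or IndexToDirect). The blen_read_geom_array_mapped_* dispatchers route
# every supported combination to one of the foreach_set helpers above and return True on success, False when the
# combination is unsupported, so callers can tell whether the attribute was actually filled.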
def blen_read_geom_array_mapped_edge(
        mesh, blen_data, blen_attr, blen_dtype,
        fbx_layer_data, fbx_layer_index,
        fbx_layer_mapping, fbx_layer_ref,
        stride, item_size, descr,
        xform=None, quiet=False,
):
    if fbx_layer_mapping == b'ByEdge':
        if fbx_layer_ref == b'Direct':
            blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
                                                    descr, xform)
            return True
        blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
    elif fbx_layer_mapping == b'AllSame':
        if fbx_layer_ref == b'IndexToDirect':
            assert(fbx_layer_index is None)
            blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
                                                     item_size, descr, xform)
            return True
        blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
    else:
        blen_read_geom_array_error_mapping(descr, fbx_layer_mapping, quiet)

    return False
def blen_read_geom_array_mapped_polygon(
        mesh, blen_data, blen_attr, blen_dtype,
        fbx_layer_data, fbx_layer_index,
        fbx_layer_mapping, fbx_layer_ref,
        stride, item_size, descr,
        xform=None, quiet=False,
):
    if fbx_layer_mapping == b'ByPolygon':
        if fbx_layer_ref == b'IndexToDirect':
            # XXX Looks like we often get no fbx_layer_index in this case, shall not happen but happens...
            # We fallback to 'Direct' mapping in this case.
            #~ assert(fbx_layer_index is not None)
            if fbx_layer_index is None:
                blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
                                                        item_size, descr, xform)
            else:
                blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_layer_data,
                                                         fbx_layer_index, stride, item_size, descr, xform)
            return True
        elif fbx_layer_ref == b'Direct':
            blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
                                                    descr, xform)
            return True
        blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
    elif fbx_layer_mapping == b'AllSame':
        if fbx_layer_ref == b'IndexToDirect':
            assert(fbx_layer_index is None)
            blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
                                                     item_size, descr, xform)
            return True
        blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
    else:
        blen_read_geom_array_error_mapping(descr, fbx_layer_mapping, quiet)

    return False
def blen_read_geom_array_mapped_polyloop(
        mesh, blen_data, blen_attr, blen_dtype,
        fbx_layer_data, fbx_layer_index,
        fbx_layer_mapping, fbx_layer_ref,
        stride, item_size, descr,
        xform=None, quiet=False,
):
    if fbx_layer_mapping == b'ByPolygonVertex':
        if fbx_layer_ref == b'IndexToDirect':
            # XXX Looks like we often get no fbx_layer_index in this case, shall not happen but happens...
            # We fallback to 'Direct' mapping in this case.
            #~ assert(fbx_layer_index is not None)
            if fbx_layer_index is None:
                blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
                                                        item_size, descr, xform)
            else:
                blen_read_geom_array_foreach_set_indexed(blen_data, blen_attr, blen_dtype, fbx_layer_data,
                                                         fbx_layer_index, stride, item_size, descr, xform)
            return True
        elif fbx_layer_ref == b'Direct':
            blen_read_geom_array_foreach_set_direct(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride, item_size,
                                                    descr, xform)
            return True
        blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
    elif fbx_layer_mapping == b'ByVertice':
        if fbx_layer_ref == b'Direct':
            assert(fbx_layer_index is None)
            blen_read_geom_array_foreach_set_looptovert(mesh, blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
                                                        item_size, descr, xform)
            return True
        blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
    elif fbx_layer_mapping == b'AllSame':
        if fbx_layer_ref == b'IndexToDirect':
            assert(fbx_layer_index is None)
            blen_read_geom_array_foreach_set_allsame(blen_data, blen_attr, blen_dtype, fbx_layer_data, stride,
                                                     item_size, descr, xform)
            return True
        blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
    else:
        blen_read_geom_array_error_mapping(descr, fbx_layer_mapping, quiet)

    return False
def blen_read_geom_layer_material(fbx_obj, mesh):
    fbx_layer = elem_find_first(fbx_obj, b'LayerElementMaterial')

    if fbx_layer is None:
        return

    (fbx_layer_name,
     fbx_layer_mapping,
     fbx_layer_ref,
     ) = blen_read_geom_layerinfo(fbx_layer)

    layer_id = b'Materials'
    fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, layer_id))

    blen_data = MESH_ATTRIBUTE_MATERIAL_INDEX.ensure(mesh.attributes).data
    fbx_item_size = 1
    assert(fbx_item_size == MESH_ATTRIBUTE_MATERIAL_INDEX.item_size)
    blen_read_geom_array_mapped_polygon(
        mesh, blen_data, MESH_ATTRIBUTE_MATERIAL_INDEX.foreach_attribute, MESH_ATTRIBUTE_MATERIAL_INDEX.dtype,
        fbx_layer_data, None,
        fbx_layer_mapping, fbx_layer_ref,
        1, fbx_item_size, layer_id,
    )
def blen_read_geom_layer_uv(fbx_obj, mesh):
    for layer_id in (b'LayerElementUV',):
        for fbx_layer in elem_find_iter(fbx_obj, layer_id):
            # all should be valid
            (fbx_layer_name,
             fbx_layer_mapping,
             fbx_layer_ref,
             ) = blen_read_geom_layerinfo(fbx_layer)

            fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, b'UV'))
            fbx_layer_index = elem_prop_first(elem_find_first(fbx_layer, b'UVIndex'))

            # Always init our new layers with (0, 0) UVs.
            uv_lay = mesh.uv_layers.new(name=fbx_layer_name, do_init=False)
            if uv_lay is None:
                print("Failed to add {%r %r} UVLayer to %r (probably too many of them?)"
                      "" % (layer_id, fbx_layer_name, mesh.name))
                continue

            blen_data = uv_lay.uv

            # some valid files omit this data
            if fbx_layer_data is None:
                print("%r %r missing data" % (layer_id, fbx_layer_name))
                continue

            blen_read_geom_array_mapped_polyloop(
                mesh, blen_data, "vector", np.single,
                fbx_layer_data, fbx_layer_index,
                fbx_layer_mapping, fbx_layer_ref,
                2, 2, layer_id,
            )
def blen_read_geom_layer_color(fbx_obj, mesh, colors_type):
    if colors_type == 'NONE':
        return
    use_srgb = colors_type == 'SRGB'
    layer_type = 'BYTE_COLOR' if use_srgb else 'FLOAT_COLOR'
    color_prop_name = "color_srgb" if use_srgb else "color"
    # almost same as UVs
    for layer_id in (b'LayerElementColor',):
        for fbx_layer in elem_find_iter(fbx_obj, layer_id):
            # all should be valid
            (fbx_layer_name,
             fbx_layer_mapping,
             fbx_layer_ref,
             ) = blen_read_geom_layerinfo(fbx_layer)

            fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, b'Colors'))
            fbx_layer_index = elem_prop_first(elem_find_first(fbx_layer, b'ColorIndex'))

            color_lay = mesh.color_attributes.new(name=fbx_layer_name, type=layer_type, domain='CORNER')

            if color_lay is None:
                print("Failed to add {%r %r} vertex color layer to %r (probably too many of them?)"
                      "" % (layer_id, fbx_layer_name, mesh.name))
                continue

            blen_data = color_lay.data

            # some valid files omit this data
            if fbx_layer_data is None:
                print("%r %r missing data" % (layer_id, fbx_layer_name))
                continue

            blen_read_geom_array_mapped_polyloop(
                mesh, blen_data, color_prop_name, np.single,
                fbx_layer_data, fbx_layer_index,
                fbx_layer_mapping, fbx_layer_ref,
                4, 4, layer_id,
            )
def blen_read_geom_layer_smooth(fbx_obj, mesh):
    fbx_layer = elem_find_first(fbx_obj, b'LayerElementSmoothing')

    if fbx_layer is None:
        return

    # all should be valid
    (fbx_layer_name,
     fbx_layer_mapping,
     fbx_layer_ref,
     ) = blen_read_geom_layerinfo(fbx_layer)

    layer_id = b'Smoothing'
    fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, layer_id))

    # udk has 'Direct' mapped, with no Smoothing, not sure why, but ignore these
    if fbx_layer_data is None:
        return

    if fbx_layer_mapping == b'ByEdge':
        # some models have bad edge data, we can't use this info...
        if not mesh.edges:
            print("warning skipping sharp edges data, no valid edges...")
            return

        blen_data = MESH_ATTRIBUTE_SHARP_EDGE.ensure(mesh.attributes).data
        fbx_item_size = 1
        assert(fbx_item_size == MESH_ATTRIBUTE_SHARP_EDGE.item_size)
        blen_read_geom_array_mapped_edge(
            mesh, blen_data, MESH_ATTRIBUTE_SHARP_EDGE.foreach_attribute, MESH_ATTRIBUTE_SHARP_EDGE.dtype,
            fbx_layer_data, None,
            fbx_layer_mapping, fbx_layer_ref,
            1, fbx_item_size, layer_id,
            xform=np.logical_not,  # in FBX, 0 (False) is sharp, but in Blender True is sharp.
        )
    elif fbx_layer_mapping == b'ByPolygon':
        sharp_face = MESH_ATTRIBUTE_SHARP_FACE.ensure(mesh.attributes)
        blen_data = sharp_face.data
        fbx_item_size = 1
        assert(fbx_item_size == MESH_ATTRIBUTE_SHARP_FACE.item_size)
        sharp_face_set_successfully = blen_read_geom_array_mapped_polygon(
            mesh, blen_data, MESH_ATTRIBUTE_SHARP_FACE.foreach_attribute, MESH_ATTRIBUTE_SHARP_FACE.dtype,
            fbx_layer_data, None,
            fbx_layer_mapping, fbx_layer_ref,
            1, fbx_item_size, layer_id,
            xform=lambda s: (s == 0),  # smoothgroup bitflags, treat as booleans for now
        )
        if not sharp_face_set_successfully:
            mesh.attributes.remove(sharp_face)
    else:
        print("warning layer %r mapping type unsupported: %r" % (fbx_layer.id, fbx_layer_mapping))
def blen_read_geom_layer_edge_crease(fbx_obj, mesh):
    fbx_layer = elem_find_first(fbx_obj, b'LayerElementEdgeCrease')

    if fbx_layer is None:
        return False

    # all should be valid
    (fbx_layer_name,
     fbx_layer_mapping,
     fbx_layer_ref,
     ) = blen_read_geom_layerinfo(fbx_layer)

    if fbx_layer_mapping != b'ByEdge':
        return False

    layer_id = b'EdgeCrease'
    fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, layer_id))

    # some models have bad edge data, we can't use this info...
    if not mesh.edges:
        print("warning skipping edge crease data, no valid edges...")
        return False

    if fbx_layer_mapping == b'ByEdge':
        # some models have bad edge data, we can't use this info...
        if not mesh.edges:
            print("warning skipping edge crease data, no valid edges...")
            return False

        blen_data = mesh.edge_creases_ensure().data
        return blen_read_geom_array_mapped_edge(
            mesh, blen_data, "value", np.single,
            fbx_layer_data, None,
            fbx_layer_mapping, fbx_layer_ref,
            1, 1, layer_id,
            # Blender squares those values before sending them to OpenSubdiv, when other software don't,
            # so we need to compensate that to get similar results through FBX...
            xform=np.sqrt,
        )
    else:
        print("warning layer %r mapping type unsupported: %r" % (fbx_layer.id, fbx_layer_mapping))
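# The square-root transform applied above mirrors the comment in the call: Blender squares crease values before
# handing them to OpenSubdiv, so an FBX crease of 0.25 is stored as 0.5 on import and ends up back at 0.25 at
# subdivision time, matching other software.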
def blen_read_geom_layer_normal(fbx_obj, mesh, xform=None):
    fbx_layer = elem_find_first(fbx_obj, b'LayerElementNormal')

    if fbx_layer is None:
        return False

    (fbx_layer_name,
     fbx_layer_mapping,
     fbx_layer_ref,
     ) = blen_read_geom_layerinfo(fbx_layer)

    layer_id = b'Normals'
    fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, layer_id))
    fbx_layer_index = elem_prop_first(elem_find_first(fbx_layer, b'NormalsIndex'))

    if fbx_layer_data is None:
        print("warning %r %r missing data" % (layer_id, fbx_layer_name))
        return False

    # Normals are temporarily set here so that they can be retrieved again after a call to Mesh.validate().
    bl_norm_dtype = np.single
    item_size = 3
    # try loops, then polygons, then vertices.
    tries = ((mesh.attributes["temp_custom_normals"].data, "Loops", False, blen_read_geom_array_mapped_polyloop),
             (mesh.polygons, "Polygons", True, blen_read_geom_array_mapped_polygon),
             (mesh.vertices, "Vertices", True, blen_read_geom_array_mapped_vert))
    for blen_data, blen_data_type, is_fake, func in tries:
        bdata = np.zeros((len(blen_data), item_size), dtype=bl_norm_dtype) if is_fake else blen_data
        if func(mesh, bdata, "vector", bl_norm_dtype,
                fbx_layer_data, fbx_layer_index, fbx_layer_mapping, fbx_layer_ref, 3, item_size, layer_id, xform, True):
            if blen_data_type == "Polygons":
                # To expand to per-loop normals, repeat each per-polygon normal by the number of loops of each polygon.
                poly_loop_totals = np.empty(len(mesh.polygons), dtype=np.uintc)
                mesh.polygons.foreach_get("loop_total", poly_loop_totals)
                loop_normals = np.repeat(bdata, poly_loop_totals, axis=0)
                mesh.attributes["temp_custom_normals"].data.foreach_set("vector", loop_normals.ravel())
            elif blen_data_type == "Vertices":
                # We have to copy vnors to lnors! Far from elegant, but simple.
                loop_vertex_indices = MESH_ATTRIBUTE_CORNER_VERT.to_ndarray(mesh.attributes)
                mesh.attributes["temp_custom_normals"].data.foreach_set("vector", bdata[loop_vertex_indices].ravel())
            return True

    blen_read_geom_array_error_mapping("normal", fbx_layer_mapping)
    blen_read_geom_array_error_ref("normal", fbx_layer_ref)
    return False
def blen_read_geom(fbx_tmpl, fbx_obj, settings):
    # Vertices are in object space, but we are post-multiplying all transforms with the inverse of the
    # global matrix, so we need to apply the global matrix to the vertices to get the correct result.
    geom_mat_co = settings.global_matrix if settings.bake_space_transform else None
    # We need to apply the inverse transpose of the global matrix when transforming normals.
    geom_mat_no = Matrix(settings.global_matrix_inv_transposed) if settings.bake_space_transform else None
    if geom_mat_no is not None:
        # Remove translation & scaling!
        geom_mat_no.translation = Vector()
        geom_mat_no.normalize()

    # TODO, use 'fbx_tmpl'
    elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'Geometry')

    fbx_verts = elem_prop_first(elem_find_first(fbx_obj, b'Vertices'))
    fbx_polys = elem_prop_first(elem_find_first(fbx_obj, b'PolygonVertexIndex'))
    fbx_edges = elem_prop_first(elem_find_first(fbx_obj, b'Edges'))

    # The dtypes when empty don't matter, but are set to what the fbx arrays are expected to be.
    fbx_verts = parray_as_ndarray(fbx_verts) if fbx_verts else np.empty(0, dtype=data_types.ARRAY_FLOAT64)
    fbx_polys = parray_as_ndarray(fbx_polys) if fbx_polys else np.empty(0, dtype=data_types.ARRAY_INT32)
    fbx_edges = parray_as_ndarray(fbx_edges) if fbx_edges else np.empty(0, dtype=data_types.ARRAY_INT32)

    # Each vert is a 3d vector so is made of 3 components.
    tot_verts = len(fbx_verts) // 3
    if tot_verts * 3 != len(fbx_verts):
        print("ERROR: Not a whole number of vertices. Ignoring the partial vertex!")
        # Remove any remainder.
        fbx_verts = fbx_verts[:tot_verts * 3]

    tot_loops = len(fbx_polys)
    tot_edges = len(fbx_edges)

    mesh = bpy.data.meshes.new(name=elem_name_utf8)
    attributes = mesh.attributes

    if geom_mat_co is not None:
        fbx_verts = vcos_transformed(fbx_verts, geom_mat_co, MESH_ATTRIBUTE_POSITION.dtype)
    else:
        fbx_verts = fbx_verts.astype(MESH_ATTRIBUTE_POSITION.dtype, copy=False)

    mesh.vertices.add(tot_verts)
    MESH_ATTRIBUTE_POSITION.foreach_set(attributes, fbx_verts.ravel())

    if tot_loops:
        bl_loop_start_dtype = np.uintc

        mesh.loops.add(tot_loops)
        # The end of each polygon is specified by an inverted index.
        fbx_loop_end_idx = np.flatnonzero(fbx_polys < 0)

        tot_polys = len(fbx_loop_end_idx)

        # Un-invert the loop ends.
        fbx_polys[fbx_loop_end_idx] ^= -1
        # Set loop vertex indices, casting to the Blender C type first for performance.
        MESH_ATTRIBUTE_CORNER_VERT.foreach_set(
            attributes, astype_view_signedness(fbx_polys, MESH_ATTRIBUTE_CORNER_VERT.dtype))

        poly_loop_starts = np.empty(tot_polys, dtype=bl_loop_start_dtype)
        # The first loop is always a loop start.
        poly_loop_starts[0] = 0
        # Ignoring the last loop end, the indices after every loop end are the remaining loop starts.
        poly_loop_starts[1:] = fbx_loop_end_idx[:-1] + 1

        mesh.polygons.add(tot_polys)
        mesh.polygons.foreach_set("loop_start", poly_loop_starts)
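        # Worked example: an FBX quad+triangle stream [0, 1, 2, -4, 4, 5, -7] marks polygon ends with bitwise-inverted
        # indices, so flatnonzero(< 0) gives loop ends [3, 6], XOR with -1 restores vertex indices 3 and 6, and the
        # loop starts computed above become [0, 4].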
        blen_read_geom_layer_material(fbx_obj, mesh)
        blen_read_geom_layer_uv(fbx_obj, mesh)
        blen_read_geom_layer_color(fbx_obj, mesh, settings.colors_type)

    if tot_edges:
        # edges in fact index the polygons (NOT the vertices)
        if tot_loops:
            # The first vertex index of each edge is the vertex index of the corresponding loop in fbx_polys.
            edges_a = fbx_polys[fbx_edges]

            # The second vertex index of each edge is the vertex index of the next loop in the same polygon. The
            # complexity here is that if the first vertex index was the last loop of that polygon in fbx_polys, the next
            # loop in the polygon is the first loop of that polygon, which is not the next loop in fbx_polys.

            # Copy fbx_polys, but rolled backwards by 1 so that indexing the result by [fbx_edges] will get the next
            # loop of the same polygon unless the first vertex index was the last loop of the polygon.
            fbx_polys_next = np.roll(fbx_polys, -1)
            # Get the first loop of each polygon and set them into fbx_polys_next at the same indices as the last loop
            # of each polygon in fbx_polys.
            fbx_polys_next[fbx_loop_end_idx] = fbx_polys[poly_loop_starts]

            # Indexing fbx_polys_next by fbx_edges now gets the vertex index of the next loop in fbx_polys.
            edges_b = fbx_polys_next[fbx_edges]

            # edges_a and edges_b need to be combined so that the first vertex index of each edge is immediately
            # followed by the second vertex index of that same edge.
            # Stack edges_a and edges_b as individual columns like np.column_stack((edges_a, edges_b)).
            # np.concatenate is used because np.column_stack doesn't allow specifying the dtype of the returned array.
            edges_conv = np.concatenate((edges_a.reshape(-1, 1), edges_b.reshape(-1, 1)),
                                        axis=1, dtype=MESH_ATTRIBUTE_EDGE_VERTS.dtype, casting='unsafe')

            # Add the edges and set their vertex indices.
            mesh.edges.add(len(edges_conv))
            # ravel() because edges_conv must be flat and C-contiguous when passed to foreach_set.
            MESH_ATTRIBUTE_EDGE_VERTS.foreach_set(attributes, edges_conv.ravel())
        else:
            print("ERROR: No polygons, but edges exist. Ignoring the edges!")
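    # Worked example of the edge pairing: with fbx_polys = [0, 1, 2, 3] (a single quad) and fbx_edges = [1, 3],
    # edges_a is [1, 3]; np.roll gives [1, 2, 3, 0] and writing the polygon's first vertex into its end slot keeps
    # it [1, 2, 3, 0], so edges_b is [2, 0] and the imported edges are (1, 2) and (3, 0).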
    # must be after edge, face loading.
    blen_read_geom_layer_smooth(fbx_obj, mesh)

    blen_read_geom_layer_edge_crease(fbx_obj, mesh)

    ok_normals = False
    if settings.use_custom_normals:
        # Note: we store 'temp' normals in loops, since validate() may alter final mesh,
        #       we can only set custom lnors *after* calling it.
        mesh.attributes.new("temp_custom_normals", 'FLOAT_VECTOR', 'CORNER')
        if geom_mat_no is None:
            ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh)
        else:
            ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh,
                                                     lambda v_array: nors_transformed(v_array, geom_mat_no))

    mesh.validate(clean_customdata=False)  # *Very* important to not remove lnors here!

    if ok_normals:
        bl_nors_dtype = np.single
        clnors = np.empty(len(mesh.loops) * 3, dtype=bl_nors_dtype)
        mesh.attributes["temp_custom_normals"].data.foreach_get("vector", clnors)

        # Iterating clnors into a nested tuple first is faster than passing clnors.reshape(-1, 3) directly into
        # normals_split_custom_set. We use clnors.data since it is a memoryview, which is faster to iterate than clnors.
        mesh.normals_split_custom_set(tuple(zip(*(iter(clnors.data),) * 3)))
    if settings.use_custom_normals:
        mesh.attributes.remove(mesh.attributes["temp_custom_normals"])

    if settings.use_custom_props:
        blen_read_custom_properties(fbx_obj, mesh, settings)

    return mesh
def blen_read_shapes(fbx_tmpl, fbx_data, objects, me, scene):
    if not fbx_data:
        # No shape key data. Nothing to do.
        return

    me_vcos = MESH_ATTRIBUTE_POSITION.to_ndarray(me.attributes)
    me_vcos_vector_view = me_vcos.reshape(-1, 3)

    objects = list({node.bl_obj for node in objects})

    # Blender has a hard minimum and maximum shape key Value. If an imported shape key has a value outside this range it
    # will be clamped, and we'll print a warning message to the console.
    shape_key_values_in_range = True
    bc_uuid_to_keyblocks = {}
    for bc_uuid, fbx_sdata, fbx_bcdata, shapes_assigned_to_channel in fbx_data:
        num_shapes_assigned_to_channel = len(shapes_assigned_to_channel)
        if num_shapes_assigned_to_channel > 1:
            # Relevant design task: #104698
            raise RuntimeError("FBX in-between Shapes are not currently supported")  # See bug report #84111
        elem_name_utf8 = elem_name_ensure_class(fbx_sdata, b'Geometry')
        indices = elem_prop_first(elem_find_first(fbx_sdata, b'Indexes'))
        dvcos = elem_prop_first(elem_find_first(fbx_sdata, b'Vertices'))

        indices = parray_as_ndarray(indices) if indices else np.empty(0, dtype=data_types.ARRAY_INT32)
        dvcos = parray_as_ndarray(dvcos) if dvcos else np.empty(0, dtype=data_types.ARRAY_FLOAT64)

        # If there's not a whole number of vectors, trim off the remainder.
        # 3 components per vector.
        remainder = len(dvcos) % 3
        if remainder:
            dvcos = dvcos[:-remainder]
        dvcos = dvcos.reshape(-1, 3)

        # There must be the same number of indices as vertex coordinate differences.
        assert(len(indices) == len(dvcos))

        # We completely ignore normals here!
        weight = elem_prop_first(elem_find_first(fbx_bcdata, b'DeformPercent'), default=100.0) / 100.0

        # The FullWeights array stores the deformation percentages of the BlendShapeChannel that fully activate each
        # Shape assigned to the BlendShapeChannel. Blender also uses this array to store Vertex Group weights, but this
        # is not part of the FBX standard.
        full_weights = elem_prop_first(elem_find_first(fbx_bcdata, b'FullWeights'))
        full_weights = parray_as_ndarray(full_weights) if full_weights else np.empty(0, dtype=data_types.ARRAY_FLOAT64)

        # Special case for Blender exported Shape Keys with a Vertex Group assigned. The Vertex Group weights are stored
        # in the FullWeights array.
        # XXX - It's possible, though very rare, to get a false positive here and create a Vertex Group when we
        # shouldn't. This should only be possible when there are extraneous FullWeights or when there is a single
        # FullWeight and its value is not 100.0.
        if (
                # Blender exported Shape Keys only ever export as 1 Shape per BlendShapeChannel.
                num_shapes_assigned_to_channel == 1
                # There should be one vertex weight for each vertex moved by the Shape.
                and len(full_weights) == len(indices)
                # Skip creating a Vertex Group when all the weights are 100.0 because such a Vertex Group has no effect.
                # This also avoids creating a Vertex Group for imported Shapes that only move a single vertex because
                # their BlendShapeChannel's singular FullWeight is expected to always be 100.0.
                and not np.all(full_weights == 100.0)
                # Blender vertex weights are always within the [0.0, 1.0] range (scaled to [0.0, 100.0] when saving to
                # FBX). This can eliminate imported BlendShapeChannels from Unreal that have extraneous FullWeights
                # because the extraneous values are usually negative.
                and np.all((full_weights >= 0.0) & (full_weights <= 100.0))
        ):
            # Not doing the division in-place because it's technically possible for FBX BlendShapeChannels to be used by
            # more than one FBX BlendShape, though this shouldn't be the case for Blender exported Shape Keys.
            vgweights = full_weights / 100.0
        else:
            vgweights = None
            # There must be a FullWeight for each Shape. Any extra FullWeights are ignored.
            assert(len(full_weights) >= num_shapes_assigned_to_channel)
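        # For example, a Blender-authored channel whose FullWeights match the Indexes length with values like
        # [30.0, 75.0, ...] (not all 100.0, all within 0-100) becomes vertex-group weights [0.3, 0.75, ...],
        # while a plain channel whose single FullWeight is 100.0 is left without a Vertex Group.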
        # To add shape keys to the mesh, an Object using the mesh is needed.
        if me.shape_keys is None:
            objects[0].shape_key_add(name="Basis", from_mix=False)
        kb = objects[0].shape_key_add(name=elem_name_utf8, from_mix=False)
        me.shape_keys.use_relative = True  # Should already be set as such.

        # Only need to set the shape key co if there are any non-zero dvcos.
        if dvcos.any():
            shape_cos = me_vcos_vector_view.copy()
            shape_cos[indices] += dvcos
            kb.points.foreach_set("co", shape_cos.ravel())

        shape_key_values_in_range &= expand_shape_key_range(kb, weight)

        # Add vgroup if necessary.
        if vgweights is not None:
            # VertexGroup.add only allows sequences of int indices, but iterating the indices array directly would
            # produce numpy scalars of types such as np.int32. The underlying memoryview of the indices array, however,
            # does produce standard Python ints when iterated, so pass indices.data to add_vgroup_to_objects instead of
            # indices.
            # memoryviews tend to be faster to iterate than numpy arrays anyway, so vgweights.data is passed too.
            add_vgroup_to_objects(indices.data, vgweights.data, kb.name, objects)
            kb.vertex_group = kb.name

        bc_uuid_to_keyblocks.setdefault(bc_uuid, []).append(kb)

    if not shape_key_values_in_range:
        print("WARNING: The imported Value of a Shape Key on the Mesh '%s' is beyond the minimum/maximum allowed and"
              " has been clamped." % me.name)

    return bc_uuid_to_keyblocks
def blen_read_material(fbx_tmpl, fbx_obj, settings):
    from bpy_extras import node_shader_utils
    from math import sqrt

    elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'Material')

    nodal_material_wrap_map = settings.nodal_material_wrap_map
    ma = bpy.data.materials.new(name=elem_name_utf8)

    const_color_white = 1.0, 1.0, 1.0
    const_color_black = 0.0, 0.0, 0.0

    fbx_props = (elem_find_first(fbx_obj, b'Properties70'),
                 elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))
    fbx_props_no_template = (fbx_props[0], fbx_elem_nil)

    ma_wrap = node_shader_utils.PrincipledBSDFWrapper(ma, is_readonly=False, use_nodes=True)
    ma_wrap.base_color = elem_props_get_color_rgb(fbx_props, b'DiffuseColor', const_color_white)
    # No specular color in Principled BSDF shader, assumed to be either white or take some tint from diffuse one...
    # TODO: add way to handle tint option (guesstimate from spec color + intensity...)?
    ma_wrap.specular = elem_props_get_number(fbx_props, b'SpecularFactor', 0.25) * 2.0
    # XXX Totally empirical conversion, trying to adapt it (and protect against invalid negative values, see T96076):
    #     From [1.0 - 0.0] Principled BSDF range to [0.0 - 100.0] FBX shininess range)...
    fbx_shininess = max(elem_props_get_number(fbx_props, b'Shininess', 20.0), 0.0)
    ma_wrap.roughness = 1.0 - (sqrt(fbx_shininess) / 10.0)
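    # For example, FBX Shininess 100 maps to roughness 0.0, 25 to 0.5 and 0 to 1.0 under this empirical curve.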
    # Sweetness... Looks like we are not the only ones to not know exactly how FBX is supposed to work (see T59850).
    # According to one of its developers, Unity uses that formula to extract alpha value:
    #
    #   alpha = 1 - TransparencyFactor
    #   if (alpha == 1 or alpha == 0):
    #       alpha = 1 - TransparentColor.r
    #
    # Until further info, let's assume this is correct way to do, hence the following code for TransparentColor.
    # However, there are some cases (from 3DSMax, see T65065), where we do have TransparencyFactor only defined
    # in the template to 0.0, and then materials defining TransparentColor to pure white (1.0, 1.0, 1.0),
    # and setting alpha value in Opacity... try to cope with that too. :((((
    alpha = 1.0 - elem_props_get_number(fbx_props, b'TransparencyFactor', 0.0)
    if (alpha == 1.0 or alpha == 0.0):
        alpha = elem_props_get_number(fbx_props_no_template, b'Opacity', None)
        if alpha is None:
            alpha = 1.0 - elem_props_get_color_rgb(fbx_props, b'TransparentColor', const_color_black)[0]
    ma_wrap.alpha = alpha
    ma_wrap.metallic = elem_props_get_number(fbx_props, b'ReflectionFactor', 0.0)
    # We have no metallic (a.k.a. reflection) color...
    # elem_props_get_color_rgb(fbx_props, b'ReflectionColor', const_color_white)
    ma_wrap.normalmap_strength = elem_props_get_number(fbx_props, b'BumpFactor', 1.0)
    # Emission strength and color
    ma_wrap.emission_strength = elem_props_get_number(fbx_props, b'EmissiveFactor', 1.0)
    ma_wrap.emission_color = elem_props_get_color_rgb(fbx_props, b'EmissiveColor', const_color_black)

    nodal_material_wrap_map[ma] = ma_wrap

    if settings.use_custom_props:
        blen_read_custom_properties(fbx_obj, ma, settings)

    return ma
def blen_read_texture_image(fbx_tmpl, fbx_obj, basedir, settings):
    from bpy_extras import image_utils

    def pack_data_from_content(image, fbx_obj):
        data = elem_find_first_bytes(fbx_obj, b'Content')
        if (data):
            data_len = len(data)
            if (data_len):
                image.pack(data=data, data_len=data_len)

    elem_name_utf8 = elem_name_ensure_classes(fbx_obj, {b'Texture', b'Video'})

    image_cache = settings.image_cache

    # Yet another beautiful logic demonstration by Master FBX:
    # * RelativeFilename in both Video and Texture nodes.
    # * FileName in texture nodes.
    # * Filename in video nodes.
    # Aaaaaaaarrrrrrrrgggggggggggg!!!!!!!!!!!!!!
    filepath = elem_find_first_string(fbx_obj, b'RelativeFilename')
    if filepath:
        # Make sure we do handle a relative path, and not an absolute one (see D5143).
        filepath = filepath.lstrip(os.path.sep).lstrip(os.path.altsep)
        filepath = os.path.join(basedir, filepath)
    else:
        filepath = elem_find_first_string(fbx_obj, b'FileName')
    if not filepath:
        filepath = elem_find_first_string(fbx_obj, b'Filename')
    if not filepath:
        print("Error, could not find any file path in ", fbx_obj)
        print("       Falling back to: ", elem_name_utf8)
        filepath = elem_name_utf8
    else:
        filepath = filepath.replace('\\', '/') if (os.sep == '/') else filepath.replace('/', '\\')

    image = image_cache.get(filepath)
    if image is not None:
        # Data is only embedded once, we may have already created the image but still be missing its data!
        if not image.has_data:
            pack_data_from_content(image, fbx_obj)
        return image

    image = image_utils.load_image(
        filepath,
        dirname=basedir,
        place_holder=True,
        recursive=settings.use_image_search,
    )

    # Try to use embedded data, if available!
    pack_data_from_content(image, fbx_obj)

    image_cache[filepath] = image
    # name can be ../a/b/c
    image.name = os.path.basename(elem_name_utf8)

    if settings.use_custom_props:
        blen_read_custom_properties(fbx_obj, image, settings)

    return image
def blen_read_camera(fbx_tmpl, fbx_obj, settings):
    # meters to inches
    M2I = 0.0393700787

    global_scale = settings.global_scale

    elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'NodeAttribute')

    fbx_props = (elem_find_first(fbx_obj, b'Properties70'),
                 elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))

    camera = bpy.data.cameras.new(name=elem_name_utf8)

    camera.type = 'ORTHO' if elem_props_get_enum(fbx_props, b'CameraProjectionType', 0) == 1 else 'PERSP'

    camera.dof.focus_distance = elem_props_get_number(fbx_props, b'FocusDistance', 10) * global_scale
    if (elem_props_get_bool(fbx_props, b'UseDepthOfField', False)):
        camera.dof.use_dof = True

    camera.lens = elem_props_get_number(fbx_props, b'FocalLength', 35.0)
    camera.sensor_width = elem_props_get_number(fbx_props, b'FilmWidth', 32.0 * M2I) / M2I
    camera.sensor_height = elem_props_get_number(fbx_props, b'FilmHeight', 32.0 * M2I) / M2I
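    # FBX film back sizes are expressed in inches, so dividing by the inch-per-millimetre factor above yields
    # Blender's millimetre-based sensor size: e.g. a 1.2598" FilmWidth imports as a 32 mm sensor.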
    camera.ortho_scale = elem_props_get_number(fbx_props, b'OrthoZoom', 1.0)

    filmaspect = camera.sensor_width / camera.sensor_height

    camera.shift_x = elem_props_get_number(fbx_props, b'FilmOffsetX', 0.0) / (M2I * camera.sensor_width)
    camera.shift_y = elem_props_get_number(fbx_props, b'FilmOffsetY', 0.0) / (M2I * camera.sensor_height * filmaspect)

    camera.clip_start = elem_props_get_number(fbx_props, b'NearPlane', 0.01) * global_scale
    camera.clip_end = elem_props_get_number(fbx_props, b'FarPlane', 100.0) * global_scale

    if settings.use_custom_props:
        blen_read_custom_properties(fbx_obj, camera, settings)

    return camera
def blen_read_light(fbx_tmpl, fbx_obj, settings):
    elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'NodeAttribute')

    fbx_props = (elem_find_first(fbx_obj, b'Properties70'),
                 elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))

    light_type = {
        0: 'POINT',
        1: 'SUN',
        2: 'SPOT'}.get(elem_props_get_enum(fbx_props, b'LightType', 0), 'POINT')

    lamp = bpy.data.lights.new(name=elem_name_utf8, type=light_type)

    if light_type == 'SPOT':
        spot_size = elem_props_get_number(fbx_props, b'OuterAngle', None)
        if spot_size is None:
            # Deprecated.
            spot_size = elem_props_get_number(fbx_props, b'Cone angle', 45.0)
        lamp.spot_size = math.radians(spot_size)

        spot_blend = elem_props_get_number(fbx_props, b'InnerAngle', None)
        if spot_blend is None:
            # Deprecated.
            spot_blend = elem_props_get_number(fbx_props, b'HotSpot', 45.0)
        lamp.spot_blend = 1.0 - (spot_blend / spot_size)
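        # For example, an FBX spot with OuterAngle 60 and InnerAngle 30 imports with a 60-degree cone and
        # spot_blend = 1 - 30/60 = 0.5.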
    # TODO, cycles nodes???
    lamp.color = elem_props_get_color_rgb(fbx_props, b'Color', (1.0, 1.0, 1.0))
    lamp.energy = elem_props_get_number(fbx_props, b'Intensity', 100.0) / 100.0
    lamp.use_shadow = elem_props_get_bool(fbx_props, b'CastShadow', True)
    if hasattr(lamp, "cycles"):
        lamp.cycles.cast_shadow = lamp.use_shadow
    # Keeping this for now, but this is not used nor exposed anymore afaik...
    lamp.shadow_color = elem_props_get_color_rgb(fbx_props, b'ShadowColor', (0.0, 0.0, 0.0))

    if settings.use_custom_props:
        blen_read_custom_properties(fbx_obj, lamp, settings)

    return lamp
# ### Import Utility class
class FbxImportHelperNode:
    """
    Temporary helper node to store a hierarchy of fbxNode objects before building Objects, Armatures and Bones.
    It tries to keep the correction data in one place so it can be applied consistently to the imported data.
    """

    __slots__ = (
        '_parent', 'anim_compensation_matrix', 'is_global_animation', 'armature_setup', 'armature', 'bind_matrix',
        'bl_bone', 'bl_data', 'bl_obj', 'bone_child_matrix', 'children', 'clusters',
        'fbx_elem', 'fbx_data_elem', 'fbx_name', 'fbx_transform_data', 'fbx_type',
        'is_armature', 'has_bone_children', 'is_bone', 'is_root', 'is_leaf',
        'matrix', 'matrix_as_parent', 'matrix_geom', 'meshes', 'post_matrix', 'pre_matrix')

    def __init__(self, fbx_elem, bl_data, fbx_transform_data, is_bone):
        self.fbx_name = elem_name_ensure_class(fbx_elem, b'Model') if fbx_elem else 'Unknown'
        self.fbx_type = fbx_elem.props[2] if fbx_elem else None
        self.fbx_elem = fbx_elem
        self.fbx_data_elem = None  # FBX elem of a connected NodeAttribute/Geometry for helpers whose bl_data does not exist or is yet to be created.
        self.bl_obj = None
        self.bl_data = bl_data
        self.bl_bone = None  # Name of bone if this is a bone (this may be different to fbx_name if there was a name conflict in Blender!)
        self.fbx_transform_data = fbx_transform_data
        self.is_root = False
        self.is_bone = is_bone
        self.is_armature = False
        self.armature = None  # For bones only, relevant armature node.
        self.has_bone_children = False  # True if the hierarchy below this node contains bones, important to support mixed hierarchies.
        self.is_leaf = False  # True for leaf-bones added to the end of some bone chains to set the lengths.
        self.pre_matrix = None  # correction matrix that needs to be applied before the FBX transform
        self.bind_matrix = None  # for bones this is the matrix used to bind to the skin
        if fbx_transform_data:
            self.matrix, self.matrix_as_parent, self.matrix_geom = blen_read_object_transform_do(fbx_transform_data)
        else:
            self.matrix, self.matrix_as_parent, self.matrix_geom = (None, None, None)
        self.post_matrix = None  # correction matrix that needs to be applied after the FBX transform
        self.bone_child_matrix = None  # Objects attached to a bone end not the beginning, this matrix corrects for that

        # XXX Those two are to handle the fact that rigged meshes are not linked to their armature in FBX, which implies
        #     that their animation is in global space (afaik...).
        #     This is actually not really solvable currently, since anim_compensation_matrix is not valid if armature
        #     itself is animated (we'd have to recompute global-to-local anim_compensation_matrix for each frame,
        #     and for each armature action... beyond being an insane work).
        #     Solution for now: do not read rigged meshes animations at all! sic...
        self.anim_compensation_matrix = None  # a mesh moved in the hierarchy may have a different local matrix. This compensates animations for this.
        self.is_global_animation = False

        self.meshes = None  # List of meshes influenced by this bone.
        self.clusters = []  # Deformer Cluster nodes
        self.armature_setup = {}  # mesh and armature matrix when the mesh was bound

        self._parent = None
        self.children = []

    @property
    def parent(self):
        return self._parent

    @parent.setter
    def parent(self, value):
        if self._parent is not None:
            self._parent.children.remove(self)
        self._parent = value
        if self._parent is not None:
            self._parent.children.append(self)

    @property
    def ignore(self):
        # Separating leaf status from ignore status itself.
        # Currently they are equivalent, but this may change in future.
        return self.is_leaf

    def __repr__(self):
        if self.fbx_elem:
            return self.fbx_elem.props[1].decode()
        else:
            return "None"

    def print_info(self, indent=0):
        print(" " * indent + (self.fbx_name if self.fbx_name else "(Null)")
              + ("[root]" if self.is_root else "")
              + ("[leaf]" if self.is_leaf else "")
              + ("[ignore]" if self.ignore else "")
              + ("[armature]" if self.is_armature else "")
              + ("[bone]" if self.is_bone else "")
              + ("[HBC]" if self.has_bone_children else "")
              )
        for c in self.children:
            c.print_info(indent + 1)

    def mark_leaf_bones(self):
        if self.is_bone and len(self.children) == 1:
            child = self.children[0]
            if child.is_bone and len(child.children) == 0:
                child.is_leaf = True
        for child in self.children:
            child.mark_leaf_bones()

    def do_bake_transform(self, settings):
        return (settings.bake_space_transform and self.fbx_type in (b'Mesh', b'Null') and
                not self.is_armature and not self.is_bone)
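    # Space-transform baking is only applied to plain Mesh/Null nodes: armatures and bones are excluded above so
    # that the bone hierarchy keeps its own transforms and only object-level data gets the global matrix folded in.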
    def find_correction_matrix(self, settings, parent_correction_inv=None):
        from bpy_extras.io_utils import axis_conversion

        if self.parent and (self.parent.is_root or self.parent.do_bake_transform(settings)):
            self.pre_matrix = settings.global_matrix

        if parent_correction_inv:
            self.pre_matrix = parent_correction_inv @ (self.pre_matrix if self.pre_matrix else Matrix())

        correction_matrix = None

        if self.is_bone:
            if settings.automatic_bone_orientation:
                # find best orientation to align bone with
                bone_children = tuple(child for child in self.children if child.is_bone)
                if len(bone_children) == 0:
                    # no children, inherit the correction from parent (if possible)
                    if self.parent and self.parent.is_bone:
                        correction_matrix = parent_correction_inv.inverted() if parent_correction_inv else None
                else:
                    # else find how best to rotate the bone to align the Y axis with the children
                    best_axis = (1, 0, 0)
                    if len(bone_children) == 1:
                        vec = bone_children[0].get_bind_matrix().to_translation()
                        best_axis = Vector((0, 0, 1 if vec[2] >= 0 else -1))
                        if abs(vec[0]) > abs(vec[1]):
                            if abs(vec[0]) > abs(vec[2]):
                                best_axis = Vector((1 if vec[0] >= 0 else -1, 0, 0))
                        elif abs(vec[1]) > abs(vec[2]):
                            best_axis = Vector((0, 1 if vec[1] >= 0 else -1, 0))
                    else:
                        # get the child directions once because they may be checked several times
                        child_locs = (child.get_bind_matrix().to_translation() for child in bone_children)
                        child_locs = tuple(loc.normalized() for loc in child_locs if loc.magnitude > 0.0)

                        # I'm not sure which one I like better...
                        if False:
                            best_angle = -1.0
                            for i in range(6):
                                a = i // 2
                                s = -1 if i % 2 == 1 else 1
                                test_axis = Vector((s if a == 0 else 0, s if a == 1 else 0, s if a == 2 else 0))

                                # find max angle to children
                                max_angle = 1.0
                                for loc in child_locs:
                                    max_angle = min(max_angle, test_axis.dot(loc))

                                # is it better than the last one?
                                if best_angle < max_angle:
                                    best_angle = max_angle
                                    best_axis = test_axis
                        else:
                            best_angle = -1.0
                            for vec in child_locs:
                                test_axis = Vector((0, 0, 1 if vec[2] >= 0 else -1))
                                if abs(vec[0]) > abs(vec[1]):
                                    if abs(vec[0]) > abs(vec[2]):
                                        test_axis = Vector((1 if vec[0] >= 0 else -1, 0, 0))
                                elif abs(vec[1]) > abs(vec[2]):
                                    test_axis = Vector((0, 1 if vec[1] >= 0 else -1, 0))

                                # find max angle to children
                                max_angle = 1.0
                                for loc in child_locs:
                                    max_angle = min(max_angle, test_axis.dot(loc))

                                # is it better than the last one?
                                if best_angle < max_angle:
                                    best_angle = max_angle
                                    best_axis = test_axis

                    # convert best_axis to axis string
                    to_up = 'Z' if best_axis[2] >= 0 else '-Z'
                    if abs(best_axis[0]) > abs(best_axis[1]):
                        if abs(best_axis[0]) > abs(best_axis[2]):
                            to_up = 'X' if best_axis[0] >= 0 else '-X'
                    elif abs(best_axis[1]) > abs(best_axis[2]):
                        to_up = 'Y' if best_axis[1] >= 0 else '-Y'
                    to_forward = 'X' if to_up not in {'X', '-X'} else 'Y'

                    # Build correction matrix
                    if (to_up, to_forward) != ('Y', 'X'):
                        correction_matrix = axis_conversion(from_forward='X',
                                                            from_up='Y',
                                                            to_forward=to_forward,
                                                            to_up=to_up,
                                                            ).to_4x4()
            else:
                correction_matrix = settings.bone_correction_matrix
        else:
            # camera and light can be hard wired
            if self.fbx_type == b'Camera':
                correction_matrix = MAT_CONVERT_CAMERA
            elif self.fbx_type == b'Light':
                correction_matrix = MAT_CONVERT_LIGHT

        self.post_matrix = correction_matrix

        if self.do_bake_transform(settings):
            self.post_matrix = settings.global_matrix_inv @ (self.post_matrix if self.post_matrix else Matrix())

        correction_matrix_inv = correction_matrix.inverted_safe() if correction_matrix else None
        for child in self.children:
            child.find_correction_matrix(settings, correction_matrix_inv)
    def find_armature_bones(self, armature):
        for child in self.children:
            if child.is_bone:
                child.armature = armature
                child.find_armature_bones(armature)

    def find_armatures(self):
        needs_armature = False
        for child in self.children:
            if child.is_bone:
                needs_armature = True
                break
        if needs_armature:
            if self.fbx_type in {b'Null', b'Root'}:
                # if empty then convert into armature
                self.is_armature = True
                armature = self
            else:
                # otherwise insert a new node
                # XXX Maybe in case self is virtual FBX root node, we should instead add one armature per bone child?
                armature = FbxImportHelperNode(None, None, None, False)
                armature.fbx_name = "Armature"
                armature.is_armature = True

                for child in tuple(self.children):
                    if child.is_bone:
                        child.parent = armature

                armature.parent = self

            armature.find_armature_bones(armature)

        for child in self.children:
            if child.is_armature or child.is_bone:
                continue
            child.find_armatures()

    def find_bone_children(self):
        has_bone_children = False
        for child in self.children:
            has_bone_children |= child.find_bone_children()
        self.has_bone_children = has_bone_children
        return self.is_bone or has_bone_children

    def find_fake_bones(self, in_armature=False):
        if in_armature and not self.is_bone and self.has_bone_children:
            self.is_bone = True
            # if we are not a null node we need an intermediate node for the data
            if self.fbx_type not in {b'Null', b'Root'}:
                node = FbxImportHelperNode(self.fbx_elem, self.bl_data, None, False)
                self.fbx_elem = None
                self.bl_data = None

                # transfer children
                for child in self.children:
                    if child.is_bone or child.has_bone_children:
                        continue
                    child.parent = node

                # attach to parent
                node.parent = self

        if self.is_armature:
            in_armature = True
        for child in self.children:
            child.find_fake_bones(in_armature)
    def get_world_matrix_as_parent(self):
        matrix = self.parent.get_world_matrix_as_parent() if self.parent else Matrix()
        if self.matrix_as_parent:
            matrix = matrix @ self.matrix_as_parent
        return matrix

    def get_world_matrix(self):
        matrix = self.parent.get_world_matrix_as_parent() if self.parent else Matrix()
        if self.matrix:
            matrix = matrix @ self.matrix
        return matrix

    def get_matrix(self):
        matrix = self.matrix if self.matrix else Matrix()
        if self.pre_matrix:
            matrix = self.pre_matrix @ matrix
        if self.post_matrix:
            matrix = matrix @ self.post_matrix
        return matrix

    def get_bind_matrix(self):
        matrix = self.bind_matrix if self.bind_matrix else Matrix()
        if self.pre_matrix:
            matrix = self.pre_matrix @ matrix
        if self.post_matrix:
            matrix = matrix @ self.post_matrix
        return matrix

    def make_bind_pose_local(self, parent_matrix=None):
        if parent_matrix is None:
            parent_matrix = Matrix()

        if self.bind_matrix:
            bind_matrix = parent_matrix.inverted_safe() @ self.bind_matrix
        else:
            bind_matrix = self.matrix.copy() if self.matrix else None

        self.bind_matrix = bind_matrix
        if bind_matrix:
            parent_matrix = parent_matrix @ bind_matrix

        for child in self.children:
            child.make_bind_pose_local(parent_matrix)

    def collect_skeleton_meshes(self, meshes):
        for _, m in self.clusters:
            meshes.update(m)
        for child in self.children:
            if not child.meshes:
                child.collect_skeleton_meshes(meshes)

    def collect_armature_meshes(self):
        if self.is_armature:
            armature_matrix_inv = self.get_world_matrix().inverted_safe()

            meshes = set()
            for child in self.children:
                # Children meshes may be linked to children armatures, in which case we do not want to link them
                # to a parent one. See T70244.
                child.collect_armature_meshes()
                if not child.meshes:
                    child.collect_skeleton_meshes(meshes)
            for m in meshes:
                old_matrix = m.matrix
                m.matrix = armature_matrix_inv @ m.get_world_matrix()
                m.anim_compensation_matrix = old_matrix.inverted_safe() @ m.matrix
                m.is_global_animation = True
                m.parent = self
            self.meshes = meshes
        else:
            for child in self.children:
                child.collect_armature_meshes()
    def build_skeleton(self, arm, parent_matrix, settings, parent_bone_size=1):
        def child_connect(par_bone, child_bone, child_head, connect_ctx):
            # child_bone or child_head may be None.
            force_connect_children, connected = connect_ctx
            if child_bone is not None:
                child_bone.parent = par_bone
                child_head = child_bone.head

            if similar_values_iter(par_bone.tail, child_head):
                if child_bone is not None:
                    child_bone.use_connect = True
                # Disallow any force-connection at this level from now on, since that child was 'really'
                # connected, we do not want to move current bone's tail anymore!
                connected = None
            elif force_connect_children and connected is not None:
                # We only store position where tail of par_bone should be in the end.
                # Actual tail moving and force connection of compatible child bones will happen
                # once all have been checked.
                if connected is ...:
                    connected = ([child_head.copy(), 1], [child_bone] if child_bone is not None else [])
                else:
                    connected[0][0] += child_head
                    connected[0][1] += 1
                    if child_bone is not None:
                        connected[1].append(child_bone)
            connect_ctx[1] = connected

        def child_connect_finalize(par_bone, connect_ctx):
            force_connect_children, connected = connect_ctx
            # Do nothing if force connection is not enabled!
            if force_connect_children and connected is not None and connected is not ...:
                # Here again we have to be wary about zero-length bones!!!
                par_tail = connected[0][0] / connected[0][1]
                if (par_tail - par_bone.head).magnitude < 1e-2:
                    par_bone_vec = (par_bone.tail - par_bone.head).normalized()
                    par_tail = par_bone.head + par_bone_vec * 0.01
                par_bone.tail = par_tail
                for child_bone in connected[1]:
                    if similar_values_iter(par_tail, child_bone.head):
                        child_bone.use_connect = True

        # Create the (edit)bone.
        bone = arm.bl_data.edit_bones.new(name=self.fbx_name)

        self.bl_obj = arm.bl_obj
        self.bl_data = arm.bl_data
        self.bl_bone = bone.name  # Could be different from the FBX name!
        # Read EditBone custom props the NodeAttribute
        if settings.use_custom_props and self.fbx_data_elem:
            blen_read_custom_properties(self.fbx_data_elem, bone, settings)

        # get average distance to children
        bone_size = 0.0
        bone_count = 0
        for child in self.children:
            if child.is_bone:
                bone_size += child.get_bind_matrix().to_translation().magnitude
                bone_count += 1
        if bone_count > 0:
            bone_size /= bone_count
        else:
            bone_size = parent_bone_size

        # So that our bone gets its final length, but still Y-aligned in armature space.
        # 0-length bones are automatically collapsed into their parent when you leave edit mode,
        # so this enforces a minimum length.
        bone_tail = Vector((0.0, 1.0, 0.0)) * max(0.01, bone_size)
        bone.tail = bone_tail

        # And rotate/move it to its final "rest pose".
        bone_matrix = parent_matrix @ self.get_bind_matrix().normalized()

        bone.matrix = bone_matrix

        force_connect_children = settings.force_connect_children

        connect_ctx = [force_connect_children, ...]
        for child in self.children:
            if child.is_leaf and force_connect_children:
                # Arggggggggggggggggg! We do not want to create this bone, but we need its 'virtual head' location
                # to orient current one!!!
                child_head = (bone_matrix @ child.get_bind_matrix().normalized()).translation
                child_connect(bone, None, child_head, connect_ctx)
            elif child.is_bone and not child.ignore:
                child_bone = child.build_skeleton(arm, bone_matrix, settings, bone_size)
                # Connection to parent.
                child_connect(bone, child_bone, None, connect_ctx)

        child_connect_finalize(bone, connect_ctx)

        # Correction for children attached to a bone. FBX expects to attach to the head of a bone, while Blender
        # attaches to the tail.
        if force_connect_children:
            # When forcefully connecting, the bone's tail position may be changed, which can change both the bone's
            # rotation and its length.
            # Set the correction matrix such that it transforms the current tail transformation back to the original
            # head transformation.
            head_to_origin = bone.matrix.inverted_safe()
            tail_to_head = Matrix.Translation(bone.head - bone.tail)
            origin_to_original_head = bone_matrix
            tail_to_original_head = head_to_origin @ tail_to_head @ origin_to_original_head
            self.bone_child_matrix = tail_to_original_head
        else:
            self.bone_child_matrix = Matrix.Translation(-bone_tail)

        return bone
    def build_node_obj(self, fbx_tmpl, settings):
        if self.bl_obj:
            return self.bl_obj

        if self.is_bone or not self.fbx_elem:
            return None

        # create when linking since we need object data
        elem_name_utf8 = self.fbx_name

        # Object data must be created already
        self.bl_obj = obj = bpy.data.objects.new(name=elem_name_utf8, object_data=self.bl_data)

        fbx_props = (elem_find_first(self.fbx_elem, b'Properties70'),
                     elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))

        obj.color[0:3] = elem_props_get_color_rgb(fbx_props, b'Color', (0.8, 0.8, 0.8))
        obj.hide_viewport = not bool(elem_props_get_visibility(fbx_props, b'Visibility', 1.0))

        obj.matrix_basis = self.get_matrix()

        if settings.use_custom_props:
            blen_read_custom_properties(self.fbx_elem, obj, settings)

        return obj
    def build_skeleton_children(self, fbx_tmpl, settings, scene, view_layer):
        if self.is_bone:
            for child in self.children:
                if child.ignore:
                    continue
                child.build_skeleton_children(fbx_tmpl, settings, scene, view_layer)
            return None
        else:
            # child is not a bone
            obj = self.build_node_obj(fbx_tmpl, settings)

            if obj is None:
                return None

            for child in self.children:
                if child.ignore:
                    continue
                child.build_skeleton_children(fbx_tmpl, settings, scene, view_layer)

            # instance in scene
            view_layer.active_layer_collection.collection.objects.link(obj)
            obj.select_set(True)

            return obj
    def link_skeleton_children(self, fbx_tmpl, settings, scene):
        if self.is_bone:
            for child in self.children:
                if child.ignore:
                    continue
                child_obj = child.bl_obj
                if child_obj and child_obj != self.bl_obj:
                    child_obj.parent = self.bl_obj  # get the armature the bone belongs to
                    child_obj.parent_bone = self.bl_bone
                    child_obj.parent_type = 'BONE'
                    child_obj.matrix_parent_inverse = Matrix()

                    # Blender attaches to the end of a bone, while FBX attaches to the start.
                    # bone_child_matrix corrects for that.
                    if child.pre_matrix:
                        child.pre_matrix = self.bone_child_matrix @ child.pre_matrix
                    else:
                        child.pre_matrix = self.bone_child_matrix

                    child_obj.matrix_basis = child.get_matrix()
                child.link_skeleton_children(fbx_tmpl, settings, scene)
            return None
        else:
            obj = self.bl_obj

            for child in self.children:
                if child.ignore:
                    continue
                child_obj = child.link_skeleton_children(fbx_tmpl, settings, scene)
                if child_obj:
                    child_obj.parent = obj

            return obj
    def set_pose_matrix_and_custom_props(self, arm, settings):
        pose_bone = arm.bl_obj.pose.bones[self.bl_bone]
        pose_bone.matrix_basis = self.get_bind_matrix().inverted_safe() @ self.get_matrix()
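        # The bind matrix is the bone's rest pose, so composing its inverse with the node's current FBX transform
        # leaves only the residual posing, which is exactly what matrix_basis expects.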
        # `self.fbx_elem` can be `None` in cases where the imported hierarchy contains a mix of bone and non-bone FBX
        # Nodes parented to one another, e.g. "bone1"->"mesh1"->"bone2". In Blender, an Armature can only consist of
        # bones, so to maintain the imported hierarchy, a placeholder bone with the same name as "mesh1" is inserted
        # into the Armature and then the imported "mesh1" Object is parented to the placeholder bone. The placeholder
        # bone won't have a `self.fbx_elem` because it belongs to the "mesh1" Object instead.
        # See FbxImportHelperNode.find_fake_bones().
        if settings.use_custom_props and self.fbx_elem:
            blen_read_custom_properties(self.fbx_elem, pose_bone, settings)

        for child in self.children:
            if child.ignore:
                continue
            if child.is_bone:
                child.set_pose_matrix_and_custom_props(arm, settings)
    def merge_weights(self, combined_weights, fbx_cluster):
        indices = elem_prop_first(elem_find_first(fbx_cluster, b'Indexes', default=None), default=())
        weights = elem_prop_first(elem_find_first(fbx_cluster, b'Weights', default=None), default=())

        for index, weight in zip(indices, weights):
            w = combined_weights.get(index)
            if w is None:
                combined_weights[index] = [weight]
            else:
                w.append(weight)
    def set_bone_weights(self):
        ignored_children = tuple(child for child in self.children
                                 if child.is_bone and child.ignore and len(child.clusters) > 0)

        if len(ignored_children) > 0:
            # If we have an ignored child bone we need to merge their weights into the current bone weights.
            # This can happen both intentionally and accidentally when skinning a model. Either way, they
            # need to be moved into a parent bone or they cause animation glitches.
            for fbx_cluster, meshes in self.clusters:
                combined_weights = {}
                self.merge_weights(combined_weights, fbx_cluster)

                for child in ignored_children:
                    for child_cluster, child_meshes in child.clusters:
                        if not meshes.isdisjoint(child_meshes):
                            self.merge_weights(combined_weights, child_cluster)

                # combine child weights
                indices = []
                weights = []
                for i, w in combined_weights.items():
                    indices.append(i)
                    if len(w) > 1:
                        # Add ignored child weights to the current bone's weight.
                        # XXX - Weights that sum to more than 1.0 get clamped to 1.0 when set in the vertex group.
                        weights.append(sum(w))
                    else:
                        weights.append(w[0])

                add_vgroup_to_objects(indices, weights, self.bl_bone, [node.bl_obj for node in meshes])

            # clusters that drive meshes not included in a parent don't need to be merged
            all_meshes = set().union(*[meshes for _, meshes in self.clusters])
            for child in ignored_children:
                for child_cluster, child_meshes in child.clusters:
                    if all_meshes.isdisjoint(child_meshes):
                        indices = elem_prop_first(elem_find_first(child_cluster, b'Indexes', default=None), default=())
                        weights = elem_prop_first(elem_find_first(child_cluster, b'Weights', default=None), default=())
                        add_vgroup_to_objects(indices, weights, self.bl_bone, [node.bl_obj for node in child_meshes])
        else:
            # set the vertex weights on meshes
            for fbx_cluster, meshes in self.clusters:
                indices = elem_prop_first(elem_find_first(fbx_cluster, b'Indexes', default=None), default=())
                weights = elem_prop_first(elem_find_first(fbx_cluster, b'Weights', default=None), default=())
                add_vgroup_to_objects(indices, weights, self.bl_bone, [node.bl_obj for node in meshes])

        for child in self.children:
            if child.is_bone and not child.ignore:
                child.set_bone_weights()
    def build_hierarchy(self, fbx_tmpl, settings, scene, view_layer):
        if self.is_armature:
            # create when linking since we need object data
            elem_name_utf8 = self.fbx_name

            self.bl_data = arm_data = bpy.data.armatures.new(name=elem_name_utf8)

            # Object data must be created already
            self.bl_obj = arm = bpy.data.objects.new(name=elem_name_utf8, object_data=arm_data)

            arm.matrix_basis = self.get_matrix()

            fbx_props = (elem_find_first(self.fbx_elem, b'Properties70'),
                         elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))

            if settings.use_custom_props:
                # Read Armature Object custom props from the Node
                blen_read_custom_properties(self.fbx_elem, arm, settings)

                if self.fbx_data_elem:
                    # Read Armature Data custom props from the NodeAttribute
                    blen_read_custom_properties(self.fbx_data_elem, arm_data, settings)

            # instance in scene
            view_layer.active_layer_collection.collection.objects.link(arm)
            arm.select_set(True)

            # Switch to Edit mode.
            view_layer.objects.active = arm
            is_hidden = arm.hide_viewport
            arm.hide_viewport = False  # Can't switch to Edit mode hidden objects...
            bpy.ops.object.mode_set(mode='EDIT')

            for child in self.children:
                if child.ignore:
                    continue
                if child.is_bone:
                    child.build_skeleton(self, Matrix(), settings)

            bpy.ops.object.mode_set(mode='OBJECT')

            arm.hide_viewport = is_hidden

            # Set pose matrix and PoseBone custom properties
            for child in self.children:
                if child.ignore:
                    continue
                if child.is_bone:
                    child.set_pose_matrix_and_custom_props(self, settings)

            # Add bone children:
            for child in self.children:
                if child.ignore:
                    continue
                child_obj = child.build_skeleton_children(fbx_tmpl, settings, scene, view_layer)

            return arm
        elif self.fbx_elem and not self.is_bone:
            obj = self.build_node_obj(fbx_tmpl, settings)

            # walk through children
            for child in self.children:
                child.build_hierarchy(fbx_tmpl, settings, scene, view_layer)

            # instance in scene
            view_layer.active_layer_collection.collection.objects.link(obj)
            obj.select_set(True)

            return obj
        else:
            for child in self.children:
                child.build_hierarchy(fbx_tmpl, settings, scene, view_layer)

            return None
    def link_hierarchy(self, fbx_tmpl, settings, scene):
        if self.is_armature:
            arm = self.bl_obj

            # Link bone children:
            for child in self.children:
                if child.ignore:
                    continue
                child_obj = child.link_skeleton_children(fbx_tmpl, settings, scene)
                if child_obj:
                    child_obj.parent = arm

            # Add armature modifiers to the meshes
            for mesh in self.meshes:
                (mmat, amat) = mesh.armature_setup[self]
                me_obj = mesh.bl_obj

                # bring global armature & mesh matrices into *Blender* global space.
                # Note: Usage of matrix_geom (local 'diff' transform) here is quite brittle.
                # Among other things, why in hell isn't it taken into account by bindpose & co???
                # Probably because org app (max) handles it completely aside from any parenting stuff,
                # which we obviously cannot do in Blender. :/
                if amat is None:
                    amat = self.bind_matrix
                amat = settings.global_matrix @ (Matrix() if amat is None else amat)
                if self.matrix_geom:
                    amat = amat @ self.matrix_geom
                mmat = settings.global_matrix @ mmat
                if mesh.matrix_geom:
                    mmat = mmat @ mesh.matrix_geom

                # Now that we have armature and mesh in there (global) bind 'state' (matrix),
                # we can compute inverse parenting matrix of the mesh.
                me_obj.matrix_parent_inverse = amat.inverted_safe() @ mmat @ me_obj.matrix_basis.inverted_safe()

                mod = mesh.bl_obj.modifiers.new(arm.name, 'ARMATURE')
                mod.object = arm

            # Add bone weights to the deformers
            for child in self.children:
                if child.ignore:
                    continue
                if child.is_bone:
                    child.set_bone_weights()

            return arm
        elif self.bl_obj:
            obj = self.bl_obj

            # walk through children
            for child in self.children:
                child_obj = child.link_hierarchy(fbx_tmpl, settings, scene)
                if child_obj:
                    child_obj.parent = obj

            return obj
        else:
            for child in self.children:
                child.link_hierarchy(fbx_tmpl, settings, scene)

            return None
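# `load()` below is the module entry point, called from the Import FBX operator with the user-facing
# import options passed as keyword arguments.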
def load(operator, context, filepath="",
         use_manual_orientation=False,
         axis_forward='-Z',
         axis_up='Y',
         global_scale=1.0,
         bake_space_transform=False,
         use_custom_normals=True,
         use_image_search=False,
         use_alpha_decals=False,
         decal_offset=0.0,
         use_anim=True,
         anim_offset=1.0,
         use_subsurf=False,
         use_custom_props=True,
         use_custom_props_enum_as_string=True,
         ignore_leaf_bones=False,
         force_connect_children=False,
         automatic_bone_orientation=False,
         primary_bone_axis='Y',
         secondary_bone_axis='X',
         use_prepost_rot=True,
         colors_type='SRGB'):
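    # Returns {'FINISHED'} on success, or {'CANCELLED'} after reporting an error through `operator.report`.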
    global fbx_elem_nil
    fbx_elem_nil = FBXElem('', (), (), ())

    import os
    import time

    from bpy_extras.io_utils import axis_conversion

    from . import parse_fbx
    from .fbx_utils import RIGHT_HAND_AXES, FBX_FRAMERATES

    start_time_proc = time.process_time()
    start_time_sys = time.time()

    perfmon = PerfMon()
    perfmon.level_up()
    perfmon.step("FBX Import: start importing %s" % filepath)
    perfmon.level_up()

    # Detect ASCII files.

    # Typically it's bad practice to fail silently on any error,
    # however the file may fail to read for many reasons,
    # and this situation is handled later in the code,
    # right now we only want to know if the file successfully reads as ascii.
    try:
        with open(filepath, 'r', encoding="utf-8") as fh:
            fh.read(24)  # If this succeeds the file is (most likely) ASCII rather than binary FBX.
            operator.report({'ERROR'}, tip_("ASCII FBX files are not supported %r") % filepath)
            return {'CANCELLED'}
    except Exception:
        pass
    # End ascii detection.

    try:
        elem_root, version = parse_fbx.parse(filepath)
    except Exception as e:
        import traceback
        traceback.print_exc()

        operator.report({'ERROR'}, tip_("Couldn't open file %r (%s)") % (filepath, e))
        return {'CANCELLED'}

    if version < 7100:
        operator.report({'ERROR'}, tip_("Version %r unsupported, must be %r or later") % (version, 7100))
        return {'CANCELLED'}

    print("FBX version: %r" % version)
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT', toggle=False)

    # Deselect all.
    if bpy.ops.object.select_all.poll():
        bpy.ops.object.select_all(action='DESELECT')

    basedir = os.path.dirname(filepath)

    nodal_material_wrap_map = {}
    image_cache = {}

    # Tables: (FBX_byte_id -> [FBX_data, None or Blender_datablock])
    fbx_table_nodes = {}

    if use_alpha_decals:
        material_decals = set()
    else:
        material_decals = None

    scene = context.scene
    view_layer = context.view_layer
    # #### Get some info from GlobalSettings.

    perfmon.step("FBX import: Prepare...")

    fbx_settings = elem_find_first(elem_root, b'GlobalSettings')
    fbx_settings_props = elem_find_first(fbx_settings, b'Properties70')
    if fbx_settings is None or fbx_settings_props is None:
        operator.report({'ERROR'}, tip_("No 'GlobalSettings' found in file %r") % filepath)
        return {'CANCELLED'}

    # FBX default base unit seems to be the centimeter, while raw Blender Unit is equivalent to the meter...
    unit_scale = elem_props_get_number(fbx_settings_props, b'UnitScaleFactor', 1.0)
    unit_scale_org = elem_props_get_number(fbx_settings_props, b'OriginalUnitScaleFactor', 1.0)
    global_scale *= (unit_scale / units_blender_to_fbx_factor(context.scene))
    # Compute global matrix and scale.
    if not use_manual_orientation:
        axis_forward = (elem_props_get_integer(fbx_settings_props, b'FrontAxis', 1),
                        elem_props_get_integer(fbx_settings_props, b'FrontAxisSign', 1))
        axis_up = (elem_props_get_integer(fbx_settings_props, b'UpAxis', 2),
                   elem_props_get_integer(fbx_settings_props, b'UpAxisSign', 1))
        axis_coord = (elem_props_get_integer(fbx_settings_props, b'CoordAxis', 0),
                      elem_props_get_integer(fbx_settings_props, b'CoordAxisSign', 1))
        axis_key = (axis_up, axis_forward, axis_coord)
        axis_up, axis_forward = {v: k for k, v in RIGHT_HAND_AXES.items()}.get(axis_key, ('Z', 'Y'))
    global_matrix = (Matrix.Scale(global_scale, 4) @
                     axis_conversion(from_forward=axis_forward, from_up=axis_up).to_4x4())

    # To cancel out unwanted rotation/scale on nodes.
    global_matrix_inv = global_matrix.inverted()
    # For transforming mesh normals.
    global_matrix_inv_transposed = global_matrix_inv.transposed()
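    # Normals must be transformed by the inverse transpose of the global matrix (rather than the matrix
    # itself) so they stay perpendicular to surfaces under non-uniform scaling.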
    # Compute bone correction matrix
    bone_correction_matrix = None  # None means no correction/identity
    if not automatic_bone_orientation:
        if (primary_bone_axis, secondary_bone_axis) != ('Y', 'X'):
            bone_correction_matrix = axis_conversion(from_forward='X',
                                                     from_up='Y',
                                                     to_forward=secondary_bone_axis,
                                                     to_up=primary_bone_axis,
                                                     ).to_4x4()

    # Compute framerate settings.
    custom_fps = elem_props_get_number(fbx_settings_props, b'CustomFrameRate', 25.0)
    time_mode = elem_props_get_enum(fbx_settings_props, b'TimeMode')
    real_fps = {eid: val for val, eid in FBX_FRAMERATES[1:]}.get(time_mode, custom_fps)
    if real_fps <= 0.0:
        real_fps = 25.0
    scene.render.fps = round(real_fps)
    scene.render.fps_base = scene.render.fps / real_fps
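    # For example, a file whose frame rate resolves to 29.97 gives scene.render.fps = 30 and
    # fps_base = 30 / 29.97 ≈ 1.001, so the effective scene frame rate remains 29.97 fps.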
    # store global settings that need to be accessed during conversion
    settings = FBXImportSettings(
        operator.report, (axis_up, axis_forward), global_matrix, global_scale,
        bake_space_transform, global_matrix_inv, global_matrix_inv_transposed,
        use_custom_normals, use_image_search,
        use_alpha_decals, decal_offset,
        use_anim, anim_offset,
        use_subsurf,
        use_custom_props, use_custom_props_enum_as_string,
        nodal_material_wrap_map, image_cache,
        ignore_leaf_bones, force_connect_children, automatic_bone_orientation, bone_correction_matrix,
        use_prepost_rot, colors_type,
    )
    # #### And now, the "real" data.

    perfmon.step("FBX import: Templates...")

    fbx_defs = elem_find_first(elem_root, b'Definitions')  # can be None
    fbx_nodes = elem_find_first(elem_root, b'Objects')
    fbx_connections = elem_find_first(elem_root, b'Connections')

    if fbx_nodes is None:
        operator.report({'ERROR'}, tip_("No 'Objects' found in file %r") % filepath)
        return {'CANCELLED'}
    if fbx_connections is None:
        operator.report({'ERROR'}, tip_("No 'Connections' found in file %r") % filepath)
        return {'CANCELLED'}

    # First load property templates
    # Load 'PropertyTemplate' values.
    # Key is a tuple, (ObjectType, FBXNodeType)
    # eg, (b'Texture', b'KFbxFileTexture')
    #     (b'Geometry', b'KFbxMesh')
    fbx_templates = {}
    if fbx_defs is not None:
        for fbx_def in fbx_defs.elems:
            if fbx_def.id == b'ObjectType':
                for fbx_subdef in fbx_def.elems:
                    if fbx_subdef.id == b'PropertyTemplate':
                        assert(fbx_def.props_type == b'S')
                        assert(fbx_subdef.props_type == b'S')
                        # (b'Texture', b'KFbxFileTexture') - eg.
                        key = fbx_def.props[0], fbx_subdef.props[0]
                        fbx_templates[key] = fbx_subdef

    def fbx_template_get(key):
        ret = fbx_templates.get(key, fbx_elem_nil)
        if ret is fbx_elem_nil:
            # Newest FBX (7.4 and above) use no more 'K' in their type names...
            key = (key[0], key[1][1:])
            return fbx_templates.get(key, fbx_elem_nil)
        return ret
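    # fbx_template_get() retries with the 'K' prefix stripped, e.g. a 7.4+ file defining
    # (b'Geometry', b'FbxMesh') is still found when asked for (b'Geometry', b'KFbxMesh').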
    perfmon.step("FBX import: Nodes...")

    # Build FBX node-table
    for fbx_obj in fbx_nodes.elems:
        # TODO, investigate what other items after first 3 may be
        assert(fbx_obj.props_type[:3] == b'LSS')
        fbx_uuid = elem_uuid(fbx_obj)
        fbx_table_nodes[fbx_uuid] = [fbx_obj, None]
    # http://download.autodesk.com/us/fbx/20112/FBX_SDK_HELP/index.html?url=
    #        WS73099cc142f487551fea285e1221e4f9ff8-7fda.htm,topicNumber=d0e6388

    perfmon.step("FBX import: Connections...")

    fbx_connection_map = {}
    fbx_connection_map_reverse = {}

    for fbx_link in fbx_connections.elems:
        c_type = fbx_link.props[0]
        if fbx_link.props_type[1:3] == b'LL':
            c_src, c_dst = fbx_link.props[1:3]
            fbx_connection_map.setdefault(c_src, []).append((c_dst, fbx_link))
            fbx_connection_map_reverse.setdefault(c_dst, []).append((c_src, fbx_link))
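    # For 'OO' connections the first uuid is the attached (child) object and the second its owner (parent),
    # so fbx_connection_map maps child -> [(parent, link), ...] and fbx_connection_map_reverse the opposite.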
    perfmon.step("FBX import: Meshes...")

    fbx_tmpl = fbx_template_get((b'Geometry', b'KFbxMesh'))

    for fbx_uuid, fbx_item in fbx_table_nodes.items():
        fbx_obj, blen_data = fbx_item
        if fbx_obj.id != b'Geometry':
            continue
        if fbx_obj.props[-1] == b'Mesh':
            assert(blen_data is None)
            fbx_item[1] = blen_read_geom(fbx_tmpl, fbx_obj, settings)
    perfmon.step("FBX import: Materials & Textures...")

    # Load material data
    fbx_tmpl = fbx_template_get((b'Material', b'KFbxSurfacePhong'))
    # b'KFbxSurfaceLambert'

    for fbx_uuid, fbx_item in fbx_table_nodes.items():
        fbx_obj, blen_data = fbx_item
        if fbx_obj.id != b'Material':
            continue
        assert(blen_data is None)
        fbx_item[1] = blen_read_material(fbx_tmpl, fbx_obj, settings)

    # Load image & textures data

    fbx_tmpl_tex = fbx_template_get((b'Texture', b'KFbxFileTexture'))
    fbx_tmpl_img = fbx_template_get((b'Video', b'KFbxVideo'))

    # Important to run all 'Video' ones first, embedded images are stored in those nodes.
    # XXX Note we simplify things here, assuming both matching Video and Texture will use same file path,
    #     this may be a bit weak, if issue arise we'll fallback to plain connection stuff...
    for fbx_uuid, fbx_item in fbx_table_nodes.items():
        fbx_obj, blen_data = fbx_item
        if fbx_obj.id != b'Video':
            continue
        fbx_item[1] = blen_read_texture_image(fbx_tmpl_img, fbx_obj, basedir, settings)
    for fbx_uuid, fbx_item in fbx_table_nodes.items():
        fbx_obj, blen_data = fbx_item
        if fbx_obj.id != b'Texture':
            continue
        fbx_item[1] = blen_read_texture_image(fbx_tmpl_tex, fbx_obj, basedir, settings)
    perfmon.step("FBX import: Cameras & Lamps...")

    fbx_tmpl = fbx_template_get((b'NodeAttribute', b'KFbxCamera'))

    for fbx_uuid, fbx_item in fbx_table_nodes.items():
        fbx_obj, blen_data = fbx_item
        if fbx_obj.id != b'NodeAttribute':
            continue
        if fbx_obj.props[-1] == b'Camera':
            assert(blen_data is None)
            fbx_item[1] = blen_read_camera(fbx_tmpl, fbx_obj, settings)

    fbx_tmpl = fbx_template_get((b'NodeAttribute', b'KFbxLight'))

    for fbx_uuid, fbx_item in fbx_table_nodes.items():
        fbx_obj, blen_data = fbx_item
        if fbx_obj.id != b'NodeAttribute':
            continue
        if fbx_obj.props[-1] == b'Light':
            assert(blen_data is None)
            fbx_item[1] = blen_read_light(fbx_tmpl, fbx_obj, settings)
    def connection_filter_ex(fbx_uuid, fbx_id, dct):
        return [(c_found[0], c_found[1], c_type)
                for (c_uuid, c_type) in dct.get(fbx_uuid, ())
                # 0 is used for the root node, which isn't in fbx_table_nodes
                for c_found in (() if c_uuid == 0 else (fbx_table_nodes.get(c_uuid, (None, None)),))
                if (fbx_id is None) or (c_found[0] and c_found[0].id == fbx_id)]

    def connection_filter_forward(fbx_uuid, fbx_id):
        return connection_filter_ex(fbx_uuid, fbx_id, fbx_connection_map)

    def connection_filter_reverse(fbx_uuid, fbx_id):
        return connection_filter_ex(fbx_uuid, fbx_id, fbx_connection_map_reverse)
    perfmon.step("FBX import: Objects & Armatures...")

    # -- temporary helper hierarchy to build armatures and objects from
    # lookup from uuid to helper node. Used to build parent-child relations and later to look up animated nodes.
    fbx_helper_nodes = {}

    # We build an intermediate hierarchy used to:
    # - Calculate and store bone orientation correction matrices. The same matrices will be reused for animation.
    # - Find/insert armature nodes.
    # - Filter leaf bones.

    fbx_helper_nodes[0] = root_helper = FbxImportHelperNode(None, None, None, False)
    root_helper.is_root = True

    fbx_tmpl = fbx_template_get((b'Model', b'KFbxNode'))
    for a_uuid, a_item in fbx_table_nodes.items():
        fbx_obj, bl_data = a_item
        if fbx_obj is None or fbx_obj.id != b'Model':
            continue

        fbx_props = (elem_find_first(fbx_obj, b'Properties70'),
                     elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))

        transform_data = blen_read_object_transform_preprocess(fbx_props, fbx_obj, Matrix(), use_prepost_rot)
        # Note: 'Root' "bones" are handled as (armature) objects.
        # Note: See T46912 for first FBX file I ever saw with 'Limb' bones - thought those were totally deprecated.
        is_bone = fbx_obj.props[2] in {b'LimbNode', b'Limb'}
        fbx_helper_nodes[a_uuid] = FbxImportHelperNode(fbx_obj, bl_data, transform_data, is_bone)

    # add parent-child relations and add blender data to the node
    for fbx_link in fbx_connections.elems:
        if fbx_link.props[0] != b'OO':
            continue
        if fbx_link.props_type[1:3] == b'LL':
            c_src, c_dst = fbx_link.props[1:3]
            parent = fbx_helper_nodes.get(c_dst)
            if parent is None:
                continue

            child = fbx_helper_nodes.get(c_src)
            if child is None:
                # add blender data (meshes, lights, cameras, etc.) to a helper node
                fbx_sdata, bl_data = p_item = fbx_table_nodes.get(c_src, (None, None))
                if fbx_sdata is None:
                    continue
                if fbx_sdata.id not in {b'Geometry', b'NodeAttribute'}:
                    continue
                parent.bl_data = bl_data
                if bl_data is None:
                    # If there's no bl_data, add the fbx_sdata so that it can be read when creating the bl_data/bone
                    parent.fbx_data_elem = fbx_sdata
            else:
                # set parent-child relation
                child.parent = parent

    # find armatures (either an empty below a bone or a new node inserted at the bone
    root_helper.find_armatures()

    # mark nodes that have bone children
    root_helper.find_bone_children()

    # mark nodes that need a bone to attach child-bones to
    root_helper.find_fake_bones()

    # mark leaf nodes that are only required to mark the end of their parent bone
    if settings.ignore_leaf_bones:
        root_helper.mark_leaf_bones()

    # What a mess! Some bones have several BindPoses, some have none, clusters contain a bind pose as well,
    # and you can have several clusters per bone!
    # Maybe some conversion can be applied to put them all into the same frame of reference?
    # get the bind pose from pose elements
    for a_uuid, a_item in fbx_table_nodes.items():
        fbx_obj, bl_data = a_item
        if fbx_obj is None:
            continue
        if fbx_obj.id != b'Pose':
            continue
        if fbx_obj.props[2] != b'BindPose':
            continue
        for fbx_pose_node in fbx_obj.elems:
            if fbx_pose_node.id != b'PoseNode':
                continue
            node_elem = elem_find_first(fbx_pose_node, b'Node')
            node = elem_uuid(node_elem)
            matrix_elem = elem_find_first(fbx_pose_node, b'Matrix')
            matrix = array_to_matrix4(matrix_elem.props[0]) if matrix_elem else None
            bone = fbx_helper_nodes.get(node)
            if bone and matrix:
                # Store the matrix in the helper node.
                # There may be several bind pose matrices for the same node, but in tests they seem to be identical.
                bone.bind_matrix = matrix  # global space
    # get clusters and bind pose
    for helper_uuid, helper_node in fbx_helper_nodes.items():
        if not helper_node.is_bone:
            continue
        for cluster_uuid, cluster_link in fbx_connection_map.get(helper_uuid, ()):
            if cluster_link.props[0] != b'OO':
                continue
            fbx_cluster, _ = fbx_table_nodes.get(cluster_uuid, (None, None))
            if fbx_cluster is None or fbx_cluster.id != b'Deformer' or fbx_cluster.props[2] != b'Cluster':
                continue

            # Get the bind pose from the cluster:
            tx_mesh_elem = elem_find_first(fbx_cluster, b'Transform', default=None)
            tx_mesh = array_to_matrix4(tx_mesh_elem.props[0]) if tx_mesh_elem else Matrix()

            tx_bone_elem = elem_find_first(fbx_cluster, b'TransformLink', default=None)
            tx_bone = array_to_matrix4(tx_bone_elem.props[0]) if tx_bone_elem else None

            tx_arm_elem = elem_find_first(fbx_cluster, b'TransformAssociateModel', default=None)
            tx_arm = array_to_matrix4(tx_arm_elem.props[0]) if tx_arm_elem else None

            mesh_matrix = tx_mesh
            armature_matrix = tx_arm

            if tx_bone:
                mesh_matrix = tx_bone @ mesh_matrix
                helper_node.bind_matrix = tx_bone  # overwrite the bind matrix

            # Get the meshes driven by this cluster: (Shouldn't that be only one?)
            meshes = set()
            for skin_uuid, skin_link in fbx_connection_map.get(cluster_uuid):
                if skin_link.props[0] != b'OO':
                    continue
                fbx_skin, _ = fbx_table_nodes.get(skin_uuid, (None, None))
                if fbx_skin is None or fbx_skin.id != b'Deformer' or fbx_skin.props[2] != b'Skin':
                    continue
                skin_connection = fbx_connection_map.get(skin_uuid)
                if skin_connection is None:
                    continue
                for mesh_uuid, mesh_link in skin_connection:
                    if mesh_link.props[0] != b'OO':
                        continue
                    fbx_mesh, _ = fbx_table_nodes.get(mesh_uuid, (None, None))
                    if fbx_mesh is None or fbx_mesh.id != b'Geometry' or fbx_mesh.props[2] != b'Mesh':
                        continue
                    for object_uuid, object_link in fbx_connection_map.get(mesh_uuid):
                        if object_link.props[0] != b'OO':
                            continue
                        mesh_node = fbx_helper_nodes[object_uuid]
                        if mesh_node:
                            # If we get a valid mesh matrix (in bone space), store armature and
                            # mesh global matrices, we need them to compute mesh's matrix_parent_inverse
                            # when actually binding them via the modifier.
                            # Note we assume all bones were bound with the same mesh/armature (global) matrix,
                            # we do not support otherwise in Blender anyway!
                            mesh_node.armature_setup[helper_node.armature] = (mesh_matrix, armature_matrix)
                            meshes.add(mesh_node)

            helper_node.clusters.append((fbx_cluster, meshes))
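    # In a cluster, 'TransformLink' is (roughly) the bone's global bind matrix and 'Transform' the mesh's
    # transform relative to it; they are stored per armature in `mesh_node.armature_setup` and reused by
    # link_hierarchy() above when deriving each mesh Object's matrix_parent_inverse.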
    # convert bind poses from global space into local space
    root_helper.make_bind_pose_local()

    # collect armature meshes
    root_helper.collect_armature_meshes()

    # find the correction matrices to align FBX objects with their Blender equivalent
    root_helper.find_correction_matrix(settings)

    # build the Object/Armature/Bone hierarchy
    root_helper.build_hierarchy(fbx_tmpl, settings, scene, view_layer)

    # Link the Object/Armature/Bone hierarchy
    root_helper.link_hierarchy(fbx_tmpl, settings, scene)

    # root_helper.print_info(0)

    perfmon.step("FBX import: ShapeKeys...")
    # We can handle shapes.
    blend_shape_channels = {}  # We do not need Shapes themselves, but keyblocks, for anim.

    fbx_tmpl = fbx_template_get((b'Geometry', b'KFbxShape'))

    # - FBX               | - Blender equivalent
    # BlendShape          | `Key`
    # BlendShapeChannel   | `ShapeKey`, but without its `.data`.
    # Shape               | `ShapeKey.data`, but also includes normals and the values are relative to the base Mesh
    #                     |   instead of being absolute. The data is sparse, so each Shape has an "Indexes" array too.
    #                     | FBX 2020 introduced 'Modern Style' Shapes that also support tangents, binormals, vertex
    #                     |   colors and UVs, and can be absolute values instead of relative, but 'Modern Style' Shapes
    #                     |   are not currently supported.
    #
    # The FBX connections between Shapes and Meshes form multiple many-many relationships:
    # Mesh >-< BlendShape >-< BlendShapeChannel >-< Shape
    # In practice, the relationships are almost never many-many and are more typically 1-many or 1-1:
    # Mesh --- BlendShape:
    #   usually 1-1 and the FBX SDK might enforce that each BlendShape is connected to at most one Mesh.
    # BlendShape --< BlendShapeChannel:
    #   usually 1-many, a single BlendShape can be connected to many BlendShapeChannels.
    # BlendShapeChannel --- or uncommonly --< Shape:
    #   usually 1-1, but 1-many is a documented feature.

    def connections_gen(c_src_uuid, fbx_id, fbx_type):
        """Helper to reduce duplicate code"""
        # Rarely, an imported FBX file will have duplicate connections. For Shape Key related connections, FBX
        # appears to ignore the duplicates, or overwrite the existing duplicates such that the end result is the
        # same as ignoring them, so keep a set of the seen connections and ignore any duplicates.
        seen_connections = set()
        for c_dst_uuid, ctype in fbx_connection_map.get(c_src_uuid, ()):
            if ctype.props[0] != b'OO':
                # 'Object-Object' connections only.
                continue
            fbx_data, bl_data = fbx_table_nodes.get(c_dst_uuid, (None, None))
            if fbx_data is None or fbx_data.id != fbx_id or fbx_data.props[2] != fbx_type:
                # Either `c_dst_uuid` doesn't exist, or it has a different id or type.
                continue
            connection_key = (c_src_uuid, c_dst_uuid)
            if connection_key in seen_connections:
                # The connection is a duplicate, skip it.
                continue
            seen_connections.add(connection_key)
            yield c_dst_uuid, fbx_data, bl_data
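    # e.g. connections_gen(s_uuid, b'Deformer', b'BlendShapeChannel') yields a
    # (uuid, fbx_elem, blender_data) tuple for each BlendShapeChannel a Shape is connected to,
    # skipping duplicate connections.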
    # XXX - Multiple Shapes can be assigned to a single BlendShapeChannel to create a progressive blend between the
    #       base mesh and the assigned Shapes, with the percentage at which each Shape is fully blended being stored
    #       in the BlendShapeChannel's FullWeights array. This is also known as 'in-between shapes'.
    #       We don't have any support for in-between shapes currently.
    blend_shape_channel_to_shapes = {}
    mesh_to_shapes = {}
    for s_uuid, (fbx_sdata, _bl_sdata) in fbx_table_nodes.items():
        if fbx_sdata is None or fbx_sdata.id != b'Geometry' or fbx_sdata.props[2] != b'Shape':
            continue

        # shape -> blendshapechannel -> blendshape -> mesh.
        for bc_uuid, fbx_bcdata, _bl_bcdata in connections_gen(s_uuid, b'Deformer', b'BlendShapeChannel'):
            # Track the Shapes connected to each BlendShapeChannel.
            shapes_assigned_to_channel = blend_shape_channel_to_shapes.setdefault(bc_uuid, [])
            shapes_assigned_to_channel.append(s_uuid)
            for bs_uuid, _fbx_bsdata, _bl_bsdata in connections_gen(bc_uuid, b'Deformer', b'BlendShape'):
                for m_uuid, _fbx_mdata, bl_mdata in connections_gen(bs_uuid, b'Geometry', b'Mesh'):
                    # Blenmeshes are assumed already created at that time!
                    assert(isinstance(bl_mdata, bpy.types.Mesh))
                    # Group shapes by mesh so that each mesh only needs to be processed once for all of its shape keys.
                    if bl_mdata not in mesh_to_shapes:
                        # And we have to find all objects using this mesh!
                        objects = []
                        for o_uuid, o_ctype in fbx_connection_map.get(m_uuid, ()):
                            if o_ctype.props[0] != b'OO':
                                continue
                            node = fbx_helper_nodes[o_uuid]
                            if node:
                                objects.append(node)
                        shapes_list = []
                        mesh_to_shapes[bl_mdata] = (objects, shapes_list)
                    else:
                        shapes_list = mesh_to_shapes[bl_mdata][1]
                    # Only the number of shapes assigned to each BlendShapeChannel needs to be passed through to
                    # `blen_read_shapes`, but that number isn't known until all the connections have been
                    # iterated, so pass the `shapes_assigned_to_channel` list instead.
                    shapes_list.append((bc_uuid, fbx_sdata, fbx_bcdata, shapes_assigned_to_channel))
                # BlendShape deformers are only here to connect BlendShapeChannels to meshes, nothing else to do.

    # Iterate through each mesh and create its shape keys
    for bl_mdata, (objects, shapes) in mesh_to_shapes.items():
        for bc_uuid, keyblocks in blen_read_shapes(fbx_tmpl, shapes, objects, bl_mdata, scene).items():
            # keyblocks is a list of tuples (mesh, keyblock) matching that shape/blendshapechannel, for animation.
            blend_shape_channels.setdefault(bc_uuid, []).extend(keyblocks)
    if settings.use_subsurf:
        perfmon.step("FBX import: Subdivision surfaces")

        # Look through connections for subsurf in meshes and add it to the parent object
        for fbx_link in fbx_connections.elems:
            if fbx_link.props[0] != b'OO':
                continue
            if fbx_link.props_type[1:3] == b'LL':
                c_src, c_dst = fbx_link.props[1:3]
                parent = fbx_helper_nodes.get(c_dst)
                if parent is None:
                    continue

                child = fbx_helper_nodes.get(c_src)
                if child is None:
                    fbx_sdata, bl_data = fbx_table_nodes.get(c_src, (None, None))
                    if fbx_sdata.id != b'Geometry':
                        continue

                    preview_levels = elem_prop_first(elem_find_first(fbx_sdata, b'PreviewDivisionLevels'))
                    render_levels = elem_prop_first(elem_find_first(fbx_sdata, b'RenderDivisionLevels'))
                    if isinstance(preview_levels, int) and isinstance(render_levels, int):
                        mod = parent.bl_obj.modifiers.new('subsurf', 'SUBSURF')
                        mod.levels = preview_levels
                        mod.render_levels = render_levels
                        boundary_rule = elem_prop_first(elem_find_first(fbx_sdata, b'BoundaryRule'), default=1)
                        if boundary_rule == 1:
                            mod.boundary_smooth = "PRESERVE_CORNERS"
                        else:
                            mod.boundary_smooth = "ALL"
    perfmon.step("FBX import: Animations...")

    # Find the number of "ktimes" per second for this file.
    # Start with the default for this FBX version.
    fbx_ktime = FBX_KTIME_V8 if version >= 8000 else FBX_KTIME_V7
    # Try to find the value of the nested elem_root->'FBXHeaderExtension'->'OtherFlags'->'TCDefinition' element
    # and look up the "ktimes" per second for its value.
    if header := elem_find_first(elem_root, b'FBXHeaderExtension'):
        # The header version that added TCDefinition support is 1004.
        if elem_prop_first(elem_find_first(header, b'FBXHeaderVersion'), default=0) >= 1004:
            if other_flags := elem_find_first(header, b'OtherFlags'):
                if timecode_definition := elem_find_first(other_flags, b'TCDefinition'):
                    timecode_definition_value = elem_prop_first(timecode_definition)
                    # If its value is unknown or missing, default to FBX_KTIME_V8.
                    fbx_ktime = FBX_TIMECODE_DEFINITION_TO_KTIME_PER_SECOND.get(timecode_definition_value,
                                                                                FBX_KTIME_V8)
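    # `fbx_ktime` is the number of FBX time ticks per second; it is passed to blen_read_animations()
    # below so that key times stored in ktime ticks can be converted to real time and scene frames.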
    fbx_tmpl_astack = fbx_template_get((b'AnimationStack', b'FbxAnimStack'))
    fbx_tmpl_alayer = fbx_template_get((b'AnimationLayer', b'FbxAnimLayer'))

    stacks = {}

    # AnimationStacks.
    for as_uuid, fbx_asitem in fbx_table_nodes.items():
        fbx_asdata, _blen_data = fbx_asitem
        if fbx_asdata.id != b'AnimationStack' or fbx_asdata.props[2] != b'':
            continue
        stacks[as_uuid] = (fbx_asitem, {})

    # AnimationLayers
    # (mixing is completely ignored for now, each layer results in an independent set of actions).
    def get_astacks_from_alayer(al_uuid):
        for as_uuid, as_ctype in fbx_connection_map.get(al_uuid, ()):
            if as_ctype.props[0] != b'OO':
                continue
            fbx_asdata, _bl_asdata = fbx_table_nodes.get(as_uuid, (None, None))
            if (fbx_asdata is None or fbx_asdata.id != b'AnimationStack' or
                    fbx_asdata.props[2] != b'' or as_uuid not in stacks):
                continue
            yield as_uuid

    for al_uuid, fbx_alitem in fbx_table_nodes.items():
        fbx_aldata, _blen_data = fbx_alitem
        if fbx_aldata.id != b'AnimationLayer' or fbx_aldata.props[2] != b'':
            continue
        for as_uuid in get_astacks_from_alayer(al_uuid):
            _fbx_asitem, alayers = stacks[as_uuid]
            alayers[al_uuid] = (fbx_alitem, {})
    # AnimationCurveNodes (also the ones linked to actual animated data!).
    curvenodes = {}
    for acn_uuid, fbx_acnitem in fbx_table_nodes.items():
        fbx_acndata, _blen_data = fbx_acnitem
        if fbx_acndata.id != b'AnimationCurveNode' or fbx_acndata.props[2] != b'':
            continue
        cnode = curvenodes[acn_uuid] = {}
        items = []
        for n_uuid, n_ctype in fbx_connection_map.get(acn_uuid, ()):
            if n_ctype.props[0] != b'OP':
                continue
            lnk_prop = n_ctype.props[3]
            if lnk_prop in {b'Lcl Translation', b'Lcl Rotation', b'Lcl Scaling'}:
                # n_uuid can (????) be linked to root '0' node, instead of a mere object node... See T41712.
                ob = fbx_helper_nodes.get(n_uuid, None)
                if ob is None or ob.is_root:
                    continue
                items.append((ob, lnk_prop))
            elif lnk_prop == b'DeformPercent':  # Shape keys.
                keyblocks = blend_shape_channels.get(n_uuid, None)
                if keyblocks is None:
                    continue
                items += [(kb, lnk_prop) for kb in keyblocks]
            elif lnk_prop == b'FocalLength':  # Camera lens.
                from bpy.types import Camera
                fbx_item = fbx_table_nodes.get(n_uuid, None)
                if fbx_item is None or not isinstance(fbx_item[1], Camera):
                    continue
                cam = fbx_item[1]
                items.append((cam, lnk_prop))
            elif lnk_prop == b'FocusDistance':  # Camera focus.
                from bpy.types import Camera
                fbx_item = fbx_table_nodes.get(n_uuid, None)
                if fbx_item is None or not isinstance(fbx_item[1], Camera):
                    continue
                cam = fbx_item[1]
                items.append((cam, lnk_prop))
            elif lnk_prop == b'DiffuseColor':
                from bpy.types import Material
                fbx_item = fbx_table_nodes.get(n_uuid, None)
                if fbx_item is None or not isinstance(fbx_item[1], Material):
                    continue
                mat = fbx_item[1]
                items.append((mat, lnk_prop))
                print("WARNING! Importing material's animation is not supported for Nodal materials...")
        for al_uuid, al_ctype in fbx_connection_map.get(acn_uuid, ()):
            if al_ctype.props[0] != b'OO':
                continue
            fbx_aldata, _blen_aldata = fbx_alitem = fbx_table_nodes.get(al_uuid, (None, None))
            if fbx_aldata is None or fbx_aldata.id != b'AnimationLayer' or fbx_aldata.props[2] != b'':
                continue
            for as_uuid in get_astacks_from_alayer(al_uuid):
                _fbx_alitem, anim_items = stacks[as_uuid][1][al_uuid]
                assert(_fbx_alitem == fbx_alitem)
                for item, item_prop in items:
                    # No need to keep curvenode FBX data here, contains nothing useful for us.
                    anim_items.setdefault(item, {})[acn_uuid] = (cnode, item_prop)
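    # At this point `stacks` is nested roughly as:
    #   {stack_uuid: (fbx_stack_item, {layer_uuid: (fbx_layer_item,
    #       {animated_item: {curvenode_uuid: (curves_dict, fbx_prop)}})})}
    # where each `curves_dict` is filled with the actual AnimationCurves below.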
    # AnimationCurves (real animation data).
    for ac_uuid, fbx_acitem in fbx_table_nodes.items():
        fbx_acdata, _blen_data = fbx_acitem
        if fbx_acdata.id != b'AnimationCurve' or fbx_acdata.props[2] != b'':
            continue
        for acn_uuid, acn_ctype in fbx_connection_map.get(ac_uuid, ()):
            if acn_ctype.props[0] != b'OP':
                continue
            fbx_acndata, _bl_acndata = fbx_table_nodes.get(acn_uuid, (None, None))
            if (fbx_acndata is None or fbx_acndata.id != b'AnimationCurveNode' or
                    fbx_acndata.props[2] != b'' or acn_uuid not in curvenodes):
                continue
            # Note this is an infamous simplification of the compound props stuff,
            # seems to be standard naming but we'll probably have to be smarter to handle more exotic files?
            channel = {
                b'd|X': 0, b'd|Y': 1, b'd|Z': 2,
                b'd|DeformPercent': 0,
                b'd|FocalLength': 0,
                b'd|FocusDistance': 0
            }.get(acn_ctype.props[3], None)
            if channel is None:
                continue
            curvenodes[acn_uuid][ac_uuid] = (fbx_acitem, channel)

    # And now that we have sorted all this, apply animations!
    blen_read_animations(fbx_tmpl_astack, fbx_tmpl_alayer, stacks, scene, settings.anim_offset, global_scale,
                         fbx_ktime)
.step("FBX import: Assign materials...")
3790 # link Material's to Geometry (via Model's)
3791 processed_meshes
= set()
3792 for helper_uuid
, helper_node
in fbx_helper_nodes
.items():
3793 obj
= helper_node
.bl_obj
3794 if not obj
or obj
.type != 'MESH':
3797 # Get the Mesh corresponding to the Geometry used by this Model.
3799 processed_meshes
.add(mesh
)
3801 # Get the Materials from the Model's connections.
3802 material_connections
= connection_filter_reverse(helper_uuid
, b
'Material')
3803 if not material_connections
:
3806 mesh_mats
= mesh
.materials
3807 num_mesh_mats
= len(mesh_mats
)
3809 if num_mesh_mats
== 0:
3810 # This is the first (or only) model to use this Geometry. This is the most common case when importing.
3811 # All the Materials can trivially be appended to the Mesh's Materials.
3812 mats_to_append
= material_connections
3813 mats_to_compare
= ()
3814 elif num_mesh_mats
== len(material_connections
):
3815 # Another Model uses the same Geometry and has already appended its Materials to the Mesh. This is the
3816 # second most common case when importing.
3817 # It's also possible that a Model could share the same Geometry and have the same number of Materials,
3818 # but have different Materials, though this is less common.
3819 # The Model Materials will need to be compared with the Mesh Materials at the same indices to check if
3820 # they are different.
3822 mats_to_compare
= material_connections
3824 # Under the assumption that only used Materials are connected to the Model, the number of Materials of
3825 # each Model using a specific Geometry should be the same, otherwise the Material Indices of the
3826 # Geometry will be out-of-bounds of the Materials of at least one of the Models using that Geometry.
3827 # We wouldn't expect this case to happen, but there's nothing to say it can't.
3828 # We'll handle a differing number of Materials by appending any extra Materials and comparing the rest.
3829 mats_to_append
= material_connections
[num_mesh_mats
:]
3830 mats_to_compare
= material_connections
[:num_mesh_mats
]
3832 for _fbx_lnk_material
, material
, _fbx_lnk_material_type
in mats_to_append
:
3833 mesh_mats
.append(material
)
3835 mats_to_compare_and_slots
= zip(mats_to_compare
, obj
.material_slots
)
3836 for (_fbx_lnk_material
, material
, _fbx_lnk_material_type
), mat_slot
in mats_to_compare_and_slots
:
3837 if material
!= mat_slot
.material
:
3838 # Material Slots default to being linked to the Mesh, so a previously processed Object is also using
3839 # this Mesh, but the Mesh uses a different Material for this Material Slot.
3840 # To have a different Material for this Material Slot on this Object only, the Material Slot must be
3841 # linked to the Object rather than the Mesh.
3842 # TODO: add an option to link all materials to objects in Blender instead?
3843 mat_slot
.link
= 'OBJECT'
3844 mat_slot
.material
= material
3846 # We have to validate mesh polygons' ma_idx, see #41015!
3847 # Some FBX seem to have an extra 'default' material which is not defined in FBX file.
3848 for mesh
in processed_meshes
:
3849 if mesh
.validate_material_indices():
3850 print("WARNING: mesh '%s' had invalid material indices, those were reset to first material" % mesh
.name
)
    perfmon.step("FBX import: Assign textures...")

    material_images = {}

    fbx_tmpl = fbx_template_get((b'Material', b'KFbxSurfacePhong'))
    # b'KFbxSurfaceLambert'

    def texture_mapping_set(fbx_obj, node_texture):
        assert(fbx_obj.id == b'Texture')

        fbx_props = (elem_find_first(fbx_obj, b'Properties70'),
                     elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))
        loc = elem_props_get_vector_3d(fbx_props, b'Translation', (0.0, 0.0, 0.0))
        rot = tuple(-r for r in elem_props_get_vector_3d(fbx_props, b'Rotation', (0.0, 0.0, 0.0)))
        scale = tuple(((1.0 / s) if s != 0.0 else 1.0)
                      for s in elem_props_get_vector_3d(fbx_props, b'Scaling', (1.0, 1.0, 1.0)))
        clamp = (bool(elem_props_get_enum(fbx_props, b'WrapModeU', 0)) or
                 bool(elem_props_get_enum(fbx_props, b'WrapModeV', 0)))

        if (loc == (0.0, 0.0, 0.0) and
                rot == (0.0, 0.0, 0.0) and
                scale == (1.0, 1.0, 1.0) and
                not clamp):
            return

        node_texture.translation = loc
        node_texture.rotation = rot
        node_texture.scale = scale
        if clamp:
            node_texture.extension = 'EXTEND'
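    # FBX WrapModeU/V of 0 means repeat and a non-zero value means clamp, so `clamp` is True when either
    # axis clamps and the image extension is switched to 'EXTEND'. The negated rotation and reciprocal
    # scale appear to convert the FBX texture-space transform into Blender's UV-mapping convention.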
    for fbx_uuid, fbx_item in fbx_table_nodes.items():
        fbx_obj, blen_data = fbx_item
        if fbx_obj.id != b'Material':
            continue

        material = fbx_table_nodes.get(fbx_uuid, (None, None))[1]
        for (fbx_lnk,
             image,
             fbx_lnk_type) in connection_filter_reverse(fbx_uuid, b'Texture'):

            if fbx_lnk_type.props[0] == b'OP':
                lnk_type = fbx_lnk_type.props[3]

                ma_wrap = nodal_material_wrap_map[material]

                if lnk_type in {b'DiffuseColor', b'3dsMax|maps|texmap_diffuse'}:
                    ma_wrap.base_color_texture.image = image
                    texture_mapping_set(fbx_lnk, ma_wrap.base_color_texture)
                elif lnk_type in {b'SpecularColor', b'SpecularFactor'}:
                    # Intensity actually, not color...
                    ma_wrap.specular_texture.image = image
                    texture_mapping_set(fbx_lnk, ma_wrap.specular_texture)
                elif lnk_type in {b'ReflectionColor', b'ReflectionFactor', b'3dsMax|maps|texmap_reflection'}:
                    # Intensity actually, not color...
                    ma_wrap.metallic_texture.image = image
                    texture_mapping_set(fbx_lnk, ma_wrap.metallic_texture)
                elif lnk_type in {b'TransparentColor', b'TransparencyFactor'}:
                    ma_wrap.alpha_texture.image = image
                    texture_mapping_set(fbx_lnk, ma_wrap.alpha_texture)
                    if use_alpha_decals:
                        material_decals.add(material)
                elif lnk_type == b'ShininessExponent':
                    # That is probably reversed compared to expected results? TODO...
                    ma_wrap.roughness_texture.image = image
                    texture_mapping_set(fbx_lnk, ma_wrap.roughness_texture)
                # XXX, applications abuse bump!
                elif lnk_type in {b'NormalMap', b'Bump', b'3dsMax|maps|texmap_bump'}:
                    ma_wrap.normalmap_texture.image = image
                    texture_mapping_set(fbx_lnk, ma_wrap.normalmap_texture)
                # elif lnk_type == b'Bump':
                #     # TODO displacement...
                elif lnk_type in {b'EmissiveColor'}:
                    ma_wrap.emission_color_texture.image = image
                    texture_mapping_set(fbx_lnk, ma_wrap.emission_color_texture)
                elif lnk_type in {b'EmissiveFactor'}:
                    ma_wrap.emission_strength_texture.image = image
                    texture_mapping_set(fbx_lnk, ma_wrap.emission_strength_texture)
                else:
                    print("WARNING: material link %r ignored" % lnk_type)

                material_images.setdefault(material, {})[lnk_type] = image
    # Check if the diffuse image has an alpha channel,
    # if so, use the alpha channel.

    # Note: this could be made optional since images may have alpha but be entirely opaque
    for fbx_uuid, fbx_item in fbx_table_nodes.items():
        fbx_obj, blen_data = fbx_item
        if fbx_obj.id != b'Material':
            continue
        material = fbx_table_nodes.get(fbx_uuid, (None, None))[1]
        image = material_images.get(material, {}).get(b'DiffuseColor', None)

        if image and image.depth == 32:
            if use_alpha_decals:
                material_decals.add(material)

            ma_wrap = nodal_material_wrap_map[material]
            ma_wrap.alpha_texture.use_alpha = True
            ma_wrap.alpha_texture.copy_from(ma_wrap.base_color_texture)

        # Propagate mapping from diffuse to all other channels which have none defined.
        # XXX Commenting for now, I do not really understand the logic here, why should diffuse mapping
        #     be applied to all others if not defined for them???
        # ~ ma_wrap = nodal_material_wrap_map[material]
        # ~ ma_wrap.mapping_set_from_diffuse()
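    # The workaround below nudges the vertices of meshes using 'decal' materials along their normals by
    # `decal_offset` (since Cycles has no per-material z-offset) and disables shadow casting on those objects.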
    perfmon.step("FBX import: Cycles z-offset workaround...")

    # Annoying workaround for cycles having no z-offset
    if material_decals and use_alpha_decals:
        for fbx_uuid, fbx_item in fbx_table_nodes.items():
            fbx_obj, blen_data = fbx_item
            if fbx_obj.id != b'Geometry':
                continue
            if fbx_obj.props[-1] == b'Mesh':
                mesh = fbx_item[1]

                num_verts = len(mesh.vertices)
                if decal_offset != 0.0 and num_verts > 0:
                    for material in mesh.materials:
                        if material in material_decals:
                            blen_norm_dtype = np.single
                            vcos = MESH_ATTRIBUTE_POSITION.to_ndarray(mesh.attributes)
                            vnorm = np.empty(num_verts * 3, dtype=blen_norm_dtype)
                            mesh.vertex_normals.foreach_get("vector", vnorm)

                            vcos += vnorm * decal_offset

                            MESH_ATTRIBUTE_POSITION.foreach_set(mesh.attributes, vcos)
                            break

                for obj in (obj for obj in bpy.data.objects if obj.data == mesh):
                    obj.visible_shadow = False

    perfmon.level_down()

    perfmon.level_down("Import finished.")
    return {'FINISHED'}