# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Script copyright (C) Campbell Barton, Bastien Montagne
import array
import datetime
import math
import os
import time
import collections
from collections import namedtuple, OrderedDict
import itertools
from itertools import zip_longest, chain
import bpy
import bpy_extras
from bpy.types import Object, Bone
from mathutils import Vector, Matrix
from . import encode_bin, data_types
# "Constants"
FBX_VERSION = 7400
FBX_HEADER_VERSION = 1003
FBX_SCENEINFO_VERSION = 100
FBX_TEMPLATES_VERSION = 100
FBX_MODELS_VERSION = 232
FBX_GEOMETRY_VERSION = 124
FBX_GEOMETRY_NORMAL_VERSION = 102
FBX_GEOMETRY_BINORMAL_VERSION = 102
FBX_GEOMETRY_TANGENT_VERSION = 102
FBX_GEOMETRY_SMOOTHING_VERSION = 102
FBX_GEOMETRY_VCOLOR_VERSION = 101
FBX_GEOMETRY_UV_VERSION = 101
FBX_GEOMETRY_MATERIAL_VERSION = 101
FBX_GEOMETRY_LAYER_VERSION = 100
FBX_POSE_BIND_VERSION = 100
FBX_DEFORMER_SKIN_VERSION = 101
FBX_DEFORMER_CLUSTER_VERSION = 100
FBX_MATERIAL_VERSION = 102
FBX_TEXTURE_VERSION = 202
FBX_NAME_CLASS_SEP = b"\x00\x01"
FBX_KTIME = 46186158000 # This is the number of "ktimes" in one second (yep, precision over the nanosecond...)
MAT_CONVERT_LAMP = Matrix.Rotation(math.pi / 2.0, 4, 'X') # Blender is -Z, FBX is -Y.
MAT_CONVERT_CAMERA = Matrix.Rotation(math.pi / 2.0, 4, 'Y') # Blender is -Z, FBX is +X.
#MAT_CONVERT_BONE = Matrix.Rotation(math.pi / -2.0, 4, 'X') # Blender is +Y, FBX is +Z.
MAT_CONVERT_BONE = Matrix()
# Lamps.
FBX_LIGHT_TYPES = {
'POINT': 0, # Point.
'SUN': 1, # Directional.
'SPOT': 2, # Spot.
'HEMI': 1, # Directional.
'AREA': 3, # Area.
}
FBX_LIGHT_DECAY_TYPES = {
'CONSTANT': 0, # None.
'INVERSE_LINEAR': 1, # Linear.
'INVERSE_SQUARE': 2, # Quadratic.
'CUSTOM_CURVE': 2, # Quadratic.
'LINEAR_QUADRATIC_WEIGHTED': 2, # Quadratic.
}
##### Misc utilities #####
# Note: this could be in a utility (math.units e.g.)...
UNITS = {
"meter": 1.0, # Ref unit!
"kilometer": 0.001,
"millimeter": 1000.0,
"foot": 1.0 / 0.3048,
"inch": 1.0 / 0.0254,
"turn": 1.0, # Ref unit!
"degree": 360.0,
"radian": math.pi * 2.0,
"second": 1.0, # Ref unit!
"ktime": FBX_KTIME,
}
def units_convert(val, u_from, u_to):
"""Convert value."""
conv = UNITS[u_to] / UNITS[u_from]
return val * conv
def units_convert_iter(it, u_from, u_to):
"""Convert value."""
conv = UNITS[u_to] / UNITS[u_from]
return (v * conv for v in it)
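# Illustrative sketch only (not used by the exporter): both helpers simply scale by the ratio
# of the reference-unit factors above, e.g.
#     units_convert(25.4, "millimeter", "inch")  # -> 1.0
#     tuple(units_convert_iter((1.0, 2.0), "meter", "kilometer"))  # -> (0.001, 0.002)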
def matrix_to_array(mat):
"""Concatenate matrix's columns into a single, flat tuple"""
# blender matrix is row major, fbx is col major so transpose on write
return tuple(f for v in mat.transposed() for f in v)
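# Illustrative sketch only: since FBX is column-major, a pure Blender translation matrix ends up
# with its translation in the last four values of the flattened tuple, e.g.
#     matrix_to_array(Matrix.Translation((1.0, 2.0, 3.0)))[-4:]  # -> (1.0, 2.0, 3.0, 1.0)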
RIGHT_HAND_AXES = {
# Up, Front -> FBX values (tuples of (axis, sign), Up, Front, Coord).
# Note: Since we always stay in right-handed system, third coord sign is always positive!
('X', 'Y'): ((0, 1), (1, 1), (2, 1)),
('X', '-Y'): ((0, 1), (1, -1), (2, 1)),
('X', 'Z'): ((0, 1), (2, 1), (1, 1)),
('X', '-Z'): ((0, 1), (2, -1), (1, 1)),
('-X', 'Y'): ((0, -1), (1, 1), (2, 1)),
('-X', '-Y'): ((0, -1), (1, -1), (2, 1)),
('-X', 'Z'): ((0, -1), (2, 1), (1, 1)),
('-X', '-Z'): ((0, -1), (2, -1), (1, 1)),
('Y', 'X'): ((1, 1), (0, 1), (2, 1)),
('Y', '-X'): ((1, 1), (0, -1), (2, 1)),
('Y', 'Z'): ((1, 1), (2, 1), (0, 1)),
('Y', '-Z'): ((1, 1), (2, -1), (0, 1)),
('-Y', 'X'): ((1, -1), (0, 1), (2, 1)),
('-Y', '-X'): ((1, -1), (0, -1), (2, 1)),
('-Y', 'Z'): ((1, -1), (2, 1), (0, 1)),
('-Y', '-Z'): ((1, -1), (2, -1), (0, 1)),
('Z', 'X'): ((2, 1), (0, 1), (1, 1)),
('Z', '-X'): ((2, 1), (0, -1), (1, 1)),
('Z', 'Y'): ((2, 1), (1, 1), (0, 1)), # Blender system!
('Z', '-Y'): ((2, 1), (1, -1), (0, 1)),
('-Z', 'X'): ((2, -1), (0, 1), (1, 1)),
('-Z', '-X'): ((2, -1), (0, -1), (1, 1)),
('-Z', 'Y'): ((2, -1), (1, 1), (0, 1)),
('-Z', '-Y'): ((2, -1), (1, -1), (0, 1)),
}
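# Illustrative sketch only (assuming these tuples feed the usual GlobalSettings axis properties,
# i.e. UpAxis/UpAxisSign, FrontAxis/FrontAxisSign, CoordAxis/CoordAxisSign):
#     up, front, coord = RIGHT_HAND_AXES['Z', 'Y']  # Blender's own system.
#     # up == (2, 1): axis index 2 (Z), positive sign; front == (1, 1); coord == (0, 1).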
##### UIDs code. #####
# ID class (mere int).
class UID(int):
pass
# UIDs storage.
_keys_to_uids = {}
_uids_to_keys = {}
def _key_to_uid(uids, key):
# TODO: Check this is robust enough for our needs!
# Note: We assume we have already checked the related key wasn't yet in _keys_to_uids!
# As int64 is signed in FBX, we keep uids below 2**63...
if isinstance(key, int) and 0 <= key < 2**63:
# We can use value directly as id!
uid = key
else:
uid = hash(key)
if uid < 0:
uid = -uid
if uid >= 2**63:
uid //= 2
# Make sure our uid *is* unique.
if uid in uids:
inc = 1 if uid < 2**62 else -1
while uid in uids:
uid += inc
    if uid < 0 or uid >= 2**63:
        # Note that this is more than unlikely, but does not harm anyway...
        raise ValueError("Unable to generate a UID for key {}".format(key))
return UID(uid)
def get_fbxuid_from_key(key):
"""
    Return a UID for the given key, which is assumed hashable.
"""
uid = _keys_to_uids.get(key, None)
if uid is None:
uid = _key_to_uid(_uids_to_keys, key)
_keys_to_uids[key] = uid
_uids_to_keys[uid] = key
return uid
def get_key_from_fbxuid(uid):
"""
Return the key which generated this uid.
"""
assert(uid.__class__ == UID)
return _uids_to_keys.get(uid, None)
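# Illustrative sketch only: keys are arbitrary hashable values; repeated calls with the same key
# return the same UID, and the reverse mapping gives the key back, e.g.
#     uid = get_fbxuid_from_key("BObject::Cube")  # Hypothetical key.
#     get_fbxuid_from_key("BObject::Cube") == uid  # -> True
#     get_key_from_fbxuid(uid)  # -> "BObject::Cube"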
# Blender-specific key generators
def get_blenderID_key(bid):
return "B" + bid.rna_type.name + "::" + bid.name
def get_blender_bone_key(armature, bone):
"""Return bone's keys (Model and NodeAttribute)."""
key = "|".join((get_blenderID_key(armature), get_blenderID_key(bone)))
return key, key + "_Data"
def get_blender_armature_bindpose_key(armature, mesh):
"""Return armature's bindpose key."""
return "|".join((get_blenderID_key(armature), get_blenderID_key(mesh), "BindPose"))
def get_blender_armature_skin_key(armature, mesh):
"""Return armature's skin key."""
return "|".join((get_blenderID_key(armature), get_blenderID_key(mesh), "DeformerSkin"))
def get_blender_bone_cluster_key(armature, mesh, bone):
"""Return bone's cluster key."""
return "|".join((get_blenderID_key(armature), get_blenderID_key(mesh),
get_blenderID_key(bone), "SubDeformerCluster"))
def get_blender_anim_stack_key(scene):
"""Return single anim stack key."""
return "|".join((get_blenderID_key(scene), "AnimStack"))
def get_blender_anim_layer_key(ID):
"""Return ID's anim layer key."""
return "|".join((get_blenderID_key(ID), "AnimLayer"))
def get_blender_anim_curve_node_key(ID, fbx_prop_name):
"""Return (ID, fbxprop) curve node key."""
return "|".join((get_blenderID_key(ID), fbx_prop_name, "AnimCurveNode"))
def get_blender_anim_curve_key(ID, fbx_prop_name, fbx_prop_item_name):
"""Return (ID, fbxprop, item) curve key."""
return "|".join((get_blenderID_key(ID), fbx_prop_name, fbx_prop_item_name, "AnimCurve"))
##### Element generators. #####
# Note: elem may be None, in this case the element is not added to any parent.
def elem_empty(elem, name):
sub_elem = encode_bin.FBXElem(name)
if elem is not None:
elem.elems.append(sub_elem)
return sub_elem
def elem_properties(elem):
return elem_empty(elem, b"Properties70")
def _elem_data_single(elem, name, value, func_name):
sub_elem = elem_empty(elem, name)
getattr(sub_elem, func_name)(value)
return sub_elem
def _elem_data_vec(elem, name, value, func_name):
sub_elem = elem_empty(elem, name)
func = getattr(sub_elem, func_name)
for v in value:
func(v)
return sub_elem
def elem_data_single_bool(elem, name, value):
return _elem_data_single(elem, name, value, "add_bool")
def elem_data_single_int16(elem, name, value):
return _elem_data_single(elem, name, value, "add_int16")
def elem_data_single_int32(elem, name, value):
return _elem_data_single(elem, name, value, "add_int32")
def elem_data_single_int64(elem, name, value):
return _elem_data_single(elem, name, value, "add_int64")
def elem_data_single_float32(elem, name, value):
return _elem_data_single(elem, name, value, "add_float32")
def elem_data_single_float64(elem, name, value):
return _elem_data_single(elem, name, value, "add_float64")
def elem_data_single_bytes(elem, name, value):
return _elem_data_single(elem, name, value, "add_bytes")
def elem_data_single_string(elem, name, value):
return _elem_data_single(elem, name, value, "add_string")
def elem_data_single_string_unicode(elem, name, value):
return _elem_data_single(elem, name, value, "add_string_unicode")
def elem_data_single_bool_array(elem, name, value):
return _elem_data_single(elem, name, value, "add_bool_array")
def elem_data_single_int32_array(elem, name, value):
return _elem_data_single(elem, name, value, "add_int32_array")
def elem_data_single_int64_array(elem, name, value):
return _elem_data_single(elem, name, value, "add_int64_array")
def elem_data_single_float32_array(elem, name, value):
return _elem_data_single(elem, name, value, "add_float32_array")
def elem_data_single_float64_array(elem, name, value):
return _elem_data_single(elem, name, value, "add_float64_array")
def elem_data_single_byte_array(elem, name, value):
return _elem_data_single(elem, name, value, "add_byte_array")
def elem_data_vec_float64(elem, name, value):
return _elem_data_vec(elem, name, value, "add_float64")
##### Generators for standard FBXProperties70 properties. #####
# Properties definitions, format: (b"type_1", b"label(???)", "name_set_value_1", "name_set_value_2", ...)
# XXX Looks like there can be various variations of formats here... Will have to be checked ultimately!
# Also, those "custom" types like 'FieldOfView' or 'Lcl Translation' are pure nonsense,
# these are just Vector3D ultimately... *sigh* (again).
FBX_PROPERTIES_DEFINITIONS = {
"p_bool": (b"bool", b"", "add_int32"), # Yes, int32 for a bool (and they do have a core bool type)!!!
"p_integer": (b"int", b"Integer", "add_int32"),
"p_ulonglong": (b"ULongLong", b"", "add_int64"),
"p_enum": (b"enum", b"", "add_int32"),
"p_number": (b"double", b"Number", "add_float64"),
"p_visibility": (b"Visibility", b"", "add_float64"),
"p_fov": (b"FieldOfView", b"", "add_float64"),
"p_fov_x": (b"FieldOfViewX", b"", "add_float64"),
"p_fov_y": (b"FieldOfViewY", b"", "add_float64"),
"p_vector_3d": (b"Vector3D", b"Vector", "add_float64", "add_float64", "add_float64"),
"p_lcl_translation": (b"Lcl Translation", b"", "add_float64", "add_float64", "add_float64"),
"p_lcl_rotation": (b"Lcl Rotation", b"", "add_float64", "add_float64", "add_float64"),
"p_lcl_scaling": (b"Lcl Scaling", b"", "add_float64", "add_float64", "add_float64"),
"p_color_rgb": (b"ColorRGB", b"Color", "add_float64", "add_float64", "add_float64"),
"p_string": (b"KString", b"", "add_string_unicode"),
"p_string_url": (b"KString", b"Url", "add_string_unicode"),
"p_timestamp": (b"KTime", b"Time", "add_int64"),
"p_datetime": (b"DateTime", b"", "add_string_unicode"),
"p_object": (b"object", b""), # XXX Check this! No value for this prop???
"p_compound": (b"Compound", b""),
def _elem_props_set(elem, ptype, name, value, flags):
    p = elem_data_single_string(elem, b"P", name)
    for t in ptype[:2]:
        p.add_string(t)
    p.add_string(flags)
if len(ptype) == 3:
getattr(p, ptype[2])(value)
elif len(ptype) > 3:
# We assume value is iterable, else it's a bug!
for callback, val in zip(ptype[2:], value):
getattr(p, callback)(val)
def _elem_props_flags(animatable, custom):
if animatable and custom:
return b"AU"
elif animatable:
return b"A"
elif custom:
return b"U"
return b""
def elem_props_set(elem, ptype, name, value=None, animatable=False, custom=False):
ptype = FBX_PROPERTIES_DEFINITIONS[ptype]
_elem_props_set(elem, ptype, name, value, _elem_props_flags(animatable, custom))
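# Illustrative sketch only (property name hypothetical): writing an animatable number under a
# Properties70 element,
#     props = elem_properties(some_elem)
#     elem_props_set(props, "p_number", b"Intensity", 100.0, animatable=True)
# emits a "P" record of the form (b"Intensity", b"double", b"Number", b"A", 100.0).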
def elem_props_compound(elem, cmpd_name, custom=False):
def _setter(ptype, name, value, animatable=False, custom=False):
name = cmpd_name + b"|" + name
elem_props_set(elem, ptype, name, value, animatable=animatable, custom=custom)
elem_props_set(elem, "p_compound", cmpd_name, custom=custom)
def elem_props_template_set(template, elem, ptype_name, name, value, animatable=False):
"""
Only add a prop if the same value is not already defined in given template.
Note it is important to not give iterators as value, here!
"""
ptype = FBX_PROPERTIES_DEFINITIONS[ptype_name]
tmpl_val, tmpl_ptype, tmpl_animatable = template.properties.get(name, (None, None, False))
# Note animatable flag from template takes precedence over given one, if applicable.
if tmpl_ptype is not None:
if ((len(ptype) == 3 and (tmpl_val, tmpl_ptype) == (value, ptype_name)) or
(len(ptype) > 3 and (tuple(tmpl_val), tmpl_ptype) == (tuple(value), ptype_name))):
return # Already in template and same value.
_elem_props_set(elem, ptype, name, value, _elem_props_flags(tmpl_animatable, False))
else:
_elem_props_set(elem, ptype, name, value, _elem_props_flags(animatable, False))
##### Generators for connection elements. #####
def elem_connection(elem, c_type, uid_src, uid_dst, prop_dst=None):
e = elem_data_single_string(elem, b"C", c_type)
e.add_int64(uid_src)
e.add_int64(uid_dst)
if prop_dst is not None:
e.add_string(prop_dst)
##### Templates #####
# TODO: check all those "default" values, they should match Blender's default as much as possible, I guess?
FBXTemplate = namedtuple("FBXTemplate", ("type_name", "prop_type_name", "properties", "nbr_users"))
def fbx_templates_generate(root, fbx_templates):
# We may have to gather different templates in the same node (e.g. NodeAttribute template gathers properties
# for Lights, Cameras, LibNodes, etc.).
templates = OrderedDict()
for type_name, prop_type_name, properties, nbr_users in fbx_templates.values():
if type_name not in templates:
templates[type_name] = [OrderedDict(((prop_type_name, properties),)), nbr_users]
else:
templates[type_name][0][prop_type_name] = properties
templates[type_name][1] += nbr_users
for type_name, (subprops, nbr_users) in templates.items():
template = elem_data_single_string(root, b"ObjectType", type_name)
elem_data_single_int32(template, b"Count", nbr_users)
for prop_type_name, properties in subprops.items():
if prop_type_name and properties:
elem = elem_data_single_string(template, b"PropertyTemplate", prop_type_name)
props = elem_properties(elem)
for name, (value, ptype, animatable) in properties.items():
elem_props_set(props, ptype, name, value, animatable=animatable)
def fbx_template_def_globalsettings(scene, settings, override_defaults=None, nbr_users=0):
    props = OrderedDict()
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"GlobalSettings", b"", props, nbr_users)
def fbx_template_def_model(scene, settings, override_defaults=None, nbr_users=0):
gscale = settings.global_scale
props = OrderedDict((
# Name, Value, Type, Animatable
(b"QuaternionInterpolate", (False, "p_bool", False)),
(b"RotationOffset", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"RotationPivot", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"ScalingOffset", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"ScalingPivot", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"TranslationActive", (False, "p_bool", False)),
(b"TranslationMin", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"TranslationMax", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"TranslationMinX", (False, "p_bool", False)),
(b"TranslationMinY", (False, "p_bool", False)),
(b"TranslationMinZ", (False, "p_bool", False)),
(b"TranslationMaxX", (False, "p_bool", False)),
(b"TranslationMaxY", (False, "p_bool", False)),
(b"TranslationMaxZ", (False, "p_bool", False)),
(b"RotationOrder", (0, "p_enum", False)), # we always use 'XYZ' order.
(b"RotationSpaceForLimitOnly", (False, "p_bool", False)),
(b"RotationStiffnessX", (0.0, "p_number", False)),
(b"RotationStiffnessY", (0.0, "p_number", False)),
(b"RotationStiffnessZ", (0.0, "p_number", False)),
(b"AxisLen", (10.0, "p_number", False)),
(b"PreRotation", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"PostRotation", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"RotationActive", (False, "p_bool", False)),
(b"RotationMin", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"RotationMax", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"RotationMinX", (False, "p_bool", False)),
(b"RotationMinY", (False, "p_bool", False)),
(b"RotationMinZ", (False, "p_bool", False)),
(b"RotationMaxX", (False, "p_bool", False)),
(b"RotationMaxY", (False, "p_bool", False)),
(b"RotationMaxZ", (False, "p_bool", False)),
(b"InheritType", (1, "p_enum", False)), # RSrs
(b"ScalingActive", (False, "p_bool", False)),
(b"ScalingMin", (Vector((1.0, 1.0, 1.0)) * gscale, "p_vector_3d", False)),
(b"ScalingMax", (Vector((1.0, 1.0, 1.0)) * gscale, "p_vector_3d", False)),
(b"ScalingMinX", (False, "p_bool", False)),
(b"ScalingMinY", (False, "p_bool", False)),
(b"ScalingMinZ", (False, "p_bool", False)),
(b"ScalingMaxX", (False, "p_bool", False)),
(b"ScalingMaxY", (False, "p_bool", False)),
(b"ScalingMaxZ", (False, "p_bool", False)),
(b"GeometricTranslation", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"GeometricRotation", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"GeometricScaling", (Vector((1.0, 1.0, 1.0)) * gscale, "p_vector_3d", False)),
(b"MinDampRangeX", (0.0, "p_number", False)),
(b"MinDampRangeY", (0.0, "p_number", False)),
(b"MinDampRangeZ", (0.0, "p_number", False)),
(b"MaxDampRangeX", (0.0, "p_number", False)),
(b"MaxDampRangeY", (0.0, "p_number", False)),
(b"MaxDampRangeZ", (0.0, "p_number", False)),
(b"MinDampStrengthX", (0.0, "p_number", False)),
(b"MinDampStrengthY", (0.0, "p_number", False)),
(b"MinDampStrengthZ", (0.0, "p_number", False)),
(b"MaxDampStrengthX", (0.0, "p_number", False)),
(b"MaxDampStrengthY", (0.0, "p_number", False)),
(b"MaxDampStrengthZ", (0.0, "p_number", False)),
(b"PreferedAngleX", (0.0, "p_number", False)),
(b"PreferedAngleY", (0.0, "p_number", False)),
(b"PreferedAngleZ", (0.0, "p_number", False)),
(b"LookAtProperty", (None, "p_object", False)),
(b"UpVectorProperty", (None, "p_object", False)),
(b"Show", (True, "p_bool", False)),
(b"NegativePercentShapeSupport", (True, "p_bool", False)),
(b"DefaultAttributeIndex", (0, "p_integer", False)),
(b"Freeze", (False, "p_bool", False)),
(b"LODBox", (False, "p_bool", False)),
(b"Lcl Translation", ((0.0, 0.0, 0.0), "p_lcl_translation", True)),
(b"Lcl Rotation", ((0.0, 0.0, 0.0), "p_lcl_rotation", True)),
(b"Lcl Scaling", (Vector((1.0, 1.0, 1.0)) * gscale, "p_lcl_scaling", True)),
(b"Visibility", (1.0, "p_visibility", True)),
))
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"Model", b"FbxNode", props, nbr_users)
def fbx_template_def_light(scene, settings, override_defaults=None, nbr_users=0):
gscale = settings.global_scale
props = OrderedDict((
(b"LightType", (0, "p_enum", False)), # Point light.
(b"CastLight", (True, "p_bool", False)),
(b"Color", ((1.0, 1.0, 1.0), "p_color_rgb", True)),
(b"Intensity", (100.0, "p_number", True)), # Times 100 compared to Blender values...
(b"DecayType", (2, "p_enum", False)), # Quadratic.
(b"DecayStart", (30.0 * gscale, "p_number", False)),
(b"CastShadows", (True, "p_bool", False)),
(b"ShadowColor", ((0.0, 0.0, 0.0), "p_color_rgb", True)),
(b"AreaLightShape", (0, "p_enum", False)), # Rectangle.
))
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"NodeAttribute", b"FbxLight", props, nbr_users)
def fbx_template_def_camera(scene, settings, override_defaults=None, nbr_users=0):
props = OrderedDict() # TODO!!!
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"NodeAttribute", b"FbxCamera", props, nbr_users)
def fbx_template_def_bone(scene, settings, override_defaults=None, nbr_users=0):
    props = OrderedDict()
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"NodeAttribute", b"LimbNode", props, nbr_users)
def fbx_template_def_geometry(scene, settings, override_defaults=None, nbr_users=0):
props = OrderedDict((
(b"Color", ((0.8, 0.8, 0.8), "p_color_rgb", False)),
(b"BBoxMin", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"BBoxMax", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"Primary Visibility", (True, "p_bool", False)),
(b"Casts Shadows", (True, "p_bool", False)),
(b"Receive Shadows", (True, "p_bool", False)),
))
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"Geometry", b"FbxMesh", props, nbr_users)
def fbx_template_def_material(scene, settings, override_defaults=None, nbr_users=0):
# WIP...
props = OrderedDict((
(b"ShadingModel", ("phong", "p_string", False)),
(b"MultiLayer", (False, "p_bool", False)),
(b"EmissiveColor", ((0.8, 0.8, 0.8), "p_color_rgb", True)), # Same as diffuse.
(b"EmissiveFactor", (0.0, "p_number", True)),
(b"AmbientColor", ((0.0, 0.0, 0.0), "p_color_rgb", True)),
(b"AmbientFactor", (1.0, "p_number", True)),
(b"DiffuseColor", ((0.8, 0.8, 0.8), "p_color_rgb", True)),
(b"DiffuseFactor", (0.8, "p_number", True)),
(b"TransparentColor", ((0.8, 0.8, 0.8), "p_color_rgb", True)), # Same as diffuse.
(b"TransparencyFactor", (0.0, "p_number", True)),
(b"Opacity", (1.0, "p_number", True)),
(b"NormalMap", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"Bump", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"BumpFactor", (1.0, "p_number", False)),
(b"DisplacementColor", ((0.0, 0.0, 0.0), "p_color_rgb", False)),
(b"DisplacementFactor", (0.0, "p_number", False)),
(b"SpecularColor", ((1.0, 1.0, 1.0), "p_color_rgb", True)),
(b"SpecularFactor", (0.5 / 2.0, "p_number", True)),
# Not sure about the name, importer uses this (but ShininessExponent for tex prop name!)
# And in fbx exported by sdk, you have one in template, the other in actual material!!! :/
# For now, using both.
(b"Shininess", ((50.0 - 1.0) / 5.10, "p_number", True)),
(b"ShininessExponent", ((50.0 - 1.0) / 5.10, "p_number", True)),
(b"ReflectionColor", ((1.0, 1.0, 1.0), "p_color_rgb", True)),
(b"ReflectionFactor", (0.0, "p_number", True)),
))
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"Material", b"FbxSurfacePhong", props, nbr_users)
def fbx_template_def_texture_file(scene, settings, override_defaults=None, nbr_users=0):
# WIP...
# XXX Not sure about all names!
props = OrderedDict((
(b"TextureTypeUse", (0, "p_enum", False)), # Standard.
(b"AlphaSource", (2, "p_enum", False)), # Black (i.e. texture's alpha), XXX name guessed!.
(b"Texture alpha", (1.0, "p_number", False)),
(b"PremultiplyAlpha", (False, "p_bool", False)),
(b"CurrentTextureBlendMode", (0, "p_enum", False)), # Translucent, assuming this means "Alpha over"!
(b"CurrentMappingType", (1, "p_enum", False)), # Planar.
(b"WrapModeU", (0, "p_enum", False)), # Repeat.
(b"WrapModeV", (0, "p_enum", False)), # Repeat.
(b"UVSwap", (False, "p_bool", False)),
(b"Translation", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"Rotation", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"Scaling", ((1.0, 1.0, 1.0), "p_vector_3d", False)),
(b"TextureRotationPivot", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
(b"TextureScalingPivot", ((0.0, 0.0, 0.0), "p_vector_3d", False)),
# Not sure about those two... At least, UseMaterial should always be ON imho.
(b"UseMaterial", (True, "p_bool", False)),
(b"UseMipMap", (False, "p_bool", False)),
))
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"Texture", b"FbxFileTexture", props, nbr_users)
def fbx_template_def_video(scene, settings, override_defaults=None, nbr_users=0):
    # WIP...
    props = OrderedDict((
(b"Width", (0, "p_integer", False)),
(b"Height", (0, "p_integer", False)),
(b"Path", ("", "p_string_url", False)),
(b"AccessMode", (0, "p_enum", False)), # Disk (0=Disk, 1=Mem, 2=DiskAsync).
(b"StartFrame", (0, "p_integer", False)),
(b"StopFrame", (0, "p_integer", False)),
(b"Offset", (0, "p_timestamp", False)),
(b"PlaySpeed", (1.0, "p_number", False)),
(b"FreeRunning", (False, "p_bool", False)),
(b"Loop", (False, "p_bool", False)),
(b"InterlaceMode", (0, "p_enum", False)), # None, i.e. progressive.
(b"ImageSequence", (False, "p_bool", False)),
(b"ImageSequenceOffset", (0, "p_integer", False)),
(b"FrameRate", (scene.render.fps / scene.render.fps_base, "p_number", False)),
(b"LastFrame", (0, "p_integer", False)),
))
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"Video", b"FbxVideo", props, nbr_users)
def fbx_template_def_pose(scene, settings, override_defaults=None, nbr_users=0):
    props = OrderedDict()
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"Pose", b"", props, nbr_users)
def fbx_template_def_deformer(scene, settings, override_defaults=None, nbr_users=0):
    props = OrderedDict()
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"Deformer", b"", props, nbr_users)
def fbx_template_def_animstack(scene, settings, override_defaults=None, nbr_users=0):
props = OrderedDict((
(b"Description", ("", "p_string", False)),
(b"LocalStart", (0, "p_timestamp", False)),
(b"LocalStop", (0, "p_timestamp", False)),
(b"ReferenceStart", (0, "p_timestamp", False)),
(b"ReferenceStop", (0, "p_timestamp", False)),
))
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"AnimationStack", b"FbxAnimStack", props, nbr_users)
def fbx_template_def_animlayer(scene, settings, override_defaults=None, nbr_users=0):
props = OrderedDict((
(b"Weight", (100.0, "p_number", True)),
(b"Mute", (False, "p_bool", False)),
(b"Solo", (False, "p_bool", False)),
(b"Lock", (False, "p_bool", False)),
(b"Color", ((0.8, 0.8, 0.8), "p_color_rgb", True)),
(b"BlendMode", (0, "p_enum", False)),
(b"RotationAccumulationMode", (0, "p_enum", False)),
(b"ScaleAccumulationMode", (0, "p_enum", False)),
(b"BlendModeBypass", (0, "p_ulonglong", False)),
))
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"AnimationLayer", b"FbxAnimLayer", props, nbr_users)
def fbx_template_def_animcurvenode(scene, settings, override_defaults=None, nbr_users=0):
props = OrderedDict((
(b"d", (None, "p_compound", False)),
))
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"AnimationCurveNode", b"FbxAnimCurveNode", props, nbr_users)
def fbx_template_def_animcurve(scene, settings, override_defaults=None, nbr_users=0):
props = OrderedDict()
if override_defaults is not None:
props.update(override_defaults)
return FBXTemplate(b"AnimationCurve", b"", props, nbr_users)
##### FBX objects generators. #####
def has_valid_parent(scene_data, obj):
return obj.parent and obj.parent in scene_data.objects
def use_bake_space_transform(scene_data, obj):
# NOTE: Only applies to object types supporting this!!! Currently, only meshes...
# Also, do not apply it to children objects.
# TODO: Check whether this can work for bones too...
return (scene_data.settings.bake_space_transform and not isinstance(obj, Bone) and
obj.type in {'MESH'} and not has_valid_parent(scene_data, obj))
def fbx_object_matrix(scene_data, obj, armature=None, local_space=False, global_space=False):
    """
    Generate object transform matrix (*always* in matching *FBX* space!).
If local_space is True, returned matrix is *always* in local space.
Else:
If global_space is True, returned matrix is always in world space.
If global_space is False, returned matrix is in parent space if parent is valid, else in world space.
Note local_space has precedence over global_space.
If obj is a bone, and global_space is True, armature must be provided (it's the bone's armature object!).
Applies specific rotation to bones, lamps and cameras (conversion Blender -> FBX).
"""
is_bone = isinstance(obj, Bone)
# Objects which are not bones and do not have any parent are *always* in global space (unless local_space is True!).
is_global = not local_space and (global_space or not (is_bone or has_valid_parent(scene_data, obj)))
#assert((is_bone and is_global and armature is None) == False,
#"You must provide an armature object to get bones transform matrix in global space!")
matrix = obj.matrix_local
# Lamps, cameras and bones need to be rotated (in local space!).
if is_bone:
matrix = matrix * MAT_CONVERT_BONE
elif obj.type == 'LAMP':
matrix = matrix * MAT_CONVERT_LAMP
elif obj.type == 'CAMERA':
matrix = matrix * MAT_CONVERT_CAMERA
# Up till here, our matrix is in local space, time to bring it in its final desired space.
if is_bone:
# Bones are in armature (object) space currently, either bring them to global space or real
# local space (relative to parent bone).
if is_global:
matrix = armature.matrix_world * matrix
elif obj.parent: # Parent bone, get matrix relative to it.
par_matrix = obj.parent.matrix_local * MAT_CONVERT_BONE
matrix = par_matrix.inverted() * matrix
elif obj.parent:
if is_global:
# Move matrix to global Blender space.
matrix = obj.parent.matrix_world * matrix
elif use_bake_space_transform(scene_data, obj.parent):
# Blender's and FBX's local space of parent may differ if we use bake_space_transform...
# Apply parent's *Blender* local space...
matrix = obj.parent.matrix_local * matrix
# ...and move it back into parent's *FBX* local space.
par_mat = fbx_object_matrix(scene_data, obj.parent, local_space=True)
matrix = par_mat.inverted() * matrix
if use_bake_space_transform(scene_data, obj):
# If we bake the transforms we need to post-multiply inverse global transform.
# This means that the global transform will not apply to children of this transform.
matrix = matrix * scene_data.settings.global_matrix_inv
if is_global:
# In any case, pre-multiply the global matrix to get it in FBX global space!
matrix = scene_data.settings.global_matrix * matrix
return matrix
def fbx_object_tx(scene_data, obj):
"""
Generate object transform data (always in local space when possible).
"""
matrix = fbx_object_matrix(scene_data, obj)
loc, rot, scale = matrix.decompose()
matrix_rot = rot.to_matrix()
rot = rot.to_euler() # quat -> euler, we always use 'XYZ' order.
return loc, rot, scale, matrix, matrix_rot
def fbx_name_class(name, cls):
return FBX_NAME_CLASS_SEP.join((name, cls))
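# Illustrative sketch only: binary FBX stores "Name::Class" with a 0x00 0x01 separator, so
#     fbx_name_class(b"Cube", b"Geometry")  # -> b"Cube\x00\x01Geometry"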
def fbx_data_element_custom_properties(props, bid):
"""
Store custom properties of blender ID bid (any mapping-like object, in fact) into FBX properties props.
"""
for k, v in bid.items():
if isinstance(v, str):
elem_props_set(props, "p_string", k.encode(), v, custom=True)
elif isinstance(v, int):
elem_props_set(props, "p_integer", k.encode(), v, custom=True)
if isinstance(v, float):
elem_props_set(props, "p_number", k.encode(), v, custom=True)
def fbx_data_lamp_elements(root, lamp, scene_data):
"""
Write the Lamp data block.
"""
gscale = scene_data.settings.global_scale
lamp_key = scene_data.data_lamps[lamp]
do_light = True
decay_type = FBX_LIGHT_DECAY_TYPES['CONSTANT']
do_shadow = False
shadow_color = Vector((0.0, 0.0, 0.0))
if lamp.type not in {'HEMI'}:
if lamp.type not in {'SUN'}:
decay_type = FBX_LIGHT_DECAY_TYPES[lamp.falloff_type]
do_light = (not lamp.use_only_shadow) and (lamp.use_specular or lamp.use_diffuse)
do_shadow = lamp.shadow_method not in {'NOSHADOW'}
shadow_color = lamp.shadow_color
light = elem_data_single_int64(root, b"NodeAttribute", get_fbxuid_from_key(lamp_key))
light.add_string(fbx_name_class(lamp.name.encode(), b"NodeAttribute"))
light.add_string(b"Light")
elem_data_single_int32(light, b"GeometryVersion", FBX_GEOMETRY_VERSION) # Sic...
tmpl = scene_data.templates[b"Light"]
props = elem_properties(light)
elem_props_template_set(tmpl, props, "p_enum", b"LightType", FBX_LIGHT_TYPES[lamp.type])
elem_props_template_set(tmpl, props, "p_bool", b"CastLight", do_light)
elem_props_template_set(tmpl, props, "p_color_rgb", b"Color", lamp.color)
elem_props_template_set(tmpl, props, "p_number", b"Intensity", lamp.energy * 100.0)
elem_props_template_set(tmpl, props, "p_enum", b"DecayType", decay_type)
elem_props_template_set(tmpl, props, "p_number", b"DecayStart", lamp.distance * gscale)
elem_props_template_set(tmpl, props, "p_bool", b"CastShadows", do_shadow)
elem_props_template_set(tmpl, props, "p_color_rgb", b"ShadowColor", shadow_color)
if lamp.type in {'SPOT'}:
elem_props_template_set(tmpl, props, "p_number", b"OuterAngle", math.degrees(lamp.spot_size))
elem_props_template_set(tmpl, props, "p_number", b"InnerAngle",
math.degrees(lamp.spot_size * (1.0 - lamp.spot_blend)))
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, lamp)
def fbx_data_camera_elements(root, cam_obj, scene_data):
"""
Write the Camera data blocks.
"""
gscale = scene_data.settings.global_scale
cam_data = cam_obj.data
cam_key = scene_data.data_cameras[cam_obj]
# Real data now, good old camera!
# Object transform info.
loc, rot, scale, matrix, matrix_rot = fbx_object_tx(scene_data, cam_obj)
up = matrix_rot * Vector((0.0, 1.0, 0.0))
to = matrix_rot * Vector((0.0, 0.0, -1.0))
# Render settings.
# TODO We could export much more...
render = scene_data.scene.render
width = render.resolution_x
height = render.resolution_y
aspect = width / height
# Film width & height from mm to inches
filmwidth = units_convert(cam_data.sensor_width, "millimeter", "inch")
filmheight = units_convert(cam_data.sensor_height, "millimeter", "inch")
filmaspect = filmwidth / filmheight
# Film offset
offsetx = filmwidth * cam_data.shift_x
offsety = filmaspect * filmheight * cam_data.shift_y
cam = elem_data_single_int64(root, b"NodeAttribute", get_fbxuid_from_key(cam_key))
cam.add_string(fbx_name_class(cam_data.name.encode(), b"NodeAttribute"))
cam.add_string(b"Camera")
tmpl = scene_data.templates[b"Camera"]
props = elem_properties(cam)
elem_props_template_set(tmpl, props, "p_vector_3d", b"Position", loc)
elem_props_template_set(tmpl, props, "p_vector_3d", b"UpVector", up)
elem_props_template_set(tmpl, props, "p_vector_3d", b"InterestPosition", to)
# Should we use world value?
elem_props_template_set(tmpl, props, "p_color_rgb", b"BackgroundColor", (0.0, 0.0, 0.0))
elem_props_template_set(tmpl, props, "p_bool", b"DisplayTurnTableIcon", True)
elem_props_template_set(tmpl, props, "p_number", b"FilmWidth", filmwidth)
elem_props_template_set(tmpl, props, "p_number", b"FilmHeight", filmheight)
elem_props_template_set(tmpl, props, "p_number", b"FilmAspectRatio", filmaspect)
elem_props_template_set(tmpl, props, "p_number", b"FilmOffsetX", offsetx)
elem_props_template_set(tmpl, props, "p_number", b"FilmOffsetY", offsety)
elem_props_template_set(tmpl, props, "p_enum", b"ApertureMode", 3) # FocalLength.
elem_props_template_set(tmpl, props, "p_enum", b"GateFit", 2) # FitHorizontal.
elem_props_template_set(tmpl, props, "p_fov", b"FieldOfView", math.degrees(cam_data.angle_x))
elem_props_template_set(tmpl, props, "p_fov_x", b"FieldOfViewX", math.degrees(cam_data.angle_x))
elem_props_template_set(tmpl, props, "p_fov_y", b"FieldOfViewY", math.degrees(cam_data.angle_y))
# No need to convert to inches here...
elem_props_template_set(tmpl, props, "p_number", b"FocalLength", cam_data.lens)
elem_props_template_set(tmpl, props, "p_number", b"SafeAreaAspectRatio", aspect)
elem_props_template_set(tmpl, props, "p_number", b"NearPlane", cam_data.clip_start * gscale)
elem_props_template_set(tmpl, props, "p_number", b"FarPlane", cam_data.clip_end * gscale)
elem_props_template_set(tmpl, props, "p_enum", b"BackPlaneDistanceMode", 1) # RelativeToCamera.
elem_props_template_set(tmpl, props, "p_number", b"BackPlaneDistance", cam_data.clip_end * gscale)
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, cam_data)
elem_data_single_string(cam, b"TypeFlags", b"Camera")
elem_data_single_int32(cam, b"GeometryVersion", 124) # Sic...
elem_data_vec_float64(cam, b"Position", loc)
elem_data_vec_float64(cam, b"Up", up)
elem_data_vec_float64(cam, b"LookAt", to)
elem_data_single_int32(cam, b"ShowInfoOnMoving", 1)
elem_data_single_int32(cam, b"ShowAudio", 0)
elem_data_vec_float64(cam, b"AudioColor", (0.0, 1.0, 0.0))
elem_data_single_float64(cam, b"CameraOrthoZoom", 1.0)
def fbx_data_mesh_elements(root, me, scene_data):
"""
Write the Mesh (Geometry) data block.
"""
# Ugly helper... :/
def _infinite_gen(val):
while 1:
yield val
me_key, me_obj = scene_data.data_meshes[me]
# No gscale/gmat here, all data are supposed to be in object space.
smooth_type = scene_data.settings.mesh_smooth_type
do_bake_space_transform = use_bake_space_transform(scene_data, me_obj)
# Vertices are in object space, but we are post-multiplying all transforms with the inverse of the
# global matrix, so we need to apply the global matrix to the vertices to get the correct result.
geom_mat_co = scene_data.settings.global_matrix if do_bake_space_transform else None
# We need to apply the inverse transpose of the global matrix when transforming normals.
geom_mat_no = Matrix(scene_data.settings.global_matrix_inv_transposed) if do_bake_space_transform else None
if geom_mat_no is not None:
# Remove translation & scaling!
geom_mat_no.translation = Vector()
geom_mat_no.normalize()
geom = elem_data_single_int64(root, b"Geometry", get_fbxuid_from_key(me_key))
geom.add_string(fbx_name_class(me.name.encode(), b"Geometry"))
geom.add_string(b"Mesh")
tmpl = scene_data.templates[b"Geometry"]
props = elem_properties(geom)
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, me)
elem_data_single_int32(geom, b"GeometryVersion", FBX_GEOMETRY_VERSION)
# Vertex cos.
t_co = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.vertices) * 3
me.vertices.foreach_get("co", t_co)
if geom_mat_co is not None:
def _vcos_transformed_gen(raw_cos, m=None):
# Note: we could most likely get much better performances with numpy, but will leave this as TODO for now.
return chain(*(m * Vector(v) for v in zip(*(iter(raw_cos),) * 3)))
t_co = _vcos_transformed_gen(t_co, geom_mat_co)
elem_data_single_float64_array(geom, b"Vertices", t_co)
del t_co
# Polygon indices.
#
# We do loose edges as two-vertices faces, if enabled...
#
# Note we have to process Edges in the same time, as they are based on poly's loops...
loop_nbr = len(me.loops)
t_pvi = array.array(data_types.ARRAY_INT32, (0,)) * loop_nbr
t_ls = [None] * len(me.polygons)
me.loops.foreach_get("vertex_index", t_pvi)
me.polygons.foreach_get("loop_start", t_ls)
# Add "fake" faces for loose edges.
if scene_data.settings.use_mesh_edges:
t_le = tuple(e.vertices for e in me.edges if e.is_loose)
t_pvi.extend(chain(*t_le))
t_ls.extend(range(loop_nbr, loop_nbr + len(t_le), 2))
del t_le
# Edges...
# Note: Edges are represented as a loop here: each edge uses a single index, which refers to the polygon array.
    # The edge is made by the vertex indexed by this polygon's point and the next one on the same polygon.
# Advantage: Only one index per edge.
# Drawback: Only polygon's edges can be represented (that's why we have to add fake two-verts polygons
# for loose edges).
# We also have to store a mapping from real edges to their indices in this array, for edge-mapped data
# (like e.g. crease).
t_eli = array.array(data_types.ARRAY_INT32)
edges_map = {}
edges_nbr = 0
if t_ls and t_pvi:
t_ls = set(t_ls)
todo_edges = [None] * len(me.edges) * 2
me.edges.foreach_get("vertices", todo_edges)
todo_edges = set((v1, v2) if v1 < v2 else (v2, v1) for v1, v2 in zip(*(iter(todo_edges),) * 2))
li = 0
vi = vi_start = t_pvi[0]
for li_next, vi_next in enumerate(t_pvi[1:] + t_pvi[:1], start=1):
if li_next in t_ls: # End of a poly's loop.
vi2 = vi_start
vi_start = vi_next
else:
vi2 = vi_next
e_key = (vi, vi2) if vi < vi2 else (vi2, vi)
if e_key in todo_edges:
t_eli.append(li)
todo_edges.remove(e_key)
edges_map[e_key] = edges_nbr
edges_nbr += 1
vi = vi_next
li = li_next
# End of edges!
    # We have to bitwise-invert (^ -1) the last index of each loop, which is how FBX marks the end of a polygon.
for ls in t_ls:
t_pvi[ls - 1] ^= -1
# And finally we can write data!
elem_data_single_int32_array(geom, b"PolygonVertexIndex", t_pvi)
elem_data_single_int32_array(geom, b"Edges", t_eli)
del t_pvi
del t_ls
del t_eli
# And now, layers!
# Smoothing.
if smooth_type in {'FACE', 'EDGE'}:
t_ps = None
_map = b""
if smooth_type == 'FACE':
t_ps = array.array(data_types.ARRAY_INT32, (0,)) * len(me.polygons)
me.polygons.foreach_get("use_smooth", t_ps)
_map = b"ByPolygon"
else: # EDGE
# Write Edge Smoothing.
t_ps = array.array(data_types.ARRAY_INT32, (0,)) * edges_nbr
for e in me.edges:
if e.key not in edges_map:
continue # Only loose edges, in theory!
t_ps[edges_map[e.key]] = not e.use_edge_sharp
_map = b"ByEdge"
lay_smooth = elem_data_single_int32(geom, b"LayerElementSmoothing", 0)
elem_data_single_int32(lay_smooth, b"Version", FBX_GEOMETRY_SMOOTHING_VERSION)
elem_data_single_string(lay_smooth, b"Name", b"")
elem_data_single_string(lay_smooth, b"MappingInformationType", _map)
elem_data_single_string(lay_smooth, b"ReferenceInformationType", b"Direct")
elem_data_single_int32_array(lay_smooth, b"Smoothing", t_ps) # Sight, int32 for bool...
del t_ps
# TODO: Edge crease (LayerElementCrease).
# And we are done with edges!
del edges_map
# Loop normals.
# NOTE: this is not supported by importer currently.
    # XXX Official docs say normals should use IndexToDirect,
# but this does not seem well supported by apps currently...
me.calc_normals_split()
def _nortuples_gen(raw_nors, m):
# Great, now normals are also expected 4D!
gen = zip(*(iter(raw_nors),) * 3 + (_infinite_gen(1.0),))
return gen if m is None else (m * Vector(v) for v in gen)
t_ln = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops) * 3
me.loops.foreach_get("normal", t_ln)
    t_ln = _nortuples_gen(t_ln, geom_mat_no)
    if 0:  # XXX IndexToDirect write path, disabled for now (see note above about app support).
        t_ln = tuple(t_ln)  # No choice... :/
lay_nor = elem_data_single_int32(geom, b"LayerElementNormal", 0)
elem_data_single_int32(lay_nor, b"Version", FBX_GEOMETRY_NORMAL_VERSION)
elem_data_single_string(lay_nor, b"Name", b"")
elem_data_single_string(lay_nor, b"MappingInformationType", b"ByPolygonVertex")
elem_data_single_string(lay_nor, b"ReferenceInformationType", b"IndexToDirect")
ln2idx = tuple(set(t_ln))
elem_data_single_float64_array(lay_nor, b"Normals", chain(*ln2idx))
# Normal weights, no idea what it is.
t_lnw = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(ln2idx)
elem_data_single_float64_array(lay_nor, b"NormalsW", t_lnw)
ln2idx = {nor: idx for idx, nor in enumerate(ln2idx)}
elem_data_single_int32_array(lay_nor, b"NormalsIndex", (ln2idx[n] for n in t_ln))
else:
lay_nor = elem_data_single_int32(geom, b"LayerElementNormal", 0)
elem_data_single_int32(lay_nor, b"Version", FBX_GEOMETRY_NORMAL_VERSION)
elem_data_single_string(lay_nor, b"Name", b"")
elem_data_single_string(lay_nor, b"MappingInformationType", b"ByPolygonVertex")
elem_data_single_string(lay_nor, b"ReferenceInformationType", b"Direct")
elem_data_single_float64_array(lay_nor, b"Normals", chain(*t_ln))
# Normal weights, no idea what it is.
t_ln = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops)
elem_data_single_float64_array(lay_nor, b"NormalsW", t_ln)
del t_ln
# tspace
tspacenumber = 0
if scene_data.settings.use_tspace:
tspacenumber = len(me.uv_layers)
if tspacenumber:
t_ln = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops) * 3
t_lnw = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops)
for idx, uvlayer in enumerate(me.uv_layers):
name = uvlayer.name
me.calc_tangents(name)
# Loop bitangents (aka binormals).
# NOTE: this is not supported by importer currently.
me.loops.foreach_get("bitangent", t_ln)
lay_nor = elem_data_single_int32(geom, b"LayerElementBinormal", idx)
elem_data_single_int32(lay_nor, b"Version", FBX_GEOMETRY_BINORMAL_VERSION)
elem_data_single_string_unicode(lay_nor, b"Name", name)
elem_data_single_string(lay_nor, b"MappingInformationType", b"ByPolygonVertex")
elem_data_single_string(lay_nor, b"ReferenceInformationType", b"Direct")
elem_data_single_float64_array(lay_nor, b"Binormals", chain(*_nortuples_gen(t_ln, geom_mat_no)))
# Binormal weights, no idea what it is.
elem_data_single_float64_array(lay_nor, b"BinormalsW", t_lnw)
# Loop tangents.
# NOTE: this is not supported by importer currently.
me.loops.foreach_get("tangent", t_ln)
lay_nor = elem_data_single_int32(geom, b"LayerElementTangent", idx)
elem_data_single_int32(lay_nor, b"Version", FBX_GEOMETRY_TANGENT_VERSION)
elem_data_single_string_unicode(lay_nor, b"Name", name)
elem_data_single_string(lay_nor, b"MappingInformationType", b"ByPolygonVertex")
elem_data_single_string(lay_nor, b"ReferenceInformationType", b"Direct")
elem_data_single_float64_array(lay_nor, b"Binormals", chain(*_nortuples_gen(t_ln, geom_mat_no)))
# Tangent weights, no idea what it is.
elem_data_single_float64_array(lay_nor, b"TangentsW", t_lnw)
me.free_tangents()
me.free_normals_split()
del _nortuples_gen
# Write VertexColor Layers
# note, no programs seem to use this info :/
vcolnumber = len(me.vertex_colors)
if vcolnumber:
def _coltuples_gen(raw_cols):
return zip(*(iter(raw_cols),) * 3 + (_infinite_gen(1.0),)) # We need a fake alpha...
t_lc = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops) * 3
for colindex, collayer in enumerate(me.vertex_colors):
collayer.data.foreach_get("color", t_lc)
lay_vcol = elem_data_single_int32(geom, b"LayerElementColor", colindex)
elem_data_single_int32(lay_vcol, b"Version", FBX_GEOMETRY_VCOLOR_VERSION)
elem_data_single_string_unicode(lay_vcol, b"Name", collayer.name)
elem_data_single_string(lay_vcol, b"MappingInformationType", b"ByPolygonVertex")
elem_data_single_string(lay_vcol, b"ReferenceInformationType", b"IndexToDirect")
col2idx = tuple(set(_coltuples_gen(t_lc)))
elem_data_single_float64_array(lay_vcol, b"Colors", chain(*col2idx)) # Flatten again...
col2idx = {col: idx for idx, col in enumerate(col2idx)}
elem_data_single_int32_array(lay_vcol, b"ColorIndex", (col2idx[c] for c in _coltuples_gen(t_lc)))
del col2idx
del t_lc
del _coltuples_gen
# Write UV layers.
# Note: LayerElementTexture is deprecated since FBX 2011 - luckily!
# Textures are now only related to materials, in FBX!
uvnumber = len(me.uv_layers)
if uvnumber:
def _uvtuples_gen(raw_uvs):
return zip(*(iter(raw_uvs),) * 2)
t_luv = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops) * 2
for uvindex, uvlayer in enumerate(me.uv_layers):
uvlayer.data.foreach_get("uv", t_luv)
lay_uv = elem_data_single_int32(geom, b"LayerElementUV", uvindex)
elem_data_single_int32(lay_uv, b"Version", FBX_GEOMETRY_UV_VERSION)
elem_data_single_string_unicode(lay_uv, b"Name", uvlayer.name)
elem_data_single_string(lay_uv, b"MappingInformationType", b"ByPolygonVertex")
elem_data_single_string(lay_uv, b"ReferenceInformationType", b"IndexToDirect")
uv2idx = tuple(set(_uvtuples_gen(t_luv)))
elem_data_single_float64_array(lay_uv, b"UV", chain(*uv2idx)) # Flatten again...
uv2idx = {uv: idx for idx, uv in enumerate(uv2idx)}
elem_data_single_int32_array(lay_uv, b"UVIndex", (uv2idx[uv] for uv in _uvtuples_gen(t_luv)))
del uv2idx
del t_luv
del _uvtuples_gen
# Face's materials.
me_fbxmats_idx = None
if me in scene_data.mesh_mat_indices:
me_fbxmats_idx = scene_data.mesh_mat_indices[me]
me_blmats = me.materials
if me_fbxmats_idx and me_blmats:
lay_mat = elem_data_single_int32(geom, b"LayerElementMaterial", 0)
elem_data_single_int32(lay_mat, b"Version", FBX_GEOMETRY_MATERIAL_VERSION)
elem_data_single_string(lay_mat, b"Name", b"")
nbr_mats = len(me_fbxmats_idx)
if nbr_mats > 1:
t_pm = array.array(data_types.ARRAY_INT32, (0,)) * len(me.polygons)
me.polygons.foreach_get("material_index", t_pm)
# We have to validate mat indices, and map them to FBX indices.
blmats_to_fbxmats_idxs = [me_fbxmats_idx[m] for m in me_blmats]
mat_idx_limit = len(blmats_to_fbxmats_idxs)
def_mat = blmats_to_fbxmats_idxs[0]
_gen = (blmats_to_fbxmats_idxs[m] if m < mat_idx_limit else def_mat for m in t_pm)
t_pm = array.array(data_types.ARRAY_INT32, _gen)
elem_data_single_string(lay_mat, b"MappingInformationType", b"ByPolygon")
# XXX Logically, should be "Direct" reference type, since we do not have any index array, and have one
# value per polygon...
# But looks like FBX expects it to be IndexToDirect here (maybe because materials are already
# indices??? *sigh*).
elem_data_single_string(lay_mat, b"ReferenceInformationType", b"IndexToDirect")
elem_data_single_int32_array(lay_mat, b"Materials", t_pm)
del t_pm
else:
elem_data_single_string(lay_mat, b"MappingInformationType", b"AllSame")
elem_data_single_string(lay_mat, b"ReferenceInformationType", b"IndexToDirect")
elem_data_single_int32_array(lay_mat, b"Materials", [0])
# And the "layer TOC"...
layer = elem_data_single_int32(geom, b"Layer", 0)
elem_data_single_int32(layer, b"Version", FBX_GEOMETRY_LAYER_VERSION)
lay_nor = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_nor, b"Type", b"LayerElementNormal")
elem_data_single_int32(lay_nor, b"TypedIndex", 0)
if smooth_type in {'FACE', 'EDGE'}:
lay_smooth = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_smooth, b"Type", b"LayerElementSmoothing")
elem_data_single_int32(lay_smooth, b"TypedIndex", 0)
if vcolnumber:
lay_vcol = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_vcol, b"Type", b"LayerElementColor")
elem_data_single_int32(lay_vcol, b"TypedIndex", 0)
if uvnumber:
lay_uv = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_uv, b"Type", b"LayerElementUV")
elem_data_single_int32(lay_uv, b"TypedIndex", 0)
if me_fbxmats_idx is not None:
lay_mat = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_mat, b"Type", b"LayerElementMaterial")
elem_data_single_int32(lay_mat, b"TypedIndex", 0)
# Add other uv and/or vcol layers...
for vcolidx, uvidx, tspaceidx in zip_longest(range(1, vcolnumber), range(1, uvnumber), range(1, tspacenumber),
fillvalue=0):
layer = elem_data_single_int32(geom, b"Layer", max(vcolidx, uvidx))
elem_data_single_int32(layer, b"Version", FBX_GEOMETRY_LAYER_VERSION)
if vcolidx:
lay_vcol = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_vcol, b"Type", b"LayerElementColor")
elem_data_single_int32(lay_vcol, b"TypedIndex", vcolidx)
if uvidx:
lay_uv = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_uv, b"Type", b"LayerElementUV")
elem_data_single_int32(lay_uv, b"TypedIndex", uvidx)
if tspaceidx:
lay_binor = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_binor, b"Type", b"LayerElementBinormal")
elem_data_single_int32(lay_binor, b"TypedIndex", tspaceidx)
lay_tan = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_tan, b"Type", b"LayerElementTangent")
elem_data_single_int32(lay_tan, b"TypedIndex", tspaceidx)
def fbx_data_material_elements(root, mat, scene_data):
"""
Write the Material data block.
"""
ambient_color = (0.0, 0.0, 0.0)
if scene_data.data_world:
ambient_color = next(iter(scene_data.data_world.keys())).ambient_color
mat_key, _objs = scene_data.data_materials[mat]
# Approximation...
mat_type = b"phong" if mat.specular_shader in {'COOKTORR', 'PHONG', 'BLINN'} else b"lambert"
fbx_mat = elem_data_single_int64(root, b"Material", get_fbxuid_from_key(mat_key))
fbx_mat.add_string(fbx_name_class(mat.name.encode(), b"Material"))
fbx_mat.add_string(b"")
elem_data_single_int32(fbx_mat, b"Version", FBX_MATERIAL_VERSION)
# those are not yet properties, it seems...
elem_data_single_string(fbx_mat, b"ShadingModel", mat_type)
elem_data_single_int32(fbx_mat, b"MultiLayer", 0) # Should be bool...
tmpl = scene_data.templates[b"Material"]
props = elem_properties(fbx_mat)
elem_props_template_set(tmpl, props, "p_string", b"ShadingModel", mat_type.decode())
elem_props_template_set(tmpl, props, "p_color_rgb", b"EmissiveColor", mat.diffuse_color)
elem_props_template_set(tmpl, props, "p_number", b"EmissiveFactor", mat.emit)
elem_props_template_set(tmpl, props, "p_color_rgb", b"AmbientColor", ambient_color)
elem_props_template_set(tmpl, props, "p_number", b"AmbientFactor", mat.ambient)
elem_props_template_set(tmpl, props, "p_color_rgb", b"DiffuseColor", mat.diffuse_color)
elem_props_template_set(tmpl, props, "p_number", b"DiffuseFactor", mat.diffuse_intensity)
elem_props_template_set(tmpl, props, "p_color_rgb", b"TransparentColor",
mat.diffuse_color if mat.use_transparency else (1.0, 1.0, 1.0))
elem_props_template_set(tmpl, props, "p_number", b"TransparencyFactor",
1.0 - mat.alpha if mat.use_transparency else 0.0)
elem_props_template_set(tmpl, props, "p_number", b"Opacity", mat.alpha if mat.use_transparency else 1.0)
elem_props_template_set(tmpl, props, "p_vector_3d", b"NormalMap", (0.0, 0.0, 0.0))
    # Not sure about those...
    """
b"Bump": ((0.0, 0.0, 0.0), "p_vector_3d"),
b"BumpFactor": (1.0, "p_number"),
b"DisplacementColor": ((0.0, 0.0, 0.0), "p_color_rgb"),
b"DisplacementFactor": (0.0, "p_number"),
"""
if mat_type == b"phong":
elem_props_template_set(tmpl, props, "p_color_rgb", b"SpecularColor", mat.specular_color)
elem_props_template_set(tmpl, props, "p_number", b"SpecularFactor", mat.specular_intensity / 2.0)
# See Material template about those two!
elem_props_template_set(tmpl, props, "p_number", b"Shininess", (mat.specular_hardness - 1.0) / 5.10)
elem_props_template_set(tmpl, props, "p_number", b"ShininessExponent", (mat.specular_hardness - 1.0) / 5.10)
elem_props_template_set(tmpl, props, "p_color_rgb", b"ReflectionColor", mat.mirror_color)
elem_props_template_set(tmpl, props, "p_number", b"ReflectionFactor",
mat.raytrace_mirror.reflect_factor if mat.raytrace_mirror.use else 0.0)
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, mat)
def _gen_vid_path(img, scene_data):
msetts = scene_data.settings.media_settings
fname_rel = bpy_extras.io_utils.path_reference(img.filepath, msetts.base_src, msetts.base_dst, msetts.path_mode,
msetts.subdir, msetts.copy_set, img.library)
fname_abs = os.path.normpath(os.path.abspath(os.path.join(msetts.base_dst, fname_rel)))
return fname_abs, fname_rel
def fbx_data_texture_file_elements(root, tex, scene_data):
"""
Write the (file) Texture data block.
"""
# XXX All this is very fuzzy to me currently...
# Textures do not seem to use properties as much as they could.
# For now assuming most logical and simple stuff.
tex_key, _mats = scene_data.data_textures[tex]
img = tex.texture.image
fname_abs, fname_rel = _gen_vid_path(img, scene_data)
fbx_tex = elem_data_single_int64(root, b"Texture", get_fbxuid_from_key(tex_key))
fbx_tex.add_string(fbx_name_class(tex.name.encode(), b"Texture"))
fbx_tex.add_string(b"")
elem_data_single_string(fbx_tex, b"Type", b"TextureVideoClip")
elem_data_single_int32(fbx_tex, b"Version", FBX_TEXTURE_VERSION)
elem_data_single_string(fbx_tex, b"TextureName", fbx_name_class(tex.name.encode(), b"Texture"))
elem_data_single_string(fbx_tex, b"Media", fbx_name_class(img.name.encode(), b"Video"))
elem_data_single_string_unicode(fbx_tex, b"FileName", fname_abs)
elem_data_single_string_unicode(fbx_tex, b"RelativeFilename", fname_rel)
alpha_source = 0 # None
if img.use_alpha:
if tex.texture.use_calculate_alpha:
alpha_source = 1 # RGBIntensity as alpha.
else:
alpha_source = 2 # Black, i.e. alpha channel.
# BlendMode not useful for now, only affects layered textures afaics.
mapping = 0 # None.
if tex.texture_coords in {'ORCO'}: # XXX Others?
if tex.mapping in {'FLAT'}:
mapping = 1 # Planar
elif tex.mapping in {'CUBE'}:
mapping = 4 # Box
elif tex.mapping in {'TUBE'}:
mapping = 3 # Cylindrical
elif tex.mapping in {'SPHERE'}:
mapping = 2 # Spherical
elif tex.texture_coords in {'UV'}:
# XXX *HOW* do we link to correct UVLayer???
mapping = 6 # UV
wrap_mode = 1 # Clamp
if tex.texture.extension in {'REPEAT'}:
wrap_mode = 0 # Repeat
tmpl = scene_data.templates[b"TextureFile"]
props = elem_properties(fbx_tex)
elem_props_template_set(tmpl, props, "p_enum", b"AlphaSource", alpha_source)
elem_props_template_set(tmpl, props, "p_bool", b"PremultiplyAlpha",
img.alpha_mode in {'STRAIGHT'}) # Or is it PREMUL?
elem_props_template_set(tmpl, props, "p_enum", b"CurrentMappingType", mapping)
elem_props_template_set(tmpl, props, "p_enum", b"WrapModeU", wrap_mode)
elem_props_template_set(tmpl, props, "p_enum", b"WrapModeV", wrap_mode)
elem_props_template_set(tmpl, props, "p_vector_3d", b"Translation", tex.offset)
elem_props_template_set(tmpl, props, "p_vector_3d", b"Scaling", tex.scale)
elem_props_template_set(tmpl, props, "p_bool", b"UseMipMap", tex.texture.use_mipmap)
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, tex.texture)
def fbx_data_video_elements(root, vid, scene_data):
"""
Write the actual image data block.
"""
vid_key, _texs = scene_data.data_videos[vid]
fname_abs, fname_rel = _gen_vid_path(vid, scene_data)
fbx_vid = elem_data_single_int64(root, b"Video", get_fbxuid_from_key(vid_key))
fbx_vid.add_string(fbx_name_class(vid.name.encode(), b"Video"))
fbx_vid.add_string(b"Clip")
elem_data_single_string(fbx_vid, b"Type", b"Clip")
# XXX No Version???
elem_data_single_string_unicode(fbx_vid, b"FileName", fname_abs)
elem_data_single_string_unicode(fbx_vid, b"RelativeFilename", fname_rel)
if scene_data.settings.media_settings.embed_textures:
try:
with open(vid.filepath, 'br') as f:
elem_data_single_byte_array(fbx_vid, b"Content", f.read())
except Exception as e:
print("WARNING: embeding file {} failed ({})".format(vid.filepath, e))
elem_data_single_byte_array(fbx_vid, b"Content", b"")
else:
elem_data_single_byte_array(fbx_vid, b"Content", b"")
def fbx_data_armature_elements(root, armature, scene_data):
"""
Write:
* Bones "data" (NodeAttribute::LimbNode, contains pretty much nothing!).
* Deformers (i.e. Skin), bind between an armature and a mesh.
** SubDeformers (i.e. Cluster), one per bone/vgroup pair.
* BindPose.
Note armature itself has no data, it is a mere "Null" Model...
"""
# Bones "data".
tmpl = scene_data.templates[b"Bone"]
for bo in armature.data.bones:
_bo_key, bo_data_key, _arm = scene_data.data_bones[bo]
fbx_bo = elem_data_single_int64(root, b"NodeAttribute", get_fbxuid_from_key(bo_data_key))
fbx_bo.add_string(fbx_name_class(bo.name.encode(), b"NodeAttribute"))
fbx_bo.add_string(b"LimbNode")
elem_data_single_string(fbx_bo, b"TypeFlags", b"Skeleton")
props = elem_properties(fbx_bo)
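# The bone's "Size" is written as its rest length (head-to-tail distance).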
elem_props_template_set(tmpl, props, "p_number", b"Size", (bo.tail_local - bo.head_local).length)
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, bo)
# Deformers and BindPoses.
# Note: we might also use Deformers for our "parent to vertex" stuff???
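# data_deformers maps each armature to {mesh: (skin_key, object, {bone: cluster_key})}.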
deformer = scene_data.data_deformers.get(armature, None)
if deformer is not None:
for me, (skin_key, obj, clusters) in deformer.items():
# BindPose.
# We assume the bind pose for our bones is their "Editmode" pose...
# All matrices are expected in global (world) space.
bindpose_key = get_blender_armature_bindpose_key(armature, me)
fbx_pose = elem_data_single_int64(root, b"Pose", get_fbxuid_from_key(bindpose_key))
fbx_pose.add_string(fbx_name_class(me.name.encode(), b"Pose"))
fbx_pose.add_string(b"BindPose")
elem_data_single_string(fbx_pose, b"Type", b"BindPose")
elem_data_single_int32(fbx_pose, b"Version", FBX_POSE_BIND_VERSION)
elem_data_single_int32(fbx_pose, b"NbPoseNodes", 1 + len(armature.data.bones))
# First node is mesh/object.
mat_world_obj = fbx_object_matrix(scene_data, obj, global_space=True)
fbx_posenode = elem_empty(fbx_pose, b"PoseNode")
elem_data_single_int64(fbx_posenode, b"Node", get_fbxuid_from_key(scene_data.objects[obj]))
elem_data_single_float64_array(fbx_posenode, b"Matrix", matrix_to_array(mat_world_obj))
# And all bones of armature!
mat_world_bones = {}
for bo in armature.data.bones:
bomat = fbx_object_matrix(scene_data, bo, armature, global_space=True)
mat_world_bones[bo] = bomat
fbx_posenode = elem_empty(fbx_pose, b"PoseNode")
elem_data_single_int64(fbx_posenode, b"Node", get_fbxuid_from_key(scene_data.objects[bo]))
elem_data_single_float64_array(fbx_posenode, b"Matrix", matrix_to_array(bomat))
# Deformer.
fbx_skin = elem_data_single_int64(root, b"Deformer", get_fbxuid_from_key(skin_key))
fbx_skin.add_string(fbx_name_class(armature.name.encode(), b"Deformer"))
fbx_skin.add_string(b"Skin")
elem_data_single_int32(fbx_skin, b"Version", FBX_DEFORMER_SKIN_VERSION)
elem_data_single_float64(fbx_skin, b"Link_DeformAcuracy", 50.0) # Only vague idea what it is...
for bo, clstr_key in clusters.items():
# Find which vertices are affected by this bone/vgroup pair, and matching weights.
indices = []
weights = []
vg_idx = obj.vertex_groups[bo.name].index
for idx, v in enumerate(me.vertices):
vert_vg = [vg for vg in v.groups if vg.group == vg_idx]
if not vert_vg:
continue
indices.append(idx)
weights.append(vert_vg[0].weight)
# Create the cluster.
fbx_clstr = elem_data_single_int64(root, b"Deformer", get_fbxuid_from_key(clstr_key))
fbx_clstr.add_string(fbx_name_class(bo.name.encode(), b"SubDeformer"))
fbx_clstr.add_string(b"Cluster")
elem_data_single_int32(fbx_clstr, b"Version", FBX_DEFORMER_CLUSTER_VERSION)
# No idea what that user data might be...
fbx_userdata = elem_data_single_string(fbx_clstr, b"UserData", b"")
fbx_userdata.add_string(b"")
if indices:
elem_data_single_int32_array(fbx_clstr, b"Indexes", indices)
elem_data_single_float64_array(fbx_clstr, b"Weights", weights)
# Transform and TransformLink matrices...
# They seem to be mostly the same as BindPose ones???
# WARNING! Even though official FBX API presents Transform in global space,
# **it is stored in bone space in FBX data!** See:
# http://area.autodesk.com/forum/autodesk-fbx/fbx-sdk/why-the-values-return-
# by-fbxcluster-gettransformmatrix-x-not-same-with-the-value-in-ascii-fbx-file/
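# In the data written below, TransformLink is the bone's bind (world) matrix, while Transform is the mesh
# object's bind matrix expressed in that bone's space (inverted bone matrix * object matrix).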
elem_data_single_float64_array(fbx_clstr, b"Transform",
matrix_to_array(mat_world_bones[bo].inverted() * mat_world_obj))
elem_data_single_float64_array(fbx_clstr, b"TransformLink", matrix_to_array(mat_world_bones[bo]))
def fbx_data_object_elements(root, obj, scene_data):
"""
Write the Object (Model) data blocks.
Note we handle "Model" part of bones as well here!
"""
obj_type = b"Null" # default, sort of empty...
if isinstance(obj, Bone):
obj_type = b"LimbNode"
elif (obj.type == 'MESH'):
obj_type = b"Mesh"
elif (obj.type == 'LAMP'):
obj_type = b"Light"
elif (obj.type == 'CAMERA'):
obj_type = b"Camera"
obj_key = scene_data.objects[obj]
model = elem_data_single_int64(root, b"Model", get_fbxuid_from_key(obj_key))
model.add_string(fbx_name_class(obj.name.encode(), b"Model"))
model.add_string(obj_type)
elem_data_single_int32(model, b"Version", FBX_MODELS_VERSION)
# Object transform info.
loc, rot, scale, matrix, matrix_rot = fbx_object_tx(scene_data, obj)
rot = tuple(units_convert_iter(rot, "radian", "degree"))
tmpl = scene_data.templates[b"Model"]
# For now add only loc/rot/scale...
props = elem_properties(model)
elem_props_template_set(tmpl, props, "p_lcl_translation", b"Lcl Translation", loc)
elem_props_template_set(tmpl, props, "p_lcl_rotation", b"Lcl Rotation", rot)
elem_props_template_set(tmpl, props, "p_lcl_scaling", b"Lcl Scaling", scale)
# TODO: "constraints" (limit loc/rot/scale, and target-to-object).
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, obj)
# Those settings would obviously need to be edited in a complete version of the exporter, and may depend on
# object type, etc.
elem_data_single_int32(model, b"MultiLayer", 0)
elem_data_single_int32(model, b"MultiTake", 0)
elem_data_single_bool(model, b"Shading", True)
elem_data_single_string(model, b"Culling", b"CullingOff")
if isinstance(obj, Object) and obj.type == 'CAMERA':
# Why, oh why are FBX cameras such a mess???
# And WHY add camera data HERE??? Not even sure this is needed...
render = scene_data.scene.render
width = render.resolution_x * 1.0
height = render.resolution_y * 1.0
elem_props_template_set(tmpl, props, "p_enum", b"ResolutionMode", 0) # Don't know what it means
elem_props_template_set(tmpl, props, "p_number", b"AspectW", width)
elem_props_template_set(tmpl, props, "p_number", b"AspectH", height)
elem_props_template_set(tmpl, props, "p_bool", b"ViewFrustum", True)
elem_props_template_set(tmpl, props, "p_enum", b"BackgroundMode", 0) # Don't know what it means
elem_props_template_set(tmpl, props, "p_bool", b"ForegroundTransparent", True)
def fbx_data_animation_elements(root, scene_data):
"""
Write animation data.
"""
animations = scene_data.animations
if not animations:
return
scene = scene_data.scene
fps = scene.render.fps / scene.render.fps_base
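# Keys are (frame, value) pairs; frames are converted to FBX ktimes (FBX_KTIME units per second).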
def keys_to_ktimes(keys):
return (int(v) for v in units_convert_iter((f / fps for f, _v in keys), "second", "ktime"))
astack_key, alayers = animations
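# alayers maps each animated object to (layer_key, {fbx_prop: (curve_node_key, {channel: (curve_key, default, keys)})}).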
acn_tmpl = scene_data.templates[b"AnimationCurveNode"]
# Animation stack.
astack = elem_data_single_int64(root, b"AnimationStack", get_fbxuid_from_key(astack_key))
astack.add_string(fbx_name_class(scene.name.encode(), b"AnimStack"))
astack.add_string(b"")
for obj, (alayer_key, acurvenodes) in alayers.items():
# Animation layer.
alayer = elem_data_single_int64(root, b"AnimationLayer", get_fbxuid_from_key(alayer_key))
alayer.add_string(fbx_name_class(obj.name.encode(), b"AnimLayer"))
alayer.add_string(b"")
for fbx_prop, (acurvenode_key, acurves) in acurvenodes.items():
# Animation curve node.
acurvenode = elem_data_single_int64(root, b"AnimationCurveNode", get_fbxuid_from_key(acurvenode_key))
acurvenode.add_string(fbx_name_class(fbx_prop.encode(), b"AnimCurveNode"))
acurvenode.add_string(b"")
acn_props = elem_properties(acurvenode)
for fbx_item, (acurve_key, def_value, keys) in acurves.items():
elem_props_template_set(acn_tmpl, acn_props, "p_number", fbx_item.encode(), def_value, animatable=True)
# Only create Animation curve if needed!
if keys:
acurve = elem_data_single_int64(root, b"AnimationCurve", get_fbxuid_from_key(acurve_key))
acurve.add_string(fbx_name_class(b"", b"AnimCurve"))
acurve.add_string(b"")
# key attributes...
nbr_keys = len(keys)
# flags...
keyattr_flags = (1 << 3 | # interpolation mode, 1 = constant, 2 = linear, 3 = cubic.
1 << 8 | # tangent mode, 8 = auto, 9 = TCB, 10 = user, 11 = generic break,
1 << 13 | # tangent mode, 12 = generic clamp, 13 = generic time independent,
1 << 14 | # tangent mode, 13 + 14 = generic clamp progressive.
0,
)
# Maybe values controlling TCB & co???
keyattr_datafloat = (0.0, 0.0, 9.419963346924634e-30, 0.0)
# And now, the *real* data!
elem_data_single_float64(acurve, b"Default", def_value)
elem_data_single_int32(acurve, b"KeyVer", FBX_ANIM_KEY_VERSION)
elem_data_single_int64_array(acurve, b"KeyTime", keys_to_ktimes(keys))
elem_data_single_float32_array(acurve, b"KeyValueFloat", (v for _f, v in keys))
elem_data_single_int32_array(acurve, b"KeyAttrFlags", keyattr_flags)
elem_data_single_float32_array(acurve, b"KeyAttrDataFloat", keyattr_datafloat)
elem_data_single_int32_array(acurve, b"KeyAttrRefCount", (nbr_keys,))
##### Top-level FBX data container. #####
# Helper container gathering some data we need multiple times:
# * templates.
# * objects.
# * connections.
# * takes.
FBXData = namedtuple("FBXData", (
"templates", "templates_users", "connections",
"settings", "scene", "objects", "animations",
"data_lamps", "data_cameras", "data_meshes", "mesh_mat_indices",
"data_bones", "data_deformers",
"data_world", "data_materials", "data_textures", "data_videos",
))
def fbx_mat_properties_from_texture(tex):
"""
Returns a set of FBX material properties that are affected by the given texture.
Quite obviously, this is a fuzzy and far-from-perfect mapping! Amounts of influence are completely lost, for example.
Note tex is actually expected to be a texture slot.
"""
# Tex influence does not exist in FBX, so assume influence < 0.5 = no influence... :/
INFLUENCE_THRESHOLD = 0.5
# Mapping Blender -> FBX (blend_use_name, blend_fact_name, fbx_name).
blend_to_fbx = (
# Lambert & Phong...
("diffuse", "diffuse", b"DiffuseFactor"),
("color_diffuse", "diffuse_color", b"DiffuseColor"),
("alpha", "alpha", b"TransparencyFactor"),
("diffuse", "diffuse", b"TransparentColor"), # Uses diffuse color in Blender!
("emit", "emit", b"EmissiveFactor"),
("diffuse", "diffuse", b"EmissiveColor"), # Uses diffuse color in Blender!
("ambient", "ambient", b"AmbientFactor"),
#("", "", b"AmbientColor"), # World stuff in Blender, for now ignore...
("normal", "normal", b"NormalMap"),
# Note: unsure about those... :/
#("", "", b"Bump"),
#("", "", b"BumpFactor"),
#("", "", b"DisplacementColor"),
#("", "", b"DisplacementFactor"),
# Phong only.
("specular", "specular", b"SpecularFactor"),
("color_spec", "specular_color", b"SpecularColor"),
# See Material template about those two!
("hardness", "hardness", b"Shininess"),
("hardness", "hardness", b"ShininessExponent"),
("mirror", "mirror", b"ReflectionColor"),
("raymir", "raymir", b"ReflectionFactor"),
)
tex_fbx_props = set()
for use_map_name, name_factor, fbx_prop_name in blend_to_fbx:
if getattr(tex, "use_map_" + use_map_name) and getattr(tex, name_factor + "_factor") >= INFLUENCE_THRESHOLD:
tex_fbx_props.add(fbx_prop_name)
return tex_fbx_props
def fbx_skeleton_from_armature(scene, settings, armature, objects, data_bones, data_deformers, arm_parents):
"""
Create skeleton from armature/bones (NodeAttribute/LimbNode and Model/LimbNode), and for each deformed mesh,
create Pose/BindPose(with sub PoseNode) and Deformer/Skin(with Deformer/SubDeformer/Cluster).
Also supports "parent to bone" (simple parent to Model/LimbNode).
arm_parents is a set of tuples (armature, object) for all successful armature bindings.
"""
arm = armature.data
bones = {}
for bo in arm.bones:
key, data_key = get_blender_bone_key(armature, bo)
objects[bo] = key
data_bones[bo] = (key, data_key, armature)
bones[bo.name] = bo
for obj in objects.keys():
if not isinstance(obj, Object):
continue
if obj.type not in {'MESH'}:
continue
if obj.parent != armature:
continue
# Always handled by an Armature modifier...
found = False
for mod in obj.modifiers:
if mod.type not in {'ARMATURE'}:
continue
# We only support vertex groups binding method, not bone envelopes one!
if mod.object == armature and mod.use_vertex_groups:
found = True
break
if not found:
continue
# Now we have a mesh using this armature. First, find out which bones are concerned!
# XXX Assuming here non-used bones can have no cluster, this has to be checked!
used_bones = tuple(bones[vg.name] for vg in obj.vertex_groups if vg.name in bones)
if not used_bones:
continue
# Note: bind poses have no relations at all (no connections), so no need for any preprocess for them.
# Create skin & clusters relations (note skins are connected to geometry, *not* model!).
me = obj.data
clusters = {bo: get_blender_bone_cluster_key(armature, me, bo) for bo in used_bones}
data_deformers.setdefault(armature, {})[me] = (get_blender_armature_skin_key(armature, me), obj, clusters)
# We don't want a regular parent relationship for those in FBX...
arm_parents.add((armature, obj))
def fbx_animations_simplify(scene_data, animdata):
"""
Simplifies FCurves!
"""
fac = scene_data.settings.bake_anim_simplify_factor
step = scene_data.settings.bake_anim_step
# So that, with default factor and step values (1), we get:
max_frame_diff = step * fac * 10 # max step of 10 frames.
value_diff_fac = fac / 1000 # min value evolution: 0.1% of whole range.
for obj, keys in animdata.items():
if not keys:
continue
extremums = [(min(values), max(values)) for values in zip(*(k[1] for k in keys))]
min_diffs = [max((mx - mn) * value_diff_fac, 0.000001) for mn, mx in extremums]
p_currframe, p_key, p_key_write = keys[0]
p_keyed = [(p_currframe - max_frame_diff, val) for val in p_key]
for currframe, key, key_write in keys:
#if obj.name == "Cube":
#print(currframe, key, key_write)
for idx, (val, p_val) in enumerate(zip(key, p_key)):
p_keyedframe, p_keyedval = p_keyed[idx]
if val == p_val:
# Never write keyframe when value is exactly the same as prev one!
continue
if abs(val - p_val) >= min_diffs[idx]:
# If enough difference from previous sampled value, key this value *and* the previous one!
key_write[idx] = True
p_key_write[idx] = True
p_keyed[idx] = (currframe, val)
elif (abs(val - p_keyedval) >= min_diffs[idx]) or (currframe - p_keyedframe >= max_frame_diff):
# Else, if enough difference from previous keyed value (or max gap between keys is reached),
# key this value only!
key_write[idx] = True
p_keyed[idx] = (currframe, val)
p_currframe, p_key, p_key_write = currframe, key, key_write
# Always key last sampled values (we ignore curves with a single valid key anyway).
p_key_write[:] = [True] * len(p_key_write)
def fbx_animations_objects(scene_data):
"""
Generate animation data from objects.
"""
objects = scene_data.objects
bake_step = scene_data.settings.bake_anim_step
scene = scene_data.scene
# FBX mapping info: Property affected, and name of the "sub" property (to distinguish e.g. vector's channels).
fbx_names = (
("Lcl Translation", "d|X"), ("Lcl Translation", "d|Y"), ("Lcl Translation", "d|Z"),
("Lcl Rotation", "d|X"), ("Lcl Rotation", "d|Y"), ("Lcl Rotation", "d|Z"),
("Lcl Scaling", "d|X"), ("Lcl Scaling", "d|Y"), ("Lcl Scaling", "d|Z"),
)
back_currframe = scene.frame_current
animdata = {obj: [] for obj in objects.keys()}
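# animdata maps each object to a list of (frame, baked_transform_tuple, per-channel written flags) samples.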
currframe = scene.frame_start
while currframe < scene.frame_end:
scene.frame_set(int(currframe), currframe - int(currframe))
for obj in objects.keys():
if isinstance(obj, Bone):
continue # TODO!
# We compute baked loc/rot/scale for all objects.
loc, rot, scale, _m, _mr = fbx_object_tx(scene_data, obj)
tx = tuple(loc) + tuple(units_convert_iter(rot, "radian", "degree")) + tuple(scale)
animdata[obj].append((currframe, tx, [False] * len(tx)))
currframe += bake_step
scene.frame_set(back_currframe, 0.0)
fbx_animations_simplify(scene_data, animdata)
animations = {}
# And now, produce final data (usable by FBX export code)...
for obj, keys in animdata.items():
if not keys:
continue
curves = [[] for k in keys[0][1]]
for currframe, key, key_write in keys:
#if obj.name == "Cube":
#print(currframe, key, key_write)
for idx, (val, wrt) in enumerate(zip(key, key_write)):
if wrt:
curves[idx].append((currframe, val))
loc, rot, scale, _m, _mr = fbx_object_tx(scene_data, obj)
tx = tuple(loc) + tuple(units_convert_iter(rot, "radian", "degree")) + tuple(scale)
# For each channel: (curve key, default value, keyframes); keyframes is empty if the channel is not animated.
final_keys = {}
for idx, c in enumerate(curves):
fbx_group, fbx_item = fbx_names[idx]
fbx_item_key = get_blender_anim_curve_key(obj, fbx_group, fbx_item)
if fbx_group not in final_keys:
final_keys[fbx_group] = (get_blender_anim_curve_node_key(obj, fbx_group), {})
final_keys[fbx_group][1][fbx_item] = (fbx_item_key, tx[idx], c if len(c) > 1 else [])
# And now, remove anim groups (i.e. groups of curves affecting a single FBX property) with no curve at all!
del_groups = []
for grp, (_k, data) in final_keys.items():
if True in (bool(d[2]) for d in data.values()):
continue
del_groups.append(grp)
for grp in del_groups:
del final_keys[grp]
if final_keys:
animations[obj] = (get_blender_anim_layer_key(obj), final_keys)
return (get_blender_anim_stack_key(scene), animations) if animations else None
def fbx_data_from_scene(scene, settings):
"""
Do some pre-processing over scene's data...
"""
objtypes = settings.object_types
##### Gathering data...
# This is rather simple for now, maybe we could end up generating templates with most-used values
# instead of default ones?
objects = {obj: get_blenderID_key(obj) for obj in scene.objects if obj.type in objtypes}
data_lamps = {obj.data: get_blenderID_key(obj.data) for obj in objects if obj.type == 'LAMP'}
# Unfortunately, FBX camera data contains object-level data (like position, orientation, etc.)...
data_cameras = {obj: get_blenderID_key(obj.data) for obj in objects if obj.type == 'CAMERA'}
data_meshes = {obj.data: (get_blenderID_key(obj.data), obj) for obj in objects if obj.type == 'MESH'}
# Armatures!
data_bones = {}
data_deformers = {}
arm_parents = set()
for obj in tuple(objects.keys()):
if obj.type not in {'ARMATURE'}:
continue
fbx_skeleton_from_armature(scene, settings, obj, objects, data_bones, data_deformers, arm_parents)
# Some world settings are embedded in FBX materials...
if scene.world:
data_world = {scene.world: get_blenderID_key(scene.world)}
else:
data_world = {}
# TODO: Check all the mat stuff works even when mats are linked to Objects
# (we can then have the same mesh used with different materials...).
# *Should* work, as FBX always links its materials to Models (i.e. objects).
# XXX However, material indices would probably break...
data_materials = {}
for obj in objects:
# Only meshes for now!
if not isinstance(obj, Object) or obj.type not in {'MESH'}:
continue
for mat_s in obj.material_slots:
mat = mat_s.material
# Note theoretically, FBX supports any kind of materials, even GLSL shaders etc.
# However, I doubt anything else than Lambert/Phong is really portable!
# We support any kind of 'surface' shader though, better to have some kind of default Lambert than nothing.
# TODO: Support nodes (*BIG* todo!).
if mat.type in {'SURFACE'} and not mat.use_nodes:
if mat in data_materials:
data_materials[mat][1].append(obj)
else:
data_materials[mat] = (get_blenderID_key(mat), [obj])
# Note FBX textures also hold their mapping info.
# TODO: Support layers?
data_textures = {}
# FbxVideo also used to store static images...
data_videos = {}
# For now, do not use world textures, don't think they can be linked to anything FBX wise...
for mat in data_materials.keys():
for tex in mat.texture_slots:
if tex is None:
continue
# For now, only consider image textures.
# Note FBX does have support for procedural textures, but this is not portable at all (opaque blob),
# so not useful for us.
# TODO I think ENVIRONMENT_MAP should be usable in FBX as well, but for now let it aside.
#if tex.texture.type not in {'IMAGE', 'ENVIRONMENT_MAP'}:
if tex.texture.type not in {'IMAGE'}:
continue
img = tex.texture.image
if img is None:
continue
# Find out whether we can actually use this texture for this material, in FBX context.
tex_fbx_props = fbx_mat_properties_from_texture(tex)
if not tex_fbx_props:
continue
if tex in data_textures:
data_textures[tex][1][mat] = tex_fbx_props
else:
data_textures[tex] = (get_blenderID_key(tex), {mat: tex_fbx_props})
if img in data_videos:
data_videos[img][1].append(tex)
else:
data_videos[img] = (get_blenderID_key(img), [tex])
# Animation...
# From objects only for a start.
tmp_scdata = FBXData( # Kind of hack, we need a temp scene_data for object's space handling to bake animations...
None, None, None,
settings, scene, objects, None,
data_lamps, data_cameras, data_meshes, None,
data_bones, data_deformers,
data_world, data_materials, data_textures, data_videos,
)
animations = fbx_animations_objects(tmp_scdata)
##### Creation of templates...
templates = OrderedDict()
templates[b"GlobalSettings"] = fbx_template_def_globalsettings(scene, settings, nbr_users=1)
if data_lamps:
templates[b"Light"] = fbx_template_def_light(scene, settings, nbr_users=len(data_lamps))
if data_cameras:
templates[b"Camera"] = fbx_template_def_camera(scene, settings, nbr_users=len(data_cameras))
if data_bones:
templates[b"Bone"] = fbx_template_def_bone(scene, settings, nbr_users=len(data_bones))
if data_meshes:
templates[b"Geometry"] = fbx_template_def_geometry(scene, settings, nbr_users=len(data_meshes))
if objects:
templates[b"Model"] = fbx_template_def_model(scene, settings, nbr_users=len(objects))
if arm_parents:
# Number of Pose|BindPose elements should be the same as number of meshes-parented-to-armatures
templates[b"BindPose"] = fbx_template_def_pose(scene, settings, nbr_users=len(arm_parents))
if data_deformers:
nbr = len(data_deformers)
nbr += sum(len(clusters) for def_me in data_deformers.values() for a, b, clusters in def_me.values())
templates[b"Deformers"] = fbx_template_def_deformer(scene, settings, nbr_users=nbr)
# No world support in FBX...
"""
if data_world:
templates[b"World"] = fbx_template_def_world(scene, settings, nbr_users=len(data_world))
"""
if data_materials:
templates[b"Material"] = fbx_template_def_material(scene, settings, nbr_users=len(data_materials))
if data_textures:
templates[b"TextureFile"] = fbx_template_def_texture_file(scene, settings, nbr_users=len(data_textures))
if data_videos:
templates[b"Video"] = fbx_template_def_video(scene, settings, nbr_users=len(data_videos))
if animations:
# One stack!
templates[b"AnimationStack"] = fbx_template_def_animstack(scene, settings, nbr_users=1)
# One layer per animated object.
templates[b"AnimationLayer"] = fbx_template_def_animlayer(scene, settings, nbr_users=len(animations[1]))
# As much curve node as animated properties.
nbr = sum(len(al) for _kal, al in animations[1].values())
templates[b"AnimationCurveNode"] = fbx_template_def_animcurvenode(scene, settings, nbr_users=nbr)
# And the number of curves themselves...
nbr = sum(1 if ac else 0 for _kal, al in animations[1].values()
for _kacn, acn in al.values()
for _kac, _dv, ac in acn.values())
templates[b"AnimationCurve"] = fbx_template_def_animcurve(scene, settings, nbr_users=nbr)
templates_users = sum(tmpl.nbr_users for tmpl in templates.values())
##### Creation of connections...
connections = []
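# Each connection is a (type, child_uid, parent_uid, property) tuple:
# b"OO" links object to object, b"OP" links object to a property of another object.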
# Objects (with classical parenting).
for obj, obj_key in objects.items():
# Bones are handled later.
if isinstance(obj, Object):
par = obj.parent
par_key = 0 # Convention, "root" node (never explicitly written).
if par and par in objects:
par_type = obj.parent_type
if par_type in {'OBJECT', 'BONE'}:
# Meshes parented to armature also have 'OBJECT' par_type, in FBX this is handled separately,
# we do not want an extra object parenting!
if (par, obj) not in arm_parents:
par_key = objects[par]
else:
print("Sorry, “{}” parenting type is not supported".format(par_type))
connections.append((b"OO", get_fbxuid_from_key(obj_key), get_fbxuid_from_key(par_key), None))
# Armature & Bone chains.
for bo, (bo_key, _bo_data_key, arm) in data_bones.items():
par = bo.parent
if not par: # Root bone.
par = arm
if par not in objects:
continue
connections.append((b"OO", get_fbxuid_from_key(bo_key), get_fbxuid_from_key(objects[par]), None))
# Cameras
for obj_cam, cam_key in data_cameras.items():
cam_obj_key = objects[obj_cam]
connections.append((b"OO", get_fbxuid_from_key(cam_key), get_fbxuid_from_key(cam_obj_key), None))
# Object data.
for obj, obj_key in objects.items():
if isinstance(obj, Bone):
_bo_key, bo_data_key, _arm = data_bones[obj]
assert(_bo_key == obj_key)
connections.append((b"OO", get_fbxuid_from_key(bo_data_key), get_fbxuid_from_key(obj_key), None))
elif obj.type == 'LAMP':
lamp_key = data_lamps[obj.data]
connections.append((b"OO", get_fbxuid_from_key(lamp_key), get_fbxuid_from_key(obj_key), None))
elif obj.type == 'MESH':
mesh_key, _obj = data_meshes[obj.data]
connections.append((b"OO", get_fbxuid_from_key(mesh_key), get_fbxuid_from_key(obj_key), None))
# Deformers (armature-to-geometry, only for meshes currently)...
for arm, deformed_meshes in data_deformers.items():
for me, (skin_key, _obj, clusters) in deformed_meshes.items():
# skin -> geometry
mesh_key, _obj = data_meshes[me]
connections.append((b"OO", get_fbxuid_from_key(skin_key), get_fbxuid_from_key(mesh_key), None))
for bo, clstr_key in clusters.items():
# cluster -> skin
connections.append((b"OO", get_fbxuid_from_key(clstr_key), get_fbxuid_from_key(skin_key), None))
# bone -> cluster
connections.append((b"OO", get_fbxuid_from_key(objects[bo]), get_fbxuid_from_key(clstr_key), None))
# Materials
mesh_mat_indices = {}
_objs_indices = {}
for mat, (mat_key, objs) in data_materials.items():
for obj in objs:
obj_key = objects[obj]
connections.append((b"OO", get_fbxuid_from_key(mat_key), get_fbxuid_from_key(obj_key), None))
# Get index of this mat for this object.
# Mat indices for mesh faces are determined by their order in 'mat to ob' connections.
# Only mats for meshes currently...
me = obj.data
idx = _objs_indices[obj] = _objs_indices.get(obj, -1) + 1
mesh_mat_indices.setdefault(me, {})[mat] = idx
del _objs_indices
# Textures
for tex, (tex_key, mats) in data_textures.items():
for mat, fbx_mat_props in mats.items():
mat_key, _objs = data_materials[mat]
for fbx_prop in fbx_mat_props:
# texture -> material properties
connections.append((b"OP", get_fbxuid_from_key(tex_key), get_fbxuid_from_key(mat_key), fbx_prop))
# Images
for vid, (vid_key, texs) in data_videos.items():
for tex in texs:
tex_key, _texs = data_textures[tex]
connections.append((b"OO", get_fbxuid_from_key(vid_key), get_fbxuid_from_key(tex_key), None))