"docs.it4i/software/compilers.md" did not exist on "2d119ecf24ca0e4ca877cacea41de013597cb595"
Newer
Older
shadow_color = lamp.shadow_color
light = elem_data_single_int64(root, b"NodeAttribute", get_fbxuid_from_key(lamp_key))
light.add_string(fbx_name_class(lamp.name.encode(), b"NodeAttribute"))
light.add_string(b"Light")
elem_data_single_int32(light, b"GeometryVersion", FBX_GEOMETRY_VERSION) # Sic...
tmpl = scene_data.templates[b"Light"]
props = elem_properties(light)
elem_props_template_set(tmpl, props, "p_enum", b"LightType", FBX_LIGHT_TYPES[lamp.type])
elem_props_template_set(tmpl, props, "p_bool", b"CastLight", do_light)
elem_props_template_set(tmpl, props, "p_color", b"Color", lamp.color)
elem_props_template_set(tmpl, props, "p_number", b"Intensity", lamp.energy * 100.0)
elem_props_template_set(tmpl, props, "p_enum", b"DecayType", decay_type)
elem_props_template_set(tmpl, props, "p_double", b"DecayStart", lamp.distance * gscale)
elem_props_template_set(tmpl, props, "p_bool", b"CastShadows", do_shadow)
elem_props_template_set(tmpl, props, "p_color", b"ShadowColor", shadow_color)
if lamp.type in {'SPOT'}:
elem_props_template_set(tmpl, props, "p_double", b"OuterAngle", math.degrees(lamp.spot_size))
elem_props_template_set(tmpl, props, "p_double", b"InnerAngle",
math.degrees(lamp.spot_size * (1.0 - lamp.spot_blend)))
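# Worked example (illustration only, not from the original code): with lamp.spot_size = 1.0 rad
# (~57.3 deg) and lamp.spot_blend = 0.25, this writes OuterAngle ~= 57.30 and
# InnerAngle = degrees(1.0 * (1.0 - 0.25)) ~= 42.97; Blender stores the full cone angle in
# radians plus a blend fraction, while these FBX properties are written here as plain degrees.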
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, lamp)
def fbx_data_camera_elements(root, cam_obj, scene_data):
"""
Write the Camera data blocks.
"""
gscale = scene_data.settings.global_scale
cam_data = cam_obj.data
cam_key = scene_data.data_cameras[cam_obj]
# Real data now, good old camera!
# Object transform info.
loc, rot, scale, matrix, matrix_rot = fbx_object_tx(scene_data, cam_obj)
up = matrix_rot * Vector((0.0, 1.0, 0.0))
to = matrix_rot * Vector((0.0, 0.0, -1.0))
# Render settings.
# TODO We could export much more...
render = scene_data.scene.render
width = render.resolution_x
height = render.resolution_y
aspect = width / height
# Film width & height from mm to inches
filmwidth = units_convert(cam_data.sensor_width, "millimeter", "inch")
filmheight = units_convert(cam_data.sensor_height, "millimeter", "inch")
filmaspect = filmwidth / filmheight
# Film offset
offsetx = filmwidth * cam_data.shift_x
offsety = filmaspect * filmheight * cam_data.shift_y
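# Quick sanity check (illustration only): a default 32 mm sensor gives
# filmwidth = 32 / 25.4 ~= 1.26 inch, and with shift_x = 0.1 an offsetx of ~0.126 inch.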
cam = elem_data_single_int64(root, b"NodeAttribute", get_fbxuid_from_key(cam_key))
cam.add_string(fbx_name_class(cam_data.name.encode(), b"NodeAttribute"))
cam.add_string(b"Camera")
tmpl = scene_data.templates[b"Camera"]
props = elem_properties(cam)
elem_props_template_set(tmpl, props, "p_vector", b"Position", loc)
elem_props_template_set(tmpl, props, "p_vector", b"UpVector", up)
elem_props_template_set(tmpl, props, "p_vector", b"InterestPosition", to)
# Should we use world value?
elem_props_template_set(tmpl, props, "p_color", b"BackgroundColor", (0.0, 0.0, 0.0))
elem_props_template_set(tmpl, props, "p_bool", b"DisplayTurnTableIcon", True)
elem_props_template_set(tmpl, props, "p_double", b"FilmWidth", filmwidth)
elem_props_template_set(tmpl, props, "p_double", b"FilmHeight", filmheight)
elem_props_template_set(tmpl, props, "p_double", b"FilmAspectRatio", filmaspect)
elem_props_template_set(tmpl, props, "p_double", b"FilmOffsetX", offsetx)
elem_props_template_set(tmpl, props, "p_double", b"FilmOffsetY", offsety)
elem_props_template_set(tmpl, props, "p_enum", b"ApertureMode", 3) # FocalLength.
elem_props_template_set(tmpl, props, "p_enum", b"GateFit", 2) # FitHorizontal.
elem_props_template_set(tmpl, props, "p_fov", b"FieldOfView", math.degrees(cam_data.angle_x))
elem_props_template_set(tmpl, props, "p_fov_x", b"FieldOfViewX", math.degrees(cam_data.angle_x))
elem_props_template_set(tmpl, props, "p_fov_y", b"FieldOfViewY", math.degrees(cam_data.angle_y))
# No need to convert to inches here...
elem_props_template_set(tmpl, props, "p_double", b"FocalLength", cam_data.lens)
elem_props_template_set(tmpl, props, "p_double", b"SafeAreaAspectRatio", aspect)
elem_props_template_set(tmpl, props, "p_double", b"NearPlane", cam_data.clip_start * gscale)
elem_props_template_set(tmpl, props, "p_double", b"FarPlane", cam_data.clip_end * gscale)
elem_props_template_set(tmpl, props, "p_enum", b"BackPlaneDistanceMode", 1) # RelativeToCamera.
elem_props_template_set(tmpl, props, "p_double", b"BackPlaneDistance", cam_data.clip_end * gscale)
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, cam_data)
elem_data_single_string(cam, b"TypeFlags", b"Camera")
elem_data_single_int32(cam, b"GeometryVersion", 124) # Sic...
elem_data_vec_float64(cam, b"Position", loc)
elem_data_vec_float64(cam, b"Up", up)
elem_data_vec_float64(cam, b"LookAt", to)
elem_data_single_int32(cam, b"ShowInfoOnMoving", 1)
elem_data_single_int32(cam, b"ShowAudio", 0)
elem_data_vec_float64(cam, b"AudioColor", (0.0, 1.0, 0.0))
elem_data_single_float64(cam, b"CameraOrthoZoom", 1.0)
def fbx_data_mesh_elements(root, me, scene_data):
"""
Write the Mesh (Geometry) data block.
"""
# Ugly helper... :/
def _infinite_gen(val):
while 1:
yield val
me_key, me_obj = scene_data.data_meshes[me]
# No gscale/gmat here, all data are supposed to be in object space.
smooth_type = scene_data.settings.mesh_smooth_type
do_bake_space_transform = use_bake_space_transform(scene_data, me_obj)
# Vertices are in object space, but we are post-multiplying all transforms with the inverse of the
# global matrix, so we need to apply the global matrix to the vertices to get the correct result.
geom_mat_co = scene_data.settings.global_matrix if do_bake_space_transform else None
# We need to apply the inverse transpose of the global matrix when transforming normals.
geom_mat_no = Matrix(scene_data.settings.global_matrix_inv_transposed) if do_bake_space_transform else None
if geom_mat_no is not None:
# Remove translation & scaling!
geom_mat_no.translation = Vector()
geom_mat_no.normalize()
geom = elem_data_single_int64(root, b"Geometry", get_fbxuid_from_key(me_key))
geom.add_string(fbx_name_class(me.name.encode(), b"Geometry"))
geom.add_string(b"Mesh")
tmpl = scene_data.templates[b"Geometry"]
props = elem_properties(geom)
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, me)
elem_data_single_int32(geom, b"GeometryVersion", FBX_GEOMETRY_VERSION)
# Vertex cos.
t_co = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.vertices) * 3
me.vertices.foreach_get("co", t_co)
if geom_mat_co is not None:
def _vcos_transformed_gen(raw_cos, m=None):
# Note: we could most likely get much better performance with numpy, but will leave this as TODO for now.
return chain(*(m * Vector(v) for v in zip(*(iter(raw_cos),) * 3)))
t_co = _vcos_transformed_gen(t_co, geom_mat_co)
elem_data_single_float64_array(geom, b"Vertices", t_co)
del t_co
# Polygon indices.
#
# We do loose edges as two-vertex faces, if enabled...
#
# Note we have to process Edges in the same time, as they are based on poly's loops...
loop_nbr = len(me.loops)
t_pvi = array.array(data_types.ARRAY_INT32, (0,)) * loop_nbr
t_ls = [None] * len(me.polygons)
me.loops.foreach_get("vertex_index", t_pvi)
me.polygons.foreach_get("loop_start", t_ls)
# Add "fake" faces for loose edges.
if scene_data.settings.use_mesh_edges:
t_le = tuple(e.vertices for e in me.edges if e.is_loose)
t_pvi.extend(chain(*t_le))
t_ls.extend(range(loop_nbr, loop_nbr + len(t_le) * 2, 2))  # Each loose edge becomes a two-loop "face".
del t_le
# Edges...
# Note: Edges are represented as a loop here: each edge uses a single index, which refers to the polygon array.
# The edge is made by the vertex indexed by this polygon's point and the next one on the same polygon.
# Advantage: Only one index per edge.
# Drawback: Only polygon's edges can be represented (that's why we have to add fake two-verts polygons
# for loose edges).
# We also have to store a mapping from real edges to their indices in this array, for edge-mapped data
# (like e.g. crease).
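# Small example of this layout (illustration only): for a single triangle over vertices
# (0, 1, 2), PolygonVertexIndex is (0, 1, -3) (see the ^-1 trick below) and "Edges" is simply
# (0, 1, 2): the loop index at which each edge starts. edges_map then maps the vertex pairs
# (0, 1), (1, 2) and (0, 2) to edge indices 0, 1 and 2.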
t_eli = array.array(data_types.ARRAY_INT32)
edges_map = {}
edges_nbr = 0
if t_ls and t_pvi:
t_ls = set(t_ls)
todo_edges = [None] * len(me.edges) * 2
me.edges.foreach_get("vertices", todo_edges)
todo_edges = set((v1, v2) if v1 < v2 else (v2, v1) for v1, v2 in zip(*(iter(todo_edges),) * 2))
li = 0
vi = vi_start = t_pvi[0]
for li_next, vi_next in enumerate(t_pvi[1:] + t_pvi[:1], start=1):
if li_next in t_ls: # End of a poly's loop.
vi2 = vi_start
vi_start = vi_next
else:
vi2 = vi_next
e_key = (vi, vi2) if vi < vi2 else (vi2, vi)
if e_key in todo_edges:
t_eli.append(li)
todo_edges.remove(e_key)
edges_map[e_key] = edges_nbr
edges_nbr += 1
vi = vi_next
li = li_next
# End of edges!
# We have to ^-1 (bitwise-invert) the last index of each loop, to mark the end of that polygon.
for ls in t_ls:
t_pvi[ls - 1] ^= -1
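# E.g. (illustration only) a quad over vertices (0, 1, 2, 3) is stored as (0, 1, 2, -4), since
# 3 ^ -1 == -4; readers recover the real index with idx ^ -1 (equivalently -idx - 1) and know
# the polygon ends at that corner.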
# And finally we can write data!
elem_data_single_int32_array(geom, b"PolygonVertexIndex", t_pvi)
elem_data_single_int32_array(geom, b"Edges", t_eli)
del t_pvi
del t_ls
del t_eli
# And now, layers!
# Smoothing.
if smooth_type in {'FACE', 'EDGE'}:
t_ps = None
_map = b""
if smooth_type == 'FACE':
t_ps = array.array(data_types.ARRAY_INT32, (0,)) * len(me.polygons)
me.polygons.foreach_get("use_smooth", t_ps)
_map = b"ByPolygon"
else: # EDGE
# Write Edge Smoothing.
t_ps = array.array(data_types.ARRAY_INT32, (0,)) * edges_nbr
for e in me.edges:
if e.key not in edges_map:
continue # Only loose edges, in theory!
t_ps[edges_map[e.key]] = not e.use_edge_sharp
_map = b"ByEdge"
lay_smooth = elem_data_single_int32(geom, b"LayerElementSmoothing", 0)
elem_data_single_int32(lay_smooth, b"Version", FBX_GEOMETRY_SMOOTHING_VERSION)
elem_data_single_string(lay_smooth, b"Name", b"")
elem_data_single_string(lay_smooth, b"MappingInformationType", _map)
elem_data_single_string(lay_smooth, b"ReferenceInformationType", b"Direct")
elem_data_single_int32_array(lay_smooth, b"Smoothing", t_ps) # Sigh, int32 for bool...
del t_ps
# TODO: Edge crease (LayerElementCrease).
# And we are done with edges!
del edges_map
# Loop normals.
# NOTE: this is not supported by importer currently.
# XXX Official docs says normals should use IndexToDirect,
# but this does not seem well supported by apps currently...
me.calc_normals_split()
def _nortuples_gen(raw_nors, m):
# Great, now normals are also expected 4D!
gen = zip(*(iter(raw_nors),) * 3 + (_infinite_gen(1.0),))
return gen if m is None else (m * Vector(v) for v in gen)
t_ln = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops) * 3
me.loops.foreach_get("normal", t_ln)
t_ln = _nortuples_gen(t_ln, geom_mat_no)
t_ln = tuple(t_ln) # No choice... :/
lay_nor = elem_data_single_int32(geom, b"LayerElementNormal", 0)
elem_data_single_int32(lay_nor, b"Version", FBX_GEOMETRY_NORMAL_VERSION)
elem_data_single_string(lay_nor, b"Name", b"")
elem_data_single_string(lay_nor, b"MappingInformationType", b"ByPolygonVertex")
elem_data_single_string(lay_nor, b"ReferenceInformationType", b"IndexToDirect")
ln2idx = tuple(set(t_ln))
elem_data_single_float64_array(lay_nor, b"Normals", chain(*ln2idx))
# Normal weights, no idea what it is.
t_lnw = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(ln2idx)
elem_data_single_float64_array(lay_nor, b"NormalsW", t_lnw)
ln2idx = {nor: idx for idx, nor in enumerate(ln2idx)}
elem_data_single_int32_array(lay_nor, b"NormalsIndex", (ln2idx[n] for n in t_ln))
else:
lay_nor = elem_data_single_int32(geom, b"LayerElementNormal", 0)
elem_data_single_int32(lay_nor, b"Version", FBX_GEOMETRY_NORMAL_VERSION)
elem_data_single_string(lay_nor, b"Name", b"")
elem_data_single_string(lay_nor, b"MappingInformationType", b"ByPolygonVertex")
elem_data_single_string(lay_nor, b"ReferenceInformationType", b"Direct")
elem_data_single_float64_array(lay_nor, b"Normals", chain(*t_ln))
# Normal weights, no idea what it is.
t_ln = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops)
elem_data_single_float64_array(lay_nor, b"NormalsW", t_ln)
del t_ln
# tspace
tspacenumber = 0
if scene_data.settings.use_tspace:
tspacenumber = len(me.uv_layers)
if tspacenumber:
t_ln = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops) * 3
t_lnw = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops)
for idx, uvlayer in enumerate(me.uv_layers):
name = uvlayer.name
me.calc_tangents(name)
# Loop bitangents (aka binormals).
# NOTE: this is not supported by importer currently.
me.loops.foreach_get("bitangent", t_ln)
lay_nor = elem_data_single_int32(geom, b"LayerElementBinormal", idx)
elem_data_single_int32(lay_nor, b"Version", FBX_GEOMETRY_BINORMAL_VERSION)
elem_data_single_string_unicode(lay_nor, b"Name", name)
elem_data_single_string(lay_nor, b"MappingInformationType", b"ByPolygonVertex")
elem_data_single_string(lay_nor, b"ReferenceInformationType", b"Direct")
elem_data_single_float64_array(lay_nor, b"Binormals", chain(*_nortuples_gen(t_ln, geom_mat_no)))
# Binormal weights, no idea what it is.
elem_data_single_float64_array(lay_nor, b"BinormalsW", t_lnw)
# Loop tangents.
# NOTE: this is not supported by importer currently.
me.loops.foreach_get("tangent", t_ln)
lay_nor = elem_data_single_int32(geom, b"LayerElementTangent", idx)
elem_data_single_int32(lay_nor, b"Version", FBX_GEOMETRY_TANGENT_VERSION)
elem_data_single_string_unicode(lay_nor, b"Name", name)
elem_data_single_string(lay_nor, b"MappingInformationType", b"ByPolygonVertex")
elem_data_single_string(lay_nor, b"ReferenceInformationType", b"Direct")
elem_data_single_float64_array(lay_nor, b"Binormals", chain(*_nortuples_gen(t_ln, geom_mat_no)))
# Tangent weights, no idea what it is.
elem_data_single_float64_array(lay_nor, b"TangentsW", t_lnw)
me.free_tangents()
me.free_normals_split()
del _nortuples_gen
# Write VertexColor Layers
# note, no programs seem to use this info :/
vcolnumber = len(me.vertex_colors)
if vcolnumber:
def _coltuples_gen(raw_cols):
return zip(*(iter(raw_cols),) * 3 + (_infinite_gen(1.0),)) # We need a fake alpha...
t_lc = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops) * 3
for colindex, collayer in enumerate(me.vertex_colors):
collayer.data.foreach_get("color", t_lc)
lay_vcol = elem_data_single_int32(geom, b"LayerElementColor", colindex)
elem_data_single_int32(lay_vcol, b"Version", FBX_GEOMETRY_VCOLOR_VERSION)
elem_data_single_string_unicode(lay_vcol, b"Name", collayer.name)
elem_data_single_string(lay_vcol, b"MappingInformationType", b"ByPolygonVertex")
elem_data_single_string(lay_vcol, b"ReferenceInformationType", b"IndexToDirect")
col2idx = tuple(set(_coltuples_gen(t_lc)))
elem_data_single_float64_array(lay_vcol, b"Colors", chain(*col2idx)) # Flatten again...
col2idx = {col: idx for idx, col in enumerate(col2idx)}
elem_data_single_int32_array(lay_vcol, b"ColorIndex", (col2idx[c] for c in _coltuples_gen(t_lc)))
del col2idx
del t_lc
del _coltuples_gen
# Write UV layers.
# Note: LayerElementTexture is deprecated since FBX 2011 - luckily!
# Textures are now only related to materials, in FBX!
uvnumber = len(me.uv_layers)
if uvnumber:
def _uvtuples_gen(raw_uvs):
return zip(*(iter(raw_uvs),) * 2)
t_luv = array.array(data_types.ARRAY_FLOAT64, (0.0,)) * len(me.loops) * 2
for uvindex, uvlayer in enumerate(me.uv_layers):
uvlayer.data.foreach_get("uv", t_luv)
lay_uv = elem_data_single_int32(geom, b"LayerElementUV", uvindex)
elem_data_single_int32(lay_uv, b"Version", FBX_GEOMETRY_UV_VERSION)
elem_data_single_string_unicode(lay_uv, b"Name", uvlayer.name)
elem_data_single_string(lay_uv, b"MappingInformationType", b"ByPolygonVertex")
elem_data_single_string(lay_uv, b"ReferenceInformationType", b"IndexToDirect")
uv2idx = tuple(set(_uvtuples_gen(t_luv)))
elem_data_single_float64_array(lay_uv, b"UV", chain(*uv2idx)) # Flatten again...
uv2idx = {uv: idx for idx, uv in enumerate(uv2idx)}
elem_data_single_int32_array(lay_uv, b"UVIndex", (uv2idx[uv] for uv in _uvtuples_gen(t_luv)))
del uv2idx
del t_luv
del _uvtuples_gen
# Face's materials.
me_fbxmats_idx = None
if me in scene_data.mesh_mat_indices:
me_fbxmats_idx = scene_data.mesh_mat_indices[me]
me_blmats = me.materials
if me_fbxmats_idx and me_blmats:
lay_mat = elem_data_single_int32(geom, b"LayerElementMaterial", 0)
elem_data_single_int32(lay_mat, b"Version", FBX_GEOMETRY_MATERIAL_VERSION)
elem_data_single_string(lay_mat, b"Name", b"")
nbr_mats = len(me_fbxmats_idx)
if nbr_mats > 1:
t_pm = array.array(data_types.ARRAY_INT32, (0,)) * len(me.polygons)
me.polygons.foreach_get("material_index", t_pm)
# We have to validate mat indices, and map them to FBX indices.
blmats_to_fbxmats_idxs = [me_fbxmats_idx[m] for m in me_blmats]
mat_idx_limit = len(blmats_to_fbxmats_idxs)
def_mat = blmats_to_fbxmats_idxs[0]
_gen = (blmats_to_fbxmats_idxs[m] if m < mat_idx_limit else def_mat for m in t_pm)
t_pm = array.array(data_types.ARRAY_INT32, _gen)
elem_data_single_string(lay_mat, b"MappingInformationType", b"ByPolygon")
# XXX Logically, should be "Direct" reference type, since we do not have any index array, and have one
# value per polygon...
# But looks like FBX expects it to be IndexToDirect here (maybe because materials are already
# indices??? *sigh*).
elem_data_single_string(lay_mat, b"ReferenceInformationType", b"IndexToDirect")
elem_data_single_int32_array(lay_mat, b"Materials", t_pm)
del t_pm
else:
elem_data_single_string(lay_mat, b"MappingInformationType", b"AllSame")
elem_data_single_string(lay_mat, b"ReferenceInformationType", b"IndexToDirect")
elem_data_single_int32_array(lay_mat, b"Materials", [0])
# And the "layer TOC"...
layer = elem_data_single_int32(geom, b"Layer", 0)
elem_data_single_int32(layer, b"Version", FBX_GEOMETRY_LAYER_VERSION)
lay_nor = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_nor, b"Type", b"LayerElementNormal")
elem_data_single_int32(lay_nor, b"TypedIndex", 0)
if smooth_type in {'FACE', 'EDGE'}:
lay_smooth = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_smooth, b"Type", b"LayerElementSmoothing")
elem_data_single_int32(lay_smooth, b"TypedIndex", 0)
if vcolnumber:
lay_vcol = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_vcol, b"Type", b"LayerElementColor")
elem_data_single_int32(lay_vcol, b"TypedIndex", 0)
if uvnumber:
lay_uv = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_uv, b"Type", b"LayerElementUV")
elem_data_single_int32(lay_uv, b"TypedIndex", 0)
if me_fbxmats_idx is not None:
lay_mat = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_mat, b"Type", b"LayerElementMaterial")
elem_data_single_int32(lay_mat, b"TypedIndex", 0)
# Add other uv and/or vcol layers...
for vcolidx, uvidx, tspaceidx in zip_longest(range(1, vcolnumber), range(1, uvnumber), range(1, tspacenumber),
fillvalue=0):
layer = elem_data_single_int32(geom, b"Layer", max(vcolidx, uvidx))
elem_data_single_int32(layer, b"Version", FBX_GEOMETRY_LAYER_VERSION)
if vcolidx:
lay_vcol = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_vcol, b"Type", b"LayerElementColor")
elem_data_single_int32(lay_vcol, b"TypedIndex", vcolidx)
if uvidx:
lay_uv = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_uv, b"Type", b"LayerElementUV")
elem_data_single_int32(lay_uv, b"TypedIndex", uvidx)
if tspaceidx:
lay_binor = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_binor, b"Type", b"LayerElementBinormal")
elem_data_single_int32(lay_binor, b"TypedIndex", tspaceidx)
lay_tan = elem_empty(layer, b"LayerElement")
elem_data_single_string(lay_tan, b"Type", b"LayerElementTangent")
elem_data_single_int32(lay_tan, b"TypedIndex", tspaceidx)
def fbx_data_material_elements(root, mat, scene_data):
"""
Write the Material data block.
"""
ambient_color = (0.0, 0.0, 0.0)
if scene_data.data_world:
ambient_color = next(iter(scene_data.data_world.keys())).ambient_color
mat_key, _objs = scene_data.data_materials[mat]
# Approximation...
mat_type = b"Phong" if mat.specular_shader in {'COOKTORR', 'PHONG', 'BLINN'} else b"Lambert"
fbx_mat = elem_data_single_int64(root, b"Material", get_fbxuid_from_key(mat_key))
fbx_mat.add_string(fbx_name_class(mat.name.encode(), b"Material"))
fbx_mat.add_string(b"")
elem_data_single_int32(fbx_mat, b"Version", FBX_MATERIAL_VERSION)
# those are not yet properties, it seems...
elem_data_single_string(fbx_mat, b"ShadingModel", mat_type)
elem_data_single_int32(fbx_mat, b"MultiLayer", 0) # Should be bool...
tmpl = scene_data.templates[b"Material"]
props = elem_properties(fbx_mat)
elem_props_template_set(tmpl, props, "p_string", b"ShadingModel", mat_type.decode())
elem_props_template_set(tmpl, props, "p_color", b"EmissiveColor", mat.diffuse_color)
elem_props_template_set(tmpl, props, "p_number", b"EmissiveFactor", mat.emit)
elem_props_template_set(tmpl, props, "p_color", b"AmbientColor", ambient_color)
elem_props_template_set(tmpl, props, "p_number", b"AmbientFactor", mat.ambient)
elem_props_template_set(tmpl, props, "p_color", b"DiffuseColor", mat.diffuse_color)
elem_props_template_set(tmpl, props, "p_number", b"DiffuseFactor", mat.diffuse_intensity)
elem_props_template_set(tmpl, props, "p_color", b"TransparentColor",
mat.diffuse_color if mat.use_transparency else (1.0, 1.0, 1.0))
elem_props_template_set(tmpl, props, "p_number", b"TransparencyFactor",
1.0 - mat.alpha if mat.use_transparency else 0.0)
elem_props_template_set(tmpl, props, "p_number", b"Opacity", mat.alpha if mat.use_transparency else 1.0)
elem_props_template_set(tmpl, props, "p_vector_3d", b"NormalMap", (0.0, 0.0, 0.0))
# Not sure about those (left commented out for now)...
# b"Bump": ((0.0, 0.0, 0.0), "p_vector_3d"),
# b"BumpFactor": (1.0, "p_double"),
# b"DisplacementColor": ((0.0, 0.0, 0.0), "p_color_rgb"),
# b"DisplacementFactor": (0.0, "p_double"),
if mat_type == b"Phong":
elem_props_template_set(tmpl, props, "p_color", b"SpecularColor", mat.specular_color)
elem_props_template_set(tmpl, props, "p_number", b"SpecularFactor", mat.specular_intensity / 2.0)
# See Material template about those two!
elem_props_template_set(tmpl, props, "p_number", b"Shininess", (mat.specular_hardness - 1.0) / 5.10)
elem_props_template_set(tmpl, props, "p_number", b"ShininessExponent", (mat.specular_hardness - 1.0) / 5.10)
elem_props_template_set(tmpl, props, "p_color", b"ReflectionColor", mat.mirror_color)
elem_props_template_set(tmpl, props, "p_number", b"ReflectionFactor",
mat.raytrace_mirror.reflect_factor if mat.raytrace_mirror.use else 0.0)
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, mat)
def _gen_vid_path(img, scene_data):
msetts = scene_data.settings.media_settings
fname_rel = bpy_extras.io_utils.path_reference(img.filepath, msetts.base_src, msetts.base_dst, msetts.path_mode,
msetts.subdir, msetts.copy_set, img.library)
fname_abs = os.path.normpath(os.path.abspath(os.path.join(msetts.base_dst, fname_rel)))
return fname_abs, fname_rel
def fbx_data_texture_file_elements(root, tex, scene_data):
"""
Write the (file) Texture data block.
"""
# XXX All this is very fuzzy to me currently...
# Textures do not seem to use properties as much as they could.
# For now assuming most logical and simple stuff.
tex_key, _mats = scene_data.data_textures[tex]
img = tex.texture.image
fname_abs, fname_rel = _gen_vid_path(img, scene_data)
fbx_tex = elem_data_single_int64(root, b"Texture", get_fbxuid_from_key(tex_key))
fbx_tex.add_string(fbx_name_class(tex.name.encode(), b"Texture"))
fbx_tex.add_string(b"")
elem_data_single_string(fbx_tex, b"Type", b"TextureVideoClip")
elem_data_single_int32(fbx_tex, b"Version", FBX_TEXTURE_VERSION)
elem_data_single_string(fbx_tex, b"TextureName", fbx_name_class(tex.name.encode(), b"Texture"))
elem_data_single_string(fbx_tex, b"Media", fbx_name_class(img.name.encode(), b"Video"))
elem_data_single_string_unicode(fbx_tex, b"FileName", fname_abs)
elem_data_single_string_unicode(fbx_tex, b"RelativeFilename", fname_rel)
alpha_source = 0 # None
if img.use_alpha:
if tex.texture.use_calculate_alpha:
alpha_source = 1 # RGBIntensity as alpha.
else:
alpha_source = 2 # Black, i.e. alpha channel.
# BlendMode not useful for now, only affects layered textures afaics.
mapping = 0 # None.
if tex.texture_coords in {'ORCO'}: # XXX Others?
if tex.mapping in {'FLAT'}:
mapping = 1 # Planar
elif tex.mapping in {'CUBE'}:
mapping = 4 # Box
elif tex.mapping in {'TUBE'}:
mapping = 3 # Cylindrical
elif tex.mapping in {'SPHERE'}:
mapping = 2 # Spherical
elif tex.texture_coords in {'UV'}:
# XXX *HOW* do we link to correct UVLayer???
mapping = 6 # UV
wrap_mode = 1 # Clamp
if tex.texture.extension in {'REPEAT'}:
wrap_mode = 0 # Repeat
tmpl = scene_data.templates[b"TextureFile"]
props = elem_properties(fbx_tex)
elem_props_template_set(tmpl, props, "p_enum", b"AlphaSource", alpha_source)
elem_props_template_set(tmpl, props, "p_bool", b"PremultiplyAlpha",
img.alpha_mode in {'STRAIGHT'}) # Or is it PREMUL?
elem_props_template_set(tmpl, props, "p_enum", b"CurrentMappingType", mapping)
elem_props_template_set(tmpl, props, "p_enum", b"WrapModeU", wrap_mode)
elem_props_template_set(tmpl, props, "p_enum", b"WrapModeV", wrap_mode)
elem_props_template_set(tmpl, props, "p_vector_3d", b"Translation", tex.offset)
elem_props_template_set(tmpl, props, "p_vector_3d", b"Scaling", tex.scale)
elem_props_template_set(tmpl, props, "p_bool", b"UseMipMap", tex.texture.use_mipmap)
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, tex.texture)
def fbx_data_video_elements(root, vid, scene_data):
"""
Write the actual image data block.
"""
vid_key, _texs = scene_data.data_videos[vid]
fname_abs, fname_rel = _gen_vid_path(vid, scene_data)
fbx_vid = elem_data_single_int64(root, b"Video", get_fbxuid_from_key(vid_key))
fbx_vid.add_string(fbx_name_class(vid.name.encode(), b"Video"))
fbx_vid.add_string(b"Clip")
elem_data_single_string(fbx_vid, b"Type", b"Clip")
# XXX No Version???
elem_data_single_string_unicode(fbx_vid, b"FileName", fname_abs)
elem_data_single_string_unicode(fbx_vid, b"RelativeFilename", fname_rel)
if scene_data.settings.media_settings.embed_textures:
try:
with open(vid.filepath, 'br') as f:
elem_data_single_byte_array(fbx_vid, b"Content", f.read())
except Exception as e:
print("WARNING: embeding file {} failed ({})".format(vid.filepath, e))
elem_data_single_byte_array(fbx_vid, b"Content", b"")
else:
elem_data_single_byte_array(fbx_vid, b"Content", b"")
def fbx_data_armature_elements(root, armature, scene_data):
"""
Write:
* Bones "data" (NodeAttribute::LimbNode, contains pretty much nothing!).
* Deformers (i.e. Skin), bind between an armature and a mesh.
** SubDeformers (i.e. Cluster), one per bone/vgroup pair.
* BindPose.
Note armature itself has no data, it is a mere "Null" Model...
"""
# Bones "data".
tmpl = scene_data.templates[b"Bone"]
for bo in armature.data.bones:
_bo_key, bo_data_key, _arm = scene_data.data_bones[bo]
fbx_bo = elem_data_single_int64(root, b"NodeAttribute", get_fbxuid_from_key(bo_data_key))
fbx_bo.add_string(fbx_name_class(bo.name.encode(), b"NodeAttribute"))
fbx_bo.add_string(b"LimbNode")
elem_data_single_string(fbx_bo, b"TypeFlags", b"Skeleton")
props = elem_properties(fbx_bo)
elem_props_template_set(tmpl, props, "p_double", b"Size", (bo.tail_local - bo.head_local).length)
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, bo)
# Deformers and BindPoses.
# Note: we might also use Deformers for our "parent to vertex" stuff???
deformer = scene_data.data_deformers.get(armature, None)
if deformer is not None:
for me, (skin_key, obj, clusters) in deformer.items():
# BindPose.
# We assume bind pose for our bones are their "Editmode" pose...
# All matrices are expected in global (world) space.
bindpose_key = get_blender_armature_bindpose_key(armature, me)
fbx_pose = elem_data_single_int64(root, b"Pose", get_fbxuid_from_key(bindpose_key))
fbx_pose.add_string(fbx_name_class(me.name.encode(), b"Pose"))
fbx_pose.add_string(b"BindPose")
elem_data_single_string(fbx_pose, b"Type", b"BindPose")
elem_data_single_int32(fbx_pose, b"Version", FBX_POSE_BIND_VERSION)
elem_data_single_int32(fbx_pose, b"NbPoseNodes", 1 + len(armature.data.bones))
# First node is mesh/object.
mat_world_obj = fbx_object_matrix(scene_data, obj, global_space=True)
fbx_posenode = elem_empty(fbx_pose, b"PoseNode")
elem_data_single_int64(fbx_posenode, b"Node", get_fbxuid_from_key(scene_data.objects[obj]))
elem_data_single_float64_array(fbx_posenode, b"Matrix", matrix_to_array(mat_world_obj))
# And all bones of armature!
mat_world_bones = {}
for bo in armature.data.bones:
bomat = fbx_object_matrix(scene_data, bo, armature, global_space=True)
mat_world_bones[bo] = bomat
fbx_posenode = elem_empty(fbx_pose, b"PoseNode")
elem_data_single_int64(fbx_posenode, b"Node", get_fbxuid_from_key(scene_data.objects[bo]))
elem_data_single_float64_array(fbx_posenode, b"Matrix", matrix_to_array(bomat))
# Deformer.
fbx_skin = elem_data_single_int64(root, b"Deformer", get_fbxuid_from_key(skin_key))
fbx_skin.add_string(fbx_name_class(armature.name.encode(), b"Deformer"))
fbx_skin.add_string(b"Skin")
elem_data_single_int32(fbx_skin, b"Version", FBX_DEFORMER_SKIN_VERSION)
elem_data_single_float64(fbx_skin, b"Link_DeformAcuracy", 50.0) # Only vague idea what it is...
for bo, clstr_key in clusters.items():
# Find which vertices are affected by this bone/vgroup pair, and matching weights.
indices = []
weights = []
vg_idx = obj.vertex_groups[bo.name].index
for idx, v in enumerate(me.vertices):
vert_vg = [vg for vg in v.groups if vg.group == vg_idx]
if not vert_vg:
continue
indices.append(idx)
weights.append(vert_vg[0].weight)
# Create the cluster.
fbx_clstr = elem_data_single_int64(root, b"Deformer", get_fbxuid_from_key(clstr_key))
fbx_clstr.add_string(fbx_name_class(bo.name.encode(), b"SubDeformer"))
fbx_clstr.add_string(b"Cluster")
elem_data_single_int32(fbx_clstr, b"Version", FBX_DEFORMER_CLUSTER_VERSION)
# No idea what that user data might be...
fbx_userdata = elem_data_single_string(fbx_clstr, b"UserData", b"")
fbx_userdata.add_string(b"")
if indices:
elem_data_single_int32_array(fbx_clstr, b"Indexes", indices)
elem_data_single_float64_array(fbx_clstr, b"Weights", weights)
# Transform and TransformLink matrices...
# They seem to be mostly the same as BindPose ones???
# WARNING! Even though official FBX API presents Transform in global space,
# **it is stored in bone space in FBX data!** See:
# http://area.autodesk.com/forum/autodesk-fbx/fbx-sdk/why-the-values-return-
# by-fbxcluster-gettransformmatrix-x-not-same-with-the-value-in-ascii-fbx-file/
elem_data_single_float64_array(fbx_clstr, b"Transform",
matrix_to_array(mat_world_bones[bo].inverted() * mat_world_obj))
elem_data_single_float64_array(fbx_clstr, b"TransformLink", matrix_to_array(mat_world_bones[bo]))
def fbx_data_object_elements(root, obj, scene_data):
"""
Write the Object (Model) data blocks.
Note we handle "Model" part of bones as well here!
"""
obj_type = b"Null" # default, sort of empty...
if isinstance(obj, Bone):
obj_type = b"LimbNode"
elif (obj.type == 'MESH'):
obj_type = b"Mesh"
elif (obj.type == 'LAMP'):
obj_type = b"Light"
elif (obj.type == 'CAMERA'):
obj_type = b"Camera"
obj_key = scene_data.objects[obj]
model = elem_data_single_int64(root, b"Model", get_fbxuid_from_key(obj_key))
model.add_string(fbx_name_class(obj.name.encode(), b"Model"))
model.add_string(obj_type)
elem_data_single_int32(model, b"Version", FBX_MODELS_VERSION)
# Object transform info.
loc, rot, scale, matrix, matrix_rot = fbx_object_tx(scene_data, obj)
rot = tuple(units_convert_iter(rot, "radian", "degree"))
tmpl = scene_data.templates[b"Model"]
# For now add only loc/rot/scale...
props = elem_properties(model)
elem_props_template_set(tmpl, props, "p_lcl_translation", b"Lcl Translation", loc)
elem_props_template_set(tmpl, props, "p_lcl_rotation", b"Lcl Rotation", rot)
elem_props_template_set(tmpl, props, "p_lcl_scaling", b"Lcl Scaling", scale)
# TODO: "constraints" (limit loc/rot/scale, and target-to-object).
# Custom properties.
if scene_data.settings.use_custom_properties:
fbx_data_element_custom_properties(props, obj)
# Those settings would obviously need to be edited in a complete version of the exporter, may depends on
# object type, etc.
elem_data_single_int32(model, b"MultiLayer", 0)
elem_data_single_int32(model, b"MultiTake", 0)
elem_data_single_bool(model, b"Shading", True)
elem_data_single_string(model, b"Culling", b"CullingOff")
if isinstance(obj, Object) and obj.type == 'CAMERA':
# Why, oh why are FBX cameras such a mess???
# And WHY add camera data HERE??? Not even sure this is needed...
render = scene_data.scene.render
width = render.resolution_x * 1.0
height = render.resolution_y * 1.0
elem_props_template_set(tmpl, props, "p_enum", b"ResolutionMode", 0) # Don't know what it means
elem_props_template_set(tmpl, props, "p_double", b"AspectW", width)
elem_props_template_set(tmpl, props, "p_double", b"AspectH", height)
elem_props_template_set(tmpl, props, "p_bool", b"ViewFrustum", True)
elem_props_template_set(tmpl, props, "p_enum", b"BackgroundMode", 0) # Don't know what it means
elem_props_template_set(tmpl, props, "p_bool", b"ForegroundTransparent", True)
def fbx_data_animation_elements(root, scene_data):
"""
Write animation data.
"""
animations = scene_data.animations
if not animations:
return
scene = scene_data.scene
fps = scene.render.fps / scene.render.fps_base
def keys_to_ktimes(keys):
return (int(v) for v in units_convert_iter((f / fps for f, _v in keys), "second", "ktime"))
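# Note on units (assumption about the "ktime" conversion used above): FBX times are integer
# KTime ticks, 46,186,158,000 per second; e.g. at 25 fps, frame 50 -> 2.0 s -> 92,372,316,000.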
astack_key, alayers = animations
acn_tmpl = scene_data.templates[b"AnimationCurveNode"]
# Animation stack.
astack = elem_data_single_int64(root, b"AnimationStack", get_fbxuid_from_key(astack_key))
astack.add_string(fbx_name_class(scene.name.encode(), b"AnimStack"))
astack.add_string(b"")
for obj, (alayer_key, acurvenodes) in alayers.items():
# Animation layer.
alayer = elem_data_single_int64(root, b"AnimationLayer", get_fbxuid_from_key(alayer_key))
alayer.add_string(fbx_name_class(obj.name.encode(), b"AnimLayer"))
alayer.add_string(b"")
for fbx_prop, (acurvenode_key, acurves) in acurvenodes.items():
# Animation curve node.
acurvenode = elem_data_single_int64(root, b"AnimationCurveNode", get_fbxuid_from_key(acurvenode_key))
acurvenode.add_string(fbx_name_class(fbx_prop.encode(), b"AnimCurveNode"))
acurvenode.add_string(b"")
acn_props = elem_properties(acurvenode)
for fbx_item, (acurve_key, def_value, keys) in acurves.items():
elem_props_template_set(acn_tmpl, acn_props, "p_number", fbx_item.encode(), def_value, animatable=True)
# Only create Animation curve if needed!
if keys:
acurve = elem_data_single_int64(root, b"AnimationCurve", get_fbxuid_from_key(acurve_key))
acurve.add_string(fbx_name_class(b"", b"AnimCurve"))
acurve.add_string(b"")
# key attributes...
nbr_keys = len(keys)
# flags...
keyattr_flags = (1 << 3 | # interpolation mode, 1 = constant, 2 = linear, 3 = cubic.
1 << 8 | # tangent mode, 8 = auto, 9 = TCB, 10 = user, 11 = generic break,
1 << 13 | # tangent mode, 12 = generic clamp, 13 = generic time independent,
1 << 14 | # tangent mode, 13 + 14 = generic clamp progressive.
0,
)
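# For reference (computed from the bits above, not part of the original code): this is
# 1 << 3 | 1 << 8 | 1 << 13 | 1 << 14 == 24840, i.e. cubic interpolation with auto,
# time-independent / clamp-progressive tangents for every key.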
# Maybe values controlling TCB & co???
keyattr_datafloat = (0.0, 0.0, 9.419963346924634e-30, 0.0)
# And now, the *real* data!
elem_data_single_float64(acurve, b"Default", def_value)
elem_data_single_int32(acurve, b"KeyVer", FBX_ANIM_KEY_VERSION)
elem_data_single_int64_array(acurve, b"KeyTime", keys_to_ktimes(keys))
elem_data_single_float32_array(acurve, b"KeyValueFloat", (v for _f, v in keys))
elem_data_single_int32_array(acurve, b"KeyAttrFlags", keyattr_flags)
elem_data_single_float32_array(acurve, b"KeyAttrDataFloat", keyattr_datafloat)
elem_data_single_int32_array(acurve, b"KeyAttrRefCount", (nbr_keys,))
##### Top-level FBX data container. #####
# Helper container gathering some data we need multiple times:
# * templates.
# * objects.
# * connections.
# * takes.
FBXData = namedtuple("FBXData", (
"templates", "templates_users", "connections",
"settings", "scene", "objects", "animations",
"data_lamps", "data_cameras", "data_meshes", "mesh_mat_indices",
"data_bones", "data_deformers",
"data_world", "data_materials", "data_textures", "data_videos",
))
def fbx_mat_properties_from_texture(tex):
"""
Returns a set of FBX material properties that are affected by the given texture.
Quite obviously, this is a fuzzy and far-from-perfect mapping! Amounts of influence are completely lost, for example.
Note tex is actually expected to be a texture slot.
"""
# Tex influence does not exist in FBX, so assume influence < 0.5 = no influence... :/
INFLUENCE_THRESHOLD = 0.5
# Mapping Blender -> FBX (blend_use_name, blend_fact_name, fbx_name).
blend_to_fbx = (
# Lambert & Phong...
("diffuse", "diffuse", b"DiffuseFactor"),
("color_diffuse", "diffuse_color", b"DiffuseColor"),
("alpha", "alpha", b"TransparencyFactor"),
("diffuse", "diffuse", b"TransparentColor"), # Uses diffuse color in Blender!
("emit", "emit", b"EmissiveFactor"),
("diffuse", "diffuse", b"EmissiveColor"), # Uses diffuse color in Blender!
("ambient", "ambient", b"AmbientFactor"),
#("", "", b"AmbientColor"), # World stuff in Blender, for now ignore...
("normal", "normal", b"NormalMap"),
# Note: unsure about those... :/
#("", "", b"Bump"),
#("", "", b"BumpFactor"),
#("", "", b"DisplacementColor"),
#("", "", b"DisplacementFactor"),
# Phong only.
("specular", "specular", b"SpecularFactor"),
("color_spec", "specular_color", b"SpecularColor"),
# See Material template about those two!
("hardness", "hardness", b"Shininess"),
("hardness", "hardness", b"ShininessExponent"),
("mirror", "mirror", b"ReflectionColor"),
("raymir", "raymir", b"ReflectionFactor"),
)
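# Example of how the mapping above is used (illustration only): a texture slot with
# use_map_color_diffuse enabled and diffuse_color_factor = 1.0 yields {b"DiffuseColor"}, while
# use_map_alpha with alpha_factor = 0.3 adds nothing, since 0.3 < INFLUENCE_THRESHOLD.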
tex_fbx_props = set()
for use_map_name, name_factor, fbx_prop_name in blend_to_fbx:
if getattr(tex, "use_map_" + use_map_name) and getattr(tex, name_factor + "_factor") >= INFLUENCE_THRESHOLD:
tex_fbx_props.add(fbx_prop_name)
return tex_fbx_props
def fbx_skeleton_from_armature(scene, settings, armature, objects, data_bones, data_deformers, arm_parents):
"""
Create skeleton from armature/bones (NodeAttribute/LimbNode and Model/LimbNode), and for each deformed mesh,
create Pose/BindPose(with sub PoseNode) and Deformer/Skin(with Deformer/SubDeformer/Cluster).
Also supports "parent to bone" (simple parent to Model/LimbNode).
arm_parents is a set of tuples (armature, object) for all successful armature bindings.
"""
arm = armature.data
bones = {}
for bo in arm.bones:
key, data_key = get_blender_bone_key(armature, bo)
objects[bo] = key
data_bones[bo] = (key, data_key, armature)
bones[bo.name] = bo
for obj in objects.keys():
if not isinstance(obj, Object):
continue
if obj.type not in {'MESH'}:
continue
if obj.parent != armature:
continue
# Always handled by an Armature modifier...
found = False
for mod in obj.modifiers:
if mod.type not in {'ARMATURE'}:
continue
# We only support vertex groups binding method, not bone envelopes one!
if mod.object == armature and mod.use_vertex_groups:
found = True
break
if not found:
continue
# Now we have a mesh using this armature. First, find out which bones are concerned!
# XXX Assuming here non-used bones can have no cluster, this has to be checked!
used_bones = tuple(bones[vg.name] for vg in obj.vertex_groups if vg.name in bones)
if not used_bones:
continue
# Note: bindpose have no relations at all (no connections), so no need for any preprocess for them.
# Create skin & clusters relations (note skins are connected to geometry, *not* model!).
me = obj.data
clusters = {bo: get_blender_bone_cluster_key(armature, me, bo) for bo in used_bones}
data_deformers.setdefault(armature, {})[me] = (get_blender_armature_skin_key(armature, me), obj, clusters)
# We don't want a regular parent relationship for those in FBX...
arm_parents.add((armature, obj))
def fbx_animations_simplify(scene_data, animdata):
"""
Simplifies FCurves!
"""
fac = scene_data.settings.bake_anim_simplify_factor
step = scene_data.settings.bake_anim_step
# So that, with default factor and step values (1), we get:
max_frame_diff = step * fac * 10 # max step of 10 frames.
value_diff_fac = fac / 1000 # min value evolution: 0.1% of whole range.
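# Worked example (illustration only): with bake_anim_simplify_factor = 2.0 and step = 1.0,
# max_frame_diff = 20 frames and value_diff_fac = 0.002, so a channel spanning 0.0..10.0 only
# gets a new key once it has moved by at least 0.02, or after a 20-frame gap.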
for obj, keys in animdata.items():
if not keys:
continue
extremums = [(min(values), max(values)) for values in zip(*(k[1] for k in keys))]
min_diffs = [max((mx - mn) * value_diff_fac, 0.000001) for mn, mx in extremums]
p_currframe, p_key, p_key_write = keys[0]
p_keyed = [(p_currframe - max_frame_diff, val) for val in p_key]
for currframe, key, key_write in keys:
#if obj.name == "Cube":
#print(currframe, key, key_write)
for idx, (val, p_val) in enumerate(zip(key, p_key)):
p_keyedframe, p_keyedval = p_keyed[idx]
if val == p_val:
# Never write keyframe when value is exactly the same as prev one!
continue
if abs(val - p_val) >= min_diffs[idx]:
# If enough difference from previous sampled value, key this value *and* the previous one!
key_write[idx] = True
p_key_write[idx] = True
p_keyed[idx] = (currframe, val)
elif (abs(val - p_keyedval) >= min_diffs[idx]) or (currframe - p_keyedframe >= max_frame_diff):
# Else, if enough difference from previous keyed value (or max gap between keys is reached),
# key this value only!
key_write[idx] = True
p_keyed[idx] = (currframe, val)
p_currframe, p_key, p_key_write = currframe, key, key_write