Newer
Older
Bastien Montagne
committed
assert(fbx_layer_index is None)
blen_read_geom_array_setattr(blen_read_geom_array_gen_allsame(len(blen_data)),
blen_data, blen_attr, fbx_layer_data, stride, item_size, descr, xform)
return True
blen_read_geom_array_error_ref(descr, fbx_layer_ref, quiet)
blen_read_geom_array_error_mapping(descr, fbx_layer_mapping, quiet)
def blen_read_geom_layer_material(fbx_obj, mesh):
    """Read the FBX material layer and assign per-polygon material indices."""
    fbx_layer = elem_find_first(fbx_obj, b'LayerElementMaterial')

    if fbx_layer is None:
        return

    (fbx_layer_name,
     fbx_layer_mapping,
     fbx_layer_ref,
     ) = blen_read_geom_layerinfo(fbx_layer)

    layer_id = b'Materials'
    fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, layer_id))

    # Material indices are per-polygon, single int (stride 1, item size 1).
    blen_data = mesh.polygons
    blen_read_geom_array_mapped_polygon(
        mesh, blen_data, "material_index",
        fbx_layer_data, None,
        fbx_layer_mapping, fbx_layer_ref,
        1, 1, layer_id,
        )
def blen_read_geom_layer_uv(fbx_obj, mesh):
    """Read all FBX UV layers into new Blender UV layers on the mesh."""
    for layer_id in (b'LayerElementUV',):
        for fbx_layer in elem_find_iter(fbx_obj, layer_id):
            # all should be valid
            (fbx_layer_name,
             fbx_layer_mapping,
             fbx_layer_ref,
             ) = blen_read_geom_layerinfo(fbx_layer)

            fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, b'UV'))
            fbx_layer_index = elem_prop_first(elem_find_first(fbx_layer, b'UVIndex'))

            # Always init our new layers with (0, 0) UVs.
            uv_lay = mesh.uv_layers.new(name=fbx_layer_name, do_init=False)
            if uv_lay is None:
                print("Failed to add {%r %r} UVLayer to %r (probably too many of them?)"
                      "" % (layer_id, fbx_layer_name, mesh.name))
                continue

            blen_data = uv_lay.data

            # some valid files omit this data
            if fbx_layer_data is None:
                print("%r %r missing data" % (layer_id, fbx_layer_name))
                continue

            # UVs are per-loop, 2 floats each (stride 2, item size 2).
            blen_read_geom_array_mapped_polyloop(
                mesh, blen_data, "uv",
                fbx_layer_data, fbx_layer_index,
                fbx_layer_mapping, fbx_layer_ref,
                2, 2, layer_id,
                )
def blen_read_geom_layer_color(fbx_obj, mesh):
    """Read all FBX vertex color layers into new Blender vertex color layers."""
    # almost same as UV's
    for layer_id in (b'LayerElementColor',):
        for fbx_layer in elem_find_iter(fbx_obj, layer_id):
            # all should be valid
            (fbx_layer_name,
             fbx_layer_mapping,
             fbx_layer_ref,
             ) = blen_read_geom_layerinfo(fbx_layer)

            fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, b'Colors'))
            fbx_layer_index = elem_prop_first(elem_find_first(fbx_layer, b'ColorIndex'))

            # Always init our new layers with full white opaque color.
            color_lay = mesh.vertex_colors.new(name=fbx_layer_name, do_init=False)
            if color_lay is None:
                print("Failed to add {%r %r} vertex color layer to %r (probably too many of them?)"
                      "" % (layer_id, fbx_layer_name, mesh.name))
                continue

            blen_data = color_lay.data

            # some valid files omit this data
            if fbx_layer_data is None:
                print("%r %r missing data" % (layer_id, fbx_layer_name))
                continue

            # Colors are per-loop RGBA, 4 floats each (stride 4, item size 4).
            blen_read_geom_array_mapped_polyloop(
                mesh, blen_data, "color",
                fbx_layer_data, fbx_layer_index,
                fbx_layer_mapping, fbx_layer_ref,
                4, 4, layer_id,
                )
def blen_read_geom_layer_smooth(fbx_obj, mesh):
    """Read FBX smoothing data into the mesh.

    'ByEdge' data only marks sharp edges (+ enables auto-smooth) and returns
    False; 'ByPolygon' sets face smoothing and returns the mapping result.
    """
    fbx_layer = elem_find_first(fbx_obj, b'LayerElementSmoothing')

    if fbx_layer is None:
        return False

    # all should be valid
    (fbx_layer_name,
     fbx_layer_mapping,
     fbx_layer_ref,
     ) = blen_read_geom_layerinfo(fbx_layer)

    layer_id = b'Smoothing'
    fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, layer_id))

    # udk has 'Direct' mapped, with no Smoothing, not sure why, but ignore these
    if fbx_layer_data is None:
        return False

    if fbx_layer_mapping == b'ByEdge':
        # some models have bad edge data, we cant use this info...
        if not mesh.edges:
            print("warning skipping sharp edges data, no valid edges...")
            return False

        blen_data = mesh.edges
        blen_read_geom_array_mapped_edge(
            mesh, blen_data, "use_edge_sharp",
            fbx_layer_data, None,
            fbx_layer_mapping, fbx_layer_ref,
            1, 1, layer_id,
            xform=lambda s: not s,  # FBX stores "smooth", Blender stores "sharp".
            )
        # We only set sharp edges here, not face smoothing itself...
        mesh.use_auto_smooth = True
        return False
    elif fbx_layer_mapping == b'ByPolygon':
        blen_data = mesh.polygons
        return blen_read_geom_array_mapped_polygon(
            mesh, blen_data, "use_smooth",
            fbx_layer_data, None,
            fbx_layer_mapping, fbx_layer_ref,
            1, 1, layer_id,
            xform=lambda s: (s != 0),  # smoothgroup bitflags, treat as booleans for now
            )
    else:
        print("warning layer %r mapping type unsupported: %r" % (fbx_layer.id, fbx_layer_mapping))
        return False
def blen_read_geom_layer_edge_crease(fbx_obj, mesh):
    """Read FBX per-edge crease data into the mesh; return True on success."""
    from math import sqrt

    fbx_layer = elem_find_first(fbx_obj, b'LayerElementEdgeCrease')

    if fbx_layer is None:
        return False

    # all should be valid
    (fbx_layer_name,
     fbx_layer_mapping,
     fbx_layer_ref,
     ) = blen_read_geom_layerinfo(fbx_layer)

    # Crease data only makes sense per-edge.
    if fbx_layer_mapping != b'ByEdge':
        return False

    layer_id = b'EdgeCrease'
    fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, layer_id))

    if fbx_layer_mapping == b'ByEdge':
        # some models have bad edge data, we cant use this info...
        if not mesh.edges:
            print("warning skipping edge crease data, no valid edges...")
            return False

        blen_data = mesh.edges
        return blen_read_geom_array_mapped_edge(
            mesh, blen_data, "crease",
            fbx_layer_data, None,
            fbx_layer_mapping, fbx_layer_ref,
            1, 1, layer_id,
            # Blender squares those values before sending them to OpenSubdiv, when other softwares don't,
            # so we need to compensate that to get similar results through FBX...
            xform=sqrt,
            )
    else:
        print("warning layer %r mapping type unsupported: %r" % (fbx_layer.id, fbx_layer_mapping))
        return False
Bastien Montagne
committed
def blen_read_geom_layer_normal(fbx_obj, mesh, xform=None):
    """Read the FBX normals layer into (split) loop normals.

    Tries loop, polygon then vertex mapping; polygon/vertex normals are read
    into a temp list and copied onto loops. Returns True on success.
    """
    fbx_layer = elem_find_first(fbx_obj, b'LayerElementNormal')

    if fbx_layer is None:
        return False

    (fbx_layer_name,
     fbx_layer_mapping,
     fbx_layer_ref,
     ) = blen_read_geom_layerinfo(fbx_layer)

    layer_id = b'Normals'
    fbx_layer_data = elem_prop_first(elem_find_first(fbx_layer, layer_id))
    fbx_layer_index = elem_prop_first(elem_find_first(fbx_layer, b'NormalsIndex'))

    if fbx_layer_data is None:
        print("warning %r %r missing data" % (layer_id, fbx_layer_name))
        return False

    # try loops, then polygons, then vertices.
    tries = ((mesh.loops, "Loops", False, blen_read_geom_array_mapped_polyloop),
             (mesh.polygons, "Polygons", True, blen_read_geom_array_mapped_polygon),
             (mesh.vertices, "Vertices", True, blen_read_geom_array_mapped_vert))
    for blen_data, blen_data_type, is_fake, func in tries:
        # "Fake" targets read into a plain list, later copied onto loop normals.
        bdata = [None] * len(blen_data) if is_fake else blen_data
        if func(mesh, bdata, "normal",
                fbx_layer_data, fbx_layer_index, fbx_layer_mapping, fbx_layer_ref, 3, 3, layer_id, xform, True):
            if blen_data_type == "Polygons":
                # Spread each polygon normal over all of its loops.
                for pidx, p in enumerate(mesh.polygons):
                    for lidx in range(p.loop_start, p.loop_start + p.loop_total):
                        mesh.loops[lidx].normal[:] = bdata[pidx]
            elif blen_data_type == "Vertices":
                # We have to copy vnors to lnors! Far from elegant, but simple.
                for l in mesh.loops:
                    l.normal[:] = bdata[l.vertex_index]
            return True

    blen_read_geom_array_error_mapping("normal", fbx_layer_mapping)
    blen_read_geom_array_error_ref("normal", fbx_layer_ref)
    return False
Jens Ch. Restemeier
committed
def blen_read_geom(fbx_tmpl, fbx_obj, settings):
    """Create and return a Blender Mesh from an FBX Geometry element.

    Reads vertices, polygons, edges and all supported layers (materials, UVs,
    colors, smoothing, creases, custom normals).
    """
    from itertools import chain
    import array

    # Vertices are in object space, but we are post-multiplying all transforms with the inverse of the
    # global matrix, so we need to apply the global matrix to the vertices to get the correct result.
    geom_mat_co = settings.global_matrix if settings.bake_space_transform else None
    # We need to apply the inverse transpose of the global matrix when transforming normals.
    geom_mat_no = Matrix(settings.global_matrix_inv_transposed) if settings.bake_space_transform else None
    if geom_mat_no is not None:
        # Remove translation & scaling!
        geom_mat_no.translation = Vector()
        geom_mat_no.normalize()

    # TODO, use 'fbx_tmpl'
    elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'Geometry')

    fbx_verts = elem_prop_first(elem_find_first(fbx_obj, b'Vertices'))
    fbx_polys = elem_prop_first(elem_find_first(fbx_obj, b'PolygonVertexIndex'))
    fbx_edges = elem_prop_first(elem_find_first(fbx_obj, b'Edges'))

    if geom_mat_co is not None:
        def _vcos_transformed_gen(raw_cos, m=None):
            # Note: we could most likely get much better performances with numpy, but will leave this as TODO for now.
            return chain(*(m @ Vector(v) for v in zip(*(iter(raw_cos),) * 3)))
        fbx_verts = array.array(fbx_verts.typecode, _vcos_transformed_gen(fbx_verts, geom_mat_co))

    if fbx_verts is None:
        fbx_verts = ()
    if fbx_polys is None:
        fbx_polys = ()

    mesh = bpy.data.meshes.new(name=elem_name_utf8)
    mesh.vertices.add(len(fbx_verts) // 3)
    mesh.vertices.foreach_set("co", fbx_verts)

    if fbx_polys:
        mesh.loops.add(len(fbx_polys))
        poly_loop_starts = []
        poly_loop_totals = []
        poly_loop_prev = 0
        for i, l in enumerate(mesh.loops):
            index = fbx_polys[i]
            if index < 0:
                # Negative index marks the last loop of a polygon; real index is its bitwise complement.
                poly_loop_starts.append(poly_loop_prev)
                poly_loop_totals.append((i - poly_loop_prev) + 1)
                poly_loop_prev = i + 1
                index ^= -1
            l.vertex_index = index

        mesh.polygons.add(len(poly_loop_starts))
        mesh.polygons.foreach_set("loop_start", poly_loop_starts)
        mesh.polygons.foreach_set("loop_total", poly_loop_totals)

        blen_read_geom_layer_material(fbx_obj, mesh)
        blen_read_geom_layer_uv(fbx_obj, mesh)
        blen_read_geom_layer_color(fbx_obj, mesh)

    if fbx_edges:
        # edges in fact index the polygons (NOT the vertices)
        tot_edges = len(fbx_edges)
        edges_conv = array.array('i', [0]) * (tot_edges * 2)

        edge_index = 0
        for i in fbx_edges:
            e_a = fbx_polys[i]
            if e_a >= 0:
                e_b = fbx_polys[i + 1]
                if e_b < 0:
                    e_b ^= -1
            else:
                # Last index of polygon, wrap back to the start.

                # ideally we wouldn't have to search back,
                # but it should only be 2-3 iterations.
                j = i - 1
                while j >= 0 and fbx_polys[j] >= 0:
                    j -= 1
                e_a ^= -1
                e_b = fbx_polys[j + 1]

            edges_conv[edge_index] = e_a
            edges_conv[edge_index + 1] = e_b
            edge_index += 2

        mesh.edges.add(tot_edges)
        mesh.edges.foreach_set("vertices", edges_conv)

    # must be after edge, face loading.
    ok_smooth = blen_read_geom_layer_smooth(fbx_obj, mesh)

    ok_crease = blen_read_geom_layer_edge_crease(fbx_obj, mesh)

    ok_normals = False
    if settings.use_custom_normals:
        # Note: we store 'temp' normals in loops, since validate() may alter final mesh,
        #       we can only set custom lnors *after* calling it.
        mesh.create_normals_split()
        if geom_mat_no is None:
            ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh)
        else:
            def nortrans(v):
                return geom_mat_no @ Vector(v)
            ok_normals = blen_read_geom_layer_normal(fbx_obj, mesh, nortrans)

    mesh.validate(clean_customdata=False)  # *Very* important to not remove lnors here!

    if ok_normals:
        clnors = array.array('f', [0.0] * (len(mesh.loops) * 3))
        mesh.loops.foreach_get("normal", clnors)

        if not ok_smooth:
            mesh.polygons.foreach_set("use_smooth", [True] * len(mesh.polygons))
            ok_smooth = True

        mesh.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3)))
        mesh.use_auto_smooth = True
    else:
        if settings.use_custom_normals:
            mesh.free_normals_split()

    if not ok_smooth:
        mesh.polygons.foreach_set("use_smooth", [True] * len(mesh.polygons))

    if ok_crease:
        mesh.use_customdata_edge_crease = True

    if settings.use_custom_props:
        blen_read_custom_properties(fbx_obj, mesh, settings)

    return mesh
Bastien Montagne
committed
def blen_read_shape(fbx_tmpl, fbx_sdata, fbx_bcdata, meshes, scene):
    """Create shape keys on all meshes affected by one FBX blend-shape channel.

    Returns the list of created key blocks.
    """
    elem_name_utf8 = elem_name_ensure_class(fbx_sdata, b'Geometry')
    indices = elem_prop_first(elem_find_first(fbx_sdata, b'Indexes'), default=())
    dvcos = tuple(co for co in zip(*[iter(elem_prop_first(elem_find_first(fbx_sdata, b'Vertices'), default=()))] * 3))
    # We completely ignore normals here!
    weight = elem_prop_first(elem_find_first(fbx_bcdata, b'DeformPercent'), default=100.0) / 100.0
    vgweights = tuple(vgw / 100.0 for vgw in elem_prop_first(elem_find_first(fbx_bcdata, b'FullWeights'), default=()))

    # Special case, in case all weights are the same, FullWeight can have only one element - *sigh!*
    nbr_indices = len(indices)
    if len(vgweights) == 1 and nbr_indices > 1:
        vgweights = (vgweights[0],) * nbr_indices

    assert(len(vgweights) == nbr_indices == len(dvcos))
    # Only create a vertex group when some weight differs from 1.0.
    create_vg = bool(set(vgweights) - {1.0})

    keyblocks = []

    for me, objects in meshes:
        vcos = tuple((idx, me.vertices[idx].co + Vector(dvco)) for idx, dvco in zip(indices, dvcos))
        objects = list({node.bl_obj for node in objects})
        assert(objects)

        if me.shape_keys is None:
            # First shape key on this mesh: add the implicit Basis key.
            objects[0].shape_key_add(name="Basis", from_mix=False)
        kb = objects[0].shape_key_add(name=elem_name_utf8, from_mix=False)
        me.shape_keys.use_relative = True  # Should already be set as such.

        for idx, co in vcos:
            kb.data[idx].co[:] = co
        kb.value = weight

        # Add vgroup if necessary.
        if create_vg:
            add_vgroup_to_objects(indices, vgweights, kb.name, objects)
            kb.vertex_group = kb.name

        keyblocks.append(kb)

    return keyblocks
# --------
# Material
Jens Ch. Restemeier
committed
def blen_read_material(fbx_tmpl, fbx_obj, settings):
    """Create and return a Blender Material from an FBX Material element.

    Maps FBX Phong-style properties onto a Principled BSDF node wrapper;
    several conversions below are empirical (shininess, alpha) and
    order-dependent, so do not reorder the property reads.
    """
    from bpy_extras import node_shader_utils
    from math import sqrt

    elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'Material')

    # Wrapper is cached so textures can be attached to it later.
    nodal_material_wrap_map = settings.nodal_material_wrap_map
    ma = bpy.data.materials.new(name=elem_name_utf8)

    const_color_white = 1.0, 1.0, 1.0
    const_color_black = 0.0, 0.0, 0.0

    # Object properties are looked up first, then the template's defaults.
    fbx_props = (elem_find_first(fbx_obj, b'Properties70'),
                 elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))
    # Variant that deliberately ignores the template (see alpha handling below).
    fbx_props_no_template = (fbx_props[0], fbx_elem_nil)

    ma_wrap = node_shader_utils.PrincipledBSDFWrapper(ma, is_readonly=False, use_nodes=True)

    ma_wrap.base_color = elem_props_get_color_rgb(fbx_props, b'DiffuseColor', const_color_white)
    # No specular color in Principled BSDF shader, assumed to be either white or take some tint from diffuse one...
    # TODO: add way to handle tint option (guesstimate from spec color + intensity...)?
    ma_wrap.specular = elem_props_get_number(fbx_props, b'SpecularFactor', 0.25) * 2.0
    # XXX Totally empirical conversion, trying to adapt it
    #     (from 1.0 - 0.0 Principled BSDF range to 0.0 - 100.0 FBX shininess range)...
    fbx_shininess = elem_props_get_number(fbx_props, b'Shininess', 20.0)
    ma_wrap.roughness = 1.0 - (sqrt(fbx_shininess) / 10.0)

    # Sweetness... Looks like we are not the only ones to not know exactly how FBX is supposed to work (see T59850).
    # According to one of its developers, Unity uses that formula to extract alpha value:
    #
    #   alpha = 1 - TransparencyFactor
    #   if (alpha == 1 or alpha == 0):
    #       alpha = 1 - TransparentColor.r
    #
    # Until further info, let's assume this is correct way to do, hence the following code for TransparentColor.
    # However, there are some cases (from 3DSMax, see T65065), where we do have TransparencyFactor only defined
    # in the template to 0.0, and then materials defining TransparentColor to pure white (1.0, 1.0, 1.0),
    # and setting alpha value in Opacity... try to cope with that too. :((((
    alpha = 1.0 - elem_props_get_number(fbx_props, b'TransparencyFactor', 0.0)
    if (alpha == 1.0 or alpha == 0.0):
        alpha = elem_props_get_number(fbx_props_no_template, b'Opacity', None)
        if alpha is None:
            alpha = 1.0 - elem_props_get_color_rgb(fbx_props, b'TransparentColor', const_color_black)[0]
    ma_wrap.alpha = alpha

    ma_wrap.metallic = elem_props_get_number(fbx_props, b'ReflectionFactor', 0.0)
    # We have no metallic (a.k.a. reflection) color...
    # elem_props_get_color_rgb(fbx_props, b'ReflectionColor', const_color_white)
    ma_wrap.normalmap_strength = elem_props_get_number(fbx_props, b'BumpFactor', 1.0)
    # Emission strength and color
    ma_wrap.emission_strength = elem_props_get_number(fbx_props, b'EmissiveFactor', 1.0)
    ma_wrap.emission_color = elem_props_get_color_rgb(fbx_props, b'EmissiveColor', const_color_black)

    nodal_material_wrap_map[ma] = ma_wrap

    if settings.use_custom_props:
        blen_read_custom_properties(fbx_obj, ma, settings)

    return ma
# -------
Bastien Montagne
committed
# Image & Texture
Bastien Montagne
committed
def blen_read_texture_image(fbx_tmpl, fbx_obj, basedir, settings):
    """Load (or fetch from cache) the image referenced by an FBX Texture/Video node."""
    import os
    from bpy_extras import image_utils

    def pack_data_from_content(image, fbx_obj):
        # Pack embedded binary content (if any) into the image datablock.
        data = elem_find_first_bytes(fbx_obj, b'Content')
        if (data):
            data_len = len(data)
            if (data_len):
                image.pack(data=data, data_len=data_len)

    elem_name_utf8 = elem_name_ensure_classes(fbx_obj, {b'Texture', b'Video'})

    image_cache = settings.image_cache

    # Yet another beautiful logic demonstration by Master FBX:
    # * RelativeFilename in both Video and Texture nodes.
    # * FileName in texture nodes.
    # * Filename in video nodes.
    # Aaaaaaaarrrrrrrrgggggggggggg!!!!!!!!!!!!!!
    filepath = elem_find_first_string(fbx_obj, b'RelativeFilename')
    if filepath:
        # Make sure we do handle a relative path, and not an absolute one (see D5143).
        filepath = filepath.lstrip(os.path.sep).lstrip(os.path.altsep)
        filepath = os.path.join(basedir, filepath)
    else:
        filepath = elem_find_first_string(fbx_obj, b'FileName')
    if not filepath:
        filepath = elem_find_first_string(fbx_obj, b'Filename')
    if not filepath:
        print("Error, could not find any file path in ", fbx_obj)
        print("       Falling back to: ", elem_name_utf8)
        filepath = elem_name_utf8
    else:
        # Normalize separators to the host platform's convention.
        filepath = filepath.replace('\\', '/') if (os.sep == '/') else filepath.replace('/', '\\')

    image = image_cache.get(filepath)
    if image is not None:
        # Data is only embedded once, we may have already created the image but still be missing its data!
        if not image.has_data:
            pack_data_from_content(image, fbx_obj)
        return image

    image = image_utils.load_image(
        filepath,
        dirname=basedir,
        place_holder=True,
        recursive=settings.use_image_search,
    )

    # Try to use embedded data, if available!
    pack_data_from_content(image, fbx_obj)

    image_cache[filepath] = image
    # name can be ../a/b/c
    image.name = os.path.basename(elem_name_utf8)

    if settings.use_custom_props:
        blen_read_custom_properties(fbx_obj, image, settings)

    return image
def blen_read_camera(fbx_tmpl, fbx_obj, global_scale):
    """Create and return a Blender Camera from an FBX NodeAttribute element."""
    # meters to inches
    M2I = 0.0393700787

    elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'NodeAttribute')

    fbx_props = (elem_find_first(fbx_obj, b'Properties70'),
                 elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))

    camera = bpy.data.cameras.new(name=elem_name_utf8)

    camera.type = 'ORTHO' if elem_props_get_enum(fbx_props, b'CameraProjectionType', 0) == 1 else 'PERSP'

    camera.lens = elem_props_get_number(fbx_props, b'FocalLength', 35.0)
    # FBX film sizes are in inches, Blender sensor sizes in millimeters.
    camera.sensor_width = elem_props_get_number(fbx_props, b'FilmWidth', 32.0 * M2I) / M2I
    camera.sensor_height = elem_props_get_number(fbx_props, b'FilmHeight', 32.0 * M2I) / M2I

    camera.ortho_scale = elem_props_get_number(fbx_props, b'OrthoZoom', 1.0)

    filmaspect = camera.sensor_width / camera.sensor_height
    # film offset
    camera.shift_x = elem_props_get_number(fbx_props, b'FilmOffsetX', 0.0) / (M2I * camera.sensor_width)
    camera.shift_y = elem_props_get_number(fbx_props, b'FilmOffsetY', 0.0) / (M2I * camera.sensor_height * filmaspect)

    camera.clip_start = elem_props_get_number(fbx_props, b'NearPlane', 0.01) * global_scale
    camera.clip_end = elem_props_get_number(fbx_props, b'FarPlane', 100.0) * global_scale

    return camera
def blen_read_light(fbx_tmpl, fbx_obj, global_scale):
    """Create and return a Blender Light from an FBX NodeAttribute element."""
    import math

    elem_name_utf8 = elem_name_ensure_class(fbx_obj, b'NodeAttribute')

    fbx_props = (elem_find_first(fbx_obj, b'Properties70'),
                 elem_find_first(fbx_tmpl, b'Properties70', fbx_elem_nil))

    light_type = {
        0: 'POINT',
        1: 'SUN',
        2: 'SPOT'}.get(elem_props_get_enum(fbx_props, b'LightType', 0), 'POINT')

    lamp = bpy.data.lights.new(name=elem_name_utf8, type=light_type)

    if light_type == 'SPOT':
        spot_size = elem_props_get_number(fbx_props, b'OuterAngle', None)
        if spot_size is None:
            # Deprecated.
            spot_size = elem_props_get_number(fbx_props, b'Cone angle', 45.0)
        lamp.spot_size = math.radians(spot_size)

        spot_blend = elem_props_get_number(fbx_props, b'InnerAngle', None)
        if spot_blend is None:
            # Deprecated.
            spot_blend = elem_props_get_number(fbx_props, b'HotSpot', 45.0)
        # Guard against degenerate zero-size cones (would divide by zero).
        lamp.spot_blend = 1.0 - (spot_blend / spot_size) if spot_size else 0.0

    lamp.color = elem_props_get_color_rgb(fbx_props, b'Color', (1.0, 1.0, 1.0))
    lamp.energy = elem_props_get_number(fbx_props, b'Intensity', 100.0) / 100.0
    lamp.distance = elem_props_get_number(fbx_props, b'DecayStart', 25.0) * global_scale
    lamp.use_shadow = elem_props_get_bool(fbx_props, b'CastShadow', True)
    if hasattr(lamp, "cycles"):
        lamp.cycles.cast_shadow = lamp.use_shadow
    # Keeping this for now, but this is not used nor exposed anymore afaik...
    lamp.shadow_color = elem_props_get_color_rgb(fbx_props, b'ShadowColor', (0.0, 0.0, 0.0))

    return lamp
Bastien Montagne
committed
# ### Import Utility class
class FbxImportHelperNode:
    """
    Temporary helper node to store a hierarchy of fbxNode objects before building Objects, Armatures and Bones.
    It tries to keep the correction data in one place so it can be applied consistently to the imported data.
    """

    __slots__ = (
        '_parent', 'anim_compensation_matrix', 'is_global_animation', 'armature_setup', 'armature', 'bind_matrix',
        'bl_bone', 'bl_data', 'bl_obj', 'bone_child_matrix', 'children', 'clusters',
        'fbx_elem', 'fbx_name', 'fbx_transform_data', 'fbx_type',
        'is_armature', 'has_bone_children', 'is_bone', 'is_root', 'is_leaf',
        'matrix', 'matrix_as_parent', 'matrix_geom', 'meshes', 'post_matrix', 'pre_matrix')
Bastien Montagne
committed
def __init__(self, fbx_elem, bl_data, fbx_transform_data, is_bone):
self.fbx_name = elem_name_ensure_class(fbx_elem, b'Model') if fbx_elem else 'Unknown'
self.fbx_type = fbx_elem.props[2] if fbx_elem else None
self.fbx_elem = fbx_elem
self.bl_obj = None
self.bl_data = bl_data
self.bl_bone = None # Name of bone if this is a bone (this may be different to fbx_name if there was a name conflict in Blender!)
self.fbx_transform_data = fbx_transform_data
self.is_root = False
self.is_bone = is_bone
self.is_armature = False
self.armature = None # For bones only, relevant armature node.
Bastien Montagne
committed
self.has_bone_children = False # True if the hierarchy below this node contains bones, important to support mixed hierarchies.
Bastien Montagne
committed
self.is_leaf = False # True for leaf-bones added to the end of some bone chains to set the lengths.
Bastien Montagne
committed
self.pre_matrix = None # correction matrix that needs to be applied before the FBX transform
self.bind_matrix = None # for bones this is the matrix used to bind to the skin
if fbx_transform_data:
self.matrix, self.matrix_as_parent, self.matrix_geom = blen_read_object_transform_do(fbx_transform_data)
else:
self.matrix, self.matrix_as_parent, self.matrix_geom = (None, None, None)
Bastien Montagne
committed
self.post_matrix = None # correction matrix that needs to be applied after the FBX transform
self.bone_child_matrix = None # Objects attached to a bone end not the beginning, this matrix corrects for that
# XXX Those two are to handle the fact that rigged meshes are not linked to their armature in FBX, which implies
# that their animation is in global space (afaik...).
# This is actually not really solvable currently, since anim_compensation_matrix is not valid if armature
# itself is animated (we'd have to recompute global-to-local anim_compensation_matrix for each frame,
# and for each armature action... beyond being an insane work).
# Solution for now: do not read rigged meshes animations at all! sic...
Bastien Montagne
committed
self.anim_compensation_matrix = None # a mesh moved in the hierarchy may have a different local matrix. This compensates animations for this.
self.is_global_animation = False
Bastien Montagne
committed
self.meshes = None # List of meshes influenced by this bone.
self.clusters = [] # Deformer Cluster nodes
self.armature_setup = {} # mesh and armature matrix when the mesh was bound
Bastien Montagne
committed
self._parent = None
self.children = []
    @property
    def parent(self):
        # Owning FbxImportHelperNode, or None for the (virtual) root.
        return self._parent

    @parent.setter
    def parent(self, value):
        # Reparenting keeps both old and new parents' children lists in sync.
        if self._parent is not None:
            self._parent.children.remove(self)
        self._parent = value
        if self._parent is not None:
            self._parent.children.append(self)
Bastien Montagne
committed
    @property
    def ignore(self):
        # Whether this node should be skipped when building Blender data.
        # Separating leaf status from ignore status itself.
        # Currently they are equivalent, but this may change in future.
        return self.is_leaf
def __repr__(self):
if self.fbx_elem:
return self.fbx_elem.props[1].decode()
else:
return "None"
Bastien Montagne
committed
def print_info(self, indent=0):
print(" " * indent + (self.fbx_name if self.fbx_name else "(Null)")
+ ("[root]" if self.is_root else "")
Bastien Montagne
committed
+ ("[leaf]" if self.is_leaf else "")
Bastien Montagne
committed
+ ("[ignore]" if self.ignore else "")
+ ("[armature]" if self.is_armature else "")
+ ("[bone]" if self.is_bone else "")
+ ("[HBC]" if self.has_bone_children else "")
)
for c in self.children:
c.print_info(indent + 1)
def mark_leaf_bones(self):
if self.is_bone and len(self.children) == 1:
child = self.children[0]
if child.is_bone and len(child.children) == 0:
Bastien Montagne
committed
child.is_leaf = True
Bastien Montagne
committed
for child in self.children:
child.mark_leaf_bones()
def do_bake_transform(self, settings):
return (settings.bake_space_transform and self.fbx_type in (b'Mesh', b'Null') and
not self.is_armature and not self.is_bone)
def find_correction_matrix(self, settings, parent_correction_inv=None):
from bpy_extras.io_utils import axis_conversion
if self.parent and (self.parent.is_root or self.parent.do_bake_transform(settings)):
self.pre_matrix = settings.global_matrix
if parent_correction_inv:
self.pre_matrix = parent_correction_inv @ (self.pre_matrix if self.pre_matrix else Matrix())
Bastien Montagne
committed
correction_matrix = None
if self.is_bone:
if settings.automatic_bone_orientation:
# find best orientation to align bone with
bone_children = tuple(child for child in self.children if child.is_bone)
Bastien Montagne
committed
if len(bone_children) == 0:
# no children, inherit the correction from parent (if possible)
if self.parent and self.parent.is_bone:
correction_matrix = parent_correction_inv.inverted() if parent_correction_inv else None
else:
# else find how best to rotate the bone to align the Y axis with the children
best_axis = (1, 0, 0)
if len(bone_children) == 1:
vec = bone_children[0].get_bind_matrix().to_translation()
Bastien Montagne
committed
best_axis = Vector((0, 0, 1 if vec[2] >= 0 else -1))
if abs(vec[0]) > abs(vec[1]):
if abs(vec[0]) > abs(vec[2]):
best_axis = Vector((1 if vec[0] >= 0 else -1, 0, 0))
elif abs(vec[1]) > abs(vec[2]):
best_axis = Vector((0, 1 if vec[1] >= 0 else -1, 0))
else:
# get the child directions once because they may be checked several times
child_locs = (child.get_bind_matrix().to_translation() for child in bone_children)
child_locs = tuple(loc.normalized() for loc in child_locs if loc.magnitude > 0.0)
Bastien Montagne
committed
1751
1752
1753
1754
1755
1756
1757
1758
1759
1760
1761
1762
1763
1764
1765
1766
1767
1768
1769
1770
1771
1772
1773
1774
1775
1776
1777
1778
1779
1780
1781
1782
1783
1784
1785
1786
1787
1788
1789
1790
1791
1792
1793
1794
1795
1796
1797
1798
1799
1800
1801
1802
1803
1804
1805
1806
1807
1808
1809
# I'm not sure which one I like better...
if False:
best_angle = -1.0
for i in range(6):
a = i // 2
s = -1 if i % 2 == 1 else 1
test_axis = Vector((s if a == 0 else 0, s if a == 1 else 0, s if a == 2 else 0))
# find max angle to children
max_angle = 1.0
for loc in child_locs:
max_angle = min(max_angle, test_axis.dot(loc))
# is it better than the last one?
if best_angle < max_angle:
best_angle = max_angle
best_axis = test_axis
else:
best_angle = -1.0
for vec in child_locs:
test_axis = Vector((0, 0, 1 if vec[2] >= 0 else -1))
if abs(vec[0]) > abs(vec[1]):
if abs(vec[0]) > abs(vec[2]):
test_axis = Vector((1 if vec[0] >= 0 else -1, 0, 0))
elif abs(vec[1]) > abs(vec[2]):
test_axis = Vector((0, 1 if vec[1] >= 0 else -1, 0))
# find max angle to children
max_angle = 1.0
for loc in child_locs:
max_angle = min(max_angle, test_axis.dot(loc))
# is it better than the last one?
if best_angle < max_angle:
best_angle = max_angle
best_axis = test_axis
# convert best_axis to axis string
to_up = 'Z' if best_axis[2] >= 0 else '-Z'
if abs(best_axis[0]) > abs(best_axis[1]):
if abs(best_axis[0]) > abs(best_axis[2]):
to_up = 'X' if best_axis[0] >= 0 else '-X'
elif abs(best_axis[1]) > abs(best_axis[2]):
to_up = 'Y' if best_axis[1] >= 0 else '-Y'
to_forward = 'X' if to_up not in {'X', '-X'} else 'Y'
# Build correction matrix
if (to_up, to_forward) != ('Y', 'X'):
correction_matrix = axis_conversion(from_forward='X',
from_up='Y',
to_forward=to_forward,
to_up=to_up,
).to_4x4()
else:
correction_matrix = settings.bone_correction_matrix
else:
# camera and light can be hard wired
if self.fbx_type == b'Camera':
Bastien Montagne
committed
correction_matrix = MAT_CONVERT_CAMERA
Bastien Montagne
committed
elif self.fbx_type == b'Light':
correction_matrix = MAT_CONVERT_LIGHT
Bastien Montagne
committed
self.post_matrix = correction_matrix
if self.do_bake_transform(settings):
self.post_matrix = settings.global_matrix_inv @ (self.post_matrix if self.post_matrix else Matrix())
Bastien Montagne
committed
# process children
correction_matrix_inv = correction_matrix.inverted_safe() if correction_matrix else None
Bastien Montagne
committed
for child in self.children:
child.find_correction_matrix(settings, correction_matrix_inv)
    def find_armature_bones(self, armature):
        # Tag every bone in this subtree with its owning armature node.
        for child in self.children:
            if child.is_bone:
                child.armature = armature
                # Only recurse through bones; a non-bone child ends the chain.
                child.find_armature_bones(armature)
Bastien Montagne
committed
def find_armatures(self):
needs_armature = False
for child in self.children:
if child.is_bone:
needs_armature = True
break
if needs_armature:
if self.fbx_type in {b'Null', b'Root'}:
Bastien Montagne
committed
# if empty then convert into armature
self.is_armature = True
armature = self
Bastien Montagne
committed
else:
# otherwise insert a new node
# XXX Maybe in case self is virtual FBX root node, we should instead add one armature per bone child?
Bastien Montagne
committed
armature = FbxImportHelperNode(None, None, None, False)
armature.fbx_name = "Armature"
armature.is_armature = True
for child in tuple(self.children):
Bastien Montagne
committed
if child.is_bone:
child.parent = armature
armature.parent = self
armature.find_armature_bones(armature)
Bastien Montagne
committed
for child in self.children:
Bastien Montagne
committed
continue
child.find_armatures()
def find_bone_children(self):
has_bone_children = False
for child in self.children:
has_bone_children |= child.find_bone_children()
self.has_bone_children = has_bone_children
return self.is_bone or has_bone_children
def find_fake_bones(self, in_armature=False):
if in_armature and not self.is_bone and self.has_bone_children:
self.is_bone = True
# if we are not a null node we need an intermediate node for the data
if self.fbx_type not in {b'Null', b'Root'}:
Bastien Montagne
committed
node = FbxImportHelperNode(self.fbx_elem, self.bl_data, None, False)
self.fbx_elem = None
self.bl_data = None
# transfer children
for child in self.children:
if child.is_bone or child.has_bone_children:
continue
child.parent = node
# attach to parent
node.parent = self
if self.is_armature:
in_armature = True
for child in self.children:
child.find_fake_bones(in_armature)
def get_world_matrix_as_parent(self):
matrix = self.parent.get_world_matrix_as_parent() if self.parent else Matrix()
if self.matrix_as_parent:
return matrix
Bastien Montagne
committed
def get_world_matrix(self):
matrix = self.parent.get_world_matrix_as_parent() if self.parent else Matrix()
Bastien Montagne
committed
if self.matrix:
Bastien Montagne
committed
return matrix
def get_matrix(self):
matrix = self.matrix if self.matrix else Matrix()
if self.pre_matrix:
Bastien Montagne
committed
if self.post_matrix:
Bastien Montagne
committed
return matrix
def get_bind_matrix(self):
matrix = self.bind_matrix if self.bind_matrix else Matrix()
if self.pre_matrix:
Bastien Montagne
committed
if self.post_matrix:
Bastien Montagne
committed
return matrix
def make_bind_pose_local(self, parent_matrix=None):
if parent_matrix is None:
parent_matrix = Matrix()
if self.bind_matrix:
bind_matrix = parent_matrix.inverted_safe() @ self.bind_matrix
Bastien Montagne
committed
else:
bind_matrix = self.matrix.copy() if self.matrix else None
self.bind_matrix = bind_matrix
if bind_matrix:
parent_matrix = parent_matrix @ bind_matrix
Bastien Montagne
committed
for child in self.children:
child.make_bind_pose_local(parent_matrix)
def collect_skeleton_meshes(self, meshes):
for _, m in self.clusters:
meshes.update(m)
for child in self.children:
if not child.meshes:
child.collect_skeleton_meshes(meshes)
Bastien Montagne
committed
def collect_armature_meshes(self):
if self.is_armature:
armature_matrix_inv = self.get_world_matrix().inverted_safe()
Bastien Montagne
committed
meshes = set()
for child in self.children:
# Children meshes may be linked to children armatures, in which case we do not want to link them
# to a parent one. See T70244.
child.collect_armature_meshes()
if not child.meshes:
child.collect_skeleton_meshes(meshes)
Bastien Montagne
committed
for m in meshes:
old_matrix = m.matrix
m.matrix = armature_matrix_inv @ m.get_world_matrix()
m.anim_compensation_matrix = old_matrix.inverted_safe() @ m.matrix
m.is_global_animation = True
Bastien Montagne
committed
m.parent = self
self.meshes = meshes
else:
for child in self.children:
child.collect_armature_meshes()
Bastien Montagne
committed
def build_skeleton(self, arm, parent_matrix, parent_bone_size=1, force_connect_children=False):
Bastien Montagne
committed
1965
1966
1967
1968
1969
1970
1971
1972
1973
1974
1975
1976
1977
1978
1979
1980
1981
1982
1983
1984
1985
1986
1987
1988
1989
1990
1991
1992
1993
1994
        def child_connect(par_bone, child_bone, child_head, connect_ctx):
            # Parent child_bone under par_bone and decide whether it can be
            # 'connected' (child head == parent tail).
            # connect_ctx is a 2-item list [force_connect_children, connected]
            # used as a mutable closure cell; 'connected' is None (connection
            # forbidden), ... (nothing accumulated yet), or
            # ([sum_of_heads, count], [candidate_bones]).
            # child_bone or child_head may be None.
            force_connect_children, connected = connect_ctx
            if child_bone is not None:
                child_bone.parent = par_bone
                child_head = child_bone.head

            if similar_values_iter(par_bone.tail, child_head):
                if child_bone is not None:
                    child_bone.use_connect = True
                # Disallow any force-connection at this level from now on, since that child was 'really'
                # connected, we do not want to move current bone's tail anymore!
                connected = None
            elif force_connect_children and connected is not None:
                # We only store position where tail of par_bone should be in the end.
                # Actual tail moving and force connection of compatible child bones will happen
                # once all have been checked.
                if connected is ...:
                    connected = ([child_head.copy(), 1], [child_bone] if child_bone is not None else [])
                else:
                    connected[0][0] += child_head
                    connected[0][1] += 1
                    if child_bone is not None:
                        connected[1].append(child_bone)
            # Write the (possibly rebound) accumulator back into the shared context.
            connect_ctx[1] = connected
def child_connect_finalize(par_bone, connect_ctx):
force_connect_children, connected = connect_ctx
# Do nothing if force connection is not enabled!
if force_connect_children and connected is not None and connected is not ...:
# Here again we have to be wary about zero-length bones!!!
par_tail = connected[0][0] / connected[0][1]
if (par_tail - par_bone.head).magnitude < 1e-2:
par_bone_vec = (par_bone.tail - par_bone.head).normalized()
par_tail = par_bone.head + par_bone_vec * 0.01
par_bone.tail = par_tail