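# Deselect everything first, so that only the newly imported objects end up selected.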
if bpy.ops.object.select_all.poll():
    bpy.ops.object.select_all(action='DESELECT')
scene = context.scene
# scn.objects.selected = []

new_objects = []  # collect the newly created objects here for linking/scaling below

print('\tbuilding geometry...\n\tverts:%i faces:%i materials: %i smoothgroups:%i ...' % (len(verts_loc), len(faces), len(unique_materials), len(unique_smooth_groups)))
# Split the mesh into separate objects by OBJ object/group and/or material, when requested.
# NOTE: 'use_split_objects' / 'use_split_groups' are assumed to be the importer's split options.
if use_split_objects or use_split_groups:
    SPLIT_OB_OR_GROUP = True
else:
    SPLIT_OB_OR_GROUP = False
for verts_loc_split, faces_split, unique_materials_split, dataname in split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
    # Create meshes from the data; warning: 'vertex_groups' won't support splitting.
    create_mesh(new_objects,
                use_edges,
                verts_loc_split,
                verts_tex,
                faces_split,
                unique_materials_split,
                unique_material_images,
                unique_smooth_groups,
                vertex_groups,
                dataname,
                )
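
# NOTE: create_mesh() is expected to append each object it builds to 'new_objects'
# (it only receives the list here), so the loops below can link and transform them.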
# nurbs support
for context_nurbs in nurbs:
    create_nurbs(context_nurbs, verts_loc, new_objects)
# Link the newly created objects into the scene and select them.
for obj in new_objects:
    base = scene.objects.link(obj)
    base.select = True

    # We could apply this anywhere before scaling.
    obj.matrix_world = global_matrix

scene.update()
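
# Track the combined bounding box of all imported objects: start min/max at
# sentinel values so the first real coordinate always replaces them.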
axis_min = [1000000000] * 3
axis_max = [-1000000000] * 3
# Clamp the overall size of the import, when enabled.
# NOTE: 'global_clamp_size' is assumed to be the importer's clamp-size option
# (0.0 disables clamping), matching the other importer settings used above.
if global_clamp_size:
    # Get all object bounds
    for ob in new_objects:
        for v in ob.bound_box:
            for axis, value in enumerate(v):
                if axis_min[axis] > value:
                    axis_min[axis] = value
                if axis_max[axis] < value:
                    axis_max[axis] = value

    # Scale objects
    max_axis = max(axis_max[0] - axis_min[0], axis_max[1] - axis_min[1], axis_max[2] - axis_min[2])
    scale = 1.0

    # Reduce the scale by powers of ten until the largest dimension fits the clamp size.
    while global_clamp_size < max_axis * scale:
        scale = scale / 10.0

    for obj in new_objects:
        obj.scale = scale, scale, scale
print("finished importing: %r in %.4f sec." % (filepath, (time_new - time_main)))
return {'FINISHED'}