# --- Parser state -----------------------------------------------------------
# OBJ logical lines may continue across physical lines (trailing '\'),
# so we need to know whether the current physical line continues a previous
# keyword; holds the keyword (b'v', b'f', ...) while a continuation is open.
context_multi_line = b''

# Per-face handling data.
face_vert_loc_indices = None   # vertex-location indices of the face being parsed
face_vert_nor_indices = None   # per-corner normal indices (0 = none)
face_vert_tex_indices = None   # per-corner texcoord indices (0 = none)
verts_loc_len = verts_nor_len = verts_tex_len = 0  # counts at face start, for negative indices
face_items_usage = set()       # vertices/edges already used by the current face
face_invalid_blenpoly = None   # non-empty list tags the face as a Blender-invalid ngon
prev_vidx = None
face = None
vec = []                       # scratch buffer for multi-line vector parsing

# Quick single-line vertex parsing fallback control (see parse loop below).
quick_vert_failures = 0
skip_quick_vert = False
progress.enter_substeps(3, "Parsing OBJ file...")
with open(filepath, 'rb') as f:
    # NOTE(review): the line-iteration header was lost in extraction; the body
    # clearly iterates physical lines (`line`, `continue`) — restored here.
    for line in f:
        line_split = line.split()

        if not line_split:
            continue

        line_start = line_split[0]  # we compare with this a _lot_

        if len(line_split) == 1 and not context_multi_line and line_start != b'end':
            print("WARNING, skipping malformatted line: %s" % line.decode('UTF-8', 'replace').rstrip())
            continue

        # Handling vertex data are pretty similar, factorize that.
        # Also, most OBJ files store all those on a single line, so try fast parsing for that first,
        # and only fallback to full multi-line parsing when needed, this gives significant speed-up
        # (~40% on affected code).
        if line_start == b'v':
            vdata, vdata_len, do_quick_vert = verts_loc, 3, not skip_quick_vert
        elif line_start == b'vn':
            vdata, vdata_len, do_quick_vert = verts_nor, 3, not skip_quick_vert
        elif line_start == b'vt':
            vdata, vdata_len, do_quick_vert = verts_tex, 2, not skip_quick_vert
        elif context_multi_line == b'v':
            vdata, vdata_len, do_quick_vert = verts_loc, 3, False
        elif context_multi_line == b'vn':
            vdata, vdata_len, do_quick_vert = verts_nor, 3, False
        elif context_multi_line == b'vt':
            vdata, vdata_len, do_quick_vert = verts_tex, 2, False
        else:
            vdata_len = 0

        if vdata_len:
            if do_quick_vert:
                try:
                    vdata.append(list(map(float_func, line_split[1:vdata_len + 1])))
                except Exception:
                    do_quick_vert = False
                    # In case we get too many failures on quick parsing, force fallback to
                    # full multi-line one. Exception handling can become costly...
                    quick_vert_failures += 1
                    if quick_vert_failures > 10000:
                        skip_quick_vert = True
            if not do_quick_vert:
                context_multi_line = handle_vec(line_start, context_multi_line, line_split,
                                                context_multi_line or line_start,
                                                vdata, vec, vdata_len)

        elif line_start == b'f' or context_multi_line == b'f':
            if not context_multi_line:
                line_split = line_split[1:]
                # Instantiate a face
                face = create_face(context_material, context_smooth_group, context_object_key)
                (face_vert_loc_indices, face_vert_nor_indices, face_vert_tex_indices,
                 _1, _2, _3, face_invalid_blenpoly) = face
                faces.append(face)
                face_items_usage.clear()
                verts_loc_len = len(verts_loc)
                verts_nor_len = len(verts_nor)
                verts_tex_len = len(verts_tex)
                if context_material is None:
                    use_default_material = True
            # Else, use face_vert_loc_indices and face_vert_tex_indices previously defined and used the obj_face

            context_multi_line = b'f' if strip_slash(line_split) else b''

            for v in line_split:
                obj_vert = v.split(b'/')
                idx = int(obj_vert[0])  # Note that we assume here we cannot get OBJ invalid 0 index...
                # Negative OBJ indices are relative to the current end of the vertex list.
                vert_loc_index = (idx + verts_loc_len) if (idx < 1) else idx - 1
                # Add the vertex to the current group
                # *warning*, this wont work for files that have groups defined around verts
                if use_groups_as_vgroups and context_vgroup:
                    vertex_groups[context_vgroup].append(vert_loc_index)
                # This a first round to quick-detect ngons that *may* use a same edge more than once.
                # Potential candidate will be re-checked once we have done parsing the whole face.
                if not face_invalid_blenpoly:
                    # If we use more than once a same vertex, invalid ngon is suspected.
                    if vert_loc_index in face_items_usage:
                        face_invalid_blenpoly.append(True)
                    else:
                        face_items_usage.add(vert_loc_index)
                face_vert_loc_indices.append(vert_loc_index)

                # formatting for faces with normals and textures is
                # loc_index/tex_index/nor_index
                # A 0 placeholder is stored for corners without tex/nor index.
                if len(obj_vert) > 1 and obj_vert[1] and obj_vert[1] != b'0':
                    idx = int(obj_vert[1])
                    face_vert_tex_indices.append((idx + verts_tex_len) if (idx < 1) else idx - 1)
                else:
                    face_vert_tex_indices.append(0)

                if len(obj_vert) > 2 and obj_vert[2] and obj_vert[2] != b'0':
                    idx = int(obj_vert[2])
                    face_vert_nor_indices.append((idx + verts_nor_len) if (idx < 1) else idx - 1)
                else:
                    face_vert_nor_indices.append(0)

            if not context_multi_line:
                # Means we have finished a face, we have to do final check if ngon is suspected to be blender-invalid...
                if face_invalid_blenpoly:
                    face_invalid_blenpoly.clear()
                    face_items_usage.clear()
                    prev_vidx = face_vert_loc_indices[-1]
                    for vidx in face_vert_loc_indices:
                        edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
                        if edge_key in face_items_usage:
                            face_invalid_blenpoly.append(True)
                            break
                        face_items_usage.add(edge_key)
                        prev_vidx = vidx

        elif use_edges and (line_start == b'l' or context_multi_line == b'l'):
            # very similar to the face load function above with some parts removed
            if not context_multi_line:
                line_split = line_split[1:]
                # Instantiate a face
                face = create_face(context_material, context_smooth_group, context_object_key)
                face_vert_loc_indices = face[0]
                # XXX A bit hackish, we use special 'value' of face_vert_nor_indices (a single True item) to tag this
                #     as a polyline, and not a regular face...
                face[1][:] = [True]
                faces.append(face)
                if context_material is None:
                    use_default_material = True
            # Else, use face_vert_loc_indices previously defined and used the obj_face

            context_multi_line = b'l' if strip_slash(line_split) else b''

            for v in line_split:
                obj_vert = v.split(b'/')
                idx = int(obj_vert[0]) - 1
                face_vert_loc_indices.append((idx + len(verts_loc) + 1) if (idx < 0) else idx)

        elif line_start == b's':
            if use_smooth_groups:
                context_smooth_group = line_value(line_split)
                if context_smooth_group == b'off':
                    context_smooth_group = None
                elif context_smooth_group:  # is not None
                    unique_smooth_groups[context_smooth_group] = None

        elif line_start == b'o':
            if use_split_objects:
                context_object_key = unique_name(objects_names, line_value(line_split))
                context_object_obpart = context_object_key
                # unique_objects[context_object_key]= None

        elif line_start == b'g':
            if use_split_groups:
                grppart = line_value(line_split)
                context_object_key = (context_object_obpart, grppart) if context_object_obpart else grppart
                # print 'context_object_key', context_object_key
                # unique_objects[context_object_key]= None
            elif use_groups_as_vgroups:
                context_vgroup = line_value(line.split())
                if context_vgroup and context_vgroup != b'(null)':
                    vertex_groups.setdefault(context_vgroup, [])
                else:
                    context_vgroup = None  # dont assign a vgroup

        elif line_start == b'usemtl':
            context_material = line_value(line.split())
            unique_materials[context_material] = None

        elif line_start == b'mtllib':  # usemap or usemat
            # can have multiple mtllib filenames per line, mtllib can appear more than once,
            # so make sure only occurrence of material exists
            material_libs |= {os.fsdecode(f) for f in
                              filenames_group_by_ext(line.lstrip()[7:].strip(), b'.mtl')}

        # Nurbs support
        elif line_start == b'cstype':
            context_nurbs[b'cstype'] = line_value(line.split())  # 'rat bspline' / 'bspline'

        elif line_start == b'curv' or context_multi_line == b'curv':
            curv_idx = context_nurbs[b'curv_idx'] = context_nurbs.get(b'curv_idx', [])  # in case were multiline

            if not context_multi_line:
                context_nurbs[b'curv_range'] = float_func(line_split[1]), float_func(line_split[2])
                line_split[0:3] = []  # remove first 3 items

            if strip_slash(line_split):
                context_multi_line = b'curv'
            else:
                context_multi_line = b''

            for i in line_split:
                vert_loc_index = int(i) - 1
                if vert_loc_index < 0:
                    vert_loc_index = len(verts_loc) + vert_loc_index + 1
                curv_idx.append(vert_loc_index)

        elif line_start == b'parm' or context_multi_line == b'parm':
            if context_multi_line:
                context_multi_line = b''
            else:
                context_parm = line_split[1]
                line_split[0:2] = []  # remove first 2

            if strip_slash(line_split):
                context_multi_line = b'parm'
            else:
                context_multi_line = b''

            if context_parm.lower() == b'u':
                context_nurbs.setdefault(b'parm_u', []).extend([float_func(f) for f in line_split])
            elif context_parm.lower() == b'v':  # surfaces not supported yet
                context_nurbs.setdefault(b'parm_v', []).extend([float_func(f) for f in line_split])
            # else: # may want to support other parm's ?

        elif line_start == b'deg':
            context_nurbs[b'deg'] = [int(i) for i in line.split()[1:]]

        elif line_start == b'end':
            # Add the nurbs curve
            if context_object_key:
                context_nurbs[b'name'] = context_object_key
            nurbs.append(context_nurbs)
            context_nurbs = {}
            context_parm = b''

        ''' # How to use usemap? deprecated?
        elif line_start == b'usema': # usemap or usemat
            context_image= line_value(line_split)
        '''
progress.step("Done, loading materials and images...")

# Any face parsed without an active material gets the implicit default one.
if use_default_material:
    unique_materials[None] = None

# Resolve the collected .mtl libraries into actual materials/images.
create_materials(filepath, relpath, material_libs, unique_materials,
                 use_image_search, float_func)
# NOTE(review): a run of bare line numbers (1251-1283) from a blame/diff viewer
# appeared here; the corresponding ~30 source lines were lost in extraction.
# Restore this section from the original file before use.
progress.step("Done, building geometries (verts:%i faces:%i materials: %i smoothgroups:%i) ..." %
              (len(verts_loc), len(faces), len(unique_materials), len(unique_smooth_groups)))

# deselect all
if bpy.ops.object.select_all.poll():
    bpy.ops.object.select_all(action='DESELECT')

new_objects = []  # put new objects here

# Split the mesh by objects/materials, may
SPLIT_OB_OR_GROUP = bool(use_split_objects or use_split_groups)

for data in split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
    verts_loc_split, faces_split, unique_materials_split, dataname, use_vnor, use_vtex = data
    # Create meshes from the data, warning 'vertex_groups' wont support splitting
    #~ print(dataname, use_vnor, use_vtex)
    create_mesh(new_objects,
                use_edges,
                verts_loc_split,
                verts_nor if use_vnor else [],
                verts_tex if use_vtex else [],
                faces_split,
                unique_materials_split,
                unique_smooth_groups,
                vertex_groups,
                dataname,
                )

# nurbs support
for context_nurbs in nurbs:
    create_nurbs(context_nurbs, verts_loc, new_objects)
collection = view_layer.active_layer_collection.collection
# Create new obj
for obj in new_objects:
# we could apply this anywhere before scaling.
obj.matrix_world = global_matrix
axis_min = [1000000000] * 3
axis_max = [-1000000000] * 3
# Get all object bounds
for ob in new_objects:
for v in ob.bound_box:
for axis, value in enumerate(v):
if axis_min[axis] > value:
axis_min[axis] = value
if axis_max[axis] < value:
axis_max[axis] = value
# Scale objects
max_axis = max(axis_max[0] - axis_min[0], axis_max[1] - axis_min[1], axis_max[2] - axis_min[2])
scale = 1.0
while global_clamp_size < max_axis * scale:
Campbell Barton
committed
for obj in new_objects:
obj.scale = scale, scale, scale
progress.leave_substeps("Done.")
progress.leave_substeps("Finished importing: %r" % filepath)
return {'FINISHED'}