Newer
Older
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Script copyright (C) Campbell Barton
# Contributors: Campbell Barton, Jiri Hnidek, Paolo Ciccone
"""
This script imports Wavefront OBJ files into Blender.
Usage:
Run this script from "File->Import" menu and then load the desired OBJ file.
Note: this loads mesh objects and materials only; nurbs and curves are not supported.
http://wiki.blender.org/index.php/Scripts/Manual/Import/wavefront_obj
"""
import os
import time
import bpy
import mathutils
from bpy_extras.image_utils import load_image
from bpy_extras.wm_utils.progress_report import ProgressReport
def line_value(line_split):
    """
    Returns 1 string representing the value for this line
    None will be returned if there's only 1 word
    """
    # BUG FIX: 'length' was referenced but never assigned (line lost in transit).
    length = len(line_split)
    if length == 1:
        return None
    elif length == 2:
        return line_split[1]
    elif length > 2:
        # Value contained spaces: re-join everything after the keyword.
        return b' '.join(line_split[1:])
def obj_image_load(context_imagepath_map, line, DIR, recursive, relpath):
    """
    Load (or fetch from cache) the image referenced by an MTL texture line.

    But we try all space-separated items from current line when file is not found with last one
    (users keep generating/using image files with spaces in a format that does not support them, sigh...)
    Also tries to replace '_' with ' ' for Max's exporter replaces spaces with underscores.

    Successful lookups are cached in context_imagepath_map; if nothing can be
    found, a placeholder image for the last token is created and returned.
    """
    filepath_parts = line.split(b' ')

    image = None
    # Try increasingly long suffixes of the line as the file name (paths with
    # spaces). Deliberately stops before i == -len(filepath_parts), which
    # would include the leading "map_*" keyword itself.
    for i in range(-1, -len(filepath_parts), -1):
        imagepath = os.fsdecode(b" ".join(filepath_parts[i:]))
        image = context_imagepath_map.get(imagepath, ...)
        if image is ...:
            image = load_image(imagepath, DIR, recursive=recursive, relpath=relpath)
            if image is None and "_" in imagepath:
                image = load_image(imagepath.replace("_", " "), DIR, recursive=recursive, relpath=relpath)
        # Stop on either a fresh load or a cache hit; otherwise the next
        # iteration would throw this result away.
        if image is not None:
            context_imagepath_map[imagepath] = image
            break

    if image is None:
        # Nothing found at all: register a placeholder for the last token so
        # the material still gets a (pink) image slot the user can fix up.
        imagepath = os.fsdecode(filepath_parts[-1])
        image = load_image(imagepath, DIR, recursive=recursive, place_holder=True, relpath=relpath)
        context_imagepath_map[imagepath] = image

    return image
Campbell Barton
committed
def create_materials(filepath, relpath,
material_libs, unique_materials,
use_image_search, float_func):
Create all the used materials in this obj,
assign colors and images to the materials from all referenced material libs
from bpy_extras import node_shader_utils
Campbell Barton
committed
context_material_vars = set()
# Don't load the same image multiple times
context_imagepath_map = {}
nodal_material_wrap_map = {}
def load_material_image(blender_material, mat_wrap, context_material_name, img_data, line, type):
"""
Set textures defined in .mtl file.
"""
map_options = {}
curr_token = []
for token in img_data[:-1]:
if token.startswith(b'-') and token[1:].isalpha():
if curr_token:
map_options[curr_token[0]] = curr_token[1:]
curr_token[:] = []
curr_token.append(token)
Bastien Montagne
committed
if curr_token:
map_options[curr_token[0]] = curr_token[1:]
# Absolute path - c:\.. etc would work here
image = obj_image_load(context_imagepath_map, line, DIR, use_image_search, relpath)
map_offset = map_options.get(b'-o')
map_scale = map_options.get(b'-s')
def _generic_tex_set(nodetex, image, texcoords, translation, scale):
nodetex.image = image
nodetex.texcoords = texcoords
if translation is not None:
nodetex.translation = translation
if scale is not None:
nodetex.scale = scale
# Adds textures for materials (rendering)
if type == 'Kd':
_generic_tex_set(mat_wrap.base_color_texture, image, 'UV', map_offset, map_scale)
elif type == 'Ka':
# XXX Not supported?
print("WARNING, currently unsupported ambient texture, skipped.")
elif type == 'Ks':
_generic_tex_set(mat_wrap.specular_texture, image, 'UV', map_offset, map_scale)
Bastien Montagne
committed
elif type == 'Ke':
# XXX Not supported?
print("WARNING, currently unsupported emit texture, skipped.")
elif type == 'Bump':
bump_mult = map_options.get(b'-bm')
Bastien Montagne
committed
bump_mult = float(bump_mult[0]) if (bump_mult and len(bump_mult[0]) > 1) else 1.0
mat_wrap.normalmap_strength_set(bump_mult)
_generic_tex_set(mat_wrap.normalmap_texture, image, 'UV', map_offset, map_scale)
elif type == 'D':
_generic_tex_set(mat_wrap.transmission_texture, image, 'UV', map_offset, map_scale)
Campbell Barton
committed
elif type == 'disp':
# XXX Not supported?
print("WARNING, currently unsupported displacement texture, skipped.")
# ~ mat_wrap.bump_image_set(image)
# ~ mat_wrap.bump_mapping_set(coords='UV', translation=map_offset, scale=map_scale)
elif type == 'refl':
map_type = map_options.get(b'-type')
if map_type and map_type != [b'sphere']:
print("WARNING, unsupported reflection type '%s', defaulting to 'sphere'"
"" % ' '.join(i.decode() for i in map_type))
_generic_tex_set(mat_wrap.base_color_texture, image, 'Reflection', map_offset, map_scale)
mat_wrap.base_color_texture.projection = 'SPHERE'
raise Exception("invalid type %r" % type)
Bastien Montagne
committed
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
def finalize_material(context_material, context_material_vars, spec_colors, emit_colors,
do_highlight, do_reflection, do_transparency, do_glass):
# Finalize previous mat, if any.
if context_material:
if "specular" in context_material_vars:
# XXX This is highly approximated, not sure whether we can do better...
# TODO: Find a way to guesstimate best value from diffuse color...
# IDEA: Use standard deviation of both spec and diff colors (i.e. how far away they are
# from some grey), and apply the the proportion between those two as tint factor?
spec = sum(spec_colors) / 3.0
# ~ spec_var = math.sqrt(sum((c - spec) ** 2 for c in spec_color) / 3.0)
# ~ diff = sum(context_mat_wrap.base_color) / 3.0
# ~ diff_var = math.sqrt(sum((c - diff) ** 2 for c in context_mat_wrap.base_color) / 3.0)
# ~ tint = min(1.0, spec_var / diff_var)
context_mat_wrap.specular = spec
context_mat_wrap.specular_tint = 0.0
if "roughness" not in context_material_vars:
context_mat_wrap.roughness = 0.0
emit_value = sum(emit_colors) / 3.0
if emit_value > 1e-6:
print("WARNING, emit value unsupported by Principled BSDF shader, skipped.")
# We have to adapt it to diffuse color too...
emit_value /= sum(context_material.diffuse_color) / 3.0
# ~ context_material.emit = emit_value
# FIXME, how else to use this?
if do_highlight:
if "specular" not in context_material_vars:
context_mat_wrap.specular = 1.0
if "roughness" not in context_material_vars:
context_mat_wrap.roughness = 0.0
else:
if "specular" not in context_material_vars:
context_mat_wrap.specular = 0.0
if "roughness" not in context_material_vars:
context_mat_wrap.roughness = 1.0
if do_reflection:
if "metallic" not in context_material_vars:
context_mat_wrap.metallic = 1.0
else:
# since we are (ab)using ambient term for metallic (which can be non-zero)
context_mat_wrap.metallic = 0.0
Bastien Montagne
committed
if do_transparency:
if "ior" not in context_material_vars:
context_mat_wrap.ior = 1.0
if "transmission" not in context_material_vars:
context_mat_wrap.transmission = 1.0
# EEVEE only
context_material.blend_method = 'BLEND'
if do_glass:
if "ior" not in context_material_vars:
context_mat_wrap.ior = 1.5
# Try to find a MTL with the same name as the OBJ if no MTLs are specified.
temp_mtl = os.path.splitext((os.path.basename(filepath)))[0] + ".mtl"
if os.path.exists(os.path.join(DIR, temp_mtl)):
material_libs.add(temp_mtl)
del temp_mtl
ma_name = "Default OBJ" if name is None else name.decode('utf-8', "replace")
ma = unique_materials[name] = bpy.data.materials.new(ma_name)
ma_wrap = node_shader_utils.PrincipledBSDFWrapper(ma, is_readonly=False)
nodal_material_wrap_map[ma] = ma_wrap
ma_wrap.use_nodes = True
for libname in sorted(material_libs):
if not os.path.exists(mtlpath):
print("\tMaterial not found MTL: %r" % mtlpath)
else:
# Note: with modern Principled BSDF shader, things like ambient, raytrace or fresnel are always 'ON'
# (i.e. automatically controlled by other parameters).
do_highlight = False
do_reflection = False
do_transparency = False
do_glass = False
spec_colors = [0.0, 0.0, 0.0]
Bastien Montagne
committed
emit_colors = [0.0, 0.0, 0.0]
# print('\t\tloading mtl: %e' % mtlpath)
context_mat_wrap = None
mtl = open(mtlpath, 'rb')
Campbell Barton
committed
line = line.strip()
if not line or line.startswith(b'#'):
continue
line_split = line.split()
line_id = line_split[0].lower()
if line_id == b'newmtl':
Bastien Montagne
committed
# Finalize previous mat, if any.
Bastien Montagne
committed
finalize_material(context_material, context_material_vars, spec_colors, emit_colors,
do_highlight, do_reflection, do_transparency, do_glass)
context_material_name = line_value(line_split)
context_material = unique_materials.get(context_material_name)
if context_material is not None:
context_mat_wrap = nodal_material_wrap_map[context_material]
Campbell Barton
committed
context_material_vars.clear()
spec_colors = [0.0, 0.0, 0.0]
Bastien Montagne
committed
emit_colors[:] = [0.0, 0.0, 0.0]
do_highlight = False
do_reflection = False
do_transparency = False
do_glass = False
elif context_material:
# we need to make a material to assign properties to it.
if line_id == b'ka':
refl = (float_func(line_split[1]) + float_func(line_split[2]) + float_func(line_split[3])) / 3.0
context_mat_wrap.metallic = refl
context_material_vars.add("metallic")
elif line_id == b'kd':
col = (float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3]))
context_mat_wrap.base_color = col
elif line_id == b'ks':
spec_colors[:] = [
float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])]
context_material_vars.add("specular")
Bastien Montagne
committed
elif line_id == b'ke':
# We cannot set context_material.emit right now, we need final diffuse color as well for this.
Bastien Montagne
committed
emit_colors[:] = [
float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])]
elif line_id == b'ns':
# XXX Totally empirical conversion, trying to adapt it
# (from 0.0 - 900.0 OBJ specular exponent range to 1.0 - 0.0 Principled BSDF range)...
context_mat_wrap.roughness = 1.0 - (sqrt(float_func(line_split[1])) / 30)
context_material_vars.add("roughness")
elif line_id == b'ni': # Refraction index (between 0.001 and 10).
context_mat_wrap.ior = float_func(line_split[1])
context_material_vars.add("ior")
elif line_id == b'd': # dissolve (transparency)
context_mat_wrap.transmission = 1.0 - float_func(line_split[1])
context_material_vars.add("transmission")
elif line_id == b'tr': # translucency
print("WARNING, currently unsupported 'tr' translucency option, skipped.")
elif line_id == b'tf':
Campbell Barton
committed
# rgb, filter color, blender has no support for this.
print("WARNING, currently unsupported 'tf' filter color option, skipped.")
elif line_id == b'illum':
illum = get_int(line_split[1])
Campbell Barton
committed
# inline comments are from the spec, v4.2
if illum == 0:
# Color on and Ambient off
print("WARNING, Principled BSDF shader does not support illumination 0 mode "
"(colors with no ambient), skipped.")
Campbell Barton
committed
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
elif illum == 1:
# Color on and Ambient on
pass
elif illum == 2:
# Highlight on
do_highlight = True
elif illum == 3:
# Reflection on and Ray trace on
do_reflection = True
elif illum == 4:
# Transparency: Glass on
# Reflection: Ray trace on
do_transparency = True
do_reflection = True
do_glass = True
elif illum == 5:
# Reflection: Fresnel on and Ray trace on
do_reflection = True
elif illum == 6:
# Transparency: Refraction on
# Reflection: Fresnel off and Ray trace on
do_transparency = True
do_reflection = True
elif illum == 7:
# Transparency: Refraction on
# Reflection: Fresnel on and Ray trace on
do_transparency = True
do_reflection = True
elif illum == 8:
# Reflection on and Ray trace off
do_reflection = True
elif illum == 9:
# Transparency: Glass on
# Reflection: Ray trace off
do_transparency = True
Campbell Barton
committed
do_glass = True
elif illum == 10:
# Casts shadows onto invisible surfaces
print("WARNING, Principled BSDF shader does not support illumination 10 mode "
"(cast shadows on invisible surfaces), skipped.")
Campbell Barton
committed
pass
elif line_id == b'map_ka':
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Ka')
elif line_id == b'map_ks':
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Ks')
elif line_id == b'map_kd':
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Kd')
Bastien Montagne
committed
elif line_id == b'map_ke':
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Ke')
elif line_id in {b'map_bump', b'bump'}: # 'bump' is incorrect but some files use it.
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Bump')
elif line_id in {b'map_d', b'map_tr'}: # Alpha map - Dissolve
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'D')
elif line_id in {b'map_disp', b'disp'}: # displacementmap
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'disp')
Campbell Barton
committed
elif line_id in {b'map_refl', b'refl'}: # reflectionmap
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'refl')
Campbell Barton
committed
else:
print("WARNING: %r:%r (ignored)" % (filepath, line))
Bastien Montagne
committed
# Finalize last mat, if any.
finalize_material(context_material, context_material_vars, spec_colors, emit_colors,
do_highlight, do_reflection, do_transparency, do_glass)
mtl.close()
def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
Takes vert_loc and faces, and separates into multiple sets of
(verts_loc, faces, unique_materials, dataname)
filename = os.path.splitext((os.path.basename(filepath)))[0]
Bastien Montagne
committed
if not SPLIT_OB_OR_GROUP or not faces:
use_verts_nor = any(f[1] for f in faces)
use_verts_tex = any(f[2] for f in faces)
Bastien Montagne
committed
# use the filename for the object name since we aren't chopping up the mesh.
return [(verts_loc, faces, unique_materials, filename, use_verts_nor, use_verts_tex)]
def key_to_name(key):
# if the key is a tuple, join it to make a string
if not key:
return filename # assume its a string. make sure this is true if the splitting code is changed
else:
return key.decode('utf-8', 'replace')
# Return a key that makes the faces unique.
oldkey = -1 # initialize to a value that will never match the key
for face in faces:
(face_vert_loc_indices,
face_vert_nor_indices,
face_vert_tex_indices,
context_material,
context_smooth_group,
context_object,
face_invalid_blenpoly,
) = face
key = context_object
if oldkey != key:
# Check the key has changed.
Bastien Montagne
committed
(verts_split, faces_split, unique_materials_split, vert_remap,
use_verts_nor, use_verts_tex) = face_split_dict.setdefault(key, ([], [], {}, {}, [], []))
if not use_verts_nor and face_vert_nor_indices:
Bastien Montagne
committed
use_verts_nor.append(True)
if not use_verts_tex and face_vert_tex_indices:
Bastien Montagne
committed
use_verts_tex.append(True)
# Remap verts to new vert list and add where needed
for loop_idx, vert_idx in enumerate(face_vert_loc_indices):
map_index = vert_remap.get(vert_idx)
if map_index is None:
map_index = len(verts_split)
vert_remap[vert_idx] = map_index # set the new remapped index so we only add once and can reference next time.
verts_split.append(verts_loc[vert_idx]) # add the vert to the local verts
face_vert_loc_indices[loop_idx] = map_index # remap to the local index
if context_material not in unique_materials_split:
unique_materials_split[context_material] = unique_materials[context_material]
faces_split.append(face)
# remove one of the items and reorder
Bastien Montagne
committed
return [(verts_split, faces_split, unique_materials_split, key_to_name(key), bool(use_vnor), bool(use_vtex))
for key, (verts_split, faces_split, unique_materials_split, _, use_vnor, use_vtex)
in face_split_dict.items()]
def create_mesh(new_objects,
verts_tex,
faces,
unique_materials,
unique_smooth_groups,
vertex_groups,
dataname,
):
Takes all the data gathered and generates a mesh, adding the new object to new_objects
deals with ngons, sharp edges and assigning materials
if unique_smooth_groups:
sharp_edges = set()
smooth_group_users = {context_smooth_group: {} for context_smooth_group in unique_smooth_groups.keys()}
fgon_edges = set() # Used for storing fgon keys when we need to tessellate/untessellate them (ngons with hole).
edges = []
tot_loops = 0
# reverse loop through face indices
face_vert_nor_indices,
face_vert_tex_indices,
context_material,
context_smooth_group,
context_object,
Bastien Montagne
committed
face_invalid_blenpoly,
len_face_vert_loc_indices = len(face_vert_loc_indices)
if len_face_vert_loc_indices == 1:
faces.pop(f_idx) # cant add single vert faces
Bastien Montagne
committed
# Face with a single item in face_vert_nor_indices is actually a polyline!
elif len(face_vert_nor_indices) == 1 or len_face_vert_loc_indices == 2:
edges.extend((face_vert_loc_indices[i], face_vert_loc_indices[i + 1])
for i in range(len_face_vert_loc_indices - 1))
faces.pop(f_idx)
# Smooth Group
if unique_smooth_groups and context_smooth_group:
# Is a part of of a smooth group and is a face
if context_smooth_group_old is not context_smooth_group:
edge_dict = smooth_group_users[context_smooth_group]
context_smooth_group_old = context_smooth_group
Bastien Montagne
committed
prev_vidx = face_vert_loc_indices[-1]
for vidx in face_vert_loc_indices:
edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
prev_vidx = vidx
edge_dict[edge_key] = edge_dict.get(edge_key, 0) + 1
Bastien Montagne
committed
# NGons into triangles
if face_invalid_blenpoly:
# ignore triangles with invalid indices
if len(face_vert_loc_indices) > 3:
from bpy_extras.mesh_utils import ngon_tessellate
ngon_face_indices = ngon_tessellate(verts_loc, face_vert_loc_indices, debug_print=bpy.app.debug)
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
faces.extend([([face_vert_loc_indices[ngon[0]],
face_vert_loc_indices[ngon[1]],
face_vert_loc_indices[ngon[2]],
],
[face_vert_nor_indices[ngon[0]],
face_vert_nor_indices[ngon[1]],
face_vert_nor_indices[ngon[2]],
] if face_vert_nor_indices else [],
[face_vert_tex_indices[ngon[0]],
face_vert_tex_indices[ngon[1]],
face_vert_tex_indices[ngon[2]],
] if face_vert_tex_indices else [],
context_material,
context_smooth_group,
context_object,
[],
)
for ngon in ngon_face_indices]
)
tot_loops += 3 * len(ngon_face_indices)
# edges to make ngons
if len(ngon_face_indices) > 1:
edge_users = set()
for ngon in ngon_face_indices:
prev_vidx = face_vert_loc_indices[ngon[-1]]
for ngidx in ngon:
vidx = face_vert_loc_indices[ngidx]
if vidx == prev_vidx:
continue # broken OBJ... Just skip.
edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
prev_vidx = vidx
if edge_key in edge_users:
fgon_edges.add(edge_key)
else:
edge_users.add(edge_key)
Bastien Montagne
committed
faces.pop(f_idx)
else:
tot_loops += len_face_vert_loc_indices
# Build sharp edges
if unique_smooth_groups:
for edge_dict in smooth_group_users.values():
for key, users in edge_dict.items():
if users == 1: # This edge is on the boundary of a group
sharp_edges.add(key)
# map the material names to an index
material_mapping = {name: i for i, name in enumerate(unique_materials)} # enumerate over unique_materials keys()
for name, index in material_mapping.items():
me = bpy.data.meshes.new(dataname)
# make sure the list isnt too big
for material in materials:
me.materials.append(material)
me.vertices.add(len(verts_loc))
me.loops.add(tot_loops)
me.polygons.add(len(faces))
# verts_loc is a list of (x, y, z) tuples
me.vertices.foreach_set("co", unpack_list(verts_loc))
loops_vert_idx = tuple(vidx for (face_vert_loc_indices, _, _, _, _, _, _) in faces for vidx in face_vert_loc_indices)
faces_loop_start = []
lidx = 0
for f in faces:
face_vert_loc_indices = f[0]
nbr_vidx = len(face_vert_loc_indices)
faces_loop_start.append(lidx)
lidx += nbr_vidx
faces_loop_total = tuple(len(face_vert_loc_indices) for (face_vert_loc_indices, _, _, _, _, _, _) in faces)
me.loops.foreach_set("vertex_index", loops_vert_idx)
me.polygons.foreach_set("loop_start", faces_loop_start)
me.polygons.foreach_set("loop_total", faces_loop_total)
faces_ma_index = tuple(material_mapping[context_material] for (_, _, _, context_material, _, _, _) in faces)
me.polygons.foreach_set("material_index", faces_ma_index)
faces_use_smooth = tuple(bool(context_smooth_group) for (_, _, _, _, context_smooth_group, _, _) in faces)
me.polygons.foreach_set("use_smooth", faces_use_smooth)
# Note: we store 'temp' normals in loops, since validate() may alter final mesh,
# we can only set custom lnors *after* calling it.
me.create_normals_split()
loops_nor = tuple(no for (_, face_vert_nor_indices, _, _, _, _, _) in faces for face_noidx in face_vert_nor_indices for no in verts_nor[face_noidx])
me.loops.foreach_set("normal", loops_nor)
if verts_tex and me.polygons:
me.uv_layers.new()
loops_uv = tuple(uv for (_, _, face_vert_tex_indices, _, _, _, _) in faces for face_uvidx in face_vert_tex_indices for uv in verts_tex[face_uvidx])
me.uv_layers[0].data.foreach_set("uv", loops_uv)
use_edges = use_edges and bool(edges)
me.edges.add(len(edges))
# edges should be a list of (a, b) tuples
me.edges.foreach_set("vertices", unpack_list(edges))
me.validate(clean_customdata=False) # *Very* important to not remove lnors here!
Philipp Oeser
committed
me.update(calc_edges=use_edges)
Bastien Montagne
committed
# Un-tessellate as much as possible, in case we had to triangulate some ngons...
if fgon_edges:
import bmesh
bm = bmesh.new()
bm.from_mesh(me)
verts = bm.verts[:]
get = bm.edges.get
edges = [get((verts[vidx1], verts[vidx2])) for vidx1, vidx2 in fgon_edges]
try:
bmesh.ops.dissolve_edges(bm, edges=edges, use_verts=False)
except:
# Possible dissolve fails for some edges, but don't fail silently in case this is a real bug.
import traceback
traceback.print_exc()
bm.to_mesh(me)
bm.free()
# XXX If validate changes the geometry, this is likely to be broken...
Philipp Oeser
committed
if unique_smooth_groups and sharp_edges:
for e in me.edges:
if e.key in sharp_edges:
e.use_edge_sharp = True
if verts_nor:
clnors = array.array('f', [0.0] * (len(me.loops) * 3))
me.loops.foreach_get("normal", clnors)
if not unique_smooth_groups:
me.polygons.foreach_set("use_smooth", [True] * len(me.polygons))
me.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3)))
me.use_auto_smooth = True
ob = bpy.data.objects.new(me.name, me)
new_objects.append(ob)
# Create the vertex groups. No need to have the flag passed here since we test for the
# content of the vertex_groups. If the user selects to NOT have vertex groups saved then
# the following test will never run
for group_name, group_indices in vertex_groups.items():
group = ob.vertex_groups.new(name=group_name.decode('utf-8', "replace"))
group.add(group_indices, 1.0, 'REPLACE')
def create_nurbs(context_nurbs, vert_loc, new_objects):
Add nurbs object to blender, only support one type at the moment
deg = context_nurbs.get(b'deg', (3,))
curv_range = context_nurbs.get(b'curv_range')
curv_idx = context_nurbs.get(b'curv_idx', [])
parm_u = context_nurbs.get(b'parm_u', [])
parm_v = context_nurbs.get(b'parm_v', [])
name = context_nurbs.get(b'name', b'ObjNurb')
cstype = context_nurbs.get(b'cstype')
if cstype is None:
print('\tWarning, cstype not found')
return
if cstype != b'bspline':
print('\tWarning, cstype is not supported (only bspline)')
return
if not curv_idx:
print('\tWarning, curv argument empty or not set')
return
if len(deg) > 1 or parm_v:
print('\tWarning, surfaces not supported')
return
Campbell Barton
committed
cu = bpy.data.curves.new(name.decode('utf-8', "replace"), 'CURVE')
cu.dimensions = '3D'
nu = cu.splines.new('NURBS')
nu.points.add(len(curv_idx) - 1) # a point is added to start with
nu.points.foreach_set("co", [co_axis for vt_idx in curv_idx for co_axis in (vert_loc[vt_idx] + (1.0,))])
nu.order_u = deg[0] + 1
# get for endpoint flag from the weighting
do_endpoints = True
do_endpoints = False
break
if abs(parm_u[-(i + 1)] - curv_range[1]) > 0.0001:
do_endpoints = False
break
else:
do_endpoints = False
if do_endpoints:
nu.use_endpoint_u = True
# close
'''
do_closed = False
if len(parm_u) > deg[0]+1:
for i in xrange(deg[0]+1):
#print curv_idx[i], curv_idx[-(i+1)]
if curv_idx[i]==curv_idx[-(i+1)]:
do_closed = True
break
if do_closed:
nu.use_cyclic_u = True
'''
Campbell Barton
committed
ob = bpy.data.objects.new(name.decode('utf-8', "replace"), cu)
new_objects.append(ob)
def strip_slash(line_split):
    """
    Remove a trailing '\\' line-continuation marker from a split OBJ line.

    Mutates line_split in place: a lone b'\\' token is dropped entirely,
    otherwise the backslash is trimmed off the end of the last token.
    Returns True when a continuation marker was present, else False.
    """
    last = line_split[-1]
    if last[-1] != 92:  # ord('\\') -- no continuation on this line
        return False
    if len(last) == 1:
        # The backslash was its own token; discard it.
        del line_split[-1]
    else:
        # Trim the backslash off the final token.
        line_split[-1] = last[:-1]
    return True
def get_float_func(filepath):
    """
    find the float function for this obj file
    - whether to replace commas or not

    Scans the file for the first vertex-style line ('v', 'vn', 'vt') whose
    values reveal the decimal separator; returns a comma-converting parser
    when ',' is used, plain float otherwise.
    """
    # BUG FIX: the line-iteration loop header was lost, and the file handle
    # was never closed; use a context manager.
    with open(filepath, 'rb') as f:
        for line in f:
            line = line.lstrip()
            if line.startswith(b'v'):  # vn vt v
                if b',' in line:
                    return lambda f: float(f.replace(b',', b'.'))
                elif b'.' in line:
                    return float
    # in case all vert values were ints
    return float
def get_int(svalue):
    """
    Parse an OBJ integer field (bytes), tolerating both a comma decimal
    separator and float-formatted values; truncates toward zero.
    """
    normalized = svalue.replace(b',', b'.') if b',' in svalue else svalue
    return int(float(normalized))
def load(context,
filepath,
*,
use_smooth_groups=True,
use_edges=True,
use_split_objects=True,
use_split_groups=True,
use_image_search=True,
use_groups_as_vgroups=False,
Campbell Barton
committed
relpath=None,
Called by the user interface or another script.
load_obj(path) - should give acceptable results.
This function passes the file and sends the data off
to be split into objects and then converted into mesh objects
def handle_vec(line_start, context_multi_line, line_split, tag, data, vec, vec_len):
ret_context_multi_line = tag if strip_slash(line_split) else b''
if line_start == tag:
vec[:] = [float_func(v) for v in line_split[1:]]
elif context_multi_line == tag:
vec += [float_func(v) for v in line_split]
if not ret_context_multi_line:
data.append(tuple(vec[:vec_len]))
return ret_context_multi_line
def create_face(context_material, context_smooth_group, context_object):
face_vert_loc_indices = []
face_vert_nor_indices = []
face_vert_tex_indices = []
return (
face_vert_loc_indices,
face_vert_nor_indices,
face_vert_tex_indices,
context_material,
context_smooth_group,
context_object,
Bastien Montagne
committed
[], # If non-empty, that face is a Blender-invalid ngon (holes...), need a mutable object for that...
)
with ProgressReport(context.window_manager) as progress:
progress.enter_substeps(1, "Importing OBJ %r..." % filepath)
if global_matrix is None:
global_matrix = mathutils.Matrix()
if use_split_objects or use_split_groups:
use_groups_as_vgroups = False
time_main = time.time()
verts_loc = []
verts_nor = []
verts_tex = []
faces = [] # tuples of the faces
material_libs = set() # filenames to material libs this OBJ uses
vertex_groups = {} # when use_groups_as_vgroups is true
# Get the string to float conversion func for this file- is 'float' for almost all files.
float_func = get_float_func(filepath)
# Context variables
context_material = None
context_smooth_group = None
context_object = None
context_vgroup = None
# Nurbs
context_nurbs = {}
nurbs = []
context_parm = b'' # used by nurbs too but could be used elsewhere
# Until we can use sets
use_default_material = False
unique_materials = {}
unique_smooth_groups = {}
# unique_obects= {} - no use for this variable since the objects are stored in the face.
# when there are faces that end with \
# it means they are multiline-
# since we use xreadline we cant skip to the next line
# so we need to know whether
context_multi_line = b''
# Per-face handling data.
face_vert_loc_indices = None
face_vert_nor_indices = None
face_vert_tex_indices = None
face_vert_nor_valid = face_vert_tex_valid = False
verts_loc_len = verts_nor_len = verts_tex_len = 0
face_items_usage = set()
face_invalid_blenpoly = None
prev_vidx = None
face = None
vec = []
quick_vert_failures = 0
skip_quick_vert = False
progress.enter_substeps(3, "Parsing OBJ file...")
with open(filepath, 'rb') as f:
if not line_split:
continue
line_start = line_split[0] # we compare with this a _lot_
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
# Handling vertex data are pretty similar, factorize that.
# Also, most OBJ files store all those on a single line, so try fast parsing for that first,
# and only fallback to full multi-line parsing when needed, this gives significant speed-up
# (~40% on affected code).
if line_start == b'v':
vdata, vdata_len, do_quick_vert = (verts_loc, 3, not skip_quick_vert)
elif line_start == b'vn':
vdata, vdata_len, do_quick_vert = (verts_nor, 3, not skip_quick_vert)
elif line_start == b'vt':
vdata, vdata_len, do_quick_vert = verts_tex, 2, not skip_quick_vert
elif context_multi_line == b'v':
vdata, vdata_len, do_quick_vert = verts_loc, 3, False
elif context_multi_line == b'vn':
vdata, vdata_len, do_quick_vert = verts_nor, 3, False
elif context_multi_line == b'vt':
vdata, vdata_len, do_quick_vert = verts_tex, 2, False
else:
vdata_len = 0
if vdata_len:
if do_quick_vert:
try:
vdata.append(tuple(map(float_func, line_split[1:vdata_len + 1])))
except:
do_quick_vert = False
# In case we get too many failures on quick parsing, force fallback to full multi-line one.
# Exception handling can become costly...
quick_vert_failures += 1
if quick_vert_failures > 10000:
skip_quick_vert = True
if not do_quick_vert:
context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'v', vdata, vec, vdata_len)
elif line_start == b'f' or context_multi_line == b'f':
if not context_multi_line:
line_split = line_split[1:]
# Instantiate a face
face = create_face(context_material, context_smooth_group, context_object)
(face_vert_loc_indices, face_vert_nor_indices, face_vert_tex_indices,
_1, _2, _3, face_invalid_blenpoly) = face
faces.append(face)
face_items_usage.clear()
verts_loc_len = len(verts_loc)
verts_nor_len = len(verts_nor)
verts_tex_len = len(verts_tex)
if context_material is None:
use_default_material = True
# Else, use face_vert_loc_indices and face_vert_tex_indices previously defined and used the obj_face