# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Script copyright (C) Campbell Barton
# Contributors: Campbell Barton, Jiri Hnidek, Paolo Ciccone
"""
This script imports Wavefront OBJ files into Blender.
Usage:
Run this script from "File->Import" menu and then load the desired OBJ file.
Note: this loads mesh objects and materials only; NURBS and curves are not supported.
http://wiki.blender.org/index.php/Scripts/Manual/Import/wavefront_obj
"""
import array
import os
import time
import bpy
import mathutils

from bpy_extras.io_utils import unpack_list
from bpy_extras.image_utils import load_image
from bpy_extras.wm_utils.progress_report import ProgressReport
def line_value(line_split):
    """
    Returns 1 string representing the value for this line
    None will be returned if there's only 1 word
    """
    length = len(line_split)
    if length == 1:
        return None

    elif length == 2:
        return line_split[1]

    elif length > 2:
        return b' '.join(line_split[1:])
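# Illustrative examples for line_value (made-up values):
#   line_value(b'usemtl Material_001'.split()) -> b'Material_001'
#   line_value(b'mtllib foo bar.mtl'.split())  -> b'foo bar.mtl'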
def filenames_group_by_ext(line, ext):
"""
Splits material libraries supporting spaces, so:
b'foo bar.mtl baz spam.MTL' -> (b'foo bar.mtl', b'baz spam.MTL')
Also handle " chars (some software use those to protect filenames with spaces, see T67266... sic).
    """
# Note that we assume that if there are some " in that line,
# then all filenames are properly enclosed within those...
start = line.find(b'"') + 1
if start != 0:
while start != 0:
end = line.find(b'"', start)
if end != -1:
yield line[start:end]
start = line.find(b'"', end + 1) + 1
else:
break
return
line_lower = line.lower()
i_prev = 0
while i_prev != -1 and i_prev < len(line):
i = line_lower.find(ext, i_prev)
if i != -1:
i += len(ext)
yield line[i_prev:i].strip()
i_prev = i
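# Illustrative behaviour of filenames_group_by_ext (made-up filenames; ext is expected lower-case, e.g. b'.mtl'):
#   list(filenames_group_by_ext(b'foo bar.mtl baz spam.MTL', b'.mtl'))
#       -> [b'foo bar.mtl', b'baz spam.MTL']
#   list(filenames_group_by_ext(b'"my lib.mtl" "other.mtl"', b'.mtl'))
#       -> [b'my lib.mtl', b'other.mtl']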
def obj_image_load(img_data, context_imagepath_map, line, DIR, recursive, relpath):
    """
    Try to load the image referenced on this line.
    We try all space-separated items from the current line when the file is not found with the last one
    (users keep generating/using image files with spaces in a format that does not support them, sigh...)
    Also tries to replace '_' with ' ', since Max's exporter replaces spaces with underscores.
    Also handle " chars (some software use those to protect filenames with spaces, see T67266... sic).
    Also corrects img_data (in case filenames with spaces have been split up in multiple entries, see T72148).
    """
filepath_parts = line.split(b' ')
start = line.find(b'"') + 1
if start != 0:
end = line.find(b'"', start)
if end != 0:
filepath_parts = (line[start:end],)
image = None
for i in range(-1, -len(filepath_parts), -1):
imagepath = os.fsdecode(b" ".join(filepath_parts[i:]))
image = context_imagepath_map.get(imagepath, ...)
if image is ...:
image = load_image(imagepath, DIR, recursive=recursive, relpath=relpath)
if image is None and "_" in imagepath:
image = load_image(imagepath.replace("_", " "), DIR, recursive=recursive, relpath=relpath)
if image is not None:
context_imagepath_map[imagepath] = image
del img_data[i:]
img_data.append(imagepath)
                break
else:
del img_data[i:]
img_data.append(imagepath)
            break
if image is None:
imagepath = os.fsdecode(filepath_parts[-1])
image = load_image(imagepath, DIR, recursive=recursive, place_holder=True, relpath=relpath)
context_imagepath_map[imagepath] = image
return image
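# Illustrative lookup order (made-up filename): for a line b'map_Kd my image.png',
# filepath_parts is [b'map_Kd', b'my', b'image.png']; the loop tries b'image.png',
# then b'my image.png' (also retrying each with '_' replaced by ' '), caching results in
# context_imagepath_map, and finally falls back to a placeholder image if nothing loads.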
def create_materials(filepath, relpath,
material_libs, unique_materials,
use_image_search, float_func):
    """
    Create all the used materials in this obj,
    assign colors and images to the materials from all referenced material libs
    """
    from math import sqrt
    from bpy_extras import node_shader_utils

    DIR = os.path.dirname(filepath)
context_material_vars = set()
# Don't load the same image multiple times
context_imagepath_map = {}
nodal_material_wrap_map = {}
def load_material_image(blender_material, mat_wrap, context_material_name, img_data, line, type):
"""
Set textures defined in .mtl file.
"""
map_options = {}
# Absolute path - c:\.. etc would work here
image = obj_image_load(img_data, context_imagepath_map, line, DIR, use_image_search, relpath)
curr_token = []
for token in img_data[:-1]:
if token.startswith(b'-') and token[1:].isalpha():
if curr_token:
map_options[curr_token[0]] = curr_token[1:]
curr_token[:] = []
curr_token.append(token)
if curr_token:
map_options[curr_token[0]] = curr_token[1:]
map_offset = map_options.get(b'-o')
map_scale = map_options.get(b'-s')
if map_offset is not None:
map_offset = tuple(map(float_func, map_offset))
if map_scale is not None:
map_scale = tuple(map(float_func, map_scale))
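        # Illustrative example (made-up values): a line b'map_Kd -o 0.1 0.1 0.0 -s 2 2 1 tex.png'
        # yields map_options == {b'-o': [b'0.1', b'0.1', b'0.0'], b'-s': [b'2', b'2', b'1']},
        # hence map_offset == (0.1, 0.1, 0.0) and map_scale == (2.0, 2.0, 1.0).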
def _generic_tex_set(nodetex, image, texcoords, translation, scale):
nodetex.image = image
nodetex.texcoords = texcoords
if translation is not None:
nodetex.translation = translation
if scale is not None:
nodetex.scale = scale
# Adds textures for materials (rendering)
if type == 'Kd':
_generic_tex_set(mat_wrap.base_color_texture, image, 'UV', map_offset, map_scale)
elif type == 'Ka':
# XXX Not supported?
print("WARNING, currently unsupported ambient texture, skipped.")
elif type == 'Ks':
_generic_tex_set(mat_wrap.specular_texture, image, 'UV', map_offset, map_scale)
elif type == 'Ke':
_generic_tex_set(mat_wrap.emission_color_texture, image, 'UV', map_offset, map_scale)
mat_wrap.emission_strength = 1.0
elif type == 'Bump':
bump_mult = map_options.get(b'-bm')
bump_mult = float(bump_mult[0]) if (bump_mult and len(bump_mult[0]) > 1) else 1.0
mat_wrap.normalmap_strength_set(bump_mult)
_generic_tex_set(mat_wrap.normalmap_texture, image, 'UV', map_offset, map_scale)
elif type == 'D':
_generic_tex_set(mat_wrap.alpha_texture, image, 'UV', map_offset, map_scale)
elif type == 'disp':
# XXX Not supported?
print("WARNING, currently unsupported displacement texture, skipped.")
# ~ mat_wrap.bump_image_set(image)
# ~ mat_wrap.bump_mapping_set(coords='UV', translation=map_offset, scale=map_scale)
elif type == 'refl':
map_type = map_options.get(b'-type')
if map_type and map_type != [b'sphere']:
print("WARNING, unsupported reflection type '%s', defaulting to 'sphere'"
"" % ' '.join(i.decode() for i in map_type))
_generic_tex_set(mat_wrap.base_color_texture, image, 'Reflection', map_offset, map_scale)
mat_wrap.base_color_texture.projection = 'SPHERE'
        else:
            raise Exception("invalid type %r" % type)
def finalize_material(context_material, context_material_vars, spec_colors,
do_highlight, do_reflection, do_transparency, do_glass):
# Finalize previous mat, if any.
if context_material:
if "specular" in context_material_vars:
# XXX This is highly approximated, not sure whether we can do better...
# TODO: Find a way to guesstimate best value from diffuse color...
# IDEA: Use standard deviation of both spec and diff colors (i.e. how far away they are
                # from some grey), and apply the proportion between those two as tint factor?
spec = sum(spec_colors) / 3.0
# ~ spec_var = math.sqrt(sum((c - spec) ** 2 for c in spec_color) / 3.0)
# ~ diff = sum(context_mat_wrap.base_color) / 3.0
# ~ diff_var = math.sqrt(sum((c - diff) ** 2 for c in context_mat_wrap.base_color) / 3.0)
# ~ tint = min(1.0, spec_var / diff_var)
context_mat_wrap.specular = spec
context_mat_wrap.specular_tint = 0.0
if "roughness" not in context_material_vars:
context_mat_wrap.roughness = 0.0
# FIXME, how else to use this?
if do_highlight:
if "specular" not in context_material_vars:
context_mat_wrap.specular = 1.0
if "roughness" not in context_material_vars:
context_mat_wrap.roughness = 0.0
else:
if "specular" not in context_material_vars:
context_mat_wrap.specular = 0.0
if "roughness" not in context_material_vars:
context_mat_wrap.roughness = 1.0
if do_reflection:
if "metallic" not in context_material_vars:
context_mat_wrap.metallic = 1.0
else:
# since we are (ab)using ambient term for metallic (which can be non-zero)
context_mat_wrap.metallic = 0.0
if do_transparency:
if "ior" not in context_material_vars:
context_mat_wrap.ior = 1.0
if "alpha" not in context_material_vars:
context_mat_wrap.alpha = 1.0
# EEVEE only
context_material.blend_method = 'BLEND'
if do_glass:
if "ior" not in context_material_vars:
context_mat_wrap.ior = 1.5
# Try to find a MTL with the same name as the OBJ if no MTLs are specified.
temp_mtl = os.path.splitext((os.path.basename(filepath)))[0] + ".mtl"
if os.path.exists(os.path.join(DIR, temp_mtl)):
material_libs.add(temp_mtl)
    del temp_mtl

    for name in unique_materials:  # .keys()
        ma_name = "Default OBJ" if name is None else name.decode('utf-8', "replace")
ma = unique_materials[name] = bpy.data.materials.new(ma_name)
ma_wrap = node_shader_utils.PrincipledBSDFWrapper(ma, is_readonly=False)
nodal_material_wrap_map[ma] = ma_wrap
ma_wrap.use_nodes = True
    for libname in sorted(material_libs):
        mtlpath = os.path.join(DIR, libname)
        if not os.path.exists(mtlpath):
print("\tMaterial not found MTL: %r" % mtlpath)
else:
# Note: with modern Principled BSDF shader, things like ambient, raytrace or fresnel are always 'ON'
# (i.e. automatically controlled by other parameters).
do_highlight = False
do_reflection = False
do_transparency = False
do_glass = False
spec_colors = [0.0, 0.0, 0.0]
# print('\t\tloading mtl: %e' % mtlpath)
            context_material = None
            context_mat_wrap = None
mtl = open(mtlpath, 'rb')
            for line in mtl:  # .readlines():
line = line.strip()
if not line or line.startswith(b'#'):
continue
line_split = line.split()
line_id = line_split[0].lower()
if line_id == b'newmtl':
# Finalize previous mat, if any.
finalize_material(context_material, context_material_vars, spec_colors,
do_highlight, do_reflection, do_transparency, do_glass)
context_material_name = line_value(line_split)
context_material = unique_materials.get(context_material_name)
if context_material is not None:
context_mat_wrap = nodal_material_wrap_map[context_material]
context_material_vars.clear()
spec_colors[:] = [0.0, 0.0, 0.0]
do_highlight = False
do_reflection = False
do_transparency = False
do_glass = False
elif context_material:
def _get_colors(line_split):
                            # OBJ 'allows' one- or two-component values; treat a single component as greyscale, and two components as blue = 0.0.
ln = len(line_split)
if ln == 2:
return [float_func(line_split[1])] * 3
elif ln == 3:
return [float_func(line_split[1]), float_func(line_split[2]), 0.0]
else:
return [float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])]
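                        # Illustrative examples (made-up values):
                        #   _get_colors(b'Kd 0.8'.split())         -> [0.8, 0.8, 0.8]
                        #   _get_colors(b'Kd 0.8 0.5'.split())     -> [0.8, 0.5, 0.0]
                        #   _get_colors(b'Kd 0.8 0.5 0.2'.split()) -> [0.8, 0.5, 0.2]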
# we need to make a material to assign properties to it.
if line_id == b'ka':
refl = sum(_get_colors(line_split)) / 3.0
context_mat_wrap.metallic = refl
context_material_vars.add("metallic")
elif line_id == b'kd':
context_mat_wrap.base_color = _get_colors(line_split)
elif line_id == b'ks':
spec_colors[:] = _get_colors(line_split)
context_material_vars.add("specular")
elif line_id == b'ke':
# We cannot set context_material.emit right now, we need final diffuse color as well for this.
context_mat_wrap.emission_color = _get_colors(line_split)
context_mat_wrap.emission_strength = 1.0
elif line_id == b'ns':
# XXX Totally empirical conversion, trying to adapt it
# (from 0.0 - 1000.0 OBJ specular exponent range to 1.0 - 0.0 Principled BSDF range)...
val = max(0.0, min(1000.0, float_func(line_split[1])))
context_mat_wrap.roughness = 1.0 - (sqrt(val / 1000))
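                            # e.g. (illustration) Ns 250 -> roughness = 1.0 - sqrt(250 / 1000) = 0.5;
                            # Ns 1000 -> roughness 0.0 (sharpest), Ns 0 -> roughness 1.0 (fully rough).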
context_material_vars.add("roughness")
elif line_id == b'ni': # Refraction index (between 0.001 and 10).
context_mat_wrap.ior = float_func(line_split[1])
context_material_vars.add("ior")
elif line_id == b'd': # dissolve (transparency)
context_mat_wrap.alpha = float_func(line_split[1])
context_material_vars.add("alpha")
elif line_id == b'tr': # translucency
print("WARNING, currently unsupported 'tr' translucency option, skipped.")
elif line_id == b'tf':
# rgb, filter color, blender has no support for this.
print("WARNING, currently unsupported 'tf' filter color option, skipped.")
elif line_id == b'illum':
# Some MTL files incorrectly use a float for this value, see T60135.
illum = any_number_as_int(line_split[1])
# inline comments are from the spec, v4.2
if illum == 0:
# Color on and Ambient off
print("WARNING, Principled BSDF shader does not support illumination 0 mode "
"(colors with no ambient), skipped.")
elif illum == 1:
# Color on and Ambient on
pass
elif illum == 2:
# Highlight on
do_highlight = True
elif illum == 3:
# Reflection on and Ray trace on
do_reflection = True
elif illum == 4:
# Transparency: Glass on
# Reflection: Ray trace on
do_transparency = True
do_reflection = True
do_glass = True
elif illum == 5:
# Reflection: Fresnel on and Ray trace on
do_reflection = True
elif illum == 6:
# Transparency: Refraction on
# Reflection: Fresnel off and Ray trace on
do_transparency = True
do_reflection = True
elif illum == 7:
# Transparency: Refraction on
# Reflection: Fresnel on and Ray trace on
do_transparency = True
do_reflection = True
elif illum == 8:
# Reflection on and Ray trace off
do_reflection = True
elif illum == 9:
# Transparency: Glass on
# Reflection: Ray trace off
do_transparency = True
do_glass = True
elif illum == 10:
# Casts shadows onto invisible surfaces
print("WARNING, Principled BSDF shader does not support illumination 10 mode "
"(cast shadows on invisible surfaces), skipped.")
pass
elif line_id == b'map_ka':
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Ka')
elif line_id == b'map_ks':
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Ks')
elif line_id == b'map_kd':
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Kd')
elif line_id == b'map_ke':
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Ke')
elif line_id in {b'map_bump', b'bump'}: # 'bump' is incorrect but some files use it.
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'Bump')
elif line_id in {b'map_d', b'map_tr'}: # Alpha map - Dissolve
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'D')
elif line_id in {b'map_disp', b'disp'}: # displacementmap
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'disp')
elif line_id in {b'map_refl', b'refl'}: # reflectionmap
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_mat_wrap,
context_material_name, img_data, line, 'refl')
else:
print("WARNING: %r:%r (ignored)" % (filepath, line))
# Finalize last mat, if any.
finalize_material(context_material, context_material_vars, spec_colors,
do_highlight, do_reflection, do_transparency, do_glass)
mtl.close()
def face_is_edge(face):
"""Simple check to test whether given (temp, working) data is an edge, and not a real face."""
face_vert_loc_indices = face[0]
face_vert_nor_indices = face[1]
return len(face_vert_nor_indices) == 1 or len(face_vert_loc_indices) == 2
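# Reminder of the temporary face-tuple layout used throughout this module:
# (loc_indices, nor_indices, tex_indices, material, smooth_group, object_key, invalid_blenpoly).
# OBJ polylines ('l ...') are stored in the same structure and flagged by a single normal
# index or only two vertex indices, which is what face_is_edge() tests for.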
def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
    """
    Takes vert_loc and faces, and separates into multiple sets of
    (verts_loc, faces, unique_materials, dataname)
    """
filename = os.path.splitext((os.path.basename(filepath)))[0]
if not SPLIT_OB_OR_GROUP or not faces:
use_verts_nor = any(f[1] for f in faces)
use_verts_tex = any(f[2] for f in faces)
# use the filename for the object name since we aren't chopping up the mesh.
return [(verts_loc, faces, unique_materials, filename, use_verts_nor, use_verts_tex)]
def key_to_name(key):
# if the key is a tuple, join it to make a string
if not key:
            return filename  # assume it's a string. make sure this is true if the splitting code is changed
        elif isinstance(key, bytes):
            return key.decode('utf-8', 'replace')
else:
return "_".join(k.decode('utf-8', 'replace') for k in key)
    # Return a key that makes the faces unique.
    face_split_dict = {}

    oldkey = -1  # initialize to a value that will never match the key
    for face in faces:
        (face_vert_loc_indices,
         face_vert_nor_indices,
         face_vert_tex_indices,
         context_material,
         context_smooth_group,
         context_object_key,
         face_invalid_blenpoly,
         ) = face
        key = context_object_key
if oldkey != key:
# Check the key has changed.
(verts_split, faces_split, unique_materials_split, vert_remap,
use_verts_nor, use_verts_tex) = face_split_dict.setdefault(key, ([], [], {}, {}, [], []))
if not face_is_edge(face):
if not use_verts_nor and face_vert_nor_indices:
use_verts_nor.append(True)
if not use_verts_tex and face_vert_tex_indices:
use_verts_tex.append(True)
# Remap verts to new vert list and add where needed
for loop_idx, vert_idx in enumerate(face_vert_loc_indices):
map_index = vert_remap.get(vert_idx)
if map_index is None:
map_index = len(verts_split)
vert_remap[vert_idx] = map_index # set the new remapped index so we only add once and can reference next time.
verts_split.append(verts_loc[vert_idx]) # add the vert to the local verts
face_vert_loc_indices[loop_idx] = map_index # remap to the local index
if context_material not in unique_materials_split:
unique_materials_split[context_material] = unique_materials[context_material]
faces_split.append(face)
# remove one of the items and reorder
return [(verts_split, faces_split, unique_materials_split, key_to_name(key), bool(use_vnor), bool(use_vtex))
for key, (verts_split, faces_split, unique_materials_split, _, use_vnor, use_vtex)
in face_split_dict.items()]
def create_mesh(new_objects,
                use_edges,
                verts_loc,
                verts_nor,
                verts_tex,
faces,
unique_materials,
unique_smooth_groups,
vertex_groups,
dataname,
):
    """
    Takes all the data gathered and generates a mesh, adding the new object to new_objects
    deals with ngons, sharp edges and assigning materials
    """
if unique_smooth_groups:
sharp_edges = set()
        smooth_group_users = {context_smooth_group: {} for context_smooth_group in unique_smooth_groups.keys()}
        context_smooth_group_old = -1

    fgon_edges = set()  # Used for storing fgon keys when we need to tessellate/untessellate them (ngons with hole).
edges = []
tot_loops = 0
# reverse loop through face indices
    for f_idx in range(len(faces) - 1, -1, -1):
        face = faces[f_idx]

        (face_vert_loc_indices,
         face_vert_nor_indices,
face_vert_tex_indices,
context_material,
context_smooth_group,
         context_object_key,
face_invalid_blenpoly,
) = face
len_face_vert_loc_indices = len(face_vert_loc_indices)
if len_face_vert_loc_indices == 1:
            faces.pop(f_idx)  # can't add single vert faces
# Face with a single item in face_vert_nor_indices is actually a polyline!
elif face_is_edge(face):
edges.extend((face_vert_loc_indices[i], face_vert_loc_indices[i + 1])
for i in range(len_face_vert_loc_indices - 1))
faces.pop(f_idx)
# Smooth Group
if unique_smooth_groups and context_smooth_group:
# Is a part of of a smooth group and is a face
if context_smooth_group_old is not context_smooth_group:
edge_dict = smooth_group_users[context_smooth_group]
context_smooth_group_old = context_smooth_group
prev_vidx = face_vert_loc_indices[-1]
for vidx in face_vert_loc_indices:
edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
prev_vidx = vidx
edge_dict[edge_key] = edge_dict.get(edge_key, 0) + 1
# NGons into triangles
if face_invalid_blenpoly:
# ignore triangles with invalid indices
if len(face_vert_loc_indices) > 3:
from bpy_extras.mesh_utils import ngon_tessellate
ngon_face_indices = ngon_tessellate(verts_loc, face_vert_loc_indices, debug_print=bpy.app.debug)
faces.extend([([face_vert_loc_indices[ngon[0]],
face_vert_loc_indices[ngon[1]],
face_vert_loc_indices[ngon[2]],
],
[face_vert_nor_indices[ngon[0]],
face_vert_nor_indices[ngon[1]],
face_vert_nor_indices[ngon[2]],
] if face_vert_nor_indices else [],
[face_vert_tex_indices[ngon[0]],
face_vert_tex_indices[ngon[1]],
face_vert_tex_indices[ngon[2]],
] if face_vert_tex_indices else [],
context_material,
                                   context_smooth_group,
                                   context_object_key,
                                   [],
)
for ngon in ngon_face_indices]
)
tot_loops += 3 * len(ngon_face_indices)
# edges to make ngons
if len(ngon_face_indices) > 1:
edge_users = set()
for ngon in ngon_face_indices:
prev_vidx = face_vert_loc_indices[ngon[-1]]
for ngidx in ngon:
vidx = face_vert_loc_indices[ngidx]
if vidx == prev_vidx:
continue # broken OBJ... Just skip.
edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
prev_vidx = vidx
if edge_key in edge_users:
fgon_edges.add(edge_key)
else:
edge_users.add(edge_key)
faces.pop(f_idx)
else:
tot_loops += len_face_vert_loc_indices
# Build sharp edges
if unique_smooth_groups:
for edge_dict in smooth_group_users.values():
for key, users in edge_dict.items():
if users == 1: # This edge is on the boundary of a group
sharp_edges.add(key)
# map the material names to an index
    material_mapping = {name: i for i, name in enumerate(unique_materials)}  # enumerate over unique_materials keys()

    materials = [None] * len(unique_materials)
    for name, index in material_mapping.items():
        materials[index] = unique_materials[name]

    me = bpy.data.meshes.new(dataname)
# make sure the list isn't too big
for material in materials:
me.materials.append(material)
me.vertices.add(len(verts_loc))
me.loops.add(tot_loops)
me.polygons.add(len(faces))
# verts_loc is a list of (x, y, z) tuples
me.vertices.foreach_set("co", unpack_list(verts_loc))
loops_vert_idx = tuple(vidx for (face_vert_loc_indices, _, _, _, _, _, _) in faces for vidx in face_vert_loc_indices)
faces_loop_start = []
lidx = 0
for f in faces:
face_vert_loc_indices = f[0]
nbr_vidx = len(face_vert_loc_indices)
faces_loop_start.append(lidx)
lidx += nbr_vidx
faces_loop_total = tuple(len(face_vert_loc_indices) for (face_vert_loc_indices, _, _, _, _, _, _) in faces)
me.loops.foreach_set("vertex_index", loops_vert_idx)
me.polygons.foreach_set("loop_start", faces_loop_start)
me.polygons.foreach_set("loop_total", faces_loop_total)
faces_ma_index = tuple(material_mapping[context_material] for (_, _, _, context_material, _, _, _) in faces)
me.polygons.foreach_set("material_index", faces_ma_index)
faces_use_smooth = tuple(bool(context_smooth_group) for (_, _, _, _, context_smooth_group, _, _) in faces)
me.polygons.foreach_set("use_smooth", faces_use_smooth)
# Note: we store 'temp' normals in loops, since validate() may alter final mesh,
# we can only set custom lnors *after* calling it.
me.create_normals_split()
loops_nor = tuple(no for (_, face_vert_nor_indices, _, _, _, _, _) in faces
for face_noidx in face_vert_nor_indices
for no in verts_nor[face_noidx])
me.loops.foreach_set("normal", loops_nor)
if verts_tex and me.polygons:
# Some files Do not explicitly write the 'v' value when it's 0.0, see T68249...
verts_tex = [uv if len(uv) == 2 else uv + [0.0] for uv in verts_tex]
me.uv_layers.new(do_init=False)
loops_uv = tuple(uv for (_, _, face_vert_tex_indices, _, _, _, _) in faces
for face_uvidx in face_vert_tex_indices
for uv in verts_tex[face_uvidx])
me.uv_layers[0].data.foreach_set("uv", loops_uv)
    use_edges = use_edges and bool(edges)
    if use_edges:
        me.edges.add(len(edges))
        # edges should be a list of (a, b) tuples
        me.edges.foreach_set("vertices", unpack_list(edges))
me.validate(clean_customdata=False) # *Very* important to not remove lnors here!
me.update(calc_edges=use_edges, calc_edges_loose=use_edges)
# Un-tessellate as much as possible, in case we had to triangulate some ngons...
if fgon_edges:
import bmesh
bm = bmesh.new()
bm.from_mesh(me)
verts = bm.verts[:]
get = bm.edges.get
edges = [get((verts[vidx1], verts[vidx2])) for vidx1, vidx2 in fgon_edges]
try:
bmesh.ops.dissolve_edges(bm, edges=edges, use_verts=False)
except:
# Possible dissolve fails for some edges, but don't fail silently in case this is a real bug.
import traceback
traceback.print_exc()
bm.to_mesh(me)
bm.free()
# XXX If validate changes the geometry, this is likely to be broken...
if unique_smooth_groups and sharp_edges:
for e in me.edges:
if e.key in sharp_edges:
e.use_edge_sharp = True
if verts_nor:
clnors = array.array('f', [0.0] * (len(me.loops) * 3))
me.loops.foreach_get("normal", clnors)
if not unique_smooth_groups:
me.polygons.foreach_set("use_smooth", [True] * len(me.polygons))
me.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3)))
me.use_auto_smooth = True
ob = bpy.data.objects.new(me.name, me)
new_objects.append(ob)
# Create the vertex groups. No need to have the flag passed here since we test for the
# content of the vertex_groups. If the user selects to NOT have vertex groups saved then
# the following test will never run
for group_name, group_indices in vertex_groups.items():
group = ob.vertex_groups.new(name=group_name.decode('utf-8', "replace"))
group.add(group_indices, 1.0, 'REPLACE')
def create_nurbs(context_nurbs, vert_loc, new_objects):
    """
    Add nurbs object to blender, only support one type at the moment
    """
deg = context_nurbs.get(b'deg', (3,))
curv_range = context_nurbs.get(b'curv_range')
curv_idx = context_nurbs.get(b'curv_idx', [])
parm_u = context_nurbs.get(b'parm_u', [])
parm_v = context_nurbs.get(b'parm_v', [])
name = context_nurbs.get(b'name', b'ObjNurb')
cstype = context_nurbs.get(b'cstype')
if cstype is None:
print('\tWarning, cstype not found')
return
if cstype != b'bspline':
print('\tWarning, cstype is not supported (only bspline)')
return
if not curv_idx:
print('\tWarning, curv argument empty or not set')
return
if len(deg) > 1 or parm_v:
print('\tWarning, surfaces not supported')
return
cu = bpy.data.curves.new(name.decode('utf-8', "replace"), 'CURVE')
cu.dimensions = '3D'
nu = cu.splines.new('NURBS')
nu.points.add(len(curv_idx) - 1) # a point is added to start with
nu.points.foreach_set("co", [co_axis for vt_idx in curv_idx for co_axis in (vert_loc[vt_idx] + [1.0])])
nu.order_u = deg[0] + 1
    # get the endpoint flag from the weighting
    if curv_range and len(parm_u) > deg[0] + 1:
        do_endpoints = True
        for i in range(deg[0] + 1):

            if abs(parm_u[i] - curv_range[0]) > 0.0001:
                do_endpoints = False
                break

            if abs(parm_u[-(i + 1)] - curv_range[1]) > 0.0001:
                do_endpoints = False
                break
else:
do_endpoints = False
if do_endpoints:
nu.use_endpoint_u = True
# close
'''
do_closed = False
if len(parm_u) > deg[0]+1:
for i in xrange(deg[0]+1):
#print curv_idx[i], curv_idx[-(i+1)]
if curv_idx[i]==curv_idx[-(i+1)]:
do_closed = True
break
if do_closed:
nu.use_cyclic_u = True
'''
ob = bpy.data.objects.new(name.decode('utf-8', "replace"), cu)
new_objects.append(ob)
def strip_slash(line_split):
if line_split[-1][-1] == 92: # '\' char
if len(line_split[-1]) == 1:
line_split.pop() # remove the \ item
else:
line_split[-1] = line_split[-1][:-1] # remove the \ from the end last number
return True
return False
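# e.g. (illustration) a continued face line split as [b'f', b'1', b'2', b'3', b'\\']
# strips to [b'f', b'1', b'2', b'3'] and returns True, telling the caller to merge the
# next physical line into the same logical OBJ statement.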
def get_float_func(filepath):
    """
    find the float function for this obj file
    - whether to replace commas or not
    """
    file = open(filepath, 'rb')
    for line in file:  # .readlines():
        line = line.lstrip()
        if line.startswith(b'v'):  # vn vt v
            if b',' in line:
                file.close()
                return lambda f: float(f.replace(b',', b'.'))
            elif b'.' in line:
                file.close()
                return float

    file.close()
    # in case all vert values were ints.
    return float
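# e.g. a file whose vertex lines read b'v 1,0 2,5 0,0' (comma decimal separators) gets a
# wrapper that rewrites commas to dots before parsing; b'v 1.0 2.5 0.0' just uses float().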
def any_number_as_int(svalue):
svalue = svalue.replace(b',', b'.')
return int(float(svalue))
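# e.g. any_number_as_int(b'2') == 2, any_number_as_int(b'2.0') == 2, any_number_as_int(b'2,5') == 2.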
def load(context,
filepath,
*,
use_smooth_groups=True,
use_edges=True,
use_split_objects=True,
use_split_groups=False,
use_groups_as_vgroups=False,
relpath=None,
         global_matrix=None,
         ):
    """
    Called by the user interface or another script.
load_obj(path) - should give acceptable results.
This function passes the file and sends the data off
to be split into objects and then converted into mesh objects
    """
def unique_name(existing_names, name_orig):
i = 0
if name_orig is None:
name_orig = b"ObjObject"
name = name_orig
while name in existing_names:
name = b"%s.%03d" % (name_orig, i)
i += 1
existing_names.add(name)
return name
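    # e.g. (illustration) successive requests for b'Cube' yield b'Cube', b'Cube.000', b'Cube.001', ...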
def handle_vec(line_start, context_multi_line, line_split, tag, data, vec, vec_len):
ret_context_multi_line = tag if strip_slash(line_split) else b''
if line_start == tag:
vec[:] = [float_func(v) for v in line_split[1:]]
elif context_multi_line == tag:
vec += [float_func(v) for v in line_split]
if not ret_context_multi_line:
data.append(tuple(vec[:vec_len]))
return ret_context_multi_line
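    # e.g. (illustration) a vertex continued over two physical lines,
    #   b'v 1.0 2.0 \\'  followed by  b'3.0',
    # keeps accumulating into vec and is only appended to data once the
    # continuation ends (when ret_context_multi_line becomes b'').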
def create_face(context_material, context_smooth_group, context_object_key):
face_vert_loc_indices = []
face_vert_nor_indices = []
face_vert_tex_indices = []
return (
face_vert_loc_indices,
face_vert_nor_indices,
face_vert_tex_indices,
context_material,
context_smooth_group,
            context_object_key,
[], # If non-empty, that face is a Blender-invalid ngon (holes...), need a mutable object for that...
)
with ProgressReport(context.window_manager) as progress:
progress.enter_substeps(1, "Importing OBJ %r..." % filepath)
if global_matrix is None:
global_matrix = mathutils.Matrix()
if use_split_objects or use_split_groups:
use_groups_as_vgroups = False
verts_loc = []
verts_nor = []
verts_tex = []
faces = [] # tuples of the faces
material_libs = set() # filenames to material libs this OBJ uses
vertex_groups = {} # when use_groups_as_vgroups is true
# Get the string to float conversion func for this file- is 'float' for almost all files.
float_func = get_float_func(filepath)
# Context variables
context_material = None
context_smooth_group = None
context_object_key = None
context_object_obpart = None
objects_names = set()
# Nurbs
context_nurbs = {}
nurbs = []
context_parm = b'' # used by nurbs too but could be used elsewhere
# Until we can use sets
use_default_material = False
unique_materials = {}
unique_smooth_groups = {}
# unique_obects= {} - no use for this variable since the objects are stored in the face.
# when there are faces that end with \
# it means they are multiline-
# since we use xreadline we cant skip to the next line