Newer
Older
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Script copyright (C) Campbell Barton
# Contributors: Campbell Barton, Jiri Hnidek, Paolo Ciccone
"""
This script imports a Wavefront OBJ files to Blender.
Usage:
Run this script from "File->Import" menu and then load the desired OBJ file.
Note, This loads mesh objects and materials only, nurbs and curves are not supported.
http://wiki.blender.org/index.php/Scripts/Manual/Import/wavefront_obj
"""
import os
import time
import bpy
import mathutils
from bpy_extras.image_utils import load_image
from progress_report import ProgressReport, ProgressReportSubstep
def line_value(line_split):
    """
    Returns 1 string representing the value for this line
    None will be returned if there's only 1 word
    """
    length = len(line_split)
    if length == 1:
        return None
    elif length == 2:
        return line_split[1]
    elif length > 2:
        # Value contained spaces: re-join everything after the keyword.
        return b' '.join(line_split[1:])
Campbell Barton
committed
def obj_image_load(imagepath, DIR, recursive, relpath):
    """
    Mainly uses comprehensiveImageLoad
    but tries to replace '_' with ' ' for Max's exporter replaces spaces with underscores.

    Returns the loaded image datablock; falls back to a placeholder image
    when the file cannot be found.
    """
    if "_" in imagepath:
        image = load_image(imagepath.replace("_", " "), DIR, recursive=recursive, relpath=relpath)
        if image:
            return image

    # Last try: load the path as-is, with a placeholder on failure so the
    # material keeps a (pink) texture slot instead of silently losing it.
    return load_image(imagepath, DIR, recursive=recursive, place_holder=True, relpath=relpath)
Campbell Barton
committed
def create_materials(filepath, relpath,
material_libs, unique_materials, unique_material_images,
use_image_search, float_func):
Create all the used materials in this obj,
assign colors and images to the materials from all referenced material libs
Campbell Barton
committed
context_material_vars = set()
def load_material_image(blender_material, context_material_name, img_data, type):
    """
    Set textures defined in .mtl file.

    img_data is the token list following a 'map_*' keyword: zero or more
    '-option value...' groups followed by the image path (always the last
    token).  Uses the enclosing scope's DIR, use_image_search, relpath,
    unique_material_images and context_material_vars.
    """
    imagepath = os.fsdecode(img_data[-1])
    map_options = {}

    # Gather the '-xxx val1 val2 ...' option groups preceding the path.
    curr_token = []
    for token in img_data[:-1]:
        if token.startswith(b'-'):
            if curr_token:
                map_options[curr_token[0]] = curr_token[1:]
            curr_token[:] = []
        curr_token.append(token)
    # FIX: flush the final option group — it was never stored, so the last
    # (often the only) '-bm'/'-o'/'-s'/'-type' option was silently dropped.
    if curr_token:
        map_options[curr_token[0]] = curr_token[1:]

    texture = bpy.data.textures.new(name=type, type='IMAGE')

    # Absolute path - c:\.. etc would work here
    image = obj_image_load(imagepath, DIR, use_image_search, relpath)

    texture.image = image

    # Adds textures for materials (rendering)
    if type == 'Kd':
        mtex = blender_material.texture_slots.add()
        mtex.texture = texture
        mtex.texture_coords = 'UV'
        mtex.use_map_color_diffuse = True

        # adds textures to faces (Textured/Alt-Z mode)
        # Only apply the diffuse texture to the face if the image has not
        # been set with the inline usemat func.
        unique_material_images[context_material_name] = image  # set the texface image
    elif type == 'Ka':
        mtex = blender_material.texture_slots.add()
        mtex.use_map_color_diffuse = False
        mtex.texture = texture
        mtex.texture_coords = 'UV'
        mtex.use_map_ambient = True
    elif type == 'Ks':
        mtex = blender_material.texture_slots.add()
        mtex.use_map_color_diffuse = False
        mtex.texture = texture
        mtex.texture_coords = 'UV'
        mtex.use_map_color_spec = True
    elif type == 'Ke':
        mtex = blender_material.texture_slots.add()
        mtex.use_map_color_diffuse = False
        mtex.texture = texture
        mtex.texture_coords = 'UV'
        mtex.use_map_emit = True
    elif type == 'Bump':
        mtex = blender_material.texture_slots.add()
        mtex.use_map_color_diffuse = False
        mtex.texture = texture
        mtex.texture_coords = 'UV'
        mtex.use_map_normal = True

        bump_mult = map_options.get(b'-bm')
        if bump_mult:
            # FIX: the token is bytes; convert before assigning the float prop.
            mtex.normal_factor = float(bump_mult[0])
    elif type == 'D':
        mtex = blender_material.texture_slots.add()
        mtex.use_map_color_diffuse = False
        mtex.texture = texture
        mtex.texture_coords = 'UV'
        mtex.use_map_alpha = True
        blender_material.use_transparency = True
        blender_material.transparency_method = 'Z_TRANSPARENCY'

        # Only zero the base alpha when the .mtl did not set it explicitly.
        if "alpha" not in context_material_vars:
            blender_material.alpha = 0.0

        # Todo, unset deffuse material alpha if it has an alpha channel
    elif type == 'disp':
        mtex = blender_material.texture_slots.add()
        mtex.use_map_color_diffuse = False
        mtex.texture = texture
        mtex.texture_coords = 'UV'
        mtex.use_map_displacement = True
    elif type == 'refl':
        mtex = blender_material.texture_slots.add()
        mtex.use_map_color_diffuse = False
        mtex.texture = texture
        mtex.texture_coords = 'REFLECTION'
        mtex.use_map_color_diffuse = True

        map_type = map_options.get(b'-type')
        if map_type and map_type != [b'sphere']:
            print("WARNING, unsupported reflection type '%s', defaulting to 'sphere'"
                  "" % ' '.join(i.decode() for i in map_type))
        mtex.mapping = 'SPHERE'
    else:
        # FIX: restore the missing 'else' — the raise must only trigger for
        # unknown map types, not unconditionally after the 'refl' branch.
        raise Exception("invalid type %r" % type)

    # Common '-o' (offset) / '-s' (scale) options, up to 3 components each.
    map_offset = map_options.get(b'-o')
    map_scale = map_options.get(b'-s')
    if map_offset:
        mtex.offset.x = float(map_offset[0])
        if len(map_offset) >= 2:
            mtex.offset.y = float(map_offset[1])
        if len(map_offset) >= 3:
            mtex.offset.z = float(map_offset[2])
    if map_scale:
        mtex.scale.x = float(map_scale[0])
        if len(map_scale) >= 2:
            mtex.scale.y = float(map_scale[1])
        if len(map_scale) >= 3:
            mtex.scale.z = float(map_scale[2])
# Add an MTL with the same name as the obj if no MTLs are spesified.
temp_mtl = os.path.splitext((os.path.basename(filepath)))[0] + ".mtl"
if os.path.exists(os.path.join(DIR, temp_mtl)):
material_libs.add(temp_mtl)
del temp_mtl
Campbell Barton
committed
unique_materials[name] = bpy.data.materials.new(name.decode('utf-8', "replace"))
unique_material_images[name] = None # assign None to all material images to start with, add to later.
# XXX Why was this needed? Cannot find any good reason, and adds stupid empty matslot in case we do not separate
# mesh (see T44947).
#~ unique_materials[None] = None
#~ unique_material_images[None] = None
for libname in sorted(material_libs):
if not os.path.exists(mtlpath):
print("\tMaterial not found MTL: %r" % mtlpath)
else:
do_ambient = True
do_highlight = False
do_reflection = False
do_transparency = False
do_glass = False
do_fresnel = False
do_raytrace = False
Bastien Montagne
committed
emit_colors = [0.0, 0.0, 0.0]
# print('\t\tloading mtl: %e' % mtlpath)
mtl = open(mtlpath, 'rb')
Campbell Barton
committed
line = line.strip()
if not line or line.startswith(b'#'):
continue
line_split = line.split()
line_id = line_split[0].lower()
if line_id == b'newmtl':
Bastien Montagne
committed
# Finalize previous mat, if any.
Bastien Montagne
committed
emit_value = sum(emit_colors) / 3.0
if emit_value > 1e-6:
# We have to adapt it to diffuse color too...
emit_value /= sum(context_material.diffuse_color) / 3.0
context_material.emit = emit_value
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
if not do_ambient:
context_material.ambient = 0.0
if do_highlight:
# FIXME, how else to use this?
context_material.specular_intensity = 1.0
if do_reflection:
context_material.raytrace_mirror.use = True
context_material.raytrace_mirror.reflect_factor = 1.0
if do_transparency:
context_material.use_transparency = True
context_material.transparency_method = 'RAYTRACE' if do_raytrace else 'Z_TRANSPARENCY'
if "alpha" not in context_material_vars:
context_material.alpha = 0.0
if do_glass:
if "ior" not in context_material_vars:
context_material.raytrace_transparency.ior = 1.5
if do_fresnel:
context_material.raytrace_mirror.fresnel = 1.0 # could be any value for 'ON'
"""
if do_raytrace:
context_material.use_raytrace = True
else:
context_material.use_raytrace = False
"""
# XXX, this is not following the OBJ spec, but this was
# written when raytracing wasnt default, annoying to disable for blender users.
context_material.use_raytrace = True
context_material_name = line_value(line_split)
context_material = unique_materials.get(context_material_name)
Campbell Barton
committed
context_material_vars.clear()
Bastien Montagne
committed
emit_colors[:] = [0.0, 0.0, 0.0]
do_ambient = True
do_highlight = False
do_reflection = False
do_transparency = False
do_glass = False
do_fresnel = False
do_raytrace = False
elif context_material:
# we need to make a material to assign properties to it.
if line_id == b'ka':
context_material.mirror_color = (
float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3]))
# This is highly approximated, but let's try to stick as close from exporter as possible... :/
context_material.ambient = sum(context_material.mirror_color) / 3
elif line_id == b'kd':
context_material.diffuse_color = (
float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3]))
context_material.diffuse_intensity = 1.0
elif line_id == b'ks':
context_material.specular_color = (
float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3]))
context_material.specular_intensity = 1.0
Bastien Montagne
committed
elif line_id == b'ke':
# We cannot set context_material.emit right now, we need final diffuse color as well for this.
emit_colors[:] = [
float_func(line_split[1]), float_func(line_split[2]), float_func(line_split[3])]
elif line_id == b'ns':
context_material.specular_hardness = int((float_func(line_split[1]) * 0.51) + 1)
elif line_id == b'ni': # Refraction index (between 1 and 3).
context_material.raytrace_transparency.ior = max(1, min(float_func(line_split[1]), 3))
Campbell Barton
committed
context_material_vars.add("ior")
elif line_id == b'd': # dissolve (transparency)
Campbell Barton
committed
context_material.alpha = float_func(line_split[1])
context_material.use_transparency = True
context_material.transparency_method = 'Z_TRANSPARENCY'
Campbell Barton
committed
context_material_vars.add("alpha")
elif line_id == b'tr': # translucency
context_material.translucency = float_func(line_split[1])
elif line_id == b'tf':
Campbell Barton
committed
# rgb, filter color, blender has no support for this.
pass
elif line_id == b'illum':
Campbell Barton
committed
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
illum = int(line_split[1])
# inline comments are from the spec, v4.2
if illum == 0:
# Color on and Ambient off
do_ambient = False
elif illum == 1:
# Color on and Ambient on
pass
elif illum == 2:
# Highlight on
do_highlight = True
elif illum == 3:
# Reflection on and Ray trace on
do_reflection = True
do_raytrace = True
elif illum == 4:
# Transparency: Glass on
# Reflection: Ray trace on
do_transparency = True
do_reflection = True
do_glass = True
do_raytrace = True
elif illum == 5:
# Reflection: Fresnel on and Ray trace on
do_reflection = True
do_fresnel = True
do_raytrace = True
elif illum == 6:
# Transparency: Refraction on
# Reflection: Fresnel off and Ray trace on
do_transparency = True
do_reflection = True
do_raytrace = True
elif illum == 7:
# Transparency: Refraction on
# Reflection: Fresnel on and Ray trace on
do_transparency = True
do_reflection = True
do_fresnel = True
do_raytrace = True
elif illum == 8:
# Reflection on and Ray trace off
do_reflection = True
elif illum == 9:
# Transparency: Glass on
# Reflection: Ray trace off
do_transparency = True
do_reflection = True
do_glass = True
elif illum == 10:
# Casts shadows onto invisible surfaces
Campbell Barton
committed
pass
elif line_id == b'map_ka':
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_material_name, img_data, 'Ka')
elif line_id == b'map_ks':
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_material_name, img_data, 'Ks')
elif line_id == b'map_kd':
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_material_name, img_data, 'Kd')
Bastien Montagne
committed
elif line_id == b'map_ke':
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_material_name, img_data, 'Ke')
elif line_id in {b'map_bump', b'bump'}: # 'bump' is incorrect but some files use it.
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_material_name, img_data, 'Bump')
elif line_id in {b'map_d', b'map_tr'}: # Alpha map - Dissolve
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_material_name, img_data, 'D')
elif line_id in {b'map_disp', b'disp'}: # displacementmap
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_material_name, img_data, 'disp')
Campbell Barton
committed
elif line_id in {b'map_refl', b'refl'}: # reflectionmap
img_data = line.split()[1:]
if img_data:
load_material_image(context_material, context_material_name, img_data, 'refl')
Campbell Barton
committed
else:
print("\t%r:%r (ignored)" % (filepath, line))
mtl.close()
def split_mesh(verts_loc, faces, unique_materials, filepath, SPLIT_OB_OR_GROUP):
Takes vert_loc and faces, and separates into multiple sets of
(verts_loc, faces, unique_materials, dataname)
filename = os.path.splitext((os.path.basename(filepath)))[0]
Bastien Montagne
committed
if not SPLIT_OB_OR_GROUP or not faces:
use_verts_nor = any((False if f[1] is ... else True) for f in faces)
use_verts_tex = any((False if f[2] is ... else True) for f in faces)
Bastien Montagne
committed
# use the filename for the object name since we aren't chopping up the mesh.
return [(verts_loc, faces, unique_materials, filename, use_verts_nor, use_verts_tex)]
def key_to_name(key):
# if the key is a tuple, join it to make a string
if not key:
return filename # assume its a string. make sure this is true if the splitting code is changed
else:
return key.decode('utf-8', 'replace')
# Return a key that makes the faces unique.
oldkey = -1 # initialize to a value that will never match the key
for face in faces:
if oldkey != key:
# Check the key has changed.
Bastien Montagne
committed
(verts_split, faces_split, unique_materials_split, vert_remap,
use_verts_nor, use_verts_tex) = face_split_dict.setdefault(key, ([], [], {}, {}, [], []))
if not use_verts_nor and face[1] is not ...:
Bastien Montagne
committed
use_verts_nor.append(True)
if not use_verts_tex and face[2] is not ...:
Bastien Montagne
committed
use_verts_tex.append(True)
# Remap verts to new vert list and add where needed
for enum, i in enumerate(face_vert_loc_indices):
map_index = vert_remap.get(i)
if map_index is None:
map_index = len(verts_split)
vert_remap[i] = map_index # set the new remapped index so we only add once and can reference next time.
verts_split.append(verts_loc[i]) # add the vert to the local verts
face_vert_loc_indices[enum] = map_index # remap to the local index
if matname and matname not in unique_materials_split:
unique_materials_split[matname] = unique_materials[matname]
faces_split.append(face)
# remove one of the items and reorder
Bastien Montagne
committed
return [(verts_split, faces_split, unique_materials_split, key_to_name(key), bool(use_vnor), bool(use_vtex))
for key, (verts_split, faces_split, unique_materials_split, _, use_vnor, use_vtex)
in face_split_dict.items()]
def create_mesh(new_objects,
verts_tex,
faces,
unique_materials,
unique_material_images,
unique_smooth_groups,
vertex_groups,
dataname,
):
Takes all the data gathered and generates a mesh, adding the new object to new_objects
deals with ngons, sharp edges and assigning materials
if unique_smooth_groups:
sharp_edges = set()
smooth_group_users = {context_smooth_group: {} for context_smooth_group in unique_smooth_groups.keys()}
fgon_edges = set() # Used for storing fgon keys when we need to tesselate/untesselate them (ngons with hole).
edges = []
tot_loops = 0
# reverse loop through face indices
face_vert_nor_indices,
face_vert_tex_indices,
context_material,
context_smooth_group,
context_object,
Bastien Montagne
committed
face_invalid_blenpoly,
len_face_vert_loc_indices = len(face_vert_loc_indices)
if len_face_vert_loc_indices == 1:
faces.pop(f_idx) # cant add single vert faces
Bastien Montagne
committed
# Face with a single item in face_vert_nor_indices is actually a polyline!
elif len(face_vert_nor_indices) == 1 or len_face_vert_loc_indices == 2:
edges.extend((face_vert_loc_indices[i], face_vert_loc_indices[i + 1])
for i in range(len_face_vert_loc_indices - 1))
faces.pop(f_idx)
# Smooth Group
if unique_smooth_groups and context_smooth_group:
# Is a part of of a smooth group and is a face
if context_smooth_group_old is not context_smooth_group:
edge_dict = smooth_group_users[context_smooth_group]
context_smooth_group_old = context_smooth_group
Bastien Montagne
committed
prev_vidx = face_vert_loc_indices[-1]
for vidx in face_vert_loc_indices:
edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
prev_vidx = vidx
edge_dict[edge_key] = edge_dict.get(edge_key, 0) + 1
Bastien Montagne
committed
# NGons into triangles
if face_invalid_blenpoly:
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
# ignore triangles with invalid indices
if len(face_vert_loc_indices) > 3:
from bpy_extras.mesh_utils import ngon_tessellate
ngon_face_indices = ngon_tessellate(verts_loc, face_vert_loc_indices)
faces.extend([([face_vert_loc_indices[ngon[0]],
face_vert_loc_indices[ngon[1]],
face_vert_loc_indices[ngon[2]],
],
[face_vert_nor_indices[ngon[0]],
face_vert_nor_indices[ngon[1]],
face_vert_nor_indices[ngon[2]],
] if face_vert_nor_indices else [],
[face_vert_tex_indices[ngon[0]],
face_vert_tex_indices[ngon[1]],
face_vert_tex_indices[ngon[2]],
] if face_vert_tex_indices else [],
context_material,
context_smooth_group,
context_object,
[],
)
for ngon in ngon_face_indices]
)
tot_loops += 3 * len(ngon_face_indices)
# edges to make ngons
if len(ngon_face_indices) > 1:
edge_users = set()
for ngon in ngon_face_indices:
prev_vidx = face_vert_loc_indices[ngon[-1]]
for ngidx in ngon:
vidx = face_vert_loc_indices[ngidx]
if vidx == prev_vidx:
continue # broken OBJ... Just skip.
edge_key = (prev_vidx, vidx) if (prev_vidx < vidx) else (vidx, prev_vidx)
prev_vidx = vidx
if edge_key in edge_users:
fgon_edges.add(edge_key)
else:
edge_users.add(edge_key)
Bastien Montagne
committed
faces.pop(f_idx)
else:
tot_loops += len_face_vert_loc_indices
# Build sharp edges
if unique_smooth_groups:
for edge_dict in smooth_group_users.values():
for key, users in edge_dict.items():
if users == 1: # This edge is on the boundry of a group
sharp_edges.add(key)
# map the material names to an index
material_mapping = {name: i for i, name in enumerate(unique_materials)} # enumerate over unique_materials keys()
for name, index in material_mapping.items():
me = bpy.data.meshes.new(dataname)
# make sure the list isnt too big
for material in materials:
me.materials.append(material)
me.vertices.add(len(verts_loc))
me.loops.add(tot_loops)
me.polygons.add(len(faces))
# verts_loc is a list of (x, y, z) tuples
me.vertices.foreach_set("co", unpack_list(verts_loc))
loops_vert_idx = []
faces_loop_start = []
faces_loop_total = []
lidx = 0
for f in faces:
vidx = f[0]
nbr_vidx = len(vidx)
loops_vert_idx.extend(vidx)
faces_loop_start.append(lidx)
faces_loop_total.append(nbr_vidx)
lidx += nbr_vidx
me.loops.foreach_set("vertex_index", loops_vert_idx)
me.polygons.foreach_set("loop_start", faces_loop_start)
me.polygons.foreach_set("loop_total", faces_loop_total)
# Note: we store 'temp' normals in loops, since validate() may alter final mesh,
# we can only set custom lnors *after* calling it.
me.create_normals_split()
if verts_tex and me.polygons:
me.uv_textures.new()
context_material_old = -1 # avoid a dict lookup
mat = 0 # rare case it may be un-initialized.
for i, (face, blen_poly) in enumerate(zip(faces, me.polygons)):
if len(face[0]) < 3:
raise Exception("bad face") # Shall not happen, we got rid of those earlier!
(face_vert_loc_indices,
face_vert_nor_indices,
face_vert_tex_indices,
context_material,
context_smooth_group,
context_object,
Bastien Montagne
committed
face_invalid_blenpoly,
if context_smooth_group:
blen_poly.use_smooth = True
if context_material:
if context_material_old is not context_material:
mat = material_mapping[context_material]
context_material_old = context_material
blen_poly.material_index = mat
Campbell Barton
committed
Bastien Montagne
committed
if verts_nor and face_vert_nor_indices:
for face_noidx, lidx in zip(face_vert_nor_indices, blen_poly.loop_indices):
me.loops[lidx].normal[:] = verts_nor[0 if (face_noidx is ...) else face_noidx]
Bastien Montagne
committed
if verts_tex and face_vert_tex_indices:
Campbell Barton
committed
if context_material:
image = unique_material_images[context_material]
if image: # Can be none if the material dosnt have an image.
me.uv_textures[0].data[i].image = image
Campbell Barton
committed
blen_uvs = me.uv_layers[0]
for face_uvidx, lidx in zip(face_vert_tex_indices, blen_poly.loop_indices):
blen_uvs.data[lidx].uv = verts_tex[0 if (face_uvidx is ...) else face_uvidx]
use_edges = use_edges and bool(edges)
me.edges.add(len(edges))
# edges should be a list of (a, b) tuples
me.edges.foreach_set("vertices", unpack_list(edges))
me.validate(clean_customdata=False) # *Very* important to not remove lnors here!
Philipp Oeser
committed
me.update(calc_edges=use_edges)
Bastien Montagne
committed
# Un-tessellate as much as possible, in case we had to triangulate some ngons...
if fgon_edges:
import bmesh
bm = bmesh.new()
bm.from_mesh(me)
verts = bm.verts[:]
get = bm.edges.get
edges = [get((verts[vidx1], verts[vidx2])) for vidx1, vidx2 in fgon_edges]
try:
bmesh.ops.dissolve_edges(bm, edges=edges, use_verts=False)
except:
# Possible dissolve fails for some edges, but don't fail silently in case this is a real bug.
import traceback
traceback.print_exc()
bm.to_mesh(me)
bm.free()
# XXX If validate changes the geometry, this is likely to be broken...
Philipp Oeser
committed
if unique_smooth_groups and sharp_edges:
for e in me.edges:
if e.key in sharp_edges:
e.use_edge_sharp = True
me.show_edge_sharp = True
if verts_nor:
clnors = array.array('f', [0.0] * (len(me.loops) * 3))
me.loops.foreach_get("normal", clnors)
if not unique_smooth_groups:
me.polygons.foreach_set("use_smooth", [True] * len(me.polygons))
me.normals_split_custom_set(tuple(zip(*(iter(clnors),) * 3)))
me.use_auto_smooth = True
me.show_edge_sharp = True
ob = bpy.data.objects.new(me.name, me)
new_objects.append(ob)
# Create the vertex groups. No need to have the flag passed here since we test for the
# content of the vertex_groups. If the user selects to NOT have vertex groups saved then
# the following test will never run
for group_name, group_indices in vertex_groups.items():
Campbell Barton
committed
group = ob.vertex_groups.new(group_name.decode('utf-8', "replace"))
group.add(group_indices, 1.0, 'REPLACE')
def create_nurbs(context_nurbs, vert_loc, new_objects):
    """
    Add nurbs object to blender, only support one type at the moment
    (single bspline curves; surfaces are rejected with a warning).
    """
    deg = context_nurbs.get(b'deg', (3,))
    curv_range = context_nurbs.get(b'curv_range')
    curv_idx = context_nurbs.get(b'curv_idx', [])
    parm_u = context_nurbs.get(b'parm_u', [])
    parm_v = context_nurbs.get(b'parm_v', [])
    name = context_nurbs.get(b'name', b'ObjNurb')
    cstype = context_nurbs.get(b'cstype')

    if cstype is None:
        print('\tWarning, cstype not found')
        return
    if cstype != b'bspline':
        print('\tWarning, cstype is not supported (only bspline)')
        return
    if not curv_idx:
        print('\tWarning, curv argument empty or not set')
        return
    if len(deg) > 1 or parm_v:
        print('\tWarning, surfaces not supported')
        return

    cu = bpy.data.curves.new(name.decode('utf-8', "replace"), 'CURVE')
    cu.dimensions = '3D'

    nu = cu.splines.new('NURBS')
    nu.points.add(len(curv_idx) - 1)  # a point is added to start with
    # Points need homogeneous coords: append weight 1.0 to each (x, y, z).
    nu.points.foreach_set("co", [co_axis for vt_idx in curv_idx for co_axis in (vert_loc[vt_idx] + (1.0,))])

    nu.order_u = deg[0] + 1

    # get for endpoint flag from the weighting: the curve is clamped when the
    # first/last (order) knots coincide with the curv_range bounds.
    # FIX: restore the guard and loop headers lost from this block — the bare
    # 'break' statements had no enclosing loop.
    if curv_range and len(parm_u) > deg[0] + 1:
        do_endpoints = True
        for i in range(deg[0] + 1):
            if abs(parm_u[i] - curv_range[0]) > 0.0001:
                do_endpoints = False
                break
            if abs(parm_u[-(i + 1)] - curv_range[1]) > 0.0001:
                do_endpoints = False
                break
    else:
        do_endpoints = False

    if do_endpoints:
        nu.use_endpoint_u = True

    # close
    '''
    do_closed = False
    if len(parm_u) > deg[0]+1:
        for i in xrange(deg[0]+1):
            #print curv_idx[i], curv_idx[-(i+1)]
            if curv_idx[i]==curv_idx[-(i+1)]:
                do_closed = True
                break

    if do_closed:
        nu.use_cyclic_u = True
    '''

    ob = bpy.data.objects.new(name.decode('utf-8', "replace"), cu)
    new_objects.append(ob)
def strip_slash(line_split):
    """
    Handle OBJ line continuations: if the last token ends with a backslash,
    strip it (dropping the token entirely when it IS the backslash) and
    return True to signal that the logical line continues; else return False.
    """
    last = line_split[-1]
    if last[-1] != 92:  # 92 == ord('\\')
        return False
    if len(last) == 1:
        # Token is just the backslash — discard it completely.
        del line_split[-1]
    else:
        # Trim the trailing backslash from the final token.
        line_split[-1] = last[:-1]
    return True
def get_float_func(filepath):
    """
    find the float function for this obj file
    - whether to replace commas or not

    Scans the file's first vertex-style line ('v'/'vn'/'vt'): locale-broken
    exporters write decimal commas, in which case a comma-fixing wrapper is
    returned instead of plain float.
    """
    # FIX: the scan loop header was missing (orphaned loop body) and the file
    # handle was never closed — use a with-block and iterate the lines.
    with open(filepath, 'rb') as file:
        for line in file:
            line = line.lstrip()
            if line.startswith(b'v'):  # vn vt v
                if b',' in line:
                    return lambda f: float(f.replace(b',', b'.'))
                elif b'.' in line:
                    return float

    # in case all vert values were ints
    return float
def load(context,
filepath,
*,
global_clamp_size=0.0,
use_smooth_groups=True,
use_edges=True,
use_split_objects=True,
use_split_groups=True,
use_image_search=True,
use_groups_as_vgroups=False,
Campbell Barton
committed
relpath=None,
Called by the user interface or another script.
load_obj(path) - should give acceptable results.
This function passes the file and sends the data off
to be split into objects and then converted into mesh objects
def handle_vec(line_start, context_multi_line, line_split, tag, data, vec, vec_len):
    """
    Parse one physical line of a (possibly backslash-continued) vector
    directive ('v' / 'vn' / 'vt').  Accumulates components into *vec* and,
    once the logical line is complete, appends the first *vec_len* of them
    to *data* as a tuple.  Returns *tag* while a continuation is pending,
    b'' otherwise.
    """
    pending = tag if strip_slash(line_split) else b''
    if line_start == tag:
        # First physical line of the directive: start a fresh vector.
        vec[:] = [float_func(v) for v in line_split[1:]]
    elif context_multi_line == tag:
        # Continuation line: every token is a further component.
        vec += [float_func(v) for v in line_split]
    if not pending:
        data.append(tuple(vec[:vec_len]))
    return pending
def create_face(context_material, context_smooth_group, context_object):
    """
    Build and return a fresh, empty face record: a 7-tuple of
    (loc indices, normal indices, tex indices, material, smooth group,
    object, invalid-ngon marker), with the three index lists and the
    marker starting empty.
    """
    loc_indices = []
    nor_indices = []
    tex_indices = []
    # If non-empty, that face is a Blender-invalid ngon (holes...), need a mutable object for that...
    invalid_blenpoly = []
    return (
        loc_indices,
        nor_indices,
        tex_indices,
        context_material,
        context_smooth_group,
        context_object,
        invalid_blenpoly,
    )
with ProgressReport(context.window_manager) as progress:
progress.enter_substeps(1, "Importing OBJ %r..." % filepath)
if global_matrix is None:
global_matrix = mathutils.Matrix()
if use_split_objects or use_split_groups:
use_groups_as_vgroups = False
time_main = time.time()
verts_loc = []
verts_nor = []
verts_tex = []
faces = [] # tuples of the faces
material_libs = set() # filenames to material libs this OBJ uses
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
vertex_groups = {} # when use_groups_as_vgroups is true
# Get the string to float conversion func for this file- is 'float' for almost all files.
float_func = get_float_func(filepath)
# Context variables
context_material = None
context_smooth_group = None
context_object = None
context_vgroup = None
# Nurbs
context_nurbs = {}
nurbs = []
context_parm = b'' # used by nurbs too but could be used elsewhere
# Until we can use sets
unique_materials = {}
unique_material_images = {}
unique_smooth_groups = {}
# unique_obects= {} - no use for this variable since the objects are stored in the face.
# when there are faces that end with \
# it means they are multiline-
# since we use xreadline we cant skip to the next line
# so we need to know whether
context_multi_line = b''
# Per-face handling data.
face_vert_loc_indices = None
face_vert_nor_indices = None
face_vert_tex_indices = None
face_vert_nor_valid = face_vert_tex_valid = False
face_items_usage = set()
face_invalid_blenpoly = None
prev_vidx = None
face = None
vec = []
progress.enter_substeps(3, "Parsing OBJ file...")
with open(filepath, 'rb') as f:
for line in f: # .readlines():
line_split = line.split()
if not line_split:
continue
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
line_start = line_split[0] # we compare with this a _lot_
if line_start == b'v' or context_multi_line == b'v':
context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'v', verts_loc, vec, 3)
elif line_start == b'vn' or context_multi_line == b'vn':
context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'vn', verts_nor, vec, 3)
elif line_start == b'vt' or context_multi_line == b'vt':
context_multi_line = handle_vec(line_start, context_multi_line, line_split, b'vt', verts_tex, vec, 2)
# Handle faces lines (as faces) and the second+ lines of fa multiline face here
# use 'f' not 'f ' because some objs (very rare have 'fo ' for faces)
elif line_start == b'f' or context_multi_line == b'f':
if not context_multi_line:
line_split = line_split[1:]
# Instantiate a face
face = create_face(context_material, context_smooth_group, context_object)
(face_vert_loc_indices, face_vert_nor_indices, face_vert_tex_indices,
_1, _2, _3, face_invalid_blenpoly) = face
faces.append(face)
face_items_usage.clear()
# Else, use face_vert_loc_indices and face_vert_tex_indices previously defined and used the obj_face
context_multi_line = b'f' if strip_slash(line_split) else b''
for v in line_split:
obj_vert = v.split(b'/')
idx = int(obj_vert[0]) - 1