# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Project Name: MakeHuman
# Product Home Page: http://www.makehuman.org/
# Code Home Page: http://code.google.com/p/makehuman/
# Authors: Thomas Larsson
# Script copyright (C) MakeHuman Team 2001-2013
# Coding Standards: See http://www.makehuman.org/node/165
"""
MHX (MakeHuman eXchange format) importer for Blender 2.6x.

This script should be distributed with Blender.
If not, place it in the .blender/scripts/addons directory.
Activate the script in the "Addons" tab (user preferences).
Access it from the File > Import menu.

Alternatively, run the script in the script editor (Alt-P) and access the importer
from the File > Import menu.
"""
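# Usage sketch (illustrative, not executed on import): once the add-on is enabled,
# the importer can also be driven from a script.  The operator id
# "import_scene.makehuman_mhx" and the "scale" option are assumptions about how
# this add-on registers itself, not guarantees.
#
#   import bpy
#   bpy.ops.import_scene.makehuman_mhx(filepath="/path/to/model.mhx", scale=0.1)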
bl_info = {
    'name': 'Import: MakeHuman (.mhx)',
    'author': 'Thomas Larsson',
    'version': (1, 16, 1),
    "blender": (2, 67, 1),
    'location': "File > Import > MakeHuman (.mhx)",
    'description': 'Import files in the MakeHuman eXchange format (.mhx)',
    'warning': '',
    'wiki_url': 'http://www.makehuman.org/documentation',
    'tracker_url': 'https://projects.blender.org/tracker/index.php?',
}
MAJOR_VERSION = 1
MINOR_VERSION = 16
FROM_VERSION = 13
SUB_VERSION = 1
majorVersion = MAJOR_VERSION
minorVersion = MINOR_VERSION
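# Version gate used by checkMhxVersion(): a file is accepted silently when it is
# MHX MAJOR_VERSION.minor with minor >= FROM_VERSION.  Anything else raises an
# error if the "Enforce version" toggle is set (or the file's minor version is
# newer than MINOR_VERSION), and otherwise only prints a warning.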
#
#
#
import bpy
import os
import time
import math
from mathutils import Vector, Matrix
from bpy.props import *
MHX249 = False
Blender24 = False
Blender25 = True
theDir = "~/makehuman/exports"
One = 1.0/theScale
useMesh = 1
verbosity = 2
warnedTextureDir = False
warnedVersion = False
true = True
false = False
Epsilon = 1e-6
nErrors = 0
theTempDatum = None
T_EnforceVersion = 0x01
T_Clothes = 0x02
T_HardParents = 0x0
T_CrashSafe = 0x0
T_Diamond = 0x10
T_Shapekeys = 0x40
T_ShapeDrivers = 0x80
T_Face = T_Shapekeys
T_Shape = T_Shapekeys
T_Mesh = 0x100
T_Armature = 0x200
T_Proxy = 0x400
T_Cage = 0x800
T_Opcns = 0x2000
DefaultToggle = ( T_EnforceVersion + T_Mesh + T_Armature +
T_Shapekeys + T_ShapeDrivers + T_Proxy + T_Clothes + T_Rigify )
toggle = DefaultToggle
toggleSettings = toggle
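# The T_* constants above are bit flags: the import options OR them into 'toggle'
# and the importer tests them with bitwise AND.  Illustration only:
#
#   wanted = T_EnforceVersion | T_Mesh | T_Armature | T_Clothes
#   if wanted & T_Proxy:    # 0 for this mask, so proxy import would be skipped
#       pass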
def initLoadedData():
global loadedData
loadedData = {
'NONE' : {},
'Object' : {},
'Mesh' : {},
'Armature' : {},
'Lamp' : {},
'Camera' : {},
'Lattice' : {},
'Curve' : {},
'Text' : {},
'Material' : {},
'Image' : {},
'MaterialTextureSlot' : {},
'Texture' : {},
'Bone' : {},
'BoneGroup' : {},
'Rigify' : {},
'Action' : {},
'Group' : {},
'MeshTextureFaceLayer' : {},
'MeshColorLayer' : {},
'VertexGroup' : {},
'ShapeKey' : {},
'ParticleSystem' : {},
'ObjectConstraints' : {},
'ObjectModifiers' : {},
'MaterialSlot' : {},
}
return
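# loadedData is a per-import registry: each parser stores the data blocks it creates
# under their MHX names (e.g. loadedData['Material'][name] = mat), so later sections
# such as objects, textures and drivers can resolve references by name.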
def reinitGlobalData():
global loadedData
for key in [
'MeshTextureFaceLayer', 'MeshColorLayer', 'VertexGroup', 'ShapeKey',
'ParticleSystem', 'ObjectConstraints', 'ObjectModifiers', 'MaterialSlot']:
loadedData[key] = {}
return

Plural = {
    'Object' : 'objects',
'Mesh' : 'meshes',
'Lattice' : 'lattices',
'Curve' : 'curves',
'Text' : 'texts',
'Group' : 'groups',
'Empty' : 'empties',
'Armature' : 'armatures',
'Bone' : 'bones',
'BoneGroup' : 'bone_groups',
'Pose' : 'poses',
'PoseBone' : 'pose_bones',
'Material' : 'materials',
'Texture' : 'textures',
'Image' : 'images',
'Camera' : 'cameras',
'Lamp' : 'lamps',
    'World' : 'worlds',
}
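# Plural maps an MHX type name to the matching bpy.data collection so that
# parseDefaultType() can build its creation call generically; for example
# Plural['Camera'] is 'cameras', which turns the expression into
# "bpy.data.cameras.new('<name>')".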
# readMhxFile(filePath):
def readMhxFile(filePath):
global todo, nErrors, theScale, theArmature, defaultScale, One
global toggle, warnedVersion, theMessage, alpha7, theDir
defaultScale = theScale
alpha7 = False
warnedVersion = False
initLoadedData()
theMessage = ""
theDir = os.path.dirname(filePath)
    fileName = os.path.expanduser(filePath)
    _,ext = os.path.splitext(fileName)
    if ext.lower() != ".mhx":
        print("Error: Not an mhx file: %s" % fileName.encode('utf-8', 'strict'))
        return
print( "Opening MHX file %s " % fileName.encode('utf-8', 'strict') )
print("Toggle %x" % toggle)
stack = []
tokens = []
key = "toplevel"
level = 0
nErrors = 0
comment = 0
nesting = 0
file= open(fileName, "rU")
print( "Tokenizing" )
lineNo = 0
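    # The loop below turns the MHX text into nested [key, args, sub] triples:
    # a line ending in ';' becomes a leaf token, any other line opens a new level
    # that is closed by the matching 'end' line, and '#if'/'#else'/'#endif' lines
    # switch whole blocks on or off before they reach the parser.
    # Illustration (hypothetical fragment, not executed):
    #
    #   Material Skin
    #     diffuse_color 0.8 0.6 0.5 ;
    #   end Material
    #
    # tokenizes to
    #
    #   [['Material', ['Skin'], [['diffuse_color', ['0.8', '0.6', '0.5'], []]]]]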
for line in file:
# print(line)
lineSplit= line.split()
lineNo += 1
if len(lineSplit) == 0:
pass
elif lineSplit[0][0] == '#':
if lineSplit[0] == '#if':
if comment == nesting:
try:
res = eval(lineSplit[1])
except:
res = False
if res:
comment += 1
nesting += 1
elif lineSplit[0] == '#else':
if comment == nesting-1:
comment += 1
elif comment == nesting:
comment -= 1
elif lineSplit[0] == '#endif':
if comment == nesting:
comment -= 1
nesting -= 1
elif comment < nesting:
pass
elif lineSplit[0] == 'end':
try:
sub = tokens
tokens = stack.pop()
if tokens:
tokens[-1][2] = sub
level -= 1
except:
print( "Tokenizer error at or before line %d.\nThe mhx file has been corrupted.\nTry to export it again from MakeHuman." % lineNo )
elif lineSplit[-1] == ';':
if lineSplit[0] == '\\':
key = lineSplit[1]
tokens.append([key,lineSplit[2:-1],[]])
else:
key = lineSplit[0]
tokens.append([key,lineSplit[1:-1],[]])
else:
key = lineSplit[0]
tokens.append([key,lineSplit[1:],[]])
stack.append(tokens)
level += 1
tokens = []
file.close()
if level != 0:
MyError("Tokenizer error (%d).\nThe mhx file has been corrupted.\nTry to export it again from MakeHuman." % level)

    parse(tokens)
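    # 'todo' holds expressions that could not be evaluated while parsing (forward
    # references pushed via pushOnTodoList, as in getObject below); they are executed
    # here with their saved globals/locals once the whole file has been read.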
for (expr, glbals, lcals) in todo:
try:
print("Doing %s" % expr)
exec(expr, glbals, lcals)
        except:
            print("Failed %s" % expr)
            nErrors += 1

    scn = bpy.context.scene
    scn.objects.active = theArmature
theArmature.MhAlpha8 = not alpha7
#bpy.ops.wm.properties_edit(data_path="object", property="MhxRig", value=theArmature["MhxRig"])
time2 = time.clock()
msg = "File %s loaded in %g s" % (fileName, time2-time1)
if nErrors:
msg += " but there where %d errors. " % (nErrors)
print(msg)
return
#
# getObject(name, var, glbals, lcals):
def getObject(name, var, glbals, lcals):
    try:
ob = loadedData['Object'][name]
except:
if name != "None":
            pushOnTodoList(None, "ob = loadedData['Object'][name]", globals(), locals())
ob = None
return ob
#
def checkMhxVersion(major, minor):
    global warnedVersion
    print("MHX", (major,minor), (MAJOR_VERSION, MINOR_VERSION), warnedVersion)
    if major != MAJOR_VERSION or minor < FROM_VERSION:
        msg = (
            "Wrong MHX version\n" +
            "Expected MHX %d.%02d but the loaded file " % (MAJOR_VERSION, MINOR_VERSION) +
            "has version MHX %d.%02d\n" % (major, minor))
        if minor < FROM_VERSION:
            msg += (
                "You can disable this error message by deselecting the \n" +
                "Enforce version option when importing. Better:\n" +
                "Export the MHX file again with an updated version of MakeHuman.\n" +
                "The most up-to-date version of MakeHuman is the nightly build.\n")
        else:
            msg += (
                "Download the most recent Blender build from www.graphicall.org. \n" +
                "The most up-to-date version of the import script is distributed\n" +
                "with Blender. It can also be downloaded from MakeHuman. \n" +
                "It is located in the importers/mhx/blender25x \n" +
                "folder and is called import_scene_mhx.py. \n")
        if toggle & T_EnforceVersion or minor > MINOR_VERSION:
            MyError(msg)
        else:
            print(msg)
            warnedVersion = True
    return
#
ifResult = False
def parse(tokens):
    global majorVersion, minorVersion, MHX249, theScale, One, defaultScale
for (key, val, sub) in tokens:
        if key == 'MHX':
            majorVersion = int(val[0])
minorVersion = int(val[1])
checkMhxVersion(majorVersion, minorVersion)
elif key == 'MHX249':
MHX249 = eval(val[0])
print("Blender 2.49 compatibility mode is %s\n" % MHX249)
elif MHX249:
pass
elif key == 'print':
msg = concatList(val)
print(msg)
elif key == 'warn':
msg = concatList(val)
print(msg)
elif key == 'error':
msg = concatList(val)
MyError(msg)
elif key == 'NoScale':
if eval(val[0]):
theScale = 1.0
else:
theScale = defaultScale
One = 1.0/theScale
elif key == "Object":
parseObject(val, sub)
elif key == "Mesh":
reinitGlobalData()
data = parseMesh(val, sub)
elif key == "Armature":
data = parseArmature(val, sub)
elif key == "Pose":
data = parsePose(val, sub)
elif key == "Action":
data = parseAction(val, sub)
elif key == "Material":
data = parseMaterial(val, sub)
elif key == "Texture":
data = parseTexture(val, sub)
elif key == "Image":
data = parseImage(val, sub)
elif key == "Curve":
data = parseCurve(val, sub)
elif key == "TextCurve":
data = parseTextCurve(val, sub)
elif key == "Lattice":
data = parseLattice(val, sub)
elif key == "Group":
data = parseGroup(val, sub)
elif key == "Lamp":
data = parseLamp(val, sub)
elif key == "World":
data = parseWorld(val, sub)
elif key == "Scene":
data = parseScene(val, sub)
elif key == "DefineProperty":
parseDefineProperty(val, sub)
elif key == "PostProcess":
postProcess(val)
hideLayers(val)
elif key == "CorrectRig":
correctRig(val)
elif key == "Rigify":
if toggle & T_Rigify:
rigifyMhx(bpy.context)
elif key == 'AnimationData':
try:
ob = loadedData['Object'][val[0]]
except:
ob = None
if ob:
bpy.context.scene.objects.active = ob
parseAnimationData(ob, val, sub)
elif key == 'MaterialAnimationData':
try:
ob = loadedData['Object'][val[0]]
except:
ob = None
if ob:
bpy.context.scene.objects.active = ob
mat = ob.data.materials[int(val[2])]
parseAnimationData(mat, val, sub)
elif key == 'ShapeKeys':
try:
ob = loadedData['Object'][val[0]]
except:
MyError("ShapeKeys object %s does not exist" % val[0])
parseShapeKeys(ob, ob.data, val, sub)
        else:
            data = parseDefaultType(key, val, sub)


def parseDefaultType(typ, args, tokens):
    name = args[0]
data = None
expr = "bpy.data.%s.new('%s')" % (Plural[typ], name)
data = eval(expr)
bpyType = typ.capitalize()
loadedData[bpyType][name] = data
    if data is None:
        return None
    for (key, val, sub) in tokens:
defaultKey(key, val, sub, 'data', [], globals(), locals())
return data

def concatList(elts):
    string = ""
for elt in elts:
string += " %s" % elt
return string
# parseAction(args, tokens):
# parseFCurve(fcu, args, tokens):
# parseKeyFramePoint(pt, args, tokens):
def parseAction(args, tokens):
    name = args[0]
if invalid(args[1]):
return
ob = bpy.context.object
bpy.ops.object.mode_set(mode='POSE')
if ob.animation_data:
ob.animation_data.action = None
created = {}
for (key, val, sub) in tokens:
if key == 'FCurve':
prepareActionFCurve(ob, created, val, sub)
act = ob.animation_data.action
loadedData['Action'][name] = act
if act is None:
print("Ignoring action %s" % name)
return act
act.name = name
print("Action", name, act, ob)
for (key, val, sub) in tokens:
if key == 'FCurve':
fcu = parseActionFCurve(act, ob, val, sub)
else:
defaultKey(key, val, sub, 'act', [], globals(), locals())
ob.animation_data.action = None
bpy.ops.object.mode_set(mode='OBJECT')
return act
def prepareActionFCurve(ob, created, args, tokens):
dataPath = args[0]
index = args[1]
(expr, channel) = channelFromDataPath(dataPath, index)
try:
if channel in created[expr]:
return
else:
created[expr].append(channel)
except:
created[expr] = [channel]
times = []
for (key, val, sub) in tokens:
if key == 'kp':
times.append(int(val[0]))
try:
data = eval(expr)
except:
print("Ignoring illegal expression: %s" % expr)
return
n = 0
for t in times:
#bpy.context.scene.current_frame = t
bpy.ops.anim.change_frame(frame = t)
try:
data.keyframe_insert(channel)
n += 1
except:
pass
#print("failed", data, expr, channel)
if n != len(times):
print("Mismatch", n, len(times), expr, channel)
return

def channelFromDataPath(dataPath, index):
    words = dataPath.split(']')
if len(words) == 1:
# location
expr = "ob"
channel = dataPath
elif len(words) == 2:
# pose.bones["tongue"].location
expr = "ob.%s]" % (words[0])
cwords = words[1].split('.')
channel = cwords[1]
elif len(words) == 3:
# pose.bones["brow.R"]["mad"]
expr = "ob.%s]" % (words[0])
cwords = words[1].split('"')
channel = cwords[1]
return (expr, channel)
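# Examples of the mapping above:
#   channelFromDataPath('location', 0)                       -> ('ob', 'location')
#   channelFromDataPath('pose.bones["tongue"].location', 1)  -> ('ob.pose.bones["tongue"]', 'location')
#   channelFromDataPath('pose.bones["brow.R"]["mad"]', 0)    -> ('ob.pose.bones["brow.R"]', 'mad')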

def parseActionFCurve(act, ob, args, tokens):
    dataPath = args[0]
index = args[1]
(expr, channel) = channelFromDataPath(dataPath, index)
index = int(args[1])
success = False
for fcu in act.fcurves:
(expr1, channel1) = channelFromDataPath(fcu.data_path, fcu.array_index)
if expr1 == expr and channel1 == channel and fcu.array_index == index:
success = True
break
if not success:
return None
n = 0
for (key, val, sub) in tokens:
if key == 'kp':
try:
pt = fcu.keyframe_points[n]
pt.interpolation = 'LINEAR'
pt = parseKeyFramePoint(pt, val, sub)
n += 1
except:
pass
#print(tokens)
#MyError("kp", fcu, n, len(fcu.keyframe_points), val)
else:
defaultKey(key, val, sub, 'fcu', [], globals(), locals())
return fcu

def parseKeyFramePoint(pt, args, tokens):
    pt.co = (float(args[0]), float(args[1]))
    if len(args) > 2:
        # Keyframe handles in the 2.6x Python API are handle_left / handle_right.
        pt.handle_left = (float(args[2]), float(args[3]))
        pt.handle_right = (float(args[4]), float(args[5]))
return pt
# parseAnimationData(rna, args, tokens):
# parseDriver(drv, args, tokens):
# parseDriverVariable(var, args, tokens):
#
def parseAnimationData(rna, args, tokens):
if rna.animation_data is None:
rna.animation_data_create()
adata = rna.animation_data
for (key, val, sub) in tokens:
if key == 'FCurve':
fcu = parseAnimDataFCurve(adata, rna, val, sub)
else:
defaultKey(key, val, sub, 'adata', [], globals(), locals())
return adata
def parseAnimDataFCurve(adata, rna, args, tokens):
if invalid(args[2]):
return
dataPath = args[0]
index = int(args[1])
n = 1
for (key, val, sub) in tokens:
if key == 'Driver':
fcu = parseDriver(adata, dataPath, index, rna, val, sub)
fmod = fcu.modifiers[0]
fcu.modifiers.remove(fmod)
elif key == 'FModifier':
parseFModifier(fcu, val, sub)
elif key == 'kp':
pt = fcu.keyframe_points.insert(n, 0)
pt.interpolation = 'LINEAR'
pt = parseKeyFramePoint(pt, val, sub)
n += 1
else:
defaultKey(key, val, sub, 'fcu', [], globals(), locals())
return fcu
"""
fcurve = con.driver_add("influence", 0)
driver = fcurve.driver
driver.type = 'AVERAGE'
"""
def parseDriver(adata, dataPath, index, rna, args, tokens):
if dataPath[-1] == ']':
words = dataPath.split(']')
expr = "rna." + words[0] + ']'
pwords = words[1].split('"')
prop = pwords[1]
bone = eval(expr)
return None
else:
words = dataPath.split('.')
channel = words[-1]
expr = "rna"
for n in range(len(words)-1):
expr += "." + words[n]
expr += ".driver_add('%s', index)" % channel
fcu = eval(expr)
drv = fcu.driver
drv.type = args[0]
for (key, val, sub) in tokens:
if key == 'DriverVariable':
var = parseDriverVariable(drv, rna, val, sub)
else:
defaultKey(key, val, sub, 'drv', [], globals(), locals())
return fcu
def parseDriverVariable(drv, rna, args, tokens):
var = drv.variables.new()
var.name = args[0]
var.type = args[1]
nTarget = 0
for (key, val, sub) in tokens:
if key == 'Target':
parseDriverTarget(var, nTarget, rna, val, sub)
nTarget += 1
else:
defaultKey(key, val, sub, 'var', [], globals(), locals())
return var

def parseFModifier(fcu, args, tokens):
    fmod = fcu.modifiers.new(args[0])
#fmod = fcu.modifiers[0]
for (key, val, sub) in tokens:
defaultKey(key, val, sub, 'fmod', [], globals(), locals())
return fmod
"""
var = driver.variables.new()
var.name = target_bone
var.targets[0].id_type = 'OBJECT'
var.targets[0].id = obj
var.targets[0].rna_path = driver_path
"""
def parseDriverTarget(var, nTarget, rna, args, tokens):
    targ = var.targets[nTarget]
    name = args[0]
#targ.id_type = args[1]
dtype = args[1].capitalize()
dtype = 'Object'
targ.id = loadedData[dtype][name]
    for (key, val, sub) in tokens:
        if key == 'data_path':
words = val[0].split('"')
if len(words) > 1:
targ.data_path = propNames(words[1])[1]
else:
targ.data_path = propNames(val)[1]
else:
defaultKey(key, val, sub, 'targ', [], globals(), locals())
# parseMaterial(args, ext, tokens):
# parseMTex(mat, args, tokens):
# parseTexture(args, tokens):
def parseMaterial(args, tokens):
    global todo
name = args[0]
mat = bpy.data.materials.new(name)
if mat is None:
return None
loadedData['Material'][name] = mat
for (key, val, sub) in tokens:
if key == 'MTex':
parseMTex(mat, val, sub)
elif key == 'Ramp':
parseRamp(mat, val, sub)
elif key == 'RaytraceTransparency':
parseDefault(mat.raytrace_transparency, sub, {}, [])
elif key == 'Halo':
parseDefault(mat.halo, sub, {}, [])
elif key == 'SSS':
parseDefault(mat.subsurface_scattering, sub, {}, [])
elif key == 'Strand':
parseDefault(mat.strand, sub, {}, [])
elif key == 'NodeTree':
mat.use_nodes = True
parseNodeTree(mat.node_tree, val, sub)
elif key == 'AnimationData':
parseAnimationData(mat, val, sub)
else:
exclude = ['specular_intensity', 'tangent_shading']
            defaultKey(key, val, sub, 'mat', exclude, globals(), locals())

def parseMTex(mat, args, tokens):
    global todo
index = int(args[0])
texname = args[1]
texco = args[2]
mapto = args[3]
tex = loadedData['Texture'][texname]
mtex = mat.texture_slots.add()
mtex.texture_coords = texco
mtex.texture = tex
for (key, val, sub) in tokens:
defaultKey(key, val, sub, "mtex", [], globals(), locals())

def parseTexture(args, tokens):
    global todo
if verbosity > 2:
print( "Parsing texture %s" % args )
name = args[0]
tex = bpy.data.textures.new(name=name, type=args[1])
loadedData['Texture'][name] = tex
for (key, val, sub) in tokens:
if key == 'Image':
try:
imgName = val[0]
img = loadedData['Image'][imgName]
tex.image = img
except:
msg = "Unable to load image '%s'" % val[0]
elif key == 'Ramp':
parseRamp(tex, val, sub)
elif key == 'NodeTree':
tex.use_nodes = True
parseNodeTree(tex.node_tree, val, sub)
else:
defaultKey(key, val, sub, "tex", ['use_nodes', 'use_textures', 'contrast', 'use_alpha'], globals(), locals())
nvar = "data.%s" % args[0]
use = "data.use_%s = True" % args[0]
exec(use)
ramp = eval(nvar)
elts = ramp.elements
n = 0
for (key, val, sub) in tokens:
# print("Ramp", key, val)
if key == 'Element':
elts[n].color = eval(val[0])
elts[n].position = eval(val[1])
n += 1
else:
defaultKey(key, val, sub, "tex", ['use_nodes', 'use_textures', 'contrast'], globals(), locals())

def parseSSS(mat, args, tokens):
    sss = mat.subsurface_scattering
for (key, val, sub) in tokens:
defaultKey(key, val, sub, "sss", [], globals(), locals())

def parseStrand(mat, args, tokens):
    strand = mat.strand
for (key, val, sub) in tokens:
defaultKey(key, val, sub, "strand", [], globals(), locals())
# parseNodeTree(tree, args, tokens):
# parseNode(node, args, tokens):
# parseSocket(socket, args, tokens):
#
def parseNodeTree(tree, args, tokens):
    # Node-tree parsing is disabled: the early return below skips everything that
    # follows in this function, and the node helpers below appear to be unreachable.
    return
print("Tree", tree, args)
print(list(tree.nodes))
tree.name = args[0]
for (key, val, sub) in tokens:
if key == 'Node':
parseNodes(tree.nodes, val, sub)
else:
defaultKey(key, val, sub, "tree", [], globals(), locals())
def parseNodes(nodes, args, tokens):
print("Nodes", nodes, args)
print(list(nodes))
    node.name = args[0]   # note: 'node' is not defined in this scope (dead code, see parseNodeTree above)
for (key, val, sub) in tokens:
if key == 'Inputs':
parseSocket(node.inputs, val, sub)
elif key == 'Outputs':
parseSocket(node.outputs, val, sub)
else:
defaultKey(key, val, sub, "node", [], globals(), locals())
def parseNode(node, args, tokens):
print("Node", node, args)
print(list(node.inputs), list(node.outputs))
node.name = args[0]
for (key, val, sub) in tokens:
if key == 'Inputs':
parseSocket(node.inputs, val, sub)
elif key == 'Outputs':
parseSocket(node.outputs, val, sub)
else:
defaultKey(key, val, sub, "node", [], globals(), locals())
def parseSocket(socket, args, tokens):
print("Socket", socket, args)
socket.name = args[0]
for (key, val, sub) in tokens:
if key == 'Node':
            parseNode(tree.nodes, val, sub)   # note: 'tree' is not defined in this scope (dead code)
else:
defaultKey(key, val, sub, "tree", [], globals(), locals())
#
def loadImage(relFilepath):
filepath = os.path.normpath(os.path.join(theDir, relFilepath))
print( "Loading %s" % filepath.encode('utf-8','strict'))
    if os.path.isfile(filepath):
        #print( "Found file %s." % filepath.encode('utf-8','strict') )
        try:
            img = bpy.data.images.load(filepath)
            return img
        except:
            print( "Cannot read image" )
            return None
else:
print( "No such file: %s" % filepath.encode('utf-8','strict') )

def parseImage(args, tokens):
    global todo
imgName = args[0]
img = None
for (key, val, sub) in tokens:
if key == 'Filename':
filename = val[0]
for n in range(1,len(val)):
filename += " " + val[n]
img = loadImage(filename)
            if img is None:
                return None
        else:
            defaultKey(key, val, sub, "img", ['depth', 'dirty', 'has_data', 'size', 'type', 'use_premultiply'], globals(), locals())
print ("Image %s" % img )
loadedData['Image'][imgName] = img
return img
#
# parseObject(args, tokens):
# createObject(type, name, data, datName):
# setObjectAndData(args, typ):
#

def parseObject(args, tokens):
    if verbosity > 2:
print( "Parsing object %s" % args )
name = args[0]
typ = args[1]
datName = args[2]
if typ == 'EMPTY':
ob = bpy.data.objects.new(name, None)
loadedData['Object'][name] = ob
linkObject(ob, None)
else:
try:
            data = loadedData[typ.capitalize()][datName]
        except:
            MyError("Failed to find data: %s %s %s" % (name, typ, datName))
return
try:
ob = loadedData['Object'][name]
bpy.context.scene.objects.active = ob
#print("Found data", ob)
except:
ob = None
if ob is None:
ob = createObject(typ, name, data, datName)
linkObject(ob, data)
for (key, val, sub) in tokens:
if key == 'Modifier':
parseModifier(ob, val, sub)
elif key == 'Constraint':
parseConstraint(ob.constraints, None, val, sub)
elif key == 'AnimationData':
parseAnimationData(ob, val, sub)
elif key == 'ParticleSystem':
parseParticleSystem(ob, val, sub)
elif key == 'FieldSettings':
parseDefault(ob.field, sub, {}, [])
else:
defaultKey(key, val, sub, "ob", ['type', 'data'], globals(), locals())
if ob.type == 'MESH':