# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Project Name: MakeHuman
# Product Home Page: http://www.makehuman.org/
# Code Home Page: http://code.google.com/p/makehuman/
# Authors: Thomas Larsson
# Script copyright (C) MakeHuman Team 2001-2011
# Coding Standards: See http://sites.google.com/site/makehumandocs/developers-guide
"""
Abstract
MHX (MakeHuman eXchange format) importer for Blender 2.5x.
Version 1.4.0

This script should be distributed with Blender.
If not, place it in the .blender/scripts/addons dir.
Activate the script in the "Add-Ons" tab (user preferences).
Access from the File > Import menu.

Alternatively, run the script in the script editor (Alt-P), and access from the File > Import menu.
"""
bl_info = {
    'name': 'Import: MakeHuman (.mhx)',
    'author': 'Thomas Larsson',
    'version': (1, 4, 0),
    "api": 35774,
    'location': "File > Import > MakeHuman (.mhx)",
    'description': 'Import files in the MakeHuman eXchange format (.mhx)',
    'warning': '',
    'wiki_url': 'http://sites.google.com/site/makehumandocs/blender-export-and-mhx',
    'tracker_url': 'https://projects.blender.org/tracker/index.php?',
    }
MAJOR_VERSION = 1
MINOR_VERSION = 4
SUB_VERSION = 0
BLENDER_VERSION = (2, 57, 0)
#
#
#
import bpy
import os
import time
import mathutils
from mathutils import Matrix
MHX249 = False
Blender24 = False
Blender25 = True
TexDir = "~/makehuman/exports"
#
#
#
theScale = 1.0
One = 1.0/theScale
useMesh = 1
verbosity = 2
warnedTextureDir = False
warnedVersion = False
true = True
false = False
Epsilon = 1e-6
nErrors = 0
theTempDatum = None
todo = []
#
T_EnforceVersion = 0x01
T_Clothes = 0x02
T_Stretch = 0x04
T_Bend = 0x08
T_Diamond = 0x10
T_Replace = 0x20
T_Face = 0x40
T_Shape = 0x80
T_Mesh = 0x100
T_Armature = 0x200
T_Proxy = 0x400
T_Cage = 0x800
T_Opcns = 0x2000
toggle = T_EnforceVersion + T_Replace + T_Mesh + T_Armature + T_Face + T_Shape + T_Proxy + T_Clothes
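#
# The T_* values above are independent bit flags, so one integer ("toggle")
# carries the whole option set.  A minimal sketch of how such a mask is used
# (the particular combination below is hypothetical, not a recommended default):
#
#   opts = T_Mesh | T_Armature      # enable mesh and armature import
#   if opts & T_Proxy:              # test one option
#       print("proxies enabled")
#   opts &= ~T_Armature             # clear one option again
#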
#
# Blender versions
#
BLENDER_GRAPHICALL = 0
BLENDER_256a = 1
BlenderVersions = ['Graphicall', 'Blender256a']
theBlenderVersion = BLENDER_GRAPHICALL
#fFingerPanel = 0.0
#fFingerIK = 0.0
fNoStretch = 0.0
#T_GoboFoot = 0x0002
#T_InvFoot = 0x0010
#T_InvFootPT = 0x0020
#T_InvFootNoPT = 0x0040
#T_FingerPanel = 0x100
#T_FingerRot = 0x0200
#T_FingerIK = 0x0400
#T_LocalFKIK = 0x8000
#rigLeg = 0
#rigArm = 0
def setFlagsAndFloats():
    '''
    (footRig, fingerRig) = rigFlags
rigLeg = 0
if footRig == 'Reverse foot':
rigLeg |= T_InvFoot
if toggle & T_PoleTar:
rigLeg |= T_InvFootPT
else:
rigLeg |= T_InvFootNoPT
elif footRig == 'Gobo': rigLeg |= T_GoboFoot
rigArm = 0
if fingerRig == 'Panel': rigArm |= T_FingerPanel
elif fingerRig == 'Rotation': rigArm |= T_FingerRot
elif fingerRig == 'IK': rigArm |= T_FingerIK
toggle |= T_Panel
'''
global fNoStretch
    if toggle&T_Stretch: fNoStretch = 0.0
else: fNoStretch = 1.0
'MaterialTextureSlot' : {},
'Texture' : {},
'Bone' : {},
'BoneGroup' : {},
'Rigify' : {},
'Action' : {},
'Group' : {},
'MeshTextureFaceLayer' : {},
'MeshColorLayer' : {},
'VertexGroup' : {},
'ShapeKey' : {},
'ParticleSystem' : {},
'ObjectConstraints' : {},
'ObjectModifiers' : {},
    'MaterialSlot' : {},
    }
Plural = {
    'Object' : 'objects',
'Mesh' : 'meshes',
'Lattice' : 'lattices',
'Curve' : 'curves',
'Text' : 'texts',
'Group' : 'groups',
'Empty' : 'empties',
'Armature' : 'armatures',
'Bone' : 'bones',
'BoneGroup' : 'bone_groups',
'Pose' : 'poses',
'PoseBone' : 'pose_bones',
'Material' : 'materials',
'Texture' : 'textures',
'Image' : 'images',
'Camera' : 'cameras',
'Lamp' : 'lamps',
    'World' : 'worlds',
    }
#
def checkBlenderVersion():
print("Found Blender", bpy.app.version)
(A, B, C) = bpy.app.version
(a, b, c) = BLENDER_VERSION
    if (A, B, C) >= (a, b, c):
        return
msg = (
"This version of the MHX importer only works with Blender (%d, %d, %d) or later. " % (a, b, c) +
"Download a more recent Blender from www.blender.org or www.graphicall.org.\n"
# readMhxFile(filePath):
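#
# Sketch of the token structure that the tokenizer below builds from an MHX
# file (the mesh fragment here is hypothetical).  A line ending in ';' becomes
# a leaf [key, values, []]; any other line opens a nested level that a matching
# 'end' line closes:
#
#   Mesh HumanMesh HumanMesh
#     v 0.0 1.0 0.0 ;
#   end Mesh
#
# tokenizes to
#
#   [['Mesh', ['HumanMesh', 'HumanMesh'], [['v', ['0.0', '1.0', '0.0'], []]]]]
#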
def readMhxFile(filePath):
global todo, nErrors, theScale, defaultScale, One, toggle
#checkBlenderVersion()
defaultScale = theScale
One = 1.0/theScale
fileName = os.path.expanduser(filePath)
(shortName, ext) = os.path.splitext(fileName)
if ext.lower() != ".mhx":
print("Error: Not a mhx file: " + fileName)
return
print( "Opening MHX file "+ fileName )
time1 = time.clock()
ignore = False
stack = []
tokens = []
key = "toplevel"
level = 0
nErrors = 0
comment = 0
nesting = 0
setFlagsAndFloats()
file= open(fileName, "rU")
print( "Tokenizing" )
lineNo = 0
for line in file:
# print(line)
lineSplit= line.split()
lineNo += 1
if len(lineSplit) == 0:
pass
elif lineSplit[0][0] == '#':
if lineSplit[0] == '#if':
if comment == nesting:
try:
res = eval(lineSplit[1])
except:
res = False
if res:
comment += 1
nesting += 1
elif lineSplit[0] == '#else':
if comment == nesting-1:
comment += 1
elif comment == nesting:
comment -= 1
elif lineSplit[0] == '#endif':
if comment == nesting:
comment -= 1
nesting -= 1
elif comment < nesting:
pass
elif lineSplit[0] == 'end':
try:
sub = tokens
tokens = stack.pop()
if tokens:
tokens[-1][2] = sub
level -= 1
except:
print( "Tokenizer error at or before line %d" % lineNo )
print( line )
dummy = stack.pop()
elif lineSplit[-1] == ';':
if lineSplit[0] == '\\':
key = lineSplit[1]
tokens.append([key,lineSplit[2:-1],[]])
else:
key = lineSplit[0]
tokens.append([key,lineSplit[1:-1],[]])
else:
key = lineSplit[0]
tokens.append([key,lineSplit[1:],[]])
stack.append(tokens)
level += 1
tokens = []
file.close()
if level != 0:
raise NameError("Tokenizer out of kilter %d" % level)
clearScene()
print( "Parsing" )
parse(tokens)
for (expr, glbals, lcals) in todo:
try:
print("Doing %s" % expr)
exec(expr, glbals, lcals)
except:
msg = "Failed: "+expr
print( msg )
nErrors += 1
#raise NameError(msg)
time2 = time.clock()
print("toggle = %x" % toggle)
msg = "File %s loaded in %g s" % (fileName, time2-time1)
if nErrors:
msg += " but there where %d errors. " % (nErrors)
print(msg)
return
#
# getObject(name, var, glbals, lcals):
def getObject(name, var, glbals, lcals):
    try:
        ob = loadedData['Object'][name]
    except:
        if name != "None":
            pushOnTodoList(None, "ob = loadedData['Object'][name]", globals(), locals())
        ob = None
    return ob
#
def checkMhxVersion(major, minor):
    global warnedVersion
print((major,minor), (MAJOR_VERSION, MINOR_VERSION), warnedVersion)
if major != MAJOR_VERSION or minor != MINOR_VERSION:
"Wrong MHX version\n" +
"Expected MHX %d.%d but the loaded file has version MHX %d.%d\n" % (MAJOR_VERSION, MINOR_VERSION, major, minor) +
"You can disable this error message by deselecting the Enforce version option when importing. " +
"Alternatively, you can try to download the most recent nightly build from www.makehuman.org. " +
"The current version of the import script is located in the importers/mhx/blender25x folder and is called import_scene_mhx.py. " +
"The version distributed with Blender builds from www.graphicall.org may be out of date.\n"
)
if toggle & T_EnforceVersion:
raise NameError(msg)
else:
print(msg)
warnedVersion = True
return
#
ifResult = False
def parse(tokens):
global MHX249, ifResult, theScale, defaultScale, One
for (key, val, sub) in tokens:
print("Parse %s" % key)
data = None
if key == 'MHX':
checkMhxVersion(int(val[0]), int(val[1]))
elif key == 'MHX249':
MHX249 = eval(val[0])
print("Blender 2.49 compatibility mode is %s\n" % MHX249)
elif MHX249:
pass
elif key == 'print':
msg = concatList(val)
print(msg)
elif key == 'warn':
msg = concatList(val)
print(msg)
elif key == 'error':
msg = concatList(val)
raise NameError(msg)
elif key == 'NoScale':
if eval(val[0]):
theScale = 1.0
else:
theScale = defaultScale
One = 1.0/theScale
elif key == "Object":
parseObject(val, sub)
elif key == "Mesh":
data = parseMesh(val, sub)
elif key == "Armature":
data = parseArmature(val, sub)
elif key == "Pose":
data = parsePose(val, sub)
elif key == "Action":
data = parseAction(val, sub)
elif key == "Material":
data = parseMaterial(val, sub)
elif key == "Texture":
data = parseTexture(val, sub)
elif key == "Image":
data = parseImage(val, sub)
elif key == "Curve":
data = parseCurve(val, sub)
elif key == "TextCurve":
data = parseTextCurve(val, sub)
elif key == "Lattice":
data = parseLattice(val, sub)
elif key == "Group":
data = parseGroup(val, sub)
elif key == "Lamp":
data = parseLamp(val, sub)
elif key == "World":
data = parseWorld(val, sub)
elif key == "Scene":
data = parseScene(val, sub)
elif key == "DefineProperty":
parseDefineProperty(val, sub)
elif key == "PostProcess":
postProcess(val)
hideLayers(val)
elif key == "CorrectRig":
correctRig(val)
elif key == 'AnimationData':
try:
ob = loadedData['Object'][val[0]]
except:
ob = None
if ob:
bpy.context.scene.objects.active = ob
parseAnimationData(ob, val, sub)
elif key == 'MaterialAnimationData':
try:
ob = loadedData['Object'][val[0]]
except:
ob = None
if ob:
bpy.context.scene.objects.active = ob
mat = ob.data.materials[int(val[2])]
print("matanim", ob, mat)
parseAnimationData(mat, val, sub)
elif key == 'ShapeKeys':
try:
ob = loadedData['Object'][val[0]]
except:
raise NameError("ShapeKeys object %s does not exist" % val[0])
if ob:
bpy.context.scene.objects.active = ob
parseShapeKeys(ob, ob.data, val, sub)
else:
data = parseDefaultType(key, val, sub)
if data and key != 'Mesh':
print( data )
return
#
# parseDefaultType(typ, args, tokens):
def parseDefaultType(typ, args, tokens):
    name = args[0]
data = None
expr = "bpy.data.%s.new('%s')" % (Plural[typ], name)
    # print(expr)
    data = eval(expr)
    # print(" ok", data)
bpyType = typ.capitalize()
print(bpyType, name, data)
loadedData[bpyType][name] = data
    if data is None:
        return None
for (key, val, sub) in tokens:
#print("%s %s" % (key, val))
defaultKey(key, val, sub, 'data', [], globals(), locals())
print("Done ", data)
return data
string = ""
for elt in elts:
string += " %s" % elt
return string
# parseAction(args, tokens):
# parseFCurve(fcu, args, tokens):
# parseKeyFramePoint(pt, args, tokens):
def parseAction(args, tokens):
    name = args[0]
if invalid(args[1]):
return
ob = bpy.context.object
bpy.ops.object.mode_set(mode='POSE')
if ob.animation_data:
ob.animation_data.action = None
created = {}
for (key, val, sub) in tokens:
if key == 'FCurve':
prepareActionFCurve(ob, created, val, sub)
act = ob.animation_data.action
loadedData['Action'][name] = act
if act is None:
print("Ignoring action %s" % name)
return act
act.name = name
print("Action", name, act, ob)
for (key, val, sub) in tokens:
if key == 'FCurve':
fcu = parseActionFCurve(act, ob, val, sub)
else:
defaultKey(key, val, sub, 'act', [], globals(), locals())
ob.animation_data.action = None
bpy.ops.object.mode_set(mode='OBJECT')
return act
def prepareActionFCurve(ob, created, args, tokens):
dataPath = args[0]
index = args[1]
(expr, channel) = channelFromDataPath(dataPath, index)
try:
if channel in created[expr]:
return
else:
created[expr].append(channel)
except:
created[expr] = [channel]
times = []
for (key, val, sub) in tokens:
if key == 'kp':
times.append(int(val[0]))
try:
data = eval(expr)
except:
print("Ignoring illegal expression: %s" % expr)
return
n = 0
for t in times:
#bpy.context.scene.current_frame = t
bpy.ops.anim.change_frame(frame = t)
try:
data.keyframe_insert(channel)
n += 1
except:
pass
#print("failed", data, expr, channel)
if n != len(times):
print("Mismatch", n, len(times), expr, channel)
return
def channelFromDataPath(dataPath, index):
    words = dataPath.split(']')
if len(words) == 1:
# location
expr = "ob"
channel = dataPath
elif len(words) == 2:
# pose.bones["tongue"].location
expr = "ob.%s]" % (words[0])
cwords = words[1].split('.')
channel = cwords[1]
elif len(words) == 3:
# pose.bones["brow.R"]["mad"]
expr = "ob.%s]" % (words[0])
cwords = words[1].split('"')
channel = cwords[1]
# print(expr, channel, index)
return (expr, channel)
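#
# Worked example of the mapping above (the bone name "tongue" is just an
# illustration): for dataPath 'pose.bones["tongue"].location' the split on ']'
# yields two words, so expr becomes 'ob.pose.bones["tongue"]' and channel
# becomes 'location'; the caller then evaluates expr and calls
# data.keyframe_insert(channel) on the result.
#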
def parseActionFCurve(act, ob, args, tokens):
    dataPath = args[0]
index = args[1]
(expr, channel) = channelFromDataPath(dataPath, index)
index = int(args[1])
success = False
for fcu in act.fcurves:
(expr1, channel1) = channelFromDataPath(fcu.data_path, fcu.array_index)
if expr1 == expr and channel1 == channel and fcu.array_index == index:
success = True
break
if not success:
return None
n = 0
for (key, val, sub) in tokens:
if key == 'kp':
try:
pt = fcu.keyframe_points[n]
pt.interpolation = 'LINEAR'
pt = parseKeyFramePoint(pt, val, sub)
n += 1
except:
pass
#print(tokens)
#raise NameError("kp", fcu, n, len(fcu.keyframe_points), val)
else:
defaultKey(key, val, sub, 'fcu', [], globals(), locals())
return fcu
def parseKeyFramePoint(pt, args, tokens):
    pt.co = (float(args[0]), float(args[1]))
    if len(args) > 2:
        pt.handle1 = (float(args[2]), float(args[3]))
        pt.handle2 = (float(args[4]), float(args[5]))
    return pt
# parseAnimationData(rna, args, tokens):
# parseDriver(drv, args, tokens):
# parseDriverVariable(var, args, tokens):
#
def parseAnimationData(rna, args, tokens):
print("Parse Animation data")
if rna.animation_data is None:
rna.animation_data_create()
adata = rna.animation_data
for (key, val, sub) in tokens:
if key == 'FCurve':
fcu = parseAnimDataFCurve(adata, rna, val, sub)
else:
defaultKey(key, val, sub, 'adata', [], globals(), locals())
print(adata)
def parseAnimDataFCurve(adata, rna, args, tokens):
global theBlenderVersion
if invalid(args[2]):
return
dataPath = args[0]
index = int(args[1])
n = 1
for (key, val, sub) in tokens:
if key == 'Driver':
fcu = parseDriver(adata, dataPath, index, rna, val, sub)
fmod = fcu.modifiers[0]
fcu.modifiers.remove(fmod)
elif key == 'FModifier':
parseFModifier(fcu, val, sub)
elif key == 'kp':
if theBlenderVersion >= BLENDER_256a:
pt = fcu.keyframe_points.add(n, 0)
else:
pt = fcu.keyframe_points.insert(n, 0)
pt.interpolation = 'LINEAR'
pt = parseKeyFramePoint(pt, val, sub)
n += 1
else:
defaultKey(key, val, sub, 'fcu', [], globals(), locals())
return fcu
fcurve = con.driver_add("influence", 0)
driver = fcurve.driver
driver.type = 'AVERAGE'
"""
def parseDriver(adata, dataPath, index, rna, args, tokens):
if dataPath[-1] == ']':
words = dataPath.split(']')
expr = "rna." + words[0] + ']'
pwords = words[1].split('"')
prop = pwords[1]
#print("prop", expr, prop)
bone = eval(expr)
return None
else:
words = dataPath.split('.')
channel = words[-1]
expr = "rna"
for n in range(len(words)-1):
expr += "." + words[n]
expr += ".driver_add('%s', index)" % channel
#print("expr", rna, expr)
fcu = eval(expr)
drv = fcu.driver
#print(" Driver type", drv, args[0])
#print(" ->", drv.type)
for (key, val, sub) in tokens:
if key == 'DriverVariable':
var = parseDriverVariable(drv, rna, val, sub)
else:
defaultKey(key, val, sub, 'drv', [], globals(), locals())
return fcu
def parseDriverVariable(drv, rna, args, tokens):
    var = drv.variables.new()
    var.name = args[0]
    var.type = args[1]
    #print(" Var type", var, args[1])
    #print(" ->", var.type)
nTarget = 0
for (key, val, sub) in tokens:
if key == 'Target':
parseDriverTarget(var, nTarget, rna, val, sub)
nTarget += 1
else:
defaultKey(key, val, sub, 'var', [], globals(), locals())
return var
def parseFModifier(fcu, args, tokens):
    fmod = fcu.modifiers.new(args[0])
#fmod = fcu.modifiers[0]
for (key, val, sub) in tokens:
defaultKey(key, val, sub, 'fmod', [], globals(), locals())
return fmod
"""
    var = driver.variables.new()
var.name = target_bone
var.targets[0].id_type = 'OBJECT'
var.targets[0].id = obj
var.targets[0].rna_path = driver_path
"""
def parseDriverTarget(var, nTarget, rna, args, tokens):
    targ = var.targets[nTarget]
    ob = loadedData['Object'][args[0]]
#print(" targ id", targ, ob)
targ.id = ob
#print(" ->", targ.id)
for (key, val, sub) in tokens:
defaultKey(key, val, sub, 'targ', [], globals(), locals())
return targ
# parseMaterial(args, ext, tokens):
# parseMTex(mat, args, tokens):
# parseTexture(args, tokens):
def parseMaterial(args, tokens):
    global todo
name = args[0]
mat = bpy.data.materials.new(name)
if mat is None:
return None
loadedData['Material'][name] = mat
for (key, val, sub) in tokens:
if key == 'MTex':
parseMTex(mat, val, sub)
elif key == 'Ramp':
parseRamp(mat, val, sub)
elif key == 'RaytraceTransparency':
parseDefault(mat.raytrace_transparency, sub, {}, [])
elif key == 'Halo':
parseDefault(mat.halo, sub, {}, [])
elif key == 'SSS':
parseDefault(mat.subsurface_scattering, sub, {}, [])
elif key == 'Strand':
parseDefault(mat.strand, sub, {}, [])
elif key == 'NodeTree':
mat.use_nodes = True
parseNodeTree(mat.node_tree, val, sub)
else:
            exclude = ['specular_intensity', 'tangent_shading']
            defaultKey(key, val, sub, 'mat', exclude, globals(), locals())
return mat
def parseMTex(mat, args, tokens):
    global todo
index = int(args[0])
texname = args[1]
texco = args[2]
mapto = args[3]
tex = loadedData['Texture'][texname]
mtex = mat.texture_slots.add()
mtex.texture_coords = texco
mtex.texture = tex
for (key, val, sub) in tokens:
defaultKey(key, val, sub, "mtex", [], globals(), locals())
def parseTexture(args, tokens):
    global todo
if verbosity > 2:
print( "Parsing texture %s" % args )
name = args[0]
tex = bpy.data.textures.new(name=name, type=args[1])
loadedData['Texture'][name] = tex
for (key, val, sub) in tokens:
if key == 'Image':
try:
imgName = val[0]
img = loadedData['Image'][imgName]
tex.image = img
except:
msg = "Unable to load image '%s'" % val[0]
elif key == 'Ramp':
parseRamp(tex, val, sub)
elif key == 'NodeTree':
tex.use_nodes = True
parseNodeTree(tex.node_tree, val, sub)
else:
defaultKey(key, val, sub, "tex", ['use_nodes', 'use_textures', 'contrast'], globals(), locals())
return tex
nvar = "data.%s" % args[0]
use = "data.use_%s = True" % args[0]
exec(use)
ramp = eval(nvar)
elts = ramp.elements
n = 0
for (key, val, sub) in tokens:
# print("Ramp", key, val)
if key == 'Element':
elts[n].color = eval(val[0])
elts[n].position = eval(val[1])
n += 1
else:
defaultKey(key, val, sub, "tex", ['use_nodes', 'use_textures', 'contrast'], globals(), locals())
sss = mat.subsurface_scattering
for (key, val, sub) in tokens:
defaultKey(key, val, sub, "sss", [], globals(), locals())
strand = mat.strand
for (key, val, sub) in tokens:
defaultKey(key, val, sub, "strand", [], globals(), locals())
# parseNodeTree(tree, args, tokens):
# parseNode(node, args, tokens):
# parseSocket(socket, args, tokens):
#
def parseNodeTree(tree, args, tokens):
return
print("Tree", tree, args)
print(list(tree.nodes))
tree.name = args[0]
for (key, val, sub) in tokens:
if key == 'Node':
parseNodes(tree.nodes, val, sub)
else:
defaultKey(key, val, sub, "tree", [], globals(), locals())
def parseNodes(nodes, args, tokens):
print("Nodes", nodes, args)
print(list(nodes))
node.name = args[0]
for (key, val, sub) in tokens:
if key == 'Inputs':
parseSocket(node.inputs, val, sub)
elif key == 'Outputs':
parseSocket(node.outputs, val, sub)
else:
defaultKey(key, val, sub, "node", [], globals(), locals())
def parseNode(node, args, tokens):
print("Node", node, args)
print(list(node.inputs), list(node.outputs))
node.name = args[0]
for (key, val, sub) in tokens:
if key == 'Inputs':
parseSocket(node.inputs, val, sub)
elif key == 'Outputs':
parseSocket(node.outputs, val, sub)
else:
defaultKey(key, val, sub, "node", [], globals(), locals())
def parseSocket(socket, args, tokens):
print("Socket", socket, args)
socket.name = args[0]
for (key, val, sub) in tokens:
if key == 'Node':
parseNode(tree.nodes, val, sub)
else:
defaultKey(key, val, sub, "tree", [], globals(), locals())
#
# doLoadImage(filepath):
# loadImage(filepath):
# parseImage(args, tokens):
def doLoadImage(filepath):
path1 = os.path.expanduser(filepath)
file1 = os.path.realpath(path1)
if os.path.isfile(file1):
print( "Found file "+file1 )
try:
img = bpy.data.images.load(file1)
return img
except:
print( "Cannot read image" )
return None
else:
print( "No file "+file1 )
return None
def loadImage(filepath):
    global TexDir, warnedTextureDir, loadedData
texDir = os.path.expanduser(TexDir)
path1 = os.path.expanduser(filepath)
file1 = os.path.realpath(path1)
(path, filename) = os.path.split(file1)
(name, ext) = os.path.splitext(filename)
print( "Loading ", filepath, " = ", filename )
# img = doLoadImage(texDir+"/"+name+".png")
# if img:
# return img
img = doLoadImage(texDir+"/"+filename)
if img:
return img
# img = doLoadImage(path+"/"+name+".png")
# if img:
# return img
img = doLoadImage(path+"/"+filename)
if img:
return img
if warnedTextureDir:
return None
warnedTextureDir = True
return None
TexDir = Draw.PupStrInput("TexDir? ", path, 100)
texDir = os.path.expanduser(TexDir)
img = doLoadImage(texDir+"/"+name+".png")
if img:
return img
img = doLoadImage(TexDir+"/"+filename)
return img
def parseImage(args, tokens):
    global todo
imgName = args[0]
img = None
for (key, val, sub) in tokens:
if key == 'Filename':
filename = val[0]
for n in range(1,len(val)):
filename += " " + val[n]
img = loadImage(filename)
if img is None:
return None
img.name = imgName
else:
defaultKey(key, val, sub, "img", ['depth', 'dirty', 'has_data', 'size', 'type'], globals(), locals())
print ("Image %s" % img )
loadedData['Image'][imgName] = img
return img
#
# parseObject(args, tokens):
# createObject(type, name, data, datName):
# setObjectAndData(args, typ):
#
def parseObject(args, tokens):
    if verbosity > 2:
print( "Parsing object %s" % args )
name = args[0]
typ = args[1]
datName = args[2]
if typ == 'EMPTY':
ob = bpy.data.objects.new(name, None)
loadedData['Object'][name] = ob
linkObject(ob, None)
else:
try:
data = loadedData[typ.capitalize()][datName]
except:
raise NameError("Failed to find data: %s %s %s" % (name, typ, datName))
return
try:
ob = loadedData['Object'][name]
bpy.context.scene.objects.active = ob
#print("Found data", ob)
except:
ob = None
if ob is None:
print("Create", name, data, datName)
ob = createObject(typ, name, data, datName)
print("created", ob)
linkObject(ob, data)
for (key, val, sub) in tokens:
if key == 'Modifier':
parseModifier(ob, val, sub)
elif key == 'Constraint':
parseConstraint(ob.constraints, None, val, sub)
elif key == 'AnimationData':
parseAnimationData(ob, val, sub)
elif key == 'ParticleSystem':
parseParticleSystem(ob, val, sub)
elif key == 'FieldSettings':
parseDefault(ob.field, sub, {}, [])
else:
defaultKey(key, val, sub, "ob", ['type', 'data'], globals(), locals())
# Needed for updating layers
if bpy.context.object == ob:
pass
'''
if ob.data in ['MESH', 'ARMATURE']:
print(ob, ob.data)
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.object.mode_set(mode='OBJECT')
'''
else:
print("Context", ob, bpy.context.object, bpy.context.scene.objects.active)
return
# print( "Creating object %s %s %s" % (typ, name, data) )
ob = bpy.data.objects.new(name, data)
if data:
loadedData[typ.capitalize()][datName] = data
loadedData['Object'][name] = ob
return ob
def linkObject(ob, data):
    if data and ob.data is None:
ob.data = data
print("Data linked", ob, ob.data)
scn = bpy.context.scene
scn.objects.link(ob)
scn.objects.active = ob
#print("Linked object", ob)
#print("Scene", scn)
#print("Active", scn.objects.active)
#print("Context", bpy.context.object)
return ob
def setObjectAndData(args, typ):
datName = args[0]
obName = args[1]
#bpy.ops.object.add(type=typ)
ob = bpy.context.object
ob.name = obName
ob.data.name = datName
loadedData[typ][datName] = ob.data
loadedData['Object'][obName] = ob
return ob.data
#
def parseModifier(ob, args, tokens):
    name = args[0]
typ = args[1]
if typ == 'PARTICLE_SYSTEM':
return None
mod = ob.modifiers.new(name, typ)
for (key, val, sub) in tokens:
if key == 'HookAssignNth':
if val[0] == 'CURVE':
hookAssignNth(mod, int(val[1]), True, ob.data.splines[0].points)
elif val[0] == 'LATTICE':
hookAssignNth(mod, int(val[1]), False, ob.data.points)
elif val[0] == 'MESH':
hookAssignNth(mod, int(val[1]), True, ob.data.vertices)
else:
raise NameError("Unknown hook %s" % val)
else:
defaultKey(key, val, sub, 'mod', [], globals(), locals())
def hookAssignNth(mod, n, select, points):
if select:
for pt in points:
pt.select = False
points[n].select = True
sel = []
for pt in points:
sel.append(pt.select)
#print(mod, sel, n, points)
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.object.hook_reset(modifier=mod.name)
bpy.ops.object.hook_select(modifier=mod.name)
bpy.ops.object.hook_assign(modifier=mod.name)
bpy.ops.object.mode_set(mode='OBJECT')
return
# parseParticleSystem(ob, args, tokens):
# parseParticles(particles, args, tokens):
# parseParticle(par, args, tokens):
#
def parseParticleSystem(ob, args, tokens):
print(ob, bpy.context.object)
pss = ob.particle_systems
print(pss, pss.values())
name = args[0]
typ = args[1]
#psys = pss.new(name, typ)
bpy.ops.object.particle_system_add()
print(pss, pss.values())
psys = pss[-1]
psys.name = name
psys.settings.type = typ
loadedData['ParticleSystem'][name] = psys
print("Psys", psys)
for (key, val, sub) in tokens:
if key == 'Particles':
parseParticles(psys, val, sub)
else:
defaultKey(key, val, sub, 'psys', [], globals(), locals())
return psys
def parseParticles(psys, args, tokens):
    particles = psys.particles
bpy.ops.particle.particle_edit_toggle()
n = 0
for (key, val, sub) in tokens:
if key == 'Particle':
parseParticle(particles[n], val, sub)
n += 1
else:
for par in particles:
defaultKey(key, val, sub, 'par', [], globals(), locals())
bpy.ops.particle.particle_edit_toggle()
return particles
def parseParticle(par, args, tokens):
    n = 0
for (key, val, sub) in tokens:
if key == 'h':
h = par.hair[n]
h.location = eval(val[0])
h.time = int(val[1])
h.weight = float(val[2])
n += 1
elif key == 'location':
par.location = eval(val[0])
return
l = []
for t in list_of_tuples:
l.extend(t)
return l
def parseMesh(args, tokens):
    global todo
if verbosity > 2:
print( "Parsing mesh %s" % args )
mename = args[0]
obname = args[1]
me = bpy.data.meshes.new(mename)
ob = createObject('MESH', obname, me, mename)
verts = []
edges = []
faces = []
vertsTex = []
texFaces = []
for (key, val, sub) in tokens:
if key == 'Verts':
verts = parseVerts(sub)
elif key == 'Edges':
edges = parseEdges(sub)
elif key == 'Faces':
faces = parseFaces(sub)
if faces:
me.from_pydata(verts, [], faces)
else:
me.from_pydata(verts, edges, [])
me.update()
linkObject(ob, me)
mats = []
for (key, val, sub) in tokens:
if key == 'Verts' or key == 'Edges' or key == 'Faces':
pass
elif key == 'MeshTextureFaceLayer':
parseUvTexture(val, sub, me)
elif key == 'MeshColorLayer':
parseVertColorLayer(val, sub, me)
elif key == 'VertexGroup':
parseVertexGroup(ob, me, val, sub)
elif key == 'ShapeKeys':
parseShapeKeys(ob, me, val, sub)
elif key == 'Material':
try:
mat = loadedData['Material'][val[0]]
except:
mat = None
if mat:
me.materials.append(mat)
else:
defaultKey(key, val, sub, "me", [], globals(), locals())
for (key, val, sub) in tokens:
if key == 'Faces':
parseFaces2(sub, me)
print(me)
return me
#
# parseVerts(tokens):
# parseEdges(tokens):
# parseFaces(tokens):
# parseFaces2(tokens, me):
def parseVerts(tokens):
    verts = []
for (key, val, sub) in tokens:
if key == 'v':
verts.append( (theScale*float(val[0]), theScale*float(val[1]), theScale*float(val[2])) )
return verts
def parseEdges(tokens):
    edges = []
for (key, val, sub) in tokens:
if key == 'e':
edges.append((int(val[0]), int(val[1])))
return edges
def parseFaces(tokens):
faces = []
for (key, val, sub) in tokens:
if key == 'f':
if len(val) == 3:
face = [int(val[0]), int(val[1]), int(val[2])]
elif len(val) == 4:
face = [int(val[0]), int(val[1]), int(val[2]), int(val[3])]
faces.append(face)
return faces
def parseFaces2(tokens, me):
n = 0
for (key, val, sub) in tokens:
if key == 'ft':
f = me.faces[n]
f.material_index = int(val[0])
f.use_smooth = int(val[1])
n += 1
elif key == 'mn':
fn = int(val[0])
mn = int(val[1])
f = me.faces[fn]
f.material_index = mn
elif key == 'ftall':
mat = int(val[0])
smooth = int(val[1])
for f in me.faces:
f.material_index = mat
f.use_smooth = smooth
return
#
# parseUvTexture(args, tokens, me):
# parseUvTexData(args, tokens, uvdata):
def parseUvTexture(args, tokens, me):
    name = args[0]
me.uv_textures.new(name = name)
uvtex = me.uv_textures[-1]
loadedData['MeshTextureFaceLayer'][name] = uvtex
for (key, val, sub) in tokens:
if key == 'Data':
parseUvTexData(val, sub, uvtex.data)
else:
defaultKey(key, val, sub, "uvtex", [], globals(), locals())
return
def parseUvTexData(args, tokens, data):
    n = 0
for (key, val, sub) in tokens:
if key == 'vt':
data[n].uv1 = (float(val[0]), float(val[1]))
data[n].uv2 = (float(val[2]), float(val[3]))
data[n].uv3 = (float(val[4]), float(val[5]))
if len(val) > 6:
data[n].uv4 = (float(val[6]), float(val[7]))
n += 1
else:
pass
#for i in range(n):
# defaultKey(key, val, sub, "data[i]", [], globals(), locals())
return
#
# parseVertColorLayer(args, tokens, me):
# parseVertColorData(args, tokens, data):
#
def parseVertColorLayer(args, tokens, me):
name = args[0]
print("VertColorLayer", name)
vcol = me.vertex_colors.new(name)
loadedData['MeshColorLayer'][name] = vcol
for (key, val, sub) in tokens:
if key == 'Data':
parseVertColorData(val, sub, vcol.data)
else:
defaultKey(key, val, sub, "vcol", [], globals(), locals())
return
def parseVertColorData(args, tokens, data):
    n = 0
for (key, val, sub) in tokens:
if key == 'cv':
data[n].color1 = eval(val[0])
data[n].color2 = eval(val[1])
data[n].color3 = eval(val[2])
data[n].color4 = eval(val[3])
n += 1
return
#
def parseVertexGroup(ob, me, args, tokens):
global toggle, theBlenderVersion
if verbosity > 2:
print( "Parsing vertgroup %s" % args )
grpName = args[0]
try:
res = eval(args[1])
except:
res = True
if not res:
return
if (toggle & T_Armature) or (grpName in ['Eye_L', 'Eye_R', 'Gums', 'Head', 'Jaw', 'Left', 'Middle', 'Right', 'Scalp']):
group = ob.vertex_groups.new(grpName)
loadedData['VertexGroup'][grpName] = group
if theBlenderVersion >= BLENDER_256a:
for (key, val, sub) in tokens:
if key == 'wv':
ob.vertex_groups.assign([int(val[0])], group, float(val[1]), 'REPLACE')
else:
for (key, val, sub) in tokens:
if key == 'wv':
group.add( [int(val[0])], float(val[1]), 'REPLACE' )
return
#
# parseShapeKeys(ob, me, args, tokens):
# parseShapeKey(ob, me, args, tokens):
# addShapeKey(ob, name, vgroup, tokens):
# doShape(name):
def doShape(name):
    if (toggle & T_Shape+T_Face) and (name == 'Basis'):
return True
else:
return (toggle & T_Face)
def parseShapeKeys(ob, me, args, tokens):
    for (key, val, sub) in tokens:
if key == 'ShapeKey':
parseShapeKey(ob, me, val, sub)
elif key == 'AnimationData':
if me.shape_keys:
parseAnimationData(me.shape_keys, val, sub)
ob.active_shape_key_index = 0
print("Shapekeys parsed")
def parseShapeKey(ob, me, args, tokens):
    if verbosity > 2:
print( "Parsing ob %s shape %s" % (bpy.context.object, args[0] ))
name = args[0]
lr = args[1]
if invalid(args[2]):
return
if lr == 'Sym' or toggle & T_Symm:
addShapeKey(ob, name, None, tokens)
elif lr == 'LR':
addShapeKey(ob, name+'_L', 'Left', tokens)
addShapeKey(ob, name+'_R', 'Right', tokens)
else:
raise NameError("ShapeKey L/R %s" % lr)
return
def addShapeKey(ob, name, vgroup, tokens):
    skey = ob.shape_key_add(name=name, from_mix=False)
if name != 'Basis':
skey.relative_key = loadedData['ShapeKey']['Basis']
skey.name = name
if vgroup:
skey.vertex_group = vgroup
loadedData['ShapeKey'][name] = skey
for (key, val, sub) in tokens:
if key == 'sv':
index = int(val[0])
pt = skey.data[index].co
pt[0] += theScale*float(val[1])
pt[1] += theScale*float(val[2])
pt[2] += theScale*float(val[3])
else:
defaultKey(key, val, sub, "skey", [], globals(), locals())
def parseArmature(args, tokens):
    global toggle
if verbosity > 2:
print( "Parsing armature %s" % args )
amtname = args[0]
obname = args[1]
mode = args[2]
if mode == 'Rigify':
toggle |= T_Rigify
return parseRigify(amtname, obname, tokens)
toggle &= ~T_Rigify
amt = bpy.data.armatures.new(amtname)
ob = createObject('ARMATURE', obname, amt, amtname)
linkObject(ob, amt)
print("Linked")
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.object.mode_set(mode='EDIT')
heads = {}
tails = {}
for (key, val, sub) in tokens:
if key == 'Bone':
bname = val[0]
if not invalid(val[1]):
bone = amt.edit_bones.new(bname)
parseBone(bone, amt, sub, heads, tails)
loadedData['Bone'][bname] = bone
elif key == 'RecalcRoll':
for bone in amt.edit_bones:
bone.select = False
blist = eval(val[0])
for name in blist:
bone = amt.edit_bones[name]
bone.select = True
bpy.ops.armature.calculate_roll(type='Z')
            rolls = {}
            for bone in amt.edit_bones:
rolls[bone.name] = bone.roll
bpy.ops.object.mode_set(mode='OBJECT')
for bone in amt.bones:
bone['Roll'] = rolls[bone.name]
bpy.ops.object.mode_set(mode='EDIT')
else:
defaultKey(key, val, sub, "amt", ['MetaRig'], globals(), locals())
bpy.ops.object.mode_set(mode='OBJECT')
return amt
#
# parseRigify(amtname, obname, tokens):
#
def parseRigify(amtname, obname, tokens):
(key,val,sub) = tokens[0]
if key != 'MetaRig':
raise NameError("Expected MetaRig")
typ = val[0]
if typ == "human":
bpy.ops.object.armature_human_advanced_add()
else:
bpy.ops.pose.metarig_sample_add(type = typ)
ob = bpy.context.scene.objects.active
amt = ob.data
loadedData['Rigify'][obname] = ob
loadedData['Armature'][amtname] = amt
loadedData['Object'][obname] = ob
print("Rigify object", ob, amt)
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.object.mode_set(mode='EDIT')
heads = {}
tails = {}
for (bname, bone) in amt.edit_bones.items():
heads[bname] = 10*theScale*bone.head
tails[bname] = 10*theScale*bone.tail
for (key, val, sub) in tokens:
if key == 'Bone':
bname = val[0]
print("Bone", bname)
try:
bone = amt.edit_bones[bname]
except:
print("Did not find bone %s" % bname)
bone = None
print(" -> ", bone)
if bone:
parseBone(bone, amt, sub, heads, tails)
else:
defaultKey(key, val, sub, "amt", ['MetaRig'], globals(), locals())
bpy.ops.object.mode_set(mode='OBJECT')
return amt
#
# parseBone(bone, amt, tokens, heads, tails):
#
def parseBone(bone, amt, tokens, heads, tails):
for (key, val, sub) in tokens:
if key == "head":
bone.head = (theScale*float(val[0]), theScale*float(val[1]), theScale*float(val[2]))
elif key == "tail":
bone.tail = (theScale*float(val[0]), theScale*float(val[1]), theScale*float(val[2]))
#elif key == 'restrict_select':
# pass
elif key == 'hide' and val[0] == 'True':
name = bone.name
'''
#bpy.ops.object.mode_set(mode='OBJECT')
pbone = amt.bones[name]
pbone.hide = True
print("Hide", pbone, pbone.hide)
#bpy.ops.object.mode_set(mode='EDIT')
'''
else:
defaultKey(key, val, sub, "bone", [], globals(), locals())
return bone
def parsePose(args, tokens):
    global todo
if toggle & T_Rigify:
return
name = args[0]
ob = loadedData['Object'][name]
bpy.context.scene.objects.active = ob
bpy.ops.object.mode_set(mode='POSE')
pbones = ob.pose.bones
nGrps = 0
for (key, val, sub) in tokens:
if key == 'Posebone':
parsePoseBone(pbones, ob, val, sub)
elif key == 'BoneGroup':
parseBoneGroup(ob.pose, nGrps, val, sub)
nGrps += 1
elif key == 'SetProp':
bone = val[0]
prop = val[1]
value = eval(val[2])
pb = pbones[bone]
print("Setting", pb, prop, val)
pb[prop] = value
print("Prop set", pb[prop])
else:
defaultKey(key, val, sub, "ob.pose", [], globals(), locals())
bpy.ops.object.mode_set(mode='OBJECT')
return ob
#
# parsePoseBone(pbones, args, tokens):
# parseArray(data, exts, args):
#
def parseBoneGroup(pose, nGrps, args, tokens):
if verbosity > 2:
print( "Parsing bonegroup %s" % args )
name = args[0]
bpy.ops.pose.group_add()
bg = pose.bone_groups.active
loadedData['BoneGroup'][name] = bg
for (key, val, sub) in tokens:
defaultKey(key, val, sub, "bg", [], globals(), locals())
return
def parsePoseBone(pbones, ob, args, tokens):
    global todo
if invalid(args[1]):
return
name = args[0]
pb = pbones[name]
amt = ob.data
amt.bones.active = pb.bone
for (key, val, sub) in tokens:
if key == 'Constraint':
amt.bones.active = pb.bone
cns = parseConstraint(pb.constraints, pb, val, sub)
amt.bones.active = pb.bone
print(expr)
exec(expr)
elif key == 'ik_dof':
parseArray(pb, ["ik_dof_x", "ik_dof_y", "ik_dof_z"], val)
elif key == 'ik_limit':
parseArray(pb, ["ik_limit_x", "ik_limit_y", "ik_limit_z"], val)
elif key == 'ik_max':
parseArray(pb, ["ik_max_x", "ik_max_y", "ik_max_z"], val)
elif key == 'ik_min':
parseArray(pb, ["ik_min_x", "ik_min_y", "ik_min_z"], val)
elif key == 'ik_stiffness':
parseArray(pb, ["ik_stiffness_x", "ik_stiffness_y", "ik_stiffness_z"], val)
elif key == 'hide':
#bpy.ops.object.mode_set(mode='OBJECT')
amt.bones[name].hide = eval(val[0])
#bpy.ops.object.mode_set(mode='POSE')
else:
defaultKey(key, val, sub, "pb", [], globals(), locals())
#print("pb %s done" % name)
return
def parseArray(data, exts, args):
    n = 1
for ext in exts:
expr = "data.%s = %s" % (ext, args[n])
# print(expr)
exec(expr)
n += 1
return
# parseConstraint(constraints, pb, args, tokens)
def parseConstraint(constraints, pb, args, tokens):
if (toggle&T_Opcns and pb):
print("Active")
aob = bpy.context.object
print("ob", aob)
aamt = aob.data
print("amt", aamt)
apose = aob.pose
print("pose", apose)
abone = aamt.bones.active
print("bone", abone)
print('Num cns before', len(list(constraints)))
bpy.ops.pose.constraint_add(type=args[1])
cns = constraints.active
print('and after', pb, cns, len(list(constraints)))
else:
cns = constraints.new(args[1])
cns.name = args[0]
for (key,val,sub) in tokens:
if key == 'invert':
parseArray(cns, ["invert_x", "invert_y", "invert_z"], val)
elif key == 'use':
parseArray(cns, ["use_x", "use_y", "use_z"], val)
elif key == 'pos_lock':
parseArray(cns, ["lock_location_x", "lock_location_y", "lock_location_z"], val)
elif key == 'rot_lock':
parseArray(cns, ["lock_rotation_x", "lock_rotation_y", "lock_rotation_z"], val)
else:
defaultKey(key, val, sub, "cns", [], globals(), locals())
#
# parseCurve (args, tokens):
# parseSpline(cu, args, tokens):
# parseBezier(spline, n, args, tokens):
def parseCurve(args, tokens):
    global todo
if verbosity > 2:
print( "Parsing curve %s" % args )
bpy.ops.object.add(type='CURVE')
cu = setObjectAndData(args, 'Curve')
for (key, val, sub) in tokens:
if key == 'Spline':
parseSpline(cu, val, sub)
else:
defaultKey(key, val, sub, "cu", [], globals(), locals())
return
def parseTextCurve (args, tokens):
global todo
if verbosity > 2:
print( "Parsing text curve %s" % args )
bpy.ops.object.text_add()
txt = setObjectAndData(args, 'Text')
for (key, val, sub) in tokens:
if key == 'Spline':
parseSpline(txt, val, sub)
elif key == 'BodyFormat':
parseCollection(txt.body_format, sub, [])
elif key == 'EditFormat':
parseDefault(txt.edit_format, sub, {}, [])
elif key == 'Font':
parseDefault(txt.font, sub, {}, [])
elif key == 'TextBox':
parseCollection(txt.body_format, sub, [])
else:
defaultKey(key, val, sub, "txt", [], globals(), locals())
return
def parseSpline(cu, args, tokens):
typ = args[0]
spline = cu.splines.new(typ)
nPointsU = int(args[1])
nPointsV = int(args[2])
#spline.point_count_u = nPointsU
#spline.point_count_v = nPointsV
if typ == 'BEZIER' or typ == 'BSPLINE':
spline.bezier_points.add(nPointsU)
else:
spline.points.add(nPointsU)
n = 0
for (key, val, sub) in tokens:
if key == 'bz':
parseBezier(spline.bezier_points[n], val, sub)
n += 1
elif key == 'pt':
parsePoint(spline.points[n], val, sub)
n += 1
else:
defaultKey(key, val, sub, "spline", [], globals(), locals())
return
def parseBezier(bez, args, tokens):
bez.co = eval(args[0])
bez.co = theScale*bez.co
bez.handle1 = eval(args[1])
bez.handle1_type = args[2]
bez.handle2 = eval(args[3])
bez.handle2_type = args[4]
return
def parsePoint(pt, args, tokens):
print(" pt", pt.co)
def parseLattice(args, tokens):
    global todo
if verbosity > 2:
print( "Parsing lattice %s" % args )
bpy.ops.object.add(type='LATTICE')
lat = setObjectAndData(args, 'Lattice')
for (key, val, sub) in tokens:
if key == 'Points':
parseLatticePoints(val, sub, lat.points)
else:
defaultKey(key, val, sub, "lat", [], globals(), locals())
return
def parseLatticePoints(args, tokens, points):
    global todo
n = 0
for (key, val, sub) in tokens:
if key == 'pt':
v = points[n].co_deform
v.x = theScale*float(val[0])
v.y = theScale*float(val[1])
            v.z = theScale*float(val[2])
            n += 1
    return
# parseLamp (args, tokens):
# parseFalloffCurve(focu, args, tokens):
def parseLamp (args, tokens):
global todo
if verbosity > 2:
print( "Parsing lamp %s" % args )
bpy.ops.object.add(type='LAMP')
lamp = setObjectAndData(args, 'Lamp')
for (key, val, sub) in tokens:
if key == 'FalloffCurve':
parseFalloffCurve(lamp.falloff_curve, val, sub)
else:
defaultKey(key, val, sub, "lamp", [], globals(), locals())
return
def parseFalloffCurve(focu, args, tokens):
# parseGroup (args, tokens):
# parseGroupObjects(args, tokens, grp):
#
def parseGroup (args, tokens):
global todo
if verbosity > 2:
print( "Parsing group %s" % args )
grpName = args[0]
grp = bpy.data.groups.new(grpName)
loadedData['Group'][grpName] = grp
for (key, val, sub) in tokens:
if key == 'Objects':
parseGroupObjects(val, sub, grp)
else:
defaultKey(key, val, sub, "grp", [], globals(), locals())
return
def parseGroupObjects(args, tokens, grp):
global todo
for (key, val, sub) in tokens:
if key == 'ob':
try:
ob = loadedData['Object'][val[0]]
grp.objects.link(ob)
except:
pass
return
#
def parseWorld (args, tokens):
global todo
if verbosity > 2:
print( "Parsing world %s" % args )
world = bpy.context.scene.world
for (key, val, sub) in tokens:
if key == 'Lighting':
parseDefault(world.lighting, sub, {}, [])
elif key == 'Mist':
parseDefault(world.mist, sub, {}, [])
elif key == 'Stars':
parseDefault(world.stars, sub, {}, [])
else:
defaultKey(key, val, sub, "world", [], globals(), locals())
return
#
# parseScene (args, tokens):
# parseRenderSettings(render, args, tokens):
# parseToolSettings(tool, args, tokens):
#
def parseScene (args, tokens):
global todo
if verbosity > 2:
print( "Parsing scene %s" % args )
scn = bpy.context.scene
for (key, val, sub) in tokens:
if key == 'NodeTree':
scn.use_nodes = True
parseNodeTree(scn, val, sub)
elif key == 'GameData':
parseDefault(scn.game_data, sub, {}, [])
elif key == 'KeyingSet':
pass
#parseDefault(scn.keying_sets, sub, {}, [])
elif key == 'ObjectBase':
pass
#parseDefault(scn.bases, sub, {}, [])
elif key == 'RenderSettings':
parseRenderSettings(scn.render, sub, [])
elif key == 'ToolSettings':
subkeys = {'ImagePaint' : "image_paint",
'Sculpt' : "sculpt",
'VertexPaint' : "vertex_paint",
'WeightPaint' : "weight_paint" }
parseDefault(scn.tool_settings, sub, subkeys, [])
elif key == 'UnitSettings':
parseDefault(scn.unit_settings, sub, {}, [])
else:
defaultKey(key, val, sub, "scn", [], globals(), locals())
return
def parseRenderSettings(render, args, tokens):
global todo
if verbosity > 2:
print( "Parsing RenderSettings %s" % args )
for (key, val, sub) in tokens:
if key == 'Layer':
pass
#parseDefault(scn.layers, sub, [])
else:
defaultKey(key, val, sub, "render", [], globals(), locals())
return
# parseDefineProperty(args, tokens):
def parseDefineProperty(args, tokens):
expr = "bpy.types.Object.%s = %sProperty" % (args[0], args[1])
c = '('
for option in args[2:]:
expr += "%s %s" % (c, option)
c = ','
expr += ')'
#print(expr)
exec(expr)
#print("Done")
return
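#
# Example of the expression assembled above, assuming a hypothetical MHX line
# 'DefineProperty MyProp Bool default=False':
#
#   bpy.types.Object.MyProp = BoolProperty( default=False)
#
# i.e. args[0] is the property name, args[1] the property type prefix, and the
# remaining args are passed through as keyword options.
#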
#
# correctRig(args):
#
def correctRig(args):
human = args[0]
print("CorrectRig %s" % human)
try:
ob = loadedData['Object'][human]
    except:
        return
bpy.context.scene.objects.active = ob
bpy.ops.object.mode_set(mode='POSE')
amt = ob.data
cnslist = []
for pb in ob.pose.bones:
for cns in pb.constraints:
if cns.type == 'CHILD_OF':
cnslist.append((pb, cns, cns.influence))
cns.influence = 0
for (pb, cns, inf) in cnslist:
amt.bones.active = pb.bone
cns.influence = 1
#print("Childof %s %s %s %.2f" % (amt.name, pb.name, cns.name, inf))
bpy.ops.constraint.childof_clear_inverse(constraint=cns.name, owner='BONE')
bpy.ops.constraint.childof_set_inverse(constraint=cns.name, owner='BONE')
cns.influence = 0
for (pb, cns, inf) in cnslist:
cns.influence = inf
return
#
# postProcess(args)
#
def postProcess(args):
human = args[0]
print("Postprocess %s" % human)
    try:
        ob = loadedData['Object'][human]
except:
ob = None
if toggle & T_Diamond == 0 and ob:
deleteDiamonds(ob)
if toggle & T_Rigify and False:
for rig in loadedData['Rigify'].values():
bpy.context.scene.objects.active = rig
print("Rigify", rig)
bpy.ops.pose.metarig_generate()
print("Metarig generated")
#bpy.context.scene.objects.unlink(rig)
rig = bpy.context.scene.objects.active
print("Rigged", rig, bpy.context.object)
ob = loadedData['Object'][human]
mod = ob.modifiers[0]
print(ob, mod, mod.object)
mod.object = rig
print("Rig changed", mod.object)
return
#
# deleteDiamonds(ob)
# Delete joint diamonds in main mesh
#
def deleteDiamonds(ob):
bpy.context.scene.objects.active = ob
if not bpy.context.object:
return
print("Delete diamonds in %s" % bpy.context.object)
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.select_all(action='DESELECT')
bpy.ops.object.mode_set(mode='OBJECT')
me = ob.data
for f in me.faces:
if len(f.vertices) < 4:
for vn in f.vertices:
me.vertices[vn].select = True
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.delete(type='VERT')
bpy.ops.object.mode_set(mode='OBJECT')
return
#
# parseProcess(args, tokens):
# applyTransform(objects, rig, parents):
def parseProcess(args, tokens):
    if toggle & T_Bend == 0:
return
try:
rig = loadedData['Object'][args[0]]
except:
rig = None
if not rig:
return
parents = {}
objects = []
for (key, val, sub) in tokens:
#print(key, val)
if key == 'Reparent':
bname = val[0]
try:
eb = ebones[bname]
parents[bname] = eb.parent.name
eb.parent = ebones[val[1]]
except:
pass
elif key == 'Bend':
axis = val[1]
angle = float(val[2])
mat = Matrix.Rotation(angle, 4, axis)
if pb:
prod = pb.matrix_basis * mat
for i in range(4):
for j in range(4):
pb.matrix_basis[i][j] = prod[i][j]
elif key == 'Snap':
try:
eb = ebones[val[0]]
except:
eb = None
tb = ebones[val[1]]
typ = val[2]
if eb is None:
pass
elif typ == 'Inv':
eb.head = tb.tail
eb.tail = tb.head
elif typ == 'Head':
eb.head = tb.head
elif typ == 'Tail':
eb.tail = tb.tail
elif typ == 'Both':
eb.head = tb.head
eb.tail = tb.tail
eb.roll = tb.roll
else:
raise NameError("Snap type %s" % typ)
elif key == 'PoseMode':
bpy.context.scene.objects.active = rig
bpy.ops.object.mode_set(mode='POSE')
pbones = rig.pose.bones
elif key == 'ObjectMode':
bpy.context.scene.objects.active = rig
            bpy.ops.object.mode_set(mode='OBJECT')
pbones = rig.pose.bones
elif key == 'EditMode':
bpy.context.scene.objects.active = rig
bpy.ops.object.mode_set(mode='EDIT')
ebones = rig.data.edit_bones
bpy.ops.armature.select_all(action='DESELECT')
elif key == 'Roll':
try:
eb = ebones[val[0]]
except:
eb = None
if eb:
eb.roll = float(val[1])
elif key == 'Select':
pass
elif key == 'RollUp':
pass
elif key == 'Apply':
applyTransform(objects, rig, parents)
elif key == 'ApplyArmature':
try:
ob = loadedData['Object'][val[0]]
objects.append((ob,sub))
except:
ob = None
elif key == 'Object':
try:
ob = loadedData['Object'][val[0]]
except:
ob = None
if ob:
bpy.context.scene.objects.active = ob
#mod = ob.modifiers[0]
#ob.modifiers.remove(mod)
for (key1, val1, sub1) in sub:
if key1 == 'Modifier':
parseModifier(ob, val1, sub1)
return
def applyTransform(objects, rig, parents):
for (ob,tokens) in objects:
print("Applying transform to %s" % ob)
bpy.context.scene.objects.active = ob
bpy.ops.object.visual_transform_apply()
bpy.ops.object.modifier_apply(apply_as='DATA', modifier='Armature')
bpy.context.scene.objects.active = rig
bpy.ops.object.mode_set(mode='POSE')
bpy.ops.pose.armature_apply()
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.object.mode_set(mode='EDIT')
ebones = rig.data.edit_bones
for (bname, pname) in parents.items():
eb = ebones[bname]
par = ebones[pname]
if eb.use_connect:
par.tail = eb.head
eb.parent = par
# defaultKey(ext, args, tokens, var, exclude, glbals, lcals):
#
def defaultKey(ext, args, tokens, var, exclude, glbals, lcals):
global todo
if ext == 'Property':
try:
expr = "%s['%s'] = %s" % (var, args[0], args[1])
except:
expr = None
#print("Property", expr)
if ext == 'bpyops':
expr = "bpy.ops.%s" % args[0]
print(expr)
exec(expr)
return
nvar = "%s.%s" % (var, ext)
#print(ext)
if ext in exclude:
return
#print("D", nvar)
if len(args) == 0:
raise NameError("Key length 0: %s" % ext)
rnaType = args[0]
if rnaType == 'Add':
print("*** Cannot Add yet ***")
return
elif rnaType == 'Refer':
typ = args[1]
name = args[2]
data = "loadedData['%s']['%s']" % (typ, name)
elif rnaType == 'Struct' or rnaType == 'Define':
typ = args[1]
name = args[2]
try:
data = eval(nvar, glbals, lcals)
except:
data = None
# print("Old structrna", nvar, data)
if data is None:
try:
creator = args[3]
except:
creator = None
# print("Creator", creator, eval(var,glbals,lcals))
try:
rna = eval(var,glbals,lcals)
data = eval(creator)
except:
data = None
# print("New struct", nvar, typ, data)
if rnaType == 'Define':
loadedData[typ][name] = data
if data:
for (key, val, sub) in tokens:
defaultKey(key, val, sub, "data", [], globals(), locals())
print("Struct done", nvar)
return
elif rnaType == 'PropertyRNA':
raise NameError("PropertyRNA!")
#print("PropertyRNA ", ext, var)
for (key, val, sub) in tokens:
defaultKey(ext, val, sub, nvar, [], glbals, lcals)
return
elif rnaType == 'Array':
for n in range(1, len(args)):
expr = "%s[%d] = %s" % (nvar, n-1, args[n])
exec(expr, glbals, lcals)
if len(args) > 0:
expr = "%s[0] = %s" % (nvar, args[1])
exec(expr, glbals, lcals)
return
elif rnaType == 'List':
data = []
for (key, val, sub) in tokens:
elt = eval(val[1], glbals, lcals)
data.append(elt)
elif rnaType == 'Matrix':
return
i = 0
n = len(tokens)
for (key, val, sub) in tokens:
if key == 'row':
for j in range(n):
expr = "%s[%d][%d] = %g" % (nvar, i, j, float(val[j]))
exec(expr, glbals, lcals)
i += 1
return
else:
try:
data = loadedData[rnaType][args[1]]
#print("From loaded", rnaType, args[1], data)
return data
except:
data = rnaType
#print(var, ext, data)
expr = "%s = %s" % (nvar, data)
try:
exec(expr, glbals, lcals)
except:
pushOnTodoList(var, expr, glbals, lcals)
return
#
#
#
def pushOnTodoList(var, expr, glbals, lcals):
global todo
print("Tdo", var)
print(dir(eval(var, glbals, lcals)))
raise NameError("Todo", expr)
todo.append((expr, glbals, lcals))
return
list = []
for c in mask:
if c == '0':
list.append(False)
else:
list.append(True)
return list
i = 0
for (key, val, sub) in tokens:
if key == 'row':
matrix[i][0] = float(val[0])
matrix[i][1] = float(val[1])
matrix[i][2] = float(val[2])
matrix[i][3] = float(val[3])
i += 1
return matrix
def parseDefault(data, tokens, subkeys, exclude):
for (key, val, sub) in tokens:
if key in subkeys.keys():
for (key2, val2, sub2) in sub:
defaultKey(key2, val2, sub2, "data.%s" % subkeys[key], [], globals(), locals())
else:
defaultKey(key, val, sub, "data", exclude, globals(), locals())
def parseCollection(data, tokens, exclude):
typeSplit = str(type(data)).split("'")
if typeSplit[0] != '<class ':
return None
classSplit = typeSplit[1].split(".")
if classSplit[0] == 'bpy' and classSplit[1] == 'types':
return classSplit[2]
elif classSplit[0] == 'bpy_types':
return classSplit[1]
else:
return None
if string == 'True':
return True
elif string == 'False':
return False
else:
raise NameError("Bool %s?" % string)
def invalid(condition):
    global rigLeg, rigArm, toggle
try:
res = eval(condition, globals())
#print("%s = %s" % (condition, res))
return not res
except:
#print("%s invalid!" % condition)
return True
def clearScene():
    global toggle
scn = bpy.context.scene
for n in range(len(scn.layers)):
scn.layers[n] = True
print("clearScene %s %s" % (toggle & T_Replace, scn))
if not toggle & T_Replace:
return scn
for ob in scn.objects:
if ob.type in ["MESH", "ARMATURE", 'EMPTY', 'CURVE', 'LATTICE']:
try:
bpy.ops.object.mode_set(mode='OBJECT')
except:
pass
scn.objects.unlink(ob)
del ob
#print(scn.objects)
return scn
#
# hideLayers(args):
# args = sceneLayers sceneHideLayers boneLayers boneHideLayers or nothing
#
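# Illustration of the hex masks handled below (the values are made up):
# a sceneLayers mask of "3" sets bits 0 and 1, so scene layers 0 and 1 are
# switched on and the rest off; a boneHideLayers mask of "8" marks bone
# layer 3, and every bone on that layer gets hide = True.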
def hideLayers(args):
if len(args) > 1:
sceneLayers = int(args[2], 16)
sceneHideLayers = int(args[3], 16)
boneLayers = int(args[4], 16)
boneHideLayers = int(args[5], 16)
else:
sceneLayers = 0x00ff
sceneHideLayers = 0
boneLayers = 0
boneHideLayers = 0
    scn = bpy.context.scene
    mask = 1
hidelayers = []
for n in range(20):
scn.layers[n] = True if sceneLayers & mask else False
if sceneHideLayers & mask:
hidelayers.append(n)
mask = mask << 1
for ob in scn.objects:
for n in hidelayers:
if ob.layers[n]:
ob.hide = True
if boneLayers:
human = args[1]
try:
ob = loadedData['Object'][human]
except:
return
mask = 1
hidelayers = []
for n in range(32):
ob.data.layers[n] = True if boneLayers & mask else False
if boneHideLayers & mask:
hidelayers.append(n)
mask = mask << 1
for b in ob.data.bones:
for n in hidelayers:
if b.layers[n]:
b.hide = True
return
#
# readDefaults():
# writeDefaults():
#
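#
# Sketch of the ~/mhx_import.cfg format handled below: writeDefaults() writes a
# single line "<toggle in hex> <scale> <Blender version name>" and readDefaults()
# splits it back into the three globals.  An illustrative line (values are
# hypothetical) would be:
#
#   7e3 1.000000 Graphicall
#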
ConfigFile = '~/mhx_import.cfg'
def readDefaults():
global toggle, theScale, theBlenderVersion, BlenderVersions
path = os.path.realpath(os.path.expanduser(ConfigFile))
try:
fp = open(path, 'rU')
        print('Reading defaults')
except:
print('Cannot open "%s" for reading' % path)
return
bver = ''
for line in fp:
words = line.split()
if len(words) >= 3:
try:
toggle = int(words[0],16)
theScale = float(words[1])
theBlenderVersion = BlenderVersions.index(words[2])
except:
print('Configuration file "%s" is corrupt' % path)
fp.close()
return
def writeDefaults():
global toggle, theScale, theBlenderVersion, BlenderVersions
path = os.path.realpath(os.path.expanduser(ConfigFile))
try:
fp = open(path, 'w')
print('Storing defaults')
except:
print('Cannot open "%s" for writing' % path)
return
fp.write("%x %f %s" % (toggle, theScale, BlenderVersions[theBlenderVersion]))
fp.close()
DEBUG = False
from bpy.props import StringProperty, FloatProperty, EnumProperty, BoolProperty
from bpy_extras.io_utils import ImportHelper
MhxBoolProps = [
("enforce", "Enforce version", "Only accept MHX files of correct version", T_EnforceVersion),
("mesh", "Mesh", "Use main mesh", T_Mesh),
("proxy", "Proxies", "Use proxies", T_Proxy),
("armature", "Armature", "Use armature", T_Armature),
("replace", "Replace scene", "Replace scene", T_Replace),
("cage", "Cage", "Load mesh deform cage", T_Cage),
("clothes", "Clothes", "Include clothes", T_Clothes),
("stretch", "Stretchy limbs", "Stretchy limbs", T_Stretch),
("face", "Face shapes", "Include facial shapekeys", T_Face),
("shape", "Body shapes", "Include body shapekeys", T_Shape),
("symm", "Symmetric shapes", "Keep shapekeys symmetric", T_Symm),
("diamond", "Diamonds", "Keep joint diamonds", T_Diamond),
("bend", "Bend joints", "Bend joints for better IK", T_Bend),
#("opcns", "Operator constraints", "Only for Aligorith", T_Opcns),
]
class ImportMhx(bpy.types.Operator, ImportHelper):
'''Import from MHX file format (.mhx)'''
bl_idname = "import_scene.makehuman_mhx"
bl_description = 'Import from MHX file format (.mhx)'
bl_label = "Import MHX"
bl_space_type = "PROPERTIES"
bl_region_type = "WINDOW"
scale = FloatProperty(name="Scale", description="Default meter, decimeter = 1.0", default = theScale)
enums = []
for enum in BlenderVersions:
enums.append((enum,enum,enum))
bver = EnumProperty(name="Blender version", items=enums, default = BlenderVersions[0])
filename_ext = ".mhx"
filter_glob = StringProperty(default="*.mhx", options={'HIDDEN'})
filepath = StringProperty(name="File Path", description="File path used for importing the MHX file", maxlen= 1024, default= "")
for (prop, name, desc, flag) in MhxBoolProps:
expr = '%s = BoolProperty(name="%s", description="%s", default=toggle&%s)' % (prop, name, desc, flag)
exec(expr)
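    # Each loop iteration above exec's one class-level assignment; for the
    # ("mesh", "Mesh", "Use main mesh", T_Mesh) entry the generated statement is
    #
    #   mesh = BoolProperty(name="Mesh", description="Use main mesh", default=toggle&256)
    #
    # where 256 is the numeric value of T_Mesh substituted by the %s format.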
    def execute(self, context):
        global toggle, theScale, MhxBoolProps, theBlenderVersion, BlenderVersions
toggle = 0
for (prop, name, desc, flag) in MhxBoolProps:
expr = '(%s if self.%s else 0)' % (flag, prop)
toggle |= eval(expr)
print("execute flags %x" % toggle)
theScale = self.scale
theBlenderVersion = BlenderVersions.index(self.bver)
readMhxFile(self.filepath)
writeDefaults()
return {'FINISHED'}
def invoke(self, context, event):
global toggle, theScale, MhxBoolProps, theBlenderVersion, BlenderVersions
readDefaults()
self.scale = theScale
self.bver = BlenderVersions[theBlenderVersion]
for (prop, name, desc, flag) in MhxBoolProps:
expr = 'self.%s = toggle&%s' % (prop, flag)
exec(expr)
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
def menu_func(self, context):
self.layout.operator(ImportMhx.bl_idname, text="MakeHuman (.mhx)...")
def register():
    bpy.utils.register_module(__name__)
    bpy.types.INFO_MT_file_import.append(menu_func)

def unregister():
    bpy.utils.unregister_module(__name__)
    bpy.types.INFO_MT_file_import.remove(menu_func)
if __name__ == "__main__":
    try:
        unregister()
    except:
        pass
    register()
#readMhxFile("C:/Documents and Settings/xxxxxxxxxxxxxxxxxxxx/Mina dokument/makehuman/exports/foo-25.mhx", 'Classic')
readMhxFile("/home/thomas/makehuman/exports/foo-25.mhx", 1.0)
#toggle = T_Replace + T_Mesh + T_Armature + T_MHX
#readMhxFile("/home/thomas/myblends/test.mhx", 1.0)
"""