# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Project Name: MakeHuman
# Product Home Page: http://www.makehuman.org/
# Code Home Page: http://code.google.com/p/makehuman/
# Authors: Thomas Larsson
# Script copyright (C) MakeHuman Team 2001-2011
# Coding Standards: See http://sites.google.com/site/makehumandocs/developers-guide
"""
Abstract
MHX (MakeHuman eXchange format) importer for Blender 2.5x.
Version 1.6.1

This script should be distributed with Blender.
If not, place it in the .blender/scripts/addons directory.
Activate the script in the "Add-Ons" tab (user preferences).
Access from the File > Import menu.

Alternatively, run the script in the script editor (Alt-P), and access from the File > Import menu.
"""
bl_info = {
    'name': 'Import: MakeHuman (.mhx)',
    'author': 'Thomas Larsson',
    'version': (1, 6, 1),
    "blender": (2, 5, 8),
    "api": 37702,
    'location': "File > Import > MakeHuman (.mhx)",
    'description': 'Import files in the MakeHuman eXchange format (.mhx)',
    'warning': '',
    'wiki_url': 'http://sites.google.com/site/makehumandocs/blender-export-and-mhx',
    'tracker_url': 'https://projects.blender.org/tracker/index.php?',
    'category': 'Import-Export'}
MAJOR_VERSION = 1
MINOR_VERSION = 6
SUB_VERSION = 1
#
#
#
import bpy
import os
import time
from mathutils import Matrix
MHX249 = False
Blender24 = False
Blender25 = True
TexDir = "~/makehuman/exports"
#
#
#
theScale = 1.0
One = 1.0/theScale
useMesh = 1
verbosity = 2
warnedTextureDir = False
warnedVersion = False
true = True
false = False
Epsilon = 1e-6
nErrors = 0
theTempDatum = None
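# Import options are packed into the global bitmask 'toggle'; each T_* flag
# below enables one feature of the import (version check, clothes, shape
# keys, proxies, ...).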
T_EnforceVersion = 0x01
T_Clothes = 0x02
T_Stretch = 0x04
T_Limit = 0x08
T_Diamond = 0x10
T_Replace = 0x20
T_Face = 0x40
T_Shape = 0x80
T_Mesh = 0x100
T_Armature = 0x200
T_Proxy = 0x400
T_Cage = 0x800
T_Rigify = 0x1000
T_Opcns = 0x2000
toggle = (T_EnforceVersion + T_Replace + T_Mesh + T_Armature +
T_Face + T_Shape + T_Proxy + T_Clothes + T_Rigify + T_Limit)
#
# Blender versions
#
BLENDER_GRAPHICALL = 0
BLENDER_256a = 1
BlenderVersions = ['Graphicall', 'Blender256a']
theBlenderVersion = BLENDER_GRAPHICALL
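# The importer needs to know which Blender build it runs in, because a few
# RNA calls differ between builds (see the keyframe_points handling in
# parseAnimDataFCurve below).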
#fFingerPanel = 0.0
#fFingerIK = 0.0
fNoStretch = 0.0
#T_GoboFoot = 0x0002
#T_InvFoot = 0x0010
#T_InvFootPT = 0x0020
#T_InvFootNoPT = 0x0040
#T_FingerPanel = 0x100
#T_FingerRot = 0x0200
#T_FingerIK = 0x0400
#T_LocalFKIK = 0x8000
#rigLeg = 0
#rigArm = 0
def setFlagsAndFloats():
    '''
    (footRig, fingerRig) = rigFlags
    rigLeg = 0
    if footRig == 'Reverse foot':
        rigLeg |= T_InvFoot
        if toggle & T_PoleTar:
            rigLeg |= T_InvFootPT
        else:
            rigLeg |= T_InvFootNoPT
    elif footRig == 'Gobo': rigLeg |= T_GoboFoot

    rigArm = 0
    if fingerRig == 'Panel': rigArm |= T_FingerPanel
    elif fingerRig == 'Rotation': rigArm |= T_FingerRot
    elif fingerRig == 'IK': rigArm |= T_FingerIK

    toggle |= T_Panel
    '''
    global fNoStretch
    if toggle&T_Stretch: fNoStretch = 0.0
    else: fNoStretch = 1.0
    return
loadedData = {
    'NONE' : {},
    'Object' : {},
    'Mesh' : {},
    'Armature' : {},
    'Lamp' : {},
    'Camera' : {},
    'Lattice' : {},
    'Curve' : {},
    'Text' : {},
    'Material' : {},
    'Image' : {},
    'MaterialTextureSlot' : {},
    'Texture' : {},
    'Bone' : {},
    'BoneGroup' : {},
    'Rigify' : {},
    'Action' : {},
    'Group' : {},
    'MeshTextureFaceLayer' : {},
    'MeshColorLayer' : {},
    'VertexGroup' : {},
    'ShapeKey' : {},
    'ParticleSystem' : {},
    'ObjectConstraints' : {},
    'ObjectModifiers' : {},
    'MaterialSlot' : {},
}

Plural = {
    'Object' : 'objects',
    'Mesh' : 'meshes',
    'Lattice' : 'lattices',
    'Curve' : 'curves',
    'Text' : 'texts',
    'Group' : 'groups',
    'Empty' : 'empties',
    'Armature' : 'armatures',
    'Bone' : 'bones',
    'BoneGroup' : 'bone_groups',
    'Pose' : 'poses',
    'PoseBone' : 'pose_bones',
    'Material' : 'materials',
    'Texture' : 'textures',
    'Image' : 'images',
    'Camera' : 'cameras',
    'Lamp' : 'lamps',
    'World' : 'worlds',
}
#
def checkBlenderVersion():
    print("Found Blender", bpy.app.version)
    (A, B, C) = bpy.app.version
    (a, b, c) = BLENDER_VERSION
    if a <= A: return
    if b <= B: return
    if c <= C: return
    msg = (
        "This version of the MHX importer only works with \n" +
        "Blender (%d, %d, %d) or later.\n" % (a, b, c) +
        "Download a more recent Blender from \n" +
        "www.blender.org or www.graphicall.org.\n")
    MyError(msg)
# readMhxFile(filePath):
def readMhxFile(filePath):
global todo, nErrors, theScale, defaultScale, One, toggle
#checkBlenderVersion()
defaultScale = theScale
One = 1.0/theScale
fileName = os.path.expanduser(filePath)
(shortName, ext) = os.path.splitext(fileName)
if ext.lower() != ".mhx":
print("Error: Not a mhx file: " + fileName)
return
print( "Opening MHX file "+ fileName )
time1 = time.clock()
stack = []
tokens = []
key = "toplevel"
level = 0
nErrors = 0
comment = 0
nesting = 0
setFlagsAndFloats()
file= open(fileName, "rU")
print( "Tokenizing" )
lineNo = 0
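    # The tokenizer splits each line into words. '#if/#else/#endif' lines act
    # as a small preprocessor that can disable whole blocks; a line ending in
    # ';' becomes a leaf token [key, args, []]; any other line opens a block
    # whose sub-tokens are collected until the matching 'end'.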
for line in file:
# print(line)
lineSplit= line.split()
lineNo += 1
if len(lineSplit) == 0:
pass
elif lineSplit[0][0] == '#':
if lineSplit[0] == '#if':
if comment == nesting:
try:
res = eval(lineSplit[1])
except:
res = False
if res:
comment += 1
nesting += 1
elif lineSplit[0] == '#else':
if comment == nesting-1:
comment += 1
elif comment == nesting:
comment -= 1
elif lineSplit[0] == '#endif':
if comment == nesting:
comment -= 1
nesting -= 1
elif comment < nesting:
pass
elif lineSplit[0] == 'end':
try:
sub = tokens
tokens = stack.pop()
if tokens:
tokens[-1][2] = sub
level -= 1
except:
print( "Tokenizer error at or before line %d" % lineNo )
print( line )
elif lineSplit[-1] == ';':
if lineSplit[0] == '\\':
key = lineSplit[1]
tokens.append([key,lineSplit[2:-1],[]])
else:
key = lineSplit[0]
tokens.append([key,lineSplit[1:-1],[]])
else:
key = lineSplit[0]
tokens.append([key,lineSplit[1:],[]])
stack.append(tokens)
level += 1
tokens = []
file.close()
if level != 0:
MyError("Tokenizer out of kilter %d" % level)
clearScene()
print( "Parsing" )
parse(tokens)
    for (expr, glbals, lcals) in todo:
        try:
            print("Doing %s" % expr)
            exec(expr, glbals, lcals)
        except:
            print("Failed: %s" % expr)
            nErrors += 1

    time2 = time.clock()
    print("toggle = %x" % toggle)
    msg = "File %s loaded in %g s" % (fileName, time2-time1)
    if nErrors:
        msg += " but there were %d errors. " % (nErrors)
    print(msg)
    return
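
# A minimal way to run the importer directly from Blender's text editor or
# Python console (the module name follows the file name; the path below is
# just an example):
#
#   import import_scene_mhx
#   import_scene_mhx.readMhxFile("~/makehuman/exports/example.mhx")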
#
# getObject(name, var, glbals, lcals):
def getObject(name, var, glbals, lcals):
    try:
        ob = loadedData['Object'][name]
    except:
        if name != "None":
            pushOnTodoList(None, "ob = loadedData['Object'][name]" % globals(), locals())
        ob = None
    return ob
#
def checkMhxVersion(major, minor):
    global warnedVersion
    print((major, minor), (MAJOR_VERSION, MINOR_VERSION), warnedVersion)
    if major != MAJOR_VERSION or minor != MINOR_VERSION:
        msg = (
            "Wrong MHX version\n" +
            "Expected MHX %d.%d but the loaded file \n" % (MAJOR_VERSION, MINOR_VERSION) +
            "has version MHX %d.%d\n" % (major, minor) +
            "You can disable this error message by deselecting the \n" +
            "Enforce version option when importing. \n" +
            "Alternatively, you can try to download the most recent \n" +
            "nightly build from www.makehuman.org. \n" +
            "The current version of the import script is located in the \n" +
            "importers/mhx/blender25x folder and is called import_scene_mhx.py. \n" +
            "The version distributed with Blender builds from \n" +
            "www.graphicall.org may be out of date.\n")
        if toggle & T_EnforceVersion:
            MyError(msg)
        else:
            print(msg)
            warnedVersion = True
    return
#
ifResult = False
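# parse() walks the top-level token list and dispatches on the keyword:
# known datablock types (Mesh, Armature, Material, ...) go to dedicated
# parse functions, and anything unrecognized falls through to
# parseDefaultType().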
def parse(tokens):
global MHX249, ifResult, theScale, defaultScale, One
for (key, val, sub) in tokens:
print("Parse %s" % key)
data = None
if key == 'MHX':
checkMhxVersion(int(val[0]), int(val[1]))
elif key == 'MHX249':
MHX249 = eval(val[0])
print("Blender 2.49 compatibility mode is %s\n" % MHX249)
elif MHX249:
pass
elif key == 'print':
msg = concatList(val)
print(msg)
elif key == 'warn':
msg = concatList(val)
print(msg)
elif key == 'error':
msg = concatList(val)
elif key == 'NoScale':
if eval(val[0]):
theScale = 1.0
else:
theScale = defaultScale
One = 1.0/theScale
elif key == "Object":
parseObject(val, sub)
elif key == "Mesh":
data = parseMesh(val, sub)
elif key == "Armature":
data = parseArmature(val, sub)
elif key == "Pose":
data = parsePose(val, sub)
elif key == "Action":
data = parseAction(val, sub)
elif key == "Material":
data = parseMaterial(val, sub)
elif key == "Texture":
data = parseTexture(val, sub)
elif key == "Image":
data = parseImage(val, sub)
elif key == "Curve":
data = parseCurve(val, sub)
elif key == "TextCurve":
data = parseTextCurve(val, sub)
elif key == "Lattice":
data = parseLattice(val, sub)
elif key == "Group":
data = parseGroup(val, sub)
elif key == "Lamp":
data = parseLamp(val, sub)
elif key == "World":
data = parseWorld(val, sub)
elif key == "Scene":
data = parseScene(val, sub)
elif key == "DefineProperty":
parseDefineProperty(val, sub)
elif key == "PostProcess":
postProcess(val)
hideLayers(val)
elif key == "CorrectRig":
correctRig(val)
elif key == "Rigify":
if toggle & T_Rigify:
rigifyMhx(bpy.context, val[0])
elif key == 'AnimationData':
try:
ob = loadedData['Object'][val[0]]
except:
ob = None
if ob:
bpy.context.scene.objects.active = ob
parseAnimationData(ob, val, sub)
elif key == 'MaterialAnimationData':
try:
ob = loadedData['Object'][val[0]]
except:
ob = None
if ob:
bpy.context.scene.objects.active = ob
mat = ob.data.materials[int(val[2])]
print("matanim", ob, mat)
parseAnimationData(mat, val, sub)
elif key == 'ShapeKeys':
try:
ob = loadedData['Object'][val[0]]
except:
MyError("ShapeKeys object %s does not exist" % val[0])
parseShapeKeys(ob, ob.data, val, sub)
else:
data = parseDefaultType(key, val, sub)
if data and key != 'Mesh':
print( data )
return
#
# parseDefaultType(typ, args, tokens):
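#
# Generic data blocks are created with bpy.data.<plural>.new(name) (the
# collection attribute comes from the Plural table above), registered in
# loadedData under their capitalized type name, and all remaining keys are
# handed to defaultKey().
#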
def parseDefaultType(typ, args, tokens):
    name = args[0]
    data = None
    expr = "bpy.data.%s.new('%s')" % (Plural[typ], name)
    # print(expr)
    data = eval(expr)
    # print(" ok", data)
    bpyType = typ.capitalize()
    print(bpyType, name, data)
    loadedData[bpyType][name] = data
    if data is None:
        return None

    for (key, val, sub) in tokens:
        #print("%s %s" % (key, val))
        defaultKey(key, val, sub, 'data', [], globals(), locals())
    print("Done ", data)
    return data
string = ""
for elt in elts:
string += " %s" % elt
return string
# parseAction(args, tokens):
# parseFCurve(fcu, args, tokens):
# parseKeyFramePoint(pt, args, tokens):
def parseAction(args, tokens):
    name = args[0]
    if invalid(args[1]):
        return

    ob = bpy.context.object
    bpy.ops.object.mode_set(mode='POSE')
    if ob.animation_data:
        ob.animation_data.action = None
    created = {}
    for (key, val, sub) in tokens:
        if key == 'FCurve':
            prepareActionFCurve(ob, created, val, sub)

    act = ob.animation_data.action
    loadedData['Action'][name] = act
    if act is None:
print("Ignoring action %s" % name)
return act
act.name = name
print("Action", name, act, ob)
for (key, val, sub) in tokens:
if key == 'FCurve':
fcu = parseActionFCurve(act, ob, val, sub)
else:
defaultKey(key, val, sub, 'act', [], globals(), locals())
ob.animation_data.action = None
bpy.ops.object.mode_set(mode='OBJECT')
return act
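
# Actions are rebuilt in two passes: prepareActionFCurve() inserts dummy
# keyframes with keyframe_insert() so that Blender creates the FCurves, and
# parseActionFCurve() then finds each curve again and overwrites its keyframe
# points with the values stored in the mhx file.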
def prepareActionFCurve(ob, created, args, tokens):
dataPath = args[0]
index = args[1]
(expr, channel) = channelFromDataPath(dataPath, index)
try:
if channel in created[expr]:
return
else:
created[expr].append(channel)
except:
created[expr] = [channel]
times = []
for (key, val, sub) in tokens:
if key == 'kp':
times.append(int(val[0]))
try:
data = eval(expr)
except:
print("Ignoring illegal expression: %s" % expr)
return
n = 0
for t in times:
#bpy.context.scene.current_frame = t
bpy.ops.anim.change_frame(frame = t)
try:
data.keyframe_insert(channel)
n += 1
except:
pass
#print("failed", data, expr, channel)
if n != len(times):
print("Mismatch", n, len(times), expr, channel)
return
def channelFromDataPath(dataPath, index):
    words = dataPath.split(']')
    if len(words) == 1:
        # location
        expr = "ob"
        channel = dataPath
    elif len(words) == 2:
        # pose.bones["tongue"].location
        expr = "ob.%s]" % (words[0])
        cwords = words[1].split('.')
        channel = cwords[1]
    elif len(words) == 3:
        # pose.bones["brow.R"]["mad"]
        expr = "ob.%s]" % (words[0])
        cwords = words[1].split('"')
        channel = cwords[1]
    # print(expr, channel, index)
    return (expr, channel)
def parseActionFCurve(act, ob, args, tokens):
    dataPath = args[0]
    index = args[1]
    (expr, channel) = channelFromDataPath(dataPath, index)
    index = int(args[1])

    success = False
    for fcu in act.fcurves:
        (expr1, channel1) = channelFromDataPath(fcu.data_path, fcu.array_index)
        if expr1 == expr and channel1 == channel and fcu.array_index == index:
            success = True
            break
    if not success:
        return None

    n = 0
    for (key, val, sub) in tokens:
        if key == 'kp':
            try:
                pt = fcu.keyframe_points[n]
                pt.interpolation = 'LINEAR'
                pt = parseKeyFramePoint(pt, val, sub)
                n += 1
            except:
                pass
                #print(tokens)
                #MyError("kp", fcu, n, len(fcu.keyframe_points), val)
        else:
            defaultKey(key, val, sub, 'fcu', [], globals(), locals())
    return fcu
def parseKeyFramePoint(pt, args, tokens):
    pt.co = (float(args[0]), float(args[1]))
    if len(args) > 2:
        pt.handle1 = (float(args[2]), float(args[3]))
        pt.handle2 = (float(args[4]), float(args[5]))
    return pt
# parseAnimationData(rna, args, tokens):
# parseDriver(drv, args, tokens):
# parseDriverVariable(var, args, tokens):
#
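# When an FCurve carries a Driver, parseDriver() rebuilds it by calling
# driver_add() on the owning datablock, and parseAnimDataFCurve() then
# removes the generator FModifier that driver_add() creates by default.
#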
def parseAnimationData(rna, args, tokens):
print("Parse Animation data")
if rna.animation_data is None:
rna.animation_data_create()
adata = rna.animation_data
for (key, val, sub) in tokens:
if key == 'FCurve':
fcu = parseAnimDataFCurve(adata, rna, val, sub)
else:
defaultKey(key, val, sub, 'adata', [], globals(), locals())
print(adata)
def parseAnimDataFCurve(adata, rna, args, tokens):
global theBlenderVersion
if invalid(args[2]):
return
dataPath = args[0]
index = int(args[1])
n = 1
for (key, val, sub) in tokens:
if key == 'Driver':
fcu = parseDriver(adata, dataPath, index, rna, val, sub)
fmod = fcu.modifiers[0]
fcu.modifiers.remove(fmod)
elif key == 'FModifier':
parseFModifier(fcu, val, sub)
elif key == 'kp':
if theBlenderVersion >= BLENDER_256a:
pt = fcu.keyframe_points.add(n, 0)
else:
pt = fcu.keyframe_points.insert(n, 0)
pt.interpolation = 'LINEAR'
pt = parseKeyFramePoint(pt, val, sub)
n += 1
else:
defaultKey(key, val, sub, 'fcu', [], globals(), locals())
return fcu
"""
    fcurve = con.driver_add("influence", 0)
    driver = fcurve.driver
    driver.type = 'AVERAGE'
"""
def parseDriver(adata, dataPath, index, rna, args, tokens):
if dataPath[-1] == ']':
words = dataPath.split(']')
expr = "rna." + words[0] + ']'
pwords = words[1].split('"')
prop = pwords[1]
#print("prop", expr, prop)
bone = eval(expr)
return None
else:
words = dataPath.split('.')
channel = words[-1]
expr = "rna"
for n in range(len(words)-1):
expr += "." + words[n]
expr += ".driver_add('%s', index)" % channel
#print("expr", rna, expr)
fcu = eval(expr)
drv = fcu.driver
#print(" Driver type", drv, args[0])
#print(" ->", drv.type)
for (key, val, sub) in tokens:
if key == 'DriverVariable':
var = parseDriverVariable(drv, rna, val, sub)
else:
defaultKey(key, val, sub, 'drv', [], globals(), locals())
return fcu
def parseDriverVariable(drv, rna, args, tokens):
    var = drv.variables.new()
    var.name = args[0]
    var.type = args[1]
    #print(" Var type", var, args[1])
    #print(" ->", var.type)
    nTarget = 0
    for (key, val, sub) in tokens:
        if key == 'Target':
            parseDriverTarget(var, nTarget, rna, val, sub)
            nTarget += 1
        else:
            defaultKey(key, val, sub, 'var', [], globals(), locals())
    return var
def parseFModifier(fcu, args, tokens):
    fmod = fcu.modifiers.new(args[0])
    #fmod = fcu.modifiers[0]
    for (key, val, sub) in tokens:
        defaultKey(key, val, sub, 'fmod', [], globals(), locals())
    return fmod
"""
    var = driver.variables.new()
    var.name = target_bone
    var.targets[0].id_type = 'OBJECT'
    var.targets[0].id = obj
    var.targets[0].rna_path = driver_path
"""
def parseDriverTarget(var, nTarget, rna, args, tokens):
    targ = var.targets[nTarget]
    ob = loadedData['Object'][args[0]]
    #print(" targ id", targ, ob)
    targ.id = ob
    #print(" ->", targ.id)
    for (key, val, sub) in tokens:
        defaultKey(key, val, sub, 'targ', [], globals(), locals())
    return targ
# parseMaterial(args, ext, tokens):
# parseMTex(mat, args, tokens):
# parseTexture(args, tokens):
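#
# Materials, textures and images refer to each other by name through
# loadedData: a texture must have been parsed before any material that uses
# it, since parseMTex() looks it up in loadedData['Texture'].
#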
def parseMaterial(args, tokens):
    global todo
    name = args[0]
    mat = bpy.data.materials.new(name)
    if mat is None:
        return None
    loadedData['Material'][name] = mat
    for (key, val, sub) in tokens:
        if key == 'MTex':
            parseMTex(mat, val, sub)
        elif key == 'Ramp':
            parseRamp(mat, val, sub)
        elif key == 'RaytraceTransparency':
            parseDefault(mat.raytrace_transparency, sub, {}, [])
        elif key == 'Halo':
            parseDefault(mat.halo, sub, {}, [])
        elif key == 'SSS':
            parseDefault(mat.subsurface_scattering, sub, {}, [])
        elif key == 'Strand':
            parseDefault(mat.strand, sub, {}, [])
        elif key == 'NodeTree':
            mat.use_nodes = True
            parseNodeTree(mat.node_tree, val, sub)
        else:
            exclude = ['specular_intensity', 'tangent_shading']
            defaultKey(key, val, sub, 'mat', [], globals(), locals())
    return mat
def parseMTex(mat, args, tokens):
    global todo
    index = int(args[0])
    texname = args[1]
    texco = args[2]
    mapto = args[3]
    tex = loadedData['Texture'][texname]
    mtex = mat.texture_slots.add()
    mtex.texture_coords = texco
    mtex.texture = tex
    for (key, val, sub) in tokens:
        defaultKey(key, val, sub, "mtex", [], globals(), locals())
def parseTexture(args, tokens):
    global todo
    if verbosity > 2:
        print( "Parsing texture %s" % args )
    name = args[0]
    tex = bpy.data.textures.new(name=name, type=args[1])
    loadedData['Texture'][name] = tex

    for (key, val, sub) in tokens:
        if key == 'Image':
            try:
                imgName = val[0]
                img = loadedData['Image'][imgName]
                tex.image = img
            except:
                msg = "Unable to load image '%s'" % val[0]
        elif key == 'Ramp':
            parseRamp(tex, val, sub)
        elif key == 'NodeTree':
            tex.use_nodes = True
            parseNodeTree(tex.node_tree, val, sub)
        else:
            defaultKey(key, val, sub, "tex", ['use_nodes', 'use_textures', 'contrast'], globals(), locals())
    return tex
nvar = "data.%s" % args[0]
use = "data.use_%s = True" % args[0]
exec(use)
ramp = eval(nvar)
elts = ramp.elements
n = 0
for (key, val, sub) in tokens:
# print("Ramp", key, val)
if key == 'Element':
elts[n].color = eval(val[0])
elts[n].position = eval(val[1])
n += 1
else:
defaultKey(key, val, sub, "tex", ['use_nodes', 'use_textures', 'contrast'], globals(), locals())
def parseSSS(mat, args, tokens):
    sss = mat.subsurface_scattering
    for (key, val, sub) in tokens:
        defaultKey(key, val, sub, "sss", [], globals(), locals())

def parseStrand(mat, args, tokens):
    strand = mat.strand
    for (key, val, sub) in tokens:
        defaultKey(key, val, sub, "strand", [], globals(), locals())
# parseNodeTree(tree, args, tokens):
# parseNode(node, args, tokens):
# parseSocket(socket, args, tokens):
#
def parseNodeTree(tree, args, tokens):
return
print("Tree", tree, args)
print(list(tree.nodes))
tree.name = args[0]
for (key, val, sub) in tokens:
if key == 'Node':
parseNodes(tree.nodes, val, sub)
else:
defaultKey(key, val, sub, "tree", [], globals(), locals())
def parseNodes(nodes, args, tokens):
print("Nodes", nodes, args)
print(list(nodes))
node.name = args[0]
for (key, val, sub) in tokens:
if key == 'Inputs':
parseSocket(node.inputs, val, sub)
elif key == 'Outputs':
parseSocket(node.outputs, val, sub)
else:
defaultKey(key, val, sub, "node", [], globals(), locals())
def parseNode(node, args, tokens):
print("Node", node, args)
print(list(node.inputs), list(node.outputs))
node.name = args[0]
for (key, val, sub) in tokens:
if key == 'Inputs':
parseSocket(node.inputs, val, sub)
elif key == 'Outputs':
parseSocket(node.outputs, val, sub)
else:
defaultKey(key, val, sub, "node", [], globals(), locals())
def parseSocket(socket, args, tokens):
print("Socket", socket, args)
socket.name = args[0]
for (key, val, sub) in tokens:
if key == 'Node':
parseNode(tree.nodes, val, sub)
else:
defaultKey(key, val, sub, "tree", [], globals(), locals())
#
# doLoadImage(filepath):
# loadImage(filepath):
# parseImage(args, tokens):
def doLoadImage(filepath):
path1 = os.path.expanduser(filepath)
file1 = os.path.realpath(path1)
if os.path.isfile(file1):
print( "Found file "+file1 )
try:
img = bpy.data.images.load(file1)
return img
except:
print( "Cannot read image" )
return None
else:
print( "No file "+file1 )
return None
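# loadImage() expands the image path stored in the mhx file; the TexDir
# directory (default ~/makehuman/exports) and the warnedTextureDir flag are
# used when the image cannot be found at that path.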
def loadImage(filepath):
    global TexDir, warnedTextureDir, loadedData

    texDir = os.path.expanduser(TexDir)
    path1 = os.path.expanduser(filepath)
    file1 = os.path.realpath(path1)