if material.subsurface_scattering.use:
subsurface_scattering = material.subsurface_scattering
tabWrite("subsurface { translucency <%.3g, %.3g, %.3g> }\n" % (
(subsurface_scattering.radius[0]),
(subsurface_scattering.radius[1]),
)
if material.pov.irid_enable:
tabWrite("irid { %.4g thickness %.4g turbulence %.4g }" % \
(material.pov.irid_amount, material.pov.irid_thickness,
material.pov.irid_turbulence))
tabWrite("diffuse 0.8\n")
tabWrite("phong 70.0\n")
#tabWrite("specular 0.2\n")
# This is written into the object
'''
if material and material.transparency_method=='RAYTRACE':
'interior { ior %.3g} ' % material.raytrace_transparency.ior
'''
#tabWrite("crand 1.0\n") # Sand granyness
#tabWrite("metallic %.6f\n" % material.spec)
#tabWrite("phong %.6f\n" % material.spec)
#tabWrite("phong_size %.6f\n" % material.spec)
#tabWrite("brilliance %.6f " % (material.specular_hardness/256.0) # Like hardness
tabWrite("}\n\n")
# Level=2 Means translation of spec and mir levels for when no map influences them
povHasnoSpecularMaps(Level=2)
special_texture_found = False
for t in material.texture_slots:
if t and t.use:
if (t.texture.type == 'IMAGE' and t.texture.image) or t.texture.type != 'IMAGE':
validPath=True
else:
validPath=False
if(t and t.use and validPath and
(t.use_map_specular or t.use_map_raymir or t.use_map_normal or t.use_map_alpha)):
continue # Some texture found
if special_texture_found or colored_specular_found:
    # Level=1 means no specular nor mirror reflection
    povHasnoSpecularMaps(Level=1)
else:
    # Level=3 means maximum spec and mirror
    povHasnoSpecularMaps(Level=3)
# DH disabled for now, this isn't the correct context
active_object = None # bpy.context.active_object # does not always work MR
matrix = global_matrix * camera.matrix_world
focal_point = camera.data.dof_distance
Qsize = render.resolution_x / render.resolution_y
tabWrite("#declare camLocation = <%.6f, %.6f, %.6f>;\n" %
matrix.translation[:])
tabWrite("#declare camLookAt = <%.6f, %.6f, %.6f>;\n" %
tuple([degrees(e) for e in matrix.to_3x3().to_euler()]))
tabWrite("camera {\n")
if scene.pov.baking_enable and active_object and active_object.type == 'MESH':
    tabWrite("mesh_camera{ 1 3\n")  # distribution 3 is what we want here
    tabWrite("mesh{%s}\n" % active_object.name)
    tabWrite("}\n")
    tabWrite("location <0,0,.01>")
    tabWrite("direction <0,0,-1>")
else:
    tabWrite("location <0, 0, 0>\n")
    tabWrite("look_at <0, 0, -1>\n")
    tabWrite("right <%s, 0, 0>\n" % - Qsize)
    tabWrite("up <0, 1, 0>\n")
    tabWrite("angle %f\n" % (360.0 * atan(16.0 / camera.data.lens) / pi))
tabWrite("rotate <%.6f, %.6f, %.6f>\n" % \
tuple([degrees(e) for e in matrix.to_3x3().to_euler()]))
tabWrite("translate <%.6f, %.6f, %.6f>\n" % matrix.translation[:])
if camera.data.pov.dof_enable and focal_point != 0:
tabWrite("aperture %.3g\n" % camera.data.pov.dof_aperture)
tabWrite("blur_samples %d %d\n" % \
(camera.data.pov.dof_samples_min, camera.data.pov.dof_samples_max))
tabWrite("variance 1/%d\n" % camera.data.pov.dof_variance)
tabWrite("confidence %.3g\n" % camera.data.pov.dof_confidence)
tabWrite("focal_point <0, 0, %f>\n" % focal_point)
tabWrite("}\n")
# Incremented after each lamp export to declare its target
# currently used for Fresnel diffuse shader as their slope vector:
global lampCount
lampCount = 0
# Get all lamps
for ob in lamps:
lamp = ob.data
matrix = global_matrix * ob.matrix_world
# Color is modified by energy, multiplied by 2 for a better match --Maurice
color = tuple([c * lamp.energy * 2.0 for c in lamp.color])
tabWrite("light_source {\n")
tabWrite("< 0,0,0 >\n")
tabWrite("color rgb<%.3g, %.3g, %.3g>\n" % color)
tabWrite("spotlight\n")
tabWrite("falloff %.2f\n" % (degrees(lamp.spot_size) / 2.0)) # 1 TO 179 FOR BOTH
tabWrite("radius %.6f\n" % \
((degrees(lamp.spot_size) / 2.0) * (1.0 - lamp.spot_blend)))
# Blender does not have a tightness equivalent; 0 is most like Blender's default.
tabWrite("tightness 0\n") # 0:10f
tabWrite("point_at <0, 0, -1>\n")
tabWrite("parallel\n")
tabWrite("point_at <0, 0, -1>\n") # *must* be after 'parallel'
tabWrite("area_illumination\n")
tabWrite("fade_distance %.6f\n" % (lamp.distance / 2.0))
# Area lights have no falloff type, so always use Blender's lamp quad equivalent
# for those?
tabWrite("fade_power %d\n" % 2)
size_x = lamp.size
samples_x = lamp.shadow_ray_samples_x
if lamp.shape == 'SQUARE':
size_y = size_x
samples_y = samples_x
else:
size_y = lamp.size_y
samples_y = lamp.shadow_ray_samples_y
tabWrite("area_light <%.6f,0,0>,<0,%.6f,0> %d, %d\n" % \
(size_x, size_y, samples_x, samples_y))
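# POV-Ray's area_light takes two axis vectors spanning the light plane plus a sample
# count along each axis; here they come from the lamp size (size/size_y) and its
# shadow ray sample counts.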
if lamp.shadow_ray_sample_method == 'CONSTANT_JITTERED':
    if lamp.use_jitter:
        tabWrite("jitter\n")
else:
    tabWrite("adaptive 1\n")
    tabWrite("jitter\n")
# HEMI never has any shadow_method attribute
if(not scene.render.use_shadows or lamp.type == 'HEMI' or
(lamp.type != 'HEMI' and lamp.shadow_method == 'NOSHADOW')):
tabWrite("shadowless\n")
# Sun shouldn't be attenuated. Hemi and area lights have no falloff attribute, so they
# are set to type 2 attenuation a little higher above.
if lamp.type not in {'SUN', 'AREA', 'HEMI'}:
tabWrite("fade_distance %.6f\n" % (lamp.distance / 2.0))
tabWrite("fade_power %d\n" % 2) # Use blenders lamp quad equivalent
tabWrite("fade_power %d\n" % 1) # Use blenders lamp linear
# Supposing that using no fade_power keyword defaults to constant, i.e. no attenuation.
elif lamp.falloff_type == 'CONSTANT':
# Using Custom curve for fade power 3 for now.
elif lamp.falloff_type == 'CUSTOM_CURVE':
tabWrite("fade_power %d\n" % 4)
tabWrite("}\n")
lampCount += 1
# v(A,B) rotates vector A about origin by vector B.
file.write("#declare lampTarget%s= vrotate(<%.4g,%.4g,%.4g>,<%.4g,%.4g,%.4g>);\n" % \
(lampCount, -(ob.location.x), -(ob.location.y), -(ob.location.z),
ob.rotation_euler.x, ob.rotation_euler.y, ob.rotation_euler.z))
####################################################################################################
def exportMeta(metas):
# TODO - Blender's 'motherball' naming is not supported.
if comments and len(metas) >= 1:
file.write("//--Blob objects--\n\n")
# Important: an empty element list would break parsing.
elements = [elem for elem in meta.elements if elem.type in {'BALL', 'ELLIPSOID'}]
tabWrite("blob {\n")
tabWrite("threshold %.4g\n" % meta.threshold)
importance = ob.pov.importance_value
try:
    material = meta.materials[0]  # Lame! Blender can't do anything else.
except:
    material = None
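# Each metaball element below becomes one blob component: a sphere with a center,
# radius and field strength. Negating the stiffness of "negative" elements makes them
# subtract from the field, and the surface is taken where the summed field crosses
# the threshold written above.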
for elem in elements:
loc = elem.co
stiffness = elem.stiffness
if elem.use_negative:
stiffness = - stiffness
if elem.type == 'BALL':
tabWrite("sphere { <%.6g, %.6g, %.6g>, %.4g, %.4g }\n" % \
(loc.x, loc.y, loc.z, elem.radius, stiffness))
# After this we could do something simple like...
# "pigment {Blue} }"
# except we'll write the color
elif elem.type == 'ELLIPSOID':
# location is modified by scale
tabWrite("sphere { <%.6g, %.6g, %.6g>, %.4g, %.4g }\n" % \
(loc.x / elem.size_x, loc.y / elem.size_y, loc.z / elem.size_z,
elem.radius, stiffness))
tabWrite("scale <%.6g, %.6g, %.6g> \n" % \
(elem.size_x, elem.size_y, elem.size_z))
if material:
diffuse_color = material.diffuse_color
trans = 1.0 - material.alpha
if material.use_transparency and material.transparency_method == 'RAYTRACE':
povFilter = material.raytrace_transparency.filter * (1.0 - material.alpha)
trans = (1.0 - material.alpha) - povFilter
material_finish = materialNames[material.name]
tabWrite("pigment {rgbft<%.3g, %.3g, %.3g, %.3g, %.3g>} \n" % \
(diffuse_color[0], diffuse_color[1], diffuse_color[2],
povFilter, trans))
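# In POV-Ray's rgbft pigment, "f" (filter) is transparency tinted by the pigment color
# while "t" (transmit) lets light through unfiltered; here filter comes from the
# raytrace transparency filter value and transmit from the remaining alpha.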
tabWrite("finish {%s}\n" % safety(material_finish, Level=2))
tabWrite("pigment {rgb<1 1 1>} \n")
# Write the finish last.
tabWrite("finish {%s}\n" % (safety(DEF_MAT_NAME, Level=2)))
writeObjectMaterial(material, ob)
writeMatrix(global_matrix * ob.matrix_world)
# Importance for radiosity sampling added here
tabWrite("radiosity { \n")
tabWrite("importance %3g \n" % importance)
tabWrite("}\n")
tabWrite("}\n") # End of Metaball block
if comments and len(metas) >= 1:
file.write("\n")
# objectNames = {}
DEF_OBJ_NAME = "Default"
def exportMeshes(scene, sel):
# obmatslist = []
# def hasUniqueMaterial():
# # Grab materials attached to object instances ...
# if hasattr(ob, 'material_slots'):
# for ms in ob.material_slots:
# if ms.material is not None and ms.link == 'OBJECT':
# if ms.material in obmatslist:
# return False
# else:
# obmatslist.append(ms.material)
# return True
# def hasObjectMaterial(ob):
# # Grab materials attached to object instances ...
# if hasattr(ob, 'material_slots'):
# for ms in ob.material_slots:
# if ms.material is not None and ms.link == 'OBJECT':
# # If there is at least one material slot linked to the object
# # and not the data (mesh), always create a new, "private" data instance.
# return True
# return False
# For objects using local material(s) only!
# This is a mapping between a tuple (dataname, materialnames, ...), and the POV dataname.
# As only objects using:
# * The same data.
# * EXACTLY the same materials, in EXACTLY the same sockets.
# ... can share a same instance in POV export.
obmats2data = {}
def checkObjectMaterials(ob, name, dataname):
if hasattr(ob, 'material_slots'):
has_local_mats = False
key = [dataname]
for ms in ob.material_slots:
key.append(ms.material.name)
if ms.link == 'OBJECT' and not has_local_mats:
has_local_mats = True
else:
# Even if the slot is empty, it is important to grab it...
key.append("")
if has_local_mats:
# If this object uses local material(s), lets find if another object
# using the same data and exactly the same list of materials
# (in the same slots) has already been processed...
# Note that here also, we use object name as new, unique dataname for Pov.
key = tuple(key) # Lists are not hashable...
if key not in obmats2data:
obmats2data[key] = name
return obmats2data[key]
return None
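# Hypothetical example of such a key: an object reusing mesh data "DATACube" with
# material "Red" in slot 1 and an empty slot 2 would yield ("DATACube", "Red", "")
# (names here are purely illustrative).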
data_ref = {}
def store(scene, ob, name, dataname, matrix):
# The Object needs to be written at least once but if its data is
# already in data_ref this has already been done.
# This func returns the "povray" name of the data, or None
# if no writing is needed.
if ob.is_modified(scene, 'RENDER'):
# Data modified.
# Create unique entry in data_ref by using object name
# (always unique in Blender) as data name.
data_ref[name] = [(name, MatrixAsPovString(matrix))]
return name
# Here, we replace dataname by the value returned by checkObjectMaterials, only if
# it is not evaluated to False (i.e. only if the object uses some local material(s)).
dataname = checkObjectMaterials(ob, name, dataname) or dataname
if dataname in data_ref:
# Data already known, just add the object instance.
data_ref[dataname].append((name, MatrixAsPovString(matrix)))
# No need to write data
return None
else:
# Data not yet processed, create a new entry in data_ref.
data_ref[dataname] = [(name, MatrixAsPovString(matrix))]
return dataname
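# data_ref thus maps each POV data name to a list of (object name, matrix string)
# pairs: the first entry triggers the actual data export, later entries only add
# instances of the already-declared data.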
# ... (lines 1354-1474 of the original source not shown in this view) ...
def exportSmoke(smoke_obj_name):
#if LuxManager.CurrentScene.name == 'preview':
#return 1, 1, 1, 1.0
#else:
flowtype = -1
smoke_obj = bpy.data.objects[smoke_obj_name]
domain = None
# Search smoke domain target for smoke modifiers
for mod in smoke_obj.modifiers:
if mod.name == 'Smoke':
if mod.smoke_type == 'FLOW':
if mod.flow_settings.smoke_flow_type == 'BOTH':
flowtype = 2
else:
if mod.flow_settings.smoke_flow_type == 'SMOKE':
flowtype = 0
else:
if mod.flow_settings.smoke_flow_type == 'FIRE':
flowtype = 1
if mod.smoke_type == 'DOMAIN':
domain = smoke_obj
smoke_modifier = mod
eps = 0.000001
if domain is not None:
#if bpy.app.version[0] >= 2 and bpy.app.version[1] >= 71:
# Blender version 2.71 supports direct access to smoke data structure
set = mod.domain_settings
channeldata = []
for v in set.density_grid:
channeldata.append(v.real)
print(v.real)
## Usage as a voxel texture:
# channeldata = []
# if channel == 'density':
# for v in set.density_grid:
# channeldata.append(v.real)
# if channel == 'fire':
# for v in set.flame_grid:
# channeldata.append(v.real)
resolution = set.resolution_max
big_res = []
big_res.append(set.domain_resolution[0])
big_res.append(set.domain_resolution[1])
big_res.append(set.domain_resolution[2])
if set.use_high_resolution:
big_res[0] = big_res[0] * (set.amplify + 1)
big_res[1] = big_res[1] * (set.amplify + 1)
big_res[2] = big_res[2] * (set.amplify + 1)
# else:
# p = []
##gather smoke domain settings
# BBox = domain.bound_box
# p.append([BBox[0][0], BBox[0][1], BBox[0][2]])
# p.append([BBox[6][0], BBox[6][1], BBox[6][2]])
# set = mod.domain_settings
# resolution = set.resolution_max
# smokecache = set.point_cache
# ret = read_cache(smokecache, set.use_high_resolution, set.amplify + 1, flowtype)
# res_x = ret[0]
# res_y = ret[1]
# res_z = ret[2]
# density = ret[3]
# fire = ret[4]
# if res_x * res_y * res_z > 0:
##new cache format
# big_res = []
# big_res.append(res_x)
# big_res.append(res_y)
# big_res.append(res_z)
# else:
# max = domain.dimensions[0]
# if (max - domain.dimensions[1]) < -eps:
# max = domain.dimensions[1]
# if (max - domain.dimensions[2]) < -eps:
# max = domain.dimensions[2]
# big_res = [int(round(resolution * domain.dimensions[0] / max, 0)),
# int(round(resolution * domain.dimensions[1] / max, 0)),
# int(round(resolution * domain.dimensions[2] / max, 0))]
# if set.use_high_resolution:
# big_res = [big_res[0] * (set.amplify + 1), big_res[1] * (set.amplify + 1),
# big_res[2] * (set.amplify + 1)]
# if channel == 'density':
# channeldata = density
# if channel == 'fire':
# channeldata = fire
# sc_fr = '%s/%s/%s/%05d' % (efutil.export_path, efutil.scene_filename(), bpy.context.scene.name, bpy.context.scene.frame_current)
# if not os.path.exists( sc_fr ):
# os.makedirs(sc_fr)
#
# smoke_filename = '%s.smoke' % bpy.path.clean_name(domain.name)
# smoke_path = '/'.join([sc_fr, smoke_filename])
#
# with open(smoke_path, 'wb') as smoke_file:
# # Binary densitygrid file format
# #
# # File header
# smoke_file.write(b'SMOKE') #magic number
# smoke_file.write(struct.pack('<I', big_res[0]))
# smoke_file.write(struct.pack('<I', big_res[1]))
# smoke_file.write(struct.pack('<I', big_res[2]))
# Density data
# smoke_file.write(struct.pack('<%df'%len(channeldata), *channeldata))
#
# LuxLog('Binary SMOKE file written: %s' % (smoke_path))
#return big_res[0], big_res[1], big_res[2], channeldata
mydf3 = df3.df3(big_res[0],big_res[1],big_res[2])
sim_sizeX, sim_sizeY, sim_sizeZ = mydf3.size()
for x in range(sim_sizeX):
for y in range(sim_sizeY):
for z in range(sim_sizeZ):
mydf3.set(x, y, z, channeldata[((z * sim_sizeY + y) * sim_sizeX + x)])
mydf3.exportDF3(smokePath)
print('Binary smoke.df3 file written in preview directory')
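# df3 is POV-Ray's density file container (a small header with the grid resolution
# followed by the voxel values); it is presumably read back below by the density_file
# statement, which is why the channel data is packed into it rather than written inline.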
if comments:
file.write("\n//--Smoke--\n\n")
# Note: We start with a default unit cube.
# This is mandatory to read df3 data correctly - otherwise we could just directly use bbox
# coordinates from the start, and avoid scale/translate operations at the end...
file.write("box{<0,0,0>, <1,1,1>\n")
file.write(" pigment{ rgbt 1 }\n")
file.write(" hollow\n")
file.write(" interior{ //---------------------\n")
file.write(" media{ method 3\n")
file.write(" emission <1,1,1>*1\n")# 0>1 for dark smoke to white vapour
file.write(" scattering{ 1, // Type\n")
file.write(" <1,1,1>*0.1\n")
file.write(" density{density_file df3 \"%s\"\n" % (smokePath))
file.write(" color_map {\n")
file.write(" [0.00 rgb 0]\n")
file.write(" [0.05 rgb 0]\n")
file.write(" [0.20 rgb 0.2]\n")
file.write(" [0.30 rgb 0.6]\n")
file.write(" [0.40 rgb 1]\n")
file.write(" [1.00 rgb 1]\n")
file.write(" } // end color_map\n")
file.write(" } // end of density\n")
file.write(" samples %i // higher = more precise\n" % resolution)
file.write(" } // end of media --------------------------\n")
file.write(" } // end of interior\n")
# START OF TRANSFORMATIONS
# Size to consider here are bbox dimensions (i.e. still in object space, *before* applying
# loc/rot/scale and other transformations (like parent stuff), aka matrix_world).
bbox = smoke_obj.bound_box
dim = [abs(bbox[6][0] - bbox[0][0]), abs(bbox[6][1] - bbox[0][1]), abs(bbox[6][2] - bbox[0][2])]
# We scale our cube to get its final size and shape, but still in *object* space (same as Blender's bbox).
file.write("scale<%.6g,%.6g,%.6g>\n" % (dim[0], dim[1], dim[2]))
# We offset our cube such that (0,0,0) coordinate matches Blender's object center.
file.write("translate<%.6g,%.6g,%.6g>\n" % (bbox[0][0], bbox[0][1], bbox[0][2]))
# We apply object's transformations to get final loc/rot/size in world space!
# Note: we could combine the two previous transformations with this matrix directly...
writeMatrix(global_matrix * smoke_obj.matrix_world)
# END OF TRANSFORMATIONS
#file.write(" interpolate 1\n")
#file.write(" frequency 0\n")
#file.write(" }\n")
#file.write("}\n")
ob_num = 0
# XXX I moved all those checks here, as there is no need to compute names
# for objects we won't export here!
if ob.type in {'LAMP', 'CAMERA', 'EMPTY', 'META', 'ARMATURE', 'LATTICE'}:
for mod in ob.modifiers:
if mod and hasattr(mod, 'smoke_type'):
# ... (lines 1552-1601 not shown) ...
smokeFlag=True
if (mod.smoke_type == 'DOMAIN'):
exportSmoke(ob.name)
break # don't render domain mesh or flow emitter mesh, skip to next object.
if not smokeFlag:
# Export Hair
renderEmitter = True
if hasattr(ob, 'particle_systems'):
renderEmitter = False
for pSys in ob.particle_systems:
if pSys.settings.use_render_emitter:
renderEmitter = True
for mod in [m for m in ob.modifiers if (m is not None) and (m.type == 'PARTICLE_SYSTEM')]:
if (pSys.settings.render_type == 'PATH') and mod.show_render and (pSys.name == mod.particle_system.name):
tstart = time.time()
texturedHair=0
if ob.active_material is not None:
pmaterial = ob.material_slots[pSys.settings.material - 1].material
for th in pmaterial.texture_slots:
if th and th.use:
if (th.texture.type == 'IMAGE' and th.texture.image) or th.texture.type != 'IMAGE':
if th.use_map_color_diffuse:
texturedHair=1
if pmaterial.strand.use_blender_units:
strandStart = pmaterial.strand.root_size
strandEnd = pmaterial.strand.tip_size
strandShape = pmaterial.strand.shape
else: # Blender unit conversion
strandStart = pmaterial.strand.root_size / 200.0
strandEnd = pmaterial.strand.tip_size / 200.0
strandShape = pmaterial.strand.shape
else:
pmaterial = "default" # No material assigned in blender, use default one
strandStart = 0.01
strandEnd = 0.01
strandShape = 0.0
# Set the number of particles to the render count rather than the 3D viewport display count
pSys.set_resolution(scene, ob, 'RENDER')
steps = pSys.settings.draw_step
steps = 3 ** steps # or (power of 2 rather than 3) + 1 # Formerly : len(particle.hair_keys)
totalNumberOfHairs = ( len(pSys.particles) + len(pSys.child_particles) )
#hairCounter = 0
file.write('#declare HairArray = array[%i] {\n' % totalNumberOfHairs)
for pindex in range(0, totalNumberOfHairs):
#if particle.is_exist and particle.is_visible:
#hairCounter += 1
#controlPointCounter = 0
# Each hair is represented as a separate sphere_sweep in POV-Ray.
# ... (lines 1603-1654 not shown) ...
file.write('sphere_sweep{')
if pSys.settings.use_hair_bspline:
file.write('b_spline ')
file.write('%i,\n' % (steps + 2)) # +2 because the first point needs tripling to be more than a handle in POV
else:
file.write('linear_spline ')
file.write('%i,\n' % (steps))
# Change world coordinates to object-local coordinates by multiplying with the inverted matrix
initCo = ob.matrix_world.inverted()*(pSys.co_hair(ob, pindex, 0))
if ob.active_material is not None:
pmaterial = ob.material_slots[pSys.settings.material-1].material
for th in pmaterial.texture_slots:
if th and th.use and th.use_map_color_diffuse:
#treat POV textures as bitmaps
if (th.texture.type == 'IMAGE' and th.texture.image and th.texture_coords == 'UV' and ob.data.uv_textures != None): # or (th.texture.pov.tex_pattern_type != 'emulator' and th.texture_coords == 'UV' and ob.data.uv_textures != None):
image=th.texture.image
image_width = image.size[0]
image_height = image.size[1]
image_pixels = image.pixels[:]
uv_co = pSys.uv_on_emitter(mod, pSys.particles[pindex], pindex, 0)
x_co = round(uv_co[0] * (image_width - 1))
y_co = round(uv_co[1] * (image_height - 1))
pixelnumber = (image_width * y_co) + x_co
r = image_pixels[pixelnumber*4]
g = image_pixels[pixelnumber*4+1]
b = image_pixels[pixelnumber*4+2]
a = image_pixels[pixelnumber*4+3]
initColor=(r,g,b,a)
else:
#only overwrite variable for each competing texture for now
initColor=th.texture.evaluate((initCo[0],initCo[1],initCo[2]))
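# At this point initColor holds the hair root color: either sampled from the
# diffuse-mapped image at the root UV (nearest pixel in the flattened RGBA pixels
# array) or evaluated from the procedural texture at the root's object-space location.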
for step in range(0, steps):
co = pSys.co_hair(ob, pindex, step)
#for controlPoint in particle.hair_keys:
if pSys.settings.clump_factor != 0:
hDiameter = pSys.settings.clump_factor / 200.0 * random.uniform(0.5, 1)
elif step == 0:
hDiameter = strandStart
else:
hDiameter += (strandEnd-strandStart)/(pSys.settings.draw_step+1) #XXX +1 or not?
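# The strand radius is interpolated from strandStart (root) towards strandEnd (tip)
# in equal increments per step, unless a non-zero clump factor overrides it with a
# randomized diameter.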
if step == 0 and pSys.settings.use_hair_bspline:
# Write the first point three times to compensate for POV-Ray's Bezier handling
file.write('<%.6g,%.6g,%.6g>,%.7g,\n' % (co[0], co[1], co[2], abs(hDiameter)))
file.write('<%.6g,%.6g,%.6g>,%.7g,\n' % (co[0], co[1], co[2], abs(hDiameter)))
#file.write('<%.6g,%.6g,%.6g>,%.7g' % (particle.location[0], particle.location[1], particle.location[2], abs(hDiameter))) # Useless because particle location is the tip, not the root.
#file.write(',\n')
#controlPointCounter += 1
#totalNumberOfHairs += len(pSys.particles)# len(particle.hair_keys)
# Each control point is written out, along with the radius of the
# hair at that point.
file.write('<%.6g,%.6g,%.6g>,%.7g' % (co[0], co[1], co[2], abs(hDiameter)))
# All coordinates except the last need a following comma.
if step != steps - 1:
file.write(',\n')
else:
if texturedHair:
# Write pigment and alpha (POV-Ray and Blender have alpha 0 and 1 reversed)
file.write('\npigment{ color rgbf < %.3g, %.3g, %.3g, %.3g> }\n' %(initColor[0], initColor[1], initColor[2], 1.0-initColor[3]))
# End the sphere_sweep declaration for this hair
file.write('}\n')
# All but the final sphere_sweep (each array element) needs a terminating comma.
if pindex != totalNumberOfHairs:
file.write(',\n')
else:
file.write('\n')
# End the array declaration.
# ... (lines 1678-1725 not shown) ...
if not texturedHair:
# Pick up the hair material diffuse color and create a default POV-Ray hair texture.
file.write('#ifndef (HairTexture)\n')
file.write(' #declare HairTexture = texture {\n')
file.write(' pigment {rgbt <%s,%s,%s,%s>}\n' % (pmaterial.diffuse_color[0], pmaterial.diffuse_color[1], pmaterial.diffuse_color[2], (pmaterial.strand.width_fade + 0.05)))
file.write(' }\n')
file.write('#end\n')
file.write('\n')
# Dynamically create a union of the hairstrands (or a subset of them).
# By default use every hairstrand, commented line is for hand tweaking test renders.
file.write('//Increasing HairStep divides the amount of hair for test renders.\n')
file.write('#ifndef(HairStep) #declare HairStep = 1; #end\n')
file.write('union{\n')
file.write(' #local I = 0;\n')
file.write(' #while (I < %i)\n' % totalNumberOfHairs)
file.write(' object {HairArray[I]')
if not texturedHair:
file.write(' texture{HairTexture}\n')
else:
file.write('\n')
# Translucency of the hair:
file.write(' hollow\n')
file.write(' double_illuminate\n')
file.write(' interior {\n')
file.write(' ior 1.45\n')
file.write(' media {\n')
file.write(' scattering { 1, 10*<0.73, 0.35, 0.15> /*extinction 0*/ }\n')
file.write(' absorption 10/<0.83, 0.75, 0.15>\n')
file.write(' samples 1\n')
file.write(' method 2\n')
file.write(' density {\n')
file.write(' color_map {\n')
file.write(' [0.0 rgb <0.83, 0.45, 0.35>]\n')
file.write(' [0.5 rgb <0.8, 0.8, 0.4>]\n')
file.write(' [1.0 rgb <1,1,1>]\n')
file.write(' }\n')
file.write(' }\n')
file.write(' }\n')
file.write(' }\n')
file.write(' }\n')
file.write(' #local I = I + HairStep;\n')
file.write(' #end\n')
writeMatrix(global_matrix * ob.matrix_world)
file.write('}')
print('Total hairstrands written: %i' % totalNumberOfHairs)
print('Number of tufts (particle systems)', len(ob.particle_systems))
# Set back the displayed number of particles to preview count
pSys.set_resolution(scene, ob, 'PREVIEW')
if not renderEmitter:
continue #don't render mesh, skip to next object.
try:
me = ob.to_mesh(scene, True, 'RENDER')
except:
# Happens when curves can't be made into meshes because they have no data.
continue
importance = ob.pov.importance_value
me_materials = me.materials
me_faces = me.tessfaces[:]
if not me or not me_faces:
continue
#############################################
# Generating a name for the object, just like for materials, to be able to use it
# (for baking for now, or anything else).
# XXX I don't understand that: if we are here, sel is a non-empty iterable,
# so this condition is always True, IMO -- mont29
if sel:
name_orig = "OB" + ob.name
dataname_orig = "DATA" + ob.data.name
# ... (lines 1759-1787 not shown) ...
name_orig = DEF_OBJ_NAME
dataname_orig = DEF_OBJ_NAME
name = string_strip_hyphen(bpy.path.clean_name(name_orig))
dataname = string_strip_hyphen(bpy.path.clean_name(dataname_orig))
## for slot in ob.material_slots:
## if slot.material is not None and slot.link == 'OBJECT':
## obmaterial = slot.material
#############################################
if info_callback:
info_callback("Object %2.d of %2.d (%s)" % (ob_num, len(sel), ob.name))
#if ob.type != 'MESH':
# continue
# me = ob.data
matrix = global_matrix * ob.matrix_world
povdataname = store(scene, ob, name, dataname, matrix)
if povdataname is None:
print("This is an instance")
continue
print("Writing Down First Occurence")
uv_textures = me.tessface_uv_textures
if len(uv_textures) > 0:
    if me.uv_textures.active and uv_textures.active.data:
        uv_layer = uv_textures.active.data
else:
    uv_layer = None
try:
#vcol_layer = me.vertex_colors.active.data
vcol_layer = me.tessface_vertex_colors.active.data
except AttributeError:
vcol_layer = None
faces_verts = [f.vertices[:] for f in me_faces]
faces_normals = [f.normal[:] for f in me_faces]
verts_normals = [v.normal[:] for v in me.vertices]
# quads incur an extra face
quadCount = sum(1 for f in faces_verts if len(f) == 4)
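# mesh2 face_indices only takes triangles, so each quad contributes two of them;
# the face count written further down is therefore len(me_faces) + quadCount.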
# Use named declaration to allow reference e.g. for baking. MR
file.write("\n")
tabWrite("#declare %s =\n" % povdataname)
tabWrite("mesh2 {\n")
tabWrite("vertex_vectors {\n")
tabWrite("%d" % len(me.vertices)) # vert count
tabStr = tab * tabLevel
for v in me.vertices:
    if linebreaksinlists:
        file.write(",\n")
        file.write(tabStr + "<%.6f, %.6f, %.6f>" % v.co[:])  # vert count
    else:
        file.write(", ")
        file.write("<%.6f, %.6f, %.6f>" % v.co[:])  # vert count
#tabWrite("<%.6f, %.6f, %.6f>" % v.co[:]) # vert count
file.write("\n")
tabWrite("}\n")
# Build unique Normal list
uniqueNormals = {}
for fi, f in enumerate(me_faces):
fv = faces_verts[fi]
# [-1] is a dummy index, use a list so we can modify in place
if f.use_smooth: # Use vertex normals
for v in fv:
key = verts_normals[v]
uniqueNormals[key] = [-1]
else: # Use face normal
key = faces_normals[fi]
uniqueNormals[key] = [-1]
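# uniqueNormals keys are the normals actually referenced (vertex normals for smooth
# faces, face normals for flat ones); the [-1] placeholder lists presumably get their
# running index assigned when the normals are written out, as done for uniqueUVs below.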
tabWrite("normal_vectors {\n")
tabWrite("%d" % len(uniqueNormals)) # vert count
tabStr = tab * tabLevel
for no, index in uniqueNormals.items():
file.write(tabStr + "<%.6f, %.6f, %.6f>" % no) # vert count
file.write("<%.6f, %.6f, %.6f>" % no) # vert count
file.write("\n")
tabWrite("}\n")
# Vertex colors
vertCols = {} # Use for material colors also.
if uv_layer:
# Generate unique UV's
uniqueUVs = {}
#n = 0
for fi, uv in enumerate(uv_layer):
    if len(faces_verts[fi]) == 4:
        uvs = uv_layer[fi].uv[0], uv_layer[fi].uv[1], uv_layer[fi].uv[2], uv_layer[fi].uv[3]
    else:
        uvs = uv_layer[fi].uv[0], uv_layer[fi].uv[1], uv_layer[fi].uv[2]
    for uv in uvs:
        uniqueUVs[uv[:]] = [-1]
tabWrite("uv_vectors {\n")
#print unique_uvs
tabWrite("%d" % len(uniqueUVs)) # vert count
idx = 0
tabStr = tab * tabLevel
for uv, index in uniqueUVs.items():
    if linebreaksinlists:
        file.write(",\n")
        file.write(tabStr + "<%.6f, %.6f>" % uv)
    else:
        file.write(", ")
        file.write("<%.6f, %.6f>" % uv)
    index[0] = idx
    idx += 1
'''
# Just add 1 dummy vector, no real UV's
tabWrite('1') # vert count
file.write(',\n\t\t<0.0, 0.0>')
'''
file.write("\n")
tabWrite("}\n")
if me.vertex_colors:
#Write down vertex colors as a texture for each vertex
tabWrite("texture_list {\n")
tabWrite("%d\n" % (((len(me_faces)-quadCount) * 3 )+ quadCount * 4)) # works only with tris and quad mesh for now
VcolIdx=0
if comments:
file.write("\n //Vertex colors: one simple pigment texture per vertex\n")
for fi, f in enumerate(me_faces):
# annoying, index may be invalid
material_index = f.material_index
try:
material = me_materials[material_index]
except:
material = None
if material: #and material.use_vertex_color_paint: #Always use vertex color when there is some for now
col = vcol_layer[fi]
if len(faces_verts[fi]) == 4:
    cols = col.color1, col.color2, col.color3, col.color4
else:
    cols = col.color1, col.color2, col.color3
for col in cols:
key = col[0], col[1], col[2], material_index # Material index!
VcolIdx+=1
vertCols[key] = [VcolIdx]
if linebreaksinlists:
tabWrite("texture {pigment{ color rgb <%6f,%6f,%6f> }}\n" % (col[0], col[1], col[2]))
else:
tabWrite("texture {pigment{ color rgb <%6f,%6f,%6f> }}" % (col[0], col[1], col[2]))
tabStr = tab * tabLevel
else:
if material:
# Multiply diffuse with SSS Color
if material.subsurface_scattering.use:
diffuse_color = [i * j for i, j in zip(material.subsurface_scattering.color[:], material.diffuse_color[:])]
key = diffuse_color[0], diffuse_color[1], diffuse_color[2], \
material_index
vertCols[key] = [-1]
else:
diffuse_color = material.diffuse_color[:]
key = diffuse_color[0], diffuse_color[1], diffuse_color[2], \
material_index
vertCols[key] = [-1]
tabWrite("\n}\n")
# Face indices
tabWrite("\nface_indices {\n")
tabWrite("%d" % (len(me_faces) + quadCount)) # faces count
tabStr = tab * tabLevel
for fi, f in enumerate(me_faces):
fv = faces_verts[fi]
material_index = f.material_index
if len(fv) == 4:
indices = (0, 1, 2), (0, 2, 3)
else:
indices = ((0, 1, 2),)
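# Quads are emitted as the two triangles (0, 1, 2) and (0, 2, 3), consistent with the
# quadCount-adjusted face count written above.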
if vcol_layer:
col = vcol_layer[fi]
if len(fv) == 4:
    cols = col.color1, col.color2, col.color3, col.color4
else:
    cols = col.color1, col.color2, col.color3
if not me_materials or me_materials[material_index] is None: # No materials
for i1, i2, i3 in indices:
if linebreaksinlists:
file.write(",\n")
# vert count
file.write(tabStr + "<%d,%d,%d>" % (fv[i1], fv[i2], fv[i3]))
else:
file.write(", ")
file.write("<%d,%d,%d>" % (fv[i1], fv[i2], fv[i3])) # vert count
# ... (lines 1970-1998 not shown) ...
material = me_materials[material_index]
for i1, i2, i3 in indices:
if me.vertex_colors: #and material.use_vertex_color_paint:
# Color per vertex - vertex color
col1 = cols[i1]
col2 = cols[i2]
col3 = cols[i3]
ci1 = vertCols[col1[0], col1[1], col1[2], material_index][0]
ci2 = vertCols[col2[0], col2[1], col2[2], material_index][0]
ci3 = vertCols[col3[0], col3[1], col3[2], material_index][0]
else:
# Color per material - flat material color
if material.subsurface_scattering.use:
diffuse_color = [i * j for i, j in zip(material.subsurface_scattering.color[:], material.diffuse_color[:])]
else:
diffuse_color = material.diffuse_color[:]
ci1 = ci2 = ci3 = vertCols[diffuse_color[0], diffuse_color[1], \
diffuse_color[2], f.material_index][0]
# POV-Ray texture_list indices are zero-based, so subtract 1 from the one-based ci values.
if linebreaksinlists:
file.write(",\n")
file.write(tabStr + "<%d,%d,%d>, %d,%d,%d" % \
(fv[i1], fv[i2], fv[i3], ci1-1, ci2-1, ci3-1)) # vert count
else:
file.write(", ")
file.write("<%d,%d,%d>, %d,%d,%d" % \
(fv[i1], fv[i2], fv[i3], ci1-1, ci2-1, ci3-1)) # vert count