+ ss = Statement("transform")
+
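+ # World-space position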
+ loc = i.matrix_world.to_translation()
+ ss.sub.append(Statement("position", *tuple(loc)))
+
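+ # Rotation is exported as Euler angles (in degrees) for Euler rotation modes, as angle-axis otherwise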
+ quat = i.matrix_world.to_quaternion()
+ if i.rotation_mode in ('XYZ', 'XZY', 'YXZ', 'YZX', 'ZXY', 'ZYX'):
+ angles = [math.degrees(a) for a in quat.to_euler()]
+ ss.sub.append(Statement("euler", *angles))
+ else:
+ ss.sub.append(Statement("rotation", math.degrees(quat.angle), *tuple(quat.axis)))
+
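+ # World-space scale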
+ scale = i.matrix_world.to_scale()
+ ss.sub.append(Statement("scale", *tuple(scale)))
+
+ st.sub.append(ss)
+ statements.append(st)
+
+ def export_sequence_resources(self, scene, resources):
+ from .datafile import Resource, Statement
+
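+ # Collect lights from the scene and all of its background sets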
+ lights = []
+ s = scene
+ while s:
+ lights += s.lights
+ s = s.background_set
+
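+ # Remove duplicate lights shared between the scene and its background sets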
+ from .util import make_unique
+ lights = make_unique(lights)
+
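+ # Export each light as its own resource, unless it is already present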
+ from .export_light import LightExporter
+ light_exporter = LightExporter()
+ for l in lights:
+ light_name = l.name+".light"
+ if light_name not in resources:
+ resources[light_name] = light_exporter.export_light(l)
+
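+ # Build a lighting resource with the ambient color and references to the exported lights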
+ lighting_name = scene.name+".lightn"
+ if lighting_name not in resources:
+ lighting_res = Resource(lighting_name, "lighting")
+ lighting_res.statements.append(Statement("ambient", *tuple(scene.ambient_light)))
+ for l in lights:
+ lighting_res.statements.append(lighting_res.create_reference_statement("light", resources[l.name+".light"]))
+
+ resources[lighting_name] = lighting_res
+
+ def export_sequence(self, scene, resources):
+ from .datafile import Resource, Statement, Token
+ seq_res = Resource(scene.name+".seq", "sequence")
+
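+ # Enable high dynamic range rendering if the scene requests it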
+ if scene.use_hdr:
+ seq_res.statements.append(Statement("hdr", True))
+
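+ # Clear color and depth buffers at the start of the sequence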
+ ss = Statement("clear")
+ ss.sub.append(Statement("color", 0.0, 0.0, 0.0, 0.0))
+ ss.sub.append(Statement("depth", 1.0))
+ seq_res.statements.append(ss)
+
+ scene_res = resources[scene.name+".scene"]
+ lighting_res = resources[scene.name+".lightn"]
+
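+ # Check whether the scene or any background set has opaque or blended instances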
+ any_opaque = False
+ any_blended = False
+ s = scene
+ while s:
+ if s.instances:
+ any_opaque = True
+ if s.blended_instances:
+ any_blended = True
+ s = s.background_set
+
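+ # Render step for opaque instances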
+ if any_opaque:
+ ss = Statement("step", "", "content")
+ ss.sub.append(Statement("depth_test", Token("LEQUAL")))
+ ss.sub.append(seq_res.create_reference_statement("lighting", lighting_res))
+ ss.sub.append(seq_res.create_reference_statement("scene", scene_res))
+ seq_res.statements.append(ss)
+
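+ # Separate render step for blended instances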
+ if any_blended:
+ ss = Statement("step", "blended", "content")
+ ss.sub.append(Statement("depth_test", Token("LEQUAL")))
+ ss.sub.append(seq_res.create_reference_statement("lighting", lighting_res))
+ ss.sub.append(seq_res.create_reference_statement("scene", scene_res))
+ seq_res.statements.append(ss)
+
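+ # Optional ambient occlusion pass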
+ if scene.use_ao:
+ ss = Statement("ambient_occlusion")
+ ss.sub.append(Statement("occlusion_radius", scene.ao_distance))
+ ss.sub.append(Statement("samples", scene.ao_samples))
+ seq_res.statements.append(ss)
+
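+ # With HDR, add a bloom pass and a colorcurve which applies exposure adjustment and converts to sRGB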
+ if scene.use_hdr:
+ seq_res.statements.append(Statement("bloom"))
+ ss = Statement("colorcurve")
+ ss.sub.append(Statement("exposure_adjust", scene.exposure))
+ ss.sub.append(Statement("srgb"))
+ seq_res.statements.append(ss)
+ else:
+ # Add a colorcurve with linear response to convert into sRGB color space
+ ss = Statement("colorcurve")
+ ss.sub.append(Statement("brightness_response", 1.0))
+ ss.sub.append(Statement("srgb"))
+ seq_res.statements.append(ss)
+
+ return seq_res