X-Git-Url: http://git.tdb.fi/?p=libs%2Fgl.git;a=blobdiff_plain;f=blender%2Fio_mspgl%2Fexport_mesh.py;h=e620b86f22ab9f0ebe155eb8ed50d3bcd9615789;hp=0d2e66ab442206b9f2454baa0ffe2e37f7557cb3;hb=6e0a6f7a7a406bd22eb4e3f7fc4bf2bdce01d7f9;hpb=b61c103559c83d6fe7309f2ca4489f09e701c4cf diff --git a/blender/io_mspgl/export_mesh.py b/blender/io_mspgl/export_mesh.py index 0d2e66ab..e620b86f 100644 --- a/blender/io_mspgl/export_mesh.py +++ b/blender/io_mspgl/export_mesh.py @@ -1,436 +1,132 @@ import itertools import bpy -from .outfile import OutFile - -class VertexCache: - def __init__(self, size): - self.size = size - self.slots = [-1]*self.size - - def fetch(self, v): - hit = v.index in self.slots - if hit: - self.slots.remove(v.index) - self.slots.append(v.index) - if not hit: - del self.slots[0] - return hit - - def fetch_strip(self, strip): - hits = 0 - for v in strip: - if self.fetch(v): - hits += 1 - return hits - - def test_strip(self, strip): - hits = 0 - for i in range(len(strip)): - if i>=self.size: - break - if strip[i].index in self.slots[i:]: - hits += 1 - return hits - +import mathutils class MeshExporter: def __init__(self): + self.show_progress = True self.use_strips = True - self.use_degen_tris = True - self.max_strip_len = 1024 - self.optimize_cache = False - self.cache_size = 64 - self.export_lines = True - self.export_uv = "UNIT0" - self.tbn_vecs = False - self.tbn_uvtex = "" - self.compound = False - self.object = False - self.material_tex = False - self.textures = "REF" - self.smoothing = "MSPGL" - self.export_groups = False - self.max_groups = 2 - - def stripify(self, mesh, progress = None): - for f in mesh.faces: - f.flag = False - - faces_done = 0 - strips = [] - loose = [] - - cache = None - if self.optimize_cache: - cache = VertexCache(self.cache_size) - - island = [] - island_strips = [] - while 1: - if not island: - # No current island; find any unused face to start from - queue = [] - for f in mesh.faces: - if not f.flag: - f.flag = True - queue.append(f) - break - - if not queue: - break - - # Find all faces connected to the first one - while queue: - face = queue.pop(0) - island.append(face) - - for n in f.get_neighbors(): - if not n.flag: - n.flag = True - queue.append(n) - - # Unflag the island for the next phase - for f in island: - f.flag = False - - # Find an unused face with as few unused neighbors as possible, but - # at least one. This heuristic gives a preference to faces in corners - # or along borders of a non-closed island. 
-			best = 5
-			face = None
-			for f in island:
-				if f.flag:
-					continue
+		self.use_degen_tris = False
 
-				score = sum(not n.flag for n in f.get_neighbors())
-				if score>0 and score<best:
-					face = f
-					best = score
-
-			if face:
-				strip = mesh.create_strip(face, self.max_strip_len)
-				if strip:
-					island_strips.append(strip)
-				else:
-					face.flag = True
-			else:
+	def join_strips(self, strips):
+		big_strip = []
 
-				while island_strips:
-					best = 0
-					if cache:
-						best_hits = 0
-						for i in range(len(island_strips)):
-							hits = cache.test_strip(island_strips[i])
-							if hits>best_hits:
-								best = i
-								best_hits = hits
-
-					strip = island_strips.pop(best)
-					strips.append(strip)
-
-					if cache:
-						cache.fetch_strip(strip)
-
-				faces_done += len(island)
-				if progress:
-					progress.set_progress(float(faces_done)/len(mesh.faces))
-
-				# Collect any faces that weren't used in strips
-				loose += [f for f in island if not f.flag]
-				for f in island:
-					f.flag = True
-
-				island = []
-				island_strips = []
-
-		if cache:
-			cache = VertexCache(self.cache_size)
-			total_hits = 0
-
-		if self.use_degen_tris and strips:
-			big_strip = []
-
-			for s in strips:
-				if big_strip:
-					# Generate glue elements, ensuring that the next strip begins at
-					# an even position
-					glue = [big_strip[-1], s[0]]
-					if len(big_strip)%2:
-						glue += [s[0]]
-
-					big_strip += glue
-					if cache:
-						total_hits += cache.fetch_strip(glue)
-
-				big_strip += s
-				if cache:
-					total_hits += cache.fetch_strip(s)
-
-			for f in loose:
-				# Add loose faces to the end.  This wastes space, using five
-				# elements per triangle and six elements per quad.
+		for s in strips:
+			if big_strip:
+				# Generate glue elements, ensuring that the next strip begins at
+				# an even position
+				glue = [big_strip[-1], s[0]]
 				if len(big_strip)%2:
-					order = (-1, -2, 0, 1)
-				else:
-					order = (0, 1, -1, -2)
-				vertices = [f.vertices[i] for i in order[:len(f.vertices)]]
-
-				if big_strip:
-					glue = [big_strip[-1], vertices[0]]
-					big_strip += glue
-					if cache:
-						total_hits += cache.fetch_strip(glue)
+					glue += [s[0]]
 
-				big_strip += vertices
-				if cache:
-					total_hits += cache.fetch_strip(vertices)
+				big_strip += glue
 
-			strips = [big_strip]
-			loose = []
+			big_strip += s
 
-		return strips, loose
-
-	def export(self, context, fn):
-		if self.compound:
-			objs = context.selected_objects
-		else:
-			objs = [context.active_object]
+		return big_strip
 
-		if not objs:
-			raise Exception("Nothing to export")
-		for o in objs:
-			if o.type!="MESH":
-				raise Exception("Can only export Mesh data")
+	def export_to_file(self, context, out_fn):
+		obj = context.active_object
 
-		from .mesh import Mesh
 		from .util import Progress
-
-		progress = Progress()
-		progress.set_task("Preparing", 0.0, 0.0)
-
-		mesh = None
-		bmeshes = []
-		for o in objs:
-			bmesh = o.to_mesh(context.scene, True, "PREVIEW")
-			bmeshes.append(bmesh)
-			if not mesh:
-				mesh = Mesh(bmesh)
-			else:
-				mesh.splice(Mesh(bmesh))
-
-		progress.set_task("Smoothing", 0.05, 0.35)
-		if self.smoothing=="NONE":
-			mesh.flatten_faces()
-		mesh.split_smooth(progress)
-
-		if self.smoothing!="BLENDER":
-			mesh.compute_normals()
-
-		if self.export_groups:
-			mesh.sort_vertex_groups(self.max_groups)
-
-		# Create a mapping from vertex group indices to bone indices
-		group_index_map = dict((i, i) for i in range(len(objs[0].vertex_groups)))
-		if objs[0].parent and objs[0].parent.type=="ARMATURE":
-			armature = objs[0].parent.data
-			bone_indices = dict((armature.bones[i].name, i) for i in range(len(armature.bones)))
-			for g in objs[0].vertex_groups:
-				if g.name in bone_indices:
-					group_index_map[g.index] = bone_indices[g.name]
-
-		if self.material_tex and mesh.materials:
-			mesh.generate_material_uv()
-
-		texunits = []
-		if mesh.uv_layers and self.export_uv!="NONE":
-			# Figure out which UV layers to export
-			if self.export_uv=="UNIT0":
-				if mesh.uv_layers[0].unit==0:
-					texunits = [0]
-			else:
-				texunits = range(len(mesh.uv_layers))
-			texunits = [(i, mesh.uv_layers[i]) for i in texunits]
-			texunits = [u for u in texunits if not u[1].hidden]
-
-			if self.tbn_vecs:
-				# TBN coordinates must be generated before vertices are split by any other layer
-				uv_names = [u.name for i, u in texunits]
-				if self.tbn_uvtex in uv_names:
-					tbn_index = uv_names.index(self.tbn_uvtex)
-					unit = texunits[tbn_index]
-					del texunits[tbn_index]
-					texunits.insert(0, unit)
-
-		for i, u in texunits:
-			progress.set_task("Splitting UVs", 0.35+0.3*i/len(texunits), 0.35+0.3*(i+1)/len(texunits))
-			mesh.split_uv(i, progress)
-			if self.tbn_vecs and u.name==self.tbn_uvtex:
-				mesh.compute_uv()
-				mesh.compute_tbn(i)
-
-		mesh.compute_uv()
+		progress = Progress(self.show_progress and context)
+		progress.push_task("", 0.0, 0.95)
+		resource = self.export_mesh(context, obj, progress)
 
-		strips = []
-		loose = mesh.faces
-		if self.use_strips:
-			progress.set_task("Creating strips", 0.65, 0.95)
-			strips, loose = self.stripify(mesh, progress)
-
-		progress.set_task("Writing file", 0.95, 1.0)
+		with open(out_fn, "w") as out_file:
+			for s in resource.statements:
+				s.write_to_file(out_file)
 
-		out_file = OutFile(fn)
-		if self.object:
-			out_file.begin("mesh")
+	def export_mesh(self, context, mesh_or_obj, progress):
+		from .mesh import Mesh, create_mesh_from_object
 
-		fmt = "NORMAL3"
-		if texunits:
-			for i, u in texunits:
+		if type(mesh_or_obj)==Mesh:
+			mesh = mesh_or_obj
+		else:
+			progress.push_task("", 0.0, 0.9)
+			mesh = create_mesh_from_object(context, mesh_or_obj, progress)
+			progress.pop_task()
+
+		from .datafile import Resource, Statement, Token
+		resource = Resource(mesh.name+".mesh")
+		statements = resource.statements
+
+		st = Statement("vertices", Token("NORMAL3"))
+		if mesh.uv_layers:
+			for u in mesh.uv_layers:
+				size = str(len(u.uvs[0]))
 				if u.unit==0:
-					fmt += "_TEXCOORD2"
+					st.append(Token("TEXCOORD"+size))
 				else:
-					fmt += "_TEXCOORD2%d"%u.unit
-		if self.tbn_vecs:
-			fmt += "_ATTRIB33_ATTRIB34"
-		if self.export_groups:
-			fmt += "_ATTRIB%d5"%(self.max_groups*2)
-		fmt += "_VERTEX3"
-		out_file.begin("vertices", fmt)
+					st.append(Token("TEXCOORD{}_{}".format(size, u.unit)))
+		if mesh.tbn_vecs:
+			st.append(Token("TANGENT3"))
+			st.append(Token("BINORMAL3"))
+		if mesh.vertex_groups:
+			st.append(Token("ATTRIB{}_5".format(mesh.max_groups_per_vertex*2)))
+		st.append(Token("VERTEX3"))
+
 		normal = None
-		uvs = [None]*len(texunits)
+		uvs = [None]*len(mesh.uv_layers)
 		tan = None
 		bino = None
 		group = None
 		for v in mesh.vertices:
 			if v.normal!=normal:
-				out_file.write("normal3", *v.normal)
+				st.sub.append(Statement("normal", *v.normal))
 				normal = v.normal
-			for i, u in texunits:
+			for i, u in enumerate(mesh.uv_layers):
 				if v.uvs[i]!=uvs[i]:
 					if u.unit==0:
-						out_file.write("texcoord2", *v.uvs[i])
+						st.sub.append(Statement("texcoord", *v.uvs[i]))
 					else:
-						out_file.write("multitexcoord2", u.unit, *v.uvs[i])
+						st.sub.append(Statement("multitexcoord", u.unit, *v.uvs[i]))
 					uvs[i] = v.uvs[i]
-			if self.tbn_vecs:
+			if mesh.tbn_vecs:
 				if v.tan!=tan:
-					out_file.write("attrib3", 3, *v.tan)
+					st.sub.append(Statement("tangent", *v.tan))
 					tan = v.tan
 				if v.bino!=bino:
-					out_file.write("attrib3", 4, *v.bino)
+					st.sub.append(Statement("binormal", *v.bino))
 					bino = v.bino
-			if self.export_groups:
-				group_attr = [(group_index_map[g.group], g.weight*v.group_weight_scale) for g in v.groups[:self.max_groups]]
-				while len(group_attr)<self.max_groups:
-					group_attr.append((0, 0.0))
-				out_file.write("attrib%d"%(self.max_groups*2), 5, *itertools.chain(*group_attr))
-			out_file.write("vertex3", *v.co)
-		out_file.end()
-
-		for s in strips:
-			out_file.begin("batch", "TRIANGLE_STRIP")
-			indices = []
-			for v in s:
-				indices.append(v.index)
-				if len(indices)>=32:
-					out_file.write("indices", *indices)
-					indices = []
-			if indices:
-				out_file.write("indices", *indices)
-			out_file.end()
-
-		if loose:
-			out_file.begin("batch", "TRIANGLES")
-			for f in loose:
-				for i in range(2, len(f.vertices)):
-					out_file.write("indices", f.vertices[0].index, f.vertices[i-1].index, f.vertices[i].index)
-			out_file.end()
-
-		if self.export_lines and mesh.lines:
-			out_file.write("batch", "LINES")
-			for l in mesh.lines:
-				out_file.write("indices", l.vertices[0].index, l.vertices[1].index)
-			out_file.end()
-
-		if self.object:
-			out_file.end()
-			out_file.begin("technique")
-			out_file.begin("pass", '""')
-			if mesh.materials:
-				if self.material_tex:
-					out_file.begin("material")
-					out_file.write("diffuse", 1.0, 1.0, 1.0, 1.0)
-					out_file.end()
-					index = 0
-					for u in mesh.uv_layers:
-						if u.name=="material_tex":
-							index = u.unit
-					out_file.begin("texunit", index)
-					out_file.begin("texture2d")
-					out_file.write("min_filter", "NEAREST")
-					out_file.write("mag_filter", "NEAREST")
-					out_file.write("storage", "RGB", len(mesh.materials), 1)
-					texdata = '"'
-					for m in mesh.materials:
-						color = [int(c*255) for c in m.diffuse_color]
-						texdata += "\\x%02X\\x%02X\\x%02X"%tuple(color)
-					texdata += '"'
-					out_file.write("raw_data", texdata)
-					out_file.end()
-					out_file.end()
-				else:
-					mat = mesh.materials[0]
-					out_file.begin("material")
-					diff = mat.diffuse_color
-					out_file.write("diffuse", diff.r, diff.g, diff.b, 1.0)
-					amb = diff*mat.ambient
-					out_file.write("ambient", amb.r, amb.g, amb.b, 1.0)
-					spec = mat.specular_color*mat.specular_intensity
-					out_file.write("specular", spec.r, spec.g, spec.b, 1.0)
-					out_file.write("shininess", mat.specular_hardness);
-					out_file.end()
+			st.sub.append(Statement("vertex", *v.co))
 
-		if self.textures!="NONE":
-			for slot in mesh.materials[0].texture_slots:
-				if not slot:
-					continue
+		statements.append(st)
 
-				tex = slot.texture
-				if tex.type!="IMAGE":
-					continue
+		if self.use_strips:
+			strips = mesh.vertex_sequence
+			if self.use_degen_tris:
+				strips = [self.join_strips(strips)]
 
-				if slot.uv_layer:
-					for u in mesh.uv_layers:
-						if u.name==slot.uv_layer:
-							index = u.unit
-				else:
-					index = mesh.uv_layers[0].unit
+			for s in strips:
+				st = Statement("batch", Token("TRIANGLE_STRIP"))
+				for i in range(0, len(s), 32):
+					st.sub.append(Statement("indices", *(v.index for v in s[i:i+32])))
+				statements.append(st)
+		else:
+			st = Statement("batch", Token('TRIANGLES'))
+			for f in mesh.faces:
+				st.sub.append(Statement("indices", *(v.index for v in f.vertices)))
+			statements.append(st)
 
-				out_file.begin("texunit", index)
-				if self.textures=="INLINE":
-					out_file.begin("texture2d")
-					out_file.write("min_filter", "LINEAR")
-					out_file.write("storage", "RGBA", tex.image.size[0], tex.image.size[1])
-					texdata = '"'
-					for p in tex.image.pixels:
-						texdata += "\\x%02X"%int(p*255)
-					texdata += '"'
-					out_file.write("raw_data", texdata)
-					out_file.end()
-				else:
-					out_file.write("texture", '"%s"'%tex.image.name)
-				out_file.end()
+		if mesh.lines:
+			st = Statement("batch", Token('LINES'))
+			for l in mesh.lines:
+				st.sub.append(Statement("indices", *(v.index for v in l.vertices)))
+			statements.append(st)
 
-			out_file.end()
-		out_file.end()
+		if mesh.winding_test:
+			statements.append(Statement("winding", Token('COUNTERCLOCKWISE')))
 
-		progress.set_task("Done", 1.0, 1.0)
+		progress.set_progress(1.0)
 
-		for m in bmeshes:
-			bpy.data.meshes.remove(m)
+		return resource
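
With the refactored exporter above, a mesh export reduces to constructing a MeshExporter and calling export_to_file() with a Blender context.  A minimal usage sketch, assuming the add-on package is importable as io_mspgl from Blender's Python console; the import path and output file name are illustrative, not taken from the diff:

import bpy
from io_mspgl.export_mesh import MeshExporter

exporter = MeshExporter()
exporter.use_strips = True       # emit TRIANGLE_STRIP batches built from mesh.vertex_sequence
exporter.use_degen_tris = False  # True would first join all strips with degenerate triangles
exporter.export_to_file(bpy.context, "/tmp/example.mesh")  # exports the active object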