X-Git-Url: http://git.tdb.fi/?p=libs%2Fgl.git;a=blobdiff_plain;f=blender%2Fio_mspgl%2Fexport_mesh.py;h=16f9514a97402443cadd8bb12d2ce1ca6e2170ee;hp=b695b40e1e09af236ee8d1d0cde3c0703a56f4a9;hb=86721a55699193e63c76e8a0a7b0ced0416c1cce;hpb=d5b484e2aee6c485abd4d07631f6d863eaaa90a0 diff --git a/blender/io_mspgl/export_mesh.py b/blender/io_mspgl/export_mesh.py index b695b40e..16f9514a 100644 --- a/blender/io_mspgl/export_mesh.py +++ b/blender/io_mspgl/export_mesh.py @@ -1,397 +1,147 @@ import itertools +import os import bpy import mathutils -class VertexCache: - def __init__(self, size): - self.size = size - self.slots = [-1]*self.size - - def fetch(self, v): - hit = v.index in self.slots - if hit: - self.slots.remove(v.index) - self.slots.append(v.index) - if not hit: - del self.slots[0] - return hit - - def fetch_strip(self, strip): - hits = 0 - for v in strip: - if self.fetch(v): - hits += 1 - return hits - - def test_strip(self, strip): - hits = 0 - for i in range(len(strip)): - if i>=self.size: - break - if strip[i].index in self.slots[i:]: - hits += 1 - return hits - - class MeshExporter: def __init__(self): self.show_progress = True self.use_strips = True self.use_degen_tris = False - self.max_strip_len = 1024 - self.optimize_cache = True - self.cache_size = 64 - self.material_tex = False - - def stripify(self, mesh, progress=None): - for f in mesh.faces: - f.flag = False - - faces_done = 0 - strips = [] - loose = [] - - cache = None - if self.optimize_cache: - cache = VertexCache(self.cache_size) - - island = [] - face_neighbors = [] - island_strips = [] - while 1: - if not island: - # No current island; find any unused face to start from - queue = [] - for f in mesh.faces: - if not f.flag: - f.flag = True - queue.append(f) - break - - if not queue: - break - - # Find all faces connected to the first one - while queue: - face = queue.pop(0) - island.append(face) - - for n in face.get_neighbors(): - if not n.flag: - n.flag = True - queue.append(n) - - face_neighbors = [f.get_neighbors() for f in island] - - # Unflag the island for the next phase - for f in island: - f.flag = False - - # Find an unused face with as few unused neighbors as possible, but - # at least one. This heuristic gives a preference to faces in corners - # or along borders of a non-closed island. - best = 5 - face = None - for i, f in enumerate(island): - if f.flag: - continue - - score = sum(not n.flag for n in face_neighbors[i]) - if score>0 and scorebest_hits: - best = i - best_hits = hits - - strip = island_strips.pop(best) - strips.append(strip) - - if cache: - cache.fetch_strip(strip) - - faces_done += len(island) - if progress: - progress.set_progress(float(faces_done)/len(mesh.faces)) - - # Collect any faces that weren't used in strips - loose += [f for f in island if not f.flag] - for f in island: - f.flag = True - - island = [] - island_strips = [] - - if cache: - cache = VertexCache(self.cache_size) - total_hits = 0 - - if self.use_degen_tris and strips: - big_strip = [] - - for s in strips: - if big_strip: - # Generate glue elements, ensuring that the next strip begins at - # an even position - glue = [big_strip[-1], s[0]] - if len(big_strip)%2: - glue += [s[0]] - - big_strip += glue - if cache: - total_hits += cache.fetch_strip(glue) + self.export_all = False - big_strip += s - if cache: - total_hits += cache.fetch_strip(s) + def join_strips(self, strips): + big_strip = [] - for f in loose: - # Add loose faces to the end. This wastes space, using five - # elements per triangle and six elements per quad. 
+ for s in strips: + if big_strip: + # Generate glue elements, ensuring that the next strip begins at + # an even position + glue = [big_strip[-1], s[0]] if len(big_strip)%2: - order = (-1, -2, 0, 1) - else: - order = (0, 1, -1, -2) - vertices = [f.vertices[i] for i in order[:len(f.vertices)]] - - if big_strip: - glue = [big_strip[-1], vertices[0]] - big_strip += glue - if cache: - total_hits += cache.fetch_strip(glue) - - big_strip += vertices - if cache: - total_hits += cache.fetch_strip(vertices) - - strips = [big_strip] - loose = [] - - return strips, loose - - def export(self, context, out_file, obj=None, progress=None): - if obj is None: - obj = context.active_object + glue += [s[0]] - objs = [(obj, mathutils.Matrix())] - check = objs - while check: - children = [] - for o, m in check: - for c in o.children: - if c.compound: - children.append((c, m*c.matrix_local)) - objs += children - check = children + big_strip += glue - if not objs: - raise Exception("Nothing to export") - for o, m in objs: - if o.type!="MESH": - raise Exception("Can only export Mesh data") + big_strip += s - from .mesh import Mesh - from .util import Progress + return big_strip - if self.show_progress: - if not progress: - progress = Progress(context) - progress.set_task("Preparing", 0.0, 0.0) + def export_to_file(self, context, out_fn): + if self.export_all: + objs = [o for o in context.selected_objects if o.type=="MESH"] else: - progress = None - - mesh = None - bmeshes = [] - winding_test = False - for o, m in objs: - if o.data.winding_test: - winding_test = True - if o.material_tex: - self.material_tex = True - bmesh = o.to_mesh(context.scene, True, "PREVIEW") - bmeshes.append(bmesh) - me = Mesh(bmesh) - me.transform(m) - if not mesh: - mesh = me - else: - mesh.splice(me) - - if progress: - progress.set_task("Smoothing", 0.05, 0.35) - if mesh.smoothing=="NONE": - mesh.flatten_faces() - mesh.split_smooth(progress) - - if mesh.smoothing!="BLENDER": - mesh.compute_normals() - - if mesh.vertex_groups: - mesh.sort_vertex_groups(mesh.max_groups_per_vertex) - - # Create a mapping from vertex group indices to bone indices - first_obj = objs[0][0] - group_index_map = dict((i, i) for i in range(len(first_obj.vertex_groups))) - if first_obj.parent and first_obj.parent.type=="ARMATURE": - armature = first_obj.parent.data - bone_indices = dict((armature.bones[i].name, i) for i in range(len(armature.bones))) - for g in first_obj.vertex_groups: - if g.name in bone_indices: - group_index_map[g.index] = bone_indices[g.name] + objs = [context.active_object] - if self.material_tex and mesh.materials: - mesh.generate_material_uv() - - texunits = [] - force_unit0 = False - if mesh.uv_layers and (mesh.use_uv!="NONE" or self.material_tex): - # Figure out which UV layers to export - if mesh.use_uv=="ALL": - texunits = range(len(mesh.uv_layers)) - elif self.material_tex: - # The material UV layer is always the last one - texunits = [len(mesh.uv_layers)-1] - force_unit0 = True - else: - for i, u in enumerate(mesh.uv_layers): - if u.unit==0: - texunits = [i] - break - texunits = [(i, mesh.uv_layers[i]) for i in texunits] - texunits = [u for u in texunits if not u[1].hidden] + from .util import Progress - if mesh.tbn_vecs: - # TBN coordinates must be generated before vertices are split by any other layer - uv_names = [u.name for i, u in texunits] - if mesh.tbn_uvtex in uv_names: - tbn_index = uv_names.index(mesh.tbn_uvtex) - unit = texunits[tbn_index] - del texunits[tbn_index] - texunits.insert(0, unit) + path, base = 
os.path.split(out_fn) + base, ext = os.path.splitext(base) - for i, u in texunits: - if progress: - progress.set_task("Splitting UVs", 0.35+0.3*i/len(texunits), 0.35+0.3*(i+1)/len(texunits)) - mesh.split_uv(i, progress) - if mesh.tbn_vecs and u.name==mesh.tbn_uvtex: - mesh.compute_uv() - mesh.compute_tbn(i) + progress = Progress(self.show_progress and context) + for i, obj in enumerate(objs): + if self.export_all: + out_fn = os.path.join(path, obj.data.name+ext) - mesh.compute_uv() + progress.push_task_slice(obj.data.name, i, len(objs)) + resource = self.export_mesh(context, obj, progress) - strips = [] - loose = mesh.faces - if self.use_strips: - if progress: - progress.set_task("Creating strips", 0.65, 0.95) - strips, loose = self.stripify(mesh, progress) + resource.write_to_file(out_fn) + progress.pop_task() - if progress: - progress.set_task("Writing file", 0.95, 1.0) + def export_mesh(self, context, mesh_or_obj, progress): + from .mesh import Mesh, create_mesh_from_object - from .outfile import open_output - out_file = open_output(out_file) - - fmt = ["NORMAL3"] - if texunits: - for i, u in texunits: - size = str(len(mesh.vertices[0].uvs[i])) - if u.unit==0 or force_unit0: - fmt.append("TEXCOORD"+size) + if type(mesh_or_obj)==Mesh: + mesh = mesh_or_obj + else: + progress.push_task("", 0.0, 0.9) + mesh = create_mesh_from_object(context, mesh_or_obj, progress) + progress.pop_task() + + from .datafile import Resource, Statement, Token + resource = Resource(mesh.name+".mesh", "mesh") + statements = resource.statements + + st = Statement("vertices", Token("NORMAL3")) + if mesh.vertices[0].color: + st.append(Token("COLOR4_UBYTE")) + if mesh.uv_layers: + for u in mesh.uv_layers: + size = str(len(u.uvs[0])) + if u.unit==0: + st.append(Token("TEXCOORD"+size)) else: - fmt.append("TEXCOORD%s_%d"%(size, u.unit)) - if mesh.tbn_vecs: - fmt += ["TANGENT3", "BINORMAL3"] + st.append(Token("TEXCOORD{}_{}".format(size, u.unit))) + if mesh.tangent_vecs: + st.append(Token("TANGENT3")) if mesh.vertex_groups: - fmt.append("ATTRIB%d_5"%(mesh.max_groups_per_vertex*2)) - fmt.append("VERTEX3") - out_file.begin("vertices", *fmt) + st.append(Token("GROUP{}".format(mesh.max_groups_per_vertex))) + st.append(Token("WEIGHT{}".format(mesh.max_groups_per_vertex))) + st.append(Token("VERTEX3")) + normal = None - uvs = {} + color = None + uvs = [None]*len(mesh.uv_layers) tan = None - bino = None group = None + weight = None for v in mesh.vertices: if v.normal!=normal: - out_file.write("normal3", *v.normal) + st.sub.append(Statement("normal", *v.normal)) normal = v.normal - for i, u in texunits: - if v.uvs[i]!=uvs.get(i): - size = str(len(v.uvs[i])) - if u.unit==0 or force_unit0: - out_file.write("texcoord"+size, *v.uvs[i]) + if v.color!=color: + st.sub.append(Statement("color", *v.color)) + color = v.color + for i, u in enumerate(mesh.uv_layers): + if v.uvs[i]!=uvs[i]: + if u.unit==0: + st.sub.append(Statement("texcoord", *v.uvs[i])) else: - out_file.write("multitexcoord"+size, u.unit, *v.uvs[i]) + st.sub.append(Statement("multitexcoord", u.unit, *v.uvs[i])) uvs[i] = v.uvs[i] - if mesh.tbn_vecs: + if mesh.tangent_vecs: if v.tan!=tan: - out_file.write("tangent3", *v.tan) + st.sub.append(Statement("tangent", *v.tan)) tan = v.tan - if v.bino!=bino: - out_file.write("binormal3", *v.bino) - bino = v.bino if mesh.vertex_groups: - group_attr = [(group_index_map[g.group], g.weight*v.group_weight_scale) for g in v.groups[:mesh.max_groups_per_vertex]] - while len(group_attr)=32: - out_file.write("indices", *indices) - indices = [] 
- if indices: - out_file.write("indices", *indices) - out_file.end() + v_group = [g.group for g in v.groups] + v_weight = [g.weight for g in v.groups] + if v_group!=group: + st.sub.append(Statement("group", *v_group)) + group = v_group + if v_weight!=weight: + st.sub.append(Statement("weight", *v_weight)) + weight = v_weight + st.sub.append(Statement("vertex", *v.co)) + + statements.append(st) - if loose: - out_file.begin("batch", "TRIANGLES") - for f in loose: - for i in range(2, len(f.vertices)): - out_file.write("indices", f.vertices[0].index, f.vertices[i-1].index, f.vertices[i].index) - out_file.end() + if self.use_strips: + strips = mesh.vertex_sequence + if self.use_degen_tris: + strips = [self.join_strips(strips)] - if mesh.use_lines and mesh.lines: - out_file.begin("batch", "LINES") - for l in mesh.lines: - out_file.write("indices", l.vertices[0].index, l.vertices[1].index) - out_file.end() + for s in strips: + st = Statement("batch", Token("TRIANGLE_STRIP")) + for i in range(0, len(s), 32): + st.sub.append(Statement("indices", *(v.index for v in s[i:i+32]))) + statements.append(st) + else: + st = Statement("batch", Token('TRIANGLES')) + for f in mesh.faces: + st.sub.append(Statement("indices", *(v.index for v in f.vertices))) + statements.append(st) - if winding_test: - out_file.write("winding", "COUNTERCLOCKWISE") + if mesh.lines: + st = Statement("batch", Token('LINES')) + for l in mesh.lines: + st.sub.append(Statement("indices", *(v.index for v in l.vertices))) + statements.append(st) - if progress: - progress.set_task("Done", 1.0, 1.0) + if mesh.winding_test: + statements.append(Statement("winding", Token('COUNTERCLOCKWISE'))) - for m in bmeshes: - bpy.data.meshes.remove(m) + progress.set_progress(1.0) - return mesh + return resource
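The join_strips() method added in this commit merges separate triangle strips into a single TRIANGLE_STRIP batch by inserting degenerate triangles; the extra glue index keeps each strip starting at an even position so the winding of its triangles is preserved. A minimal standalone sketch of the same logic, using plain integer indices in place of the exporter's vertex objects:

    def join_strips(strips):
        big_strip = []
        for s in strips:
            if big_strip:
                # Glue with the last index of the previous strip and the first
                # index of the next one; repeat the latter when the strip so far
                # has odd length, so the next strip starts at an even position.
                glue = [big_strip[-1], s[0]]
                if len(big_strip)%2:
                    glue += [s[0]]
                big_strip += glue
            big_strip += s
        return big_strip

    print(join_strips([[0, 1, 2], [3, 4, 5]]))
    # [0, 1, 2, 2, 3, 3, 3, 4, 5] -- the repeated indices form degenerate
    # triangles that the GPU discards, so the two strips render unchanged.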
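For context, a hedged sketch of how the reworked exporter might be driven from Blender's Python console; the module path and output location are assumptions, and only MeshExporter, its option attributes, and export_to_file(context, out_fn) come from the code above:

    import bpy
    # Assumes the io_mspgl addon is installed so its package is importable.
    from io_mspgl.export_mesh import MeshExporter

    exporter = MeshExporter()
    exporter.use_strips = True        # emit TRIANGLE_STRIP batches from mesh.vertex_sequence
    exporter.use_degen_tris = True    # join the strips into one batch with degenerate triangles
    exporter.export_all = True        # one file per selected mesh object, named after its data block
    # With export_all set, the directory and extension of out_fn are reused for
    # each object; otherwise only the active object is written to out_fn.
    exporter.export_to_file(bpy.context, "/tmp/meshes/scene.mesh")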