def make_edge_key(i1, i2):
    """Return a canonical (low, high) tuple identifying the edge between two vertex indices.

    The key is order-independent: make_edge_key(a, b) == make_edge_key(b, a).
    """
    return (i1, i2) if i1 <= i2 else (i2, i1)
# Wrapper around a Blender edge.  Copy-constructs from another Edge wrapper,
# otherwise wraps a raw Blender edge datum (that branch, original lines 13-16,
# is not visible in this chunk).
9 def __init__(self, edge):
10 if edge.__class__==Edge:
11 self._edge = edge._edge
12 self.smooth = edge.smooth
# Shallow-copy the vertex list so this Edge can be relinked independently.
17 self.vertices = edge.vertices[:]
def __getattr__(self, attr):
    """Forward lookups of attributes missing on the wrapper to the wrapped Blender edge."""
    wrapped = self._edge
    return getattr(wrapped, attr)
# Decide whether this edge should be rendered smooth, based on the angle
# between its two faces.  `limit` is the cosine of the maximum smoothing angle.
27 def check_smooth(self, limit):
# Only edges shared by exactly two faces can be smooth; the early-out body
# (original lines 29-30) is not visible in this chunk.
28 if len(self.faces)!=2:
31 d = self.faces[0].normal.dot(self.faces[1].normal)
# Smooth if the faces are flat enough AND both are flagged smooth, or if they
# are nearly coplanar (d>0.99995) regardless of flags.
32 self.smooth = ((d>limit and self.faces[0].use_smooth and self.faces[1].use_smooth) or d>0.99995)
# Return the face on the other side of this edge from `f`.
# NOTE(review): the return statements (original lines 37-41) are not visible
# in this chunk; presumably returns the other face, or None for boundary edges.
34 def other_face(self, f):
35 if f.index==self.faces[0].index:
36 if len(self.faces)>=2:
# Return the endpoint of this edge that is not `v` (compared by index).
# NOTE(review): original line 46 between the two returns is not visible here.
43 def other_vertex(self, v):
44 if v.index==self.vertices[0].index:
45 return self.vertices[1]
47 return self.vertices[0]
# Wrapper around a Blender vertex.  Copy-constructs from another Vertex
# wrapper; several assignments (original lines 55, 57-61, 63, 65-67) are not
# visible in this chunk, including the presumed raw-vertex branch.
51 def __init__(self, vertex):
52 if vertex.__class__==Vertex:
53 self._vertex = vertex._vertex
54 self.uvs = vertex.uvs[:]
# `bino` is the binormal accumulated by compute_tbn; `tan` is presumably
# copied on the elided line above — confirm.
56 self.bino = vertex.bino
62 self.index = vertex.index
64 self.normal = vertex.normal
# Shallow-copy groups; they are re-wrapped as VertexGroup in Mesh.__init__.
68 self.groups = vertex.groups[:]
def __getattr__(self, attr):
    """Forward lookups of attributes missing on the wrapper to the wrapped Blender vertex."""
    wrapped = self._vertex
    return getattr(wrapped, attr)
# Order vertices by index.  NOTE(review): `__cmp__` and the builtin `cmp`
# only exist in Python 2; original lines 74-75 are not visible here.
73 def __cmp__(self, other):
76 return cmp(self.index, other.index)
# Wrapper around a Blender vertex-group weight entry.
# NOTE(review): original line 81 is not visible; it presumably assigns
# self._group, which __getattr__ below relies on — confirm.
80 def __init__(self, group):
82 self.group = group.group
83 self.weight = group.weight
def __getattr__(self, attr):
    """Forward lookups of attributes missing on the wrapper to the wrapped group entry."""
    wrapped = self._group
    return getattr(wrapped, attr)
# Wrapper around a Blender polygon.  Original lines 91, 93 and 95-97 are not
# visible in this chunk (presumably self._face and other copied fields).
90 def __init__(self, face):
92 self.index = face.index
# Shallow-copy so vertices can be rewritten to wrapper objects later.
94 self.vertices = face.vertices[:]
def __getattr__(self, attr):
    """Forward lookups of attributes missing on the wrapper to the wrapped Blender face."""
    wrapped = self._face
    return getattr(wrapped, attr)
# Order faces by index.  NOTE(review): `__cmp__`/`cmp` are Python 2 only;
# original lines 102-103 are not visible here.
101 def __cmp__(self, other):
104 return cmp(self.index, other.index)
def pivot_vertex(self, v):
    """Return the face's vertex positions as indices, rotated so that v comes first.

    Raises ValueError (from list.index) if v is not a vertex of this face.
    """
    count = len(self.vertices)
    start = self.vertices.index(v)
    return [(start + offset) % count for offset in range(count)]
# Rotate the face's vertex list so that a maximal run of the requested
# vertices `vt` comes first.  NOTE(review): the loop header (original line
# 113, presumably `for i in range(l):`) is not visible in this chunk.
110 def pivot_vertices(self, *vt):
111 flags = [(v in vt) for v in self.vertices]
112 l = len(self.vertices)
# Start at a flagged vertex whose predecessor (cyclically) is unflagged.
114 if flags[i] and not flags[(i+l-1)%l]:
115 return self.vertices[i:]+self.vertices[:i]
# Return this face's edge connecting vertices v1 and v2, identified by its
# canonical key.  The search over self.edges (original lines 119-121) is not
# visible in this chunk; raises KeyError when no edge matches.
117 def get_edge(self, v1, v2):
118 key = make_edge_key(v1.index, v2.index)
122 raise KeyError("No edge %s"%(key,))
# Return another edge of this face that touches vertex `v` but is not `e`.
# NOTE(review): the loop header introducing `d` (original line 125) and the
# return (127+) are not visible in this chunk — presumably iterates self.edges.
124 def other_edge(self, e, v):
126 if d!=e and v in d.vertices:
def get_neighbors(self):
    """Return the faces that share an edge with this face, skipping boundary edges (None)."""
    return [face for face in (edge.other_face(self) for edge in self.edges) if face]
# Represents a loose edge exported as a line primitive.  Original line 136 is
# not visible in this chunk.
135 def __init__(self, e):
# Keep a copy of the edge's two endpoint vertices.
137 self.vertices = e.vertices[:]
# Wrapper around a Blender UV layer, or a synthetic named layer (prepare_uv
# constructs one from a plain string).  Most of the constructor (original
# lines 143-148, 150-153, 155, 159-161) is not visible in this chunk.
142 def __init__(self, arg):
149 self.uvs = [d.uv for d in self.data]
# Parse a ".unitN" name suffix into a texture unit number, e.g. "uv.unit2".
154 dot = self.name.find('.')
156 ext = self.name[dot:]
157 if ext.startswith(".unit") and ext[5:].isdigit():
158 self.unit = int(ext[5:])
def __getattr__(self, attr):
    """Forward lookups of attributes missing on the wrapper to the wrapped UV layer."""
    wrapped = self._layer
    return getattr(wrapped, attr)
# Deep-ish copy of a Blender mesh into export-friendly wrapper objects.
# Many original lines are elided from this chunk (168-169, 173-174, 178, 183,
# 186-187, 190, 193, 197-200, 204, 207, 209-217, 219-221, 223, 225-227), so
# comments below only describe what is visible.
167 def __init__(self, mesh):
# Copy custom exporter settings stamped onto the mesh datablock.
170 self.winding_test = mesh.winding_test
171 self.tbn_vecs = mesh.tbn_vecs
172 self.vertex_groups = mesh.vertex_groups
# Wrap raw data in the local wrapper classes so it can be mutated freely.
175 self.vertices = [Vertex(v) for v in self.vertices]
176 for v in self.vertices:
177 v.groups = [VertexGroup(g) for g in v.groups]
179 self.faces = [Face(f) for f in self.polygons]
180 self.edges = [Edge(e) for e in self.edges]
181 self.loops = self.loops[:]
182 self.materials = self.materials[:]
184 # Clone only the desired UV layers
185 if self.use_uv=='NONE' or not self.uv_layers:
188 self.uv_layers = [UvLayer(u) for u in self.uv_layers]
# Sort visible layers by texture unit (unitless layers sort last via 1000).
189 self.uv_layers = sorted([u for u in self.uv_layers if not u.hidden], key=(lambda u: (u.unit or 1000, u.name)))
191 if self.use_uv=='UNIT0':
192 self.uv_layers = [self.uv_layers[0]]
194 # Assign texture unit numbers to UV layers that lack one
195 next_unit = max((u.unit+1 for u in self.uv_layers if u.unit is not None), default=0)
196 for u in self.uv_layers:
201 # Rewrite links between elements to point to cloned data, or create links
202 # where they don't exist
203 edge_map = {e.key: e for e in self.edges}
205 if len(f.vertices)>4:
206 raise ValueError("Ngons are not supported")
# Replace index references with the wrapper objects themselves.
208 f.vertices = [self.vertices[i] for i in f.vertices]
212 for k in f.edge_keys:
218 e.vertices = [self.vertices[i] for i in e.vertices]
222 # Store loose edges as lines
# NOTE(review): presumably gated on self.use_lines (elided line 223) — confirm.
224 self.lines = [Line(e) for e in self.edges if not e.faces]
228 self.vertex_sequence = []
def __getattr__(self, attr):
    """Forward lookups of attributes missing on the wrapper to the wrapped Blender mesh."""
    wrapped = self._mesh
    return getattr(wrapped, attr)
# Apply a transformation matrix to every vertex of the mesh.
# NOTE(review): the loop body (original line 235+) is not visible in this
# chunk; presumably transforms v.co by `matrix`.
233 def transform(self, matrix):
234 for v in self.vertices:
# Merge another Mesh into this one, appending its data and remapping indices.
# Several original lines are elided from this chunk (243, 246, 250, 253, 258,
# 262-263, 266, 270, 273, 275, 279, 281), including the material_map
# initialization and the index-shift statements inside the loops.
237 def splice(self, other):
238 if len(self.uv_layers)!=len(other.uv_layers):
239 raise ValueError("Meshes have incompatible UV layers")
240 for i, u in enumerate(self.uv_layers):
241 if u.name!=other.uv_layers[i].name:
242 raise ValueError("Meshes have incompatible UV layers")
244 # Merge materials and form a lookup from source material indices to the
245 # merged material list
247 for m in other.materials:
248 if m in self.materials:
249 material_map.append(self.materials.index(m))
251 material_map.append(len(self.materials))
252 self.materials.append(m)
254 # Append data and adjust indices where necessary. Since the data is
255 # spliced from the source mesh, rebuilding references is not necessary.
256 for i, u in enumerate(self.uv_layers):
257 u.uvs += other.uv_layers[i].uvs
259 offset = len(self.vertices)
260 self.vertices += other.vertices
261 for v in self.vertices[offset:]:
264 loop_offset = len(self.loops)
265 self.loops += other.loops
267 offset = len(self.faces)
268 self.faces += other.faces
269 for f in self.faces[offset:]:
271 f.loop_start += loop_offset
272 f.loop_indices = range(f.loop_start, f.loop_start+f.loop_total)
# Remap the source mesh's material slots into the merged material list.
274 f.material_index = material_map[f.material_index]
276 offset = len(self.edges)
277 self.edges += other.edges
278 for e in self.edges[offset:]:
# Rebuild keys since vertex indices changed when the meshes were appended.
280 e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)
282 self.lines += other.lines
# Split each quad face into two triangles, choosing the diagonal that gives
# the flatter result.  Elided original lines (287, 289-291, 293, 296-297,
# 300, 305-306, 309-310, 313, 317-318, 321, 323-324, 328-330, 333, 336, 339)
# include the `f = self.faces[i]` fetch, the triangle early-out, and the
# creation of the new Face `nf` and Edge `ne` — not visible in this chunk.
284 def prepare_triangles(self, progress):
285 face_count = len(self.faces)
286 for i in range(face_count):
288 nverts = len(f.vertices)
292 # Calculate normals at each vertex of the face
294 for j in range(nverts):
295 edge_vecs.append(f.vertices[(j+1)%nverts].co-f.vertices[j].co)
298 for j in range(nverts):
299 normals.append(edge_vecs[j].cross(edge_vecs[j-1]).normalized())
301 # Check which diagonal results in a flatter triangulation
302 flatness1 = normals[0].dot(normals[2])
303 flatness2 = normals[1].dot(normals[3])
304 cut_index = 1 if flatness1>flatness2 else 0
307 nf.index = len(self.faces)
308 self.faces.append(nf)
311 ne.index = len(self.edges)
312 self.edges.append(ne)
# The new face takes the "far" half of the quad; the cut edge joins
# vertices cut_index and cut_index+2.
314 nf.vertices = [f.vertices[cut_index], f.vertices[2], f.vertices[3]]
315 nf.loop_indices = [f.loop_indices[cut_index], f.loop_indices[2], f.loop_indices[3]]
316 for v in nf.vertices:
319 ne.vertices = [f.vertices[cut_index], f.vertices[2+cut_index]]
320 for v in ne.vertices:
322 ne.key = make_edge_key(ne.vertices[0].index, ne.vertices[1].index)
# Shrink the original quad down to the "near" triangle.
325 f.vertices[3-cut_index].faces.remove(f)
326 del f.vertices[3-cut_index]
327 f.loop_indices = [f.loop_indices[0], f.loop_indices[1], f.loop_indices[2+cut_index]]
# Distribute the quad's edges between the two triangles per cut direction.
331 nf.edges = [ne, f.edges[2], f.edges[3]]
332 f.edges = [f.edges[0], f.edges[1], ne]
334 nf.edges = [f.edges[1], f.edges[2], ne]
335 f.edges = [f.edges[0], ne, f.edges[3]]
337 f.normal = normals[1-cut_index]
338 nf.normal = normals[3-cut_index]
340 progress.set_progress(i/face_count)
# Mark smooth edges, split vertices along sharp edges, and recompute normals.
# Elided original lines (343, 345-348, 351-352, 354, 357, 361-363) include
# the default smooth_limit and the edge loop header — not visible here.
342 def prepare_smoothing(self, progress):
344 if self.smoothing=='NONE':
# Use the mesh's auto-smooth angle as the smoothing threshold (cosine form).
349 elif self.use_auto_smooth:
350 smooth_limit = math.cos(self.auto_smooth_angle)
353 e.check_smooth(smooth_limit)
355 progress.push_task("Sharp edges", 0.0, 0.7)
356 self.split_vertices(self.find_smooth_group, progress)
# Blender-computed normals are kept as-is only in 'BLENDER' mode.
358 if self.smoothing!='BLENDER':
359 progress.set_task("Updating normals", 0.7, 1.0)
360 self.compute_normals(progress)
# Trim each vertex's group list to max_groups_per_vertex (keeping the heaviest
# weights, rescaled to preserve the total), then remap group indices to
# armature bone indices.  Elided original lines: 366, 370, 372, 380, 382.
364 def prepare_vertex_groups(self, obj):
365 for v in self.vertices:
367 weight_sum = sum(g.weight for g in v.groups)
368 v.groups = sorted(v.groups, key=(lambda g: g.weight), reverse=True)[:self.max_groups_per_vertex]
# Rescale the surviving weights so they sum to the original total.
369 weight_scale = weight_sum/sum(g.weight for g in v.groups)
371 g.weight *= weight_scale
373 if obj.parent and obj.parent.type=="ARMATURE":
374 armature = obj.parent.data
375 bone_indices = {b.name: i for i, b in enumerate(armature.bones)}
376 group_index_map = {i: i for i in range(len(obj.vertex_groups))}
# NOTE(review): `first_obj` is not defined anywhere visible in this chunk —
# this looks like it should be `obj` (as on line 376). Confirm before use.
377 for g in first_obj.vertex_groups:
378 if g.name in bone_indices:
379 group_index_map[g.index] = bone_indices[g.name]
381 for v in self.vertices:
383 g.group = group_index_map[g.group]
# Build the final UV layer set: optionally synthesize a material-index layer,
# bake array-atlas layer indices into a third UV coordinate, copy UVs onto
# faces, split vertices per UV seam, and finally copy UVs onto vertices.
# Many original lines are elided from this chunk (388, 391-392, 395, 397,
# 400-401, 403, 406-409, 414, 418, 420, 423, 425-426, 429-430, 433, 439-441,
# 445-446, 449-451, 454, 456, 459), including several loop headers.
385 def prepare_uv(self, obj, progress):
# Synthesize a UV layer that encodes each face's material index as a U coord.
386 if obj.material_tex and self.use_uv!='NONE':
387 layer = UvLayer("material_tex")
389 if self.use_uv=='UNIT0':
390 self.uv_layers = [layer]
393 self.uv_layers.append(layer)
394 layer.unit = max((u.unit+1 for u in self.uv_layers if u.unit is not None), default=0)
396 layer.uvs = [None]*len(self.loops)
# Each face's loops get the same UV: the material slot's center in a 1D strip.
398 uv = mathutils.Vector(((f.material_index+0.5)/len(self.materials), 0.5))
399 for i in f.loop_indices:
402 # Form a list of UV layers referenced by materials with the array atlas
404 array_uv_layers = [t.uv_layer for m in self.materials if m.array_atlas for t in m.texture_slots if t and t.texture_coords=='UV']
405 array_uv_layers = [u for u in self.uv_layers if u.name in array_uv_layers]
410 if f.material_index<len(self.materials):
411 mat = self.materials[f.material_index]
412 if mat and mat.array_atlas:
413 layer = mat.array_layer
# Append the atlas layer index as a third UV component.
415 for l in array_uv_layers:
416 for i in f.loop_indices:
417 l.uvs[i] = mathutils.Vector((*l.uvs[i], layer))
419 # Copy UVs from layers to faces
421 for u in self.uv_layers:
422 f.uvs.append([u.uvs[i] for i in f.loop_indices])
424 prog_count = len(self.uv_layers)
427 # Split by the UV layer used for TBN vectors first so connectivity
428 # remains intact for TBN vector computation
431 uv_names = [u.name for u in self.uv_layers]
432 if self.tbn_uvtex in uv_names:
434 tbn_layer_index = uv_names.index(self.tbn_uvtex)
435 progress.push_task_slice("Computing TBN", 0, prog_count)
436 self.split_vertices(self.find_uv_group, progress, tbn_layer_index)
437 progress.set_task_slice(self.tbn_uvtex, 1, prog_count)
438 self.compute_tbn(tbn_layer_index, progress)
442 # Split by the remaining UV layers
443 for i, u in enumerate(self.uv_layers):
444 if i==tbn_layer_index:
447 progress.push_task_slice(u.name, prog_step, prog_count)
448 self.split_vertices(self.find_uv_group, progress, i)
452 # Copy UVs from faces to vertices
453 for v in self.vertices:
455 # All faces still connected to the vertex have the same UV value
457 i = f.vertices.index(v)
458 v.uvs = [u[i] for u in f.uvs]
# Vertices with no faces get zero UVs as a fallback.
460 v.uvs = [(0.0, 0.0)]*len(self.uv_layers)
# Split vertices so that each group of faces (as classified by
# find_group_func, e.g. find_smooth_group or find_uv_group) gets its own
# vertex copy; boundary edges are duplicated too.  Elided original lines
# (465-468, 470-472, 474, 476-477, 480-481, 483-485, 488, 491, 493, 495,
# 497-500, 502-503, 505, 508-510, 512-513) include the loop headers that
# bind `v`, `f`, `g`, `e`, `nv` and `ne` — not visible in this chunk.
462 def split_vertices(self, find_group_func, progress, *args):
463 vertex_count = len(self.vertices)
464 for i in range(vertex_count):
469 # Find all groups of faces on this vertex
473 groups.append(find_group_func(v, f, *args))
475 # Give groups after the first separate copies of the vertex
478 nv.index = len(self.vertices)
479 self.vertices.append(nv)
482 e_faces_in_g = [f for f in e.faces if f in g]
# Edges fully inside the group migrate; edges on the boundary are cloned.
486 if len(e_faces_in_g)<len(e.faces):
487 # Create a copy of an edge at the boundary of the group
489 ne.index = len(self.edges)
490 self.edges.append(ne)
492 ne.other_vertex(v).edges.append(ne)
494 for f in e_faces_in_g:
496 f.edges[f.edges.index(e)] = ne
# Re-point the edge at the new vertex copy and rebuild its key.
501 e.vertices[e.vertices.index(v)] = nv
504 e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)
506 # Filter out any edges that were removed from the original vertex
507 v.edges = [e for e in v.edges if v in e.vertices]
511 f.vertices[f.vertices.index(v)] = nv
514 progress.set_progress(i/vertex_count)
# Collect the set of faces around `vertex` that are connected to `face`
# through smooth edges (used as the grouping function for split_vertices).
# Almost the entire body (original lines 517-518, 520-530, 532+) is not
# visible in this chunk; presumably walks edge-to-edge around the vertex.
516 def find_smooth_group(self, vertex, face):
519 edges = [e for e in face.edges if vertex in e.vertices]
531 e = f.other_edge(e, vertex)
# Collect the faces around `vertex` whose UV (in layer `index`) at this
# vertex matches that of `face` (grouping function for split_vertices).
# Original lines 537-539 and 542+ are not visible in this chunk; `f.flag`
# presumably marks faces already assigned to a group.
535 def find_uv_group(self, vertex, face, index):
536 uv = face.uvs[index][face.vertices.index(vertex)]
540 for f in vertex.faces:
541 if not f.flag and f.uvs[index][f.vertices.index(vertex)]==uv:
# Recompute per-vertex normals as angle-weighted averages of face normals.
# Elided original lines (550, 559-562, 564) include the face loop header and
# the normalization / zero-normal check — not visible in this chunk.
547 def compute_normals(self, progress):
548 for i, v in enumerate(self.vertices):
549 v.normal = mathutils.Vector()
551 fv = f.pivot_vertices(v)
552 edge1 = fv[1].co-fv[0].co
553 edge2 = fv[-1].co-fv[0].co
# Skip degenerate (zero-length) edges to avoid angle() errors.
554 if edge1.length and edge2.length:
555 # Use the angle between edges as a weighting factor. This gives
556 # more consistent normals on bends with an inequal number of
557 # faces on each side.
558 v.normal += f.normal*edge1.angle(edge2)
# Fallback for vertices that accumulated no valid normal.
563 v.normal = mathutils.Vector((0, 0, 1))
565 progress.set_progress(i/len(self.vertices))
# Accumulate per-vertex tangent (tan) and binormal (bino) vectors from the UV
# layer at `index`, weighted by the angle between the face edges at the
# vertex.  Elided original lines (571, 575, 580-583, 587, 591-596) include
# the face loop header, the du/dv deltas, and presumably a zero-`div` guard
# before line 588 — not visible in this chunk.
567 def compute_tbn(self, index, progress):
568 # This function is called at an early stage during UV preparation when
569 # face UVs are not available yet
570 layer_uvs = self.uv_layers[index].uvs
572 for i, v in enumerate(self.vertices):
573 v.tan = mathutils.Vector()
574 v.bino = mathutils.Vector()
576 vi = f.pivot_vertex(v)
577 uv0 = layer_uvs[f.loop_indices[vi[0]]]
578 uv1 = layer_uvs[f.loop_indices[vi[1]]]
579 uv2 = layer_uvs[f.loop_indices[vi[-1]]]
584 edge1 = f.vertices[vi[1]].co-f.vertices[vi[0]].co
585 edge2 = f.vertices[vi[-1]].co-f.vertices[vi[0]].co
# Determinant of the UV-delta matrix; standard tangent-space derivation.
586 div = (du1*dv2-du2*dv1)
588 mul = edge1.angle(edge2)/div
589 v.tan += (edge1*dv2-edge2*dv1)*mul
590 v.bino += (edge2*du1-edge1*du2)*mul
597 progress.set_progress(i/len(self.vertices))
# Reorder faces for vertex-cache efficiency, then build triangle-strip-like
# vertex sequences for rendering.  Elided original lines (602, 604, 606-607,
# 611, 614-615, 617-618, 620, 622, 624-626, 629, 631-633, 635) include the
# strip-extension and winding-fix details — not visible in this chunk, so the
# comments below only describe the visible structure.
599 def prepare_sequence(self, progress):
600 progress.push_task("Reordering faces", 0.0, 0.5)
601 self.reorder_faces(progress)
603 progress.set_task("Building sequence", 0.5, 1.0)
605 for i, f in enumerate(self.faces):
608 # Rotate the first three vertices so that the new face can be added
609 if sequence[0] in f.vertices and sequence[1] not in f.vertices:
610 sequence.append(sequence[0])
612 elif sequence[2] not in f.vertices and sequence[1] in f.vertices:
613 sequence.insert(0, sequence[-1])
616 if sequence[-1] not in f.vertices:
619 to_add = [v for v in f.vertices if v!=sequence[-1] and v!=sequence[-2]]
# Duplicate a vertex (degenerate triangle) to fix strip winding parity.
621 if (f.vertices[1]==sequence[-1]) != (len(sequence)%2==1):
623 sequence.append(sequence[-1])
# Start a fresh sequence from this face's vertices.
627 sequence = f.vertices[:]
628 self.vertex_sequence.append(sequence)
630 progress.set_progress(i/len(self.faces))
634 self.reorder_vertices()
# Reorder faces with Tom Forsyth's linear-speed vertex cache optimization.
# Elided original lines (639-642, 647-650, 655-661, 664-668, 670-676,
# 678-679, 682, 687, 690-691, 698-700, 702-703, 705-708, 710-711, 713,
# 716-717) include the main loop, best-face selection bookkeeping and the
# n_processed counter — not visible in this chunk.
636 def reorder_faces(self, progress):
637 # Tom Forsyth's vertex cache optimization algorithm
638 # http://eelpi.gotdns.org/papers/fast_vert_cache_opt.html
# Tuning constants from the paper.
643 last_triangle_score = 0.75
644 cache_decay_power = 1.5
645 valence_boost_scale = 2.0
646 valence_boost_power = -0.5
651 # Keep track of the score and number of unused faces for each vertex
652 vertex_info = [[0, len(v.faces)] for v in self.vertices]
653 for vi in vertex_info:
654 vi[0] = valence_boost_scale*(vi[1]**valence_boost_power)
662 # Previous iteration gave no candidate for best face (or this is
663 # the first iteration). Scan all faces for the highest score.
669 score = sum(vertex_info[v.index][0] for v in f.vertices)
677 reordered_faces.append(face)
680 for v in face.vertices:
681 vertex_info[v.index][1] -= 1
683 # Shuffle the vertex into the front of the cache
684 if v in cached_vertices:
685 cached_vertices.remove(v)
686 cached_vertices.insert(0, v)
688 # Update scores for all vertices in the cache
689 for i, v in enumerate(cached_vertices):
692 score += last_triangle_score
693 elif i<max_cache_size:
694 score += (1-(i-3)/(max_cache_size-3))**cache_decay_power
# Valence boost favors vertices with few remaining unused faces.
695 if vertex_info[v.index][1]:
696 score += valence_boost_scale*(vertex_info[v.index][1]**valence_boost_power)
697 vertex_info[v.index][0] = score
701 for v in cached_vertices:
704 score = sum(vertex_info[fv.index][0] for fv in f.vertices)
# Trim vertices that fell off the end of the simulated cache.
709 del cached_vertices[max_cache_size:]
712 progress.set_progress(n_processed/len(self.faces))
714 self.faces = reordered_faces
715 for i, f in enumerate(self.faces):
# Renumber vertices in the order they first appear in the vertex sequences,
# then rebuild edge keys.  Elided original lines (720-721, 724-725, 728,
# 730-731) include the first-appearance check and the edge loop header.
718 def reorder_vertices(self):
719 for v in self.vertices:
722 reordered_vertices = []
723 for s in self.vertex_sequence:
726 v.index = len(reordered_vertices)
727 reordered_vertices.append(v)
729 self.vertices = reordered_vertices
# Edge keys depend on vertex indices, so they must be rebuilt here.
732 e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)
# Drop references to the underlying Blender data so the temporary meshes can
# be safely removed.  The per-item deletions (original lines 736-742, 744+)
# are not visible in this chunk.
734 def drop_references(self):
735 for v in self.vertices:
743 for u in self.uv_layers:
# Build an export-ready Mesh from a Blender object, splicing in compound
# child meshes, then run the full preparation pipeline.  Elided original
# lines (749, 751, 753, 755-759, 762-765, 768, 778-786, 797, 801, 803+)
# include the child-object traversal, the Mesh construction/splicing loop and
# the return — not visible in this chunk.  NOTE(review): `m*c.matrix_local`
# and `to_mesh(context.scene, True, "PREVIEW")` match the Blender 2.7x API.
748 def create_mesh_from_object(context, obj, progress):
750 raise Exception("Object is not a mesh")
752 progress.push_task("Preparing mesh", 0.0, 0.2)
754 objs = [(obj, mathutils.Matrix())]
# Compound children are merged into the parent mesh with their local matrix.
760 if c.type=="MESH" and c.compound:
761 objs.append((c, m*c.matrix_local))
766 bmesh = o.to_mesh(context.scene, True, "PREVIEW")
767 bmeshes.append(bmesh)
769 # Object.to_mesh does not copy custom properties
770 bmesh.winding_test = o.data.winding_test
771 bmesh.smoothing = o.data.smoothing
772 bmesh.use_lines = o.data.use_lines
773 bmesh.vertex_groups = o.data.vertex_groups
774 bmesh.max_groups_per_vertex = o.data.max_groups_per_vertex
775 bmesh.use_uv = o.data.use_uv
776 bmesh.tbn_vecs = o.data.tbn_vecs
777 bmesh.tbn_uvtex = o.data.tbn_uvtex
# The preparation pipeline, with progress sub-ranges summing to [0.2, 1.0].
787 progress.set_task("Triangulating", 0.2, 0.3)
788 mesh.prepare_triangles(progress)
789 progress.set_task("Smoothing", 0.3, 0.5)
790 mesh.prepare_smoothing(progress)
791 progress.set_task("Vertex groups", 0.5, 0.6)
792 mesh.prepare_vertex_groups(obj)
793 progress.set_task("Preparing UVs", 0.6, 0.8)
794 mesh.prepare_uv(obj, progress)
795 progress.set_task("Render sequence", 0.8, 1.0)
796 mesh.prepare_sequence(progress)
798 # Discard the temporary Blender meshes after making sure there's no
799 # references to the data
800 mesh.drop_references()
802 bpy.data.meshes.remove(m)