def make_edge_key(i1, i2):
    """Return a canonical (low, high) tuple identifying the edge between two vertex indices."""
    if i1 > i2:
        i1, i2 = i2, i1
    return (i1, i2)
# Wrap a Blender edge, or copy another Edge wrapper.  Several original
# lines are elided from this view (the non-Edge branch is not visible).
9 def __init__(self, edge):
10 if edge.__class__==Edge:
# Copying an existing wrapper: share the underlying Blender edge and
# duplicate the mutable per-edge export state.
11 self._edge = edge._edge
12 self.smooth = edge.smooth
# Shallow copy so the vertex list can be rewritten independently later.
17 self.vertices = edge.vertices[:]
def __getattr__(self, attr):
    """Proxy attribute lookups that miss on the wrapper to the wrapped Blender edge."""
    return getattr(self._edge, attr)
# Decide whether this edge is smooth-shaded; limit is the cosine of the
# crease angle.  The early-out body on the elided lines presumably
# returns for border/non-manifold edges — TODO confirm.
27 def check_smooth(self, limit):
28 if len(self.faces)!=2:
# d is the cosine of the dihedral angle between the two face normals.
31 d = self.faces[0].normal.dot(self.faces[1].normal)
# Smooth when both faces are flagged smooth and within the angle limit,
# or when the faces are nearly coplanar (d>0.99995) regardless of flags.
32 self.smooth = ((d>limit and self.faces[0].use_smooth and self.faces[1].use_smooth) or d>0.99995)
# Return the face on the opposite side of this edge from f; the result
# for an edge with no second face is on elided lines (presumably None —
# callers filter falsy values, see get_neighbors).  TODO confirm.
34 def other_face(self, f):
35 if f.index==self.faces[0].index:
36 if len(self.faces)>=2:
# Return the endpoint of this edge that is not v (compared by index).
43 def other_vertex(self, v):
44 if v.index==self.vertices[0].index:
45 return self.vertices[1]
47 return self.vertices[0]
# Wrap a Blender vertex, or copy another Vertex wrapper.  Several
# original lines (including the non-Vertex branch) are elided here.
51 def __init__(self, vertex):
52 if vertex.__class__==Vertex:
53 self._vertex = vertex._vertex
# Copy per-vertex export state: UVs, tangent basis, index, normal and
# group weights.  Lists are shallow-copied so copies can diverge.
54 self.uvs = vertex.uvs[:]
56 self.bino = vertex.bino
62 self.index = vertex.index
64 self.normal = vertex.normal
68 self.groups = vertex.groups[:]
def __getattr__(self, attr):
    """Proxy attribute lookups that miss on the wrapper to the wrapped Blender vertex."""
    return getattr(self._vertex, attr)
# Order vertices by index.  NOTE(review): __cmp__ and cmp() exist only in
# Python 2; under Python 3 this method is never invoked and cmp() would
# raise NameError — confirm whether this is dead code.
73 def __cmp__(self, other):
76 return cmp(self.index, other.index)
# Wrap a Blender vertex-group weight entry.  The assignment of
# self._group (read by __getattr__) is on an elided line — TODO confirm.
80 def __init__(self, group):
82 self.group = group.group
83 self.weight = group.weight
def __getattr__(self, attr):
    """Proxy attribute lookups that miss on the wrapper to the wrapped group entry."""
    return getattr(self._group, attr)
# Wrap a Blender polygon; most of the copied state (edges, loop indices,
# UVs, the _face backreference) is on elided lines.
90 def __init__(self, face):
92 self.index = face.index
# Shallow copy; Mesh.__init__ later replaces these indices with Vertex objects.
94 self.vertices = face.vertices[:]
def __getattr__(self, attr):
    """Proxy attribute lookups that miss on the wrapper to the wrapped Blender polygon."""
    return getattr(self._face, attr)
# Order faces by index.  NOTE(review): __cmp__/cmp() are Python 2 only;
# under Python 3 this is never invoked — confirm whether it is dead code.
101 def __cmp__(self, other):
104 return cmp(self.index, other.index)
def pivot_vertex(self, v):
    """Return the positions of this face's vertices as indices into self.vertices,
    rotated so that vertex v comes first."""
    count = len(self.vertices)
    start = self.vertices.index(v)
    rotated = []
    for offset in range(count):
        rotated.append((start+offset)%count)
    return rotated
# Rotate the face's vertex list so it starts at the first vertex (in
# winding order) of the run of vertices given in vt.
110 def pivot_vertices(self, *vt):
# flags[i] is True when self.vertices[i] is one of the requested vertices.
111 flags = [(v in vt) for v in self.vertices]
112 l = len(self.vertices)
# The loop header is elided; presumably iterates i over range(l) — TODO confirm.
# Start where a flagged vertex follows an unflagged one (run boundary).
114 if flags[i] and not flags[(i+l-1)%l]:
115 return self.vertices[i:]+self.vertices[:i]
# Return this face's edge connecting v1 and v2.  The search over the
# face's edges is on elided lines; raises KeyError when no edge matches.
117 def get_edge(self, v1, v2):
118 key = make_edge_key(v1.index, v2.index)
122 raise KeyError("No edge %s"%(key,))
# Return this face's other edge through vertex v, i.e. the edge that is
# not e but still contains v.  The loop header and return are elided;
# presumably iterates d over self.edges — TODO confirm.
124 def other_edge(self, e, v):
126 if d!=e and v in d.vertices:
def get_neighbors(self):
    """Return the faces that share an edge with this one, omitting absent neighbors."""
    result = []
    for edge in self.edges:
        neighbor = edge.other_face(self)
        if neighbor:
            result.append(neighbor)
    return result
# Represent a loose edge as a line primitive; copies the edge's vertex
# list (other initialization is on an elided line).
135 def __init__(self, e):
137 self.vertices = e.vertices[:]
# Wrap a Blender UV layer (or, per the str-typed uses elsewhere, build a
# bare named layer).  Most of the initialization is on elided lines.
142 def __init__(self, arg):
150 self.uvs = [d.uv for d in self.data]
# Parse a ".unitN" suffix in the layer name into an explicit texture
# unit number; names without the suffix leave self.unit unset here.
155 dot = self.name.find('.')
157 ext = self.name[dot:]
158 if ext.startswith(".unit") and ext[5:].isdigit():
159 self.unit = int(ext[5:])
def __getattr__(self, attr):
    """Proxy attribute lookups that miss on the wrapper to the wrapped UV layer."""
    return getattr(self._layer, attr)
# Copy a Blender mesh into exporter wrapper objects so the data can be
# freely restructured (triangulated, split, reordered) without touching
# Blender's data.  Many original lines are elided from this view.
168 def __init__(self, mesh):
171 self.winding_test = mesh.winding_test
172 self.tbn_vecs = mesh.tbn_vecs
173 self.vertex_groups = mesh.vertex_groups
# Wrap every element so it can carry extra export state.
176 self.vertices = [Vertex(v) for v in mesh.vertices]
177 for v in self.vertices:
178 v.groups = [VertexGroup(g) for g in v.groups]
180 self.faces = [Face(f) for f in mesh.polygons]
181 self.edges = [Edge(e) for e in mesh.edges]
182 self.loops = mesh.loops[:]
183 self.materials = mesh.materials[:]
185 # Clone only the desired UV layers
186 if self.use_uv=='NONE' or not mesh.uv_layers:
189 self.uv_layers = [UvLayer(u) for u in mesh.uv_layers]
# Visible layers sorted by texture unit (unitless layers sort last via
# the 1000 sentinel), then by name.
190 self.uv_layers = sorted([u for u in self.uv_layers if not u.hidden], key=(lambda u: (u.unit or 1000, u.name)))
192 if self.use_uv=='UNIT0':
193 self.uv_layers = [self.uv_layers[0]]
195 # Assign texture unit numbers to UV layers that lack one
196 next_unit = max((u.unit+1 for u in self.uv_layers if u.unit is not None), default=0)
197 for u in self.uv_layers:
202 # Rewrite links between elements to point to cloned data, or create links
203 # where they don't exist
204 edge_map = {e.key: e for e in self.edges}
206 if len(f.vertices)>4:
207 raise ValueError("Ngons are not supported")
# Replace integer index references with the wrapper objects themselves.
209 f.vertices = [self.vertices[i] for i in f.vertices]
213 for k in f.edge_keys:
219 e.vertices = [self.vertices[i] for i in e.vertices]
223 # Store loose edges as lines
225 self.lines = [Line(e) for e in self.edges if not e.faces]
229 self.vertex_sequence = []
def __getattr__(self, attr):
    """Proxy attribute lookups that miss on the wrapper to the wrapped Blender mesh."""
    return getattr(self._mesh, attr)
# Apply a transformation matrix to every vertex; the per-vertex update
# is on an elided line (presumably v.co = matrix*v.co — TODO confirm).
234 def transform(self, matrix):
235 for v in self.vertices:
# Merge another Mesh into this one.  Both meshes must have the same UV
# layers (count and names).  Some original lines are elided.
238 def splice(self, other):
239 if len(self.uv_layers)!=len(other.uv_layers):
240 raise ValueError("Meshes have incompatible UV layers")
241 for i, u in enumerate(self.uv_layers):
242 if u.name!=other.uv_layers[i].name:
243 raise ValueError("Meshes have incompatible UV layers")
245 # Merge materials and form a lookup from source material indices to the
246 # merged material list
248 for m in other.materials:
249 if m in self.materials:
250 material_map.append(self.materials.index(m))
252 material_map.append(len(self.materials))
253 self.materials.append(m)
255 # Append data and adjust indices where necessary. Since the data is
256 # spliced from the source mesh, rebuilding references is not necessary.
257 for i, u in enumerate(self.uv_layers):
258 u.uvs += other.uv_layers[i].uvs
# Spliced vertices keep their objects; only their indices need shifting
# (the per-vertex adjustment is on elided lines).
260 offset = len(self.vertices)
261 self.vertices += other.vertices
262 for v in self.vertices[offset:]:
265 loop_offset = len(self.loops)
266 self.loops += other.loops
268 offset = len(self.faces)
269 self.faces += other.faces
270 for f in self.faces[offset:]:
# Rebase loop references and remap material indices for spliced faces.
272 f.loop_start += loop_offset
273 f.loop_indices = range(f.loop_start, f.loop_start+f.loop_total)
275 f.material_index = material_map[f.material_index]
277 offset = len(self.edges)
278 self.edges += other.edges
279 for e in self.edges[offset:]:
# Edge keys are derived from vertex indices, so recompute after the shift.
281 e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)
283 self.lines += other.lines
# Split every quad face into two triangles, choosing the diagonal that
# gives the flatter result.  New faces/edges are appended past the
# original face_count so the loop does not revisit them.  Several
# original lines (f binding, list initializations, nf/ne creation and
# the branch structure) are elided from this view.
285 def prepare_triangles(self, progress):
286 face_count = len(self.faces)
287 for i in range(face_count):
289 nverts = len(f.vertices)
293 # Calculate normals at each vertex of the face
295 for j in range(nverts):
296 edge_vecs.append(f.vertices[(j+1)%nverts].co-f.vertices[j].co)
299 for j in range(nverts):
# Normal at vertex j from the two edges meeting there.
300 normals.append(edge_vecs[j-1].cross(edge_vecs[j]).normalized())
302 # Check which diagonal results in a flatter triangulation
# Higher dot product between opposite-corner normals means the face
# bends less across that diagonal.
303 flatness1 = normals[0].dot(normals[2])
304 flatness2 = normals[1].dot(normals[3])
305 cut_index = 1 if flatness1>flatness2 else 0
308 nf.index = len(self.faces)
309 self.faces.append(nf)
312 ne.index = len(self.edges)
313 self.edges.append(ne)
# The new face takes the far half of the quad; the new edge is the
# chosen diagonal.
315 nf.vertices = [f.vertices[cut_index], f.vertices[2], f.vertices[3]]
316 nf.loop_indices = [f.loop_indices[cut_index], f.loop_indices[2], f.loop_indices[3]]
317 for v in nf.vertices:
320 ne.vertices = [f.vertices[cut_index], f.vertices[2+cut_index]]
321 for v in ne.vertices:
323 ne.key = make_edge_key(ne.vertices[0].index, ne.vertices[1].index)
# Shrink the original face to the near triangle.
326 f.vertices[3-cut_index].faces.remove(f)
327 del f.vertices[3-cut_index]
328 f.loop_indices = [f.loop_indices[0], f.loop_indices[1], f.loop_indices[2+cut_index]]
# Distribute the quad's edges between the two triangles; the branch
# condition on cut_index is elided.
332 nf.edges = [ne, f.edges[2], f.edges[3]]
333 f.edges = [f.edges[0], f.edges[1], ne]
335 nf.edges = [f.edges[1], f.edges[2], ne]
336 f.edges = [f.edges[0], ne, f.edges[3]]
342 f.normal = normals[1-cut_index]
343 nf.normal = normals[3-cut_index]
345 progress.set_progress(i/face_count)
# Mark edges sharp/smooth, split vertices along sharp edges and, unless
# Blender's own normals are kept, recompute vertex normals.  The branch
# bodies and the edge loop header are partly elided.
347 def prepare_smoothing(self, progress):
349 if self.smoothing=='NONE':
354 elif self.use_auto_smooth:
# Compare face-normal dot products against the cosine of the auto-smooth angle.
355 smooth_limit = math.cos(self.auto_smooth_angle)
358 e.check_smooth(smooth_limit)
360 progress.push_task("Sharp edges", 0.0, 0.7)
361 self.split_vertices(self.find_smooth_group, progress)
363 if self.smoothing!='BLENDER':
364 progress.set_task("Updating normals", 0.7, 1.0)
365 self.compute_normals(progress)
# Trim each vertex's group weights to the configured maximum, rescale
# the remainder to preserve the total weight, and remap group indices to
# armature bone indices.  Several loop headers are elided.
369 def prepare_vertex_groups(self, obj):
370 for v in self.vertices:
372 weight_sum = sum(g.weight for g in v.groups)
# Keep only the heaviest groups, then scale so the sum is unchanged.
373 v.groups = sorted(v.groups, key=(lambda g: g.weight), reverse=True)[:self.max_groups_per_vertex]
374 weight_scale = weight_sum/sum(g.weight for g in v.groups)
376 g.weight *= weight_scale
378 if obj.parent and obj.parent.type=="ARMATURE":
379 armature = obj.parent.data
380 bone_indices = {b.name: i for i, b in enumerate(armature.bones)}
381 group_index_map = {i: i for i in range(len(obj.vertex_groups))}
# NOTE(review): first_obj is not defined anywhere in this scope — this
# almost certainly should be obj (whose groups were enumerated on the
# previous line); confirm against the full file before relying on it.
382 for g in first_obj.vertex_groups:
383 if g.name in bone_indices:
384 group_index_map[g.index] = bone_indices[g.name]
386 for v in self.vertices:
388 g.group = group_index_map[g.group]
# Build the final per-vertex UV data: optionally synthesize a
# "material_tex" layer encoding the material index, append array-atlas
# layer indices, copy layer UVs onto faces, split vertices that have
# conflicting UVs, and finally copy face UVs onto vertices.  Several
# loop headers and branch lines are elided from this view.
390 def prepare_uv(self, obj, progress):
391 if obj.material_tex and self.use_uv!='NONE':
392 layer = UvLayer("material_tex")
394 if self.use_uv=='UNIT0':
395 self.uv_layers = [layer]
398 self.uv_layers.append(layer)
399 layer.unit = max((u.unit+1 for u in self.uv_layers if u.unit is not None), default=0)
401 layer.uvs = [None]*len(self.loops)
# Encode the material index in the U coordinate, centered on the texel.
403 uv = mathutils.Vector(((f.material_index+0.5)/len(self.materials), 0.5))
404 for i in f.loop_indices:
407 # Form a list of UV layers referenced by materials with the array atlas
408 # option enabled
409 array_uv_layers = [t.uv_layer for m in self.materials if m.array_atlas for t in m.texture_slots if t and t.texture_coords=='UV']
410 array_uv_layers = [u for u in self.uv_layers if u.name in array_uv_layers]
415 if f.material_index<len(self.materials):
416 mat = self.materials[f.material_index]
417 if mat and mat.array_atlas:
418 layer = mat.array_layer
# Extend 2D UVs to 3D with the atlas layer index as the third component.
420 for l in array_uv_layers:
421 for i in f.loop_indices:
422 l.uvs[i] = mathutils.Vector((*l.uvs[i], layer))
424 # Copy UVs from layers to faces
426 for u in self.uv_layers:
427 f.uvs.append([u.uvs[i] for i in f.loop_indices])
429 prog_count = len(self.uv_layers)
432 # Split by the UV layer used for TBN vectors first so connectivity
433 # remains intact for TBN vector computation
436 uv_names = [u.name for u in self.uv_layers]
437 if self.tbn_uvtex in uv_names:
439 tbn_layer_index = uv_names.index(self.tbn_uvtex)
440 progress.push_task_slice("Computing TBN", 0, prog_count)
441 self.split_vertices(self.find_uv_group, progress, tbn_layer_index)
442 progress.set_task_slice(self.tbn_uvtex, 1, prog_count)
443 self.compute_tbn(tbn_layer_index, progress)
447 # Split by the remaining UV layers
448 for i, u in enumerate(self.uv_layers):
# The TBN layer was already split above; skip it (skip body elided).
449 if i==tbn_layer_index:
452 progress.push_task_slice(u.name, prog_step, prog_count)
453 self.split_vertices(self.find_uv_group, progress, i)
457 # Copy UVs from faces to vertices
458 for v in self.vertices:
460 # All faces still connected to the vertex have the same UV value
462 i = f.vertices.index(v)
463 v.uvs = [u[i] for u in f.uvs]
# Fallback for vertices with no faces (e.g. loose-line vertices).
465 v.uvs = [(0.0, 0.0)]*len(self.uv_layers)
# Split vertices whose faces fall into multiple groups (as decided by
# find_group_func), giving each group after the first its own copy of
# the vertex and duplicating boundary edges.  Grouping, copy creation
# and several loop headers are on elided lines.
467 def split_vertices(self, find_group_func, progress, *args):
468 vertex_count = len(self.vertices)
469 for i in range(vertex_count):
474 # Find all groups of faces on this vertex
478 groups.append(find_group_func(v, f, *args))
480 # Give groups after the first separate copies of the vertex
483 nv.index = len(self.vertices)
484 self.vertices.append(nv)
487 e_faces_in_g = [f for f in e.faces if f in g]
# An edge with some faces outside the group straddles the split
# boundary and must itself be duplicated.
491 if len(e_faces_in_g)<len(e.faces):
492 # Create a copy of an edge at the boundary of the group
494 ne.index = len(self.edges)
495 self.edges.append(ne)
497 ne.other_vertex(v).edges.append(ne)
499 for f in e_faces_in_g:
501 f.edges[f.edges.index(e)] = ne
# Point the (possibly duplicated) edge at the new vertex copy and
# refresh its index-derived key.
506 e.vertices[e.vertices.index(v)] = nv
509 e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)
511 # Filter out any edges that were removed from the original vertex
512 v.edges = [e for e in v.edges if v in e.vertices]
516 f.vertices[f.vertices.index(v)] = nv
519 progress.set_progress(i/vertex_count)
# Starting from face, collect the set of faces around vertex reachable
# through smooth edges (used by split_vertices to separate sharp fans).
# Most of the traversal is on elided lines.
521 def find_smooth_group(self, vertex, face):
# The two edges of this face that touch the vertex.
524 edges = [e for e in face.edges if vertex in e.vertices]
# Step to the neighboring face's other edge around the vertex.
536 e = f.other_edge(e, vertex)
# Collect the faces around vertex whose UV (in layer index) at this
# vertex matches face's UV there; used by split_vertices to separate
# UV seams.  Group accumulation and flag handling are on elided lines.
540 def find_uv_group(self, vertex, face, index):
541 uv = face.uvs[index][face.vertices.index(vertex)]
545 for f in vertex.faces:
# f.flag presumably marks faces already assigned to a group — TODO confirm.
546 if not f.flag and f.uvs[index][f.vertices.index(vertex)]==uv:
# Recompute each vertex normal as an angle-weighted average of the
# normals of its faces.  The per-face loop header, normalization and
# the no-faces branch structure are on elided lines.
552 def compute_normals(self, progress):
553 for i, v in enumerate(self.vertices):
554 v.normal = mathutils.Vector()
# Rotate the face's vertices so v is first, then take its two edges.
556 fv = f.pivot_vertices(v)
557 edge1 = fv[1].co-fv[0].co
558 edge2 = fv[-1].co-fv[0].co
# Degenerate (zero-length) edges cannot produce an angle weight.
559 if edge1.length and edge2.length:
560 # Use the angle between edges as a weighting factor. This gives
561 # more consistent normals on bends with an inequal number of
562 # faces on each side.
563 v.normal += f.normal*edge1.angle(edge2)
# Fallback normal for vertices with no usable faces.
568 v.normal = mathutils.Vector((0, 0, 1))
570 progress.set_progress(i/len(self.vertices))
# Accumulate tangent and binormal vectors per vertex from the UV-space
# derivatives of each adjacent face, angle-weighted like the normals.
# The du/dv computations, the div==0 guard and final normalization are
# on elided lines.
572 def compute_tbn(self, index, progress):
573 # This function is called at an early stage during UV preparation when
574 # face UVs are not available yet
575 layer_uvs = self.uv_layers[index].uvs
577 for i, v in enumerate(self.vertices):
578 v.tan = mathutils.Vector()
579 v.bino = mathutils.Vector()
# vi rotates the face's vertex indices so v comes first.
581 vi = f.pivot_vertex(v)
582 uv0 = layer_uvs[f.loop_indices[vi[0]]]
583 uv1 = layer_uvs[f.loop_indices[vi[1]]]
584 uv2 = layer_uvs[f.loop_indices[vi[-1]]]
589 edge1 = f.vertices[vi[1]].co-f.vertices[vi[0]].co
590 edge2 = f.vertices[vi[-1]].co-f.vertices[vi[0]].co
# Determinant of the UV-delta matrix; du/dv are presumably computed on
# the elided lines from uv0..uv2 — TODO confirm.
591 div = (du1*dv2-du2*dv1)
593 mul = edge1.angle(edge2)/div
594 v.tan += (edge1*dv2-edge2*dv1)*mul
595 v.bino += (edge2*du1-edge1*du2)*mul
602 progress.set_progress(i/len(self.vertices))
# Build triangle-strip style vertex sequences: reorder faces for cache
# locality, then chain consecutive faces into shared sequences where
# possible.  Several branch/else lines are elided from this view.
604 def prepare_sequence(self, progress):
605 progress.push_task("Reordering faces", 0.0, 0.5)
606 self.reorder_faces(progress)
608 progress.set_task("Building sequence", 0.5, 1.0)
610 for i, f in enumerate(self.faces):
613 # Rotate the first three vertices so that the new face can be added
614 if sequence[0] in f.vertices and sequence[1] not in f.vertices:
615 sequence.append(sequence[0])
617 elif sequence[2] not in f.vertices and sequence[1] in f.vertices:
618 sequence.insert(0, sequence[-1])
# The face does not continue the current strip; sequence restart
# handling is on elided lines.
621 if sequence[-1] not in f.vertices:
624 to_add = [v for v in f.vertices if v!=sequence[-1] and v!=sequence[-2]]
# Strip parity: duplicate the last vertex when the face's winding does
# not match the sequence's current even/odd position.
626 if (f.vertices[1]==sequence[-1]) != (len(sequence)%2==1):
628 sequence.append(sequence[-1])
# Start a fresh sequence from this face's vertices.
632 sequence = f.vertices[:]
633 self.vertex_sequence.append(sequence)
635 progress.set_progress(i/len(self.faces))
639 self.reorder_vertices()
# Reorder self.faces for post-transform vertex cache efficiency.
# Greedy: repeatedly emit the face whose vertices currently score
# highest, updating per-vertex scores based on cache position and
# remaining valence.  The main loop, best-face selection and several
# variable initializations (max_cache_size, reordered_faces,
# cached_vertices, n_processed) are on elided lines.
641 def reorder_faces(self, progress):
642 # Tom Forsyth's vertex cache optimization algorithm
643 # http://eelpi.gotdns.org/papers/fast_vert_cache_opt.html
# Tuning constants from Forsyth's paper.
648 last_triangle_score = 0.75
649 cache_decay_power = 1.5
650 valence_boost_scale = 2.0
651 valence_boost_power = -0.5
656 # Keep track of the score and number of unused faces for each vertex
657 vertex_info = [[0, len(v.faces)] for v in self.vertices]
658 for vi in vertex_info:
# Initial score is pure valence boost: rarely-used vertices first.
659 vi[0] = valence_boost_scale*(vi[1]**valence_boost_power)
667 # Previous iteration gave no candidate for best face (or this is
668 # the first iteration). Scan all faces for the highest score.
674 score = sum(vertex_info[v.index][0] for v in f.vertices)
682 reordered_faces.append(face)
685 for v in face.vertices:
# One fewer unused face references this vertex now.
686 vertex_info[v.index][1] -= 1
688 # Shuffle the vertex into the front of the cache
689 if v in cached_vertices:
690 cached_vertices.remove(v)
691 cached_vertices.insert(0, v)
693 # Update scores for all vertices in the cache
694 for i, v in enumerate(cached_vertices):
# Positions 0-2 (the just-emitted triangle) share a flat score;
# deeper cache positions decay toward zero.
697 score += last_triangle_score
698 elif i<max_cache_size:
699 score += (1-(i-3)/(max_cache_size-3))**cache_decay_power
700 if vertex_info[v.index][1]:
701 score += valence_boost_scale*(vertex_info[v.index][1]**valence_boost_power)
702 vertex_info[v.index][0] = score
# Pick the next candidate among faces of cached vertices.
706 for v in cached_vertices:
709 score = sum(vertex_info[fv.index][0] for fv in f.vertices)
# Evict vertices that fell out of the simulated cache.
714 del cached_vertices[max_cache_size:]
717 progress.set_progress(n_processed/len(self.faces))
719 self.faces = reordered_faces
720 for i, f in enumerate(self.faces):
# Renumber and reorder vertices to match their first appearance in the
# prepared vertex sequences, then refresh index-derived edge keys.
# Loop headers and the duplicate-skip logic are partly elided.
723 def reorder_vertices(self):
724 for v in self.vertices:
727 reordered_vertices = []
728 for s in self.vertex_sequence:
731 v.index = len(reordered_vertices)
732 reordered_vertices.append(v)
734 self.vertices = reordered_vertices
# Edge keys depend on vertex indices, so recompute them after renumbering.
737 e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)
# Detach all wrappers from the underlying Blender data (the per-element
# deletions, presumably of the _vertex/_layer etc. backreferences, are
# on elided lines) so the Blender meshes can be safely removed.
739 def drop_references(self):
740 for v in self.vertices:
748 for u in self.uv_layers:
# Convert obj (and any compound mesh children) into a single prepared
# export Mesh, running triangulation, smoothing, vertex-group, UV and
# sequencing passes, then discard the temporary Blender meshes.
# Several lines (the type check header, child traversal, Mesh creation
# and splice loop) are elided from this view.
753 def create_mesh_from_object(context, obj, progress):
755 raise Exception("Object is not a mesh")
757 progress.push_task("Preparing mesh", 0.0, 0.2)
# (object, world-relative matrix) pairs, starting with obj itself.
759 objs = [(obj, mathutils.Matrix())]
765 if c.type=="MESH" and c.compound:
766 objs.append((c, m*c.matrix_local))
# Evaluate each object to a temporary mesh with modifiers applied.
771 bmesh = o.to_mesh(context.scene, True, "PREVIEW")
772 bmeshes.append(bmesh)
774 # Object.to_mesh does not copy custom properties
775 bmesh.winding_test = o.data.winding_test
776 bmesh.smoothing = o.data.smoothing
777 bmesh.use_lines = o.data.use_lines
778 bmesh.vertex_groups = o.data.vertex_groups
779 bmesh.max_groups_per_vertex = o.data.max_groups_per_vertex
780 bmesh.use_uv = o.data.use_uv
781 bmesh.tbn_vecs = o.data.tbn_vecs
782 bmesh.tbn_uvtex = o.data.tbn_uvtex
792 progress.set_task("Triangulating", 0.2, 0.3)
793 mesh.prepare_triangles(progress)
794 progress.set_task("Smoothing", 0.3, 0.5)
795 mesh.prepare_smoothing(progress)
796 progress.set_task("Vertex groups", 0.5, 0.6)
797 mesh.prepare_vertex_groups(obj)
798 progress.set_task("Preparing UVs", 0.6, 0.8)
799 mesh.prepare_uv(obj, progress)
800 progress.set_task("Render sequence", 0.8, 1.0)
801 mesh.prepare_sequence(progress)
803 # Discard the temporary Blender meshes after making sure there's no
804 # references to the data
805 mesh.drop_references()
807 bpy.data.meshes.remove(m)