def make_edge_key(i1, i2):
    """Return a canonical (low, high) pair of vertex indices identifying an edge.

    The same key is produced regardless of the order the two indices are
    given in, so it can be used for edge lookups from either direction.
    """
    if i1 > i2:
        i1, i2 = i2, i1
    return (i1, i2)
# Wrap a Blender edge, or clone another Edge wrapper.
9 def __init__(self, edge):
# Cloning an existing wrapper: share the underlying Blender edge and copy
# the derived per-edge data.
10 if edge.__class__==Edge:
11 self._edge = edge._edge
12 self.smooth = edge.smooth
# NOTE(review): original lines 13-16 are not visible in this chunk
# (presumably the branch wrapping a raw Blender edge) -- confirm against
# the full file.
17 self.vertices = edge.vertices[:]
def __getattr__(self, attr):
    """Delegate attribute lookups that miss on the wrapper to the wrapped Blender edge."""
    wrapped = self._edge
    return getattr(wrapped, attr)
# Decide whether this edge should be shaded smooth, from the angle between
# the normals of the two faces that meet at it.
27 def check_smooth(self, limit):
28 if len(self.faces)!=2:
# NOTE(review): original lines 29-30 are missing here; presumably an early
# return for border/non-manifold edges -- confirm.
# d is the cosine of the angle between the two face normals.
31 d = self.faces[0].normal.dot(self.faces[1].normal)
# Smooth when both faces are flagged smooth and the angle is within the
# limit, or when the faces are nearly coplanar regardless of the flags.
32 self.smooth = ((d>limit and self.faces[0].use_smooth and self.faces[1].use_smooth) or d>0.99995)
# Return the face on the opposite side of this edge from f.
34 def other_face(self, f):
35 if f.index==self.faces[0].index:
36 if len(self.faces)>=2:
# NOTE(review): the remainder of this method (original lines 37-41) is not
# visible here; presumably returns the other face, or None for an edge with
# a single face -- confirm against the full file.
# Return the endpoint of this edge that is not v.
43 def other_vertex(self, v):
44 if v.index==self.vertices[0].index:
45 return self.vertices[1]
# (original line 46 is missing; presumably an else introducing the line below)
47 return self.vertices[0]
# Wrap a Blender vertex, or clone another Vertex wrapper.  Several source
# lines are elided from this view (including, presumably, the raw-vertex
# branch and the tangent copy).
51 def __init__(self, vertex):
52 if vertex.__class__==Vertex:
53 self._vertex = vertex._vertex
54 self.uvs = vertex.uvs[:]
# bino = binormal vector; computed per vertex in compute_tbn.
56 self.bino = vertex.bino
62 self.index = vertex.index
64 self.normal = vertex.normal
68 self.groups = vertex.groups[:]
def __getattr__(self, attr):
    """Delegate attribute lookups that miss on the wrapper to the wrapped Blender vertex."""
    wrapped = self._vertex
    return getattr(wrapped, attr)
# Order vertices by index.
# NOTE(review): __cmp__ and the cmp builtin exist only in Python 2, while
# other parts of this file use Python 3 features (e.g. max(..., default=));
# this is likely leftover dead code -- verify.
73 def __cmp__(self, other):
76 return cmp(self.index, other.index)
# Snapshot of one vertex-group assignment: group index plus weight.
80 def __init__(self, group):
# (original line 81 is missing; presumably stores the wrapped group as
# self._group, which __getattr__ below relies on -- confirm)
82 self.group = group.group
83 self.weight = group.weight
def __getattr__(self, attr):
    """Delegate attribute lookups that miss on the wrapper to the wrapped group."""
    wrapped = self._group
    return getattr(wrapped, attr)
# Wrap a Blender polygon; copies its index and vertex index list.  Some
# source lines are elided from this view (presumably storing self._face and
# other derived data).
90 def __init__(self, face):
92 self.index = face.index
94 self.vertices = face.vertices[:]
def __getattr__(self, attr):
    """Delegate attribute lookups that miss on the wrapper to the wrapped Blender polygon."""
    wrapped = self._face
    return getattr(wrapped, attr)
# Order faces by index.
# NOTE(review): __cmp__/cmp are Python 2 only; likely dead code on the
# Python 3 interpreter this file otherwise targets -- verify.
101 def __cmp__(self, other):
104 return cmp(self.index, other.index)
def pivot_vertex(self, v):
    """Return the face's vertex positions as a rotated index list.

    The result lists positions into self.vertices, starting at v's position
    and wrapping around, e.g. v at position 2 of a quad gives [2, 3, 0, 1].
    """
    count = len(self.vertices)
    start = self.vertices.index(v)
    positions = list(range(count))
    return positions[start:] + positions[:start]
# Rotate the face's vertex list so it begins at the start of the contiguous
# run of vertices contained in vt.
110 def pivot_vertices(self, *vt):
# flags[i] is True when vertex i of the face is one of the pivot vertices.
111 flags = [(v in vt) for v in self.vertices]
112 l = len(self.vertices)
# NOTE(review): the loop header (original line 113) is missing from this
# view -- presumably "for i in range(l):" -- confirm.
# Start at a flagged vertex whose cyclic predecessor is unflagged.
114 if flags[i] and not flags[(i+l-1)%l]:
115 return self.vertices[i:]+self.vertices[:i]
# Find the edge of this face joining vertices v1 and v2; raises KeyError if
# no such edge belongs to the face.
117 def get_edge(self, v1, v2):
118 key = make_edge_key(v1.index, v2.index)
# (the search over self.edges is on missing original lines 119-121)
122 raise KeyError("No edge %s"%(key,))
# Return another edge of this face touching vertex v, excluding edge e.
124 def other_edge(self, e, v):
# (loop header on missing original line 125; presumably "for d in self.edges:")
126 if d!=e and v in d.vertices:
def get_neighbors(self):
    """Return the faces adjacent to this one across its edges.

    Edges with no opposite face contribute nothing to the result.
    """
    candidates = (edge.other_face(self) for edge in self.edges)
    return [face for face in candidates if face]
# A loose edge (one with no faces) stored for export as a line primitive.
135 def __init__(self, e):
# (original line 136 is missing; presumably stores the source edge)
137 self.vertices = e.vertices[:]
# Wrapper for a Blender UV layer.  Collects the per-loop UV coordinates and
# parses an optional ".unitN" suffix of the layer name as an explicit
# texture unit number.  Many source lines are elided from this view.
142 def __init__(self, arg):
150 self.uvs = [d.uv for d in self.data]
155 dot = self.name.find('.')
# (a guard for dot being found is presumably on the missing line 156)
157 ext = self.name[dot:]
158 if ext.startswith(".unit") and ext[5:].isdigit():
159 self.unit = int(ext[5:])
def __getattr__(self, attr):
    """Delegate attribute lookups that miss on the wrapper to the wrapped UV layer."""
    wrapped = self._layer
    return getattr(wrapped, attr)
# Clone a Blender mesh into mutable wrapper objects (Vertex/Face/Edge/
# UvLayer/Line) so it can be triangulated, split and reordered without
# touching Blender's own data.  Many source lines are elided from this view.
168 def __init__(self, mesh):
# Copy exporter settings attached to the mesh datablock.
171 self.winding_test = mesh.winding_test
172 self.tbn_vecs = mesh.tbn_vecs
173 self.vertex_groups = mesh.vertex_groups
# Wrap all elements; group assignments are wrapped per vertex.
176 self.vertices = [Vertex(v) for v in mesh.vertices]
177 for v in self.vertices:
178 v.groups = [VertexGroup(g) for g in v.groups]
180 self.faces = [Face(f) for f in mesh.polygons]
181 self.edges = [Edge(e) for e in mesh.edges]
182 self.loops = mesh.loops[:]
183 self.materials = mesh.materials[:]
185 # Clone only the desired UV layers
186 if self.use_uv=='NONE' or not mesh.uv_layers:
# (the 'no UV layers' branch body is on missing lines 187-188)
189 self.uv_layers = [UvLayer(u) for u in mesh.uv_layers]
# Hidden layers are dropped; layers with an explicit unit sort first.
190 self.uv_layers = sorted([u for u in self.uv_layers if not u.hidden], key=(lambda u: (u.unit or 1000, u.name)))
192 if self.use_uv=='UNIT0':
193 self.uv_layers = [self.uv_layers[0]]
195 # Assign texture unit numbers to UV layers that lack one
196 next_unit = max((u.unit+1 for u in self.uv_layers if u.unit is not None), default=0)
197 for u in self.uv_layers:
# (unit assignment body is on missing lines 198-200)
202 # Rewrite links between elements to point to cloned data, or create links
203 # where they don't exist
204 edge_map = {e.key: e for e in self.edges}
# (face loop header is on a missing line)
206 if len(f.vertices)>4:
207 raise ValueError("Ngons are not supported")
# Replace stored vertex indices with the cloned Vertex objects.
209 f.vertices = [self.vertices[i] for i in f.vertices]
213 for k in f.edge_keys:
# (edge/face cross-linking is on missing lines)
219 e.vertices = [self.vertices[i] for i in e.vertices]
223 # Store loose edges as lines
225 self.lines = [Line(e) for e in self.edges if not e.faces]
229 self.vertex_sequence = []
def __getattr__(self, attr):
    """Delegate attribute lookups that miss on the wrapper to the wrapped Blender mesh."""
    wrapped = self._mesh
    return getattr(wrapped, attr)
# Apply a transformation matrix to every vertex in place.
234 def transform(self, matrix):
235 for v in self.vertices:
# (loop body on missing line 236; presumably transforms v.co by the matrix)
# Merge another Mesh's data into this one.  Both meshes must have identical
# UV layer sets; material indices and element indices of the appended data
# are remapped.  Some source lines are elided from this view.
238 def splice(self, other):
239 if len(self.uv_layers)!=len(other.uv_layers):
240 raise ValueError("Meshes have incompatible UV layers")
241 for i, u in enumerate(self.uv_layers):
242 if u.name!=other.uv_layers[i].name:
243 raise ValueError("Meshes have incompatible UV layers")
245 # Merge materials and form a lookup from source material indices to the
246 # merged material list
# (material_map initialization on missing line 247)
248 for m in other.materials:
249 if m in self.materials:
250 material_map.append(self.materials.index(m))
252 material_map.append(len(self.materials))
253 self.materials.append(m)
255 # Append data and adjust indices where necessary. Since the data is
256 # spliced from the source mesh, rebuilding references is not necessary.
257 for i, u in enumerate(self.uv_layers):
258 u.uvs += other.uv_layers[i].uvs
260 offset = len(self.vertices)
261 self.vertices += other.vertices
262 for v in self.vertices[offset:]:
# (vertex index adjustment on missing lines)
265 loop_offset = len(self.loops)
266 self.loops += other.loops
268 offset = len(self.faces)
269 self.faces += other.faces
270 for f in self.faces[offset:]:
# Shift loop references of appended faces past the original loop data.
272 f.loop_start += loop_offset
273 f.loop_indices = range(f.loop_start, f.loop_start+f.loop_total)
275 f.material_index = material_map[f.material_index]
277 offset = len(self.edges)
278 self.edges += other.edges
279 for e in self.edges[offset:]:
# Edge keys depend on vertex indices, which changed above.
281 e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)
283 self.lines += other.lines
# Split each quad into two triangles, choosing the diagonal that gives the
# flatter result.  The original face keeps three of its vertices; the new
# triangle and the cutting edge are appended.  Many source lines are elided
# from this view.
285 def prepare_triangles(self, progress):
286 face_count = len(self.faces)
287 for i in range(face_count):
# (fetch of f = self.faces[i] and a triangle early-out are on missing lines)
289 nverts = len(f.vertices)
293 # Calculate normals at each vertex of the face
295 for j in range(nverts):
296 edge_vecs.append(f.vertices[(j+1)%nverts].co-f.vertices[j].co)
299 for j in range(nverts):
# Normal of the corner formed by the two edges meeting at vertex j.
300 normals.append(edge_vecs[j-1].cross(edge_vecs[j]).normalized())
302 # Check which diagonal results in a flatter triangulation
303 flatness1 = normals[0].dot(normals[2])
304 flatness2 = normals[1].dot(normals[3])
305 cut_index = 1 if flatness1>flatness2 else 0
# Create the new face (nf) and the new cutting edge (ne).
308 nf.index = len(self.faces)
309 self.faces.append(nf)
312 ne.index = len(self.edges)
313 self.edges.append(ne)
315 nf.vertices = [f.vertices[cut_index], f.vertices[2], f.vertices[3]]
316 nf.loop_indices = [f.loop_indices[cut_index], f.loop_indices[2], f.loop_indices[3]]
317 for v in nf.vertices:
# (face back-links on missing lines)
320 ne.vertices = [f.vertices[cut_index], f.vertices[2+cut_index]]
321 for v in ne.vertices:
323 ne.key = make_edge_key(ne.vertices[0].index, ne.vertices[1].index)
# Shrink the original face to the first triangle.
326 f.vertices[3-cut_index].faces.remove(f)
327 del f.vertices[3-cut_index]
328 f.loop_indices = [f.loop_indices[0], f.loop_indices[1], f.loop_indices[2+cut_index]]
# Distribute the quad's edges between the two triangles, depending on
# which diagonal was cut (branch condition on a missing line).
332 nf.edges = [ne, f.edges[2], f.edges[3]]
333 f.edges = [f.edges[0], f.edges[1], ne]
335 nf.edges = [f.edges[1], f.edges[2], ne]
336 f.edges = [f.edges[0], ne, f.edges[3]]
# Reuse the corner normals computed above as the triangle normals.
342 f.normal = normals[1-cut_index]
343 nf.normal = normals[3-cut_index]
345 progress.set_progress(i/face_count)
# Mark edges as smooth or sharp, split vertices along sharp edges, and
# recompute normals unless Blender's own smoothing is kept.  Some source
# lines are elided from this view.
347 def prepare_smoothing(self, progress):
349 if self.smoothing=='NONE':
# (flat-shading branch on missing lines 350-353)
354 elif self.use_auto_smooth:
# Convert the auto-smooth angle to a cosine threshold for check_smooth.
355 smooth_limit = math.cos(self.auto_smooth_angle)
# (edge loop header on missing lines)
358 e.check_smooth(smooth_limit)
360 progress.push_task("Sharp edges", 0.0, 0.7)
361 self.split_vertices(self.find_smooth_group, progress)
363 if self.smoothing!='BLENDER':
364 progress.set_task("Updating normals", 0.7, 1.0)
365 self.compute_normals(progress)
# Limit each vertex to max_groups_per_vertex group influences (keeping the
# heaviest and rescaling so the total weight is preserved), then remap
# group indices to armature bone indices when the parent is an armature.
# Some source lines are elided from this view.
369 def prepare_vertex_groups(self, obj):
370 for v in self.vertices:
# (guard against vertices with no groups presumably on missing line 371)
372 weight_sum = sum(g.weight for g in v.groups)
373 v.groups = sorted(v.groups, key=(lambda g: g.weight), reverse=True)[:self.max_groups_per_vertex]
# Rescale the remaining weights so their sum matches the original total.
374 weight_scale = weight_sum/sum(g.weight for g in v.groups)
376 g.weight *= weight_scale
378 if obj.parent and obj.parent.type=="ARMATURE":
379 armature = obj.parent.data
380 bone_indices = {b.name: i for i, b in enumerate(armature.bones)}
381 group_index_map = {i: i for i in range(len(obj.vertex_groups))}
# NOTE(review): 'first_obj' is not defined anywhere in this view; this
# looks like it should be 'obj' -- verify against the full file.
382 for g in first_obj.vertex_groups:
383 if g.name in bone_indices:
384 group_index_map[g.index] = bone_indices[g.name]
386 for v in self.vertices:
# (inner loop over v.groups presumably on missing line 387)
388 g.group = group_index_map[g.group]
# Prepare UV data: extend array-atlas UVs with a layer coordinate, copy UVs
# to faces, split vertices per UV seam (TBN layer first so tangents can be
# computed on intact connectivity), then copy the final UVs onto vertices.
# Many source lines are elided from this view.
390 def prepare_uv(self, progress):
391 # Form a list of UV layers referenced by materials with the array atlas
# (comment continues on a missing line)
393 array_uv_layers = [t.uv_layer for m in self.materials if m.array_atlas for t in m.texture_slots if t and t.texture_coords=='UV']
394 array_uv_layers = [u for u in self.uv_layers if u.name in array_uv_layers]
# (face loop header on missing lines)
399 if f.material_index<len(self.materials):
400 mat = self.materials[f.material_index]
401 if mat and mat.array_atlas:
402 layer = mat.array_layer
# Append the atlas layer index as a third UV coordinate.
404 for l in array_uv_layers:
405 for i in f.loop_indices:
406 l.uvs[i] = mathutils.Vector((*l.uvs[i], layer))
408 # Copy UVs from layers to faces
# (face loop header on missing line 409)
410 for u in self.uv_layers:
411 f.uvs.append([u.uvs[i] for i in f.loop_indices])
413 prog_count = len(self.uv_layers)
416 # Split by the UV layer used for TBN vectors first so connectivity
417 # remains intact for TBN vector computation
420 uv_names = [u.name for u in self.uv_layers]
421 if self.tbn_uvtex in uv_names:
423 tbn_layer_index = uv_names.index(self.tbn_uvtex)
424 progress.push_task_slice("Computing TBN", 0, prog_count)
425 self.split_vertices(self.find_uv_group, progress, tbn_layer_index)
426 progress.set_task_slice(self.tbn_uvtex, 1, prog_count)
427 self.compute_tbn(tbn_layer_index, progress)
# (progress pop presumably on missing lines)
431 # Split by the remaining UV layers
432 for i, u in enumerate(self.uv_layers):
# Skip the TBN layer; it was already split above.
433 if i==tbn_layer_index:
436 progress.push_task_slice(u.name, prog_step, prog_count)
437 self.split_vertices(self.find_uv_group, progress, i)
441 # Copy UVs from faces to vertices
442 for v in self.vertices:
444 # All faces still connected to the vertex have the same UV value
446 i = f.vertices.index(v)
447 v.uvs = [u[i] for u in f.uvs]
# Fallback for vertices with no faces: zero UVs for every layer.
449 v.uvs = [(0.0, 0.0)]*len(self.uv_layers)
# Generic vertex-splitting pass: find_group_func partitions the faces
# around each vertex into groups, and every group after the first gets its
# own copy of the vertex (and of edges on the group boundary).  Many source
# lines are elided from this view.
451 def split_vertices(self, find_group_func, progress, *args):
452 vertex_count = len(self.vertices)
453 for i in range(vertex_count):
# (fetch of v and flag resets are on missing lines)
458 # Find all groups of faces on this vertex
462 groups.append(find_group_func(v, f, *args))
464 # Give groups after the first separate copies of the vertex
# (group loop header and vertex copy are on missing lines)
467 nv.index = len(self.vertices)
468 self.vertices.append(nv)
# (edge loop header on missing lines)
471 e_faces_in_g = [f for f in e.faces if f in g]
# Edge has faces both inside and outside the group: it lies on the
# group boundary and must be duplicated.
475 if len(e_faces_in_g)<len(e.faces):
476 # Create a copy of an edge at the boundary of the group
478 ne.index = len(self.edges)
479 self.edges.append(ne)
481 ne.other_vertex(v).edges.append(ne)
# Move the group's faces over to the new edge.
483 for f in e_faces_in_g:
485 f.edges[f.edges.index(e)] = ne
# Edge fully inside the group: retarget its endpoint to the copy.
490 e.vertices[e.vertices.index(v)] = nv
493 e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)
495 # Filter out any edges that were removed from the original vertex
496 v.edges = [e for e in v.edges if v in e.vertices]
# (face loop header on missing lines)
500 f.vertices[f.vertices.index(v)] = nv
503 progress.set_progress(i/vertex_count)
# Group-finder for split_vertices: collects the faces around the vertex
# reachable from the given face through smooth edges.  Most of the
# traversal (original lines 506-519) is elided from this view.
505 def find_smooth_group(self, vertex, face):
508 edges = [e for e in face.edges if vertex in e.vertices]
# Step across the current face to the next edge around the vertex.
520 e = f.other_edge(e, vertex)
# Group-finder for split_vertices: groups the faces around the vertex that
# share the same UV value for it in the given layer.  The tail of the
# method (original lines 526-528 and 531+) is elided from this view.
524 def find_uv_group(self, vertex, face, index):
# The UV of this vertex within the reference face.
525 uv = face.uvs[index][face.vertices.index(vertex)]
529 for f in vertex.faces:
# f.flag appears to mark faces already assigned to a group -- confirm.
530 if not f.flag and f.uvs[index][f.vertices.index(vertex)]==uv:
# Compute per-vertex normals as an angle-weighted average of adjacent face
# normals.  Some source lines are elided from this view.
536 def compute_normals(self, progress):
537 for i, v in enumerate(self.vertices):
538 v.normal = mathutils.Vector()
# (face loop header on missing line 539)
540 fv = f.pivot_vertices(v)
541 edge1 = fv[1].co-fv[0].co
542 edge2 = fv[-1].co-fv[0].co
543 if edge1.length and edge2.length:
544 # Use the angle between edges as a weighting factor. This gives
545 # more consistent normals on bends with an inequal number of
546 # faces on each side.
547 v.normal += f.normal*edge1.angle(edge2)
# (normalization of the accumulated normal is on missing lines;
# degenerate case falls back to the Z axis below)
552 v.normal = mathutils.Vector((0, 0, 1))
554 progress.set_progress(i/len(self.vertices))
# Compute per-vertex tangent (tan) and binormal (bino) vectors from the UV
# layer at 'index', accumulating angle-weighted contributions from each
# adjacent face.  Some source lines are elided from this view.
556 def compute_tbn(self, index, progress):
557 # This function is called at an early stage during UV preparation when
558 # face UVs are not available yet
559 layer_uvs = self.uv_layers[index].uvs
561 for i, v in enumerate(self.vertices):
562 v.tan = mathutils.Vector()
563 v.bino = mathutils.Vector()
# (face loop header on missing line 564)
565 vi = f.pivot_vertex(v)
# UVs of this vertex and its two neighbors within the face.
566 uv0 = layer_uvs[f.loop_indices[vi[0]]]
567 uv1 = layer_uvs[f.loop_indices[vi[1]]]
568 uv2 = layer_uvs[f.loop_indices[vi[-1]]]
# (du1/du2/dv1/dv2 UV deltas computed on missing lines 569-572)
573 edge1 = f.vertices[vi[1]].co-f.vertices[vi[0]].co
574 edge2 = f.vertices[vi[-1]].co-f.vertices[vi[0]].co
# Determinant of the UV delta matrix; guards against degenerate UVs
# (the zero check is presumably on missing line 576).
575 div = (du1*dv2-du2*dv1)
577 mul = edge1.angle(edge2)/div
578 v.tan += (edge1*dv2-edge2*dv1)*mul
579 v.bino += (edge2*du1-edge1*du2)*mul
# (normalization of tan/bino presumably on missing lines 580-585)
586 progress.set_progress(i/len(self.vertices))
# Build triangle-strip-like vertex sequences after reordering faces for
# cache efficiency.  Many source lines are elided from this view.
588 def prepare_sequence(self, progress):
589 progress.push_task("Reordering faces", 0.0, 0.5)
590 self.reorder_faces(progress)
592 progress.set_task("Building sequence", 0.5, 1.0)
594 for i, f in enumerate(self.faces):
# (check whether f can extend the current sequence is on missing lines)
597 # Rotate the first three vertices so that the new face can be added
598 if sequence[0] in f.vertices and sequence[1] not in f.vertices:
599 sequence.append(sequence[0])
# (rotation bookkeeping on missing lines)
601 elif sequence[2] not in f.vertices and sequence[1] in f.vertices:
602 sequence.insert(0, sequence[-1])
605 if sequence[-1] not in f.vertices:
# (sequence restart handling presumably on missing lines)
608 to_add = [v for v in f.vertices if v!=sequence[-1] and v!=sequence[-2]]
# Duplicate a vertex when the strip's winding parity doesn't match.
610 if (f.vertices[1]==sequence[-1]) != (len(sequence)%2==1):
612 sequence.append(sequence[-1])
# Start a fresh sequence from this face's vertices.
616 sequence = f.vertices[:]
617 self.vertex_sequence.append(sequence)
619 progress.set_progress(i/len(self.faces))
# Renumber vertices to match the order they appear in the sequences.
623 self.reorder_vertices()
# Reorder faces to maximize vertex cache hits, following Forsyth's
# linear-speed vertex cache optimization.  Many source lines are elided
# from this view (including the candidate bookkeeping between iterations).
625 def reorder_faces(self, progress):
626 # Tom Forsyth's vertex cache optimization algorithm
627 # http://eelpi.gotdns.org/papers/fast_vert_cache_opt.html
# Tuning constants from the paper.
632 last_triangle_score = 0.75
633 cache_decay_power = 1.5
634 valence_boost_scale = 2.0
635 valence_boost_power = -0.5
640 # Keep track of the score and number of unused faces for each vertex
641 vertex_info = [[0, len(v.faces)] for v in self.vertices]
# Initial score comes purely from valence (few remaining faces => boost).
642 for vi in vertex_info:
643 vi[0] = valence_boost_scale*(vi[1]**valence_boost_power)
651 # Previous iteration gave no candidate for best face (or this is
652 # the first iteration). Scan all faces for the highest score.
# A face's score is the sum of its vertices' scores.
658 score = sum(vertex_info[v.index][0] for v in f.vertices)
666 reordered_faces.append(face)
# Consume the chosen face: decrement remaining-face counts and move
# its vertices to the front of the simulated cache.
669 for v in face.vertices:
670 vertex_info[v.index][1] -= 1
672 # Shuffle the vertex into the front of the cache
673 if v in cached_vertices:
674 cached_vertices.remove(v)
675 cached_vertices.insert(0, v)
677 # Update scores for all vertices in the cache
678 for i, v in enumerate(cached_vertices):
# (score reset and the just-used-triangle branch are on missing lines)
681 score += last_triangle_score
682 elif i<max_cache_size:
# Score decays with cache position beyond the first three slots.
683 score += (1-(i-3)/(max_cache_size-3))**cache_decay_power
684 if vertex_info[v.index][1]:
685 score += valence_boost_scale*(vertex_info[v.index][1]**valence_boost_power)
686 vertex_info[v.index][0] = score
# Pick the best next face among those touching cached vertices.
690 for v in cached_vertices:
693 score = sum(vertex_info[fv.index][0] for fv in f.vertices)
# Trim the simulated cache back to its maximum size.
698 del cached_vertices[max_cache_size:]
701 progress.set_progress(n_processed/len(self.faces))
703 self.faces = reordered_faces
# Renumber faces to match the new order (assignment on a missing line).
704 for i, f in enumerate(self.faces):
# Renumber and reorder vertices to match their first appearance in the
# built vertex sequences; edge keys are refreshed afterwards.  Some source
# lines are elided from this view.
707 def reorder_vertices(self):
708 for v in self.vertices:
# (index reset presumably on missing lines 709-710)
711 reordered_vertices = []
712 for s in self.vertex_sequence:
# (per-sequence vertex loop and already-seen check on missing lines)
715 v.index = len(reordered_vertices)
716 reordered_vertices.append(v)
718 self.vertices = reordered_vertices
# Edge keys depend on vertex indices, which just changed.
721 e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)
# Drop references to the wrapped Blender data so the temporary meshes can
# be removed safely (see create_mesh_from_object).  Most per-element
# cleanup lines are elided from this view.
723 def drop_references(self):
724 for v in self.vertices:
732 for u in self.uv_layers:
# Convert a Blender object (plus any compound mesh children) into an
# exporter Mesh, running all preparation passes.  Many source lines are
# elided from this view.
737 def create_mesh_from_object(context, obj, progress):
# (the obj.type check guarding this raise is on a missing line)
739 raise Exception("Object is not a mesh")
741 progress.push_task("Preparing mesh", 0.0, 0.2)
# Collect the object and its compound children with their transforms.
743 objs = [(obj, mathutils.Matrix())]
749 if c.type=="MESH" and c.compound:
750 objs.append((c, m*c.matrix_local))
# Evaluate each object to a temporary mesh ("PREVIEW" settings).
755 bmesh = o.to_mesh(context.scene, True, "PREVIEW")
756 bmeshes.append(bmesh)
758 # Object.to_mesh does not copy custom properties
759 bmesh.winding_test = o.data.winding_test
760 bmesh.smoothing = o.data.smoothing
761 bmesh.use_lines = o.data.use_lines
762 bmesh.vertex_groups = o.data.vertex_groups
763 bmesh.max_groups_per_vertex = o.data.max_groups_per_vertex
764 bmesh.use_uv = o.data.use_uv
765 bmesh.tbn_vecs = o.data.tbn_vecs
766 bmesh.tbn_uvtex = o.data.tbn_uvtex
# (Mesh wrapping, transform and splice of the compound parts are on
# missing lines 767-775)
776 progress.set_task("Triangulating", 0.2, 0.3)
777 mesh.prepare_triangles(progress)
778 progress.set_task("Smoothing", 0.3, 0.5)
779 mesh.prepare_smoothing(progress)
780 progress.set_task("Vertex groups", 0.5, 0.6)
781 mesh.prepare_vertex_groups(obj)
782 progress.set_task("Preparing UVs", 0.6, 0.8)
783 mesh.prepare_uv(progress)
784 progress.set_task("Render sequence", 0.8, 1.0)
785 mesh.prepare_sequence(progress)
787 # Discard the temporary Blender meshes after making sure there's no
788 # references to the data
789 mesh.drop_references()
# (loop over bmeshes presumably on missing line 790)
791 bpy.data.meshes.remove(m)