def make_edge_key(i1, i2):
	"""Return a canonical, order-independent key for the edge joining two vertex indices."""
	if i1 > i2:
		i1, i2 = i2, i1
	return (i1, i2)
def __init__(self, edge):
	"""Wrap a Blender edge, or copy-construct from another Edge wrapper.

	The else-branch that wraps a raw Blender edge is elided in this view.
	"""
	if edge.__class__==Edge:
		# Copy constructor: share the underlying Blender edge, duplicate
		# the wrapper's mutable per-edge state.
		self._edge = edge._edge
		self.smooth = edge.smooth
		# [elided in this view] further copied attributes (faces, key, ...)
		self.vertices = edge.vertices[:]  # shallow copy so splitting can rewrite independently
def __getattr__(self, attr):
	# Delegate attributes not set on the wrapper to the underlying Blender edge.
	return getattr(self._edge, attr)
def check_smooth(self, limit):
	"""Decide whether this edge is rendered smooth.

	limit is the cosine of the maximum smoothing angle; faces whose normals
	agree more closely than that (and which both request smoothing) make
	the edge smooth.
	"""
	if len(self.faces)!=2:
		# [elided in this view] early-out body; boundary edges can't be smooth
	d = self.faces[0].normal.dot(self.faces[1].normal)
	# Smooth when both faces opt in and the angle is under the limit, or
	# when the faces are almost exactly coplanar (cos > 0.99995).
	self.smooth = ((d>limit and self.faces[0].use_smooth and self.faces[1].use_smooth) or d>0.99995)
def other_face(self, f):
	"""Return the face on the other side of this edge from f, if any."""
	if f.index==self.faces[0].index:
		if len(self.faces)>=2:
			# [elided in this view] presumably: return self.faces[1]
	# [elided in this view] presumably the branch returning self.faces[0]
def other_vertex(self, v):
	"""Return the endpoint of this edge that is not v."""
	if v.index==self.vertices[0].index:
		return self.vertices[1]
	# [elided in this view] presumably an else: here
	return self.vertices[0]
def __init__(self, vertex):
	"""Wrap a Blender vertex, or copy-construct from another Vertex wrapper.

	Interleaved lines — including the branch wrapping a raw Blender
	vertex — are elided in this view.
	"""
	if vertex.__class__==Vertex:
		# Copy constructor: share the Blender vertex, duplicate wrapper state.
		self._vertex = vertex._vertex
		self.uvs = vertex.uvs[:]
		# [elided in this view] presumably self.tan is copied alongside bino
		self.bino = vertex.bino
		self.index = vertex.index
		self.normal = vertex.normal
		self.groups = vertex.groups[:]
def __getattr__(self, attr):
	# Delegate attributes not set on the wrapper to the underlying Blender vertex.
	return getattr(self._vertex, attr)
def __cmp__(self, other):
	# Order vertices by their index.
	# NOTE(review): __cmp__ and cmp() are Python 2 only; under Python 3 this
	# method is never invoked and cmp() is undefined — confirm target runtime
	# and replace with __eq__/__lt__ if Python 3 ordering is needed.
	return cmp(self.index, other.index)
def __init__(self, group):
	"""Snapshot a Blender vertex-group assignment (group index + weight)."""
	# NOTE(review): __getattr__ below reads self._group, but no visible line
	# assigns it — presumably set in a line elided from this view.
	self.group = group.group
	self.weight = group.weight
def __getattr__(self, attr):
	# Delegate attributes not set on the wrapper to the wrapped group element.
	return getattr(self._group, attr)
def __init__(self, face):
	"""Wrap a Blender polygon.  Several attribute initialisations
	(e.g. self._face, uvs, edges, flag) are elided in this view."""
	self.index = face.index
	self.vertices = face.vertices[:]  # shallow copy; rewritten to Vertex wrappers later
def __getattr__(self, attr):
	# Delegate attributes not set on the wrapper to the underlying Blender polygon.
	return getattr(self._face, attr)
def __cmp__(self, other):
	# Order faces by their index.
	# NOTE(review): __cmp__/cmp() are Python 2 only — dead code under Python 3.
	return cmp(self.index, other.index)
def pivot_vertex(self, v):
	"""Return this face's vertex positions (as indices into self.vertices),
	rotated so that v's position comes first."""
	count = len(self.vertices)
	start = self.vertices.index(v)
	rotated = []
	for offset in range(count):
		rotated.append((start+offset)%count)
	return rotated
def pivot_vertices(self, *vt):
	"""Return this face's vertices rotated so that the run of vertices in vt
	comes first.  The enclosing scan loop is elided in this view."""
	# Mark which of the face's vertices are among the requested ones
	flags = [(v in vt) for v in self.vertices]
	l = len(self.vertices)
	# [elided in this view] presumably: for i in range(l):
	# Rotate to the first flagged vertex whose predecessor is unflagged
	if flags[i] and not flags[(i+l-1)%l]:
		return self.vertices[i:]+self.vertices[:i]
def get_edge(self, v1, v2):
	"""Return this face's edge joining vertices v1 and v2.

	Raises KeyError if the face has no such edge.  The lookup loop over
	self.edges is elided in this view.
	"""
	key = make_edge_key(v1.index, v2.index)
	# [elided in this view] loop over self.edges returning the matching edge
	raise KeyError("No edge %s"%(key,))
def other_edge(self, e, v):
	"""Return this face's other edge through vertex v (i.e. not e).
	The enclosing loop and return are elided in this view."""
	# [elided in this view] presumably: for d in self.edges:
	if d!=e and v in d.vertices:
		# [elided in this view] presumably: return d
def get_neighbors(self):
	"""Return the faces that share an edge with this one, skipping open edges."""
	result = []
	for edge in self.edges:
		neighbor = edge.other_face(self)
		if neighbor:
			result.append(neighbor)
	return result
def __init__(self, e):
	"""Create a renderable line from a loose edge (an edge with no faces)."""
	# [elided in this view] one initialisation line is not visible here
	self.vertices = e.vertices[:]
def __init__(self, arg):
	"""Wrap a Blender UV layer.  The construction branches that set
	self._layer / self.name / self.data are elided in this view."""
	# Snapshot the per-loop UV coordinates
	self.uvs = [d.uv for d in self.data]

	# Derive an explicit texture unit from a ".unitN" suffix in the name
	dot = self.name.find('.')
	# [elided in this view] presumably guarded by dot>=0
	ext = self.name[dot:]
	if ext.startswith(".unit") and ext[5:].isdigit():
		self.unit = int(ext[5:])
def __getattr__(self, attr):
	# Delegate attributes not set on the wrapper to the underlying UV layer.
	return getattr(self._layer, attr)
def __init__(self, mesh):
	"""Deep-copy a Blender mesh into the exporter's own mutable wrappers
	(Vertex/VertexGroup/Face/Edge/UvLayer/Line).

	Several lines — including loop headers and early-outs — are elided in
	this view; notes below mark where.
	"""
	self.name = mesh.name
	self.winding_test = mesh.winding_test
	self.tbn_vecs = mesh.tbn_vecs
	self.vertex_groups = mesh.vertex_groups

	# Wrap all element types so they can be mutated during export
	self.vertices = [Vertex(v) for v in mesh.vertices]
	for v in self.vertices:
		v.groups = [VertexGroup(g) for g in v.groups]

	self.faces = [Face(f) for f in mesh.polygons]
	self.edges = [Edge(e) for e in mesh.edges]
	self.loops = mesh.loops[:]
	self.materials = mesh.materials[:]

	# Clone only the desired UV layers
	if self.use_uv=='NONE' or not mesh.uv_layers:
		# [elided in this view] presumably: self.uv_layers = [] and skip below
	self.uv_layers = [UvLayer(u) for u in mesh.uv_layers]
	# Drop hidden layers; explicit units sort first, unnumbered layers last
	self.uv_layers = sorted([u for u in self.uv_layers if not u.hidden], key=(lambda u: (u.unit or 1000, u.name)))

	if self.use_uv=='UNIT0':
		# Keep only the first (lowest-unit) layer
		self.uv_layers = [self.uv_layers[0]]

	# Assign texture unit numbers to UV layers that lack one
	next_unit = max((u.unit+1 for u in self.uv_layers if u.unit is not None), default=0)
	for u in self.uv_layers:
		# [elided in this view] presumably assigns next_unit to layers without one

	# Rewrite links between elements to point to cloned data, or create links
	# where they don't exist
	edge_map = {e.key: e for e in self.edges}
	# [elided in this view] enclosing loop: for f in self.faces:
	if len(f.vertices)>4:
		raise ValueError("Ngons are not supported")
	f.vertices = [self.vertices[i] for i in f.vertices]
	for k in f.edge_keys:
		# [elided in this view] cross-link face and edge_map[k]
	# [elided in this view] enclosing loop: for e in self.edges:
	e.vertices = [self.vertices[i] for i in e.vertices]

	# Store loose edges as lines
	self.lines = [Line(e) for e in self.edges if not e.faces]

	# Filled in later by prepare_sequence
	self.vertex_sequence = []
def __getattr__(self, attr):
	# Delegate attributes not set on the wrapper to the underlying Blender mesh.
	return getattr(self._mesh, attr)
def transform(self, matrix):
	"""Apply a transformation matrix to all vertex positions.
	The loop body (presumably v.co = matrix*v.co) is elided in this view."""
	for v in self.vertices:
def splice(self, other):
	"""Append another Mesh's data to this one, adjusting indices so the
	combined mesh stays consistent.

	Raises ValueError if the meshes' UV layers differ in count or names.
	Some lines (loop bodies, a material_map init, an else-branch) are
	elided in this view.
	"""
	if len(self.uv_layers)!=len(other.uv_layers):
		raise ValueError("Meshes have incompatible UV layers")
	for i, u in enumerate(self.uv_layers):
		if u.name!=other.uv_layers[i].name:
			raise ValueError("Meshes have incompatible UV layers")

	# Merge materials and form a lookup from source material indices to the
	# merged material list
	# [elided in this view] presumably: material_map = []
	for m in other.materials:
		if m in self.materials:
			material_map.append(self.materials.index(m))
		# [elided in this view] presumably an else: before the two lines below
		material_map.append(len(self.materials))
		self.materials.append(m)

	# Append data and adjust indices where necessary. Since the data is
	# spliced from the source mesh, rebuilding references is not necessary.
	for i, u in enumerate(self.uv_layers):
		u.uvs += other.uv_layers[i].uvs

	offset = len(self.vertices)
	self.vertices += other.vertices
	for v in self.vertices[offset:]:
		# [elided in this view] presumably: v.index += offset

	loop_offset = len(self.loops)
	self.loops += other.loops

	offset = len(self.faces)
	self.faces += other.faces
	for f in self.faces[offset:]:
		# Shift loop references by the number of pre-existing loops
		f.loop_start += loop_offset
		f.loop_indices = range(f.loop_start, f.loop_start+f.loop_total)
		# Remap to the merged material list
		f.material_index = material_map[f.material_index]

	offset = len(self.edges)
	self.edges += other.edges
	for e in self.edges[offset:]:
		# Vertex indices changed, so edge keys must be recomputed
		e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)

	self.lines += other.lines
def prepare_triangles(self, progress):
	"""Split each quad into two triangles along the flatter diagonal.

	Several lines (the current-face binding, triangle skip, container
	initialisations, the cut_index branch structure) are elided in this view.
	"""
	face_count = len(self.faces)
	for i in range(face_count):
		# [elided in this view] f = self.faces[i]; skip faces already triangles
		nverts = len(f.vertices)

		# Calculate normals at each vertex of the face
		# [elided in this view] edge_vecs initialisation
		for j in range(nverts):
			edge_vecs.append(f.vertices[(j+1)%nverts].co-f.vertices[j].co)
		# [elided in this view] normals initialisation
		for j in range(nverts):
			normals.append(edge_vecs[j-1].cross(edge_vecs[j]).normalized())

		# Check which diagonal results in a flatter triangulation
		flatness1 = normals[0].dot(normals[2])
		flatness2 = normals[1].dot(normals[3])
		cut_index = 1 if flatness1>flatness2 else 0

		# New triangle for the far half of the quad
		# [elided in this view] nf = Face(f) or similar
		nf.index = len(self.faces)
		self.faces.append(nf)

		# New edge along the cut diagonal
		# [elided in this view] ne = Edge(...)
		ne.index = len(self.edges)
		self.edges.append(ne)

		nf.vertices = [f.vertices[cut_index], f.vertices[2], f.vertices[3]]
		nf.loop_indices = [f.loop_indices[cut_index], f.loop_indices[2], f.loop_indices[3]]
		for v in nf.vertices:
			# [elided in this view] presumably: v.faces.append(nf)

		ne.vertices = [f.vertices[cut_index], f.vertices[2+cut_index]]
		for v in ne.vertices:
			# [elided in this view] presumably: v.edges.append(ne)
		ne.key = make_edge_key(ne.vertices[0].index, ne.vertices[1].index)

		# Shrink the original quad down to the near triangle
		f.vertices[3-cut_index].faces.remove(f)
		del f.vertices[3-cut_index]
		f.loop_indices = [f.loop_indices[0], f.loop_indices[1], f.loop_indices[2+cut_index]]

		# Redistribute the quad's edges between f and nf; the two pairs below
		# belong to opposite branches of an elided if cut_index==0 / else.
		nf.edges = [ne, f.edges[2], f.edges[3]]
		f.edges = [f.edges[0], f.edges[1], ne]
		# [elided in this view] else branch:
		nf.edges = [f.edges[1], f.edges[2], ne]
		f.edges = [f.edges[0], ne, f.edges[3]]
		# [elided in this view] re-link the moved edges' face lists

		# Each triangle keeps the vertex-normal matching its own plane
		f.normal = normals[1-cut_index]
		nf.normal = normals[3-cut_index]

		progress.set_progress(i/face_count)
def prepare_smoothing(self, progress):
	"""Classify edges as smooth or sharp, split vertices on sharp edges, and
	recompute normals when not deferring to Blender's own smoothing.

	The default smooth_limit, the 'NONE' branch body and the edge loop are
	elided in this view.
	"""
	# [elided in this view] default smooth_limit initialisation
	if self.smoothing=='NONE':
		# [elided in this view] render everything flat
	elif self.use_auto_smooth:
		# Auto-smooth: compare face-normal dot products against the cosine
		# of the configured angle threshold.
		smooth_limit = math.cos(self.auto_smooth_angle)

	# [elided in this view] enclosing loop: for e in self.edges:
	e.check_smooth(smooth_limit)

	progress.push_task("Sharp edges", 0.0, 0.7)
	self.split_vertices(self.find_smooth_group, progress)

	if self.smoothing!='BLENDER':
		progress.set_task("Updating normals", 0.7, 1.0)
		self.compute_normals(progress)

	# [elided in this view] presumably: progress.pop_task()
def prepare_vertex_groups(self, obj):
	"""Normalize per-vertex group weights and remap group indices to bones.

	Keeps at most self.max_groups_per_vertex highest-weighted groups per
	vertex and rescales the survivors so the vertex's total influence is
	preserved.  If obj is parented to an armature, group indices are
	rewritten to the indices of the armature bones with matching names.
	"""
	for v in self.vertices:
		if v.groups:
			weight_sum = sum(g.weight for g in v.groups)
			# Keep the most influential groups only
			v.groups = sorted(v.groups, key=(lambda g: g.weight), reverse=True)[:self.max_groups_per_vertex]
			kept_sum = sum(g.weight for g in v.groups)
			# Renormalize so total influence stays the same (skip all-zero
			# weights, which would otherwise divide by zero)
			if kept_sum:
				weight_scale = weight_sum/kept_sum
				for g in v.groups:
					g.weight *= weight_scale

	if obj.parent and obj.parent.type=="ARMATURE":
		armature = obj.parent.data
		bone_indices = {b.name: i for i, b in enumerate(armature.bones)}
		group_index_map = {i: i for i in range(len(obj.vertex_groups))}
		# Bug fix: this loop previously read "first_obj.vertex_groups", but no
		# name first_obj exists in this scope (NameError); the groups being
		# remapped are those of obj itself, as the map above shows.
		for g in obj.vertex_groups:
			if g.name in bone_indices:
				group_index_map[g.index] = bone_indices[g.name]

		for v in self.vertices:
			for g in v.groups:
				g.group = group_index_map[g.group]
def prepare_uv(self, progress):
	"""Bake UV data onto faces and vertices, splitting vertices at UV seams.

	Several lines (loop headers, pop_task_slice calls, continue statements,
	the vertex-has-faces guard) are elided in this view.
	"""
	# Form a list of UV layers referenced by materials with the array atlas
	# property enabled
	array_uv_layers = [t.uv_layer for m in self.materials if m.array_atlas for t in m.texture_slots if t and t.texture_coords=='UV']
	array_uv_layers = [u for u in self.uv_layers if u.name in array_uv_layers]

	# [elided in this view] enclosing loop: for f in self.faces:
	if f.material_index<len(self.materials):
		mat = self.materials[f.material_index]
		if mat and mat.array_atlas:
			layer = mat.array_layer

	# Extend 2D UVs with the atlas layer index as a third coordinate
	for l in array_uv_layers:
		for i in f.loop_indices:
			l.uvs[i] = mathutils.Vector((*l.uvs[i], layer))

	# Copy UVs from layers to faces
	for u in self.uv_layers:
		f.uvs.append([u.uvs[i] for i in f.loop_indices])

	prog_count = len(self.uv_layers)

	# Split by the UV layer used for TBN vectors first so connectivity
	# remains intact for TBN vector computation
	uv_names = [u.name for u in self.uv_layers]
	if self.tbn_uvtex in uv_names:
		tbn_layer_index = uv_names.index(self.tbn_uvtex)
		progress.push_task_slice("Computing TBN", 0, prog_count)
		self.split_vertices(self.find_uv_group, progress, tbn_layer_index)
		progress.set_task_slice(self.tbn_uvtex, 1, prog_count)
		self.compute_tbn(tbn_layer_index, progress)
		# [elided in this view] presumably: progress.pop_task_slice()

	# Split by the remaining UV layers
	for i, u in enumerate(self.uv_layers):
		if i==tbn_layer_index:
			# [elided in this view] presumably: continue
		progress.push_task_slice(u.name, prog_step, prog_count)
		self.split_vertices(self.find_uv_group, progress, i)
		# [elided in this view] presumably: progress.pop_task_slice()

	# Copy UVs from faces to vertices
	for v in self.vertices:
		# [elided in this view] guard: if v.faces, with f = v.faces[0]
		# All faces still connected to the vertex have the same UV value
		i = f.vertices.index(v)
		v.uvs = [u[i] for u in f.uvs]
		# [elided in this view] else branch: vertex without faces gets zeros
		v.uvs = [(0.0, 0.0)]*len(self.uv_layers)
def split_vertices(self, find_group_func, progress, *args):
	"""Split vertices lying on a discontinuity (sharp edge or UV seam) into
	one copy per connected face group returned by find_group_func.

	Several lines (loop headers, object construction, guards) are elided
	in this view.
	"""
	vertex_count = len(self.vertices)
	for i in range(vertex_count):
		# [elided in this view] v = self.vertices[i]; groups = [] and face scan

		# Find all groups of faces on this vertex
		groups.append(find_group_func(v, f, *args))

		# Give groups after the first separate copies of the vertex
		# [elided in this view] for g in groups[1:]: nv = Vertex(v)
		nv.index = len(self.vertices)
		self.vertices.append(nv)

		# [elided in this view] enclosing loop: for e in v.edges:
		e_faces_in_g = [f for f in e.faces if f in g]
		# [elided in this view] skip edges with no faces in this group
		if len(e_faces_in_g)<len(e.faces):
			# Create a copy of an edge at the boundary of the group
			# [elided in this view] ne = Edge(e)
			ne.index = len(self.edges)
			self.edges.append(ne)

			# Attach the copy to the far endpoint as well
			ne.other_vertex(v).edges.append(ne)

			for f in e_faces_in_g:
				f.edges[f.edges.index(e)] = ne
			# [elided in this view] detach the moved faces from the original edge

		# Point the (possibly copied) edge at the new vertex copy
		e.vertices[e.vertices.index(v)] = nv

		# Vertex index changed, so the key must be refreshed
		e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)

		# Filter out any edges that were removed from the original vertex
		v.edges = [e for e in v.edges if v in e.vertices]

		# [elided in this view] enclosing loop: for f in g:
		f.vertices[f.vertices.index(v)] = nv

		progress.set_progress(i/vertex_count)
def find_smooth_group(self, vertex, face):
	"""Collect the faces around vertex reachable from face across smooth
	edges only (a smoothing group for split_vertices).

	Most of the traversal, including group accumulation and the return,
	is elided in this view.
	"""
	# Edges of the starting face that touch the pivot vertex
	edges = [e for e in face.edges if vertex in e.vertices]
	# [elided in this view] walk face-to-face while the shared edge is smooth
	e = f.other_edge(e, vertex)
	# [elided in this view] return the collected group
def find_uv_group(self, vertex, face, index):
	"""Collect the faces around vertex whose UV at that vertex (in layer
	`index`) matches face's — i.e. the faces on the same side of a UV seam.

	Group initialisation, flag setting and the return are elided in this view.
	"""
	uv = face.uvs[index][face.vertices.index(vertex)]
	# [elided in this view] group initialisation / marking `face` visited
	for f in vertex.faces:
		# f.flag guards against visiting a face twice across calls
		if not f.flag and f.uvs[index][f.vertices.index(vertex)]==uv:
			# [elided in this view] add f to the group and set f.flag
	# [elided in this view] return the group
def compute_normals(self, progress):
	"""Recompute per-vertex normals as angle-weighted averages of the
	adjacent face normals.

	The face loop header, the normalization step and the degenerate-case
	guard are elided in this view.
	"""
	for i, v in enumerate(self.vertices):
		v.normal = mathutils.Vector()
		# [elided in this view] enclosing loop: for f in v.faces:
		fv = f.pivot_vertices(v)
		edge1 = fv[1].co-fv[0].co
		edge2 = fv[-1].co-fv[0].co
		if edge1.length and edge2.length:
			# Use the angle between edges as a weighting factor. This gives
			# more consistent normals on bends with an inequal number of
			# faces on each side.
			v.normal += f.normal*edge1.angle(edge2)
		# [elided in this view] normalize the accumulated normal; the line
		# below is presumably a fallback for degenerate vertices
		v.normal = mathutils.Vector((0, 0, 1))

		progress.set_progress(i/len(self.vertices))
def compute_tbn(self, index, progress):
	"""Compute tangent (v.tan) and binormal (v.bino) vectors for every vertex
	from the UVs of layer `index`.

	The face loop header, UV-delta computation (du1/dv1/du2/dv2), the
	division-by-zero guard and the final normalization are elided in this view.
	"""
	# This function is called at an early stage during UV preparation when
	# face UVs are not available yet
	layer_uvs = self.uv_layers[index].uvs

	for i, v in enumerate(self.vertices):
		v.tan = mathutils.Vector()
		v.bino = mathutils.Vector()
		# [elided in this view] enclosing loop: for f in v.faces:
		vi = f.pivot_vertex(v)
		uv0 = layer_uvs[f.loop_indices[vi[0]]]
		uv1 = layer_uvs[f.loop_indices[vi[1]]]
		uv2 = layer_uvs[f.loop_indices[vi[-1]]]
		# [elided in this view] du1/dv1/du2/dv2 = UV deltas of the two edges
		edge1 = f.vertices[vi[1]].co-f.vertices[vi[0]].co
		edge2 = f.vertices[vi[-1]].co-f.vertices[vi[0]].co
		div = (du1*dv2-du2*dv1)
		# [elided in this view] presumably guarded by "if div:" — degenerate
		# UV mappings would otherwise divide by zero
		# Angle-weighted contribution, mirroring compute_normals
		mul = edge1.angle(edge2)/div
		v.tan += (edge1*dv2-edge2*dv1)*mul
		v.bino += (edge2*du1-edge1*du2)*mul
		# [elided in this view] normalize v.tan and v.bino

		progress.set_progress(i/len(self.vertices))
def prepare_sequence(self, progress):
	"""Build triangle-strip-like vertex sequences from the (re)ordered faces.

	Several lines (the current-sequence guard, vertex drops, the else
	structure around strip continuation) are elided in this view.
	"""
	progress.push_task("Reordering faces", 0.0, 0.5)
	self.reorder_faces(progress)

	progress.set_task("Building sequence", 0.5, 1.0)
	# [elided in this view] presumably: sequence = None
	for i, f in enumerate(self.faces):
		# [elided in this view] guard: if sequence:
		# Rotate the first three vertices so that the new face can be added
		if sequence[0] in f.vertices and sequence[1] not in f.vertices:
			sequence.append(sequence[0])
			# [elided in this view] drop the front vertex
		elif sequence[2] not in f.vertices and sequence[1] in f.vertices:
			sequence.insert(0, sequence[-1])
			# [elided in this view] drop the back vertex

		if sequence[-1] not in f.vertices:
			# [elided in this view] the face cannot continue the strip
		# Vertices of this face not already at the end of the sequence
		to_add = [v for v in f.vertices if v!=sequence[-1] and v!=sequence[-2]]
		# [elided in this view] ordering of to_add when two vertices remain;
		# the parity test below decides whether winding must be fixed up
		if (f.vertices[1]==sequence[-1]) != (len(sequence)%2==1):
			# Duplicate the last vertex (degenerate triangle) to fix winding
			sequence.append(sequence[-1])
		# [elided in this view] extend the sequence with to_add

		# Start a fresh sequence from this face
		sequence = f.vertices[:]
		self.vertex_sequence.append(sequence)

		progress.set_progress(i/len(self.faces))

	# [elided in this view] presumably: progress.pop_task()
	self.reorder_vertices()
def reorder_faces(self, progress):
	"""Reorder faces to maximize post-transform vertex cache hits.

	Several lines (cache/result initialisation, the outer processing loop,
	best-face tracking) are elided in this view.
	"""
	# Tom Forsyth's vertex cache optimization algorithm
	# http://eelpi.gotdns.org/papers/fast_vert_cache_opt.html

	# Scoring constants from the paper
	last_triangle_score = 0.75
	cache_decay_power = 1.5
	valence_boost_scale = 2.0
	valence_boost_power = -0.5

	# [elided in this view] max_cache_size / cached_vertices / reordered_faces

	# Keep track of the score and number of unused faces for each vertex
	vertex_info = [[0, len(v.faces)] for v in self.vertices]
	for vi in vertex_info:
		# Initial score: rarely-used vertices are boosted so isolated faces
		# get emitted early rather than dangling at the end
		vi[0] = valence_boost_scale*(vi[1]**valence_boost_power)

	# [elided in this view] main loop over faces to process
	# Previous iteration gave no candidate for best face (or this is
	# the first iteration). Scan all faces for the highest score.
	score = sum(vertex_info[v.index][0] for v in f.vertices)
	# [elided in this view] track the best-scoring face

	reordered_faces.append(face)
	# [elided in this view] mark `face` as processed

	for v in face.vertices:
		vertex_info[v.index][1] -= 1

		# Shuffle the vertex into the front of the cache
		if v in cached_vertices:
			cached_vertices.remove(v)
		cached_vertices.insert(0, v)

	# Update scores for all vertices in the cache
	for i, v in enumerate(cached_vertices):
		# [elided in this view] score = 0 and the "if i<3:" branch header;
		# the three most recent vertices share a fixed score
		score += last_triangle_score
		elif i<max_cache_size:
			# Deeper cache positions decay towards zero
			score += (1-(i-3)/(max_cache_size-3))**cache_decay_power
		if vertex_info[v.index][1]:
			# Boost vertices that still have unprocessed faces
			score += valence_boost_scale*(vertex_info[v.index][1]**valence_boost_power)
		vertex_info[v.index][0] = score

	# Pick the next candidate face from faces touching the cache
	for v in cached_vertices:
		# [elided in this view] loop over v's unprocessed faces
		score = sum(vertex_info[fv.index][0] for fv in f.vertices)
		# [elided in this view] track the best candidate

	# Evict entries beyond the simulated cache size
	del cached_vertices[max_cache_size:]

	progress.set_progress(n_processed/len(self.faces))

	self.faces = reordered_faces
	for i, f in enumerate(self.faces):
		# [elided in this view] presumably: f.index = i
def reorder_vertices(self):
	"""Renumber vertices in the order they first appear in the vertex
	sequences, then refresh edge keys.

	The reset marker, the inner sequence loop and the edge loop header are
	elided in this view.
	"""
	for v in self.vertices:
		# [elided in this view] mark vertex as unassigned (e.g. v.index = -1)

	reordered_vertices = []
	for s in self.vertex_sequence:
		# [elided in this view] for v in s, skipping already-assigned vertices:
		v.index = len(reordered_vertices)
		reordered_vertices.append(v)

	self.vertices = reordered_vertices

	# Vertex indices changed, so edge keys must be recomputed
	# [elided in this view] enclosing loop: for e in self.edges:
	e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)
def drop_references(self):
	"""Clear references to Blender data so the temporary meshes can be freed
	safely.  Both loop bodies are elided in this view."""
	for v in self.vertices:
		# [elided in this view] presumably clears v._vertex (and group refs)
	for u in self.uv_layers:
		# [elided in this view] presumably clears u._layer
def create_mesh_from_object(context, obj, progress):
	"""Convert a Blender object (and any compound mesh children) into an
	export Mesh, running the full preparation pipeline.

	Many lines (the type guard, child traversal, the per-object loop, the
	wrap/transform/splice step, cleanup loop and return) are elided in this
	view.
	"""
	# [elided in this view] guard, presumably: if obj.type!="MESH":
	raise Exception("Object is not a mesh")

	progress.push_task("Preparing mesh", 0.0, 0.2)

	# Collect the object and compound children together with their transforms
	objs = [(obj, mathutils.Matrix())]
	# [elided in this view] traversal of children c with accumulated matrix m
	if c.type=="MESH" and c.compound:
		# NOTE(review): matrix `*` multiplication and the to_mesh signature
		# below are the pre-2.80 Blender Python API — confirm target version
		objs.append((c, m*c.matrix_local))

	# [elided in this view] enclosing loop, presumably: for o, m in objs:
	bmesh = o.to_mesh(context.scene, True, "PREVIEW")
	bmeshes.append(bmesh)

	# Object.to_mesh does not copy custom properties
	bmesh.winding_test = o.data.winding_test
	bmesh.smoothing = o.data.smoothing
	bmesh.use_lines = o.data.use_lines
	bmesh.vertex_groups = o.data.vertex_groups
	bmesh.max_groups_per_vertex = o.data.max_groups_per_vertex
	bmesh.use_uv = o.data.use_uv
	bmesh.tbn_vecs = o.data.tbn_vecs
	bmesh.tbn_uvtex = o.data.tbn_uvtex

	# [elided in this view] wrap into Mesh, transform and splice the pieces
	mesh.name = obj.data.name

	# Run the preparation pipeline with progress sub-ranges
	progress.set_task("Triangulating", 0.2, 0.3)
	mesh.prepare_triangles(progress)
	progress.set_task("Smoothing", 0.3, 0.5)
	mesh.prepare_smoothing(progress)
	progress.set_task("Vertex groups", 0.5, 0.6)
	mesh.prepare_vertex_groups(obj)
	progress.set_task("Preparing UVs", 0.6, 0.8)
	mesh.prepare_uv(progress)
	progress.set_task("Render sequence", 0.8, 1.0)
	mesh.prepare_sequence(progress)

	# Discard the temporary Blender meshes after making sure there's no
	# references to the data
	mesh.drop_references()
	# [elided in this view] enclosing loop, presumably: for m in bmeshes:
	bpy.data.meshes.remove(m)
	# [elided in this view] presumably: progress.pop_task(); return mesh