import math
import itertools
import mathutils

def make_edge_key(i1, i2):
    return (min(i1, i2), max(i1, i2))
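# Note: make_edge_key gives an order-independent lookup key, e.g.
# make_edge_key(7, 3)==make_edge_key(3, 7)==(3, 7), so an edge can be found
# from either ordering of its vertex indices.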

class Edge:
    def __init__(self, edge):
        if edge.__class__==Edge:
            self.smooth = edge.smooth
        else:
            self.smooth = False
        self.vertices = edge.vertices[:]
        self.key = edge.key
        self.faces = []

    def check_smooth(self, limit):
        if len(self.faces)!=2:
            return

        d = self.faces[0].normal.dot(self.faces[1].normal)
        self.smooth = ((d>limit and self.faces[0].use_smooth and self.faces[1].use_smooth) or d>0.99995)
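    # The dot product of the two face normals equals the cosine of the angle
    # between them, so d>limit means the crease is shallower than the smoothing
    # angle.  The d>0.99995 case treats practically coplanar faces as smooth
    # even when flat shading was requested.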

    def other_face(self, f):
        if f.index==self.faces[0].index:
            if len(self.faces)>=2:
                return self.faces[1]
        else:
            return self.faces[0]

    def other_vertex(self, v):
        if v.index==self.vertices[0].index:
            return self.vertices[1]
        else:
            return self.vertices[0]

class Vertex:
    def __init__(self, vertex):
        if vertex.__class__==Vertex:
            self.uvs = vertex.uvs[:]
        else:
            self.uvs = []
        self.index = vertex.index
        self.co = mathutils.Vector(vertex.co)
        self.normal = mathutils.Vector(vertex.normal)
        self.faces = []
        self.edges = []
        self.groups = vertex.groups[:]

    def __lt__(self, other):
        # Vertices are ordered by their index
        if other is None:
            return False
        return self.index<other.index

class VertexGroup:
    def __init__(self, group):
        if group:
            self.group = group.group
            self.weight = group.weight
        else:
            self.group = 0
            self.weight = 0.0

class Face:
    def __init__(self, face):
        self.index = face.index
        self.edges = []
        self.edge_keys = face.edge_keys
        self.vertices = face.vertices[:]
        self.loop_indices = face.loop_indices
        self.normal = face.normal
        self.use_smooth = face.use_smooth
        self.material_index = face.material_index
        self.flag = False

    def __lt__(self, other):
        # Faces are ordered by their index
        if other is None:
            return False
        return self.index<other.index

    def pivot_vertex(self, v):
        n = self.vertices.index(v)
        return [(n+i)%len(self.vertices) for i in range(len(self.vertices))]
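    # pivot_vertex returns the face's vertex positions rotated so that v comes
    # first; e.g. for a quad where v sits at position 2 the result is
    # [2, 3, 0, 1].  compute_normals and compute_tangents use it to walk the
    # two edges adjacent to v.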

    def get_loop_index(self, v):
        return self.loop_indices[self.vertices.index(v)]

    def get_edge(self, v1, v2):
        key = make_edge_key(v1.index, v2.index)
        for e in self.edges:
            if e.key==key:
                return e
        raise KeyError("No edge %s"%(key,))

    def other_edge(self, e, v):
        for d in self.edges:
            if d!=e and v in d.vertices:
                return d

    def get_neighbors(self):
        neighbors = [e.other_face(self) for e in self.edges]
        return list(filter(bool, neighbors))

class Line:
    def __init__(self, e):
        self.vertices = e.vertices[:]

class UvLayer:
    def __init__(self, arg):
        if type(arg)==str:
            self.name = arg
            self.uvs = []
        else:
            self.name = arg.name
            self.uvs = [mathutils.Vector(d.uv) for d in arg.data]

        self.unit = None
        dot = self.name.find('.')
        if dot>=0:
            ext = self.name[dot:]
            if ext.startswith(".unit") and ext[5:].isdigit():
                self.unit = int(ext[5:])
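    # A UV layer can request a specific texture unit through its name, e.g. a
    # layer named "UVMap.unit2" is assigned to unit 2.  Layers without such a
    # suffix get the lowest free unit numbers in Mesh.__init__ below.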

class ColorLayer:
    def __init__(self, l):
        self.colors = [c.color[:] for c in l.data]

class Mesh:
    def __init__(self, mesh):
        self.name = mesh.name

        self.winding_test = mesh.winding_test
        self.smoothing = mesh.smoothing
        self.use_uv = mesh.use_uv
        self.tangent_vecs = mesh.tangent_vecs
        self.tangent_uvtex = mesh.tangent_uvtex
        self.vertex_groups = mesh.vertex_groups

        # Clone basic mesh data
        self.vertices = [Vertex(v) for v in mesh.vertices]
        if self.vertex_groups:
            for v in self.vertices:
                v.groups = [VertexGroup(g) for g in v.groups]

        self.faces = [Face(f) for f in mesh.polygons]
        self.edges = [Edge(e) for e in mesh.edges]
        self.loops = mesh.loops[:]
        self.materials = mesh.materials[:]

        self.use_auto_smooth = mesh.use_auto_smooth
        self.auto_smooth_angle = mesh.auto_smooth_angle
        self.max_groups_per_vertex = mesh.max_groups_per_vertex

        # Clone only the desired UV layers
        if mesh.use_uv=='NONE' or not mesh.uv_layers:
            self.uv_layers = []
        else:
            self.uv_layers = [UvLayer(u) for u in mesh.uv_layers]

            # Assign texture unit numbers to UV layers that lack one
            missing_unit = [u for u in self.uv_layers if u.unit is None]
            if missing_unit:
                missing_unit = sorted(missing_unit, key=(lambda u: u.name))
                used_units = [u.unit for u in self.uv_layers if u.unit is not None]
                for u, n in zip(missing_unit, (i for i in itertools.count() if i not in used_units)):
                    u.unit = n

            self.uv_layers = sorted(self.uv_layers, key=(lambda u: u.unit))

            if mesh.use_uv=='UNIT0':
                self.uv_layers = [self.uv_layers[0]]
                if self.uv_layers[0].unit!=0:
                    self.uv_layers = []

        if mesh.vertex_colors:
            self.colors = ColorLayer(mesh.vertex_colors[0])
        else:
            self.colors = None

        # Rewrite links between elements to point to cloned data, or create links
        # where they don't exist
        edge_map = {e.key: e for e in self.edges}
        for f in self.faces:
            if len(f.vertices)>4:
                raise ValueError("Ngons are not supported")

            f.vertices = [self.vertices[i] for i in f.vertices]
            for v in f.vertices:
                v.faces.append(f)

            for k in f.edge_keys:
                e = edge_map[k]
                e.faces.append(f)
                f.edges.append(e)

        for e in self.edges:
            e.vertices = [self.vertices[i] for i in e.vertices]
            for v in e.vertices:
                v.edges.append(e)

        # Store loose edges as lines
        if mesh.use_lines:
            self.lines = [Line(e) for e in self.edges if not e.faces]
        else:
            self.lines = []

        self.vertex_sequence = []

    def transform(self, matrix):
        for v in self.vertices:
            v.co = matrix@v.co

    def splice(self, other):
        if len(self.uv_layers)!=len(other.uv_layers):
            raise ValueError("Meshes have incompatible UV layers")
        for i, u in enumerate(self.uv_layers):
            if u.name!=other.uv_layers[i].name:
                raise ValueError("Meshes have incompatible UV layers")

        # Merge materials and form a lookup from source material indices to the
        # merged material list
        material_atlas = []
        for m in other.materials:
            if m in self.materials:
                material_atlas.append(self.materials.index(m))
            else:
                material_atlas.append(len(self.materials))
                self.materials.append(m)

        # Append data and adjust indices where necessary. Since the data is
        # spliced from the source mesh, rebuilding references is not necessary.
        for i, u in enumerate(self.uv_layers):
            u.uvs += other.uv_layers[i].uvs

        if self.colors:
            if other.colors:
                self.colors.colors += other.colors.colors
            else:
                self.colors.colors += [(1.0, 1.0, 1.0, 1.0)]*len(other.loops)
        elif other.colors:
            self.colors = ColorLayer(other.colors.name)
            self.colors.colors = [(1.0, 1.0, 1.0, 1.0)]*len(self.loops)+other.colors.colors

        offset = len(self.vertices)
        self.vertices += other.vertices
        for v in self.vertices[offset:]:
            v.index += offset

        loop_offset = len(self.loops)
        self.loops += other.loops

        offset = len(self.faces)
        self.faces += other.faces
        for f in self.faces[offset:]:
            f.index += offset
            f.loop_indices = range(f.loop_indices.start+loop_offset, f.loop_indices.stop+loop_offset)
            f.material_index = material_atlas[f.material_index]

        offset = len(self.edges)
        self.edges += other.edges
        for e in self.edges[offset:]:
            e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)

        self.lines += other.lines

    def prepare_triangles(self, progress):
        face_count = len(self.faces)
        for i in range(face_count):
            f = self.faces[i]
            nverts = len(f.vertices)
            if nverts==3:
                continue

            # Calculate normals at each vertex of the face
            edge_vecs = []
            for j in range(nverts):
                edge_vecs.append(f.vertices[(j+1)%nverts].co-f.vertices[j].co)

            normals = []
            for j in range(nverts):
                normals.append(edge_vecs[j-1].cross(edge_vecs[j]).normalized())

            # Check which diagonal results in a flatter triangulation
            flatness1 = normals[0].dot(normals[2])
            flatness2 = normals[1].dot(normals[3])
            cut_index = 1 if flatness1>flatness2 else 0

            # Split the quad: f keeps one triangle, nf gets the other, and ne
            # becomes the new diagonal edge between them
            nf = Face(f)
            nf.index = len(self.faces)
            self.faces.append(nf)

            ne = Edge(f.edges[0])
            ne.index = len(self.edges)
            self.edges.append(ne)

            nf.vertices = [f.vertices[cut_index], f.vertices[2], f.vertices[3]]
            nf.loop_indices = [f.loop_indices[cut_index], f.loop_indices[2], f.loop_indices[3]]
            for v in nf.vertices:
                v.faces.append(nf)

            ne.vertices = [f.vertices[cut_index], f.vertices[2+cut_index]]
            for v in ne.vertices:
                v.edges.append(ne)
            ne.key = make_edge_key(ne.vertices[0].index, ne.vertices[1].index)

            f.vertices[3-cut_index].faces.remove(f)
            del f.vertices[3-cut_index]
            f.loop_indices = [f.loop_indices[0], f.loop_indices[1], f.loop_indices[2+cut_index]]

            if cut_index==0:
                nf.edges = [ne, f.edges[2], f.edges[3]]
                f.edges = [f.edges[0], f.edges[1], ne]
            else:
                nf.edges = [f.edges[1], f.edges[2], ne]
                f.edges = [f.edges[0], ne, f.edges[3]]

            # Relink the edges that moved over to nf, and link the new edge
            for e in nf.edges:
                if e is not ne and f in e.faces:
                    e.faces[e.faces.index(f)] = nf
            ne.faces = [f, nf]

            f.normal = normals[1-cut_index]
            nf.normal = normals[3-cut_index]

            progress.set_progress(i/face_count)
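    # A quad is split along the diagonal that keeps the two resulting triangles
    # closest to coplanar: normals[j] is the corner normal at vertex j, so
    # normals[0].dot(normals[2]) and normals[1].dot(normals[3]) measure how
    # flat each possible cut would be.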

    def prepare_smoothing(self, progress):
        smooth_limit = -1
        if self.smoothing=='NONE':
            smooth_limit = 1
        elif self.use_auto_smooth:
            smooth_limit = math.cos(self.auto_smooth_angle)

        for e in self.edges:
            e.check_smooth(smooth_limit)

        progress.push_task("Sharp edges", 0.0, 0.7)
        self.split_vertices(self.find_smooth_group, progress)

        if self.smoothing!='BLENDER':
            progress.set_task("Updating normals", 0.7, 1.0)
            self.compute_normals(progress)

        progress.pop_task()
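    # With auto smooth the limit is cos(auto_smooth_angle): an edge whose face
    # normals differ by more than that angle fails check_smooth and is treated
    # as sharp, so split_vertices gives each side its own copy of the shared
    # vertices.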

    def prepare_vertex_groups(self, obj):
        if not self.vertex_groups:
            return

        for v in self.vertices:
            if v.groups:
                weight_sum = sum(g.weight for g in v.groups)
                v.groups = sorted(v.groups, key=(lambda g: g.weight), reverse=True)[:self.max_groups_per_vertex]
                weight_scale = weight_sum/sum(g.weight for g in v.groups)
                for g in v.groups:
                    g.weight *= weight_scale
            while len(v.groups)<self.max_groups_per_vertex:
                v.groups.append(VertexGroup(None))

        if obj.parent and obj.parent.type=="ARMATURE":
            armature = obj.parent.data
            bone_indices = {b.name: i for i, b in enumerate(armature.bones)}
            group_index_map = {i: i for i in range(len(obj.vertex_groups))}
            for g in obj.vertex_groups:
                if g.name in bone_indices:
                    group_index_map[g.index] = bone_indices[g.name]

            for v in self.vertices:
                for g in v.groups:
                    g.group = group_index_map[g.group]
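    # Weights are renormalized so the kept groups still sum to the original
    # total, and group indices are remapped from the object's vertex group
    # order to the armature's bone order so the exported indices match the
    # skeleton.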

    def apply_material_atlas(self, material_atlas):
        for m in self.materials:
            if m.name not in material_atlas.material_names:
                raise Exception("Material atlas is not compatible with Mesh")

        if self.use_uv=='NONE':
            return

        layer = UvLayer("material_atlas")
        if self.use_uv=='UNIT0':
            self.uv_layers = [layer]
            layer.unit = 0
        else:
            self.uv_layers.append(layer)
            used_units = [u.unit for u in self.uv_layers]
            layer.unit = next(i for i in itertools.count() if i not in used_units)
            self.uv_layers.sort(key=lambda u: u.unit)

        layer.uvs = [(0.0, 0.0)]*len(self.loops)
        for f in self.faces:
            uv = material_atlas.get_material_uv(self.materials[f.material_index])
            for i in f.loop_indices:
                layer.uvs[i] = uv

    def prepare_uv(self, progress):
        # Form a list of UV layers referenced by materials with the array atlas
        # feature enabled
        array_uv_layers = [] #[t.uv_layer for m in self.materials if m.array_atlas for t in m.texture_slots if t and t.texture_coords=='UV']
        array_uv_layers = [u for u in self.uv_layers if u.name in array_uv_layers]

        if array_uv_layers:
            for f in self.faces:
                layer = 0
                if f.material_index<len(self.materials):
                    mat = self.materials[f.material_index]
                    if mat and mat.array_atlas:
                        layer = mat.array_layer

                for l in array_uv_layers:
                    for i in f.loop_indices:
                        l.uvs[i] = mathutils.Vector((*l.uvs[i], layer))

        prog_count = len(self.uv_layers)
        if self.tangent_vecs:
            prog_count += 1
        prog_step = 0

        # Split by the UV layer used for tangent vectors first so connectivity
        # remains intact for tangent vector computation
        tangent_layer_index = -1
        if self.tangent_vecs:
            if self.tangent_uvtex:
                uv_names = [u.name for u in self.uv_layers]
                if self.tangent_uvtex in uv_names:
                    tangent_layer_index = uv_names.index(self.tangent_uvtex)
            elif self.uv_layers[0].unit==0:
                tangent_layer_index = 0

        if tangent_layer_index>=0:
            progress.push_task_slice("Computing tangents", 0, prog_count)
            self.split_vertices(self.find_uv_group, progress, tangent_layer_index)
            progress.set_task_slice(self.tangent_uvtex, 1, prog_count)
            self.compute_tangents(tangent_layer_index, progress)
            progress.pop_task_slice()   # assumed counterpart of push_task_slice
            prog_step = 2
        elif self.tangent_vecs:
            raise Exception("Tangent UV layer not found")

        # Split by the remaining UV layers
        for i, u in enumerate(self.uv_layers):
            if i==tangent_layer_index:
                continue

            progress.push_task_slice(u.name, prog_step, prog_count)
            self.split_vertices(self.find_uv_group, progress, i)
            progress.pop_task_slice()
            prog_step += 1

        # Copy UVs from layers to vertices
        for v in self.vertices:
            if v.faces:
                # All faces still connected to the vertex have the same UV value
                f = v.faces[0]
                i = f.get_loop_index(v)
                v.uvs = [u.uvs[i] for u in self.uv_layers]
            else:
                v.uvs = [(0.0, 0.0)]*len(self.uv_layers)

    def prepare_colors(self, progress):
        if not self.colors:
            return

        self.split_vertices(self.find_color_group, progress)

        for v in self.vertices:
            if v.faces:
                f = v.faces[0]
                v.color = self.colors.colors[f.get_loop_index(v)]
            else:
                v.color = (1.0, 1.0, 1.0, 1.0)

    def split_vertices(self, find_group_func, progress, *args):
        vertex_count = len(self.vertices)
        for i in range(vertex_count):
            v = self.vertices[i]
            for f in v.faces:
                f.flag = False

            # Find all groups of faces on this vertex
            groups = []
            for f in v.faces:
                if not f.flag:
                    groups.append(find_group_func(v, f, *args))

            # Give groups after the first separate copies of the vertex
            for g in groups[1:]:
                nv = Vertex(v)
                nv.index = len(self.vertices)
                self.vertices.append(nv)

                for e in v.edges:
                    e_faces_in_g = [f for f in e.faces if f in g]
                    if not e_faces_in_g:
                        continue

                    if len(e_faces_in_g)<len(e.faces):
                        # Create a copy of an edge at the boundary of the group
                        ne = Edge(e)
                        ne.index = len(self.edges)
                        self.edges.append(ne)

                        ne.other_vertex(v).edges.append(ne)

                        for f in e_faces_in_g:
                            e.faces.remove(f)
                            f.edges[f.edges.index(e)] = ne
                            ne.faces.append(f)

                        e = ne

                    e.vertices[e.vertices.index(v)] = nv
                    nv.edges.append(e)

                    e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)

                # Filter out any edges that were removed from the original vertex
                v.edges = [e for e in v.edges if v in e.vertices]

                for f in g:
                    f.vertices[f.vertices.index(v)] = nv
                    nv.faces.append(f)
                    v.faces.remove(f)

            progress.set_progress(i/vertex_count)
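    # Each group is a set of faces that can share one copy of the vertex (same
    # smoothing region, UV value or color).  The first group keeps the original
    # vertex; every further group gets a duplicate, and the edges and faces
    # along the seam are rewired to point at the copy.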

    def find_smooth_group(self, vertex, face):
        face.flag = True

        edges = [e for e in face.edges if vertex in e.vertices]

        group = [face]
        for e in edges:
            f = face
            while e.smooth:
                f = e.other_face(f)
                if not f or f.flag:
                    break

                f.flag = True
                group.append(f)
                e = f.other_edge(e, vertex)

        return group

    def find_uv_group(self, vertex, face, index):
        layer = self.uv_layers[index]
        uv = layer.uvs[face.get_loop_index(vertex)]
        face.flag = True

        group = [face]
        for f in vertex.faces:
            if not f.flag and layer.uvs[f.get_loop_index(vertex)]==uv:
                f.flag = True
                group.append(f)

        return group

    def find_color_group(self, vertex, face):
        color = self.colors.colors[face.get_loop_index(vertex)]
        face.flag = True

        group = [face]
        for f in vertex.faces:
            if not f.flag and self.colors.colors[f.get_loop_index(vertex)]==color:
                f.flag = True
                group.append(f)

        return group

    def compute_normals(self, progress):
        for i, v in enumerate(self.vertices):
            v.normal = mathutils.Vector()
            for f in v.faces:
                vi = f.pivot_vertex(v)
                edge1 = f.vertices[vi[1]].co-v.co
                edge2 = f.vertices[vi[-1]].co-v.co
                if edge1.length and edge2.length:
                    # Use the angle between edges as a weighting factor. This gives
                    # more consistent normals on bends with an unequal number of
                    # faces on each side.
                    v.normal += f.normal*edge1.angle(edge2)

            if v.normal.length:
                v.normal.normalize()
            else:
                v.normal = mathutils.Vector((0, 0, 1))

            progress.set_progress(i/len(self.vertices))
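    # Weighting each face normal by the corner angle at the vertex means a face
    # that wraps a wide angle around the vertex contributes more than a narrow
    # sliver, so the result does not depend on how the surface happens to be
    # tessellated.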

    def compute_tangents(self, index, progress):
        layer_uvs = self.uv_layers[index].uvs

        for i, v in enumerate(self.vertices):
            v.tan = mathutils.Vector()
            for f in v.faces:
                vi = f.pivot_vertex(v)
                uv0 = layer_uvs[f.loop_indices[vi[0]]]
                uv1 = layer_uvs[f.loop_indices[vi[1]]]
                uv2 = layer_uvs[f.loop_indices[vi[-1]]]
                du1 = uv1[0]-uv0[0]
                du2 = uv2[0]-uv0[0]
                dv1 = uv1[1]-uv0[1]
                dv2 = uv2[1]-uv0[1]
                edge1 = f.vertices[vi[1]].co-f.vertices[vi[0]].co
                edge2 = f.vertices[vi[-1]].co-f.vertices[vi[0]].co
                div = (du1*dv2-du2*dv1)
                if div:
                    mul = edge1.angle(edge2)/div
                    v.tan += (edge1*dv2-edge2*dv1)*mul

            if v.tan.length:
                v.tan.normalize()

            progress.set_progress(i/len(self.vertices))
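    # Per face this solves edge1 = du1*T + dv1*B, edge2 = du2*T + dv2*B for the
    # tangent T, giving T = (edge1*dv2 - edge2*dv1)/(du1*dv2 - du2*dv1); the
    # contributions are angle-weighted and summed per vertex just like the
    # normals above.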

    def prepare_sequence(self, progress):
        progress.push_task("Reordering faces", 0.0, 0.5)
        self.reorder_faces(progress)

        progress.set_task("Building sequence", 0.5, 1.0)
        sequence = None
        for i, f in enumerate(self.faces):
            if sequence:
                if len(sequence)==3:
                    # Rotate the first three vertices so that the new face can be added
                    if sequence[0] in f.vertices and sequence[1] not in f.vertices:
                        sequence.append(sequence[0])
                        del sequence[0]
                    elif sequence[2] not in f.vertices and sequence[1] in f.vertices:
                        sequence.insert(0, sequence[-1])
                        del sequence[-1]

                if sequence[-1] not in f.vertices:
                    sequence = None
                else:
                    to_add = [v for v in f.vertices if v!=sequence[-1] and v!=sequence[-2]]
                    if len(to_add)==2:
                        if (f.vertices[1]==sequence[-1]) != (len(sequence)%2==1):
                            to_add.reverse()
                        sequence.append(sequence[-1])
                    sequence += to_add

            if not sequence:
                sequence = f.vertices[:]
                self.vertex_sequence.append(sequence)

            progress.set_progress(i/len(self.faces))

        progress.pop_task()

        self.reorder_vertices()
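    # The result is a set of triangle-strip-like vertex runs: consecutive faces
    # that share an edge with the current run are appended (with a repeated
    # vertex to fix winding when needed), and a new run is started whenever the
    # next face does not connect.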

    def reorder_faces(self, progress):
        # Tom Forsyth's vertex cache optimization algorithm
        # http://eelpi.gotdns.org/papers/fast_vert_cache_opt.html
        for f in self.faces:
            f.flag = False

        max_cache_size = 32   # modeled cache size (assumed value; not shown in the original)
        last_triangle_score = 0.75
        cache_decay_power = 1.5
        valence_boost_scale = 2.0
        valence_boost_power = -0.5

        cached_vertices = []
        reordered_faces = []

        # Keep track of the score and number of unused faces for each vertex
        vertex_info = [[0, len(v.faces)] for v in self.vertices]
        for vi in vertex_info:
            vi[0] = valence_boost_scale*(vi[1]**valence_boost_power)

        face = None
        n_processed = 0
        while n_processed<len(self.faces):
            if not face:
                # Previous iteration gave no candidate for best face (or this is
                # the first iteration). Scan all faces for the highest score.
                best_score = 0
                for f in self.faces:
                    if f.flag:
                        continue
                    score = sum(vertex_info[v.index][0] for v in f.vertices)
                    if score>best_score:
                        best_score = score
                        face = f

            reordered_faces.append(face)
            face.flag = True

            for v in face.vertices:
                vertex_info[v.index][1] -= 1

                # Shuffle the vertex into the front of the cache
                if v in cached_vertices:
                    cached_vertices.remove(v)
                cached_vertices.insert(0, v)

            # Update scores for all vertices in the cache
            for i, v in enumerate(cached_vertices):
                score = 0
                if i<3:
                    score += last_triangle_score
                elif i<max_cache_size:
                    score += (1-(i-3)/(max_cache_size-3))**cache_decay_power
                if vertex_info[v.index][1]:
                    score += valence_boost_scale*(vertex_info[v.index][1]**valence_boost_power)
                vertex_info[v.index][0] = score

            # Pick the next face from among those touching the cached vertices
            face = None
            best_score = 0
            for v in cached_vertices:
                for f in v.faces:
                    if not f.flag:
                        score = sum(vertex_info[fv.index][0] for fv in f.vertices)
                        if score>best_score:
                            best_score = score
                            face = f

            del cached_vertices[max_cache_size:]

            n_processed += 1
            progress.set_progress(n_processed/len(self.faces))

        self.faces = reordered_faces
        for i, f in enumerate(self.faces):
            f.index = i
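    # Greedy scoring: vertices used by the most recent triangle or sitting near
    # the front of the simulated cache score higher, with a boost for vertices
    # that have few unused faces left, so the chosen face order tends to reuse
    # vertices while they are still in the GPU's post-transform cache.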

    def reorder_vertices(self):
        for v in self.vertices:
            v.index = -1

        reordered_vertices = []
        for s in self.vertex_sequence:
            for v in s:
                if v.index<0:
                    v.index = len(reordered_vertices)
                    reordered_vertices.append(v)

        self.vertices = reordered_vertices

        for e in self.edges:
            e.key = make_edge_key(e.vertices[0].index, e.vertices[1].index)

def create_mesh_from_object(context, obj, progress, *, material_atlas=None):
    if obj.type!="MESH":
        raise Exception("Object is not a mesh")

    progress.push_task("Preparing mesh", 0.0, 0.2)

    # Collect the object and any compound mesh children, with their transforms
    # relative to the root object
    objs = [(obj, mathutils.Matrix())]
    i = 0
    while i<len(objs):
        o, m = objs[i]
        i += 1
        for c in o.children:
            if c.type=="MESH" and c.compound:
                objs.append((c, m@c.matrix_local))

    dg = context.evaluated_depsgraph_get()

    mesh = None
    for o, m in objs:
        eval_obj = o.evaluated_get(dg)
        bmesh = eval_obj.to_mesh()

        # Object.to_mesh does not copy custom properties
        bmesh.winding_test = o.data.winding_test
        bmesh.smoothing = o.data.smoothing
        bmesh.use_lines = o.data.use_lines
        bmesh.vertex_groups = o.data.vertex_groups
        bmesh.max_groups_per_vertex = o.data.max_groups_per_vertex
        bmesh.use_uv = o.data.use_uv
        bmesh.tangent_vecs = o.data.tangent_vecs
        bmesh.tangent_uvtex = o.data.tangent_uvtex

        me = Mesh(bmesh)
        me.transform(m)

        for i, s in enumerate(eval_obj.material_slots):
            if s.material:
                me.materials[i] = s.material

        if mesh:
            mesh.splice(me)
        else:
            mesh = me

    mesh.name = obj.data.name

    if material_atlas:
        mesh.apply_material_atlas(material_atlas)

    progress.set_task("Triangulating", 0.2, 0.3)
    mesh.prepare_triangles(progress)
    progress.set_task("Smoothing", 0.3, 0.5)
    mesh.prepare_smoothing(progress)
    progress.set_task("Vertex groups", 0.5, 0.6)
    mesh.prepare_vertex_groups(obj)
    progress.set_task("Preparing UVs", 0.6, 0.75)
    mesh.prepare_uv(progress)
    progress.set_task("Preparing vertex colors", 0.75, 0.85)
    mesh.prepare_colors(progress)
    progress.set_task("Render sequence", 0.85, 1.0)
    mesh.prepare_sequence(progress)
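
# Typical use from an export operator might look roughly like this (sketch:
# the Progress helper and the surrounding operator are assumed to exist
# elsewhere in this add-on and are not defined in this module):
#
#   progress = Progress(context)
#   mesh = create_mesh_from_object(context, context.active_object, progress)
#   # ...then write mesh.vertices, mesh.vertex_sequence and mesh.lines to the
#   # output file.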