glTF importer: more perf
This commit is contained in:
parent
ed161459d1
commit
f9e25350dc
|
@ -15,7 +15,7 @@
|
|||
bl_info = {
|
||||
'name': 'glTF 2.0 format',
|
||||
'author': 'Julien Duroure, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
|
||||
"version": (0, 9, 79),
|
||||
"version": (0, 9, 80),
|
||||
'blender': (2, 81, 6),
|
||||
'location': 'File > Import-Export',
|
||||
'description': 'Import-Export as glTF 2.0',
|
||||
|
|
|
@ -145,26 +145,17 @@ class BlenderMesh():
|
|||
for fi in range(face_idx, face_idx + prim.num_faces):
|
||||
mesh.polygons[fi].use_smooth = True
|
||||
elif gltf.import_settings['import_shading'] == "NORMALS":
|
||||
mesh_loops = mesh.loops
|
||||
for fi in range(face_idx, face_idx + prim.num_faces):
|
||||
poly = mesh.polygons[fi]
|
||||
calc_norm_vertices = []
|
||||
# "Flat normals" are when all the vertices in poly have the
|
||||
# poly's normal. Otherwise, smooth the poly.
|
||||
for loop_idx in range(poly.loop_start, poly.loop_start + poly.loop_total):
|
||||
vert_idx = mesh.loops[loop_idx].vertex_index
|
||||
calc_norm_vertices.append(vert_idx)
|
||||
vi = mesh_loops[loop_idx].vertex_index
|
||||
if poly.normal.dot(bme.verts[vi].normal) <= 0.9999999:
|
||||
poly.use_smooth = True
|
||||
break
|
||||
|
||||
if len(calc_norm_vertices) == 3:
|
||||
# Calculate the face normal from the first three vertices
|
||||
vert0 = mesh.vertices[calc_norm_vertices[0]].co
|
||||
vert1 = mesh.vertices[calc_norm_vertices[1]].co
|
||||
vert2 = mesh.vertices[calc_norm_vertices[2]].co
|
||||
calc_normal = (vert1 - vert0).cross(vert2 - vert0).normalized()
|
||||
|
||||
# Compare normal to vertex normal
|
||||
for i in calc_norm_vertices:
|
||||
vec = Vector(bme.verts[i].normal)
|
||||
if not calc_normal.dot(vec) > 0.9999999:
|
||||
poly.use_smooth = True
|
||||
break
|
||||
else:
|
||||
# shouldn't happen
|
||||
pass
|
||||
|
|
|
@ -67,12 +67,11 @@ class BinaryData():
|
|||
else:
|
||||
stride = stride_
|
||||
|
||||
data = []
|
||||
offset = 0
|
||||
while len(data) < accessor.count:
|
||||
element = struct.unpack_from(fmt, buffer_data, offset)
|
||||
data.append(element)
|
||||
offset += stride
|
||||
unpack_from = struct.Struct(fmt).unpack_from
|
||||
data = [
|
||||
unpack_from(buffer_data, offset)
|
||||
for offset in range(0, accessor.count*stride, stride)
|
||||
]
|
||||
|
||||
if accessor.sparse:
|
||||
sparse_indices_data = BinaryData.get_data_from_sparse(gltf, accessor.sparse, "indices")
|
||||
|
@ -142,12 +141,11 @@ class BinaryData():
|
|||
else:
|
||||
stride = stride_
|
||||
|
||||
data = []
|
||||
offset = 0
|
||||
while len(data) < sparse.count:
|
||||
element = struct.unpack_from(fmt, bin_data, offset)
|
||||
data.append(element)
|
||||
offset += stride
|
||||
unpack_from = struct.Struct(fmt).unpack_from
|
||||
data = [
|
||||
unpack_from(bin_data, offset)
|
||||
for offset in range(0, sparse.count*stride, stride)
|
||||
]
|
||||
|
||||
return data
|
||||
|
||||
|
|
Loading…
Reference in New Issue