From bb4dc6f1daab2cc19c79e981222387069fb97ec4 Mon Sep 17 00:00:00 2001
From: Julien Duroure <julien.duroure@gmail.com>
Date: Tue, 21 Jul 2020 20:21:30 +0200
Subject: [PATCH] glTF importer: performance: rewrite importer using numpy

Thanks scurest!
---
 io_scene_gltf2/__init__.py                    |   2 +-
 .../blender/imp/gltf2_blender_gltf.py         |   4 +-
 .../blender/imp/gltf2_blender_mesh.py         | 596 +++++++++++++++---
 .../blender/imp/gltf2_blender_primitive.py    | 344 ----------
 io_scene_gltf2/io/imp/gltf2_io_binary.py      | 136 ++--
 io_scene_gltf2/io/imp/gltf2_io_gltf.py        |   1 +
 6 files changed, 583 insertions(+), 500 deletions(-)
 delete mode 100755 io_scene_gltf2/blender/imp/gltf2_blender_primitive.py

diff --git a/io_scene_gltf2/__init__.py b/io_scene_gltf2/__init__.py
index 0beb10a15..6404fc20f 100755
--- a/io_scene_gltf2/__init__.py
+++ b/io_scene_gltf2/__init__.py
@@ -15,7 +15,7 @@
 bl_info = {
     'name': 'glTF 2.0 format',
     'author': 'Julien Duroure, Norbert Nopper, Urs Hanselmann, Moritz Becher, Benjamin Schmithüsen, Jim Eckerlein, and many external contributors',
-    "version": (1, 3, 34),
+    "version": (1, 3, 35),
     'blender': (2, 90, 0),
     'location': 'File > Import-Export',
     'description': 'Import-Export as glTF 2.0',
diff --git a/io_scene_gltf2/blender/imp/gltf2_blender_gltf.py b/io_scene_gltf2/blender/imp/gltf2_blender_gltf.py
index efa7f003b..226720a3c 100755
--- a/io_scene_gltf2/blender/imp/gltf2_blender_gltf.py
+++ b/io_scene_gltf2/blender/imp/gltf2_blender_gltf.py
@@ -50,9 +50,9 @@ class BlenderGlTF():
 
     @staticmethod
     def set_convert_functions(gltf):
-        yup2zup = bpy.app.debug_value != 100
+        gltf.yup2zup = bpy.app.debug_value != 100
 
-        if yup2zup:
+        if gltf.yup2zup:
             # glTF Y-Up space --> Blender Z-up space
             # X,Y,Z --> X,-Z,Y
             def convert_loc(x): return Vector([x[0], -x[2], x[1]])
diff --git a/io_scene_gltf2/blender/imp/gltf2_blender_mesh.py b/io_scene_gltf2/blender/imp/gltf2_blender_mesh.py
index 7914a41bd..33578de86 100755
--- a/io_scene_gltf2/blender/imp/gltf2_blender_mesh.py
+++ b/io_scene_gltf2/blender/imp/gltf2_blender_mesh.py
@@ -13,11 +13,12 @@
 # limitations under the License.
 
 import bpy
-import bmesh
+from mathutils import Vector, Matrix
+import numpy as np
 
+from ...io.imp.gltf2_io_binary import BinaryData
 from ..com.gltf2_blender_extras import set_extras
 from .gltf2_blender_material import BlenderMaterial
-from .gltf2_blender_primitive import BlenderPrimitive
 
 
 class BlenderMesh():
@@ -28,118 +29,511 @@ class BlenderMesh():
     @staticmethod
     def create(gltf, mesh_idx, skin_idx):
         """Mesh creation."""
-        pymesh = gltf.data.meshes[mesh_idx]
+        return create_mesh(gltf, mesh_idx, skin_idx)
 
-        # Create one bmesh, add all primitives to it, and then convert it to a
-        # mesh.
-        bme = bmesh.new()
 
-        # List of all the materials this mesh will use. The material each
-        # primitive uses is set by giving an index into this list.
-        materials = []
+# Maximum number of TEXCOORD_n/COLOR_n sets to import
+UV_MAX = 8
+COLOR_MAX = 8
 
-        # Process all primitives
-        for prim in pymesh.primitives:
-            if prim.material is None:
-                material_idx = None
-            else:
-                pymaterial = gltf.data.materials[prim.material]
-
-                vertex_color = None
-                if 'COLOR_0' in prim.attributes:
-                    vertex_color = 'COLOR_0'
 
-                # Create Blender material if needed
-                if vertex_color not in pymaterial.blender_material:
-                    BlenderMaterial.create(gltf, prim.material, vertex_color)
-                material_name = pymaterial.blender_material[vertex_color]
-                material = bpy.data.materials[material_name]
+def create_mesh(gltf, mesh_idx, skin_idx):
+    pymesh = gltf.data.meshes[mesh_idx]
+    name = pymesh.name or 'Mesh_%d' % mesh_idx
+    mesh = bpy.data.meshes.new(name)
 
-                try:
-                    material_idx = materials.index(material.name)
-                except ValueError:
-                    materials.append(material.name)
-                    material_idx = len(materials) - 1
+    # Temporarily parent the mesh to an object.
+    # This is used to set skin weights and shapekeys.
+    tmp_ob = None
+    try:
+        tmp_ob = bpy.data.objects.new('##gltf-import:tmp-object##', mesh)
+        do_primitives(gltf, mesh_idx, skin_idx, mesh, tmp_ob)
 
-            BlenderPrimitive.add_primitive_to_bmesh(gltf, bme, pymesh, prim, skin_idx, material_idx)
+    finally:
+        if tmp_ob:
+            bpy.data.objects.remove(tmp_ob)
 
-        name = pymesh.name or 'Mesh_' + str(mesh_idx)
-        mesh = bpy.data.meshes.new(name)
-        BlenderMesh.bmesh_to_mesh(gltf, pymesh, bme, mesh)
-        bme.free()
-        for name_material in materials:
-            mesh.materials.append(bpy.data.materials[name_material])
-        mesh.update()
+    return mesh
 
-        set_extras(mesh, pymesh.extras, exclude=['targetNames'])
 
-        # Clear accessor cache after all primitives are done
-        gltf.accessor_cache = {}
+def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
+    """Put all primitive data into the mesh."""
+    pymesh = gltf.data.meshes[mesh_idx]
 
-        return mesh
+    # Scan the primitives to find out what we need to create
 
-    @staticmethod
-    def bmesh_to_mesh(gltf, pymesh, bme, mesh):
-        bme.to_mesh(mesh)
-
-        # Unfortunately need to do shapekeys/normals/smoothing ourselves.
-
-        # Shapekeys
-        if len(bme.verts.layers.shape) != 0:
-            # The only way I could find to create a shape key was to temporarily
-            # parent mesh to an object and use obj.shape_key_add.
-            tmp_ob = None
-            try:
-                tmp_ob = bpy.data.objects.new('##gltf-import:tmp-object##', mesh)
-                tmp_ob.shape_key_add(name='Basis')
-                mesh.shape_keys.name = mesh.name
-                for layer_name in bme.verts.layers.shape.keys():
-                    tmp_ob.shape_key_add(name=layer_name)
-                    key_block = mesh.shape_keys.key_blocks[layer_name]
-                    layer = bme.verts.layers.shape[layer_name]
-
-                    for i, v in enumerate(bme.verts):
-                        key_block.data[i].co = v[layer]
-            finally:
-                if tmp_ob:
-                    bpy.data.objects.remove(tmp_ob)
-
-        # Normals
-        mesh.update()
+    has_normals = False
+    num_uvs = 0
+    num_cols = 0
+    num_joint_sets = 0
+    for prim in pymesh.primitives:
+        if 'POSITION' not in prim.attributes:
+            continue
 
         if gltf.import_settings['import_shading'] == "NORMALS":
-            mesh.create_normals_split()
-
-        use_smooths = []  # whether to smooth for each poly
-        face_idx = 0
-        for prim in pymesh.primitives:
-            if gltf.import_settings['import_shading'] == "FLAT" or \
-                    'NORMAL' not in prim.attributes:
-                use_smooths += [False] * prim.num_faces
-            elif gltf.import_settings['import_shading'] == "SMOOTH":
-                use_smooths += [True] * prim.num_faces
-            elif gltf.import_settings['import_shading'] == "NORMALS":
-                mesh_loops = mesh.loops
-                for fi in range(face_idx, face_idx + prim.num_faces):
-                    poly = mesh.polygons[fi]
-                    # "Flat normals" are when all the vertices in poly have the
-                    # poly's normal. Otherwise, smooth the poly.
-                    for loop_idx in range(poly.loop_start, poly.loop_start + poly.loop_total):
-                        vi = mesh_loops[loop_idx].vertex_index
-                        if poly.normal.dot(bme.verts[vi].normal) <= 0.9999999:
-                            use_smooths.append(True)
-                            break
-                    else:
-                        use_smooths.append(False)
+            if 'NORMAL' in prim.attributes:
+                has_normals = True
+
+        if skin_idx is not None:
+            i = 0
+            while ('JOINTS_%d' % i) in prim.attributes and \
+                    ('WEIGHTS_%d' % i) in prim.attributes:
+                i += 1
+            num_joint_sets = max(i, num_joint_sets)
+
+        i = 0
+        while i < UV_MAX and ('TEXCOORD_%d' % i) in prim.attributes: i += 1
+        num_uvs = max(i, num_uvs)
+
+        i = 0
+        while i < COLOR_MAX and ('COLOR_%d' % i) in prim.attributes: i += 1
+        num_cols = max(i, num_cols)
+
+    num_shapekeys = 0
+    for morph_i, _ in enumerate(pymesh.primitives[0].targets or []):
+        if pymesh.shapekey_names[morph_i] is not None:
+            num_shapekeys += 1
+
+    # -------------
+    # We'll process all the primitives, gathering arrays to feed into the
+    # various foreach_set functions that create the mesh data.
+
+    num_faces = 0  # total number of faces
+    vert_locs = np.empty(dtype=np.float32, shape=(0,3))  # coordinate for each vert
+    vert_normals = np.empty(dtype=np.float32, shape=(0,3))  # normal for each vert
+    edge_vidxs = np.array([], dtype=np.uint32)  # vertex_index for each loose edge
+    loop_vidxs = np.array([], dtype=np.uint32)  # vertex_index for each loop
+    loop_uvs = [
+        np.empty(dtype=np.float32, shape=(0,2))  # UV for each loop for each layer
+        for _ in range(num_uvs)
+    ]
+    loop_cols = [
+        np.empty(dtype=np.float32, shape=(0,4))  # color for each loop for each layer
+        for _ in range(num_cols)
+    ]
+    vert_joints = [
+        np.empty(dtype=np.uint32, shape=(0,4))  # 4 joints for each vert for each set
+        for _ in range(num_joint_sets)
+    ]
+    vert_weights = [
+        np.empty(dtype=np.float32, shape=(0,4))  # 4 weights for each vert for each set
+        for _ in range(num_joint_sets)
+    ]
+    sk_vert_locs = [
+        np.empty(dtype=np.float32, shape=(0,3))  # coordinate for each vert for each shapekey
+        for _ in range(num_shapekeys)
+    ]
+
+    for prim in pymesh.primitives:
+        prim.num_faces = 0
+
+        if 'POSITION' not in prim.attributes:
+            continue
+
+        vert_index_base = len(vert_locs)
+
+        if prim.indices is not None:
+            indices = BinaryData.decode_accessor(gltf, prim.indices)
+            indices = indices.reshape(len(indices))
+        else:
+            num_verts = gltf.data.accessors[prim.attributes['POSITION']].count
+            indices = np.arange(0, num_verts, dtype=np.uint32)
+
+        mode = 4 if prim.mode is None else prim.mode
+        points, edges, tris = points_edges_tris(mode, indices)
+        if points is not None:
+            indices = points
+        elif edges is not None:
+            indices = edges
+        else:
+            indices = tris
+
+        # We'll add one vert to the arrays for each index used in indices
+        unique_indices, inv_indices = np.unique(indices, return_inverse=True)
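+        # np.unique guarantees indices == unique_indices[inv_indices], so
+        # unique_indices lists the verts this prim adds and inv_indices maps
+        # each corner back onto them, eg. [5,2,5,9] -> unique [2,5,9], inverse [1,0,1,2]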
+
+        vs = BinaryData.decode_accessor(gltf, prim.attributes['POSITION'], cache=True)
+        vert_locs = np.concatenate((vert_locs, vs[unique_indices]))
+
+        if has_normals:
+            if 'NORMAL' in prim.attributes:
+                ns = BinaryData.decode_accessor(gltf, prim.attributes['NORMAL'], cache=True)
+                ns = ns[unique_indices]
+            else:
+                ns = np.zeros((len(unique_indices), 3), dtype=np.float32)
+            vert_normals = np.concatenate((vert_normals, ns))
+
+        for i in range(num_joint_sets):
+            if ('JOINTS_%d' % i) in prim.attributes and ('WEIGHTS_%d' % i) in prim.attributes:
+                js = BinaryData.decode_accessor(gltf, prim.attributes['JOINTS_%d' % i], cache=True)
+                ws = BinaryData.decode_accessor(gltf, prim.attributes['WEIGHTS_%d' % i], cache=True)
+                js = js[unique_indices]
+                ws = ws[unique_indices]
             else:
-                # shouldn't happen
-                assert False
+                js = np.zeros((len(unique_indices), 4), dtype=np.uint32)
+                ws = np.zeros((len(unique_indices), 4), dtype=np.float32)
+            vert_joints[i] = np.concatenate((vert_joints[i], js))
+            vert_weights[i] = np.concatenate((vert_weights[i], ws))
 
-            face_idx += prim.num_faces
-        mesh.polygons.foreach_set('use_smooth', use_smooths)
+        for morph_i, target in enumerate(prim.targets or []):
+            if pymesh.shapekey_names[morph_i] is None:
+                continue
+            morph_vs = BinaryData.decode_accessor(gltf, target['POSITION'], cache=True)
+            morph_vs = morph_vs[unique_indices]
+            sk_vert_locs[morph_i] = np.concatenate((sk_vert_locs[morph_i], morph_vs))
 
-        # Custom normals, now that every update is done
-        if gltf.import_settings['import_shading'] == "NORMALS":
-            custom_normals = [v.normal for v in bme.verts]
-            mesh.normals_split_custom_set_from_vertices(custom_normals)
-            mesh.use_auto_smooth = True
+        # inv_indices are the indices into the verts just for this prim;
+        # calculate indices into the overall verts array
+        prim_vidxs = inv_indices.astype(np.uint32, copy=False)
+        prim_vidxs += vert_index_base  # offset for verts from previous prims
+
+        if edges is not None:
+            edge_vidxs = np.concatenate((edge_vidxs, prim_vidxs))
+
+        if tris is not None:
+            prim.num_faces = len(indices) // 3
+            num_faces += prim.num_faces
+
+            loop_vidxs = np.concatenate((loop_vidxs, prim_vidxs))
+
+            for uv_i in range(num_uvs):
+                if ('TEXCOORD_%d' % uv_i) in prim.attributes:
+                    uvs = BinaryData.decode_accessor(gltf, prim.attributes['TEXCOORD_%d' % uv_i], cache=True)
+                    uvs = uvs[indices]
+                else:
+                    uvs = np.zeros((len(indices), 2), dtype=np.float32)
+                loop_uvs[uv_i] = np.concatenate((loop_uvs[uv_i], uvs))
+
+            for col_i in range(num_cols):
+                if ('COLOR_%d' % col_i) in prim.attributes:
+                    cols = BinaryData.decode_accessor(gltf, prim.attributes['COLOR_%d' % col_i], cache=True)
+                    cols = cols[indices]
+                    if cols.shape[1] == 3:
+                        cols = colors_rgb_to_rgba(cols)
+                else:
+                    cols = np.ones((len(indices), 4), dtype=np.float32)
+                loop_cols[col_i] = np.concatenate((loop_cols[col_i], cols))
+
+    # Accessors are cached in case they are shared between primitives; clear
+    # the cache now that all prims are done.
+    gltf.decode_accessor_cache = {}
+
+    # ---------------
+    # Convert all the arrays glTF -> Blender
+
+    # Change from relative to absolute positions for morph locs
+    for sk_locs in sk_vert_locs:
+        sk_locs += vert_locs
+
+    if gltf.yup2zup:
+        locs_yup_to_zup(vert_locs)
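+        # The Y-up to Z-up conversion is a pure axis permutation/sign flip,
+        # so normals convert the same way as locations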
+        locs_yup_to_zup(vert_normals)
+        for sk_locs in sk_vert_locs:
+            locs_yup_to_zup(sk_locs)
+
+    if num_joint_sets:
+        skin_into_bind_pose(
+            gltf, skin_idx, vert_joints, vert_weights,
+            locs=[vert_locs] + sk_vert_locs,
+            vert_normals=vert_normals,
+        )
+
+    for uvs in loop_uvs:
+        uvs_gltf_to_blender(uvs)
+
+    for cols in loop_cols:
+        colors_linear_to_srgb(cols[:, :-1])
+
+    # ---------------
+    # Start creating things
+
+    mesh.vertices.add(len(vert_locs))
+    mesh.vertices.foreach_set('co', squish(vert_locs))
+
+    mesh.loops.add(len(loop_vidxs))
+    mesh.loops.foreach_set('vertex_index', loop_vidxs)
+
+    mesh.edges.add(len(edge_vidxs) // 2)
+    mesh.edges.foreach_set('vertices', edge_vidxs)
+
+    mesh.polygons.add(num_faces)
+
+    # All polys are tris
+    loop_starts = np.arange(0, 3 * num_faces, step=3)
+    loop_totals = np.full(num_faces, 3)
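+    # eg. 2 tris -> loop_starts [0, 3], loop_totals [3, 3]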
+    mesh.polygons.foreach_set('loop_start', loop_starts)
+    mesh.polygons.foreach_set('loop_total', loop_totals)
+
+    for uv_i in range(num_uvs):
+        name = 'UVMap' if uv_i == 0 else 'UVMap.%03d' % uv_i
+        layer = mesh.uv_layers.new(name=name)
+        layer.data.foreach_set('uv', squish(loop_uvs[uv_i]))
+
+    for col_i in range(num_cols):
+        name = 'Col' if col_i == 0 else 'Col.%03d' % col_i
+        layer = mesh.vertex_colors.new(name=name)
+
+        layer.data.foreach_set('color', squish(loop_cols[col_i]))
+
+    # Skinning
+    # TODO: this is slow :/
+    if num_joint_sets:
+        pyskin = gltf.data.skins[skin_idx]
+        for i, _ in enumerate(pyskin.joints):
+            # ob is a temp object, so don't worry about the name.
+            ob.vertex_groups.new(name='X%d' % i)
+
+        vgs = list(ob.vertex_groups)
+
+        for i in range(num_joint_sets):
+            js = vert_joints[i].tolist()  # tolist() is faster
+            ws = vert_weights[i].tolist()
+            for vi in range(len(vert_locs)):
+                w0, w1, w2, w3 = ws[vi]
+                j0, j1, j2, j3 = js[vi]
+                if w0 != 0: vgs[j0].add((vi,), w0, 'REPLACE')
+                if w1 != 0: vgs[j1].add((vi,), w1, 'REPLACE')
+                if w2 != 0: vgs[j2].add((vi,), w2, 'REPLACE')
+                if w3 != 0: vgs[j3].add((vi,), w3, 'REPLACE')
+
+    # Shapekeys
+    if num_shapekeys:
+        ob.shape_key_add(name='Basis')
+        mesh.shape_keys.name = mesh.name
+
+        sk_i = 0
+        for sk_name in pymesh.shapekey_names:
+            if sk_name is None:
+                continue
+
+            ob.shape_key_add(name=sk_name)
+            key_block = mesh.shape_keys.key_blocks[sk_name]
+            key_block.data.foreach_set('co', squish(sk_vert_locs[sk_i]))
+
+            sk_i += 1
+
+    # ----
+    # Assign materials to faces
+
+    # Initialize to no-material, i.e. an index guaranteed to be OOB for the
+    # material slots. A mesh obviously can't have more materials than it has
+    # primitives...
+    oob_material_idx = len(pymesh.primitives)
+    material_indices = np.full(num_faces, oob_material_idx)
+
+    f = 0
+    for prim in pymesh.primitives:
+        if prim.material is not None:
+            # Get the material
+            pymaterial = gltf.data.materials[prim.material]
+            vertex_color = 'COLOR_0' if 'COLOR_0' in prim.attributes else None
+            if vertex_color not in pymaterial.blender_material:
+                BlenderMaterial.create(gltf, prim.material, vertex_color)
+            material_name = pymaterial.blender_material[vertex_color]
+
+            # Put material in slot (if not there)
+            if material_name not in mesh.materials:
+                mesh.materials.append(bpy.data.materials[material_name])
+            material_index = mesh.materials.find(material_name)
+
+            material_indices[f:f + prim.num_faces].fill(material_index)
+
+        f += prim.num_faces
+
+    mesh.polygons.foreach_set('material_index', material_indices)
+
+    # ----
+    # Normals
+
+    # Set poly smoothing
+    # TODO: numpyify?
+    smooths = []  # use_smooth for each poly
+    f = 0
+    for prim in pymesh.primitives:
+        if gltf.import_settings['import_shading'] == "FLAT" or \
+                'NORMAL' not in prim.attributes:
+            smooths += [False] * prim.num_faces
+
+        elif gltf.import_settings['import_shading'] == "SMOOTH":
+            smooths += [True] * prim.num_faces
+
+        elif gltf.import_settings['import_shading'] == "NORMALS":
+            for fi in range(f, f + prim.num_faces):
+                # Make the face flat if the face's normal is
+                # equal to all of its loops' normals.
+                poly_normal = mesh.polygons[fi].normal
+                smooths.append(
+                    poly_normal.dot(vert_normals[loop_vidxs[3*fi + 0]]) <= 0.9999999 or
+                    poly_normal.dot(vert_normals[loop_vidxs[3*fi + 1]]) <= 0.9999999 or
+                    poly_normal.dot(vert_normals[loop_vidxs[3*fi + 2]]) <= 0.9999999
+                )
+
+        f += prim.num_faces
+
+    mesh.polygons.foreach_set('use_smooth', smooths)
+
+    mesh.validate()
+    has_loose_edges = len(edge_vidxs) != 0  # need to calc_loose_edges for them to show up
+    mesh.update(calc_edges_loose=has_loose_edges)
+
+    if has_normals:
+        mesh.create_normals_split()
+        mesh.normals_split_custom_set_from_vertices(vert_normals)
+        mesh.use_auto_smooth = True
+
+
+def points_edges_tris(mode, indices):
+    points = None
+    edges = None
+    tris = None
+
+    if mode == 0:
+        # POINTS
+        points = indices
+
+    elif mode == 1:
+        # LINES
+        #   1   3
+        #  /   /
+        # 0   2
+        edges = indices
+
+    elif mode == 2:
+        # LINE LOOP
+        #   1---2
+        #  /     \
+        # 0-------3
+        # in:  0123
+        # out: 01122330
+        edges = np.empty(2 * len(indices), dtype=np.uint32)
+        edges[[0, -1]] = indices[[0, 0]]  # 0______0
+        edges[1:-1] = np.repeat(indices[1:], 2)  # 01122330
+
+    elif mode == 3:
+        # LINE STRIP
+        #   1---2
+        #  /     \
+        # 0       3
+        # in:  0123
+        # out: 011223
+        edges = np.empty(2 * len(indices) - 2, dtype=np.uint32)
+        edges[[0, -1]] = indices[[0, -1]]  # 0____3
+        edges[1:-1] = np.repeat(indices[1:-1], 2)  # 011223
+
+    elif mode == 4:
+        # TRIANGLES
+        #   2     3
+        #  / \   / \
+        # 0---1 4---5
+        tris = indices
+
+    elif mode == 5:
+        # TRIANGLE STRIP
+        # 0---2---4
+        #  \ / \ /
+        #   1---3
+        # TODO: numpyify
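+        # Each consecutive triple is one tri; odd tris swap two verts to keep
+        # the winding consistent, eg. in: 01234 -> out: (0,1,2), (1,3,2), (2,3,4)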
+        def alternate(i, xs):
+            even = i % 2 == 0
+            return xs if even else (xs[0], xs[2], xs[1])
+        tris = np.array([
+            alternate(i, (indices[i], indices[i + 1], indices[i + 2]))
+            for i in range(0, len(indices) - 2)
+        ])
+        tris = squish(tris)
+
+    elif mode == 6:
+        # TRIANGLE FAN
+        #   3---2
+        #  / \ / \
+        # 4---0---1
+        # TODO: numpyify
+        tris = np.array([
+            (indices[0], indices[i], indices[i + 1])
+            for i in range(1, len(indices) - 1)
+        ])
+        tris = squish(tris)
+
+    else:
+        raise Exception('primitive mode unimplemented: %d' % mode)
+
+    return points, edges, tris
+
+
+def squish(array):
+    """Squish nD array into 1D array (required by foreach_set)."""
+    return array.reshape(array.size)
+
+
+def colors_rgb_to_rgba(rgb):
+    rgba = np.ones((len(rgb), 4), dtype=np.float32)
+    rgba[:, :3] = rgb
+    return rgba
+
+
+def colors_linear_to_srgb(color):
+    assert color.shape[1] == 3  # only change RGB, not A
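+    # Standard piecewise sRGB transfer function (negatives clamp to 0):
+    #   srgb = 12.92 * c                    if c < 0.0031308
+    #   srgb = 1.055 * c**(1/2.4) - 0.055   otherwise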
+
+    not_small = color >= 0.0031308
+    small_result = np.where(color < 0.0, 0.0, color * 12.92)
+    large_result = 1.055 * np.power(color, 1.0 / 2.4, where=not_small) - 0.055
+    color[:] = np.where(not_small, large_result, small_result)
+
+
+def locs_yup_to_zup(vecs):
+    # x,y,z -> x,-z,y
+    vecs[:, [1,2]] = vecs[:, [2,1]]
+    vecs[:, 1] *= -1
+
+
+def uvs_gltf_to_blender(uvs):
+    # u,v -> u,1-v
+    uvs[:, 1] *= -1
+    uvs[:, 1] += 1
+
+
+def skin_into_bind_pose(gltf, skin_idx, vert_joints, vert_weights, locs, vert_normals):
+    # Skin each position/normal using the bind pose.
+    # Skinning equation: vert' = sum_(j,w) w * joint_mat[j] * vert
+    # where the sum is over all (joint,weight) pairs.
+
+    # Calculate joint matrices
+    joint_mats = []
+    pyskin = gltf.data.skins[skin_idx]
+    if pyskin.inverse_bind_matrices is not None:
+        inv_binds = BinaryData.get_data_from_accessor(gltf, pyskin.inverse_bind_matrices)
+        inv_binds = [gltf.matrix_gltf_to_blender(m) for m in inv_binds]
+    else:
+        inv_binds = [Matrix.Identity(4) for i in range(len(pyskin.joints))]
+    bind_mats = [gltf.vnodes[joint].bind_arma_mat for joint in pyskin.joints]
+    joint_mats = [bind_mat @ inv_bind for bind_mat, inv_bind in zip(bind_mats, inv_binds)]
+
+    # TODO: check if joint_mats are all (approximately) 1, and skip skinning
+
+    joint_mats = np.array(joint_mats, dtype=np.float32)
+
+    # Compute the skinning matrices for every vert
+    num_verts = len(locs[0])
+    skinning_mats = np.zeros((num_verts, 4, 4), dtype=np.float32)
+    weight_sums = np.zeros(num_verts, dtype=np.float32)
+    for js, ws in zip(vert_joints, vert_weights):
+        for i in range(4):
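+            # ws[:, i] is (num_verts,); reshape to (num_verts, 1, 1) so it
+            # broadcasts against joint_mats[js[:, i]], which is (num_verts, 4, 4)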
+            skinning_mats += ws[:, i].reshape(len(ws), 1, 1) * joint_mats[js[:, i]]
+            weight_sums += ws[:, i]
+    # Normalize weights to one; necessary for old files / quantized weights
+    skinning_mats /= weight_sums.reshape(num_verts, 1, 1)
+
+    skinning_mats_3x3 = skinning_mats[:, :3, :3]
+    skinning_trans = skinning_mats[:, :3, 3]
+
+    for vs in locs:
+        vs[:] = mul_mats_vecs(skinning_mats_3x3, vs)
+        vs[:] += skinning_trans
+
+    if len(vert_normals) != 0:
+        vert_normals[:] = mul_mats_vecs(skinning_mats_3x3, vert_normals)
+        # Don't translate normals!
+
+
+def mul_mats_vecs(mats, vecs):
+    """Given [m1,m2,...] and [v1,v2,...], returns [m1@v1,m2@v2,...]. 3D only."""
+    return np.matmul(mats, vecs.reshape(len(vecs), 3, 1)).reshape(len(vecs), 3)
diff --git a/io_scene_gltf2/blender/imp/gltf2_blender_primitive.py b/io_scene_gltf2/blender/imp/gltf2_blender_primitive.py
deleted file mode 100755
index d544778ca..000000000
--- a/io_scene_gltf2/blender/imp/gltf2_blender_primitive.py
+++ /dev/null
@@ -1,344 +0,0 @@
-# Copyright 2018-2019 The glTF-Blender-IO authors.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import bpy
-from mathutils import Vector, Matrix
-
-from ...io.imp.gltf2_io_binary import BinaryData
-from ...io.com.gltf2_io_color_management import color_linear_to_srgb
-from ...io.com import gltf2_io_debug
-
-
-MAX_NUM_COLOR_SETS = 8
-MAX_NUM_TEXCOORD_SETS = 8
-
-class BlenderPrimitive():
-    """Blender Primitive."""
-    def __new__(cls, *args, **kwargs):
-        raise RuntimeError("%s should not be instantiated" % cls)
-
-    @staticmethod
-    def get_layer(bme_layers, name):
-        if name not in bme_layers:
-            return bme_layers.new(name)
-        return bme_layers[name]
-
-    @staticmethod
-    def add_primitive_to_bmesh(gltf, bme, pymesh, pyprimitive, skin_idx, material_index):
-        attributes = pyprimitive.attributes
-
-        if 'POSITION' not in attributes:
-            pyprimitive.num_faces = 0
-            return
-
-        positions = BinaryData.get_data_from_accessor(gltf, attributes['POSITION'], cache=True)
-
-        if pyprimitive.indices is not None:
-            # Not using cache, this is not useful for indices
-            indices = BinaryData.get_data_from_accessor(gltf, pyprimitive.indices)
-            indices = [i[0] for i in indices]
-        else:
-            indices = list(range(len(positions)))
-
-        bme_verts = bme.verts
-        bme_edges = bme.edges
-        bme_faces = bme.faces
-
-        # Gather up the joints/weights (multiple sets allow >4 influences)
-        joint_sets = []
-        weight_sets = []
-        set_num = 0
-        while 'JOINTS_%d' % set_num in attributes and 'WEIGHTS_%d' % set_num in attributes:
-            joint_data = BinaryData.get_data_from_accessor(gltf, attributes['JOINTS_%d' % set_num], cache=True)
-            weight_data = BinaryData.get_data_from_accessor(gltf, attributes['WEIGHTS_%d' % set_num], cache=True)
-
-            joint_sets.append(joint_data)
-            weight_sets.append(weight_data)
-
-            set_num += 1
-
-        # For skinned meshes, we will need to calculate the position of the
-        # verts in the bind pose, ie. the pose the edit bones are in.
-        if skin_idx is not None:
-            pyskin = gltf.data.skins[skin_idx]
-            if pyskin.inverse_bind_matrices is not None:
-                inv_binds = BinaryData.get_data_from_accessor(gltf, pyskin.inverse_bind_matrices)
-                inv_binds = [gltf.matrix_gltf_to_blender(m) for m in inv_binds]
-            else:
-                inv_binds = [Matrix.Identity(4) for i in range(len(pyskin.joints))]
-            bind_mats = [gltf.vnodes[joint].bind_arma_mat for joint in pyskin.joints]
-            joint_mats = [bind_mat @ inv_bind for bind_mat, inv_bind in zip(bind_mats, inv_binds)]
-
-            def skin_vert(pos, pidx):
-                out = Vector((0, 0, 0))
-                # Spec says weights should already sum to 1 but some models
-                # don't do it (ex. CesiumMan), so normalize.
-                weight_sum = 0
-                for joint_set, weight_set in zip(joint_sets, weight_sets):
-                    for j in range(0, 4):
-                        weight = weight_set[pidx][j]
-                        if weight != 0.0:
-                            weight_sum += weight
-                            joint = joint_set[pidx][j]
-                            out += weight * (joint_mats[joint] @ pos)
-                out /= weight_sum
-                return out
-
-            def skin_normal(norm, pidx):
-                # TODO: not sure this is right
-                norm = Vector([norm[0], norm[1], norm[2], 0])
-                out = Vector((0, 0, 0, 0))
-                weight_sum = 0
-                for joint_set, weight_set in zip(joint_sets, weight_sets):
-                    for j in range(0, 4):
-                        weight = weight_set[pidx][j]
-                        if weight != 0.0:
-                            weight_sum += weight
-                            joint = joint_set[pidx][j]
-                            out += weight * (joint_mats[joint] @ norm)
-                out /= weight_sum
-                out = out.to_3d().normalized()
-                return out
-
-        # Every vertex has an index into the primitive's attribute arrays and a
-        #  *different* index into the BMesh's list of verts. Call the first one the
-        #  pidx and the second the bidx. Need to keep them straight!
-
-        # The pidx of all the vertices that are actually used by the primitive (only
-        # indices that appear in the pyprimitive.indices list are actually used)
-        used_pidxs = set(indices)
-        # Contains a pair (bidx, pidx) for every vertex in the primitive
-        vert_idxs = []
-        # pidx_to_bidx[pidx] will be the bidx of the vertex with that pidx (or -1 if
-        # unused)
-        pidx_to_bidx = [-1] * len(positions)
-        bidx = len(bme_verts)
-        if bpy.app.debug:
-            used_pidxs = list(used_pidxs)
-            used_pidxs.sort()
-        for pidx in used_pidxs:
-            pos = gltf.loc_gltf_to_blender(positions[pidx])
-            if skin_idx is not None:
-                pos = skin_vert(pos, pidx)
-
-            bme_verts.new(pos)
-            vert_idxs.append((bidx, pidx))
-            pidx_to_bidx[pidx] = bidx
-            bidx += 1
-        bme_verts.ensure_lookup_table()
-
-        # Add edges/faces to bmesh
-        mode = 4 if pyprimitive.mode is None else pyprimitive.mode
-        edges, faces = BlenderPrimitive.edges_and_faces(mode, indices)
-        # NOTE: edges and vertices are in terms of pidxs!
-        for edge in edges:
-            try:
-                bme_edges.new((
-                    bme_verts[pidx_to_bidx[edge[0]]],
-                    bme_verts[pidx_to_bidx[edge[1]]],
-                ))
-            except ValueError:
-                # Ignores duplicate/degenerate edges
-                pass
-        pyprimitive.num_faces = 0
-        for face in faces:
-            try:
-                face = bme_faces.new((
-                    bme_verts[pidx_to_bidx[face[0]]],
-                    bme_verts[pidx_to_bidx[face[1]]],
-                    bme_verts[pidx_to_bidx[face[2]]],
-                ))
-
-                if material_index is not None:
-                    face.material_index = material_index
-
-                pyprimitive.num_faces += 1
-
-            except ValueError:
-                # Ignores duplicate/degenerate faces
-                pass
-
-        # Set normals
-        if 'NORMAL' in attributes:
-            normals = BinaryData.get_data_from_accessor(gltf, attributes['NORMAL'], cache=True)
-
-            if skin_idx is None:
-                for bidx, pidx in vert_idxs:
-                    bme_verts[bidx].normal = gltf.normal_gltf_to_blender(normals[pidx])
-            else:
-                for bidx, pidx in vert_idxs:
-                    normal = gltf.normal_gltf_to_blender(normals[pidx])
-                    bme_verts[bidx].normal = skin_normal(normal, pidx)
-
-        # Set vertex colors. Add them in the order COLOR_0, COLOR_1, etc.
-        set_num = 0
-        while 'COLOR_%d' % set_num in attributes:
-            if set_num >= MAX_NUM_COLOR_SETS:
-                gltf2_io_debug.print_console("WARNING",
-                    "too many color sets; COLOR_%d will be ignored" % set_num
-                )
-                break
-
-            layer_name = 'Col' if set_num == 0 else 'Col.%03d' % set_num
-            layer = BlenderPrimitive.get_layer(bme.loops.layers.color, layer_name)
-
-            colors = BinaryData.get_data_from_accessor(gltf, attributes['COLOR_%d' % set_num], cache=True)
-            is_rgba = len(colors[0]) == 4
-
-            for bidx, pidx in vert_idxs:
-                color = colors[pidx]
-                col = (
-                    color_linear_to_srgb(color[0]),
-                    color_linear_to_srgb(color[1]),
-                    color_linear_to_srgb(color[2]),
-                    color[3] if is_rgba else 1.0,
-                )
-                for loop in bme_verts[bidx].link_loops:
-                    loop[layer] = col
-
-            set_num += 1
-
-        # Set texcoords
-        set_num = 0
-        while 'TEXCOORD_%d' % set_num in attributes:
-            if set_num >= MAX_NUM_TEXCOORD_SETS:
-                gltf2_io_debug.print_console("WARNING",
-                    "too many UV sets; TEXCOORD_%d will be ignored" % set_num
-                )
-                break
-
-            layer_name = 'UVMap' if set_num == 0 else 'UVMap.%03d' % set_num
-            layer = BlenderPrimitive.get_layer(bme.loops.layers.uv, layer_name)
-
-            uvs = BinaryData.get_data_from_accessor(gltf, attributes['TEXCOORD_%d' % set_num], cache=True)
-
-            for bidx, pidx in vert_idxs:
-                # UV transform
-                u, v = uvs[pidx]
-                uv = (u, 1 - v)
-
-                for loop in bme_verts[bidx].link_loops:
-                    loop[layer].uv = uv
-
-            set_num += 1
-
-        # Set joints/weights for skinning
-        if joint_sets:
-            layer = BlenderPrimitive.get_layer(bme.verts.layers.deform, 'Vertex Weights')
-
-            for joint_set, weight_set in zip(joint_sets, weight_sets):
-                for bidx, pidx in vert_idxs:
-                    for j in range(0, 4):
-                        weight = weight_set[pidx][j]
-                        if weight != 0.0:
-                            joint = joint_set[pidx][j]
-                            bme_verts[bidx][layer][joint] = weight
-
-        # Set morph target positions (no normals/tangents)
-        for sk, target in enumerate(pyprimitive.targets or []):
-            if pymesh.shapekey_names[sk] is None:
-                continue
-
-            layer_name = pymesh.shapekey_names[sk]
-            layer = BlenderPrimitive.get_layer(bme.verts.layers.shape, layer_name)
-
-            morph_positions = BinaryData.get_data_from_accessor(gltf, target['POSITION'], cache=True)
-
-            if skin_idx is None:
-                for bidx, pidx in vert_idxs:
-                    bme_verts[bidx][layer] = (
-                        gltf.loc_gltf_to_blender(positions[pidx]) +
-                        gltf.loc_gltf_to_blender(morph_positions[pidx])
-                    )
-            else:
-                for bidx, pidx in vert_idxs:
-                    pos = (
-                        gltf.loc_gltf_to_blender(positions[pidx]) +
-                        gltf.loc_gltf_to_blender(morph_positions[pidx])
-                    )
-                    bme_verts[bidx][layer] = skin_vert(pos, pidx)
-
-    @staticmethod
-    def edges_and_faces(mode, indices):
-        """Converts the indices in a particular primitive mode into standard lists of
-        edges (pairs of indices) and faces (tuples of CCW indices).
-        """
-        es = []
-        fs = []
-
-        if mode == 0:
-            # POINTS
-            pass
-        elif mode == 1:
-            # LINES
-            #   1   3
-            #  /   /
-            # 0   2
-            es = [
-                (indices[i], indices[i + 1])
-                for i in range(0, len(indices), 2)
-            ]
-        elif mode == 2:
-            # LINE LOOP
-            #   1---2
-            #  /     \
-            # 0-------3
-            es = [
-                (indices[i], indices[i + 1])
-                for i in range(0, len(indices) - 1)
-            ]
-            es.append((indices[-1], indices[0]))
-        elif mode == 3:
-            # LINE STRIP
-            #   1---2
-            #  /     \
-            # 0       3
-            es = [
-                (indices[i], indices[i + 1])
-                for i in range(0, len(indices) - 1)
-            ]
-        elif mode == 4:
-            # TRIANGLES
-            #   2     3
-            #  / \   / \
-            # 0---1 4---5
-            fs = [
-                (indices[i], indices[i + 1], indices[i + 2])
-                for i in range(0, len(indices), 3)
-            ]
-        elif mode == 5:
-            # TRIANGLE STRIP
-            # 0---2---4
-            #  \ / \ /
-            #   1---3
-            def alternate(i, xs):
-                even = i % 2 == 0
-                return xs if even else (xs[0], xs[2], xs[1])
-            fs = [
-                alternate(i, (indices[i], indices[i + 1], indices[i + 2]))
-                for i in range(0, len(indices) - 2)
-            ]
-        elif mode == 6:
-            # TRIANGLE FAN
-            #   3---2
-            #  / \ / \
-            # 4---0---1
-            fs = [
-                (indices[0], indices[i], indices[i + 1])
-                for i in range(1, len(indices) - 1)
-            ]
-        else:
-            raise Exception('primitive mode unimplemented: %d' % mode)
-
-        return es, fs
diff --git a/io_scene_gltf2/io/imp/gltf2_io_binary.py b/io_scene_gltf2/io/imp/gltf2_io_binary.py
index 7cfcbc40e..728cf0f03 100755
--- a/io_scene_gltf2/io/imp/gltf2_io_binary.py
+++ b/io_scene_gltf2/io/imp/gltf2_io_binary.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import struct
+import numpy as np
 
 from ..com.gltf2_io import Accessor
 
@@ -22,8 +23,8 @@ class BinaryData():
     def __new__(cls, *args, **kwargs):
         raise RuntimeError("%s should not be instantiated" % cls)
 
-# Note that this function is not used in Blender importer, but is kept in
-# Source code to be used in any pipeline that want to manage gltf/glb file in python
+    # Note that this function is not used by the Blender importer, but is kept
+    # in the source code for use by any pipeline that wants to manage glTF/glb files in Python.
     @staticmethod
     def get_binary_from_accessor(gltf, accessor_idx):
         """Get binary from accessor."""
@@ -63,8 +64,7 @@ class BinaryData():
         if accessor_idx in gltf.accessor_cache:
             return gltf.accessor_cache[accessor_idx]
 
-        accessor = gltf.data.accessors[accessor_idx]
-        data = BinaryData.get_data_from_accessor_obj(gltf, accessor)
+        data = BinaryData.decode_accessor(gltf, accessor_idx).tolist()
 
         if cache:
             gltf.accessor_cache[accessor_idx] = data
@@ -72,7 +72,36 @@ class BinaryData():
         return data
 
     @staticmethod
-    def get_data_from_accessor_obj(gltf, accessor):
+    def decode_accessor(gltf, accessor_idx, cache=False):
+        """Decodes accessor to 2D numpy array (count x num_components)."""
+        if accessor_idx in gltf.decode_accessor_cache:
+            return gltf.decode_accessor_cache[accessor_idx]
+
+        accessor = gltf.data.accessors[accessor_idx]
+        array = BinaryData.decode_accessor_obj(gltf, accessor)
+
+        if cache:
+            gltf.decode_accessor_cache[accessor_idx] = array
+            # Prevent accidentally modifying cached arrays
+            array.flags.writeable = False
+
+        return array
+
+    @staticmethod
+    def decode_accessor_obj(gltf, accessor):
+        # MAT2/3 have special alignment requirements that aren't handled. But it
+        # doesn't matter because nothing uses them.
+        assert accessor.type not in ['MAT2', 'MAT3']
+
+        dtype = {
+            5120: np.int8,
+            5121: np.uint8,
+            5122: np.int16,
+            5123: np.uint16,
+            5125: np.uint32,
+            5126: np.float32,
+        }[accessor.component_type]
+
         if accessor.buffer_view is not None:
             bufferView = gltf.data.buffer_views[accessor.buffer_view]
             buffer_data = BinaryData.get_buffer_view(gltf, accessor.buffer_view)
@@ -80,40 +109,45 @@ class BinaryData():
             accessor_offset = accessor.byte_offset or 0
             buffer_data = buffer_data[accessor_offset:]
 
-            fmt_char = gltf.fmt_char_dict[accessor.component_type]
             component_nb = gltf.component_nb_dict[accessor.type]
-            fmt = '<' + (fmt_char * component_nb)
-            default_stride = struct.calcsize(fmt)
-
-            # Special layouts for certain formats; see the section about
-            # data alignment in the glTF 2.0 spec.
-            component_size = struct.calcsize('<' + fmt_char)
-            if accessor.type == 'MAT2' and component_size == 1:
-                fmt = '<FFxxFF'.replace('F', fmt_char)
-                default_stride = 8
-            elif accessor.type == 'MAT3' and component_size == 1:
-                fmt = '<FFFxFFFxFFF'.replace('F', fmt_char)
-                default_stride = 12
-            elif accessor.type == 'MAT3' and component_size == 2:
-                fmt = '<FFFxxFFFxxFFF'.replace('F', fmt_char)
-                default_stride = 24
+            bytes_per_elem = dtype(1).nbytes
 
+            default_stride = bytes_per_elem * component_nb
             stride = bufferView.byte_stride or default_stride
 
-            # Decode
-            unpack_from = struct.Struct(fmt).unpack_from
-            data = [
-                unpack_from(buffer_data, offset)
-                for offset in range(0, accessor.count*stride, stride)
-            ]
+            if stride == default_stride:
+                array = np.frombuffer(
+                    buffer_data,
+                    dtype=np.dtype(dtype).newbyteorder('<'),
+                    count=accessor.count * component_nb,
+                )
+                array = array.reshape(accessor.count, component_nb)
+
+            else:
+                # The data looks like
+                #   XXXppXXXppXXXppXXX
+                # where X are the components and p are padding.
+                # One XXXpp group is one stride's worth of data.
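+                # We read through the last element's components but skip its
+                # trailing padding, eg. 3 VEC3 floats with a 16-byte stride:
+                # elems_per_stride is 4 and num_elems is 2*4 + 3 = 11.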
+                assert stride % bytes_per_elem == 0
+                elems_per_stride = stride // bytes_per_elem
+                num_elems = (accessor.count - 1) * elems_per_stride + component_nb
+
+                array = np.frombuffer(
+                    buffer_data,
+                    dtype=np.dtype(dtype).newbyteorder('<'),
+                    count=num_elems,
+                )
+                assert array.strides[0] == bytes_per_elem
+                array = np.lib.stride_tricks.as_strided(
+                    array,
+                    shape=(accessor.count, component_nb),
+                    strides=(stride, bytes_per_elem),
+                )
 
         else:
             # No buffer view; initialize to zeros
             component_nb = gltf.component_nb_dict[accessor.type]
-            data = [
-                (0,) * component_nb
-                for i in range(accessor.count)
-            ]
+            array = np.zeros((accessor.count, component_nb), dtype=dtype)
 
         if accessor.sparse:
             sparse_indices_obj = Accessor.from_dict({
@@ -123,6 +157,9 @@ class BinaryData():
                 'componentType': accessor.sparse.indices.component_type,
                 'type': 'SCALAR',
             })
+            sparse_indices = BinaryData.decode_accessor_obj(gltf, sparse_indices_obj)
+            sparse_indices = sparse_indices.reshape(len(sparse_indices))
+
             sparse_values_obj = Accessor.from_dict({
                 'count': accessor.sparse.count,
                 'bufferView': accessor.sparse.values.buffer_view,
@@ -130,31 +167,26 @@ class BinaryData():
                 'componentType': accessor.component_type,
                 'type': accessor.type,
             })
-            sparse_indices = BinaryData.get_data_from_accessor_obj(gltf, sparse_indices_obj)
-            sparse_values = BinaryData.get_data_from_accessor_obj(gltf, sparse_values_obj)
+            sparse_values = BinaryData.decode_accessor_obj(gltf, sparse_values_obj)
 
-            # Apply sparse
-            for i in range(accessor.sparse.count):
-                data[sparse_indices[i][0]] = sparse_values[i]
+            if not array.flags.writeable:
+                array = array.copy()
+            array[sparse_indices] = sparse_values
 
         # Normalization
         if accessor.normalized:
-            for idx, tuple in enumerate(data):
-                new_tuple = ()
-                for i in tuple:
-                    if accessor.component_type == 5120:
-                        new_tuple += (max(float(i / 127.0 ), -1.0),)
-                    elif accessor.component_type == 5121:
-                        new_tuple += (float(i / 255.0),)
-                    elif accessor.component_type == 5122:
-                        new_tuple += (max(float(i / 32767.0), -1.0),)
-                    elif accessor.component_type == 5123:
-                        new_tuple += (i / 65535.0,)
-                    else:
-                        new_tuple += (float(i),)
-                data[idx] = new_tuple
-
-        return data
+            if accessor.component_type == 5120:  # int8
+                array = np.maximum(-1.0, array / 127.0)
+            elif accessor.component_type == 5121:  # uint8
+                array = array / 255.0
+            elif accessor.component_type == 5122:  # int16
+                array = np.maximum(-1.0, array / 32767.0)
+            elif accessor.component_type == 5123:  # uint16
+                array = array / 65535.0
+            else:
+                array = array.astype(np.float64)
+
+        return array
 
     @staticmethod
     def get_image_data(gltf, img_idx):
diff --git a/io_scene_gltf2/io/imp/gltf2_io_gltf.py b/io_scene_gltf2/io/imp/gltf2_io_gltf.py
index e63f1f551..c494e966c 100755
--- a/io_scene_gltf2/io/imp/gltf2_io_gltf.py
+++ b/io_scene_gltf2/io/imp/gltf2_io_gltf.py
@@ -32,6 +32,7 @@ class glTFImporter():
         self.glb_buffer = None
         self.buffers = {}
         self.accessor_cache = {}
+        self.decode_accessor_cache = {}
 
         if 'loglevel' not in self.import_settings.keys():
             self.import_settings['loglevel'] = logging.ERROR
-- 
GitLab