diff --git a/blenderkit/autothumb.py b/blenderkit/autothumb.py
index 0008d670891fcfc02cd8a1d917c17b6c36b2b2ae..da6e6d29d7199d54cb4c8b4d4f4ec4e395e6386e 100644
--- a/blenderkit/autothumb.py
+++ b/blenderkit/autothumb.py
@@ -120,7 +120,7 @@ def start_thumbnailer(self, context):
         obnames = []
         for ob in obs:
             obnames.append(ob.name)
-        with open(datafile, 'w') as s:
+        with open(datafile, 'w', encoding='utf-8') as s:
             bkit = mainmodel.blenderkit
             json.dump({
                 "type": "model",
@@ -131,7 +131,7 @@ def start_thumbnailer(self, context):
                 "thumbnail_resolution": bkit.thumbnail_resolution,
                 "thumbnail_samples": bkit.thumbnail_samples,
                 "thumbnail_denoising": bkit.thumbnail_denoising,
-            }, s)
+            }, s, ensure_ascii=False, indent=4)
 
         proc = subprocess.Popen([
             binary_path,
@@ -190,7 +190,7 @@ def start_material_thumbnailer(self, context, wait=False):
         # save a copy of actual scene but don't interfere with the users models
         bpy.ops.wm.save_as_mainfile(filepath=filepath, compress=False, copy=True)
 
-        with open(datafile, 'w') as s:
+        with open(datafile, 'w', encoding='utf-8') as s:
             bkit = mat.blenderkit
             json.dump({
                 "type": "material",
@@ -204,7 +204,7 @@ def start_material_thumbnailer(self, context, wait=False):
                 "thumbnail_denoising": bkit.thumbnail_denoising,
                 "adaptive_subdivision": bkit.adaptive_subdivision,
                 "texture_size_meters": bkit.texture_size_meters,
-            }, s)
+            }, s, ensure_ascii=False, indent=4)
 
         proc = subprocess.Popen([
             binary_path,
diff --git a/blenderkit/autothumb_material_bg.py b/blenderkit/autothumb_material_bg.py
index 3e1c2f2b372f589f6c11015e588db0deef5a7bf2..d27d7b90ad05903da48733b52dd54333c6aad662 100644
--- a/blenderkit/autothumb_material_bg.py
+++ b/blenderkit/autothumb_material_bg.py
@@ -44,7 +44,7 @@ def unhide_collection(cname):
 if __name__ == "__main__":
     try:
         bg_blender.progress('preparing thumbnail scene')
-        with open(BLENDERKIT_EXPORT_DATA, 'r') as s:
+        with open(BLENDERKIT_EXPORT_DATA, 'r', encoding='utf-8') as s:
             data = json.load(s)
             # append_material(file_name, matname = None, link = False, fake_user = True)
         mat = append_link.append_material(file_name=BLENDERKIT_EXPORT_FILE_INPUT, matname=data["material"], link=True,
diff --git a/blenderkit/autothumb_model_bg.py b/blenderkit/autothumb_model_bg.py
index ebb509ffe93016e9b48fcbdc6efbb95a61590ddc..87acfa1904a0c9b8c761f3eb219969a525bd53f3 100644
--- a/blenderkit/autothumb_model_bg.py
+++ b/blenderkit/autothumb_model_bg.py
@@ -32,7 +32,7 @@ BLENDERKIT_EXPORT_DATA = sys.argv[-4]
 
 
 def get_obnames():
-    with open(BLENDERKIT_EXPORT_DATA, 'r') as s:
+    with open(BLENDERKIT_EXPORT_DATA, 'r', encoding='utf-8') as s:
         data = json.load(s)
     obnames = eval(data['models'])
     return obnames
@@ -79,7 +79,7 @@ def render_thumbnails():
 
 if __name__ == "__main__":
     try:
-        with open(BLENDERKIT_EXPORT_DATA, 'r') as s:
+        with open(BLENDERKIT_EXPORT_DATA, 'r', encoding='utf-8') as s:
             data = json.load(s)
 
         user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
diff --git a/blenderkit/categories.py b/blenderkit/categories.py
index f452120b67d340f2af7ad15cbe666c18a34ea03b..71e14f64c3c1bcf5c1874aa6989dec07d42172a2 100644
--- a/blenderkit/categories.py
+++ b/blenderkit/categories.py
@@ -168,7 +168,7 @@ def load_categories():
 
     wm = bpy.context.window_manager
     try:
-        with open(categories_filepath, 'r') as catfile:
+        with open(categories_filepath, 'r', encoding='utf-8') as catfile:
             wm['bkit_categories'] = json.load(catfile)
 
         wm['active_category'] = {
@@ -207,8 +207,8 @@ def fetch_categories(API_key, force = False):
             categories = rdata['results']
             fix_category_counts(categories)
             # filter_categories(categories) #TODO this should filter categories for search, but not for upload. by now off.
-            with open(categories_filepath, 'w') as s:
-                json.dump(categories, s, indent=4)
+            with open(categories_filepath, 'w', encoding='utf-8') as s:
+                json.dump(categories, s, ensure_ascii=False, indent=4)
         tasks_queue.add_task((load_categories, ()))
     except Exception as e:
         bk_logger.debug('category fetching failed')
diff --git a/blenderkit/resolutions.py b/blenderkit/resolutions.py
index 03026c683c2a1f71d409b617a44560c312139b14..a5b5d72305fb37039b1e30d50c638313fac44e06 100644
--- a/blenderkit/resolutions.py
+++ b/blenderkit/resolutions.py
@@ -632,8 +632,8 @@ def get_assets_search():
             retries += 1
 
     fpath = assets_db_path()
-    with open(fpath, 'w') as s:
-        json.dump(results, s)
+    with open(fpath, 'w', encoding='utf-8') as s:
+        json.dump(results, s, ensure_ascii=False, indent=4)
 
 
 def get_assets_for_resolutions(page_size=100, max_results=100000000):
@@ -698,13 +698,13 @@ def get_materials_for_validation(page_size=100, max_results=100000000):
 #             retries += 1
 #
 #     fpath = assets_db_path()
-#     with open(fpath, 'w') as s:
-#         json.dump(results, s)
+#     with open(fpath, 'w', encoding='utf-8') as s:
+#         json.dump(results, s, ensure_ascii=False, indent=4)
 
 
 def load_assets_list(filepath):
     if os.path.exists(filepath):
-        with open(filepath, 'r') as s:
+        with open(filepath, 'r', encoding='utf-8') as s:
             assets = json.load(s)
     return assets
 
@@ -821,8 +821,8 @@ def send_to_bg(asset_data, fpath, command='generate_resolutions', wait=True):
     tempdir = tempfile.mkdtemp()
     datafile = os.path.join(tempdir + 'resdata.json')
     script_path = os.path.dirname(os.path.realpath(__file__))
-    with open(datafile, 'w') as s:
-        json.dump(data, s)
+    with open(datafile, 'w', encoding='utf-8') as s:
+        json.dump(data, s, ensure_ascii=False, indent=4)
 
     print('opening Blender instance to do processing - ', command)
 
@@ -856,7 +856,7 @@ def write_data_back(asset_data):
 
 def run_bg(datafile):
     print('background file operation')
-    with open(datafile, 'r') as f:
+    with open(datafile, 'r', encoding='utf-8') as f:
         data = json.load(f)
     bpy.app.debug_value = data['debug_value']
     write_data_back(data['asset_data'])
diff --git a/blenderkit/search.py b/blenderkit/search.py
index 2252d3f1521efe88c9fbf41d6c248bf848b13036..484830e0f09b28aa18af5d38498ff77014254c4f 100644
--- a/blenderkit/search.py
+++ b/blenderkit/search.py
@@ -1017,8 +1017,8 @@ class Searcher(threading.Thread):
         if params['get_next']:
             rdata['results'][0:0] = self.result['results']
         self.result = rdata
-        # with open(json_filepath, 'w') as outfile:
-        #     json.dump(rdata, outfile)
+        # with open(json_filepath, 'w', encoding='utf-8') as outfile:
+        #     json.dump(rdata, outfile, ensure_ascii=False, indent=4)
 
         killthreads_sml = []
         for k in thumb_sml_download_threads.keys():
@@ -1311,8 +1311,8 @@ def get_search_simple(parameters, filepath=None, page_size=100, max_results=1000
     if not filepath:
         return results
 
-    with open(filepath, 'w') as s:
-        json.dump(results, s)
+    with open(filepath, 'w', encoding='utf-8') as s:
+        json.dump(results, s, ensure_ascii=False, indent=4)
     bk_logger.info(f'retrieved {len(results)} assets from elastic search')
     return results
 
diff --git a/blenderkit/upload.py b/blenderkit/upload.py
index e811eaa3e4ce90eb484eac723cb4f4f17a381064..6915023048c41d7bb1ec85280a216d935d93a8ef 100644
--- a/blenderkit/upload.py
+++ b/blenderkit/upload.py
@@ -893,8 +893,8 @@ class Uploader(threading.Thread):
                     }
                     datafile = os.path.join(self.export_data['temp_dir'], BLENDERKIT_EXPORT_DATA_FILE)
 
-                    with open(datafile, 'w') as s:
-                        json.dump(data, s)
+                    with open(datafile, 'w', encoding='utf-8') as s:
+                        json.dump(data, s, ensure_ascii=False, indent=4)
 
                     # non waiting method - not useful here..
                     # proc = subprocess.Popen([
diff --git a/blenderkit/upload_bg.py b/blenderkit/upload_bg.py
index 2e27dbf1e1817c9cab3ed973373304d7ccfe240d..685e280a57b19eb0b978dee6fc796d6b6aff033a 100644
--- a/blenderkit/upload_bg.py
+++ b/blenderkit/upload_bg.py
@@ -126,7 +126,7 @@ def upload_files(upload_data, files):
 if __name__ == "__main__":
     try:
         # bg_blender.progress('preparing scene - append data')
-        with open(BLENDERKIT_EXPORT_DATA, 'r') as s:
+        with open(BLENDERKIT_EXPORT_DATA, 'r', encoding='utf-8') as s:
             data = json.load(s)
 
         bpy.app.debug_value = data.get('debug_value', 0)
diff --git a/blenderkit/utils.py b/blenderkit/utils.py
index e0ced9b1329020ad476361d7a8584b4d4895ad44..94f795c14071e2a81405bc474651b8348447414e 100644
--- a/blenderkit/utils.py
+++ b/blenderkit/utils.py
@@ -254,7 +254,7 @@ def load_prefs():
     fpath = paths.BLENDERKIT_SETTINGS_FILENAME
     if os.path.exists(fpath):
         try:
-            with open(fpath, 'r') as s:
+            with open(fpath, 'r', encoding='utf-8') as s:
                 prefs = json.load(s)
                 user_preferences.api_key = prefs.get('API_key', '')
                 user_preferences.global_dir = prefs.get('global_dir', paths.default_global_dict())
@@ -262,6 +262,7 @@ def load_prefs():
         except Exception as e:
             print('failed to read addon preferences.')
             print(e)
+            os.remove(fpath)
 
 
 def save_prefs(self, context):
@@ -285,8 +286,8 @@ def save_prefs(self, context):
             fpath = paths.BLENDERKIT_SETTINGS_FILENAME
             if not os.path.exists(paths._presets):
                 os.makedirs(paths._presets)
-            with open(fpath, 'w') as s:
-                json.dump(prefs, s)
+            with open(fpath, 'w', encoding='utf-8') as s:
+                json.dump(prefs, s, ensure_ascii=False, indent=4)
         except Exception as e:
             print(e)
 
diff --git a/blenderkit/version_checker.py b/blenderkit/version_checker.py
index 49423dbbbbf3054fa0d3bd7529e7ab80fd9ca958..993ff238193763f0fc8d49043c361fc5e252a782 100644
--- a/blenderkit/version_checker.py
+++ b/blenderkit/version_checker.py
@@ -43,8 +43,8 @@ def check_version(url, api_key, module):
         tempdir = paths.get_temp_dir()
 
         ver_filepath = os.path.join(tempdir, 'addon_version.json')
-        with open(ver_filepath, 'w') as s:
-            json.dump(ver_online, s, indent=4)
+        with open(ver_filepath, 'w', encoding='utf-8') as s:
+            json.dump(ver_online, s, ensure_ascii=False, indent=4)
     except:
         print("couldn't check online for version updates")
 
@@ -56,7 +56,7 @@ def compare_versions(module):
 
         tempdir = paths.get_temp_dir()
         ver_filepath = os.path.join(tempdir, 'addon_version.json')
-        with open(ver_filepath, 'r') as s:
+        with open(ver_filepath, 'r', encoding='utf-8') as s:
             data = json.load(s)
 
         ver_online = data['addonVersion2.8'].split('.')