From af50ac234088b0d7eeab54cd498c0318a1f95c76 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Vil=C3=A9m=20Duha?= <vilda.novak@gmail.com>
Date: Mon, 18 Jan 2021 14:16:58 +0100
Subject: [PATCH] BlenderKit: fix T84766 - Specify utf-8 encoding for all JSON
 file writes/reads

The explicit encoding is used in various places in the add-on and was
introduced to fix (once more, and more robustly) T84766.
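For reference, the pattern now used wherever the add-on reads or writes its
JSON data files looks roughly like this (the file name and payload below are
only illustrative, not taken from the add-on):

    import json

    # Writing: an explicit utf-8 encoding makes the output independent of the
    # platform's locale encoding (e.g. cp1250 on some Windows setups), and
    # ensure_ascii=False keeps non-ASCII asset names readable in the file
    # instead of escaping them as \uXXXX sequences.
    data = {"asset_name": "Křeslo"}  # illustrative payload with a non-ASCII name
    with open("export_data.json", "w", encoding="utf-8") as s:
        json.dump(data, s, ensure_ascii=False, indent=4)

    # Reading: the matching explicit encoding, again independent of locale
    # defaults, so files written as above always load back correctly.
    with open("export_data.json", "r", encoding="utf-8") as s:
        data = json.load(s)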

---
 blenderkit/autothumb.py             |  8 ++++----
 blenderkit/autothumb_material_bg.py |  2 +-
 blenderkit/autothumb_model_bg.py    |  4 ++--
 blenderkit/categories.py            |  6 +++---
 blenderkit/resolutions.py           | 16 ++++++++--------
 blenderkit/search.py                |  8 ++++----
 blenderkit/upload.py                |  4 ++--
 blenderkit/upload_bg.py             |  2 +-
 blenderkit/utils.py                 |  7 ++++---
 blenderkit/version_checker.py       |  6 +++---
 10 files changed, 32 insertions(+), 31 deletions(-)

diff --git a/blenderkit/autothumb.py b/blenderkit/autothumb.py
index 0008d6708..da6e6d29d 100644
--- a/blenderkit/autothumb.py
+++ b/blenderkit/autothumb.py
@@ -120,7 +120,7 @@ def start_thumbnailer(self, context):
         obnames = []
         for ob in obs:
             obnames.append(ob.name)
-        with open(datafile, 'w') as s:
+        with open(datafile, 'w', encoding='utf-8') as s:
             bkit = mainmodel.blenderkit
             json.dump({
                 "type": "model",
@@ -131,7 +131,7 @@ def start_thumbnailer(self, context):
                 "thumbnail_resolution": bkit.thumbnail_resolution,
                 "thumbnail_samples": bkit.thumbnail_samples,
                 "thumbnail_denoising": bkit.thumbnail_denoising,
-            }, s)
+            }, s, ensure_ascii=False, indent=4)
 
         proc = subprocess.Popen([
             binary_path,
@@ -190,7 +190,7 @@ def start_material_thumbnailer(self, context, wait=False):
         # save a copy of actual scene but don't interfere with the users models
         bpy.ops.wm.save_as_mainfile(filepath=filepath, compress=False, copy=True)
 
-        with open(datafile, 'w') as s:
+        with open(datafile, 'w', encoding='utf-8') as s:
             bkit = mat.blenderkit
             json.dump({
                 "type": "material",
@@ -204,7 +204,7 @@ def start_material_thumbnailer(self, context, wait=False):
                 "thumbnail_denoising": bkit.thumbnail_denoising,
                 "adaptive_subdivision": bkit.adaptive_subdivision,
                 "texture_size_meters": bkit.texture_size_meters,
-            }, s)
+            }, s, ensure_ascii=False, indent=4)
 
         proc = subprocess.Popen([
             binary_path,
diff --git a/blenderkit/autothumb_material_bg.py b/blenderkit/autothumb_material_bg.py
index 3e1c2f2b3..d27d7b90a 100644
--- a/blenderkit/autothumb_material_bg.py
+++ b/blenderkit/autothumb_material_bg.py
@@ -44,7 +44,7 @@ def unhide_collection(cname):
 if __name__ == "__main__":
     try:
         bg_blender.progress('preparing thumbnail scene')
-        with open(BLENDERKIT_EXPORT_DATA, 'r') as s:
+        with open(BLENDERKIT_EXPORT_DATA, 'r', encoding='utf-8') as s:
             data = json.load(s)
             # append_material(file_name, matname = None, link = False, fake_user = True)
         mat = append_link.append_material(file_name=BLENDERKIT_EXPORT_FILE_INPUT, matname=data["material"], link=True,
diff --git a/blenderkit/autothumb_model_bg.py b/blenderkit/autothumb_model_bg.py
index ebb509ffe..87acfa190 100644
--- a/blenderkit/autothumb_model_bg.py
+++ b/blenderkit/autothumb_model_bg.py
@@ -32,7 +32,7 @@ BLENDERKIT_EXPORT_DATA = sys.argv[-4]
 
 
 def get_obnames():
-    with open(BLENDERKIT_EXPORT_DATA, 'r') as s:
+    with open(BLENDERKIT_EXPORT_DATA, 'r', encoding='utf-8') as s:
         data = json.load(s)
     obnames = eval(data['models'])
     return obnames
@@ -79,7 +79,7 @@ def render_thumbnails():
 
 if __name__ == "__main__":
     try:
-        with open(BLENDERKIT_EXPORT_DATA, 'r') as s:
+        with open(BLENDERKIT_EXPORT_DATA, 'r', encoding='utf-8') as s:
             data = json.load(s)
 
         user_preferences = bpy.context.preferences.addons['blenderkit'].preferences
diff --git a/blenderkit/categories.py b/blenderkit/categories.py
index f452120b6..71e14f64c 100644
--- a/blenderkit/categories.py
+++ b/blenderkit/categories.py
@@ -168,7 +168,7 @@ def load_categories():
 
     wm = bpy.context.window_manager
     try:
-        with open(categories_filepath, 'r') as catfile:
+        with open(categories_filepath, 'r', encoding='utf-8') as catfile:
             wm['bkit_categories'] = json.load(catfile)
 
         wm['active_category'] = {
@@ -207,8 +207,8 @@ def fetch_categories(API_key, force = False):
             categories = rdata['results']
             fix_category_counts(categories)
             # filter_categories(categories) #TODO this should filter categories for search, but not for upload. by now off.
-            with open(categories_filepath, 'w') as s:
-                json.dump(categories, s, indent=4)
+            with open(categories_filepath, 'w', encoding='utf-8') as s:
+                json.dump(categories, s, ensure_ascii=False, indent=4)
         tasks_queue.add_task((load_categories, ()))
     except Exception as e:
         bk_logger.debug('category fetching failed')
diff --git a/blenderkit/resolutions.py b/blenderkit/resolutions.py
index 03026c683..a5b5d7230 100644
--- a/blenderkit/resolutions.py
+++ b/blenderkit/resolutions.py
@@ -632,8 +632,8 @@ def get_assets_search():
             retries += 1
 
     fpath = assets_db_path()
-    with open(fpath, 'w') as s:
-        json.dump(results, s)
+    with open(fpath, 'w', encoding='utf-8') as s:
+        json.dump(results, s, ensure_ascii=False, indent=4)
 
 
 def get_assets_for_resolutions(page_size=100, max_results=100000000):
@@ -698,13 +698,13 @@ def get_materials_for_validation(page_size=100, max_results=100000000):
 #             retries += 1
 #
 #     fpath = assets_db_path()
-#     with open(fpath, 'w') as s:
-#         json.dump(results, s)
+#     with open(fpath, 'w', encoding='utf-8') as s:
+#         json.dump(results, s, ensure_ascii=False, indent=4)
 
 
 def load_assets_list(filepath):
     if os.path.exists(filepath):
-        with open(filepath, 'r') as s:
+        with open(filepath, 'r', encoding='utf-8') as s:
             assets = json.load(s)
     return assets
 
@@ -821,8 +821,8 @@ def send_to_bg(asset_data, fpath, command='generate_resolutions', wait=True):
     tempdir = tempfile.mkdtemp()
     datafile = os.path.join(tempdir + 'resdata.json')
     script_path = os.path.dirname(os.path.realpath(__file__))
-    with open(datafile, 'w') as s:
-        json.dump(data, s)
+    with open(datafile, 'w', encoding='utf-8') as s:
+        json.dump(data, s, ensure_ascii=False, indent=4)
 
     print('opening Blender instance to do processing - ', command)
 
@@ -856,7 +856,7 @@ def write_data_back(asset_data):
 
 def run_bg(datafile):
     print('background file operation')
-    with open(datafile, 'r') as f:
+    with open(datafile, 'r', encoding='utf-8') as f:
         data = json.load(f)
     bpy.app.debug_value = data['debug_value']
     write_data_back(data['asset_data'])
diff --git a/blenderkit/search.py b/blenderkit/search.py
index 2252d3f15..484830e0f 100644
--- a/blenderkit/search.py
+++ b/blenderkit/search.py
@@ -1017,8 +1017,8 @@ class Searcher(threading.Thread):
         if params['get_next']:
             rdata['results'][0:0] = self.result['results']
         self.result = rdata
-        # with open(json_filepath, 'w') as outfile:
-        #     json.dump(rdata, outfile)
+        # with open(json_filepath, 'w', encoding='utf-8') as outfile:
+        #     json.dump(rdata, outfile, ensure_ascii=False, indent=4)
 
         killthreads_sml = []
         for k in thumb_sml_download_threads.keys():
@@ -1311,8 +1311,8 @@ def get_search_simple(parameters, filepath=None, page_size=100, max_results=1000
     if not filepath:
         return results
 
-    with open(filepath, 'w') as s:
-        json.dump(results, s)
+    with open(filepath, 'w', encoding='utf-8') as s:
+        json.dump(results, s, ensure_ascii=False, indent=4)
     bk_logger.info(f'retrieved {len(results)} assets from elastic search')
     return results
 
diff --git a/blenderkit/upload.py b/blenderkit/upload.py
index e811eaa3e..691502304 100644
--- a/blenderkit/upload.py
+++ b/blenderkit/upload.py
@@ -893,8 +893,8 @@ class Uploader(threading.Thread):
                     }
                     datafile = os.path.join(self.export_data['temp_dir'], BLENDERKIT_EXPORT_DATA_FILE)
 
-                    with open(datafile, 'w') as s:
-                        json.dump(data, s)
+                    with open(datafile, 'w', encoding='utf-8') as s:
+                        json.dump(data, s, ensure_ascii=False, indent=4)
 
                     # non waiting method - not useful here..
                     # proc = subprocess.Popen([
diff --git a/blenderkit/upload_bg.py b/blenderkit/upload_bg.py
index 2e27dbf1e..685e280a5 100644
--- a/blenderkit/upload_bg.py
+++ b/blenderkit/upload_bg.py
@@ -126,7 +126,7 @@ def upload_files(upload_data, files):
 if __name__ == "__main__":
     try:
         # bg_blender.progress('preparing scene - append data')
-        with open(BLENDERKIT_EXPORT_DATA, 'r') as s:
+        with open(BLENDERKIT_EXPORT_DATA, 'r', encoding='utf-8') as s:
             data = json.load(s)
 
         bpy.app.debug_value = data.get('debug_value', 0)
diff --git a/blenderkit/utils.py b/blenderkit/utils.py
index e0ced9b13..94f795c14 100644
--- a/blenderkit/utils.py
+++ b/blenderkit/utils.py
@@ -254,7 +254,7 @@ def load_prefs():
     fpath = paths.BLENDERKIT_SETTINGS_FILENAME
     if os.path.exists(fpath):
         try:
-            with open(fpath, 'r') as s:
+            with open(fpath, 'r', encoding='utf-8') as s:
                 prefs = json.load(s)
                 user_preferences.api_key = prefs.get('API_key', '')
                 user_preferences.global_dir = prefs.get('global_dir', paths.default_global_dict())
@@ -262,6 +262,7 @@ def load_prefs():
         except Exception as e:
             print('failed to read addon preferences.')
             print(e)
+            os.remove(fpath)
 
 
 def save_prefs(self, context):
@@ -285,8 +286,8 @@ def save_prefs(self, context):
             fpath = paths.BLENDERKIT_SETTINGS_FILENAME
             if not os.path.exists(paths._presets):
                 os.makedirs(paths._presets)
-            with open(fpath, 'w') as s:
-                json.dump(prefs, s)
+            with open(fpath, 'w', encoding='utf-8') as s:
+                json.dump(prefs, s, ensure_ascii=False, indent=4)
         except Exception as e:
             print(e)
 
diff --git a/blenderkit/version_checker.py b/blenderkit/version_checker.py
index 49423dbbb..993ff2381 100644
--- a/blenderkit/version_checker.py
+++ b/blenderkit/version_checker.py
@@ -43,8 +43,8 @@ def check_version(url, api_key, module):
         tempdir = paths.get_temp_dir()
 
         ver_filepath = os.path.join(tempdir, 'addon_version.json')
-        with open(ver_filepath, 'w') as s:
-            json.dump(ver_online, s, indent=4)
+        with open(ver_filepath, 'w', encoding='utf-8') as s:
+            json.dump(ver_online, s, ensure_ascii=False, indent=4)
     except:
         print("couldn't check online for version updates")
 
@@ -56,7 +56,7 @@ def compare_versions(module):
 
         tempdir = paths.get_temp_dir()
         ver_filepath = os.path.join(tempdir, 'addon_version.json')
-        with open(ver_filepath, 'r') as s:
+        with open(ver_filepath, 'r', encoding='utf-8') as s:
             data = json.load(s)
 
         ver_online = data['addonVersion2.8'].split('.')
-- 
GitLab