Commit 59cc0c74 authored by Vilém Duha

BlenderKit: fix too many requests from addon

 - for author data: duplicate requests are now almost entirely avoided, though this can still be improved
 - for categories: the server is asked for the category list at most once per day.
parent 2ffb3576
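The categories change below gates the network request on the age of the cached categories.json: when the file is younger than 86400 seconds (one day) and force is False, the server is not contacted at all. A minimal standalone sketch of that pattern, with a hypothetical helper name (unlike the committed code, it also treats a missing cache file as stale instead of letting os.path.getmtime raise):

import os
import time

CACHE_MAX_AGE = 86400  # one day, in seconds

def cache_is_fresh(filepath, max_age=CACHE_MAX_AGE):
    # A missing cache counts as stale, so the very first run still fetches.
    if not os.path.exists(filepath):
        return False
    # Fresh while the file was modified less than max_age seconds ago.
    return time.time() - os.path.getmtime(filepath) < max_age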
@@ -30,6 +30,7 @@ import requests
 import json
 import os
 import bpy
+import time
 import shutil
 import threading
@@ -106,15 +107,27 @@ def load_categories():
     except:
         print('categories failed to read')

-def fetch_categories(API_key):
+#
+catfetch_counter = 0
+
+def fetch_categories(API_key, force = False):
     url = paths.get_api_url() + 'categories/'
     headers = utils.get_headers(API_key)
     tempdir = paths.get_temp_dir()
     categories_filepath = os.path.join(tempdir, 'categories.json')
+    catfile_age = time.time() - os.path.getmtime(categories_filepath)
+
+    # global catfetch_counter
+    # catfetch_counter += 1
+    # utils.p('fetching categories: ', catfetch_counter)
+    # utils.p('age of cat file', catfile_age)
     try:
-        r = rerequests.get(url, headers=headers)
-        rdata = r.json()
-        categories = rdata['results']
+        # read categories only once per day maximum, or when forced to do so.
+        if catfile_age > 86400 or force:
+            utils.p('requesting categories')
+            r = rerequests.get(url, headers=headers)
+            rdata = r.json()
+            categories = rdata['results']
@@ -131,6 +144,6 @@ def fetch_categories(API_key):
         shutil.copy(source_path, categories_filepath)

-def fetch_categories_thread(API_key):
-    cat_thread = threading.Thread(target=fetch_categories, args=([API_key]), daemon=True)
+def fetch_categories_thread(API_key, force = False):
+    cat_thread = threading.Thread(target=fetch_categories, args=([API_key, force]), daemon=True)
     cat_thread.start()
@@ -110,8 +110,9 @@ def fetch_server_data():
             len(user_preferences.api_key) < 38 and \
             user_preferences.api_key_timeout < time.time() + 3600:
         bkit_oauth.refresh_token_thread()
-    if api_key != '':
+    if api_key != '' and bpy.context.window_manager.get('bkit profile') == None:
         get_profile()
+    if bpy.context.window_manager.get('bkit_categories') is None:
         categories.fetch_categories_thread(api_key)
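fetch_server_data now keys both requests off bpy.context.window_manager: the profile is fetched only while 'bkit profile' is unset, and categories only while 'bkit_categories' is unset, so a timer that re-runs this function no longer refires the same requests. A sketch of the guard idea, with a plain dict standing in for window_manager and illustrative names:

_state = {}  # stands in for bpy.context.window_manager in this sketch

def ensure_fetched(key, start_fetch_thread):
    # Safe to call repeatedly (e.g. from a timer): once the worker thread
    # has stored its result under `key`, no further thread is spawned.
    if _state.get(key) is None:
        start_fetch_thread()

Note the residual race this shares with the committed code: a second call arriving before the worker stores its result would still fetch again, which matches the commit message's "almost no risk".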
@@ -622,6 +623,7 @@ def fetch_author(a_id, api_key):
         utils.p(e)
     utils.p('finish fetch')

+# profile_counter =0

 def get_author(r):
     a_id = str(r['author']['id'])
@@ -630,11 +632,13 @@ def get_author(r):
     if authors == {}:
         bpy.context.window_manager['bkit authors'] = authors
     a = authors.get(a_id)
-    if a is None or a is '' or \
-            (a.get('gravatarHash') is not None and a.get('gravatarImg') is None):
-        authors[a_id] = None
+    if a is None:# or a is '' or (a.get('gravatarHash') is not None and a.get('gravatarImg') is None):
+        authors[a_id] = ''
         thread = threading.Thread(target=fetch_author, args=(a_id, preferences.api_key), daemon=True)
         thread.start()
+        # global profile_counter
+        # profile_counter+=1
+        # print(profile_counter,'author:', a_id)
     return a
...
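The get_author change replaces the old retry-prone condition with a single a is None check and stores an empty string in the shared authors dict before the fetch thread starts, so callers that arrive while the request is in flight see '' rather than None and do not spawn a second thread; the commented-out profile_counter lines are leftover debug instrumentation for counting those fetches. A minimal sketch of the same in-flight-marker idea with illustrative names (the committed code relies on the GIL and Blender's single-threaded UI, whereas this sketch adds an explicit lock for generality):

import threading

authors = {}  # shared cache: author id -> data, or '' while a fetch is in flight
_authors_lock = threading.Lock()

def get_author_cached(a_id, fetch):
    with _authors_lock:
        a = authors.get(a_id)
        if a is None:
            # Mark the id as in flight *before* starting the worker so a
            # caller arriving right after us skips the duplicate request.
            authors[a_id] = ''
            threading.Thread(target=fetch, args=(a_id,), daemon=True).start()
    return a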