Skip to content

Commit

Permalink
use diskcache library for caching
Browse files Browse the repository at this point in the history
  • Loading branch information
AUTOMATIC1111 committed Mar 17, 2024
1 parent 93c7b9d commit 66355b4
Show file tree
Hide file tree
Showing 4 changed files with 42 additions and 47 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -38,3 +38,4 @@ notification.mp3
/package-lock.json
/.coverage*
/test/test_outputs
/cache
86 changes: 39 additions & 47 deletions modules/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,48 +2,47 @@
import os
import os.path
import threading
import time

import diskcache
import tqdm

from modules.paths import data_path, script_path

# Legacy JSON cache file; still read once so old caches can be migrated.
cache_filename = os.environ.get('SD_WEBUI_CACHE_FILE', os.path.join(data_path, "cache.json"))

# Directory holding one diskcache store per subsection.
cache_dir = os.environ.get('SD_WEBUI_CACHE_DIR', os.path.join(data_path, "cache"))

# Maps subsection name -> diskcache.Cache; populated lazily by cache().
caches = {}

# Guards first-time creation of a subsection store and the one-time migration.
cache_lock = threading.Lock()


def dump_cache():
    """Legacy no-op.

    Kept only so existing callers keep working: diskcache persists entries
    to disk itself, so there is nothing to flush anymore.
    """
def convert_old_cached_data():
    """
    One-time migration: load the legacy cache.json and copy every entry into
    the per-subsection diskcache stores under cache_dir.

    A missing cache.json means there is nothing to migrate. An unreadable one
    is moved aside to tmp/cache.json so it cannot break startup again.
    """
    try:
        with open(cache_filename, "r", encoding="utf8") as file:
            data = json.load(file)
    except FileNotFoundError:
        # No legacy cache on disk -- nothing to convert.
        return
    except Exception:
        # Corrupt legacy cache: preserve it for inspection, then start fresh.
        os.replace(cache_filename, os.path.join(script_path, "tmp", "cache.json"))
        print('[ERROR] issue occurred while trying to read cache.json; old cache has been moved to tmp/cache.json')
        return

    total_count = sum(len(keyvalues) for keyvalues in data.values())

    with tqdm.tqdm(total=total_count, desc="converting cache") as progress:
        for subsection, keyvalues in data.items():
            cache_obj = caches.get(subsection)
            if cache_obj is None:
                cache_obj = diskcache.Cache(os.path.join(cache_dir, subsection))
                caches[subsection] = cache_obj

            for key, value in keyvalues.items():
                cache_obj[key] = value
                progress.update(1)


def cache(subsection):
    """
    Retrieves or initializes a cache for a specific subsection.

    Parameters:
        subsection (str): The subsection identifier for the cache.
    Returns:
        diskcache.Cache: The cache data for the specified subsection.
    """
    cache_obj = caches.get(subsection)
    if not cache_obj:
        with cache_lock:
            # If the diskcache directory does not exist yet but a legacy
            # cache.json does, migrate it before creating the first store.
            if not os.path.exists(cache_dir) and os.path.isfile(cache_filename):
                convert_old_cached_data()

            # Re-check under the lock: another thread may have created the
            # store while this one was waiting.
            cache_obj = caches.get(subsection)
            if not cache_obj:
                cache_obj = diskcache.Cache(os.path.join(cache_dir, subsection))
                caches[subsection] = cache_obj

    return cache_obj


def cached_data_for_file(subsection, title, filename, func):
Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ accelerate

blendmodes
clean-fid
diskcache
einops
facexlib
fastapi>=0.90.1
Expand Down
1 change: 1 addition & 0 deletions requirements_versions.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ Pillow==9.5.0
accelerate==0.21.0
blendmodes==2022
clean-fid==0.1.35
diskcache==5.6.3
einops==0.4.1
facexlib==0.3.0
fastapi==0.94.0
Expand Down

1 comment on commit 66355b4

@freecoderwaifu
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Much smaller cache: 160 MB with 26k Lora files including thumbnails (13k Loras) + 340 checkpoints, compared to almost 500 MB before. Faster UI load and reload too — thank you to AUTOMATIC1111, w-e-w, and the other contributors for this.

Please sign in to comment.