first version of caching
parent 78c138ddc5
commit da279b3575

3 changed files with 33 additions and 8 deletions
@@ -46,4 +46,7 @@ def create(path):
     os.makedirs(path + '/archive')
     os.makedirs(path + '/toml_imports')
     os.makedirs(path + '/import_scripts/template.py')
+    with open(path + "/.gitignore", "w") as fp:
+        fp.write(".cache")
+        fp.close()
     dl.save(path, dataset=path, message="Initialize backlogger directory.")
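The `.gitignore` entry presumably keeps the new `.cache` directory from being swept into the dataset by `dl.save`. One nit: the explicit `fp.close()` is redundant, since the `with` block already closes the file on exit. A minimal equivalent of the added lines (not part of the commit):

    with open(path + "/.gitignore", "w") as fp:
        fp.write(".cache")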
@@ -5,8 +5,9 @@ import sqlite3
 from .input import sfcf,openQCD
 import json
 from typing import Union
-from pyerrors import Obs, Corr
+from pyerrors import Obs, Corr, dump_obj, load_obj
 from hashlib import sha256
+from .tools import cached


 def write_measurement(path, ensemble, measurement, uuid, code, parameter_file=None):
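These imports wire up the cache: `dump_obj` and `load_obj` serialize records to disk and read them back (the `.p` suffix used below suggests pickle files), and `cached` is the global on/off switch added to `tools` in the last hunk.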
@@ -135,16 +136,35 @@ def load_records(path: str, meas_paths: list[str], preloaded = {}) -> list[Union
             needed_data[file] = []
         key = mpath.split("::")[1]
         needed_data[file].append(key)
-    for filename in needed_data.keys():
-        if not filename in preloaded:
-            preloaded[filename] = preload(path, filename)
     returned_data: list = []
-    for filename in needed_data.keys():
-        for key in list(needed_data[filename]):
-            returned_data.append(preloaded[filename][key])
+    for file in needed_data.keys():
+        for key in list(needed_data[file]):
+            if os.path.exists(cache_path(path, file, key)):
+                returned_data.append(load_obj(cache_path(path, file, key)))
+            else:
+                if file not in preloaded:
+                    preloaded[file] = preload(path, file)
+                returned_data.append(preloaded[file][key])
+                if cached:
+                    if not os.path.exists(cache_dir(path, file)):
+                        os.makedirs(cache_dir(path, file))
+                    dump_obj(preloaded[file][key], cache_path(path, file, key))
     return returned_data


+def cache_dir(path, file):
+    cache_path_list = [path]
+    cache_path_list.append(".cache")
+    cache_path_list.extend(file.split("/")[1:])
+    cache_path = os.path.join(cache_path_list)
+    return cache_path
+
+
+def cache_path(path, file, key):
+    cache_path = os.path.join(cache_dir(path, file), key)
+    return cache_path + ".p"
+
+
 def preload(path: str, file: str):
     dl.get(os.path.join(path, file), dataset=path)
     filedict = pj.load_json_dict(os.path.join(path, file))
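The rewritten loop is the caching scheme in full: each record is addressed as `file::key`; on lookup, a pickle at `cache_path(path, file, key)` wins, otherwise the JSON file is fetched and parsed via `preload`, and (when `cached` is set) the record is dumped so the next call hits the fast path. Two details are worth flagging. First, `os.path.join(cache_path_list)` in `cache_dir` passes the whole list as a single argument, which raises a TypeError at runtime; the list has to be unpacked. A corrected sketch of the helper (same logic, only the join call fixed):

    import os

    def cache_dir(path, file):
        # <dataset>/.cache/<components of `file` after the first>,
        # mirroring the committed logic
        cache_path_list = [path, ".cache"]
        cache_path_list.extend(file.split("/")[1:])
        return os.path.join(*cache_path_list)  # unpack: join wants str arguments

Second, `preloaded = {}` in the signature of `load_records` is a mutable default argument: every call that omits it shares one dict for the lifetime of the process, so it silently doubles as an in-memory cache across calls. If that is intended it deserves a comment; otherwise the usual idiom is a `None` default replaced with a fresh dict inside the body.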
@@ -6,4 +6,6 @@ def str2list(string):

 def list2str(mylist):
     s = ",".join(mylist)
     return s
+
+cached = True
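One caveat with the flag: `from .tools import cached` copies the value at import time, so rebinding `tools.cached` later has no effect on the name already imported above. If caching ever needs to be toggled at runtime, importing the module and reading the attribute at call time would work (a sketch, assuming the relative layout implied by the import):

    from . import tools  # instead of: from .tools import cached

    # inside load_records, read the attribute on every call
    if tools.cached:
        ...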