Compare commits

..

No commits in common. "da279b3575fd8696b53f8e38389cbf7084061627" and "4eeb2e2e24f66882fd36cb6bbc1db8aee749e9ce" have entirely different histories.

5 changed files with 11 additions and 44 deletions

View file

@ -3,12 +3,6 @@
## Features
- [ ] implement import of non-datalad projects
- [ ] implement a way to use another backlog repo as a project
- [ ] find a way to convey the mathematical structure of what EXACTLY is the form of the correlator in a specific project
- this could e.g. be done along the lines of mandatory documentation
- [ ] keep better track of the versions the code, that was used for a specific measurement.
- maybe let this be an input in the project file?
- git repo and commit hash/version tag
- [ ] implement local caching with pickle files
## Bugfixes

View file

@ -46,7 +46,4 @@ def create(path):
os.makedirs(path + '/archive') os.makedirs(path + '/archive')
os.makedirs(path + '/toml_imports') os.makedirs(path + '/toml_imports')
os.makedirs(path + '/import_scripts/template.py') os.makedirs(path + '/import_scripts/template.py')
with open(path + "/.gitignore", "w") as fp:
fp.write(".cache")
fp.close()
dl.save(path, dataset=path, message="Initialize backlogger directory.") dl.save(path, dataset=path, message="Initialize backlogger directory.")

View file

@ -5,9 +5,8 @@ import sqlite3
from .input import sfcf,openQCD from .input import sfcf,openQCD
import json import json
from typing import Union from typing import Union
from pyerrors import Obs, Corr, dump_obj, load_obj from pyerrors import Obs, Corr
from hashlib import sha256 from hashlib import sha256
from .tools import cached
def write_measurement(path, ensemble, measurement, uuid, code, parameter_file=None): def write_measurement(path, ensemble, measurement, uuid, code, parameter_file=None):
@ -136,35 +135,16 @@ def load_records(path: str, meas_paths: list[str], preloaded = {}) -> list[Union
needed_data[file] = [] needed_data[file] = []
key = mpath.split("::")[1] key = mpath.split("::")[1]
needed_data[file].append(key) needed_data[file].append(key)
for filename in needed_data.keys():
if not filename in preloaded:
preloaded[filename] = preload(path, filename)
returned_data: list = [] returned_data: list = []
for file in needed_data.keys(): for filename in needed_data.keys():
for key in list(needed_data[file]): for key in list(needed_data[filename]):
if os.path.exists(cache_path(path, file, key)): returned_data.append(preloaded[filename][key])
returned_data.append(load_obj(cache_path(path, file, key)))
else:
if file not in preloaded:
preloaded[file] = preload(path, file)
returned_data.append(preloaded[file][key])
if cached:
if not os.path.exists(cache_dir(path, file)):
os.makedirs(cache_dir(path, file))
dump_obj(preloaded[file][key], cache_path(path, file, key))
return returned_data return returned_data
def cache_dir(path, file):
    """Return the cache directory for *file* inside *path*'s ``.cache`` tree.

    The first path component of *file* is dropped and the remainder is
    mirrored under ``<path>/.cache`` (e.g. ``projects/abc/x.json`` ->
    ``<path>/.cache/abc/x.json``'s directory).

    Parameters
    ----------
    path : str
        Root directory of the backlogger dataset.
    file : str
        Slash-separated file path relative to *path*.

    Returns
    -------
    str
        The cache directory path for *file*.
    """
    # BUG FIX: the original called os.path.join(cache_path_list) with the
    # list itself as the single argument; os.path.join takes varargs, so
    # that raises TypeError at runtime. Unpack the components instead.
    return os.path.join(path, ".cache", *file.split("/")[1:])
def cache_path(path, file, key):
    """Return the pickle cache file (``.p``) for *key* of *file* under *path*."""
    return os.path.join(cache_dir(path, file), key) + ".p"
def preload(path: str, file: str): def preload(path: str, file: str):
dl.get(os.path.join(path, file), dataset=path) dl.get(os.path.join(path, file), dataset=path)
filedict = pj.load_json_dict(os.path.join(path, file)) filedict = pj.load_json_dict(os.path.join(path, file))

View file

@ -104,14 +104,12 @@ def import_toml(path: str, file: str, copy_file: bool=True) -> None:
for rwp in ["integrator", "eps", "ntot", "dnms"]: for rwp in ["integrator", "eps", "ntot", "dnms"]:
param[rwp] = "Unknown" param[rwp] = "Unknown"
param['type'] = 't0' param['type'] = 't0'
measurement = openQCD.extract_t0(path, uuid, md['path'], param, md["prefix"], md["dtr_read"], md["xmin"], md["spatial_extent"], measurement = openQCD.extract_t0(path, uuid, md['path'], param, md["prefix"], md["dtr_read"], md["xmin"], md["spatial_extent"], fit_range=md.get('fit_range', 5), postfix=md.get('postfix', None), names=md.get('names', None))
fit_range=md.get('fit_range', 5), postfix=md.get('postfix', None), names=md.get('names', None), files=md.get('files', None))
elif md['measurement'] == 't1': elif md['measurement'] == 't1':
if 'param_file' in md: if 'param_file' in md:
param = openQCD.read_ms3_param(path, uuid, md['param_file']) param = openQCD.read_ms3_param(path, uuid, md['param_file'])
param['type'] = 't1' param['type'] = 't1'
measurement = openQCD.extract_t1(path, uuid, md['path'], param, md["prefix"], md["dtr_read"], md["xmin"], md["spatial_extent"], measurement = openQCD.extract_t1(path, uuid, md['path'], param, md["prefix"], md["dtr_read"], md["xmin"], md["spatial_extent"], fit_range=md.get('fit_range', 5), postfix=md.get('postfix', None), names=md.get('names', None))
fit_range=md.get('fit_range', 5), postfix=md.get('postfix', None), names=md.get('names', None), files=md.get('files', None))
write_measurement(path, ensemble, measurement, uuid, project['code'], (md['param_file'] if 'param_file' in md else None)) write_measurement(path, ensemble, measurement, uuid, project['code'], (md['param_file'] if 'param_file' in md else None))

View file

@ -7,5 +7,3 @@ def str2list(string):
def list2str(mylist):
    """Join *mylist* into a single comma-separated string.

    Inverse of ``str2list``. Reconstructed here because the diff rendering
    had fused the two display columns, duplicating every token on the line.

    Parameters
    ----------
    mylist : list[str]
        Items to join; each must already be a string.

    Returns
    -------
    str
        The items of *mylist* separated by commas.
    """
    s = ",".join(mylist)
    return s
# Module-level switch for the pickle-file cache; imported elsewhere as
# `from .tools import cached` and tested (`if cached:`) before cache files
# are written. NOTE(review): flag only — no invalidation logic visible here.
cached = True