Compare commits

4 commits: 4eeb2e2e24 ... da279b3575
| Author | SHA1 | Date |
|---|---|---|
| | da279b3575 | |
| | 78c138ddc5 | |
| | 657a6d8176 | |
| | b2ee7756da | |
5 changed files with 44 additions and 11 deletions
TODO.md (8 changes)
```diff
@@ -3,7 +3,13 @@
 ## Features
 - [ ] implement import of non-datalad projects
 - [ ] implement a way to use another backlog repo as a project
 
+- [ ] find a way to convey the mathematical structure of what EXACTLY is the form of the correlator in a specific project
+  - this could e.g. be done along the lines of mandatory documentation
+- [ ] keep better track of the versions the code, that was used for a specific measurement.
+  - maybe let this be an input in the project file?
+  - git repo and commit hash/version tag
+- [ ] implement local caching with pickle files
 
 ## Bugfixes
 - [ ] revisit the reimport function for single files
```
```diff
@@ -46,4 +46,7 @@ def create(path):
     os.makedirs(path + '/archive')
     os.makedirs(path + '/toml_imports')
     os.makedirs(path + '/import_scripts/template.py')
+    with open(path + "/.gitignore", "w") as fp:
+        fp.write(".cache")
+        fp.close()
     dl.save(path, dataset=path, message="Initialize backlogger directory.")
```
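The hunk above makes `create` write a `.gitignore` entry so the new local `.cache` directory stays out of the dataset. A minimal, self-contained sketch of that pattern (not the repository's actual module; `dl` is assumed to be `datalad.api`, the directory names mirror the diff, and the odd `import_scripts/template.py` makedirs call is left out):

```python
import os
import datalad.api as dl  # assumption: dl refers to datalad.api, as dl.save/dl.get suggest


def create(path):
    # Create the project skeleton.
    for sub in ("archive", "toml_imports", "import_scripts"):
        os.makedirs(os.path.join(path, sub), exist_ok=True)
    # The `with` block closes the file on exit, so an explicit fp.close()
    # (as in the diff) is redundant.
    with open(os.path.join(path, ".gitignore"), "w") as fp:
        fp.write(".cache\n")
    dl.save(path, dataset=path, message="Initialize backlogger directory.")
```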
```diff
@@ -5,8 +5,9 @@ import sqlite3
 from .input import sfcf,openQCD
 import json
 from typing import Union
-from pyerrors import Obs, Corr
+from pyerrors import Obs, Corr, dump_obj, load_obj
 from hashlib import sha256
+from .tools import cached
 
 
 def write_measurement(path, ensemble, measurement, uuid, code, parameter_file=None):
```
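The new `dump_obj`/`load_obj` imports are used below to write and read single records as pickle files. If they are thin pickle wrappers (which the later per-record dump/load suggests; pyerrors ships comparable helpers as `pyerrors.misc.dump_object`/`load_object`), a minimal stand-in could look like this:

```python
import pickle


def dump_obj(obj, path):
    # Serialize one analysis object (e.g. an Obs or Corr) to a pickle file.
    with open(path, "wb") as fp:
        pickle.dump(obj, fp)


def load_obj(path):
    # Read a single pickled object back from disk.
    with open(path, "rb") as fp:
        return pickle.load(fp)
```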
```diff
@@ -135,16 +136,35 @@ def load_records(path: str, meas_paths: list[str], preloaded = {}) -> list[Union
         needed_data[file] = []
         key = mpath.split("::")[1]
         needed_data[file].append(key)
-    for filename in needed_data.keys():
-        if not filename in preloaded:
-            preloaded[filename] = preload(path, filename)
     returned_data: list = []
-    for filename in needed_data.keys():
-        for key in list(needed_data[filename]):
-            returned_data.append(preloaded[filename][key])
+    for file in needed_data.keys():
+        for key in list(needed_data[file]):
+            if os.path.exists(cache_path(path, file, key)):
+                returned_data.append(load_obj(cache_path(path, file, key)))
+            else:
+                if file not in preloaded:
+                    preloaded[file] = preload(path, file)
+                returned_data.append(preloaded[file][key])
+                if cached:
+                    if not os.path.exists(cache_dir(path, file)):
+                        os.makedirs(cache_dir(path, file))
+                    dump_obj(preloaded[file][key], cache_path(path, file, key))
     return returned_data
 
 
+def cache_dir(path, file):
+    cache_path_list = [path]
+    cache_path_list.append(".cache")
+    cache_path_list.extend(file.split("/")[1:])
+    cache_path = os.path.join(cache_path_list)
+    return cache_path
+
+
+def cache_path(path, file, key):
+    cache_path = os.path.join(cache_dir(path, file), key)
+    return cache_path + ".p"
+
+
 def preload(path: str, file: str):
     dl.get(os.path.join(path, file), dataset=path)
     filedict = pj.load_json_dict(os.path.join(path, file))
```
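The new code maps each record address `file::key` to a pickle under `<path>/.cache/`, mirroring the file's sub-path (without its leading directory), and falls back to preloading the JSON file only on a cache miss. A self-contained sketch of that lookup flow, with an illustrative `load_record` helper standing in for the per-key body of `load_records` (the sketch unpacks the path components with `*`, since `os.path.join` takes separate string arguments):

```python
import os
import pickle

cached = True  # module-level switch, as introduced in tools


def cache_dir(path, file):
    # <path>/.cache/<file path without its first component>
    parts = [path, ".cache"] + file.split("/")[1:]
    return os.path.join(*parts)


def cache_path(path, file, key):
    return os.path.join(cache_dir(path, file), key) + ".p"


def load_record(path, file, key, preloaded, preload):
    # Serve from the pickle cache when possible; otherwise preload the JSON
    # file once and (optionally) populate the cache for next time.
    target = cache_path(path, file, key)
    if os.path.exists(target):
        with open(target, "rb") as fp:
            return pickle.load(fp)
    if file not in preloaded:
        preloaded[file] = preload(path, file)
    record = preloaded[file][key]
    if cached:
        os.makedirs(cache_dir(path, file), exist_ok=True)
        with open(target, "wb") as fp:
            pickle.dump(record, fp)
    return record
```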
```diff
@@ -104,12 +104,14 @@ def import_toml(path: str, file: str, copy_file: bool=True) -> None:
             for rwp in ["integrator", "eps", "ntot", "dnms"]:
                 param[rwp] = "Unknown"
         param['type'] = 't0'
-        measurement = openQCD.extract_t0(path, uuid, md['path'], param, md["prefix"], md["dtr_read"], md["xmin"], md["spatial_extent"], fit_range=md.get('fit_range', 5), postfix=md.get('postfix', None), names=md.get('names', None))
+        measurement = openQCD.extract_t0(path, uuid, md['path'], param, md["prefix"], md["dtr_read"], md["xmin"], md["spatial_extent"],
+                                         fit_range=md.get('fit_range', 5), postfix=md.get('postfix', None), names=md.get('names', None), files=md.get('files', None))
     elif md['measurement'] == 't1':
         if 'param_file' in md:
             param = openQCD.read_ms3_param(path, uuid, md['param_file'])
         param['type'] = 't1'
-        measurement = openQCD.extract_t1(path, uuid, md['path'], param, md["prefix"], md["dtr_read"], md["xmin"], md["spatial_extent"], fit_range=md.get('fit_range', 5), postfix=md.get('postfix', None), names=md.get('names', None))
+        measurement = openQCD.extract_t1(path, uuid, md['path'], param, md["prefix"], md["dtr_read"], md["xmin"], md["spatial_extent"],
+                                         fit_range=md.get('fit_range', 5), postfix=md.get('postfix', None), names=md.get('names', None), files=md.get('files', None))
 
     write_measurement(path, ensemble, measurement, uuid, project['code'], (md['param_file'] if 'param_file' in md else None))
 
```
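The only functional change in this hunk is that an optional `files` entry from the measurement's TOML metadata is now forwarded to `extract_t0`/`extract_t1` via `md.get('files', None)`. A small illustration of how that optional key flows through (the values are made up; only the key names beyond `files` mirror what the diff already reads):

```python
# Hypothetical measurement metadata as it would come out of the parsed TOML file.
md = {
    "path": "/data/runs/A654",      # illustrative values
    "prefix": "A654",
    "dtr_read": 4,
    "xmin": 20,
    "spatial_extent": 24,
    "files": ["A654r000.ms.dat"],   # optional: restrict the reader to specific files
}

# dict.get with a default keeps older project files (without 'files') working:
files = md.get("files", None)       # None -> let the reader pick up all files
```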
```diff
@@ -6,4 +6,6 @@ def str2list(string):
 
 def list2str(mylist):
     s = ",".join(mylist)
     return s
+
+cached = True
```
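The last hunk adds the module-level flag `cached = True` next to the string helpers; the measurement code picks it up via `from .tools import cached`. Only the writing of new cache files is gated by the flag, while pickles that already exist on disk are read regardless. A tiny illustration of that gate (names other than `cached` are made up):

```python
import pickle

# In the repository this flag lives in .tools and is imported elsewhere
# (from .tools import cached); it is defined locally here for the sketch.
cached = True


def maybe_cache(record, target):
    # Write the pickle only when caching is switched on; callers that find
    # an existing pickle on disk read it regardless of this flag.
    if cached:
        with open(target, "wb") as fp:
            pickle.dump(record, fp)
```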