Compare commits

...

2 commits

Author SHA1 Message Date
Justus Kuhlmann
4f7d9d71b3 Implement better finding for sfcf projects 2025-05-13 09:45:37 +00:00
Justus Kuhlmann
2bf263679d Finish first version of cache 2025-05-13 09:45:00 +00:00
4 changed files with 41 additions and 12 deletions

View file

@@ -1,15 +1,14 @@
# TODO # TODO
## Features ## Features
- [ ] implement import of non-datalad projects - implement import of non-datalad projects
- [ ] implement a way to use another backlog repo as a project - implement a way to use another backlog repo as a project
- [ ] find a way to convey the mathematical structure of what EXACTLY is the form of the correlator in a specific project - find a way to convey the mathematical structure of what EXACTLY is the form of the correlator in a specific project
- this could e.g. be done along the lines of mandatory documentation - this could e.g. be done along the lines of mandatory documentation
- [ ] keep better track of the versions the code, that was used for a specific measurement. - keep better track of the versions of the code, that was used for a specific measurement.
- maybe let this be an input in the project file? - maybe let this be an input in the project file?
- git repo and commit hash/version tag - git repo and commit hash/version tag
- [ ] implement local caching with pickle files
## Bugfixes ## Bugfixes
- [ ] revisit the reimport function for single files - [ ] revisit the reimport function for single files

View file

@@ -5,6 +5,7 @@ import json
import pandas as pd import pandas as pd
import numpy as np import numpy as np
from .input.implementations import codes from .input.implementations import codes
from .tools import m2k, k2m
# this will implement the search functionality # this will implement the search functionality
@@ -65,11 +66,33 @@ def sfcf_filter(results, **kwargs):
if kwargs.get('offset') != param['offset']: if kwargs.get('offset') != param['offset']:
drops.append(ind) drops.append(ind)
continue continue
if 'quark_masses' in kwargs: if 'quark_kappas' in kwargs:
quark_masses = kwargs['quark_masses'] kappas = kwargs['quark_kappas']
if (not np.isclose(quark_masses[0], param['quarks'][0]['mass']) or not np.isclose(quark_masses[1], param['quarks'][1]['mass'])): if (not np.isclose(kappas[0], param['quarks'][0]['mass']) or not np.isclose(kappas[1], param['quarks'][1]['mass'])):
drops.append(ind) drops.append(ind)
continue continue
if 'qm1' in kwargs:
quark_mass1 = kwargs['qm1']
if not isinstance(quark_mass1, list):
if (not np.isclose(quark_mass1, k2m(param['quarks'][0]['mass']))):
drops.append(ind)
continue
else:
if len(quark_mass1) == 2:
if (quark_mass1[0] > k2m(param['quarks'][0]['mass'])) or (quark_mass1[1] < k2m(param['quarks'][0]['mass'])):
drops.append(ind)
continue
if 'qm2' in kwargs:
quark_mass2 = kwargs['qm2']
if not isinstance(quark_mass2, list):
if (not np.isclose(quark_mass2, k2m(param['quarks'][1]['mass']))):
drops.append(ind)
continue
else:
if len(quark_mass2) == 2:
if (quark_mass2[0] > k2m(param['quarks'][1]['mass'])) or (quark_mass2[1] < k2m(param['quarks'][1]['mass'])):
drops.append(ind)
continue
if 'quark_thetas' in kwargs: if 'quark_thetas' in kwargs:
quark_thetas = kwargs['quark_thetas'] quark_thetas = kwargs['quark_thetas']
if (quark_thetas[0] != param['quarks'][0]['thetas'] and quark_thetas[1] != param['quarks'][1]['thetas']) or (quark_thetas[0] != param['quarks'][1]['thetas'] and quark_thetas[1] != param['quarks'][0]['thetas']): if (quark_thetas[0] != param['quarks'][0]['thetas'] and quark_thetas[1] != param['quarks'][1]['thetas']) or (quark_thetas[0] != param['quarks'][1]['thetas'] and quark_thetas[1] != param['quarks'][0]['thetas']):

View file

@@ -139,8 +139,8 @@ def load_records(path: str, meas_paths: list[str], preloaded = {}) -> list[Union
returned_data: list = [] returned_data: list = []
for file in needed_data.keys(): for file in needed_data.keys():
for key in list(needed_data[file]): for key in list(needed_data[file]):
if os.path.exists(cache_path(path, file, key)): if os.path.exists(cache_path(path, file, key) + ".p"):
returned_data.append(load_object(cache_path(path, file, key))) returned_data.append(load_object(cache_path(path, file, key) + ".p"))
else: else:
if file not in preloaded: if file not in preloaded:
preloaded[file] = preload(path, file) preloaded[file] = preload(path, file)
@@ -156,13 +156,13 @@ def cache_dir(path, file):
cache_path_list = [path] cache_path_list = [path]
cache_path_list.append(".cache") cache_path_list.append(".cache")
cache_path_list.extend(file.split("/")[1:]) cache_path_list.extend(file.split("/")[1:])
cache_path = os.path.join(cache_path_list) cache_path = "/".join(cache_path_list)
return cache_path return cache_path
def cache_path(path, file, key): def cache_path(path, file, key):
cache_path = os.path.join(cache_dir(path, file), key) cache_path = os.path.join(cache_dir(path, file), key)
return cache_path + ".p" return cache_path
def preload(path: str, file: str): def preload(path: str, file: str):

View file

@ -9,3 +9,10 @@ def list2str(mylist):
return s return s
cached = True cached = True
def m2k(m):
    """Map a bare quark mass *m* to the corresponding hopping parameter kappa.

    Inverse of k2m: kappa = 1 / (2*m + 8).
    """
    denominator = 2 * m + 8
    return 1 / denominator
def k2m(k):
    """Map a hopping parameter *k* (kappa) to the bare quark mass.

    Inverse of m2k: m = 1/(2*kappa) - 4.
    """
    half_inverse = 1 / (2 * k)
    return half_inverse - 4