Merge pull request 'docs/update' (#15) from docs/update into develop

Reviewed-on: https://www.kuhl-mann.de/git/git/jkuhl/corrlib/pulls/15

Commit: 5e712b64cf
10 changed files with 663 additions and 93 deletions
@@ -134,6 +134,9 @@ def reimporter(
         ),
         ident: str = typer.Argument()
 ) -> None:
+    """
+    Reimport the toml file identified by the ident string.
+    """
     uuid = ident.split("::")[0]
     if len(ident.split("::")) > 1:
         toml_file = os.path.join(path, "toml_imports", ident.split("::")[1])
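The ident string parsed in this hunk follows a uuid::file convention: the part before "::" is the project UUID, and an optional second part names a toml file under toml_imports. A minimal sketch of that parsing, with an invented ident value:

import os

path = "/path/to/library"                      # placeholder library path
ident = "0000-aaaa-bbbb::measurements.toml"    # invented example value

uuid = ident.split("::")[0]                    # "0000-aaaa-bbbb"
parts = ident.split("::")
if len(parts) > 1:
    toml_file = os.path.join(path, "toml_imports", parts[1])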
corrlib/find.py (256 changed lines)
@@ -8,11 +8,23 @@ from .tools import k2m, get_db_file
 from .tracker import get
 from typing import Any, Optional

-# this will implement the search functionality


 def _project_lookup_by_alias(db: str, alias: str) -> str:
-    # this will lookup the project name based on the alias
+    """
+    Look up a project's UUID by its (human-readable) alias.
+
+    Parameters
+    ----------
+    db: str
+        The database to look up the project.
+    alias: str
+        The alias to look up.
+
+    Returns
+    -------
+    uuid: str
+        The UUID of the project with the given alias.
+    """
     conn = sqlite3.connect(db)
     c = conn.cursor()
     c.execute(f"SELECT * FROM 'projects' WHERE alias = '{alias}'")
@@ -26,6 +38,21 @@ def _project_lookup_by_alias(db: str, alias: str) -> str:


 def _project_lookup_by_id(db: str, uuid: str) -> list[tuple[str, str]]:
+    """
+    Return the project information available in the database by UUID.
+
+    Parameters
+    ----------
+    db: str
+        The database to look up the project.
+    uuid: str
+        The uuid of the project in question.
+
+    Returns
+    -------
+    results: list
+        The row of the project in the database.
+    """
     conn = sqlite3.connect(db)
     c = conn.cursor()
     c.execute(f"SELECT * FROM 'projects' WHERE id = '{uuid}'")
@@ -36,6 +63,38 @@ def _project_lookup_by_id(db: str, uuid: str) -> list[tuple[str, str]]:


 def _db_lookup(db: str, ensemble: str, correlator_name: str, code: str, project: Optional[str]=None, parameters: Optional[str]=None,
                created_before: Optional[str]=None, created_after: Optional[Any]=None, updated_before: Optional[Any]=None, updated_after: Optional[Any]=None) -> pd.DataFrame:
+    """
+    Look up a correlator record in the database by the data given to the method.
+
+    Parameters
+    ----------
+    db: str
+        The database to look up the record.
+    ensemble: str
+        The ensemble the record is associated with.
+    correlator_name: str
+        The name of the correlator in question.
+    code: str
+        The name of the code which was used to calculate the correlator.
+    project: str, optional
+        The UUID of the project the correlator was calculated in.
+    parameters: str, optional
+        A dictionary holding the exact parameters for the measurement that are held in the database.
+    created_before: str, optional
+        Timestamp string before which the measurement has been created.
+    created_after: str, optional
+        Timestamp string after which the measurement has been created.
+    updated_before: str, optional
+        Timestamp string before which the measurement has been updated.
+    updated_after: str, optional
+        Timestamp string after which the measurement has been updated.
+
+    Returns
+    -------
+    results: pd.DataFrame
+        A pandas DataFrame holding the information received from the DB query.
+    """
+
     project_str = project

     search_expr = f"SELECT * FROM 'backlogs' WHERE name = '{correlator_name}' AND ensemble = '{ensemble}'"
@@ -60,85 +119,112 @@ def _db_lookup(db: str, ensemble: str, correlator_name: str, code: str, project:


 def sfcf_filter(results: pd.DataFrame, **kwargs: Any) -> pd.DataFrame:
+    """
+    Filter method for the database entries holding SFCF calculations.
+
+    Parameters
+    ----------
+    results: pd.DataFrame
+        The unfiltered pandas DataFrame holding the entries from the database.
+
+    offset: list[float], optional
+    quark_kappas: list[float]
+    quark_masses: list[float]
+    qk1: float, optional
+        Mass parameter $\kappa_1$ of the first quark.
+    qk2: float, optional
+        Mass parameter $\kappa_2$ of the second quark.
+    qm1: float, optional
+        Bare quark mass $m_1$ of the first quark.
+    qm2: float, optional
+        Bare quark mass $m_2$ of the second quark.
+    quark_thetas: list[list[float]], optional
+    wf1: optional
+    wf2: optional
+
+    Returns
+    -------
+    results: pd.DataFrame
+        The filtered DataFrame, only holding the records that fit the parameters given.
+    """
     drops = []
     for ind in range(len(results)):
         result = results.iloc[ind]
-        if result['code'] == 'sfcf':
         param = json.loads(result['parameters'])
         if 'offset' in kwargs:
             if kwargs.get('offset') != param['offset']:
                 drops.append(ind)
                 continue
         if 'quark_kappas' in kwargs:
             kappas = kwargs['quark_kappas']
             if (not np.isclose(kappas[0], param['quarks'][0]['mass']) or not np.isclose(kappas[1], param['quarks'][1]['mass'])):
                 drops.append(ind)
                 continue
         if 'quark_masses' in kwargs:
             masses = kwargs['quark_masses']
             if (not np.isclose(masses[0], k2m(param['quarks'][0]['mass'])) or not np.isclose(masses[1], k2m(param['quarks'][1]['mass']))):
                 drops.append(ind)
                 continue
         if 'qk1' in kwargs:
             quark_kappa1 = kwargs['qk1']
             if not isinstance(quark_kappa1, list):
                 if (not np.isclose(quark_kappa1, param['quarks'][0]['mass'])):
                     drops.append(ind)
                     continue
             else:
                 if len(quark_kappa1) == 2:
                     if (quark_kappa1[0] > param['quarks'][0]['mass']) or (quark_kappa1[1] < param['quarks'][0]['mass']):
                         drops.append(ind)
                         continue
         if 'qk2' in kwargs:
             quark_kappa2 = kwargs['qk2']
             if not isinstance(quark_kappa2, list):
                 if (not np.isclose(quark_kappa2, param['quarks'][1]['mass'])):
                     drops.append(ind)
                     continue
             else:
                 if len(quark_kappa2) == 2:
                     if (quark_kappa2[0] > param['quarks'][1]['mass']) or (quark_kappa2[1] < param['quarks'][1]['mass']):
                         drops.append(ind)
                         continue
         if 'qm1' in kwargs:
             quark_mass1 = kwargs['qm1']
             if not isinstance(quark_mass1, list):
                 if (not np.isclose(quark_mass1, k2m(param['quarks'][0]['mass']))):
                     drops.append(ind)
                     continue
             else:
                 if len(quark_mass1) == 2:
                     if (quark_mass1[0] > k2m(param['quarks'][0]['mass'])) or (quark_mass1[1] < k2m(param['quarks'][0]['mass'])):
                         drops.append(ind)
                         continue
         if 'qm2' in kwargs:
             quark_mass2 = kwargs['qm2']
             if not isinstance(quark_mass2, list):
                 if (not np.isclose(quark_mass2, k2m(param['quarks'][1]['mass']))):
                     drops.append(ind)
                     continue
             else:
                 if len(quark_mass2) == 2:
                     if (quark_mass2[0] > k2m(param['quarks'][1]['mass'])) or (quark_mass2[1] < k2m(param['quarks'][1]['mass'])):
                         drops.append(ind)
                         continue
         if 'quark_thetas' in kwargs:
             quark_thetas = kwargs['quark_thetas']
             if (quark_thetas[0] != param['quarks'][0]['thetas'] and quark_thetas[1] != param['quarks'][1]['thetas']) or (quark_thetas[0] != param['quarks'][1]['thetas'] and quark_thetas[1] != param['quarks'][0]['thetas']):
                 drops.append(ind)
                 continue
         # careful, this is not safe when multiple contributions are present!
         if 'wf1' in kwargs:
             wf1 = kwargs['wf1']
             if not (np.isclose(wf1[0][0], param['wf1'][0][0], 1e-8) and np.isclose(wf1[0][1][0], param['wf1'][0][1][0], 1e-8) and np.isclose(wf1[0][1][1], param['wf1'][0][1][1], 1e-8)):
                 drops.append(ind)
                 continue
         if 'wf2' in kwargs:
             wf2 = kwargs['wf2']
             if not (np.isclose(wf2[0][0], param['wf2'][0][0], 1e-8) and np.isclose(wf2[0][1][0], param['wf2'][0][1][0], 1e-8) and np.isclose(wf2[0][1][1], param['wf2'][0][1][1], 1e-8)):
                 drops.append(ind)
                 continue
     return results.drop(drops)
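A note on the kwargs convention used by sfcf_filter above: qk1, qk2, qm1 and qm2 accept either a single number, matched against the database entry with np.isclose, or a two-element [low, high] list, treated as an inclusive window. The following standalone sketch (invented numbers, no database involved) mirrors that dispatch:

import numpy as np

def kappa_matches(requested, kappa_in_db):
    # Single value: match up to floating-point tolerance, like the scalar branch above.
    if not isinstance(requested, list):
        return bool(np.isclose(requested, kappa_in_db))
    # Two-element list: inclusive [low, high] window, like the range branch above.
    if len(requested) == 2:
        return requested[0] <= kappa_in_db <= requested[1]
    return True

print(kappa_matches(0.13625, 0.13625))           # True
print(kappa_matches([0.1360, 0.1365], 0.13625))  # True
print(kappa_matches([0.1360, 0.1361], 0.13625))  # False, such a record would be dropped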
@@ -152,17 +238,49 @@ def find_record(path: str, ensemble: str, correlator_name: str, code: str, proje
     results = _db_lookup(db, ensemble, correlator_name,code, project, parameters=parameters, created_before=created_before, created_after=created_after, updated_before=updated_before, updated_after=updated_after)
     if code == "sfcf":
         results = sfcf_filter(results, **kwargs)
+    elif code == "openQCD":
+        pass
+    else:
+        raise Exception
     print("Found " + str(len(results)) + " result" + ("s" if len(results)>1 else ""))
     return results.reset_index()


 def find_project(path: str, name: str) -> str:
+    """
+    Find a project by its human-readable name.
+
+    Parameters
+    ----------
+    path: str
+        The path of the library.
+    name: str
+        The name of the project to look for in the library.
+
+    Returns
+    -------
+    uuid: str
+        The uuid of the project in question.
+    """
     db_file = get_db_file(path)
     get(path, db_file)
     return _project_lookup_by_alias(os.path.join(path, db_file), name)


 def list_projects(path: str) -> list[tuple[str, str]]:
+    """
+    List all projects known to the library.
+
+    Parameters
+    ----------
+    path: str
+        The path of the library.
+
+    Returns
+    -------
+    results: list[Any]
+        The projects known to the library.
+    """
     db_file = get_db_file(path)
     get(path, db_file)
     conn = sqlite3.connect(os.path.join(path, db_file))
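A hedged usage sketch of the lookup helpers documented in this hunk. The library path, ensemble and correlator names are placeholders; extra keyword arguments are only forwarded to sfcf_filter when code is "sfcf":

from corrlib.find import find_project, find_record, list_projects

path = "/path/to/library"                     # placeholder library path

uuid = find_project(path, "my_project")       # alias -> project UUID
projects = list_projects(path)                # rows of the 'projects' table

# Ensemble and correlator name are invented; qk1 is forwarded to sfcf_filter.
df = find_record(path, "A653", "f_A", "sfcf", qk1=0.13625)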
@@ -8,6 +8,10 @@ def _create_db(db: str) -> None:
     """
     Create the database file and the table.
+
+    Parameters
+    ----------
+    db: str
+        Path of the database file.
     """
     conn = sqlite3.connect(db)
     c = conn.cursor()
@@ -38,7 +42,21 @@ def _create_db(db: str) -> None:

 def _create_config(path: str, tracker: str, cached: bool) -> ConfigParser:
     """
-    Create the config file for backlogger.
+    Create the config file content for backlogger.
+
+    Parameters
+    ----------
+    path: str
+        The path of the library to create.
+    tracker: str
+        Type of the tracker to use for the library (only DataLad is supported at the moment).
+    cached: bool
+        Whether or not the library will create a cache folder for multiple reads when downloaded.
+
+    Returns
+    -------
+    config: ConfigParser
+        Config parser with the default configuration applied.
+
     """
     config = ConfigParser()
@@ -60,6 +78,13 @@ def _create_config(path: str, tracker: str, cached: bool) -> ConfigParser:
 def _write_config(path: str, config: ConfigParser) -> None:
     """
     Write the config file to disk.
+
+    Parameters
+    ----------
+    path: str
+        The path of the library to create.
+    config: ConfigParser
+        The configuration to be used as a ConfigParser, e.g. generated by _create_config.
     """
     with open(os.path.join(path, '.corrlib'), 'w') as configfile:
         config.write(configfile)
@@ -70,6 +95,14 @@ def create(path: str, tracker: str = 'datalad', cached: bool = True) -> None:
     """
     Create folder of backlogs.
+
+    Parameters
+    ----------
+    path: str
+        The path at which the library will be created.
+    tracker: str, optional
+        The tracker to use for the library. The default is DataLad, which is also the only one that is supported at the moment.
+    cached: bool, optional
+        Whether or not the library will be cached. By default, it does cache already read entries.
     """
     config = _create_config(path, tracker, cached)
     init(path, tracker)
@@ -1,2 +1,2 @@
+# List of supported input implementations
 codes = ['sfcf', 'openQCD']
@@ -6,6 +6,24 @@ from typing import Any, Optional


 def read_ms1_param(path: str, project: str, file_in_project: str) -> dict[str, Any]:
+    """
+    Read the parameters for ms1 measurements from a parameter file in the project.
+
+    Parameters
+    ----------
+    path: str
+        The path to the backlogger folder.
+    project: str
+        The project from which to read the parameter file.
+    file_in_project: str
+        The path to the parameter file within the project.
+
+    Returns
+    -------
+    param: dict[str, Any]
+        The parameters read from the file.
+    """
+
     file = os.path.join(path, "projects", project, file_in_project)
     ds = os.path.join(path, "projects", project)
     dl.get(file, dataset=ds)
@@ -52,6 +70,24 @@ def read_ms1_param(path: str, project: str, file_in_project: str) -> dict[str, A


 def read_ms3_param(path: str, project: str, file_in_project: str) -> dict[str, Any]:
+    """
+    Read the parameters for ms3 measurements from a parameter file in the project.
+
+    Parameters
+    ----------
+    path: str
+        The path to the backlogger folder.
+    project: str
+        The project from which to read the parameter file.
+    file_in_project: str
+        The path to the parameter file within the project.
+
+    Returns
+    -------
+    param: dict[str, Any]
+        The parameters read from the file.
+    """
+
     file = os.path.join(path, "projects", project, file_in_project)
     ds = os.path.join(path, "projects", project)
     dl.get(file, dataset=ds)
@@ -68,6 +104,36 @@ def read_ms3_param(path: str, project: str, file_in_project: str) -> dict[str, A


 def read_rwms(path: str, project: str, dir_in_project: str, param: dict[str, Any], prefix: str, postfix: str="ms1", version: str='2.0', names: Optional[list[str]]=None, files: Optional[list[str]]=None) -> dict[str, Any]:
+    """
+    Read reweighting factor measurements from the project.
+
+    Parameters
+    ----------
+    path: str
+        The path to the backlogger folder.
+    project: str
+        The project from which to read the measurements.
+    dir_in_project: str
+        The directory within the project where the measurements are located.
+    param: dict[str, Any]
+        The parameters for the measurements.
+    prefix: str
+        The prefix of the measurement files.
+    postfix: str
+        The postfix of the measurement files.
+    version: str
+        The version of openQCD used.
+    names: list[str]
+        Specific names for the replica of the ensemble the measurement file belongs to.
+    files: list[str]
+        Specific files to read.
+
+    Returns
+    -------
+    rw_dict: dict[str, dict[str, Any]]
+        The reweighting factor measurements read from the files.
+    """
+
     dataset = os.path.join(path, "projects", project)
     directory = os.path.join(dataset, dir_in_project)
     if files is None:
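A usage sketch tying the two readers above together. The module path in the import and all file, project and prefix names are assumptions for illustration; only the function signatures come from this diff:

from corrlib.input.openQCD import read_ms1_param, read_rwms   # module path assumed

path = "/path/to/library"                 # placeholder backlogger folder
project = "0000-aaaa-bbbb"                # placeholder project UUID under <path>/projects/

# Read the ms1 parameter file, then hand the resulting dict to the data reader.
param = read_ms1_param(path, project, "infiles/ms1.in")
rw_dict = read_rwms(path, project, "data/ms1", param, prefix="run1",
                    postfix="ms1", version="2.0")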
@@ -95,6 +161,42 @@ def read_rwms(path: str, project: str, dir_in_project: str, param: dict[str, Any


 def extract_t0(path: str, project: str, dir_in_project: str, param: dict[str, Any], prefix: str, dtr_read: int, xmin: int, spatial_extent: int, fit_range: int = 5, postfix: str="", names: Optional[list[str]]=None, files: Optional[list[str]]=None) -> dict[str, Any]:
+    """
+    Extract t0 measurements from the project.
+
+    Parameters
+    ----------
+    path: str
+        The path to the backlogger folder.
+    project: str
+        The project from which to read the measurements.
+    dir_in_project: str
+        The directory within the project where the measurements are located.
+    param: dict[str, Any]
+        The parameters for the measurements.
+    prefix: str
+        The prefix of the measurement files.
+    dtr_read: int
+        The dtr_read parameter for the extraction.
+    xmin: int
+        The xmin parameter for the extraction.
+    spatial_extent: int
+        The spatial_extent parameter for the extraction.
+    fit_range: int
+        The fit_range parameter for the extraction.
+    postfix: str
+        The postfix of the measurement files.
+    names: list[str]
+        Specific names for the replica of the ensemble the measurement file belongs to.
+    files: list[str]
+        Specific files to read.
+
+    Returns
+    -------
+    t0_dict: dict
+        Dictionary of t0 values in the pycorrlib style, with the parameters at hand.
+    """
+
     dataset = os.path.join(path, "projects", project)
     directory = os.path.join(dataset, dir_in_project)
     if files is None:
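For completeness, a hedged call sketch for extract_t0 with the parameters documented above, reusing the placeholder path, project and param from the previous sketch; all numeric values are invented:

t0_dict = extract_t0(path, project, "data/ms3", param, prefix="run1",
                     dtr_read=4, xmin=5, spatial_extent=24, fit_range=5)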
@@ -133,6 +235,42 @@ def extract_t0(path: str, project: str, dir_in_project: str, param: dict[str, An


 def extract_t1(path: str, project: str, dir_in_project: str, param: dict[str, Any], prefix: str, dtr_read: int, xmin: int, spatial_extent: int, fit_range: int = 5, postfix: str = "", names: Optional[list[str]]=None, files: Optional[list[str]]=None) -> dict[str, Any]:
+    """
+    Extract t1 measurements from the project.
+
+    Parameters
+    ----------
+    path: str
+        The path to the backlogger folder.
+    project: str
+        The project from which to read the measurements.
+    dir_in_project: str
+        The directory within the project where the measurements are located.
+    param: dict[str, Any]
+        The parameters for the measurements.
+    prefix: str
+        The prefix of the measurement files.
+    dtr_read: int
+        The dtr_read parameter for the extraction.
+    xmin: int
+        The xmin parameter for the extraction.
+    spatial_extent: int
+        The spatial_extent parameter for the extraction.
+    fit_range: int
+        The fit_range parameter for the extraction.
+    postfix: str
+        The postfix of the measurement files.
+    names: list[str]
+        Specific names for the replica of the ensemble the measurement file belongs to.
+    files: list[str]
+        Specific files to read.
+
+    Returns
+    -------
+    t1_dict: dict
+        Dictionary of t1 values in the pycorrlib style, with the parameters at hand.
+    """
+
     directory = os.path.join(path, "projects", project, dir_in_project)
     if files is None:
         files = []
@@ -161,7 +299,7 @@ def extract_t1(path: str, project: str, dir_in_project: str, param: dict[str, An
     for k in ["integrator", "eps", "ntot", "dnms"]:
         par_list.append(str(param[k]))
     pars = "/".join(par_list)
-    t0_dict: dict[str, Any] = {}
-    t0_dict[param["type"]] = {}
-    t0_dict[param["type"]][pars] = t0
-    return t0_dict
+    t1_dict: dict[str, Any] = {}
+    t1_dict[param["type"]] = {}
+    t1_dict[param["type"]][pars] = t0
+    return t1_dict
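The value returned above is a two-level dictionary: first keyed by the measurement type from the parameter file, then by a "/"-joined parameter string. A standalone sketch of that shape with invented values (the real par_list also contains entries appended before the loop shown in this hunk):

param = {"type": "t1", "integrator": "omf4", "eps": 0.01, "ntot": 100, "dnms": 10}  # invented
t0 = 2.345                                   # stands in for the extracted value

par_list = []
for k in ["integrator", "eps", "ntot", "dnms"]:
    par_list.append(str(param[k]))
pars = "/".join(par_list)                    # "omf4/0.01/100/10"

t1_dict = {param["type"]: {pars: t0}}
print(t1_dict)                               # {'t1': {'omf4/0.01/100/10': 2.345}}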
@@ -229,6 +229,24 @@ def _map_params(params: dict[str, Any], spec_list: list[str]) -> dict[str, Any]:


 def get_specs(key: str, parameters: dict[str, Any], sep: str = '/') -> str:
+    """
+    Get the specification from the parameter file for a specific key in the read measurements.
+
+    Parameters
+    ----------
+    key: str
+        The key for which the parameters are to be looked up.
+    parameters: dict[str, Any]
+        The dictionary with the parameters from the parameter file.
+    sep: str
+        Separator string for the key. (default="/")
+
+    Returns
+    -------
+    s: str
+        json string holding the parameters.
+    """
+
     key_parts = key.split(sep)
     if corr_types[key_parts[0]] == 'bi':
         param = _map_params(parameters, key_parts[1:-1])
@@ -45,9 +45,25 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
     conn.commit()
     conn.close()
     save(path, message="Added entry for project " + uuid + " to database", files=[db_file])
+    return


 def update_project_data(path: str, uuid: str, prop: str, value: Union[str, None] = None) -> None:
+    """
+    Update/Edit a project entry in the database.
+    Thin wrapper around an sqlite3 call.
+
+    Parameters
+    ----------
+    path: str
+        The path to the backlogger folder.
+    uuid: str
+        The uuid of the project.
+    prop: str
+        Property of the entry to edit.
+    value: str or None
+        Value to set `prop` to.
+    """
     db_file = get_db_file(path)
     get(path, db_file)
     conn = sqlite3.connect(os.path.join(path, db_file))
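A minimal usage sketch for the wrapper documented above. The import path and the property name are assumptions; the diff only shows that update_project_data edits one field of a project's database row:

from corrlib.project import update_project_data   # module path assumed

# Placeholder library path and project UUID; 'owner' is a hypothetical property name.
update_project_data("/path/to/library", "0000-aaaa-bbbb", "owner", "Jane Doe")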
@@ -88,6 +104,8 @@ def update_aliases(path: str, uuid: str, aliases: list[str]) -> None:

 def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Optional[list[str]]=None, aliases: Optional[list[str]]=None, code: Optional[str]=None, isDataset: bool=True) -> str:
     """
+    Import a datalad dataset into the backlogger.
+
     Parameters
     ----------
@@ -95,22 +113,19 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Opti
         The url of the project to import. This can be any url that datalad can handle.
     path: str
         The path to the backlogger folder.
-    aliases: list[str]
-        Custom name of the project, alias of the project.
-    code: str
+    owner: str, optional
+        Person responsible for the maintenance of the project to be imported.
+    tags: list[str], optional
+        Custom tags of the imported project.
+    aliases: list[str], optional
+        Custom names of the project, alias of the project.
+    code: str, optional
         Code that was used to create the measurements.
-
-    Import a datalad dataset into the backlogger.
-
-    Parameters
-    ----------
-    path: str
-        The path to the backlogger directory.
-    url: str
-        The url of the project to import. This can be any url that datalad can handle.
-    Also supported are non-datalad datasets, which will be converted to datalad datasets,
-    in order to receive a uuid and have a consistent interface.
+
+    Returns
+    -------
+    uuid: str
+        The unique identifier of the imported project.
     """
     tmp_path = os.path.join(path, 'projects/tmp')
     clone(path, source=url, target=tmp_path)
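A hedged call sketch for import_project, using the signature shown in this hunk; the URL, alias and owner below are placeholders and the import path is an assumption:

from corrlib.project import import_project   # module path assumed

uuid = import_project("/path/to/library",
                      "https://example.com/git/my_measurement_project",
                      owner="Jane Doe", tags=["tutorial"],
                      aliases=["my_project"], code="sfcf")
print(uuid)   # unique identifier of the newly imported project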
@@ -144,6 +159,15 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Opti
 def drop_project_data(path: str, uuid: str, path_in_project: str = "") -> None:
     """
     Drop (parts of) a project to free up diskspace
+
+    Parameters
+    ----------
+    path: str
+        Path of the library.
+    uuid: str
+        The UUID of the project from which data is to be dropped.
+    path_in_project: str, optional
+        If set, only the given path within the project is dropped.
     """
     drop(path + "/projects/" + uuid + "/" + path_in_project)
     return
@@ -27,6 +27,10 @@ def write_measurement(path: str, ensemble: str, measurement: dict[str, dict[str,
         Measurements to be captured in the backlogging system.
     uuid: str
         The uuid of the project.
+    code: str
+        Name of the code that was used for the project.
+    parameter_file: str
+        The parameter file used for the measurement.
     """
     db_file = get_db_file(path)
     db = os.path.join(path, db_file)
@@ -97,6 +101,7 @@ def write_measurement(path: str, ensemble: str, measurement: dict[str, dict[str,
     files.append(os.path.join(path, db_file))
     conn.close()
     save(path, message="Add measurements to database", files=files)
+    return


 def load_record(path: str, meas_path: str) -> Union[Corr, Obs]:
@@ -128,10 +133,13 @@ def load_records(path: str, meas_paths: list[str], preloaded: dict[str, Any] = {
         Path of the correlator library.
     meas_paths: list[str]
         A list of the paths to the correlator in the backlog system.
+    preloaded: dict[str, Any]
+        The data that is already preloaded. Of interest if data has already been loaded in the same script.
+
     Returns
     -------
-    List
+    returned_data: list
+        The loaded records.
     """
     needed_data: dict[str, list[str]] = {}
     for mpath in meas_paths:
@@ -157,6 +165,20 @@ def load_records(path: str, meas_paths: list[str], preloaded: dict[str, Any] = {


 def cache_dir(path: str, file: str) -> str:
+    """
+    Returns the directory corresponding to the cache for the given file.
+
+    Parameters
+    ----------
+    path: str
+        The path of the library.
+    file: str
+        The file in the library that we want to access the cached data of.
+    Returns
+    -------
+    cache_path: str
+        The path holding the cached data for the given file.
+    """
     cache_path_list = [path]
     cache_path_list.append(".cache")
     cache_path_list.extend(file.split("/")[1:])
@@ -165,11 +187,41 @@ def cache_dir(path: str, file: str) -> str:


 def cache_path(path: str, file: str, key: str) -> str:
+    """
+    Parameters
+    ----------
+    path: str
+        The path of the library.
+    file: str
+        The file in the library that we want to access the cached data of.
+    key: str
+        The key within the archive file.
+
+    Returns
+    -------
+    cache_path: str
+        The path at which the measurement of the given file and key is cached.
+    """
     cache_path = os.path.join(cache_dir(path, file), key)
     return cache_path


 def preload(path: str, file: str) -> dict[str, Any]:
+    """
+    Read the contents of a file into a json dictionary with the pyerrors.json.load_json_dict method.
+
+    Parameters
+    ----------
+    path: str
+        The path of the library.
+    file: str
+        The file within the library to be loaded.
+
+    Returns
+    -------
+    filedict: dict[str, Any]
+        The data read from the file.
+    """
     get(path, file)
     filedict: dict[str, Any] = pj.load_json_dict(os.path.join(path, file))
     print("> read file")
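The cache layout used by cache_dir and cache_path above mirrors the file's location inside the library: everything after the first path component of `file` is replayed under <library>/.cache/, and the record key is appended at the end. A standalone sketch with invented names:

import os

path = "/path/to/library"                          # placeholder library path
file = "archive/A653/f_A/data.json.gz"             # invented file inside the library
key = "sfcf/f_A/some_parameter_string"             # invented key within the archive file

cache_path_list = [path, ".cache"] + file.split("/")[1:]
cache_dir_ = os.path.join(*cache_path_list)        # /path/to/library/.cache/A653/f_A/data.json.gz
cache_path_ = os.path.join(cache_dir_, key)
print(cache_path_)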
@@ -177,6 +229,16 @@ def preload(path: str, file: str) -> dict[str, Any]:


 def drop_record(path: str, meas_path: str) -> None:
+    """
+    Drop a record by its path.
+
+    Parameters
+    ----------
+    path: str
+        The path of the library.
+    meas_path: str
+        The measurement path as noted in the database.
+    """
     file_in_archive = meas_path.split("::")[0]
     file = os.path.join(path, file_in_archive)
     db_file = get_db_file(path)
@@ -204,6 +266,14 @@ def drop_record(path: str, meas_path: str) -> None:


 def drop_cache(path: str) -> None:
+    """
+    Drop the cache directory of the library.
+
+    Parameters
+    ----------
+    path: str
+        The path of the library.
+    """
     cache_dir = os.path.join(path, ".cache")
     for f in os.listdir(cache_dir):
         shutil.rmtree(os.path.join(cache_dir, f))
@@ -22,6 +22,18 @@ from typing import Any


 def replace_string(string: str, name: str, val: str) -> str:
+    """
+    Replace a placeholder {name} with a value in a string.
+
+    Parameters
+    ----------
+    string: str
+        String in which the placeholders are to be replaced.
+    name: str
+        The name of the placeholder.
+    val: str
+        The value the placeholder is to be replaced with.
+    """
     if '{' + name + '}' in string:
         n = string.replace('{' + name + '}', val)
         return n
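The substitution above is the building block for the toml import: a {name} placeholder in any string field is replaced by the corresponding value. A self-contained illustration of the branch shown in this hunk, with invented strings:

name, val = "ens", "A653"                       # invented placeholder name and value
string = "projects/{ens}/data/f_A"

if '{' + name + '}' in string:
    string = string.replace('{' + name + '}', val)
print(string)                                   # projects/A653/data/f_A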
@@ -30,7 +42,16 @@ def replace_string(string: str, name: str, val: str) -> str:


 def replace_in_meas(measurements: dict[str, dict[str, Any]], vars: dict[str, str]) -> dict[str, dict[str, Any]]:
-    # replace global variables
+    """
+    Replace placeholders in the definitions for a measurement.
+
+    Parameters
+    ----------
+    measurements: dict[str, dict[str, Any]]
+        The measurements read from the toml file.
+    vars: dict[str, str]
+        Simple key:value dictionary with the keys to be replaced by the values.
+    """
     for name, value in vars.items():
         for m in measurements.keys():
             for key in measurements[m].keys():
@@ -43,6 +64,16 @@ def replace_in_meas(measurements: dict[str, dict[str, Any]], vars: dict[str, str


 def fill_cons(measurements: dict[str, dict[str, Any]], constants: dict[str, str]) -> dict[str, dict[str, Any]]:
+    """
+    Fill in defined constants into the measurements of the toml-file.
+
+    Parameters
+    ----------
+    measurements: dict[str, dict[str, Any]]
+        The measurements read from the toml file.
+    constants: dict[str, str]
+        Simple key:value dictionary with the keys to be replaced by the values.
+    """
     for m in measurements.keys():
         for name, val in constants.items():
             if name not in measurements[m].keys():
@@ -51,6 +82,14 @@ def fill_cons(measurements: dict[str, dict[str, Any]], constants: dict[str, str]


 def check_project_data(d: dict[str, dict[str, str]]) -> None:
+    """
+    Check the data given in the toml import file for the project we want to import.
+
+    Parameters
+    ----------
+    d: dict
+        The dictionary holding the data necessary to import the project.
+    """
     if 'project' not in d.keys() or 'measurements' not in d.keys() or len(list(d.keys())) > 4:
         raise ValueError('There should only be maximally be four keys on the top level, "project" and "measurements" are mandatory, "contants" is optional!')
     project_data = d['project']
@@ -64,6 +103,16 @@ def check_project_data(d: dict[str, dict[str, str]]) -> None:


 def check_measurement_data(measurements: dict[str, dict[str, str]], code: str) -> None:
+    """
+    Check syntax of the measurements we want to import.
+
+    Parameters
+    ----------
+    measurements: dict[str, dict[str, str]]
+        The dictionary holding the necessary data to import the project.
+    code: str
+        The code used for the project.
+    """
     var_names: list[str] = []
     if code == "sfcf":
         var_names = ["path", "ensemble", "param_file", "version", "prefix", "cfg_seperator", "names"]
@@ -78,8 +127,21 @@ def check_measurement_data(measurements: dict[str, dict[str, str]], code: str) -


 def import_tomls(path: str, files: list[str], copy_files: bool=True) -> None:
+    """
+    Import multiple toml files.
+
+    Parameters
+    ----------
+    path: str
+        Path to the backlog directory.
+    files: list[str]
+        Paths to the description files.
+    copy_files: bool, optional
+        Whether the toml-files will be copied into the library. Default is True.
+    """
     for file in files:
         import_toml(path, file, copy_files)
+    return


 def import_toml(path: str, file: str, copy_file: bool=True) -> None:
@@ -92,6 +154,8 @@ def import_toml(path: str, file: str, copy_file: bool=True) -> None:
         Path to the backlog directory.
     file: str
         Path to the description file.
+    copy_file: bool, optional
+        Whether the toml-files will be copied into the library. Default is True.
     """
     print("Import project as decribed in " + file)
     with open(file, 'rb') as fp:
@@ -180,6 +244,16 @@ def reimport_project(path: str, uuid: str) -> None:


 def update_project(path: str, uuid: str) -> None:
+    """
+    Update all entries associated with a given project.
+
+    Parameters
+    ----------
+    path: str
+        The path of the library.
+    uuid: str
+        The unique identifier of the project to be updated.
+    """
     dl.update(how='merge', follow='sibling', dataset=os.path.join(path, "projects", uuid))
     # reimport_project(path, uuid)
     return
@@ -3,26 +3,91 @@ from configparser import ConfigParser
 from typing import Any

 CONFIG_FILENAME = ".corrlib"
+cached: bool = True


 def str2list(string: str) -> list[str]:
+    """
+    Convert a comma-separated string to a list.
+
+    Parameters
+    ----------
+    string: str
+        The string holding a comma-separated list.
+
+    Returns
+    -------
+    s: list[str]
+        The list of strings that was held by the comma-separated string.
+    """
     return string.split(",")


 def list2str(mylist: list[str]) -> str:
+    """
+    Convert a list to a comma-separated string.
+
+    Parameters
+    ----------
+    mylist: list[str]
+        A list of strings to be concatenated.
+
+    Returns
+    -------
+    s: str
+        The string holding a comma-separated list.
+    """
     s = ",".join(mylist)
     return s

-cached: bool = True

 def m2k(m: float) -> float:
+    """
+    Convert the bare quark mass $m$ to the inverse mass parameter $\kappa$.
+
+    Parameters
+    ----------
+    m: float
+        Bare quark mass.
+
+    Returns
+    -------
+    k: float
+        The corresponding $\kappa$.
+    """
     return 1/(2*m+8)


 def k2m(k: float) -> float:
+    """
+    Convert from the inverse bare quark parameter $\kappa$ to the bare quark mass $m$.
+
+    Parameters
+    ----------
+    k: float
+        Inverse bare quark mass parameter $\kappa$.
+
+    Returns
+    -------
+    m: float
+        The corresponding bare quark mass.
+    """
     return (1/(2*k))-4


 def set_config(path: str, section: str, option: str, value: Any) -> None:
+    """
+    Set configuration parameters for the library.
+
+    Parameters
+    ----------
+    path: str
+        The path of the library.
+    section: str
+        The section within the configuration file.
+    option: str
+        The option to be set to value.
+    value: Any
+        The value we set the option to.
+    """
     config_path = os.path.join(path, '.corrlib')
     config = ConfigParser()
     if os.path.exists(config_path):
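m2k and k2m above are exact inverses of each other, k = 1/(2m + 8) and m = 1/(2k) - 4. A quick round-trip check with an invented mass value:

def m2k(m: float) -> float:
    return 1 / (2 * m + 8)

def k2m(k: float) -> float:
    return 1 / (2 * k) - 4

m = 0.05                 # invented bare quark mass
k = m2k(m)               # 1 / 8.1, roughly 0.12346
print(k, k2m(k))         # k2m(m2k(m)) recovers m = 0.05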
@@ -36,6 +101,19 @@ def set_config(path: str, section: str, option: str, value: Any) -> None:


 def get_db_file(path: str) -> str:
+    """
+    Get the database file associated with the library at the given path.
+
+    Parameters
+    ----------
+    path: str
+        The path of the library.
+
+    Returns
+    -------
+    db_file: str
+        The file holding the database.
+    """
     config_path = os.path.join(path, CONFIG_FILENAME)
     config = ConfigParser()
     if os.path.exists(config_path):
@@ -45,6 +123,20 @@ def get_db_file(path: str) -> str:


 def cache_enabled(path: str) -> bool:
+    """
+    Check whether the library is cached.
+    Fallback is true.
+
+    Parameters
+    ----------
+    path: str
+        The path of the library.
+
+    Returns
+    -------
+    cached_bool: bool
+        Whether the given library is cached.
+    """
     config_path = os.path.join(path, CONFIG_FILENAME)
     config = ConfigParser()
     if os.path.exists(config_path):
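A hedged sketch of how these configuration helpers fit together. The section and option names are assumptions; the extract only shows that the settings live in the library's .corrlib file and that cache_enabled falls back to True when nothing is configured:

from corrlib.tools import set_config, get_db_file, cache_enabled

path = "/path/to/library"                         # placeholder library path

set_config(path, "backlogger", "cached", "True")  # hypothetical section/option names
db_file = get_db_file(path)                       # name of the database file, as configured
if cache_enabled(path):
    print("cached reads enabled for", db_file)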