add a way to load multiple correlators at once
This commit is contained in:
parent
2312b5972d
commit
25cbf5b6f6
1 changed file with 50 additions and 6 deletions
@@ -4,6 +4,8 @@ import datalad.api as dl
 import sqlite3
 from .input import sfcf,openQCD
 import json
+from typing import Union
+from pyerrors import Obs, Corr
 
 
 def write_measurement(path, ensemble, measurement, uuid, code, parameter_file):
@@ -66,14 +68,56 @@ def write_measurement(path, ensemble, measurement, uuid, code, parameter_file):
     dl.save(files, message="Add measurements to database", dataset=path)
 
 
-def get_record(path, meas_path):
-    file = meas_path.split("::")[0]
-    sub_key = meas_path.split("::")[1]
-    dl.get(file, dataset=path)
-    return pj.load_json_dict(file)[sub_key]
+def load_record(path: str, meas_path: str):
+    """
+    Load a single record by its path.
+
+    Parameters
+    ----------
+    path: str
+        Path of the correlator library.
+    meas_path: str
+        The path to the correlator in the backlog system.
+
+    Returns
+    -------
+    co : Corr or Obs
+        The correlator in question.
+    """
+    return load_records(path, [meas_path])[0]
 
 
-def drop_record(path, meas_path):
+def load_records(path: str, meas_paths: list[str]) -> list[Union[Corr, Obs]]:
+    """
+    Load a list of records by their paths.
+
+    Parameters
+    ----------
+    path: str
+        Path of the correlator library.
+    meas_paths: list[str]
+        A list of paths to the correlators in the backlog system.
+
+    Returns
+    -------
+    co : list[Union[Corr, Obs]]
+        The correlators in question.
+    """
+    # Group the requested keys by file so each file is read only once.
+    needed_data: dict[str, list[str]] = {}
+    for mpath in meas_paths:
+        file = mpath.split("::")[0]
+        if file not in needed_data.keys():
+            needed_data[file] = []
+        key = mpath.split("::")[1]
+        needed_data[file].append(key)
+    returned_data: list = []
+    for filename in needed_data.keys():
+        dl.get(filename, dataset=path)  # fetch file content, as get_record did
+        filedict = pj.load_json_dict(filename)
+        for key in list(needed_data[filename]):
+            returned_data.append(filedict[key])
+    return returned_data
+
+
+def drop_record(path: str, meas_path: str):
     file = meas_path.split("::")[0]
     sub_key = meas_path.split("::")[1]
     dl.unlock(path + '/backlogger.db', dataset=path)
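
Taken together, the commit turns single-record loading into a thin wrapper over a batch loader. A minimal usage sketch follows; the module name, library location, and the concrete "file::key" measurement paths are illustrative assumptions, not taken from the commit:

    from corrlib import load_record, load_records  # hypothetical module name

    # Batch variant: keys living in the same file are grouped, so each
    # underlying JSON file is fetched and parsed only once.
    corrs = load_records("./correlator_library", [
        "projects/ABC123/measurements.json::C2pt/r0",  # hypothetical keys
        "projects/ABC123/measurements.json::C2pt/r1",
    ])

    # Single-record variant, now delegating to load_records.
    c = load_record("./correlator_library",
                    "projects/ABC123/measurements.json::C2pt/r0")

Grouping the requested keys per file is what makes the batch call cheaper than repeated load_record calls when many correlators come from the same measurement file.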