implement write to db properly
This commit is contained in:
parent 4c81410798
commit 391fa556ef

2 changed files with 53 additions and 32 deletions

@@ -1,5 +1,15 @@
 import pyerrors as pe
 import datalad.api as dl
+import json
+
+corr_types = {
+    'f_A': 'bi',
+    'f_P': 'bi',
+    'g_A': 'bi',
+    'g_P': 'bi',
+    'f_1': 'bb',
+    'k_1': 'bb',
+}
 
 
 def read_param(path, project, file_in_project):
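The new module-level corr_types table classifies each correlator by the type labels used in pyerrors' sfcf reader ('bi' for boundary-to-inner, 'bb' for boundary-to-boundary), and the code below uses it to decide how much of a data key counts as parameter specification. A minimal self-contained sketch of that lookup; the key layout in the example is illustrative, not taken from this repo:

corr_types = {'f_A': 'bi', 'f_1': 'bb'}

def spec_parts(key, sep='/'):
    parts = key.split(sep)
    # 'bi' keys carry one trailing non-parameter component, which is dropped
    return parts[1:-1] if corr_types[parts[0]] == 'bi' else parts[1:]

print(spec_parts('f_A/u u/0/5/5'))  # ['u u', '0', '5']
print(spec_parts('f_1/u u/0/5/5'))  # ['u u', '0', '5', '5']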
@@ -105,7 +115,7 @@ def _map_params(params, spec_list):
     Map the extracted parameters to the extracted data.
 
     """
 
     # quarks/offset/wf/wf2
     new_specs = {}
     # quarks
     quarks = spec_list[0].split(" ")
@@ -133,6 +143,17 @@ def _map_params(params, spec_list):
     return new_specs
 
 
+def get_specs(key, parameters, sep='/'):
+    key_parts = key.split(sep)
+    if corr_types[key_parts[0]] == 'bi':
+        param = _map_params(parameters, key_parts[1:-1])
+    else:
+        param = _map_params(parameters, key_parts[1:])
+    print(param)
+    s = json.dumps(param)
+    return s
+
+
 def read_data(path, project, dir_in_project, prefix, param, version='1.0c', cfg_seperator='n', sep='/', **kwargs):
     """
     Extract the data from the sfcf file.
@@ -144,14 +165,7 @@ def read_data(path, project, dir_in_project, prefix, param, version='1.0c', cfg_
 
     """
     names = kwargs.get('names', None)
-    corr_types = {
-        'f_A': 'bi',
-        'f_P': 'bi',
-        'g_A': 'bi',
-        'g_P': 'bi',
-        'f_1': 'bb',
-        'k_1': 'bb',
-    }
+
     directory = path + "/projects/" + project + '/' + dir_in_project
     dl.get(directory, dataset=path)
     corr_type_list = []
@@ -185,11 +199,7 @@ def read_data(path, project, dir_in_project, prefix, param, version='1.0c', cfg_
     for key in data.keys():
         key_parts = key.split(sep)
         corr = key_parts[0]
-        if corr_types[corr] == 'bi':
-            specs = _map_params(param, key_parts[1:-1])
-        else:
-            specs = _map_params(param, key_parts[1:])
         if corr not in sorted_data:
             sorted_data[corr] = {}
         sorted_data[corr][sep.join(key_parts[1:])] = data[key]
-    return sorted_data, specs
+    return sorted_data
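With this hunk read_data stops computing specs and returns only the sorted data; deriving the specification for a given key is now the caller's job via the new get_specs, which serialises the mapped parameters with json.dumps. A sketch of the resulting call pattern, assuming the module lives at input/sfcf.py as the imports in the next file suggest; the package name and argument values are placeholders:

from corrlib.input.sfcf import read_data, get_specs  # package name assumed

param = {}  # placeholder for the dict produced by read_param
data = read_data('/data/backlog', 'my_project', 'out', 'run', param)
for corr in data:
    for subkey in data[corr]:
        # get_specs re-derives the per-key parameter specification as a JSON string
        specs_json = get_specs(corr + "/" + subkey, param)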
@@ -2,9 +2,11 @@ from pyerrors.input import json as pj
 import os
 import datalad.api as dl
 import sqlite3
+from .input.sfcf import get_specs
+from .input.sfcf import read_param
 
 
-def write_measurement(path, ensemble, measurement, uuid, code, parameters, parameter_file):
+def write_measurement(path, ensemble, measurement, uuid, code, parameter_file):
     """
     Write a measurement to the backlog.
     If the file for the measurement already exists, update the measurement.
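The signature change drops parameters from the argument list; the function now derives it internally from parameter_file via read_param (see the next hunk). A hedged sketch of the new call shape — the module path and all values here are hypothetical:

from corrlib import backlogger  # hypothetical import path

measured_obs = None  # placeholder for a pyerrors observable
backlogger.write_measurement(
    '/data/backlog',                      # datalad dataset root holding backlogger.db
    'E250',                               # ensemble
    {'f_A': {'u u/0/5': measured_obs}},   # measurement: corr -> {subkey: data}
    'project-uuid',
    'sfcf',                               # code
    'params/run1.in')                     # parameters are read from this file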
@@ -20,27 +22,36 @@ def write_measurement(path, ensemble, measurement, uuid, code, parameters, param
     uuid: str
         The uuid of the project.
     """
 
+    parameters = read_param(path, uuid, parameter_file)
+    print(parameters)
+    dl.unlock(path + '/backlogger.db', dataset=path)
+    conn = sqlite3.connect(path + '/backlogger.db')
+    c = conn.cursor()
+    files = []
     for corr in measurement.keys():
         file = path + "/archive/" + ensemble + "/" + corr + '/' + uuid + '.json.gz'
+        files.append(file)
         if not os.path.exists(path + "/archive/" + ensemble + "/" + corr):
             os.makedirs(path + "/archive/" + ensemble + "/" + corr)
-        conn = sqlite3.connect(path + '/backlogger.db')
-        c = conn.cursor()
-        if os.path.exists(file):
-            dl.unlock(file, dataset=path)
-            known_meas = pj.load_json_dict(file)
-            for key in measurement[corr].keys():
-                known_meas[key] = measurement[corr][key]
-        else:
-            known_meas = measurement[corr]
-        pj.dump_dict_to_json(measurement[corr], file)
+
         for subkey in measurement[corr].keys():
             meas_path = file + "::" + subkey
-            if not os.path.exists(file):
-                c.execute("INSERT INTO backlogs (name, ensemble, code, path, project, parameters, parameter_file, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, datetime('now'), datetime('now'))", (corr, ensemble, code, meas_path, uuid, parameters, parameter_file))
-            else:
-                c.execute("UPDATE backlogs SET updated_at=datetime('now') WHERE path=?", (file,))
+
+            if os.path.exists(file):
+                dl.unlock(file, dataset=path)
+                known_meas = pj.load_json_dict(file)
+                for key in measurement[corr].keys():
+                    known_meas[key] = measurement[corr][key]
+                # this should be only set if something changed.
+            else:
+                known_meas = measurement[corr]
+
+            if c.execute("SELECT * FROM backlogs WHERE path = ?", (meas_path,)).fetchone() is not None:
+                c.execute("UPDATE backlogs SET name = ?, ensemble = ?, code = ?, project = ?, parameters = ?, parameter_file = ?, updated_at = datetime('now') WHERE path = ?", (meas_path, ))
+            else:
+                c.execute("INSERT INTO backlogs (name, ensemble, code, path, project, parameters, parameter_file, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, datetime('now'), datetime('now'))", (corr, ensemble, code, meas_path, uuid, get_specs(corr + "/" + subkey, parameters), parameter_file))
-        conn.commit()
-        conn.close()
-        dl.save([path + '/backlogger.db', file], message="Add measurement to database", dataset=path)
+        pj.dump_dict_to_json(measurement[corr], file)
+    files.append(path + '/backlogger.db')
+    conn.close()
+    dl.save(files, message="Add measurement to database", dataset=path)
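The new per-subkey bookkeeping is a check-then-write pattern keyed on path: SELECT first, UPDATE if the row exists, INSERT otherwise. Two things stand out in the hunk as shown: the UPDATE binds only (meas_path,) against eight placeholders, which sqlite3 rejects with a ProgrammingError once that branch runs, and the per-loop conn.commit() is removed without a commit before the final conn.close(), so changes would only persist if a commit happens elsewhere. A minimal self-contained sketch of the intended pattern with both points addressed; the table schema is inferred from the column list in the INSERT:

import sqlite3

conn = sqlite3.connect(':memory:')  # stand-in for backlogger.db
c = conn.cursor()
c.execute("CREATE TABLE backlogs (name TEXT, ensemble TEXT, code TEXT, path TEXT, "
          "project TEXT, parameters TEXT, parameter_file TEXT, created_at TEXT, updated_at TEXT)")

def record(c, name, ensemble, code, meas_path, project, parameters, parameter_file):
    # Update the row if this measurement path is already known, insert otherwise.
    row = c.execute("SELECT * FROM backlogs WHERE path = ?", (meas_path,)).fetchone()
    if row is not None:
        c.execute("UPDATE backlogs SET name = ?, ensemble = ?, code = ?, project = ?, "
                  "parameters = ?, parameter_file = ?, updated_at = datetime('now') "
                  "WHERE path = ?",
                  (name, ensemble, code, project, parameters, parameter_file, meas_path))
    else:
        c.execute("INSERT INTO backlogs (name, ensemble, code, path, project, parameters, "
                  "parameter_file, created_at, updated_at) "
                  "VALUES (?, ?, ?, ?, ?, ?, ?, datetime('now'), datetime('now'))",
                  (name, ensemble, code, meas_path, project, parameters, parameter_file))

record(c, 'f_A', 'E250', 'sfcf', '/archive/E250/f_A/abc.json.gz::u u/0/5', 'uuid-1', '{}', 'p.in')
conn.commit()  # without this, the changes are lost when the connection closes
conn.close()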