Reformat parts of measurement writing
This commit is contained in:
parent
005840d212
commit
17d2d186e7
1 changed file with 25 additions and 29 deletions
|
@ -25,16 +25,11 @@ def write_measurement(path, ensemble, measurement, uuid, code, parameter_file):
|
|||
uuid: str
|
||||
The uuid of the project.
|
||||
"""
|
||||
if code == "sfcf":
|
||||
parameters = sfcf.read_param(path, uuid, parameter_file)
|
||||
elif code == "openQCD":
|
||||
parameters = openQCD.read_param(path, uuid, parameter_file)
|
||||
|
||||
|
||||
dl.unlock(path + '/backlogger.db', dataset=path)
|
||||
conn = sqlite3.connect(path + '/backlogger.db')
|
||||
c = conn.cursor()
|
||||
files = []
|
||||
hashed_measurement = {}
|
||||
for corr in measurement.keys():
|
||||
file = path + "/archive/" + ensemble + "/" + corr + '/' + uuid + '.json.gz'
|
||||
files.append(file)
|
||||
|
@ -46,34 +41,35 @@ def write_measurement(path, ensemble, measurement, uuid, code, parameter_file):
|
|||
dl.unlock(file, dataset=path)
|
||||
known_meas = pj.load_json_dict(file)
|
||||
if code == "sfcf":
|
||||
hashed_measurement[corr] = {}
|
||||
for subkey in measurement[corr].keys():
|
||||
pars = sfcf.get_specs(corr + "/" + subkey, parameters)
|
||||
parHash = sha256(str(pars).encode('UTF-8')).hexdigest()
|
||||
meas_path = file + "::" + parHash
|
||||
known_meas[parHash] = measurement[corr][subkey]
|
||||
parameters = sfcf.read_param(path, uuid, parameter_file)
|
||||
pars = {}
|
||||
subkeys = list(measurement[corr].keys())
|
||||
for subkey in subkeys:
|
||||
pars[subkey] = sfcf.get_specs(corr + "/" + subkey, parameters)
|
||||
|
||||
if c.execute("SELECT * FROM backlogs WHERE path = ?", (meas_path,)).fetchone() is not None:
|
||||
c.execute("UPDATE backlogs SET updated_at = datetime('now') WHERE path = ?", (meas_path, ))
|
||||
else:
|
||||
c.execute("INSERT INTO backlogs (name, ensemble, code, path, project, parameters, parameter_file, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, datetime('now'), datetime('now'))",
|
||||
(corr, ensemble, code, meas_path, uuid, pars, parameter_file))
|
||||
elif code == "openQCD":
|
||||
parameters = openQCD.read_param(path, uuid, parameter_file)
|
||||
pars = {}
|
||||
subkeys = []
|
||||
for i in range(len(parameters["rw_fcts"])):
|
||||
pars = json.dumps(parameters["rw_fcts"][i])
|
||||
parHash = sha256(str(pars).encode('UTF-8')).hexdigest()
|
||||
meas_path = file + "::" + parHash
|
||||
corr = 'ms1'
|
||||
par_list = []
|
||||
par_list = []
|
||||
for k in parameters["rw_fcts"][i].keys():
|
||||
par_list.append(str(parameters["rw_fcts"][i][k]))
|
||||
pars = "/".join(par_list)
|
||||
known_meas[parHash] = measurement[corr][pars]
|
||||
if c.execute("SELECT * FROM backlogs WHERE path = ?", (meas_path,)).fetchone() is not None:
|
||||
c.execute("UPDATE backlogs SET updated_at = datetime('now') WHERE path = ?", (meas_path, ))
|
||||
else:
|
||||
c.execute("INSERT INTO backlogs (name, ensemble, code, path, project, parameters, parameter_file, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, datetime('now'), datetime('now'))",
|
||||
(corr, ensemble, code, meas_path, uuid, json.dumps(parameters["rw_fcts"][i]), parameter_file))
|
||||
subkey = "/".join(par_list)
|
||||
subkeys.append(subkey)
|
||||
pars[subkey] = json.dumps(parameters["rw_fcts"][i])
|
||||
|
||||
for subkey in subkeys:
|
||||
parHash = sha256(str(pars[subkey]).encode('UTF-8')).hexdigest()
|
||||
meas_path = file + "::" + parHash
|
||||
|
||||
known_meas[parHash] = measurement[corr][subkey]
|
||||
|
||||
if c.execute("SELECT * FROM backlogs WHERE path = ?", (meas_path,)).fetchone() is not None:
|
||||
c.execute("UPDATE backlogs SET updated_at = datetime('now') WHERE path = ?", (meas_path, ))
|
||||
else:
|
||||
c.execute("INSERT INTO backlogs (name, ensemble, code, path, project, parameters, parameter_file, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, datetime('now'), datetime('now'))",
|
||||
(corr, ensemble, code, meas_path, uuid, pars[subkey], parameter_file))
|
||||
conn.commit()
|
||||
pj.dump_dict_to_json(known_meas, file)
|
||||
files.append(path + '/backlogger.db')
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue