Compare commits: develop...feat/hadro (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | d3511b77c8 |  |

11 changed files with 69 additions and 101 deletions
**.gitignore** (vendored, 3 changes)
```diff
@@ -1,4 +1,3 @@
-pyerrors_corrlib.egg-info
 __pycache__
+*.egg-info
 test.ipynb
-*.egg-info
```
**corrlib/find.py**

```diff
@@ -5,7 +5,7 @@ import json
 import pandas as pd
 import numpy as np
 from .input.implementations import codes
-from .tools import k2m, get_file
+from .tools import k2m
 # this will implement the search functionality
 
 
@@ -143,22 +143,21 @@ def find_record(path, ensemble, correlator_name, code, project=None, parameters=
     db = path + '/backlogger.db'
     if code not in codes:
         raise ValueError("Code " + code + "unknown, take one of the following:" + ", ".join(codes))
-    get_file(path, "backlogger.db")
+    if os.path.exists(db):
+        dl.get(db, dataset=path)
     results = _db_lookup(db, ensemble, correlator_name,code, project, parameters=parameters, created_before=created_before, created_after=created_after, updated_before=updated_before, updated_after=updated_after, revision=revision)
     if code == "sfcf":
         results = sfcf_filter(results, **kwargs)
-    print("Found " + str(len(results)) + " result" + ("s" if len(results)>1 else ""))
+    print("Found " + str(len(results)) + " results")
     return results.reset_index()
 
 
-def find_project(path, name):
-    get_file(path, "backlogger.db")
-    return _project_lookup_by_alias(os.path.join(path, "backlogger.db"), name)
+def find_project(db, name):
+    return _project_lookup_by_alias(db, name)
 
 
 def list_projects(path):
     db = path + '/backlogger.db'
-    get_file(path, "backlogger.db")
     conn = sqlite3.connect(db)
     c = conn.cursor()
     c.execute("SELECT id,aliases FROM projects")
```
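For orientation, a minimal usage sketch of the two lookup entry points on the feat/hadro side; the module path is inferred from this diff and all argument values are assumptions. Note that `find_project` now takes the database file itself rather than the dataset root:

```python
import os
from corrlib.find import find_record, find_project  # module path inferred from this diff

path = "/data/lattice_archive"            # hypothetical datalad dataset root
db = os.path.join(path, "backlogger.db")  # the new find_project signature wants the db file

records = find_record(path, "A1k1", "f_A", "sfcf")  # hypothetical ensemble/correlator names
project = find_project(db, "my_alias")              # hypothetical alias
```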
**corrlib/input/hadrons.py** (new file, 48 lines)
```diff
@@ -0,0 +1,48 @@
+import pyerrors.input.hadrons as input
+import datalad.api as dl
+import os
+
+# same as in Grid/qcd/spin/Gamma.h
+
+defd_gammas = [
+    'MinusGamma5', 'Gamma5',
+    'MinusGammaT', 'GammaT', 'MinusGammaTGamma5', 'GammaTGamma5',
+    'MinusGammaX', 'GammaX', 'MinusGammaXGamma5', 'GammaXGamma5',
+    'MinusGammaY', 'GammaY', 'MinusGammaYGamma5', 'GammaYGamma5',
+    'MinusGammaZ', 'GammaZ', 'MinusGammaZGamma5', 'GammaZGamma5',
+    'MinusIdentity', 'Identity',
+    'MinusSigmaXT', 'SigmaXT',
+    'MinusSigmaXY', 'SigmaXY',
+    'MinusSigmaXZ', 'SigmaXZ',
+    'MinusSigmaYT', 'SigmaYT',
+    'MinusSigmaYZ', 'SigmaYZ',
+    'MinusSigmaZT', 'SigmaZT'
+]
+
+
+def read_meson_hd5(path, project, dir_in_project, prefix, ensemble, gammas):
+    directory = os.path.join(path, "projects", project, dir_in_project)
+    measurements = {}
+
+    dl.get(directory, dataset=path)
+
+    if gammas == 'all':
+        for g1 in defd_gammas:
+            for g2 in defd_gammas:
+                try:
+                    corr = input.read_meson_hd5(directory, prefix, ensemble, (g1, g2))
+                    measurements[g1][g2] = corr
+                except Exception:
+                    raise Exception("Combination (" + g1 + "," + g2 + ") not in data.")
+    else:
+        for gs in gammas:
+            if len(gs) == 2:
+                if gs[0] not in defd_gammas:
+                    raise ValueError(gammas[0] + " is none of the defined gammas.")
+                if gs[1] not in defd_gammas:
+                    raise ValueError(gammas[1] + " is none of the defined gammas.")
+                corr = input.read_meson_hd5(directory, prefix, ensemble, gs)
+                measurements[gs[0]][gs[1]] = corr
+            else:
+                raise Exception("Each element in 'gammas' has to have length 2.")
+    return measurements
```
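One caveat a reviewer may want to flag: `measurements` starts as a flat empty dict, so the nested assignment `measurements[g1][g2] = corr` raises a `KeyError` on first use, and the surrounding `except Exception` then reports it as a missing combination (the explicit-list branch has the same pattern, plus error messages that index `gammas` where they presumably mean `gs`). A minimal sketch of the usual fix, not part of this commit:

```python
# stand-ins for the loop variables and the pyerrors Corr object
measurements = {}
g1, g2 = 'Gamma5', 'GammaT'
corr = object()

# setdefault creates the inner dict on first use, then assigns into it
measurements.setdefault(g1, {})[g2] = corr
assert measurements['Gamma5']['GammaT'] is corr
```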
**(file name not captured; create_project / update_project_data / update_aliases / import_project)**

```diff
@@ -5,7 +5,7 @@ import os
 from .git_tools import move_submodule
 import shutil
 from .find import _project_lookup_by_id
-from .tools import list2str, str2list, get_file
+from .tools import list2str, str2list
 from typing import Union
 
 
@@ -24,15 +24,13 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
     code: str (optional)
         The code that was used to create the measurements.
     """
-    db = path + "/backlogger.db"
-    get_file(path, "backlogger.db")
-    conn = sqlite3.connect(db)
+    conn = sqlite3.connect(path + "/backlogger.db")
     c = conn.cursor()
     known_projects = c.execute("SELECT * FROM projects WHERE id=?", (uuid,))
     if known_projects.fetchone():
         raise ValueError("Project already imported, use update_project() instead.")
 
-    dl.unlock(db, dataset=path)
+    dl.unlock(path + "/backlogger.db", dataset=path)
     alias_str = None
     if aliases is not None:
         alias_str = list2str(aliases)
@@ -42,12 +40,11 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
     c.execute("INSERT INTO projects (id, aliases, customTags, owner, code, created_at, updated_at) VALUES (?, ?, ?, ?, ?, datetime('now'), datetime('now'))", (uuid, alias_str, tag_str, owner, code))
     conn.commit()
     conn.close()
-    dl.save(db, message="Added entry for project " + uuid + " to database", dataset=path)
+    dl.save(path + "/backlogger.db", message="Added entry for project " + uuid + " to database", dataset=path)
 
 
-def update_project_data(path, uuid, prop, value = None):
-    get_file(path, "backlogger.db")
-    conn = sqlite3.connect(os.path.join(path, "backlogger.db"))
+def update_project_data(db, uuid, prop, value = None):
+    conn = sqlite3.connect(db)
     c = conn.cursor()
     c.execute(f"UPDATE projects SET '{prop}' = '{value}' WHERE id == '{uuid}'")
     conn.commit()
```
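Both sides of the `update_project_data` hunk interpolate `prop`, `value`, and `uuid` directly into the SQL string, which breaks on embedded quotes and is injection-prone. A hedged sketch of a parameterized variant; the column whitelist is an assumption read off the INSERT statement above:

```python
import sqlite3

ALLOWED_PROPS = {"aliases", "customTags", "owner", "code"}  # columns from the INSERT above

def update_project_data(db, uuid, prop, value=None):
    if prop not in ALLOWED_PROPS:
        raise ValueError("Unknown project property: " + prop)
    conn = sqlite3.connect(db)
    c = conn.cursor()
    # column names cannot be bound as parameters, but values can
    c.execute(f"UPDATE projects SET {prop} = ? WHERE id = ?", (value, uuid))
    conn.commit()
    conn.close()
```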
```diff
@@ -57,7 +54,6 @@ def update_project_data(path, uuid, prop, value = None):
 
 def update_aliases(path: str, uuid: str, aliases: list[str]):
     db = os.path.join(path, "backlogger.db")
-    get_file(path, "backlogger.db")
     known_data = _project_lookup_by_id(db, uuid)[0]
     known_aliases = known_data[1]
 
@@ -77,7 +73,7 @@ def update_aliases(path: str, uuid: str, aliases: list[str]):
     if not len(new_alias_list) == len(known_alias_list):
         alias_str = list2str(new_alias_list)
         dl.unlock(db, dataset=path)
-        update_project_data(path, uuid, "aliases", alias_str)
+        update_project_data(db, uuid, "aliases", alias_str)
         dl.save(db, dataset=path)
     return
 
@@ -121,13 +117,11 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Unio
     if not uuid:
         raise ValueError("The dataset does not have a uuid!")
     if not os.path.exists(path + "/projects/" + uuid):
-        db = path + "/backlogger.db"
-        get_file(path, "backlogger.db")
-        dl.unlock(db, dataset=path)
+        dl.unlock(path + "/backlogger.db", dataset=path)
         create_project(path, uuid, owner, tags, aliases, code)
         move_submodule(path, 'projects/tmp', 'projects/' + uuid)
         os.mkdir(path + '/import_scripts/' + uuid)
-        dl.save([db, path + '/projects/' + uuid], message="Import project from " + url, dataset=path)
+        dl.save([path + "/backlogger.db", path + '/projects/' + uuid], message="Import project from " + url, dataset=path)
     else:
         dl.drop(tmp_path, reckless='kill')
         shutil.rmtree(tmp_path)
@@ -140,11 +134,3 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Unio
 
     # make this more concrete
     return uuid
-
-
-def drop_project_data(path: str, uuid: str, path_in_project: str = ""):
-    """
-    Drop (parts of) a prject to free up diskspace
-    """
-    dl.drop(path + "/projects/" + uuid + "/" + path_in_project)
```
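The recurring pattern in this file is datalad's edit cycle for an annexed file: unlock it, modify it, save it back. A minimal sketch, assuming `path` is the root of a datalad dataset that contains `backlogger.db`:

```python
import sqlite3
import datalad.api as dl

path = "/data/lattice_archive"   # hypothetical dataset root
db = path + "/backlogger.db"

dl.unlock(db, dataset=path)      # make the annexed file writable
conn = sqlite3.connect(db)       # ...modify the database here...
conn.close()
dl.save(db, message="update projects table", dataset=path)  # commit the change to the dataset
```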
**(file name not captured; write_measurement / preload / drop_record)**

```diff
@@ -7,7 +7,7 @@ import json
 from typing import Union
 from pyerrors import Obs, Corr, dump_object, load_object
 from hashlib import sha256
-from .tools import cached, get_file
+from .tools import cached
 import shutil
 
 
@@ -28,7 +28,6 @@ def write_measurement(path, ensemble, measurement, uuid, code, parameter_file=No
         The uuid of the project.
     """
     db = os.path.join(path, 'backlogger.db')
-    get_file(path, "backlogger.db")
     dl.unlock(db, dataset=path)
     conn = sqlite3.connect(db)
     c = conn.cursor()
@@ -168,9 +167,8 @@ def cache_path(path, file, key):
 
 
 def preload(path: str, file: str):
-    get_file(path, file)
+    dl.get(os.path.join(path, file), dataset=path)
     filedict = pj.load_json_dict(os.path.join(path, file))
     print("> read file")
     return filedict
-
 
@@ -178,7 +176,6 @@ def drop_record(path: str, meas_path: str):
     file_in_archive = meas_path.split("::")[0]
     file = os.path.join(path, file_in_archive)
     db = os.path.join(path, 'backlogger.db')
-    get_file(path, 'backlogger.db')
     sub_key = meas_path.split("::")[1]
     dl.unlock(db, dataset=path)
     conn = sqlite3.connect(db)
```
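`drop_record` shows the record-path convention used throughout: the part before `::` names a file inside the dataset, the part after names a key within that file. A hypothetical example:

```python
meas_path = "projects/some-uuid/measurements.json.gz::A1k1/f_A"  # hypothetical record path
file_in_archive, sub_key = meas_path.split("::")
# file_in_archive == "projects/some-uuid/measurements.json.gz"
# sub_key == "A1k1/f_A"
```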
**corrlib/tools.py**

```diff
@@ -1,5 +1,4 @@
-import os
 import datalad.api as dl
 
 
 def str2list(string):
@@ -17,13 +16,3 @@ def m2k(m):
 
 def k2m(k):
     return (1/(2*k))-4
-
-
-def get_file(path, file):
-    if file == "backlogger.db":
-        print("Downloading database...")
-    else:
-        print("Downloading data...")
-    dl.get(os.path.join(path, file), dataset=path)
-    print("> downloaded file")
-
```
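For reference, `k2m` is the usual Wilson hopping-parameter relation m = 1/(2k) - 4, which vanishes at k = 1/8; `m2k`, whose body this hunk does not show, is presumably its inverse k = 1/(2(m + 4)). A quick check:

```python
def k2m(k):
    return (1/(2*k))-4

assert k2m(0.125) == 0.0                          # m = 0 at kappa = 1/8
assert abs(1/(2*(k2m(0.13) + 4)) - 0.13) < 1e-12  # inverting recovers kappa
```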
**(file name not captured; package version)**

```diff
@@ -1 +1 @@
-__version__ = "0.2.3"
+__version__ = "0.1.4"
```
**(file name not captured; deleted TOML import example)**

```diff
@@ -1,28 +0,0 @@
-['project']
-url = "git@kuhl-mann.de:lattice/cA_data.git"
-code = "sfcf"
-[measurements]
-[measurements.a]
-path = "/path/to/measurement"
-ensemble = "A1k1"
-param_file = "/path/to/file"
-version = "1.1"
-prefix = "pref"
-cfg_seperator = "n"
-names = ['list', 'of', 'names']
-[measurements.b]
-path = "/path/to/measurement"
-ensemble = "A1k1"
-param_file = "/path/to/file"
-version = "1.1"
-prefix = "pref"
-cfg_seperator = "n"
-names = ['list', 'of', 'names']
-[measurements.c]
-path = "/path/to/measurement"
-ensemble = "A1k1"
-param_file = "/path/to/file"
-version = "1.1"
-prefix = "pref"
-cfg_seperator = "n"
-names = ['list', 'of', 'names']
```
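The deleted example documents the TOML schema the importer consumes: one `[project]` table plus one `[measurements.<name>]` table per measurement. A sketch of reading such a file with the standard library; the file name is hypothetical and `tomllib` needs Python 3.11+ (older versions can use the `tomli` package):

```python
import tomllib  # Python 3.11+

with open("import_example.toml", "rb") as f:  # hypothetical file name
    cfg = tomllib.load(f)

for name, meas in cfg["measurements"].items():
    print(name, meas["ensemble"], meas["prefix"])
```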
**projects/tmp** (submodule, 1 change)

```diff
@@ -0,0 +1 @@
+Subproject commit 216fe4ed3467ed486390735f8072856cf3d0a409
```
**pyproject.toml** (deleted)

```diff
@@ -1,6 +0,0 @@
-[build-system]
-requires = ["setuptools >= 63.0.0", "wheel"]
-build-backend = "setuptools.build_meta"
-
-[tool.ruff.lint]
-ignore = ["F403"]
```
**(file name not captured; deleted test for corrlib.toml)**

```diff
@@ -1,17 +0,0 @@
-import corrlib.toml as t
-
-
-def test_toml_check_measurement_data():
-    measurements = {
-        "a":
-        {
-            "path": "/path/to/measurement",
-            "ensemble": "A1k1",
-            "param_file": "/path/to/file",
-            "version": "1.1",
-            "prefix": "pref",
-            "cfg_seperator": "n",
-            "names": ['list', 'of', 'names']
-        }
-    }
-    t.check_measurement_data(measurements)
```