Compare commits

...

3 commits

4 changed files with 30 additions and 20 deletions

View file

@@ -5,7 +5,7 @@ import json
import pandas as pd import pandas as pd
import numpy as np import numpy as np
from .input.implementations import codes from .input.implementations import codes
from .tools import k2m from .tools import k2m, get_file
# this will implement the search functionality # this will implement the search functionality
@@ -143,7 +143,7 @@ def find_record(path, ensemble, correlator_name, code, project=None, parameters=
db = path + '/backlogger.db' db = path + '/backlogger.db'
if code not in codes: if code not in codes:
raise ValueError("Code " + code + "unknown, take one of the following:" + ", ".join(codes)) raise ValueError("Code " + code + "unknown, take one of the following:" + ", ".join(codes))
dl.get(db, dataset=path) get_file(path, "backlogger.db")
results = _db_lookup(db, ensemble, correlator_name,code, project, parameters=parameters, created_before=created_before, created_after=created_after, updated_before=updated_before, updated_after=updated_after, revision=revision) results = _db_lookup(db, ensemble, correlator_name,code, project, parameters=parameters, created_before=created_before, created_after=created_after, updated_before=updated_before, updated_after=updated_after, revision=revision)
if code == "sfcf": if code == "sfcf":
results = sfcf_filter(results, **kwargs) results = sfcf_filter(results, **kwargs)
@@ -151,14 +151,14 @@ def find_record(path, ensemble, correlator_name, code, project=None, parameters=
return results.reset_index() return results.reset_index()
def find_project(path, db, name): def find_project(path, name):
dl.get(db, dataset=path) get_file(path, "backlogger.db")
return _project_lookup_by_alias(db, name) return _project_lookup_by_alias(os.path.join(path, "backlogger.db"), name)
def list_projects(path): def list_projects(path):
db = path + '/backlogger.db' db = path + '/backlogger.db'
dl.get(db, dataset=path) get_file(path, "backlogger.db")
conn = sqlite3.connect(db) conn = sqlite3.connect(db)
c = conn.cursor() c = conn.cursor()
c.execute("SELECT id,aliases FROM projects") c.execute("SELECT id,aliases FROM projects")

View file

@@ -5,7 +5,7 @@ import os
from .git_tools import move_submodule from .git_tools import move_submodule
import shutil import shutil
from .find import _project_lookup_by_id from .find import _project_lookup_by_id
from .tools import list2str, str2list from .tools import list2str, str2list, get_file
from typing import Union from typing import Union
@@ -25,7 +25,7 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
The code that was used to create the measurements. The code that was used to create the measurements.
""" """
db = path + "/backlogger.db" db = path + "/backlogger.db"
dl.get(db, dataset=path) get_file(path, "backlogger.db")
conn = sqlite3.connect(db) conn = sqlite3.connect(db)
c = conn.cursor() c = conn.cursor()
known_projects = c.execute("SELECT * FROM projects WHERE id=?", (uuid,)) known_projects = c.execute("SELECT * FROM projects WHERE id=?", (uuid,))
@@ -45,9 +45,9 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
dl.save(db, message="Added entry for project " + uuid + " to database", dataset=path) dl.save(db, message="Added entry for project " + uuid + " to database", dataset=path)
def update_project_data(path, db, uuid, prop, value = None): def update_project_data(path, uuid, prop, value = None):
dl.get(db, dataset=path) get_file(path, "backlogger.db")
conn = sqlite3.connect(db) conn = sqlite3.connect(os.path.join(path, "backlogger.db"))
c = conn.cursor() c = conn.cursor()
c.execute(f"UPDATE projects SET '{prop}' = '{value}' WHERE id == '{uuid}'") c.execute(f"UPDATE projects SET '{prop}' = '{value}' WHERE id == '{uuid}'")
conn.commit() conn.commit()
@@ -57,7 +57,7 @@ def update_project_data(path, db, uuid, prop, value = None):
def update_aliases(path: str, uuid: str, aliases: list[str]): def update_aliases(path: str, uuid: str, aliases: list[str]):
db = os.path.join(path, "backlogger.db") db = os.path.join(path, "backlogger.db")
dl.get(db, dataset=path) get_file(path, "backlogger.db")
known_data = _project_lookup_by_id(db, uuid)[0] known_data = _project_lookup_by_id(db, uuid)[0]
known_aliases = known_data[1] known_aliases = known_data[1]
@@ -77,7 +77,7 @@ def update_aliases(path: str, uuid: str, aliases: list[str]):
if not len(new_alias_list) == len(known_alias_list): if not len(new_alias_list) == len(known_alias_list):
alias_str = list2str(new_alias_list) alias_str = list2str(new_alias_list)
dl.unlock(db, dataset=path) dl.unlock(db, dataset=path)
update_project_data(db, uuid, "aliases", alias_str) update_project_data(path, uuid, "aliases", alias_str)
dl.save(db, dataset=path) dl.save(db, dataset=path)
return return
@@ -122,7 +122,7 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Unio
raise ValueError("The dataset does not have a uuid!") raise ValueError("The dataset does not have a uuid!")
if not os.path.exists(path + "/projects/" + uuid): if not os.path.exists(path + "/projects/" + uuid):
db = path + "/backlogger.db" db = path + "/backlogger.db"
dl.get(db, ds=path) get_file(path, "backlogger.db")
dl.unlock(db, dataset=path) dl.unlock(db, dataset=path)
create_project(path, uuid, owner, tags, aliases, code) create_project(path, uuid, owner, tags, aliases, code)
move_submodule(path, 'projects/tmp', 'projects/' + uuid) move_submodule(path, 'projects/tmp', 'projects/' + uuid)

View file

@@ -7,7 +7,7 @@ import json
from typing import Union from typing import Union
from pyerrors import Obs, Corr, dump_object, load_object from pyerrors import Obs, Corr, dump_object, load_object
from hashlib import sha256 from hashlib import sha256
from .tools import cached from .tools import cached, get_file
import shutil import shutil
@@ -28,7 +28,7 @@ def write_measurement(path, ensemble, measurement, uuid, code, parameter_file=No
The uuid of the project. The uuid of the project.
""" """
db = os.path.join(path, 'backlogger.db') db = os.path.join(path, 'backlogger.db')
dl.get(db, ds=path) get_file(path, "backlogger.db")
dl.unlock(db, dataset=path) dl.unlock(db, dataset=path)
conn = sqlite3.connect(db) conn = sqlite3.connect(db)
c = conn.cursor() c = conn.cursor()
@@ -168,9 +168,7 @@ def cache_path(path, file, key):
def preload(path: str, file: str): def preload(path: str, file: str):
print("Loading data...") get_file(path, file)
dl.get(os.path.join(path, file), dataset=path)
print("> downloaded file")
filedict = pj.load_json_dict(os.path.join(path, file)) filedict = pj.load_json_dict(os.path.join(path, file))
print("> read file") print("> read file")
return filedict return filedict
@@ -180,7 +178,7 @@ def drop_record(path: str, meas_path: str):
file_in_archive = meas_path.split("::")[0] file_in_archive = meas_path.split("::")[0]
file = os.path.join(path, file_in_archive) file = os.path.join(path, file_in_archive)
db = os.path.join(path, 'backlogger.db') db = os.path.join(path, 'backlogger.db')
dl.get(db, ds=path) get_file(path, 'backlogger.db')
sub_key = meas_path.split("::")[1] sub_key = meas_path.split("::")[1]
dl.unlock(db, dataset=path) dl.unlock(db, dataset=path)
conn = sqlite3.connect(db) conn = sqlite3.connect(db)

View file

@@ -1,3 +1,5 @@
import os
import datalad.api as dl
def str2list(string): def str2list(string):
@@ -15,3 +17,13 @@ def m2k(m):
def k2m(k): def k2m(k):
return (1/(2*k))-4 return (1/(2*k))-4
def get_file(path, file):
    """Fetch *file* from the datalad dataset rooted at *path*.

    Emits a short status line before and after the download so callers
    get feedback on long-running dataset fetches.
    """
    # The database file gets its own message; everything else is generic data.
    message = "Downloading database..." if file == "backlogger.db" else "Downloading data..."
    print(message)
    dl.get(os.path.join(path, file), dataset=path)
    print("> downloaded file")