Rename getter for the database file name
This commit is contained in:
parent
14d19ce9dd
commit
3d91509ab6
5 changed files with 30 additions and 30 deletions
|
|
@@ -1,22 +1,22 @@
|
||||||
from typing import Union, Optional
|
from typing import Optional
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
from .tools import record2name_key
|
from .tools import record2name_key
|
||||||
from pyerrors import dump_object
|
|
||||||
import datalad.api as dl
|
import datalad.api as dl
|
||||||
import sqlite3
|
import sqlite3
|
||||||
|
from tools import db_filename
|
||||||
|
|
||||||
|
|
||||||
def get_version_hash(path, record):
|
def get_version_hash(path: str, record: str) -> str:
|
||||||
db = os.path.join(path, "backlogger.db")
|
db = os.path.join(path, db_filename(path))
|
||||||
dl.get(db, dataset=path)
|
dl.get(db, dataset=path)
|
||||||
conn = sqlite3.connect(db)
|
conn = sqlite3.connect(db)
|
||||||
c = conn.cursor()
|
c = conn.cursor()
|
||||||
c.execute(f"SELECT current_version FROM 'backlogs' WHERE path = '{record}'")
|
c.execute(f"SELECT current_version FROM 'backlogs' WHERE path = '{record}'")
|
||||||
return c.fetchall()[0][0]
|
return str(c.fetchall()[0][0])
|
||||||
|
|
||||||
|
|
||||||
def drop_cache_files(path: str, fs: Optional[list[str]]=None):
|
def drop_cache_files(path: str, fs: Optional[list[str]]=None) -> None:
|
||||||
cache_dir = os.path.join(path, ".cache")
|
cache_dir = os.path.join(path, ".cache")
|
||||||
if fs is None:
|
if fs is None:
|
||||||
fs = os.listdir(cache_dir)
|
fs = os.listdir(cache_dir)
|
||||||
|
|
@@ -24,7 +24,7 @@ def drop_cache_files(path: str, fs: Optional[list[str]]=None):
|
||||||
shutil.rmtree(os.path.join(cache_dir, f))
|
shutil.rmtree(os.path.join(cache_dir, f))
|
||||||
|
|
||||||
|
|
||||||
def cache_dir(path, file):
|
def cache_dir(path: str, file: str) -> str:
|
||||||
cache_path_list = [path]
|
cache_path_list = [path]
|
||||||
cache_path_list.append(".cache")
|
cache_path_list.append(".cache")
|
||||||
cache_path_list.extend(file.split("/")[1:])
|
cache_path_list.extend(file.split("/")[1:])
|
||||||
|
|
@@ -32,27 +32,27 @@ def cache_dir(path, file):
|
||||||
return cache_path
|
return cache_path
|
||||||
|
|
||||||
|
|
||||||
def cache_path(path, file, sha_hash, key):
|
def cache_path(path: str, file: str, sha_hash: str, key: str) -> str:
|
||||||
cache_path = os.path.join(cache_dir(path, file), key + "_" + sha_hash)
|
cache_path = os.path.join(cache_dir(path, file), key + "_" + sha_hash)
|
||||||
return cache_path
|
return cache_path
|
||||||
|
|
||||||
|
|
||||||
def is_old_version(path, record):
|
def is_old_version(path: str, record: str) -> bool:
|
||||||
version_hash = get_version_hash(path, record)
|
version_hash = get_version_hash(path, record)
|
||||||
file, key = record2name_key(record)
|
file, key = record2name_key(record)
|
||||||
meas_cache_path = os.path.join(cache_dir(path, file))
|
meas_cache_path = os.path.join(cache_dir(path, file))
|
||||||
ls = []
|
ls = []
|
||||||
|
is_old = True
|
||||||
for p, ds, fs in os.walk(meas_cache_path):
|
for p, ds, fs in os.walk(meas_cache_path):
|
||||||
ls.extend(fs)
|
ls.extend(fs)
|
||||||
for filename in ls:
|
for filename in ls:
|
||||||
if key == filename.split("_")[0]:
|
if key == filename.split("_")[0]:
|
||||||
if not version_hash == filename.split("_")[1][:-2]:
|
if version_hash == filename.split("_")[1][:-2]:
|
||||||
return True
|
is_old = False
|
||||||
else:
|
return is_old
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def is_in_cache(path, record):
|
def is_in_cache(path: str, record: str) -> bool:
|
||||||
version_hash = get_version_hash(path, record)
|
version_hash = get_version_hash(path, record)
|
||||||
file, key = record2name_key(record)
|
file, key = record2name_key(record)
|
||||||
return os.path.exists(cache_path(path, file, version_hash, key) + ".p")
|
return os.path.exists(cache_path(path, file, version_hash, key) + ".p")
|
||||||
|
|
|
||||||
|
|
@@ -4,7 +4,7 @@ import json
|
||||||
import pandas as pd
|
import pandas as pd
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from .input.implementations import codes
|
from .input.implementations import codes
|
||||||
from .tools import k2m, get_db_file
|
from .tools import k2m, db_filename
|
||||||
from .tracker import get
|
from .tracker import get
|
||||||
from typing import Any, Optional
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
|
@@ -230,7 +230,7 @@ def sfcf_filter(results: pd.DataFrame, **kwargs: Any) -> pd.DataFrame:
|
||||||
|
|
||||||
def find_record(path: str, ensemble: str, correlator_name: str, code: str, project: Optional[str]=None, parameters: Optional[str]=None,
|
def find_record(path: str, ensemble: str, correlator_name: str, code: str, project: Optional[str]=None, parameters: Optional[str]=None,
|
||||||
created_before: Optional[str]=None, created_after: Optional[str]=None, updated_before: Optional[str]=None, updated_after: Optional[str]=None, revision: Optional[str]=None, **kwargs: Any) -> pd.DataFrame:
|
created_before: Optional[str]=None, created_after: Optional[str]=None, updated_before: Optional[str]=None, updated_after: Optional[str]=None, revision: Optional[str]=None, **kwargs: Any) -> pd.DataFrame:
|
||||||
db_file = get_db_file(path)
|
db_file = db_filename(path)
|
||||||
db = os.path.join(path, db_file)
|
db = os.path.join(path, db_file)
|
||||||
if code not in codes:
|
if code not in codes:
|
||||||
raise ValueError("Code " + code + "unknown, take one of the following:" + ", ".join(codes))
|
raise ValueError("Code " + code + "unknown, take one of the following:" + ", ".join(codes))
|
||||||
|
|
@@ -262,7 +262,7 @@ def find_project(path: str, name: str) -> str:
|
||||||
uuid: str
|
uuid: str
|
||||||
The uuid of the project in question.
|
The uuid of the project in question.
|
||||||
"""
|
"""
|
||||||
db_file = get_db_file(path)
|
db_file = db_filename(path)
|
||||||
get(path, db_file)
|
get(path, db_file)
|
||||||
return _project_lookup_by_alias(os.path.join(path, db_file), name)
|
return _project_lookup_by_alias(os.path.join(path, db_file), name)
|
||||||
|
|
||||||
|
|
@@ -281,7 +281,7 @@ def list_projects(path: str) -> list[tuple[str, str]]:
|
||||||
results: list[Any]
|
results: list[Any]
|
||||||
The projects known to the library.
|
The projects known to the library.
|
||||||
"""
|
"""
|
||||||
db_file = get_db_file(path)
|
db_file = db_filename(path)
|
||||||
get(path, db_file)
|
get(path, db_file)
|
||||||
conn = sqlite3.connect(os.path.join(path, db_file))
|
conn = sqlite3.connect(os.path.join(path, db_file))
|
||||||
c = conn.cursor()
|
c = conn.cursor()
|
||||||
|
|
|
||||||
|
|
@@ -5,7 +5,7 @@ import os
|
||||||
from .git_tools import move_submodule
|
from .git_tools import move_submodule
|
||||||
import shutil
|
import shutil
|
||||||
from .find import _project_lookup_by_id
|
from .find import _project_lookup_by_id
|
||||||
from .tools import list2str, str2list, get_db_file
|
from .tools import list2str, str2list, db_filename
|
||||||
from .tracker import get, save, unlock, clone, drop
|
from .tracker import get, save, unlock, clone, drop
|
||||||
from typing import Union, Optional
|
from typing import Union, Optional
|
||||||
|
|
||||||
|
|
@@ -25,7 +25,7 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
|
||||||
code: str (optional)
|
code: str (optional)
|
||||||
The code that was used to create the measurements.
|
The code that was used to create the measurements.
|
||||||
"""
|
"""
|
||||||
db_file = get_db_file(path)
|
db_file = db_filename(path)
|
||||||
db = os.path.join(path, db_file)
|
db = os.path.join(path, db_file)
|
||||||
get(path, db_file)
|
get(path, db_file)
|
||||||
conn = sqlite3.connect(db)
|
conn = sqlite3.connect(db)
|
||||||
|
|
@@ -64,7 +64,7 @@ def update_project_data(path: str, uuid: str, prop: str, value: Union[str, None]
|
||||||
value: str or None
|
value: str or None
|
||||||
Value to set `prop` to.
|
Value to set `prop` to.
|
||||||
"""
|
"""
|
||||||
db_file = get_db_file(path)
|
db_file = db_filename(path)
|
||||||
get(path, db_file)
|
get(path, db_file)
|
||||||
conn = sqlite3.connect(os.path.join(path, db_file))
|
conn = sqlite3.connect(os.path.join(path, db_file))
|
||||||
c = conn.cursor()
|
c = conn.cursor()
|
||||||
|
|
@@ -75,7 +75,7 @@ def update_project_data(path: str, uuid: str, prop: str, value: Union[str, None]
|
||||||
|
|
||||||
|
|
||||||
def update_aliases(path: str, uuid: str, aliases: list[str]) -> None:
|
def update_aliases(path: str, uuid: str, aliases: list[str]) -> None:
|
||||||
db_file = get_db_file(path)
|
db_file = db_filename(path)
|
||||||
db = os.path.join(path, db_file)
|
db = os.path.join(path, db_file)
|
||||||
get(path, db_file)
|
get(path, db_file)
|
||||||
known_data = _project_lookup_by_id(db, uuid)[0]
|
known_data = _project_lookup_by_id(db, uuid)[0]
|
||||||
|
|
@@ -135,7 +135,7 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Opti
|
||||||
if not uuid:
|
if not uuid:
|
||||||
raise ValueError("The dataset does not have a uuid!")
|
raise ValueError("The dataset does not have a uuid!")
|
||||||
if not os.path.exists(path + "/projects/" + uuid):
|
if not os.path.exists(path + "/projects/" + uuid):
|
||||||
db_file = get_db_file(path)
|
db_file = db_filename(path)
|
||||||
get(path, db_file)
|
get(path, db_file)
|
||||||
unlock(path, db_file)
|
unlock(path, db_file)
|
||||||
create_project(path, uuid, owner, tags, aliases, code)
|
create_project(path, uuid, owner, tags, aliases, code)
|
||||||
|
|
|
||||||
|
|
@@ -3,12 +3,12 @@ import os
|
||||||
import sqlite3
|
import sqlite3
|
||||||
from .input import sfcf,openQCD
|
from .input import sfcf,openQCD
|
||||||
import json
|
import json
|
||||||
from typing import Union, Optional,Any
|
from typing import Union, Any
|
||||||
from pyerrors import Obs, Corr, load_object, dump_object
|
from pyerrors import Obs, Corr, load_object, dump_object
|
||||||
from hashlib import sha256, sha1
|
from hashlib import sha256
|
||||||
from .tools import record2name_key, name_key2record, make_version_hash
|
from .tools import record2name_key, name_key2record, make_version_hash
|
||||||
from .cache_io import is_in_cache, cache_path, cache_dir, get_version_hash
|
from .cache_io import is_in_cache, cache_path, cache_dir, get_version_hash
|
||||||
from .tools import get_db_file, cache_enabled
|
from .tools import db_filename, cache_enabled
|
||||||
from .tracker import get, save, unlock
|
from .tracker import get, save, unlock
|
||||||
import shutil
|
import shutil
|
||||||
|
|
||||||
|
|
@@ -33,7 +33,7 @@ def write_measurement(path: str, ensemble: str, measurement: dict[str, dict[str,
|
||||||
parameter_file: str
|
parameter_file: str
|
||||||
The parameter file used for the measurement.
|
The parameter file used for the measurement.
|
||||||
"""
|
"""
|
||||||
db_file = get_db_file(path)
|
db_file = db_filename(path)
|
||||||
db = os.path.join(path, db_file)
|
db = os.path.join(path, db_file)
|
||||||
get(path, db_file)
|
get(path, db_file)
|
||||||
unlock(path, db_file)
|
unlock(path, db_file)
|
||||||
|
|
@@ -204,7 +204,7 @@ def drop_record(path: str, meas_path: str) -> None:
|
||||||
"""
|
"""
|
||||||
file_in_archive = meas_path.split("::")[0]
|
file_in_archive = meas_path.split("::")[0]
|
||||||
file = os.path.join(path, file_in_archive)
|
file = os.path.join(path, file_in_archive)
|
||||||
db_file = get_db_file(path)
|
db_file = db_filename(path)
|
||||||
db = os.path.join(path, db_file)
|
db = os.path.join(path, db_file)
|
||||||
get(path, db_file)
|
get(path, db_file)
|
||||||
sub_key = meas_path.split("::")[1]
|
sub_key = meas_path.split("::")[1]
|
||||||
|
|
|
||||||
|
|
@@ -3,7 +3,7 @@ from configparser import ConfigParser
|
||||||
import datalad.api as dl
|
import datalad.api as dl
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
import shutil
|
import shutil
|
||||||
from .tools import get_db_file
|
from .tools import db_filename
|
||||||
|
|
||||||
|
|
||||||
def get_tracker(path: str) -> str:
|
def get_tracker(path: str) -> str:
|
||||||
|
|
@@ -43,7 +43,7 @@ def get(path: str, file: str) -> None:
|
||||||
"""
|
"""
|
||||||
tracker = get_tracker(path)
|
tracker = get_tracker(path)
|
||||||
if tracker == 'datalad':
|
if tracker == 'datalad':
|
||||||
if file == get_db_file(path):
|
if file == db_filename(path):
|
||||||
print("Downloading database...")
|
print("Downloading database...")
|
||||||
else:
|
else:
|
||||||
print("Downloading data...")
|
print("Downloading data...")
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue