import os
import shutil
import sqlite3
from typing import Optional, Union

import datalad.api as dl
import datalad.config as dlc

from .find import _project_lookup_by_id
from .git_tools import move_submodule
from .tools import get_db_file, list2str, str2list
from .tracker import clone, drop, get, save, unlock


def create_project(path: str, uuid: str, owner: Union[str, None] = None, tags: Union[list[str], None] = None, aliases: Union[list[str], None] = None, code: Union[str, None] = None) -> None:
    """
    Create a new project entry in the database.

    Parameters
    ----------
    path: str
        The path to the backlogger folder.
    uuid: str
        The uuid of the project.
    owner: str (optional)
        The owner of the project.
    tags: list[str] (optional)
        Custom tags for the project.
    aliases: list[str] (optional)
        Custom names for the project (e.g. 'cA determination on exponential clover').
    code: str (optional)
        The code that was used to create the measurements.
    """
    db_file = get_db_file(path)
    db = os.path.join(path, db_file)
    get(path, db_file)
    conn = sqlite3.connect(db)
    c = conn.cursor()
    known_projects = c.execute("SELECT * FROM projects WHERE id=?", (uuid,))
    if known_projects.fetchone():
        # close the connection before refusing the duplicate entry
        conn.close()
        raise ValueError("Project already imported, use update_project() instead.")

    unlock(path, db_file)
    alias_str = ""
    if aliases is not None:
        alias_str = list2str(aliases)
    tag_str = ""
    if tags is not None:
        tag_str = list2str(tags)
    c.execute("INSERT INTO projects (id, aliases, customTags, owner, code, created_at, updated_at) VALUES (?, ?, ?, ?, ?, datetime('now'), datetime('now'))", (uuid, alias_str, tag_str, owner, code))
    conn.commit()
    conn.close()
    save(path, message="Added entry for project " + uuid + " to database", files=[db_file])


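# Usage sketch (kept as a comment so importing this module has no side effects;
# the path, uuid and metadata below are hypothetical):
#
#   create_project(
#       "/data/backlogger",
#       "12345678-abcd-ef00-1234-56789abcdef0",
#       owner="jdoe",
#       tags=["clover"],
#       aliases=["cA determination on exponential clover"],
#   )

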
def update_project_data(path: str, uuid: str, prop: str, value: Union[str, None] = None) -> None:
    """Set a single column `prop` of the project row identified by `uuid`."""
    db_file = get_db_file(path)
    get(path, db_file)
    conn = sqlite3.connect(os.path.join(path, db_file))
    c = conn.cursor()
    # Column names cannot be bound as SQL parameters, so `prop` is interpolated
    # directly and must come from trusted callers; value and uuid are bound.
    c.execute(f"UPDATE projects SET {prop} = ? WHERE id = ?", (value, uuid))
    conn.commit()
    conn.close()
    return


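# Usage sketch (hypothetical values); callers unlock and save the database file
# around this call, as update_aliases() below does:
#
#   update_project_data("/data/backlogger", project_uuid, "owner", "jdoe")

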
def update_aliases(path: str, uuid: str, aliases: Optional[list[str]]) -> None:
    """Merge the given aliases into the alias list stored for project `uuid`."""
    db_file = get_db_file(path)
    db = os.path.join(path, db_file)
    get(path, db_file)
    known_data = _project_lookup_by_id(db, uuid)[0]
    known_aliases = known_data[1]

    if aliases is None:
        aliases = []
    if known_aliases is None:
        print(f"Project {uuid} has no known aliases.")
        known_alias_list = []
    else:
        print(f"Project {uuid} is known by names: {known_aliases}")
        known_alias_list = str2list(known_aliases)
    new_alias_list = known_alias_list.copy()
    for aka in aliases:
        if aka not in known_alias_list:
            new_alias_list.append(aka)

    if len(new_alias_list) != len(known_alias_list):
        alias_str = list2str(new_alias_list)
        unlock(path, db_file)
        update_project_data(path, uuid, "aliases", alias_str)
        save(path, message="Updated aliases for project " + uuid, files=[db_file])
    return


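# Usage sketch (hypothetical values): merge two aliases into the stored alias
# list of a project; aliases that are already known are not duplicated:
#
#   update_aliases("/data/backlogger", project_uuid, ["run1", "clover-cA"])

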
def import_project(path: str, url: str, owner: Union[str, None] = None, tags: Optional[list[str]] = None, aliases: Optional[list[str]] = None, code: Optional[str] = None, isDataset: bool = True) -> str:
    """
    Import a datalad dataset into the backlogger.

    Parameters
    ----------
    path: str
        The path to the backlogger folder.
    url: str
        The url of the project to import. This can be any url that datalad can handle.
        Also supported are non-datalad datasets, which will be converted to datalad datasets,
        in order to receive a uuid and have a consistent interface.
    owner: str (optional)
        The owner of the project.
    tags: list[str] (optional)
        Custom tags for the project.
    aliases: list[str] (optional)
        Custom names of the project (aliases).
    code: str (optional)
        Code that was used to create the measurements.

    Returns
    -------
    str
        The uuid of the imported project.
    """
    tmp_path = os.path.join(path, 'projects/tmp')
    clone(path, source=url, target=tmp_path)
    tmp_ds = dl.Dataset(tmp_path)
    conf = dlc.ConfigManager(tmp_ds)
    # check the id before converting to str: str(None) would be the truthy string "None"
    dataset_id = conf.get("datalad.dataset.id")
    if not dataset_id:
        raise ValueError("The dataset does not have a uuid!")
    uuid = str(dataset_id)
    if not os.path.exists(path + "/projects/" + uuid):
        db_file = get_db_file(path)
        get(path, db_file)
        unlock(path, db_file)
        create_project(path, uuid, owner, tags, aliases, code)
        move_submodule(path, 'projects/tmp', 'projects/' + uuid)
        os.mkdir(path + '/import_scripts/' + uuid)
        save(path, message="Import project from " + url, files=['projects/' + uuid, db_file])
    else:
        dl.drop(tmp_path, reckless='kill')
        shutil.rmtree(tmp_path)
    if aliases is not None:
        if isinstance(aliases, str):
            alias_list = [aliases]
        else:
            alias_list = aliases
        update_aliases(path, uuid, alias_list)

    # make this more concrete
    return uuid


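# Usage sketch (the path and url are hypothetical); the returned uuid identifies
# the project in later calls:
#
#   uuid = import_project(
#       "/data/backlogger",
#       "https://example.com/datasets/my-measurement.git",
#       owner="jdoe",
#       aliases=["my-measurement"],
#   )

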
def drop_project_data(path: str, uuid: str, path_in_project: str = "") -> None:
    """
    Drop (parts of) a project to free up disk space.
    """
    drop(path + "/projects/" + uuid + "/" + path_in_project)
    return
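
# Usage sketch (hypothetical values): free the disk space used by one
# subdirectory of an imported project, keeping its entry in the database:
#
#   drop_project_data("/data/backlogger", project_uuid, "raw/run1")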