corrlib/corrlib/main.py
Justus Kuhlmann 2537fea06c
Some checks failed
Pytest / pytest (3.13) (push) Has been cancelled
Pytest / pytest (3.14) (push) Has been cancelled
Ruff / ruff (push) Waiting to run
Mypy / mypy (push) Successful in 45s
Pytest / pytest (3.12) (push) Has been cancelled
roll out save replacement
2025-12-04 12:29:31 +01:00

152 lines
5.4 KiB
Python

import os
import re
import shutil
import sqlite3
from typing import Optional, Union

import datalad.api as dl
import datalad.config as dlc

from .find import _project_lookup_by_id
from .git_tools import move_submodule
from .tools import list2str, str2list
from .tracker import get, save
def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Union[list[str], None]=None, aliases: Union[list[str], None]=None, code: Union[str, None]=None) -> None:
    """
    Create a new project entry in the database.

    Parameters
    ----------
    path: str
        The path to the backlogger folder.
    uuid: str
        The uuid of the project.
    owner: str (optional)
        The owner of the project.
    tags: list[str] (optional)
        Custom tags for the project.
    aliases: list[str] (optional)
        Custom names for the project (e.g. 'cA determination on exponential clover').
    code: str (optional)
        The code that was used to create the measurements.

    Raises
    ------
    ValueError
        If a project with this uuid is already present in the database.
    """
    db = os.path.join(path, "backlogger.db")
    get(path, "backlogger.db")
    conn = sqlite3.connect(db)
    try:
        c = conn.cursor()
        # Project ids are unique; refuse a second import of the same uuid.
        if c.execute("SELECT * FROM projects WHERE id=?", (uuid,)).fetchone():
            raise ValueError("Project already imported, use update_project() instead.")
        # The database file is managed by datalad and must be unlocked before writing.
        dl.unlock(db, dataset=path)
        alias_str = list2str(aliases) if aliases is not None else ""
        tag_str = list2str(tags) if tags is not None else ""
        c.execute("INSERT INTO projects (id, aliases, customTags, owner, code, created_at, updated_at) VALUES (?, ?, ?, ?, ?, datetime('now'), datetime('now'))",
                  (uuid, alias_str, tag_str, owner, code))
        conn.commit()
    finally:
        # Close the connection even when the duplicate check raises.
        conn.close()
    save(path, message="Added entry for project " + uuid + " to database", files=["backlogger.db"])
def update_project_data(path: str, uuid: str, prop: str, value: Union[str, None] = None) -> None:
    """
    Set a single column of a project's entry in the database.

    The change is made in the working copy only; callers are responsible
    for unlocking and saving the database file.

    Parameters
    ----------
    path: str
        The path to the backlogger folder.
    uuid: str
        The uuid of the project.
    prop: str
        The column of the projects table to update (e.g. 'aliases').
    value: str (optional)
        The new value for the column.

    Raises
    ------
    ValueError
        If prop is not a plain SQL identifier.
    """
    # Column names cannot be bound as SQL parameters, so validate the
    # identifier explicitly; values are bound to avoid SQL injection.
    if not re.fullmatch(r"[A-Za-z_][A-Za-z0-9_]*", prop):
        raise ValueError(f"Invalid column name: {prop!r}")
    get(path, "backlogger.db")
    conn = sqlite3.connect(os.path.join(path, "backlogger.db"))
    try:
        c = conn.cursor()
        c.execute(f'UPDATE projects SET "{prop}" = ? WHERE id = ?', (value, uuid))
        conn.commit()
    finally:
        conn.close()
    return
def update_aliases(path: str, uuid: str, aliases: list[str]) -> None:
    """
    Merge new aliases into a project's database entry.

    Existing aliases are kept; only aliases not already known are added.
    The database is saved only when something actually changed.

    Parameters
    ----------
    path: str
        The path to the backlogger folder.
    uuid: str
        The uuid of the project.
    aliases: list[str]
        Aliases to add to the project's known aliases.
    """
    db = os.path.join(path, "backlogger.db")
    get(path, "backlogger.db")
    record = _project_lookup_by_id(db, uuid)[0]
    stored = record[1]
    if aliases is None:
        aliases = []
    if stored is None:
        print(f"Project {uuid} has no known aliases.")
        current = []
    else:
        print(f"Project {uuid} is known by names: {stored}")
        current = str2list(stored)
    # Append only those aliases that are not already stored.
    merged = current + [aka for aka in aliases if aka not in current]
    if len(merged) != len(current):
        dl.unlock(db, dataset=path)
        update_project_data(path, uuid, "aliases", list2str(merged))
        save(path, message="Updated aliases for project " + uuid, files=["backlogger.db"])
    return
def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Optional[list[str]]=None, aliases: Optional[list[str]]=None, code: Optional[str]=None, isDataset: bool=True) -> str:
    """
    Import a datalad dataset into the backlogger.

    Non-datalad sources are also supported (isDataset=False); they are
    converted to datalad datasets in order to receive a uuid and have a
    consistent interface.

    Parameters
    ----------
    path: str
        The path to the backlogger folder.
    url: str
        The url of the project to import. This can be any url that datalad can handle.
    owner: str (optional)
        The owner of the project.
    tags: list[str] (optional)
        Custom tags for the project.
    aliases: list[str] (optional)
        Custom names of the project.
    code: str (optional)
        Code that was used to create the measurements.
    isDataset: bool
        Whether the source is already a datalad dataset.

    Returns
    -------
    uuid: str
        The datalad uuid of the imported project.

    Raises
    ------
    ValueError
        If the (created or installed) dataset has no uuid.
    """
    tmp_path = os.path.join(path, 'projects/tmp')
    if not isDataset:
        # Wrap the plain directory in a fresh dataset so it gets a uuid.
        dl.create(tmp_path, dataset=path)
        # copytree takes the directory itself as source (no glob expansion)
        # and must tolerate the tmp_path just created by dl.create.
        shutil.copytree(url, tmp_path, dirs_exist_ok=True)
        save(path, message="Created temporary project dataset", files=['projects/tmp'])
    else:
        dl.install(path=tmp_path, source=url, dataset=path)
    tmp_ds = dl.Dataset(tmp_path)
    conf = dlc.ConfigManager(tmp_ds)
    uuid = str(conf.get("datalad.dataset.id"))
    if not uuid:
        raise ValueError("The dataset does not have a uuid!")
    if not os.path.exists(path + "/projects/" + uuid):
        db = path + "/backlogger.db"
        get(path, "backlogger.db")
        dl.unlock(db, dataset=path)
        create_project(path, uuid, owner, tags, aliases, code)
        move_submodule(path, 'projects/tmp', 'projects/' + uuid)
        os.mkdir(path + '/import_scripts/' + uuid)
        save(path, message="Import project from " + url, files=['projects/' + uuid, 'backlogger.db'])
    else:
        # Project already known: discard the temporary clone.
        dl.drop(tmp_path, reckless='kill')
        shutil.rmtree(tmp_path)
    if aliases is not None:
        # Accept a bare string alias for backward compatibility.
        alias_list = [aliases] if isinstance(aliases, str) else aliases
        update_aliases(path, uuid, alias_list)
    return uuid
def drop_project_data(path: str, uuid: str, path_in_project: str = "") -> None:
    """
    Drop (parts of) a project to free up disk space.

    Parameters
    ----------
    path: str
        The path to the backlogger folder.
    uuid: str
        The uuid of the project.
    path_in_project: str (optional)
        Sub-path within the project to drop; defaults to the whole project.
    """
    target = "/".join([path, "projects", uuid, path_in_project])
    dl.drop(target)