refactor/data_backend #12
3 changed files with 53 additions and 40 deletions
remove temporary non-datalad implementation
commit
bc57087a5a
|
|
@ -6,7 +6,7 @@ from .git_tools import move_submodule
|
|||
import shutil
|
||||
from .find import _project_lookup_by_id
|
||||
from .tools import list2str, str2list, get_db_file
|
||||
from .tracker import get, save
|
||||
from .tracker import get, save, unlock, init, clone, drop
|
||||
from typing import Union, Optional
|
||||
|
||||
|
||||
|
|
@ -34,7 +34,7 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
|
|||
if known_projects.fetchone():
|
||||
raise ValueError("Project already imported, use update_project() instead.")
|
||||
|
||||
dl.unlock(db, dataset=path)
|
||||
unlock(path, db_file)
|
||||
alias_str = ""
|
||||
if aliases is not None:
|
||||
alias_str = list2str(aliases)
|
||||
|
|
@ -80,7 +80,7 @@ def update_aliases(path: str, uuid: str, aliases: list[str]) -> None:
|
|||
|
||||
if not len(new_alias_list) == len(known_alias_list):
|
||||
alias_str = list2str(new_alias_list)
|
||||
dl.unlock(db, dataset=path)
|
||||
unlock(path, db_file)
|
||||
update_project_data(path, uuid, "aliases", alias_str)
|
||||
save(path, message="Updated aliases for project " + uuid, files=[db_file])
|
||||
return
|
||||
|
|
@ -113,12 +113,7 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Opti
|
|||
|
||||
"""
|
||||
tmp_path = os.path.join(path, 'projects/tmp')
|
||||
if not isDataset:
|
||||
dl.create(tmp_path, dataset=path)
|
||||
shutil.copytree(url + "/*", path + '/projects/tmp/')
|
||||
save(path, message="Created temporary project dataset", files=['projects/tmp'])
|
||||
else:
|
||||
dl.install(path=tmp_path, source=url, dataset=path)
|
||||
clone(path, source=url, target=tmp_path)
|
||||
tmp_ds = dl.Dataset(tmp_path)
|
||||
conf = dlc.ConfigManager(tmp_ds)
|
||||
uuid = str(conf.get("datalad.dataset.id"))
|
||||
|
|
@ -126,9 +121,8 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Opti
|
|||
raise ValueError("The dataset does not have a uuid!")
|
||||
if not os.path.exists(path + "/projects/" + uuid):
|
||||
db_file = get_db_file(path)
|
||||
db = os.path.join(path, db_file)
|
||||
get(path, db_file)
|
||||
dl.unlock(db, dataset=path)
|
||||
unlock(path, db_file)
|
||||
create_project(path, uuid, owner, tags, aliases, code)
|
||||
move_submodule(path, 'projects/tmp', 'projects/' + uuid)
|
||||
os.mkdir(path + '/import_scripts/' + uuid)
|
||||
|
|
@ -151,6 +145,6 @@ def drop_project_data(path: str, uuid: str, path_in_project: str = "") -> None:
|
|||
"""
|
||||
Drop (parts of) a project to free up diskspace
|
||||
"""
|
||||
dl.drop(path + "/projects/" + uuid + "/" + path_in_project)
|
||||
drop(path + "/projects/" + uuid + "/" + path_in_project)
|
||||
return
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,9 @@
|
|||
import os
|
||||
from configparser import ConfigParser
|
||||
from .trackers import datalad as dl
|
||||
import datalad.api as dl
|
||||
from typing import Optional
|
||||
import shutil
|
||||
from .tools import get_db_file
|
||||
|
||||
|
||||
def get_tracker(path: str) -> str:
|
||||
|
|
@ -18,7 +20,12 @@ def get_tracker(path: str) -> str:
|
|||
def get(path: str, file: str) -> None:
|
||||
tracker = get_tracker(path)
|
||||
if tracker == 'datalad':
|
||||
dl.get(path, file)
|
||||
if file == get_db_file(path):
|
||||
print("Downloading database...")
|
||||
else:
|
||||
print("Downloading data...")
|
||||
dl.get(os.path.join(path, file), dataset=path)
|
||||
print("> downloaded file")
|
||||
elif tracker == 'None':
|
||||
pass
|
||||
else:
|
||||
|
|
@ -29,7 +36,9 @@ def get(path: str, file: str) -> None:
|
|||
def save(path: str, message: str, files: Optional[list[str]]=None) -> None:
|
||||
tracker = get_tracker(path)
|
||||
if tracker == 'datalad':
|
||||
dl.save(path, message, files)
|
||||
if files is not None:
|
||||
files = [os.path.join(path, f) for f in files]
|
||||
dl.save(files, message=message, dataset=path)
|
||||
elif tracker == 'None':
|
||||
pass
|
||||
else:
|
||||
|
|
@ -44,3 +53,38 @@ def init(path: str, tracker: str='datalad') -> None:
|
|||
else:
|
||||
raise ValueError(f"Tracker {tracker} is not supported.")
|
||||
return
|
||||
|
||||
|
||||
def unlock(path: str, file: str) -> None:
    """Unlock *file* in the dataset rooted at *path* so it can be modified.

    With the 'datalad' tracker this delegates to ``datalad.api.unlock``;
    the 'None' tracker keeps files writable anyway, so nothing is done.

    Raises:
        ValueError: if the configured tracker is not supported.
    """
    tracker = get_tracker(path)
    if tracker == 'None':
        return
    if tracker != 'datalad':
        raise ValueError(f"Tracker {tracker} is not supported.")
    dl.unlock(file, dataset=path)
    return
|
||||
|
||||
|
||||
def clone(path: str, source: str, target: str) -> None:
    """Clone *source* into *target* inside the dataset rooted at *path*.

    'datalad' tracker: delegates to ``datalad.api.clone``.
    'None' tracker: a plain recursive file copy stands in for a clone.

    Raises:
        ValueError: if the configured tracker is not supported.
    """
    tracker = get_tracker(path)
    if tracker == 'None':
        # Ensure the dataset root exists before copying.
        # NOTE(review): copytree creates `target` itself — presumably this
        # was meant to ensure target's parent exists; confirm with callers.
        os.makedirs(path, exist_ok=True)
        # Simple clone: copy the whole tree (target must not already exist).
        shutil.copytree(source, target, dirs_exist_ok=False)
        return
    if tracker == 'datalad':
        dl.clone(target=target, source=source, dataset=path)
        return
    raise ValueError(f"Tracker {tracker} is not supported.")
|
||||
|
||||
|
||||
def drop(path: str, reckless: Optional[str]=None) -> None:
    """Drop the tracked content at *path* to free up disk space.

    'datalad' tracker: delegates to ``datalad.api.drop`` (``reckless`` is
    forwarded unchanged).
    'None' tracker: without annexed content, dropping means deleting the
    tree outright.

    Raises:
        ValueError: if the configured tracker is not supported.
    """
    tracker = get_tracker(path)
    if tracker == 'None':
        shutil.rmtree(path)
        return
    if tracker == 'datalad':
        dl.drop(path, reckless=reckless)
        return
    raise ValueError(f"Tracker {tracker} is not supported.")
|
||||
|
|
@ -1,25 +0,0 @@
|
|||
import datalad.api as dl
|
||||
import os
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def get(path: str, file: str) -> None:
|
||||
if file == "backlogger.db":
|
||||
print("Downloading database...")
|
||||
else:
|
||||
print("Downloading data...")
|
||||
dl.get(os.path.join(path, file), dataset=path)
|
||||
print("> downloaded file")
|
||||
return
|
||||
|
||||
|
||||
def save(path: str, message: str, files: Optional[list[str]]=None) -> None:
|
||||
if files is not None:
|
||||
files = [os.path.join(path, f) for f in files]
|
||||
dl.save(files, message=message, dataset=path)
|
||||
return
|
||||
|
||||
|
||||
def create(path: str) -> None:
|
||||
dl.create(path)
|
||||
return
|
||||
Loading…
Add table
Add a link
Reference in a new issue