diff --git a/corrlib/cli.py b/corrlib/cli.py
index 3f4eb8f..6f0d8cf 100644
--- a/corrlib/cli.py
+++ b/corrlib/cli.py
@@ -26,9 +26,9 @@ def update(
         str('./corrlib'),
         "--dataset",
         "-d",
-        ),
+    ),
     uuid: str = typer.Argument(),
-    ) -> None:
+) -> None:
     """
     Update a project by its UUID.
     """
@@ -43,7 +43,7 @@ def list(
         "-d",
     ),
     entities: str = typer.Argument('ensembles'),
-    ) -> None:
+) -> None:
     """
     List entities (ensembles, projects).
     """
@@ -72,10 +72,10 @@ def alias_add(
         str('./corrlib'),
         "--dataset",
         "-d",
-        ),
+    ),
     uuid: str = typer.Argument(),
     alias: str = typer.Argument(),
-    ) -> None:
+) -> None:
     """
     Add an alias to a project UUID.
     """
@@ -90,11 +90,11 @@ def find(
         str('./corrlib'),
         "--dataset",
         "-d",
-        ),
+    ),
     ensemble: str = typer.Argument(),
     corr: str = typer.Argument(),
     code: str = typer.Argument(),
-    ) -> None:
+) -> None:
     """
     Find a record in the backlog at hand by specifying its ensemble and the measured correlator.
     """
@@ -108,15 +108,15 @@ def importer(
         str('./corrlib'),
         "--dataset",
         "-d",
-        ),
+    ),
     files: str = typer.Argument(
-        ),
+    ),
     copy_file: bool = typer.Option(
         bool(True),
         "--save",
         "-s",
-        ),
-    ) -> None:
+    ),
+) -> None:
     """
     Import a project from a .toml-file via CLI.
     """
@@ -152,17 +152,12 @@ def init(
         str('./corrlib'),
         "--dataset",
         "-d",
-        ),
-    tracker: str = typer.Option(
-        str('datalad'),
-        "--tracker",
-        "-t",
-        ),
-    ) -> None:
+    ),
+) -> None:
     """
     Initialize a new backlog-database.
     """
-    create(path, tracker)
+    create(path)
     return


@@ -172,8 +167,8 @@ def drop_cache(
         str('./corrlib'),
         "--dataset",
         "-d",
-        ),
-    ) -> None:
+    ),
+) -> None:
     """
     Drop the current cache directory of the dataset.
     """
@@ -190,6 +185,6 @@ def main(
         help="Show the application's version and exit.",
         callback=_version_callback,
         is_eager=True,
-        )
-    ) -> None:
+    )
+) -> None:
     return
diff --git a/corrlib/find.py b/corrlib/find.py
index 901c09c..402cfb1 100644
--- a/corrlib/find.py
+++ b/corrlib/find.py
@@ -4,10 +4,8 @@ import json
 import pandas as pd
 import numpy as np
 from .input.implementations import codes
-from .tools import k2m, get_db_file
-from .tracker import get
+from .tools import k2m, get_file
 from typing import Any, Optional
-


 # this will implement the search functionality
@@ -144,11 +142,10 @@ def sfcf_filter(results: pd.DataFrame, **kwargs: Any) -> pd.DataFrame:


 def find_record(path: str, ensemble: str, correlator_name: str, code: str, project: Optional[str]=None, parameters: Optional[str]=None, created_before: Optional[str]=None, created_after: Optional[str]=None, updated_before: Optional[str]=None, updated_after: Optional[str]=None, revision: Optional[str]=None, **kwargs: Any) -> pd.DataFrame:
-    db_file = get_db_file(path)
-    db = os.path.join(path, db_file)
+    db = path + '/backlogger.db'
     if code not in codes:
         raise ValueError("Code " + code + " unknown, take one of the following: " + ", ".join(codes))
-    get(path, db_file)
+    get_file(path, "backlogger.db")
     results = _db_lookup(db, ensemble, correlator_name, code, project, parameters=parameters, created_before=created_before, created_after=created_after, updated_before=updated_before, updated_after=updated_after)
     if code == "sfcf":
         results = sfcf_filter(results, **kwargs)
@@ -157,15 +154,14 @@


 def find_project(path: str, name: str) -> str:
-    db_file = get_db_file(path)
-    get(path, db_file)
-    return _project_lookup_by_alias(os.path.join(path, db_file), name)
+    get_file(path, "backlogger.db")
+    return _project_lookup_by_alias(os.path.join(path, "backlogger.db"), name)


 def list_projects(path: str) -> list[tuple[str, str]]:
-    db_file = get_db_file(path)
-    get(path, db_file)
-    conn = sqlite3.connect(os.path.join(path, db_file))
+    db = path + '/backlogger.db'
+    get_file(path, "backlogger.db")
+    conn = sqlite3.connect(db)
     c = conn.cursor()
     c.execute("SELECT id,aliases FROM projects")
     results = c.fetchall()
diff --git a/corrlib/git_tools.py b/corrlib/git_tools.py
index c6e7522..bde9871 100644
--- a/corrlib/git_tools.py
+++ b/corrlib/git_tools.py
@@ -1,5 +1,5 @@
 import os
-from .tracker import save
+import datalad.api as dl
 import git

 GITMODULES_FILE = '.gitmodules'
@@ -40,6 +40,5 @@ def move_submodule(repo_path: str, old_path: str, new_path: str) -> None:
     repo = git.Repo(repo_path)
     repo.git.add('.gitmodules')
     # save new state of the dataset
-    save(repo_path, message=f"Move module from {old_path} to {new_path}", files=['.gitmodules', repo_path])
-
+    dl.save(repo_path, message=f"Move module from {old_path} to {new_path}", dataset=repo_path)
     return
diff --git a/corrlib/initialization.py b/corrlib/initialization.py
index 8aa8287..b76c06c 100644
--- a/corrlib/initialization.py
+++ b/corrlib/initialization.py
@@ -1,7 +1,6 @@
-from configparser import ConfigParser
 import sqlite3
+import datalad.api as dl
 import os
-from .tracker import save, init


 def _create_db(db: str) -> None:
@@ -36,52 +35,22 @@ def _create_db(db: str) -> None:
     return


-def _create_config(path: str, tracker: str, cached: bool) -> ConfigParser:
-    """
-    Create the config file for backlogger.
-
-    """
-    config = ConfigParser()
-    config['core'] = {
-        'version': '1.0',
-        'tracker': tracker,
-        'cached': str(cached),
-    }
-    config['paths'] = {
-        'db': 'backlogger.db',
-        'projects_path': 'projects',
-        'archive_path': 'archive',
-        'toml_imports_path': 'toml_imports',
-        'import_scripts_path': 'import_scripts',
-    }
-    return config
-
-
-def _write_config(path: str, config: ConfigParser) -> None:
-    """
-    Write the config file to disk.
-    """
-    with open(os.path.join(path, '.corrlib'), 'w') as configfile:
-        config.write(configfile)
-    return
-
-
-def create(path: str, tracker: str = 'datalad', cached: bool = True) -> None:
+def create(path: str) -> None:
     """
     Create the folder structure of the backlog.
     """
-    config = _create_config(path, tracker, cached)
-    init(path, tracker)
-    _write_config(path, config)
-    _create_db(os.path.join(path, config['paths']['db']))
-    os.chmod(os.path.join(path, config['paths']['db']), 0o666)
-    os.makedirs(os.path.join(path, config['paths']['projects_path']))
-    os.makedirs(os.path.join(path, config['paths']['archive_path']))
-    os.makedirs(os.path.join(path, config['paths']['toml_imports_path']))
-    os.makedirs(os.path.join(path, config['paths']['import_scripts_path'], 'template.py'))
-    with open(os.path.join(path, ".gitignore"), "w") as fp:
+    dl.create(path)
+    _create_db(path + '/backlogger.db')
+    os.chmod(path + '/backlogger.db', 0o666)  # why does this not work?
+    os.makedirs(path + '/projects')
+    os.makedirs(path + '/archive')
+    os.makedirs(path + '/toml_imports')
+    os.makedirs(path + '/import_scripts/template.py')
+    with open(path + "/.gitignore", "w") as fp:
         fp.write(".cache")
         fp.close()
-    save(path, message="Initialized correlator library")
+    dl.save(path, dataset=path, message="Initialize backlogger directory.")
     return
diff --git a/corrlib/main.py b/corrlib/main.py
index 24d5103..e0bfbf2 100644
--- a/corrlib/main.py
+++ b/corrlib/main.py
@@ -5,8 +5,7 @@ import os
 from .git_tools import move_submodule
 import shutil
 from .find import _project_lookup_by_id
-from .tools import list2str, str2list, get_db_file
-from .tracker import get, save, unlock, clone, drop
+from .tools import list2str, str2list, get_file
 from typing import Union, Optional


@@ -25,16 +24,15 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
     code: str (optional)
         The code that was used to create the measurements.
     """
-    db_file = get_db_file(path)
-    db = os.path.join(path, db_file)
-    get(path, db_file)
+    db = path + "/backlogger.db"
+    get_file(path, "backlogger.db")
     conn = sqlite3.connect(db)
     c = conn.cursor()
     known_projects = c.execute("SELECT * FROM projects WHERE id=?", (uuid,))
     if known_projects.fetchone():
         raise ValueError("Project already imported, use update_project() instead.")
-    unlock(path, db_file)
+    dl.unlock(db, dataset=path)
     alias_str = ""
     if aliases is not None:
         alias_str = list2str(aliases)
@@ -44,13 +42,12 @@ def create_project(path: str, uuid: str, owner: Union[str, None]=None, tags: Uni
     c.execute("INSERT INTO projects (id, aliases, customTags, owner, code, created_at, updated_at) VALUES (?, ?, ?, ?, ?, datetime('now'), datetime('now'))", (uuid, alias_str, tag_str, owner, code))
     conn.commit()
     conn.close()
-    save(path, message="Added entry for project " + uuid + " to database", files=[db_file])
+    dl.save(db, message="Added entry for project " + uuid + " to database", dataset=path)


 def update_project_data(path: str, uuid: str, prop: str, value: Union[str, None] = None) -> None:
-    db_file = get_db_file(path)
-    get(path, db_file)
-    conn = sqlite3.connect(os.path.join(path, db_file))
+    get_file(path, "backlogger.db")
+    conn = sqlite3.connect(os.path.join(path, "backlogger.db"))
     c = conn.cursor()
     c.execute(f"UPDATE projects SET '{prop}' = '{value}' WHERE id == '{uuid}'")
     conn.commit()
@@ -59,9 +56,8 @@


 def update_aliases(path: str, uuid: str, aliases: list[str]) -> None:
-    db_file = get_db_file(path)
-    db = os.path.join(path, db_file)
-    get(path, db_file)
+    db = os.path.join(path, "backlogger.db")
+    get_file(path, "backlogger.db")
     known_data = _project_lookup_by_id(db, uuid)[0]
     known_aliases = known_data[1]

@@ -80,9 +76,9 @@

     if not len(new_alias_list) == len(known_alias_list):
         alias_str = list2str(new_alias_list)
-        unlock(path, db_file)
+        dl.unlock(db, dataset=path)
         update_project_data(path, uuid, "aliases", alias_str)
-        save(path, message="Updated aliases for project " + uuid, files=[db_file])
+        dl.save(db, message="Updated aliases for project " + uuid, dataset=path)
     return


@@ -112,21 +108,26 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Opti
     in order to receive a uuid and have a consistent interface.
""" - tmp_path = os.path.join(path, 'projects/tmp') - clone(path, source=url, target=tmp_path) + tmp_path = path + '/projects/tmp' + if not isDataset: + dl.create(tmp_path, dataset=path) + shutil.copytree(url + "/*", path + '/projects/tmp/') + dl.save(tmp_path, dataset=path) + else: + dl.install(path=tmp_path, source=url, dataset=path) tmp_ds = dl.Dataset(tmp_path) conf = dlc.ConfigManager(tmp_ds) uuid = str(conf.get("datalad.dataset.id")) if not uuid: raise ValueError("The dataset does not have a uuid!") if not os.path.exists(path + "/projects/" + uuid): - db_file = get_db_file(path) - get(path, db_file) - unlock(path, db_file) + db = path + "/backlogger.db" + get_file(path, "backlogger.db") + dl.unlock(db, dataset=path) create_project(path, uuid, owner, tags, aliases, code) move_submodule(path, 'projects/tmp', 'projects/' + uuid) os.mkdir(path + '/import_scripts/' + uuid) - save(path, message="Import project from " + url, files=['projects/' + uuid, db_file]) + dl.save([db, path + '/projects/' + uuid], message="Import project from " + url, dataset=path) else: dl.drop(tmp_path, reckless='kill') shutil.rmtree(tmp_path) @@ -143,8 +144,8 @@ def import_project(path: str, url: str, owner: Union[str, None]=None, tags: Opti def drop_project_data(path: str, uuid: str, path_in_project: str = "") -> None: """ - Drop (parts of) a project to free up diskspace + Drop (parts of) a prject to free up diskspace """ - drop(path + "/projects/" + uuid + "/" + path_in_project) + dl.drop(path + "/projects/" + uuid + "/" + path_in_project) return diff --git a/corrlib/meas_io.py b/corrlib/meas_io.py index a00079e..8d8a055 100644 --- a/corrlib/meas_io.py +++ b/corrlib/meas_io.py @@ -1,13 +1,13 @@ from pyerrors.input import json as pj import os +import datalad.api as dl import sqlite3 from .input import sfcf,openQCD import json from typing import Union from pyerrors import Obs, Corr, dump_object, load_object from hashlib import sha256 -from .tools import get_db_file, cache_enabled -from .tracker import get, save, unlock +from .tools import cached, get_file import shutil from typing import Any @@ -28,10 +28,9 @@ def write_measurement(path: str, ensemble: str, measurement: dict[str, dict[str, uuid: str The uuid of the project. 
""" - db_file = get_db_file(path) - db = os.path.join(path, db_file) - get(path, db_file) - unlock(path, db_file) + db = os.path.join(path, 'backlogger.db') + get_file(path, "backlogger.db") + dl.unlock(db, dataset=path) conn = sqlite3.connect(db) c = conn.cursor() files = [] @@ -44,7 +43,7 @@ def write_measurement(path: str, ensemble: str, measurement: dict[str, dict[str, os.makedirs(os.path.join(path, '.', 'archive', ensemble, corr)) else: if os.path.exists(file): - unlock(path, file_in_archive) + dl.unlock(file, dataset=path) known_meas = pj.load_json_dict(file) if code == "sfcf": parameters = sfcf.read_param(path, uuid, parameter_file) @@ -94,9 +93,9 @@ def write_measurement(path: str, ensemble: str, measurement: dict[str, dict[str, (corr, ensemble, code, meas_path, uuid, pars[subkey], parameter_file)) conn.commit() pj.dump_dict_to_json(known_meas, file) - files.append(os.path.join(path, db_file)) + files.append(path + '/backlogger.db') conn.close() - save(path, message="Add measurements to database", files=files) + dl.save(files, message="Add measurements to database", dataset=path) def load_record(path: str, meas_path: str) -> Union[Corr, Obs]: @@ -149,7 +148,7 @@ def load_records(path: str, meas_paths: list[str], preloaded: dict[str, Any] = { if file not in preloaded: preloaded[file] = preload(path, file) returned_data.append(preloaded[file][key]) - if cache_enabled(path): + if cached: if not os.path.exists(cache_dir(path, file)): os.makedirs(cache_dir(path, file)) dump_object(preloaded[file][key], cache_path(path, file, key)) @@ -170,7 +169,7 @@ def cache_path(path: str, file: str, key: str) -> str: def preload(path: str, file: str) -> dict[str, Any]: - get(path, file) + get_file(path, file) filedict: dict[str, Any] = pj.load_json_dict(os.path.join(path, file)) print("> read file") return filedict @@ -179,11 +178,10 @@ def preload(path: str, file: str) -> dict[str, Any]: def drop_record(path: str, meas_path: str) -> None: file_in_archive = meas_path.split("::")[0] file = os.path.join(path, file_in_archive) - db_file = get_db_file(path) - db = os.path.join(path, db_file) - get(path, db_file) + db = os.path.join(path, 'backlogger.db') + get_file(path, 'backlogger.db') sub_key = meas_path.split("::")[1] - unlock(path, db_file) + dl.unlock(db, dataset=path) conn = sqlite3.connect(db) c = conn.cursor() if c.execute("SELECT * FROM backlogs WHERE path = ?", (meas_path, )).fetchone() is not None: @@ -195,9 +193,9 @@ def drop_record(path: str, meas_path: str) -> None: known_meas = pj.load_json_dict(file) if sub_key in known_meas: del known_meas[sub_key] - unlock(path, file_in_archive) + dl.unlock(file, dataset=path) pj.dump_dict_to_json(known_meas, file) - save(path, message="Drop measurements to database", files=[db, file]) + dl.save([db, file], message="Drop measurements to database", dataset=path) return else: raise ValueError("This measurement does not exist as a file!") diff --git a/corrlib/toml.py b/corrlib/toml.py index c1c4d5b..11065fe 100644 --- a/corrlib/toml.py +++ b/corrlib/toml.py @@ -10,17 +10,14 @@ the import of projects via TOML. 
 import tomllib as toml
 import shutil
-
-import datalad.api as dl
-from .tracker import save
 from .input import sfcf, openQCD
 from .main import import_project, update_aliases
 from .meas_io import write_measurement
+import datalad.api as dl
 import os
 from .input.implementations import codes as known_codes
 from typing import Any
-

 def replace_string(string: str, name: str, val: str) -> str:
     if '{' + name + '}' in string:
         n = string.replace('{' + name + '}', val)
@@ -28,7 +25,6 @@ def replace_string(string: str, name: str, val: str) -> str:
     else:
         return string

-
 def replace_in_meas(measurements: dict[str, dict[str, Any]], vars: dict[str, str]) -> dict[str, dict[str, Any]]:
     # replace global variables
     for name, value in vars.items():
@@ -41,7 +37,6 @@ def replace_in_meas(measurements: dict[str, dict[str, Any]], vars: dict[str, str
                     measurements[m][key][i] = replace_string(measurements[m][key][i], name, value)
     return measurements

-
 def fill_cons(measurements: dict[str, dict[str, Any]], constants: dict[str, str]) -> dict[str, dict[str, Any]]:
     for m in measurements.keys():
         for name, val in constants.items():
@@ -155,7 +150,7 @@ def import_toml(path: str, file: str, copy_file: bool=True) -> None:
     if copy_file:
         import_file = os.path.join(path, "toml_imports", uuid, file.split("/")[-1])
         shutil.copy(file, import_file)
-        save(path, files=[import_file], message="Import using " + import_file)
+        dl.save(import_file, message="Import using " + import_file, dataset=path)
         print("File copied to " + import_file)
     print("Imported project.")
     return
diff --git a/corrlib/tools.py b/corrlib/tools.py
index 9c39d7c..b4ae89e 100644
--- a/corrlib/tools.py
+++ b/corrlib/tools.py
@@ -1,8 +1,5 @@
 import os
-from configparser import ConfigParser
-from typing import Any
-
-CONFIG_FILENAME = ".corrlib"
+import datalad.api as dl


 def str2list(string: str) -> list[str]:
@@ -22,33 +19,16 @@ def k2m(k: float) -> float:
     return (1/(2*k))-4


-def set_config(path: str, section: str, option: str, value: Any) -> None:
-    config_path = os.path.join(path, '.corrlib')
-    config = ConfigParser()
-    if os.path.exists(config_path):
-        config.read(config_path)
-    if not config.has_section(section):
-        config.add_section(section)
-    config.set(section, option, value)
-    with open(config_path, 'w') as configfile:
-        config.write(configfile)
+# module-level cache switch; meas_io does `from .tools import cached`,
+# so a default must live here (True assumed, matching the old config fallback)
+cached = True
+
+
+def get_file(path: str, file: str) -> None:
+    if file == "backlogger.db":
+        print("Downloading database...")
+    else:
+        print("Downloading data...")
+    dl.get(os.path.join(path, file), dataset=path)
+    print("> downloaded file")
     return
-
-
-def get_db_file(path: str) -> str:
-    config_path = os.path.join(path, CONFIG_FILENAME)
-    config = ConfigParser()
-    if os.path.exists(config_path):
-        config.read(config_path)
-    db_file = config.get('paths', 'db', fallback='backlogger.db')
-    return db_file
-
-
-def cache_enabled(path: str) -> bool:
-    config_path = os.path.join(path, CONFIG_FILENAME)
-    config = ConfigParser()
-    if os.path.exists(config_path):
-        config.read(config_path)
-    cached_str = config.get('core', 'cached', fallback='True')
-    cached_bool = cached_str == ('True')
-    return cached_bool
diff --git a/corrlib/tracker.py b/corrlib/tracker.py
deleted file mode 100644
index 5cc281c..0000000
--- a/corrlib/tracker.py
+++ /dev/null
@@ -1,169 +0,0 @@
-import os
-from configparser import ConfigParser
-import datalad.api as dl
-from typing import Optional
-import shutil
-from .tools import get_db_file
-
-
-def get_tracker(path: str) -> str:
-    """
-    Get the tracker used in the dataset located at path.
-
-    Parameters
-    ----------
-    path: str
-        The path to the backlogger folder.
-
-    Returns
-    -------
-    tracker: str
-        The tracker used in the dataset.
-    """
-    config_path = os.path.join(path, '.corrlib')
-    config = ConfigParser()
-    if os.path.exists(config_path):
-        config.read(config_path)
-    else:
-        raise FileNotFoundError(f"No config file found in {path}.")
-    tracker = config.get('core', 'tracker', fallback='datalad')
-    return tracker
-
-
-def get(path: str, file: str) -> None:
-    """
-    Wrapper function to get a file from the dataset located at path with the specified tracker.
-
-    Parameters
-    ----------
-    path: str
-        The path to the backlogger folder.
-    file: str
-        The file to get.
-    """
-    tracker = get_tracker(path)
-    if tracker == 'datalad':
-        if file == get_db_file(path):
-            print("Downloading database...")
-        else:
-            print("Downloading data...")
-        dl.get(os.path.join(path, file), dataset=path)
-        print("> downloaded file")
-    elif tracker == 'None':
-        pass
-    else:
-        raise ValueError(f"Tracker {tracker} is not supported.")
-    return
-
-
-def save(path: str, message: str, files: Optional[list[str]]=None) -> None:
-    """
-    Wrapper function to save a file to the dataset located at path with the specified tracker.
-
-    Parameters
-    ----------
-    path: str
-        The path to the backlogger folder.
-    message: str
-        The commit message.
-    files: list[str], optional
-        The files to save. If None, all changes are saved.
-    """
-    tracker = get_tracker(path)
-    if tracker == 'datalad':
-        if files is not None:
-            files = [os.path.join(path, f) for f in files]
-        dl.save(files, message=message, dataset=path)
-    elif tracker == 'None':
-        Warning("Tracker 'None' does not implement save.")
-        pass
-    else:
-        raise ValueError(f"Tracker {tracker} is not supported.")
-
-
-def init(path: str, tracker: str='datalad') -> None:
-    """
-    Initialize a dataset at the specified path with the specified tracker.
-
-    Parameters
-    ----------
-    path: str
-        The path to initialize the dataset.
-    tracker: str
-        The tracker to use. Currently only 'datalad' and 'None' are supported.
-    """
-    if tracker == 'datalad':
-        dl.create(path)
-    elif tracker == 'None':
-        os.makedirs(path, exist_ok=True)
-    else:
-        raise ValueError(f"Tracker {tracker} is not supported.")
-    return
-
-
-def unlock(path: str, file: str) -> None:
-    """
-    Wrapper function to unlock a file in the dataset located at path with the specified tracker.
-
-    Parameters
-    ----------
-    path : str
-        The path to the backlogger folder.
-    file : str
-        The file to unlock.
-    """
-    tracker = get_tracker(path)
-    if tracker == 'datalad':
-        dl.unlock(file, dataset=path)
-    elif tracker == 'None':
-        Warning("Tracker 'None' does not implement unlock.")
-        pass
-    else:
-        raise ValueError(f"Tracker {tracker} is not supported.")
-    return
-
-
-def clone(path: str, source: str, target: str) -> None:
-    """
-    Wrapper function to clone a dataset from source to target with the specified tracker.
-    Parameters
-    ----------
-    path: str
-        The path to the backlogger folder.
-    source: str
-        The source dataset to clone.
-    target: str
-        The target path to clone the dataset to.
- """ - tracker = get_tracker(path) - if tracker == 'datalad': - dl.clone(target=target, source=source, dataset=path) - elif tracker == 'None': - os.makedirs(path, exist_ok=True) - # Implement a simple clone by copying files - shutil.copytree(source, target, dirs_exist_ok=False) - else: - raise ValueError(f"Tracker {tracker} is not supported.") - return - - -def drop(path: str, reckless: Optional[str]=None) -> None: - """ - Wrapper function to drop data from a dataset located at path with the specified tracker. - - Parameters - ---------- - path: str - The path to the backlogger folder. - reckless: Optional[str] - The datalad's reckless option for dropping data. - """ - tracker = get_tracker(path) - if tracker == 'datalad': - dl.drop(path, reckless=reckless) - elif tracker == 'None': - Warning("Tracker 'None' does not implement drop.") - pass - else: - raise ValueError(f"Tracker {tracker} is not supported.") - return diff --git a/tests/test_import_project.py b/tests/test_import_project.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_initialization.py b/tests/test_initialization.py index 1ea0ece..38357bf 100644 --- a/tests/test_initialization.py +++ b/tests/test_initialization.py @@ -9,31 +9,6 @@ def test_init_folders(tmp_path): assert os.path.exists(str(dataset_path / "backlogger.db")) -def test_init_folders_no_tracker(tmp_path): - dataset_path = tmp_path / "test_dataset" - init.create(str(dataset_path), tracker="None") - assert os.path.exists(str(dataset_path)) - assert os.path.exists(str(dataset_path / "backlogger.db")) - - -def test_init_config(tmp_path): - dataset_path = tmp_path / "test_dataset" - init.create(str(dataset_path), tracker="None") - config_path = dataset_path / ".corrlib" - assert os.path.exists(str(config_path)) - from configparser import ConfigParser - config = ConfigParser() - config.read(str(config_path)) - assert config.get("core", "tracker") == "None" - assert config.get("core", "version") == "1.0" - assert config.get("core", "cached") == "True" - assert config.get("paths", "db") == "backlogger.db" - assert config.get("paths", "projects_path") == "projects" - assert config.get("paths", "archive_path") == "archive" - assert config.get("paths", "toml_imports_path") == "toml_imports" - assert config.get("paths", "import_scripts_path") == "import_scripts" - - def test_init_db(tmp_path): dataset_path = tmp_path / "test_dataset" init.create(str(dataset_path)) @@ -49,7 +24,7 @@ def test_init_db(tmp_path): table_names = [table[0] for table in tables] for expected_table in expected_tables: assert expected_table in table_names - + cursor.execute("SELECT * FROM projects;") projects = cursor.fetchall() assert len(projects) == 0 @@ -72,7 +47,7 @@ def test_init_db(tmp_path): project_column_names = [col[1] for col in project_columns] for expected_col in expected_project_columns: assert expected_col in project_column_names - + cursor.execute("PRAGMA table_info('backlogs');") backlog_columns = cursor.fetchall() expected_backlog_columns = [