"""Tests for corrlib.initialization.create().

Each test builds a dataset inside pytest's built-in ``tmp_path`` temporary
directory fixture.
"""

import os
import sqlite3 as sql
from configparser import ConfigParser

import corrlib.initialization as init


def test_init_folders(tmp_path):
    """create() builds the dataset directory and the backlog database."""
    dataset_path = tmp_path / "test_dataset"
    init.create(str(dataset_path))
    assert os.path.exists(str(dataset_path))
    assert os.path.exists(str(dataset_path / "backlogger.db"))


def test_init_folders_no_tracker(tmp_path):
    """create() also works without a tracker backend (tracker="None")."""
    dataset_path = tmp_path / "test_dataset"
    init.create(str(dataset_path), tracker="None")
    assert os.path.exists(str(dataset_path))
    assert os.path.exists(str(dataset_path / "backlogger.db"))


def test_init_config(tmp_path):
    """create() writes a .corrlib config file with the expected defaults."""
    dataset_path = tmp_path / "test_dataset"
    init.create(str(dataset_path), tracker="None")
    config_path = dataset_path / ".corrlib"
    assert os.path.exists(str(config_path))

    config = ConfigParser()
    config.read(str(config_path))
    assert config.get("core", "tracker") == "None"
    assert config.get("core", "version") == "1.0"
    assert config.get("core", "cached") == "True"
    assert config.get("paths", "db") == "backlogger.db"
    assert config.get("paths", "projects_path") == "projects"
    assert config.get("paths", "archive_path") == "archive"
    assert config.get("paths", "toml_imports_path") == "toml_imports"
    assert config.get("paths", "import_scripts_path") == "import_scripts"


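# For reference, the assertions above imply a .corrlib file along these lines.
# This is reconstructed from the tested keys and values only; the file written
# by init.create() may contain additional entries or a different ordering:
#
#   [core]
#   tracker = None
#   version = 1.0
#   cached = True
#
#   [paths]
#   db = backlogger.db
#   projects_path = projects
#   archive_path = archive
#   toml_imports_path = toml_imports
#   import_scripts_path = import_scripts

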
def test_init_db(tmp_path):
    """create() initialises backlogger.db with the expected schema."""
    dataset_path = tmp_path / "test_dataset"
    init.create(str(dataset_path))
    assert os.path.exists(str(dataset_path / "backlogger.db"))

    conn = sql.connect(str(dataset_path / "backlogger.db"))
    cursor = conn.cursor()

    # The database should contain the 'projects' and 'backlogs' tables.
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
    tables = cursor.fetchall()
    expected_tables = [
        'projects',
        'backlogs',
    ]
    table_names = [table[0] for table in tables]
    for expected_table in expected_tables:
        assert expected_table in table_names

    # Both tables start out empty.
    cursor.execute("SELECT * FROM projects;")
    projects = cursor.fetchall()
    assert len(projects) == 0

    cursor.execute("SELECT * FROM backlogs;")
    backlogs = cursor.fetchall()
    assert len(backlogs) == 0

    # Check the column layout of the 'projects' table.
    cursor.execute("PRAGMA table_info('projects');")
    project_columns = cursor.fetchall()
    expected_project_columns = [
        "id",
        "aliases",
        "customTags",
        "owner",
        "code",
        "created_at",
        "updated_at",
    ]
    project_column_names = [col[1] for col in project_columns]
    for expected_col in expected_project_columns:
        assert expected_col in project_column_names

    # Check the column layout of the 'backlogs' table.
    cursor.execute("PRAGMA table_info('backlogs');")
    backlog_columns = cursor.fetchall()
    expected_backlog_columns = [
        "id",
        "name",
        "ensemble",
        "code",
        "path",
        "project",
        "customTags",
        "parameters",
        "parameter_file",
        "created_at",
        "updated_at",
    ]
    backlog_column_names = [col[1] for col in backlog_columns]
    for expected_col in expected_backlog_columns:
        assert expected_col in backlog_column_names

    conn.close()
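
# A minimal sketch of the schema implied by the column checks above. Column
# types and constraints are assumptions for illustration only; the actual DDL
# used by init.create() is not asserted by these tests.
#
#   CREATE TABLE projects (
#       id, aliases, customTags, owner, code, created_at, updated_at
#   );
#   CREATE TABLE backlogs (
#       id, name, ensemble, code, path, project, customTags,
#       parameters, parameter_file, created_at, updated_at
#   );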