Add Flask web UI, Docker Compose, core engine + tests
- physcom core: CLI, 5-pass pipeline, SQLite repo, 37 tests
- physcom_web: Flask app with HTMX for entity/domain/pipeline/results CRUD
- Docker Compose: web + cli services sharing a named volume for the DB
- Clean up local settings to use wildcard permissions

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
0
src/physcom/db/__init__.py
Normal file
0
src/physcom/db/__init__.py
Normal file
414
src/physcom/db/repository.py
Normal file
414
src/physcom/db/repository.py
Normal file
@@ -0,0 +1,414 @@
|
||||
"""CRUD operations for all entities."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import sqlite3
|
||||
from typing import Sequence
|
||||
|
||||
from physcom.models.entity import Dependency, Entity
|
||||
from physcom.models.domain import Domain, MetricBound
|
||||
from physcom.models.combination import Combination
|
||||
|
||||
|
||||
class Repository:
    """Thin data-access layer over the SQLite database.

    Every public mutator commits immediately after a successful write, so
    the connection is left consistent between calls. Rows are handled as
    :class:`sqlite3.Row` internally and converted to model objects or plain
    dicts at the boundary.
    """

    # Shared SELECT fragments so the column lists cannot drift between the
    # single-row and list queries that previously duplicated them.
    _ENTITY_SELECT = (
        "SELECT e.id, e.name, e.description, d.name as dimension, e.dimension_id "
        "FROM entities e JOIN dimensions d ON e.dimension_id = d.id"
    )
    _RESULT_SELECT = (
        "SELECT cr.*, c.hash, c.status as combo_status, d.name as domain_name "
        "FROM combination_results cr "
        "JOIN combinations c ON cr.combination_id = c.id "
        "JOIN domains d ON cr.domain_id = d.id "
        "WHERE d.name = ?"
    )

    def __init__(self, conn: sqlite3.Connection) -> None:
        self.conn = conn
        # Name-based column access is relied on throughout this class.
        self.conn.row_factory = sqlite3.Row

    # ── Dimensions ──────────────────────────────────────────────

    def ensure_dimension(self, name: str, description: str = "") -> int:
        """Insert dimension if it doesn't exist, return its id."""
        cur = self.conn.execute(
            "INSERT OR IGNORE INTO dimensions (name, description) VALUES (?, ?)",
            (name, description),
        )
        # rowcount is 0 when the INSERT was ignored (name already present);
        # in that case lastrowid is stale and the id must be looked up.
        if cur.lastrowid and cur.rowcount:
            self.conn.commit()
            return cur.lastrowid
        row = self.conn.execute(
            "SELECT id FROM dimensions WHERE name = ?", (name,)
        ).fetchone()
        return row["id"]

    def list_dimensions(self) -> list[dict]:
        """Return all dimensions as plain dicts, ordered by name."""
        rows = self.conn.execute("SELECT * FROM dimensions ORDER BY name").fetchall()
        return [dict(r) for r in rows]

    # ── Entities ────────────────────────────────────────────────

    def add_entity(self, entity: Entity) -> Entity:
        """Persist an Entity (and its dependencies). Returns it with id set."""
        dim_id = self.ensure_dimension(entity.dimension)
        cur = self.conn.execute(
            "INSERT INTO entities (dimension_id, name, description) VALUES (?, ?, ?)",
            (dim_id, entity.name, entity.description),
        )
        entity.id = cur.lastrowid
        entity.dimension_id = dim_id
        for dep in entity.dependencies:
            self._insert_dependency(entity.id, dep)
        self.conn.commit()
        return entity

    def get_entity(self, entity_id: int) -> Entity | None:
        """Return one Entity with its dependencies, or None if absent."""
        row = self.conn.execute(
            f"{self._ENTITY_SELECT} WHERE e.id = ?", (entity_id,)
        ).fetchone()
        return self._entity_from_row(row) if row else None

    def list_entities(self, dimension: str | None = None) -> list[Entity]:
        """Return all entities, optionally restricted to one dimension name."""
        if dimension:
            rows = self.conn.execute(
                f"{self._ENTITY_SELECT} WHERE d.name = ? ORDER BY e.name",
                (dimension,),
            ).fetchall()
        else:
            rows = self.conn.execute(
                f"{self._ENTITY_SELECT} ORDER BY d.name, e.name"
            ).fetchall()
        return [self._entity_from_row(r) for r in rows]

    def _entity_from_row(self, row: sqlite3.Row) -> Entity:
        """Build an Entity (loading its dependencies) from a joined row."""
        return Entity(
            id=row["id"],
            name=row["name"],
            description=row["description"] or "",
            dimension=row["dimension"],
            dimension_id=row["dimension_id"],
            dependencies=self._load_dependencies(row["id"]),
        )

    def _load_dependencies(self, entity_id: int) -> list[Dependency]:
        """Return all Dependency rows belonging to one entity."""
        rows = self.conn.execute(
            "SELECT * FROM dependencies WHERE entity_id = ?", (entity_id,)
        ).fetchall()
        return [self._dependency_from_row(r) for r in rows]

    @staticmethod
    def _dependency_from_row(row: sqlite3.Row) -> Dependency:
        """Map a dependencies table row to a Dependency model."""
        return Dependency(
            id=row["id"], category=row["category"], key=row["key"],
            value=row["value"], unit=row["unit"], constraint_type=row["constraint_type"],
        )

    def update_entity(self, entity_id: int, name: str, description: str) -> None:
        """Update an entity's name and description in place."""
        self.conn.execute(
            "UPDATE entities SET name = ?, description = ? WHERE id = ?",
            (name, description, entity_id),
        )
        self.conn.commit()

    def delete_entity(self, entity_id: int) -> None:
        """Delete an entity plus its dependencies and combination links."""
        self.conn.execute("DELETE FROM dependencies WHERE entity_id = ?", (entity_id,))
        self.conn.execute("DELETE FROM combination_entities WHERE entity_id = ?", (entity_id,))
        self.conn.execute("DELETE FROM entities WHERE id = ?", (entity_id,))
        self.conn.commit()

    def _insert_dependency(self, entity_id: int, dep: Dependency) -> Dependency:
        """Insert one dependency row (no commit) and set dep.id. Shared by
        add_entity and add_dependency so the column list exists once."""
        cur = self.conn.execute(
            """INSERT INTO dependencies
               (entity_id, category, key, value, unit, constraint_type)
               VALUES (?, ?, ?, ?, ?, ?)""",
            (entity_id, dep.category, dep.key, dep.value, dep.unit, dep.constraint_type),
        )
        dep.id = cur.lastrowid
        return dep

    def add_dependency(self, entity_id: int, dep: Dependency) -> Dependency:
        """Persist one Dependency for an entity. Returns it with id set."""
        dep = self._insert_dependency(entity_id, dep)
        self.conn.commit()
        return dep

    def update_dependency(self, dep_id: int, dep: Dependency) -> None:
        """Overwrite all mutable fields of a dependency row."""
        self.conn.execute(
            """UPDATE dependencies
               SET category = ?, key = ?, value = ?, unit = ?, constraint_type = ?
               WHERE id = ?""",
            (dep.category, dep.key, dep.value, dep.unit, dep.constraint_type, dep_id),
        )
        self.conn.commit()

    def delete_dependency(self, dep_id: int) -> None:
        """Delete a single dependency row by id."""
        self.conn.execute("DELETE FROM dependencies WHERE id = ?", (dep_id,))
        self.conn.commit()

    def get_dependency(self, dep_id: int) -> Dependency | None:
        """Return one Dependency by id, or None if absent."""
        row = self.conn.execute(
            "SELECT * FROM dependencies WHERE id = ?", (dep_id,)
        ).fetchone()
        return self._dependency_from_row(row) if row else None

    # ── Domains & Metrics ───────────────────────────────────────

    def ensure_metric(self, name: str, unit: str = "", description: str = "") -> int:
        """Insert metric if it doesn't exist, return its id."""
        self.conn.execute(
            "INSERT OR IGNORE INTO metrics (name, unit, description) VALUES (?, ?, ?)",
            (name, unit, description),
        )
        row = self.conn.execute("SELECT id FROM metrics WHERE name = ?", (name,)).fetchone()
        self.conn.commit()
        return row["id"]

    def add_domain(self, domain: Domain) -> Domain:
        """Persist a Domain and its metric bounds. Returns it with ids set."""
        cur = self.conn.execute(
            "INSERT INTO domains (name, description) VALUES (?, ?)",
            (domain.name, domain.description),
        )
        domain.id = cur.lastrowid
        for mb in domain.metric_bounds:
            # Metrics are created on demand so bounds can reference new names.
            metric_id = self.ensure_metric(mb.metric_name)
            mb.metric_id = metric_id
            self.conn.execute(
                """INSERT INTO domain_metric_weights
                   (domain_id, metric_id, weight, norm_min, norm_max)
                   VALUES (?, ?, ?, ?, ?)""",
                (domain.id, metric_id, mb.weight, mb.norm_min, mb.norm_max),
            )
        self.conn.commit()
        return domain

    def get_domain(self, name: str) -> Domain | None:
        """Return one Domain (with metric bounds) by name, or None."""
        row = self.conn.execute("SELECT * FROM domains WHERE name = ?", (name,)).fetchone()
        if not row:
            return None
        weights = self.conn.execute(
            """SELECT m.name, dmw.weight, dmw.norm_min, dmw.norm_max, dmw.metric_id
               FROM domain_metric_weights dmw
               JOIN metrics m ON dmw.metric_id = m.id
               WHERE dmw.domain_id = ?""",
            (row["id"],),
        ).fetchall()
        return Domain(
            id=row["id"],
            name=row["name"],
            description=row["description"] or "",
            metric_bounds=[
                MetricBound(
                    metric_name=w["name"], weight=w["weight"],
                    norm_min=w["norm_min"], norm_max=w["norm_max"],
                    metric_id=w["metric_id"],
                )
                for w in weights
            ],
        )

    def list_domains(self) -> list[Domain]:
        """Return all domains, fully loaded, ordered by name."""
        rows = self.conn.execute("SELECT name FROM domains ORDER BY name").fetchall()
        return [self.get_domain(r["name"]) for r in rows]

    # ── Combinations ────────────────────────────────────────────

    @staticmethod
    def compute_hash(entity_ids: Sequence[int]) -> str:
        """Order-independent 16-hex-char hash identifying an entity set."""
        key = ",".join(str(eid) for eid in sorted(entity_ids))
        return hashlib.sha256(key.encode()).hexdigest()[:16]

    def save_combination(self, combination: Combination) -> Combination:
        """Persist a combination, deduplicating by entity-set hash.

        If a combination with the same hash already exists, its id is
        attached and nothing else is written.
        """
        entity_ids = [e.id for e in combination.entities]
        combination.hash = self.compute_hash(entity_ids)

        existing = self.conn.execute(
            "SELECT id FROM combinations WHERE hash = ?", (combination.hash,)
        ).fetchone()
        if existing:
            combination.id = existing["id"]
            return combination

        cur = self.conn.execute(
            "INSERT INTO combinations (hash, status, block_reason) VALUES (?, ?, ?)",
            (combination.hash, combination.status, combination.block_reason),
        )
        combination.id = cur.lastrowid
        self.conn.executemany(
            "INSERT INTO combination_entities (combination_id, entity_id) VALUES (?, ?)",
            [(combination.id, eid) for eid in entity_ids],
        )
        self.conn.commit()
        return combination

    def update_combination_status(
        self, combo_id: int, status: str, block_reason: str | None = None
    ) -> None:
        """Set a combination's status (and optional block reason)."""
        self.conn.execute(
            "UPDATE combinations SET status = ?, block_reason = ? WHERE id = ?",
            (status, block_reason, combo_id),
        )
        self.conn.commit()

    def get_combination(self, combo_id: int) -> Combination | None:
        """Return one Combination with its entities loaded, or None."""
        row = self.conn.execute("SELECT * FROM combinations WHERE id = ?", (combo_id,)).fetchone()
        if not row:
            return None
        entity_rows = self.conn.execute(
            "SELECT entity_id FROM combination_entities WHERE combination_id = ?",
            (combo_id,),
        ).fetchall()
        entities = [self.get_entity(er["entity_id"]) for er in entity_rows]
        return Combination(
            id=row["id"], hash=row["hash"], status=row["status"],
            block_reason=row["block_reason"], entities=entities,
        )

    def list_combinations(self, status: str | None = None) -> list[Combination]:
        """Return all combinations, optionally filtered by status."""
        if status:
            rows = self.conn.execute(
                "SELECT id FROM combinations WHERE status = ? ORDER BY id", (status,)
            ).fetchall()
        else:
            rows = self.conn.execute("SELECT id FROM combinations ORDER BY id").fetchall()
        return [self.get_combination(r["id"]) for r in rows]

    # ── Scores & Results ────────────────────────────────────────

    def save_scores(
        self,
        combo_id: int,
        domain_id: int,
        scores: list[dict],
    ) -> None:
        """Save per-metric scores. Each dict: metric_id, raw_value, normalized_score, estimation_method, confidence."""
        self.conn.executemany(
            """INSERT OR REPLACE INTO combination_scores
               (combination_id, domain_id, metric_id, raw_value, normalized_score,
                estimation_method, confidence)
               VALUES (?, ?, ?, ?, ?, ?, ?)""",
            [
                (combo_id, domain_id, s["metric_id"], s["raw_value"],
                 s["normalized_score"], s["estimation_method"], s["confidence"])
                for s in scores
            ],
        )
        self.conn.commit()

    def save_result(
        self,
        combo_id: int,
        domain_id: int,
        composite_score: float,
        pass_reached: int,
        novelty_flag: str | None = None,
        llm_review: str | None = None,
        human_notes: str | None = None,
    ) -> None:
        """Upsert the composite result row for (combination, domain)."""
        self.conn.execute(
            """INSERT OR REPLACE INTO combination_results
               (combination_id, domain_id, composite_score, novelty_flag,
                llm_review, human_notes, pass_reached)
               VALUES (?, ?, ?, ?, ?, ?, ?)""",
            (combo_id, domain_id, composite_score, novelty_flag,
             llm_review, human_notes, pass_reached),
        )
        self.conn.commit()

    def get_combination_scores(self, combo_id: int, domain_id: int) -> list[dict]:
        """Return per-metric scores for a combination in a domain."""
        rows = self.conn.execute(
            """SELECT cs.*, m.name as metric_name
               FROM combination_scores cs
               JOIN metrics m ON cs.metric_id = m.id
               WHERE cs.combination_id = ? AND cs.domain_id = ?""",
            (combo_id, domain_id),
        ).fetchall()
        return [dict(r) for r in rows]

    def count_combinations_by_status(self) -> dict[str, int]:
        """Return a status -> count mapping over all combinations."""
        rows = self.conn.execute(
            "SELECT status, COUNT(*) as cnt FROM combinations GROUP BY status"
        ).fetchall()
        return {r["status"]: r["cnt"] for r in rows}

    def get_result(self, combo_id: int, domain_id: int) -> dict | None:
        """Return a single combination_result row."""
        row = self.conn.execute(
            """SELECT cr.*, d.name as domain_name
               FROM combination_results cr
               JOIN domains d ON cr.domain_id = d.id
               WHERE cr.combination_id = ? AND cr.domain_id = ?""",
            (combo_id, domain_id),
        ).fetchone()
        return dict(row) if row else None

    def _result_entry(self, row: sqlite3.Row) -> dict:
        """Map a joined result row to the public dict shape.

        Loads the full combination, which issues extra queries per row —
        acceptable for the ranked, human-reviewed result lists served here.
        """
        return {
            "combination": self.get_combination(row["combination_id"]),
            "composite_score": row["composite_score"],
            "novelty_flag": row["novelty_flag"],
            "llm_review": row["llm_review"],
            "human_notes": row["human_notes"],
            "pass_reached": row["pass_reached"],
        }

    def get_all_results(self, domain_name: str, status: str | None = None) -> list[dict]:
        """Return all results for a domain, optionally filtered by combo status."""
        if status:
            rows = self.conn.execute(
                f"{self._RESULT_SELECT} AND c.status = ? ORDER BY cr.composite_score DESC",
                (domain_name, status),
            ).fetchall()
        else:
            rows = self.conn.execute(
                f"{self._RESULT_SELECT} ORDER BY cr.composite_score DESC",
                (domain_name,),
            ).fetchall()
        # get_all_results additionally exposes the domain_id of each row.
        return [{**self._result_entry(r), "domain_id": r["domain_id"]} for r in rows]

    def get_top_results(self, domain_name: str, limit: int = 10) -> list[dict]:
        """Return top-N results for a domain, ordered by composite_score DESC."""
        rows = self.conn.execute(
            f"{self._RESULT_SELECT} ORDER BY cr.composite_score DESC LIMIT ?",
            (domain_name, limit),
        ).fetchall()
        return [self._result_entry(r) for r in rows]
|
||||
111
src/physcom/db/schema.py
Normal file
111
src/physcom/db/schema.py
Normal file
@@ -0,0 +1,111 @@
|
||||
"""DDL, table creation, and schema initialization."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
from pathlib import Path
|
||||
|
||||
DDL = """
|
||||
CREATE TABLE IF NOT EXISTS dimensions (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT UNIQUE NOT NULL,
|
||||
description TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS entities (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
dimension_id INTEGER NOT NULL REFERENCES dimensions(id),
|
||||
name TEXT NOT NULL,
|
||||
description TEXT,
|
||||
UNIQUE(dimension_id, name)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS dependencies (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
entity_id INTEGER NOT NULL REFERENCES entities(id),
|
||||
category TEXT NOT NULL,
|
||||
key TEXT NOT NULL,
|
||||
value TEXT NOT NULL,
|
||||
unit TEXT,
|
||||
constraint_type TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS domains (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT UNIQUE NOT NULL,
|
||||
description TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS metrics (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
name TEXT UNIQUE NOT NULL,
|
||||
unit TEXT,
|
||||
description TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS domain_metric_weights (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
domain_id INTEGER NOT NULL REFERENCES domains(id),
|
||||
metric_id INTEGER NOT NULL REFERENCES metrics(id),
|
||||
weight REAL NOT NULL,
|
||||
norm_min REAL,
|
||||
norm_max REAL,
|
||||
UNIQUE(domain_id, metric_id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS combinations (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
hash TEXT UNIQUE NOT NULL,
|
||||
status TEXT NOT NULL DEFAULT 'pending',
|
||||
block_reason TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS combination_entities (
|
||||
combination_id INTEGER NOT NULL REFERENCES combinations(id),
|
||||
entity_id INTEGER NOT NULL REFERENCES entities(id),
|
||||
PRIMARY KEY (combination_id, entity_id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS combination_scores (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
combination_id INTEGER NOT NULL REFERENCES combinations(id),
|
||||
domain_id INTEGER NOT NULL REFERENCES domains(id),
|
||||
metric_id INTEGER NOT NULL REFERENCES metrics(id),
|
||||
raw_value REAL,
|
||||
normalized_score REAL,
|
||||
estimation_method TEXT,
|
||||
confidence REAL,
|
||||
UNIQUE(combination_id, domain_id, metric_id)
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS combination_results (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
combination_id INTEGER NOT NULL REFERENCES combinations(id),
|
||||
domain_id INTEGER NOT NULL REFERENCES domains(id),
|
||||
composite_score REAL,
|
||||
novelty_flag TEXT,
|
||||
llm_review TEXT,
|
||||
human_notes TEXT,
|
||||
pass_reached INTEGER,
|
||||
UNIQUE(combination_id, domain_id)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_deps_entity ON dependencies(entity_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_deps_category_key ON dependencies(category, key);
|
||||
CREATE INDEX IF NOT EXISTS idx_combo_status ON combinations(status);
|
||||
CREATE INDEX IF NOT EXISTS idx_scores_combo_domain ON combination_scores(combination_id, domain_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_results_domain_score ON combination_results(domain_id, composite_score DESC);
|
||||
"""
|
||||
|
||||
|
||||
def init_db(db_path: str | Path) -> sqlite3.Connection:
    """Create/open the database and ensure all tables exist.

    Accepts a string or Path, creates any missing parent directories,
    enables WAL journaling and foreign-key enforcement, applies the DDL
    script, and returns the open connection.
    """
    target = Path(db_path)
    target.parent.mkdir(parents=True, exist_ok=True)
    conn = sqlite3.connect(str(target))
    # Connection-level pragmas must precede schema creation.
    for pragma in ("journal_mode=WAL", "foreign_keys=ON"):
        conn.execute(f"PRAGMA {pragma}")
    conn.executescript(DDL)
    conn.commit()
    return conn
|
||||
Reference in New Issue
Block a user