drop old db tables and remove old functions
parent 4220ff285e
commit 74115b7e5b
3 changed files with 43 additions and 390 deletions
crud.py (400 lines changed)

@@ -177,384 +177,28 @@ async def get_or_create_user_account(
 # ===== JOURNAL ENTRY OPERATIONS =====
 
 
-async def create_journal_entry(
-    data: CreateJournalEntry, created_by: str
-) -> JournalEntry:
-    entry_id = urlsafe_short_hash()
-
-    # Validate that entry balances (sum of all amounts = 0)
-    # Beancount-style: positive amounts cancel out negative amounts
-    total_amount = sum(line.amount for line in data.lines)
-
-    if total_amount != 0:
-        raise ValueError(
-            f"Journal entry must balance (sum of amounts = 0): sum={total_amount}"
-        )
-
-    entry_date = data.entry_date or datetime.now()
-
-    journal_entry = JournalEntry(
-        id=entry_id,
-        description=data.description,
-        entry_date=entry_date,
-        created_by=created_by,
-        created_at=datetime.now(),
-        reference=data.reference,
-        lines=[],
-        flag=data.flag,
-        meta=data.meta,
-    )
-
-    # Insert journal entry without the lines field (lines are stored in entry_lines table)
-    await db.execute(
-        """
-        INSERT INTO journal_entries (id, description, entry_date, created_by, created_at, reference, flag, meta)
-        VALUES (:id, :description, :entry_date, :created_by, :created_at, :reference, :flag, :meta)
-        """,
-        {
-            "id": journal_entry.id,
-            "description": journal_entry.description,
-            "entry_date": journal_entry.entry_date,
-            "created_by": journal_entry.created_by,
-            "created_at": journal_entry.created_at,
-            "reference": journal_entry.reference,
-            "flag": journal_entry.flag.value,
-            "meta": json.dumps(journal_entry.meta),
-        },
-    )
-
-    # Create entry lines
-    lines = []
-    for line_data in data.lines:
-        line_id = urlsafe_short_hash()
-        line = EntryLine(
-            id=line_id,
-            journal_entry_id=entry_id,
-            account_id=line_data.account_id,
-            amount=line_data.amount,
-            description=line_data.description,
-            metadata=line_data.metadata,
-        )
-        # Insert with metadata as JSON string
-        await db.execute(
-            """
-            INSERT INTO entry_lines (id, journal_entry_id, account_id, amount, description, metadata)
-            VALUES (:id, :journal_entry_id, :account_id, :amount, :description, :metadata)
-            """,
-            {
-                "id": line.id,
-                "journal_entry_id": line.journal_entry_id,
-                "account_id": line.account_id,
-                "amount": line.amount,
-                "description": line.description,
-                "metadata": json.dumps(line.metadata),
-            },
-        )
-        lines.append(line)
-
-    journal_entry.lines = lines
-    return journal_entry
-
-
-async def get_journal_entry(entry_id: str) -> Optional[JournalEntry]:
-    entry = await db.fetchone(
-        "SELECT * FROM journal_entries WHERE id = :id",
-        {"id": entry_id},
-        JournalEntry,
-    )
-
-    if entry:
-        entry.lines = await get_entry_lines(entry_id)
-
-    return entry
-
-
-async def get_journal_entry_by_reference(reference: str) -> Optional[JournalEntry]:
-    """Get a journal entry by its reference field (e.g., payment_hash)"""
-    entry = await db.fetchone(
-        "SELECT * FROM journal_entries WHERE reference = :reference",
-        {"reference": reference},
-        JournalEntry,
-    )
-
-    if entry:
-        entry.lines = await get_entry_lines(entry.id)
-
-    return entry
-
-
-async def get_entry_lines(journal_entry_id: str) -> list[EntryLine]:
-    rows = await db.fetchall(
-        "SELECT * FROM entry_lines WHERE journal_entry_id = :id",
-        {"id": journal_entry_id},
-    )
-
-    lines = []
-    for row in rows:
-        # Parse metadata from JSON string
-        metadata = json.loads(row.metadata) if row.metadata else {}
-        line = EntryLine(
-            id=row.id,
-            journal_entry_id=row.journal_entry_id,
-            account_id=row.account_id,
-            amount=row.amount,
-            description=row.description,
-            metadata=metadata,
-        )
-        lines.append(line)
-
-    return lines
-
-
-async def get_all_journal_entries(limit: int = 100, offset: int = 0) -> list[JournalEntry]:
-    entries_data = await db.fetchall(
-        """
-        SELECT * FROM journal_entries
-        ORDER BY entry_date DESC, created_at DESC
-        LIMIT :limit OFFSET :offset
-        """,
-        {"limit": limit, "offset": offset},
-    )
-
-    entries = []
-    for entry_data in entries_data:
-        # Parse flag and meta from database
-        from .models import JournalEntryFlag
-        flag = JournalEntryFlag(entry_data.get("flag", "*"))
-        meta = json.loads(entry_data.get("meta", "{}")) if entry_data.get("meta") else {}
-
-        entry = JournalEntry(
-            id=entry_data["id"],
-            description=entry_data["description"],
-            entry_date=entry_data["entry_date"],
-            created_by=entry_data["created_by"],
-            created_at=entry_data["created_at"],
-            reference=entry_data["reference"],
-            flag=flag,
-            meta=meta,
-            lines=[],
-        )
-        entry.lines = await get_entry_lines(entry.id)
-        entries.append(entry)
-
-    return entries
-
-
-async def get_journal_entries_by_user(
-    user_id: str, limit: int = 100, offset: int = 0
-) -> list[JournalEntry]:
-    """Get journal entries that affect the user's accounts"""
-    # Get all user-specific accounts
-    user_accounts = await db.fetchall(
-        "SELECT id FROM accounts WHERE user_id = :user_id",
-        {"user_id": user_id},
-    )
-
-    if not user_accounts:
-        return []
-
-    account_ids = [acc["id"] for acc in user_accounts]
-
-    # Get all journal entries that have lines affecting these accounts
-    # Build the IN clause with named parameters
-    placeholders = ','.join([f":account_{i}" for i in range(len(account_ids))])
-    params = {f"account_{i}": acc_id for i, acc_id in enumerate(account_ids)}
-    params["limit"] = limit
-    params["offset"] = offset
-
-    entries_data = await db.fetchall(
-        f"""
-        SELECT DISTINCT je.*
-        FROM journal_entries je
-        JOIN entry_lines el ON je.id = el.journal_entry_id
-        WHERE el.account_id IN ({placeholders})
-        ORDER BY je.entry_date DESC, je.created_at DESC
-        LIMIT :limit OFFSET :offset
-        """,
-        params,
-    )
-
-    entries = []
-    for entry_data in entries_data:
-        # Parse flag and meta from database
-        from .models import JournalEntryFlag
-        flag = JournalEntryFlag(entry_data.get("flag", "*"))
-        meta = json.loads(entry_data.get("meta", "{}")) if entry_data.get("meta") else {}
-
-        entry = JournalEntry(
-            id=entry_data["id"],
-            description=entry_data["description"],
-            entry_date=entry_data["entry_date"],
-            created_by=entry_data["created_by"],
-            created_at=entry_data["created_at"],
-            reference=entry_data["reference"],
-            flag=flag,
-            meta=meta,
-            lines=[],
-        )
-        entry.lines = await get_entry_lines(entry.id)
-        entries.append(entry)
-
-    return entries
-
-
-async def count_all_journal_entries() -> int:
-    """Count total number of journal entries"""
-    result = await db.fetchone(
-        "SELECT COUNT(*) as total FROM journal_entries"
-    )
-    return result["total"] if result else 0
-
-
-async def count_journal_entries_by_user(user_id: str) -> int:
-    """Count journal entries that affect the user's accounts"""
-    # Get all user-specific accounts
-    user_accounts = await db.fetchall(
-        "SELECT id FROM accounts WHERE user_id = :user_id",
-        {"user_id": user_id},
-    )
-
-    if not user_accounts:
-        return 0
-
-    account_ids = [acc["id"] for acc in user_accounts]
-
-    # Count journal entries that have lines affecting these accounts
-    placeholders = ','.join([f":account_{i}" for i in range(len(account_ids))])
-    params = {f"account_{i}": acc_id for i, acc_id in enumerate(account_ids)}
-
-    result = await db.fetchone(
-        f"""
-        SELECT COUNT(DISTINCT je.id) as total
-        FROM journal_entries je
-        JOIN entry_lines el ON je.id = el.journal_entry_id
-        WHERE el.account_id IN ({placeholders})
-        """,
-        params,
-    )
-    return result["total"] if result else 0
-
-
-async def get_journal_entries_by_user_and_account_type(
-    user_id: str, account_type: str, limit: int = 100, offset: int = 0
-) -> list[JournalEntry]:
-    """Get journal entries that affect the user's accounts filtered by account type"""
-    # Get all user-specific accounts of the specified type
-    user_accounts = await db.fetchall(
-        "SELECT id FROM accounts WHERE user_id = :user_id AND account_type = :account_type",
-        {"user_id": user_id, "account_type": account_type},
-    )
-
-    if not user_accounts:
-        return []
-
-    account_ids = [acc["id"] for acc in user_accounts]
-
-    # Get all journal entries that have lines affecting these accounts
-    placeholders = ','.join([f":account_{i}" for i in range(len(account_ids))])
-    params = {f"account_{i}": acc_id for i, acc_id in enumerate(account_ids)}
-    params["limit"] = limit
-    params["offset"] = offset
-
-    entries_data = await db.fetchall(
-        f"""
-        SELECT DISTINCT je.*
-        FROM journal_entries je
-        JOIN entry_lines el ON je.id = el.journal_entry_id
-        WHERE el.account_id IN ({placeholders})
-        ORDER BY je.entry_date DESC, je.created_at DESC
-        LIMIT :limit OFFSET :offset
-        """,
-        params,
-    )
-
-    entries = []
-    for entry_data in entries_data:
-        # Parse flag and meta from database
-        from .models import JournalEntryFlag
-        flag = JournalEntryFlag(entry_data.get("flag", "*"))
-        meta = json.loads(entry_data.get("meta", "{}")) if entry_data.get("meta") else {}
-
-        entry = JournalEntry(
-            id=entry_data["id"],
-            description=entry_data["description"],
-            entry_date=entry_data["entry_date"],
-            created_by=entry_data["created_by"],
-            created_at=entry_data["created_at"],
-            reference=entry_data["reference"],
-            flag=flag,
-            meta=meta,
-            lines=[],
-        )
-        entry.lines = await get_entry_lines(entry.id)
-        entries.append(entry)
-
-    return entries
-
-
-async def count_journal_entries_by_user_and_account_type(user_id: str, account_type: str) -> int:
-    """Count journal entries that affect the user's accounts filtered by account type"""
-    # Get all user-specific accounts of the specified type
-    user_accounts = await db.fetchall(
-        "SELECT id FROM accounts WHERE user_id = :user_id AND account_type = :account_type",
-        {"user_id": user_id, "account_type": account_type},
-    )
-
-    if not user_accounts:
-        return 0
-
-    account_ids = [acc["id"] for acc in user_accounts]
-
-    # Count journal entries that have lines affecting these accounts
-    placeholders = ','.join([f":account_{i}" for i in range(len(account_ids))])
-    params = {f"account_{i}": acc_id for i, acc_id in enumerate(account_ids)}
-
-    result = await db.fetchone(
-        f"""
-        SELECT COUNT(DISTINCT je.id) as total
-        FROM journal_entries je
-        JOIN entry_lines el ON je.id = el.journal_entry_id
-        WHERE el.account_id IN ({placeholders})
-        """,
-        params,
-    )
-    return result["total"] if result else 0
-
-
-# ===== BALANCE AND REPORTING =====
-
-
-async def get_account_transactions(
-    account_id: str, limit: int = 100
-) -> list[tuple[JournalEntry, EntryLine]]:
-    """Get all transactions affecting a specific account"""
-    rows = await db.fetchall(
-        """
-        SELECT * FROM entry_lines
-        WHERE account_id = :id
-        ORDER BY id DESC
-        LIMIT :limit
-        """,
-        {"id": account_id, "limit": limit},
-    )
-
-    transactions = []
-    for row in rows:
-        # Parse metadata from JSON string
-        metadata = json.loads(row.metadata) if row.metadata else {}
-        line = EntryLine(
-            id=row.id,
-            journal_entry_id=row.journal_entry_id,
-            account_id=row.account_id,
-            amount=row.amount,
-            description=row.description,
-            metadata=metadata,
-        )
-        entry = await get_journal_entry(line.journal_entry_id)
-        if entry:
-            transactions.append((entry, line))
-
-    return transactions
+# ===== JOURNAL ENTRY OPERATIONS (REMOVED) =====
+#
+# All journal entry operations have been moved to Fava/Beancount.
+# Castle no longer maintains its own journal_entries and entry_lines tables.
+#
+# For journal entry operations, see:
+# - views_api.py: api_create_journal_entry() - writes to Fava via FavaClient
+# - views_api.py: API endpoints query Fava via FavaClient for reading entries
+#
+# Migration: m016_drop_obsolete_journal_tables
+# Removed functions:
+# - create_journal_entry()
+# - get_journal_entry()
+# - get_journal_entry_by_reference()
+# - get_entry_lines()
+# - get_all_journal_entries()
+# - get_journal_entries_by_user()
+# - count_all_journal_entries()
+# - count_journal_entries_by_user()
+# - get_journal_entries_by_user_and_account_type()
+# - count_journal_entries_by_user_and_account_type()
+# - get_account_transactions()
 
 
 # ===== SETTINGS =====
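The removed create_journal_entry() enforced double-entry balancing by hand: the line amounts of an entry must sum to zero. That is the invariant Beancount checks natively once entries live in Fava, which is what makes the local journal_entries/entry_lines tables redundant. A minimal, self-contained sketch of that invariant and of how such an entry reads as Beancount text; the account names, the SAT currency, and the render helper are illustrative assumptions, not Castle code:

from datetime import date

# Hypothetical postings in the shape of the old entry_lines rows:
# (account, amount) pairs that must sum to zero, Beancount-style.
lines = [
    ("Assets:Castle:UserWallet", 1500),
    ("Liabilities:Castle:Treasury", -1500),
]

# The balance check the removed create_journal_entry() performed by hand.
assert sum(amount for _, amount in lines) == 0, "journal entry must balance"


def render_beancount_entry(entry_date: date, flag: str, description: str, postings) -> str:
    """Render a balanced entry as Beancount text (SAT is an illustrative currency)."""
    out = [f'{entry_date.isoformat()} {flag} "{description}"']
    out += [f"  {account}  {amount} SAT" for account, amount in postings]
    return "\n".join(out)


print(render_beancount_entry(date.today(), "*", "example transfer", lines))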
migrations.py (21 lines changed)

@@ -628,3 +628,24 @@ async def m015_convert_to_single_amount_field(db):
         CREATE INDEX idx_entry_lines_account ON entry_lines (account_id)
         """
     )
+
+
+async def m016_drop_obsolete_journal_tables(db):
+    """
+    Drop journal_entries and entry_lines tables.
+
+    Castle now uses Fava/Beancount as the single source of truth for accounting data.
+    These tables are no longer written to or read from.
+
+    All journal entry operations now:
+    - Write: Submit to Fava via FavaClient.add_entry()
+    - Read: Query Fava via FavaClient.get_entries()
+
+    Migration completed as part of Castle extension cleanup (Nov 2025).
+    No backwards compatibility concerns - user explicitly approved.
+    """
+    # Drop entry_lines first (has foreign key to journal_entries)
+    await db.execute("DROP TABLE IF EXISTS entry_lines")
+
+    # Drop journal_entries
+    await db.execute("DROP TABLE IF EXISTS journal_entries")
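The drop order in m016_drop_obsolete_journal_tables follows the dependency: entry_lines references journal_entries, so the child table goes first, and IF EXISTS keeps a re-run harmless. A standalone sqlite3 sketch of that behavior (illustrative only, not the extension's migration code):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("PRAGMA foreign_keys = ON")
conn.execute("CREATE TABLE journal_entries (id TEXT PRIMARY KEY)")
conn.execute(
    "CREATE TABLE entry_lines ("
    "  id TEXT PRIMARY KEY,"
    "  journal_entry_id TEXT REFERENCES journal_entries (id)"
    ")"
)

# Child table first, then the parent, mirroring the order used in m016.
conn.execute("DROP TABLE IF EXISTS entry_lines")
conn.execute("DROP TABLE IF EXISTS journal_entries")

# IF EXISTS makes a second run a no-op instead of an error.
conn.execute("DROP TABLE IF EXISTS entry_lines")
conn.execute("DROP TABLE IF EXISTS journal_entries")

print(conn.execute("SELECT name FROM sqlite_master WHERE type = 'table'").fetchall())  # []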
views_api.py (12 lines changed)

@@ -32,7 +32,6 @@ from .crud import (
     get_all_user_wallet_settings,
     get_balance_assertion,
     get_balance_assertions,
-    get_journal_entry,
     get_manual_payment_request,
     get_or_create_user_account,
     get_user_manual_payment_requests,

@@ -748,17 +747,6 @@ async def api_get_pending_entries(
     return pending_entries
 
 
-@castle_api_router.get("/api/v1/entries/{entry_id}")
-async def api_get_journal_entry(entry_id: str) -> JournalEntry:
-    """Get a specific journal entry"""
-    entry = await get_journal_entry(entry_id)
-    if not entry:
-        raise HTTPException(
-            status_code=HTTPStatus.NOT_FOUND, detail="Journal entry not found"
-        )
-    return entry
-
-
 @castle_api_router.post("/api/v1/entries", status_code=HTTPStatus.CREATED)
 async def api_create_journal_entry(
     data: CreateJournalEntry,
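With api_get_journal_entry() removed, reads and writes both go through the Fava client, as the crud.py comment block and the m016 docstring describe: FavaClient.add_entry() for writes, FavaClient.get_entries() for reads. A rough sketch of that shape; only those two method names come from this commit, while the protocol, signatures, and return types below are assumptions for illustration, not the extension's actual interface:

from typing import Any, Protocol


class _FavaClientLike(Protocol):
    # add_entry()/get_entries() are named in the m016 docstring; their
    # signatures here are assumptions for illustration.
    async def add_entry(self, entry: Any) -> None: ...
    async def get_entries(self) -> list[Any]: ...


async def create_entry_via_fava(client: _FavaClientLike, entry: Any) -> None:
    # Write path: submit the new journal entry to Fava instead of INSERTing
    # into the dropped journal_entries / entry_lines tables.
    await client.add_entry(entry)


async def list_entries_via_fava(client: _FavaClientLike) -> list[Any]:
    # Read path: query Fava rather than SELECTing from local tables.
    return await client.get_entries()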