drop old db tables and remove old functions
This commit is contained in:
parent
4220ff285e
commit
74115b7e5b
3 changed files with 43 additions and 390 deletions
400
crud.py
400
crud.py
|
|
@ -177,384 +177,28 @@ async def get_or_create_user_account(
|
|||
# ===== JOURNAL ENTRY OPERATIONS =====
|
||||
|
||||
|
||||
async def create_journal_entry(
    data: CreateJournalEntry, created_by: str
) -> JournalEntry:
    """Create a balanced journal entry and persist its lines.

    Beancount-style double entry: the amounts of all lines must sum to
    zero (positive amounts cancel negative amounts).

    Args:
        data: Entry payload (description, lines, optional entry_date,
            reference, flag, meta).
        created_by: Identifier of the creating user.

    Returns:
        The persisted JournalEntry with its EntryLine objects attached.

    Raises:
        ValueError: If the entry has no lines or the amounts do not sum to 0.
    """
    # An entry with no lines would trivially "balance" (sum of nothing is 0),
    # so reject that case explicitly before the balance check.
    if not data.lines:
        raise ValueError("Journal entry must have at least one line")

    total_amount = sum(line.amount for line in data.lines)
    if total_amount != 0:
        raise ValueError(
            f"Journal entry must balance (sum of amounts = 0): sum={total_amount}"
        )

    entry_id = urlsafe_short_hash()
    # Single timestamp so a defaulted entry_date matches created_at exactly.
    now = datetime.now()
    entry_date = data.entry_date or now

    journal_entry = JournalEntry(
        id=entry_id,
        description=data.description,
        entry_date=entry_date,
        created_by=created_by,
        created_at=now,
        reference=data.reference,
        lines=[],
        flag=data.flag,
        meta=data.meta,
    )

    # Insert the journal entry without the lines field — lines are stored
    # separately in the entry_lines table.
    await db.execute(
        """
        INSERT INTO journal_entries (id, description, entry_date, created_by, created_at, reference, flag, meta)
        VALUES (:id, :description, :entry_date, :created_by, :created_at, :reference, :flag, :meta)
        """,
        {
            "id": journal_entry.id,
            "description": journal_entry.description,
            "entry_date": journal_entry.entry_date,
            "created_by": journal_entry.created_by,
            "created_at": journal_entry.created_at,
            "reference": journal_entry.reference,
            "flag": journal_entry.flag.value,
            "meta": json.dumps(journal_entry.meta),
        },
    )

    # Create and persist one EntryLine row per input line.
    lines = []
    for line_data in data.lines:
        line = EntryLine(
            id=urlsafe_short_hash(),
            journal_entry_id=entry_id,
            account_id=line_data.account_id,
            amount=line_data.amount,
            description=line_data.description,
            metadata=line_data.metadata,
        )
        # metadata is serialized to a JSON string for storage.
        await db.execute(
            """
            INSERT INTO entry_lines (id, journal_entry_id, account_id, amount, description, metadata)
            VALUES (:id, :journal_entry_id, :account_id, :amount, :description, :metadata)
            """,
            {
                "id": line.id,
                "journal_entry_id": line.journal_entry_id,
                "account_id": line.account_id,
                "amount": line.amount,
                "description": line.description,
                "metadata": json.dumps(line.metadata),
            },
        )
        lines.append(line)

    journal_entry.lines = lines
    return journal_entry
|
||||
|
||||
|
||||
async def get_journal_entry(entry_id: str) -> Optional[JournalEntry]:
    """Fetch one journal entry by id, with its lines attached.

    Returns None when no entry matches.
    """
    entry = await db.fetchone(
        "SELECT * FROM journal_entries WHERE id = :id",
        {"id": entry_id},
        JournalEntry,
    )
    if not entry:
        return entry

    entry.lines = await get_entry_lines(entry_id)
    return entry
|
||||
|
||||
|
||||
async def get_journal_entry_by_reference(reference: str) -> Optional[JournalEntry]:
    """Get a journal entry by its reference field (e.g., payment_hash)"""
    entry = await db.fetchone(
        "SELECT * FROM journal_entries WHERE reference = :reference",
        {"reference": reference},
        JournalEntry,
    )
    if not entry:
        return entry

    # Attach the entry's lines before returning it.
    entry.lines = await get_entry_lines(entry.id)
    return entry
|
||||
|
||||
|
||||
async def get_entry_lines(journal_entry_id: str) -> list[EntryLine]:
    """Return every EntryLine belonging to one journal entry."""
    rows = await db.fetchall(
        "SELECT * FROM entry_lines WHERE journal_entry_id = :id",
        {"id": journal_entry_id},
    )
    # metadata is stored as a JSON string; NULL/empty means no metadata.
    return [
        EntryLine(
            id=row.id,
            journal_entry_id=row.journal_entry_id,
            account_id=row.account_id,
            amount=row.amount,
            description=row.description,
            metadata=json.loads(row.metadata) if row.metadata else {},
        )
        for row in rows
    ]
|
||||
|
||||
|
||||
async def get_all_journal_entries(limit: int = 100, offset: int = 0) -> list[JournalEntry]:
    """Return journal entries, newest first, each with its lines attached.

    Args:
        limit: Maximum number of entries to return.
        offset: Number of entries to skip (for pagination).

    Returns:
        A list of JournalEntry objects ordered by entry_date then created_at,
        both descending.
    """
    # Hoisted out of the per-row loop: the import is loop-invariant.
    from .models import JournalEntryFlag

    entries_data = await db.fetchall(
        """
        SELECT * FROM journal_entries
        ORDER BY entry_date DESC, created_at DESC
        LIMIT :limit OFFSET :offset
        """,
        {"limit": limit, "offset": offset},
    )

    entries = []
    for entry_data in entries_data:
        # flag defaults to "*" (cleared) when absent; meta is a JSON string.
        flag = JournalEntryFlag(entry_data.get("flag", "*"))
        raw_meta = entry_data.get("meta")
        meta = json.loads(raw_meta) if raw_meta else {}

        entry = JournalEntry(
            id=entry_data["id"],
            description=entry_data["description"],
            entry_date=entry_data["entry_date"],
            created_by=entry_data["created_by"],
            created_at=entry_data["created_at"],
            reference=entry_data["reference"],
            flag=flag,
            meta=meta,
            lines=[],
        )
        # NOTE(review): one lines query per entry (N+1); acceptable at
        # current page sizes, batch if this becomes hot.
        entry.lines = await get_entry_lines(entry.id)
        entries.append(entry)

    return entries
|
||||
|
||||
|
||||
async def get_journal_entries_by_user(
    user_id: str, limit: int = 100, offset: int = 0
) -> list[JournalEntry]:
    """Get journal entries that affect the user's accounts.

    Args:
        user_id: Owner of the accounts to match.
        limit: Maximum number of entries to return.
        offset: Number of entries to skip (for pagination).

    Returns:
        JournalEntry objects (with lines attached) that have at least one
        line posted to one of the user's accounts, newest first.
    """
    # Hoisted out of the per-row loop: the import is loop-invariant.
    from .models import JournalEntryFlag

    # All accounts owned by this user.
    user_accounts = await db.fetchall(
        "SELECT id FROM accounts WHERE user_id = :user_id",
        {"user_id": user_id},
    )
    if not user_accounts:
        return []

    account_ids = [acc["id"] for acc in user_accounts]

    # Build a named-parameter IN clause (:account_0, :account_1, ...).
    # Placeholder names come from range(), never from user data, so the
    # f-string SQL below is injection-safe.
    placeholders = ','.join(f":account_{i}" for i in range(len(account_ids)))
    params = {f"account_{i}": acc_id for i, acc_id in enumerate(account_ids)}
    params["limit"] = limit
    params["offset"] = offset

    entries_data = await db.fetchall(
        f"""
        SELECT DISTINCT je.*
        FROM journal_entries je
        JOIN entry_lines el ON je.id = el.journal_entry_id
        WHERE el.account_id IN ({placeholders})
        ORDER BY je.entry_date DESC, je.created_at DESC
        LIMIT :limit OFFSET :offset
        """,
        params,
    )

    entries = []
    for entry_data in entries_data:
        # flag defaults to "*" (cleared) when absent; meta is a JSON string.
        flag = JournalEntryFlag(entry_data.get("flag", "*"))
        raw_meta = entry_data.get("meta")
        meta = json.loads(raw_meta) if raw_meta else {}

        entry = JournalEntry(
            id=entry_data["id"],
            description=entry_data["description"],
            entry_date=entry_data["entry_date"],
            created_by=entry_data["created_by"],
            created_at=entry_data["created_at"],
            reference=entry_data["reference"],
            flag=flag,
            meta=meta,
            lines=[],
        )
        # NOTE(review): one lines query per entry (N+1); acceptable at
        # current page sizes, batch if this becomes hot.
        entry.lines = await get_entry_lines(entry.id)
        entries.append(entry)

    return entries
|
||||
|
||||
|
||||
async def count_all_journal_entries() -> int:
    """Count total number of journal entries"""
    row = await db.fetchone(
        "SELECT COUNT(*) as total FROM journal_entries"
    )
    if not row:
        return 0
    return row["total"]
|
||||
|
||||
|
||||
async def count_journal_entries_by_user(user_id: str) -> int:
    """Count journal entries that affect the user's accounts"""
    # All accounts owned by this user.
    user_accounts = await db.fetchall(
        "SELECT id FROM accounts WHERE user_id = :user_id",
        {"user_id": user_id},
    )
    if not user_accounts:
        return 0

    account_ids = [acc["id"] for acc in user_accounts]

    # Named-parameter IN clause (:account_0, :account_1, ...); placeholder
    # names are derived from indices, not user data.
    params = {f"account_{i}": acc_id for i, acc_id in enumerate(account_ids)}
    placeholders = ','.join(f":account_{i}" for i in range(len(account_ids)))

    row = await db.fetchone(
        f"""
        SELECT COUNT(DISTINCT je.id) as total
        FROM journal_entries je
        JOIN entry_lines el ON je.id = el.journal_entry_id
        WHERE el.account_id IN ({placeholders})
        """,
        params,
    )
    return row["total"] if row else 0
|
||||
|
||||
|
||||
async def get_journal_entries_by_user_and_account_type(
    user_id: str, account_type: str, limit: int = 100, offset: int = 0
) -> list[JournalEntry]:
    """Get journal entries that affect the user's accounts filtered by account type.

    Args:
        user_id: Owner of the accounts to match.
        account_type: Only accounts of this type are considered.
        limit: Maximum number of entries to return.
        offset: Number of entries to skip (for pagination).

    Returns:
        JournalEntry objects (with lines attached) that have at least one
        line posted to a matching account, newest first.
    """
    # Hoisted out of the per-row loop: the import is loop-invariant.
    from .models import JournalEntryFlag

    # Accounts owned by this user that have the requested type.
    user_accounts = await db.fetchall(
        "SELECT id FROM accounts WHERE user_id = :user_id AND account_type = :account_type",
        {"user_id": user_id, "account_type": account_type},
    )
    if not user_accounts:
        return []

    account_ids = [acc["id"] for acc in user_accounts]

    # Named-parameter IN clause (:account_0, :account_1, ...); placeholder
    # names are derived from indices, not user data, so the f-string SQL
    # below is injection-safe.
    placeholders = ','.join(f":account_{i}" for i in range(len(account_ids)))
    params = {f"account_{i}": acc_id for i, acc_id in enumerate(account_ids)}
    params["limit"] = limit
    params["offset"] = offset

    entries_data = await db.fetchall(
        f"""
        SELECT DISTINCT je.*
        FROM journal_entries je
        JOIN entry_lines el ON je.id = el.journal_entry_id
        WHERE el.account_id IN ({placeholders})
        ORDER BY je.entry_date DESC, je.created_at DESC
        LIMIT :limit OFFSET :offset
        """,
        params,
    )

    entries = []
    for entry_data in entries_data:
        # flag defaults to "*" (cleared) when absent; meta is a JSON string.
        flag = JournalEntryFlag(entry_data.get("flag", "*"))
        raw_meta = entry_data.get("meta")
        meta = json.loads(raw_meta) if raw_meta else {}

        entry = JournalEntry(
            id=entry_data["id"],
            description=entry_data["description"],
            entry_date=entry_data["entry_date"],
            created_by=entry_data["created_by"],
            created_at=entry_data["created_at"],
            reference=entry_data["reference"],
            flag=flag,
            meta=meta,
            lines=[],
        )
        # NOTE(review): one lines query per entry (N+1); acceptable at
        # current page sizes, batch if this becomes hot.
        entry.lines = await get_entry_lines(entry.id)
        entries.append(entry)

    return entries
|
||||
|
||||
|
||||
async def count_journal_entries_by_user_and_account_type(user_id: str, account_type: str) -> int:
    """Count journal entries that affect the user's accounts filtered by account type"""
    # Accounts owned by this user that have the requested type.
    user_accounts = await db.fetchall(
        "SELECT id FROM accounts WHERE user_id = :user_id AND account_type = :account_type",
        {"user_id": user_id, "account_type": account_type},
    )
    if not user_accounts:
        return 0

    account_ids = [acc["id"] for acc in user_accounts]

    # Named-parameter IN clause (:account_0, :account_1, ...); placeholder
    # names are derived from indices, not user data.
    params = {f"account_{i}": acc_id for i, acc_id in enumerate(account_ids)}
    placeholders = ','.join(f":account_{i}" for i in range(len(account_ids)))

    row = await db.fetchone(
        f"""
        SELECT COUNT(DISTINCT je.id) as total
        FROM journal_entries je
        JOIN entry_lines el ON je.id = el.journal_entry_id
        WHERE el.account_id IN ({placeholders})
        """,
        params,
    )
    return row["total"] if row else 0
|
||||
|
||||
|
||||
# ===== BALANCE AND REPORTING =====
|
||||
|
||||
|
||||
async def get_account_transactions(
    account_id: str, limit: int = 100
) -> list[tuple[JournalEntry, EntryLine]]:
    """Get all transactions affecting a specific account"""
    rows = await db.fetchall(
        """
        SELECT * FROM entry_lines
        WHERE account_id = :id
        ORDER BY id DESC
        LIMIT :limit
        """,
        {"id": account_id, "limit": limit},
    )

    transactions = []
    for row in rows:
        # metadata is stored as a JSON string; NULL/empty means no metadata.
        line = EntryLine(
            id=row.id,
            journal_entry_id=row.journal_entry_id,
            account_id=row.account_id,
            amount=row.amount,
            description=row.description,
            metadata=json.loads(row.metadata) if row.metadata else {},
        )
        # Pair each line with its parent entry; lines whose parent entry is
        # missing are silently skipped.
        parent = await get_journal_entry(line.journal_entry_id)
        if parent:
            transactions.append((parent, line))

    return transactions
|
||||
# ===== JOURNAL ENTRY OPERATIONS (REMOVED) =====
|
||||
#
|
||||
# All journal entry operations have been moved to Fava/Beancount.
|
||||
# Castle no longer maintains its own journal_entries and entry_lines tables.
|
||||
#
|
||||
# For journal entry operations, see:
|
||||
# - views_api.py: api_create_journal_entry() - writes to Fava via FavaClient
|
||||
# - views_api.py: API endpoints query Fava via FavaClient for reading entries
|
||||
#
|
||||
# Migration: m016_drop_obsolete_journal_tables
|
||||
# Removed functions:
|
||||
# - create_journal_entry()
|
||||
# - get_journal_entry()
|
||||
# - get_journal_entry_by_reference()
|
||||
# - get_entry_lines()
|
||||
# - get_all_journal_entries()
|
||||
# - get_journal_entries_by_user()
|
||||
# - count_all_journal_entries()
|
||||
# - count_journal_entries_by_user()
|
||||
# - get_journal_entries_by_user_and_account_type()
|
||||
# - count_journal_entries_by_user_and_account_type()
|
||||
# - get_account_transactions()
|
||||
|
||||
|
||||
# ===== SETTINGS =====
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue