From 8342318fde0290a7d46c3f2a224185a26cf70112 Mon Sep 17 00:00:00 2001
From: padreug
Date: Mon, 10 Nov 2025 10:25:05 +0100
Subject: [PATCH] Refactors duplicate payment check in Fava

Improves payment recording logic by fetching recent entries and
filtering them in Python, replacing the BQL query. This addresses
issues with matching against set types in BQL and makes the
duplicate check more reliable.
---
 tasks.py     | 32 +++++++++++++++++++++-----------
 views_api.py | 36 +++++++++++++++++++++++-------------
 2 files changed, 44 insertions(+), 24 deletions(-)

diff --git a/tasks.py b/tasks.py
index d6e6c56..6331a16 100644
--- a/tasks.py
+++ b/tasks.py
@@ -152,18 +152,28 @@ async def on_invoice_paid(payment: Payment) -> None:
     fava = get_fava_client()
 
     try:
-        # Query Fava for existing payment entry
-        query = f"SELECT * WHERE links ~ 'ln-{payment.payment_hash[:16]}'"
-        async with httpx.AsyncClient(timeout=5.0) as client:
-            response = await client.get(
-                f"{fava.base_url}/query",
-                params={"query_string": query}
-            )
-            result = response.json()
+        # Check if payment already recorded by fetching recent entries
+        # Note: We can't use a BQL query with `links ~ 'pattern'` because links is a set type
+        # and BQL doesn't support regex matching on sets. Instead, fetch entries and filter in Python.
+        link_to_find = f"ln-{payment.payment_hash[:16]}"
 
-        if result.get('data', {}).get('rows'):
-            logger.info(f"Payment {payment.payment_hash} already recorded in Fava, skipping")
-            return
+        async with httpx.AsyncClient(timeout=5.0) as client:
+            # Get recent entries from Fava's journal endpoint
+            response = await client.get(
+                f"{fava.base_url}/api/journal",
+                params={"time": ""}  # Get all entries
+            )
+
+            if response.status_code == 200:
+                data = response.json()
+                entries = data.get('entries', [])
+
+                # Check if any entry has our payment link
+                for entry in entries:
+                    entry_links = entry.get('links', [])
+                    if link_to_find in entry_links:
+                        logger.info(f"Payment {payment.payment_hash} already recorded in Fava, skipping")
+                        return
     except Exception as e:
         logger.warning(f"Could not check Fava for duplicate payment: {e}")
diff --git a/views_api.py b/views_api.py
index 590aa9f..d668e01 100644
--- a/views_api.py
+++ b/views_api.py
@@ -1356,24 +1356,34 @@ async def api_record_payment(
     fava = get_fava_client()
 
-    # Query Fava for existing entry with this payment hash link
-    query = f"SELECT * WHERE links ~ 'ln-{data.payment_hash[:16]}'"
+    # Check if payment already recorded by fetching recent entries
+    # Note: We can't use a BQL query with `links ~ 'pattern'` because links is a set type
+    # and BQL doesn't support regex matching on sets. Instead, fetch entries and filter in Python.
+    link_to_find = f"ln-{data.payment_hash[:16]}"
+
     try:
         async with httpx.AsyncClient(timeout=5.0) as client:
+            # Get recent entries from Fava's journal endpoint
             response = await client.get(
-                f"{fava.base_url}/query",
-                params={"query_string": query}
+                f"{fava.base_url}/api/journal",
+                params={"time": ""}  # Get all entries
             )
-            result = response.json()
-            if result.get('data', {}).get('rows'):
-                # Payment already recorded, return existing entry
-                balance_data = await fava.get_user_balance(target_user_id)
-                return {
-                    "journal_entry_id": f"fava-exists-{data.payment_hash[:16]}",
-                    "new_balance": balance_data["balance"],
-                    "message": "Payment already recorded",
-                }
+            if response.status_code == 200:
+                response_data = response.json()
+                entries = response_data.get('entries', [])
+
+                # Check if any entry has our payment link
+                for entry in entries:
+                    entry_links = entry.get('links', [])
+                    if link_to_find in entry_links:
+                        # Payment already recorded, return existing entry
+                        balance_data = await fava.get_user_balance(target_user_id)
+                        return {
+                            "journal_entry_id": f"fava-exists-{data.payment_hash[:16]}",
+                            "new_balance": balance_data["balance"],
+                            "message": "Payment already recorded",
+                        }
     except Exception as e:
         logger.warning(f"Could not check Fava for duplicate payment: {e}")
         # Continue anyway - Fava/Beancount will catch duplicate if it exists
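
Reviewer note (not part of the patch): both hunks introduce the same fetch-and-filter pattern, so here is a minimal standalone sketch of it for reference. It assumes, as the diff does, that Fava exposes a /api/journal endpoint whose JSON response contains an "entries" list and that each entry carries a "links" list; the helper name payment_already_recorded and the values used under __main__ are hypothetical.

# Illustrative sketch only, not part of the patch: the duplicate-check logic
# added in both hunks, extracted into a standalone helper.
import asyncio

import httpx


async def payment_already_recorded(base_url: str, payment_hash: str) -> bool:
    """Return True if a journal entry linked to this payment hash already exists."""
    link_to_find = f"ln-{payment_hash[:16]}"

    async with httpx.AsyncClient(timeout=5.0) as client:
        response = await client.get(
            f"{base_url}/api/journal",  # assumed endpoint, as used in the patch
            params={"time": ""},        # empty time filter: fetch all entries
        )

    if response.status_code != 200:
        # Treat an unreachable or unexpected response as "not recorded"; the caller
        # then relies on Beancount catching the duplicate, as the patch does.
        return False

    entries = response.json().get("entries", [])
    # links arrives as a plain JSON list here, so a simple membership test works
    # even though BQL cannot regex-match the underlying set type.
    return any(link_to_find in entry.get("links", []) for entry in entries)


if __name__ == "__main__":
    # Hypothetical local Fava instance and payment hash, for demonstration only.
    recorded = asyncio.run(payment_already_recorded("http://127.0.0.1:5000", "ab" * 32))
    print("already recorded:", recorded)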