Performance improvements (#19)
* fix: increase the wait time for re-connecting to a relay
* fix: blocking sleep
* fix: remove blocking sleep
* fix: allow multiple filters per request
parent 9d9fbc0189
commit e6624f76bd
5 changed files with 24 additions and 18 deletions

@@ -1,4 +1,4 @@
-import time
+import asyncio
 from typing import List
 
 from ..relay_manager import RelayManager
@@ -21,7 +21,7 @@ class NostrClient:
     def close(self):
         self.relay_manager.close_connections()
 
-    def subscribe(
+    async def subscribe(
         self,
         callback_events_func=None,
         callback_notices_func=None,
@@ -41,4 +41,4 @@
             if callback_eosenotices_func:
                 callback_eosenotices_func(event_msg)
 
-            time.sleep(0.1)
+            await asyncio.sleep(0.5)
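
These hunks turn subscribe into a coroutine and replace the blocking time.sleep(0.1) with await asyncio.sleep(0.5): the pause now yields to the event loop instead of stalling the whole thread. A minimal sketch of why that matters (all names here are illustrative, not from the codebase):

import asyncio

async def tick(name: str, delay: float) -> None:
    # Each await suspends only this coroutine; the event loop stays free.
    for _ in range(3):
        await asyncio.sleep(delay)
        print(name)

async def main() -> None:
    # Both tickers make progress concurrently. A blocking time.sleep()
    # inside either one would freeze both until it returned.
    await asyncio.gather(tick("fast", 0.1), tick("slow", 0.5))

asyncio.run(main())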

@@ -1,3 +1,4 @@
+import asyncio
 import json
 import time
 from queue import Queue
@@ -95,7 +96,7 @@ class Relay:
             json_str = json.dumps(["REQ", s["id"], s["filters"][0]])
             self.publish(json_str)
 
-    def queue_worker(self):
+    async def queue_worker(self):
         while True:
             if self.connected:
                 try:
@@ -105,7 +106,7 @@ class Relay:
                 except:
                     pass
             else:
-                time.sleep(0.1)
+                await asyncio.sleep(1)
 
             if self.shutdown:
                 logger.warning(f"Closing queue worker for '{self.url}'.")
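
queue_worker gets the same treatment: it becomes a coroutine, and the disconnected branch now waits a full second without blocking. A standalone sketch of the pattern, assuming a thread-safe Queue of outgoing messages and an is_connected predicate (both hypothetical stand-ins for the Relay's own state):

import asyncio
from queue import Empty, Queue

async def queue_worker(outgoing: Queue, is_connected) -> None:
    # Sketch of the worker loop: send while connected, otherwise back off
    # without blocking the event loop that runs this coroutine.
    while True:
        if is_connected():
            try:
                print("send:", outgoing.get(timeout=1))
            except Empty:
                pass
        else:
            await asyncio.sleep(1)  # was the blocking time.sleep(0.1)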

@@ -1,4 +1,5 @@
+import asyncio
 import ssl
 import threading
 import time
@@ -95,20 +96,22 @@ class RelayManager:
         )
         self.threads[relay.url].start()
 
+        def wrap_async_queue_worker():
+            asyncio.run(relay.queue_worker())
+
         self.queue_threads[relay.url] = threading.Thread(
-            target=relay.queue_worker,
+            target=wrap_async_queue_worker,
             name=f"{relay.url}-queue",
             daemon=True,
         )
         self.queue_threads[relay.url].start()
 
     def _restart_relay(self, relay: Relay):
         if relay.error_threshold_reached:
             time_since_last_error = time.time() - relay.last_error_date
-            if time_since_last_error < 60 * 60 * 2:  # last day
+            min_wait_time = min(60 * relay.error_counter, 60 * 60 * 24)  # try at least once a day
+            if time_since_last_error < min_wait_time:
                 return
         relay.error_counter = 0
         relay.error_list = []
 
         logger.info(f"Restarting connection to relay '{relay.url}'")
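
Two changes here: queue_worker (now a coroutine) is wrapped in asyncio.run so it can run inside its own daemon thread, and the fixed two-hour reconnect cutoff becomes a wait that scales with the error count, capped at one day. The retry policy in isolation (function name is illustrative):

def reconnect_wait_seconds(error_counter: int) -> int:
    # One minute of extra wait per consecutive error, capped at one day,
    # so even a persistently failing relay is retried at least daily.
    return min(60 * error_counter, 60 * 60 * 24)

assert reconnect_wait_seconds(1) == 60            # first failure: retry after a minute
assert reconnect_wait_seconds(120) == 7_200       # two hours after 120 failures
assert reconnect_wait_seconds(10_000) == 86_400   # never wait longer than a day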

@@ -170,13 +170,13 @@ class NostrRouter:
             subscription_id = json_data[1]
             subscription_id_rewritten = urlsafe_short_hash()
             self.original_subscription_ids[subscription_id_rewritten] = subscription_id
-            fltr = json_data[2]
+            fltr = json_data[2:]
             filters = self._marshall_nostr_filters(fltr)
 
             nostr.client.relay_manager.add_subscription(
                 subscription_id_rewritten, filters
             )
-            request_rewritten = json.dumps([json_data[0], subscription_id_rewritten, fltr])
+            request_rewritten = json.dumps([json_data[0], subscription_id_rewritten] + fltr)
 
             self.subscriptions.append(subscription_id_rewritten)
             nostr.client.relay_manager.publish_message(request_rewritten)
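
Under NIP-01 a REQ message may carry any number of filters after the subscription id: ["REQ", <sub_id>, <filter1>, <filter2>, ...]. Taking json_data[2] kept only the first filter; slicing with json_data[2:] preserves them all. A small self-contained illustration ("new-sub-id" stands in for the urlsafe_short_hash() output):

import json

# A REQ with two filters after the subscription id.
json_data = json.loads('["REQ", "abc", {"kinds": [1]}, {"kinds": [4], "limit": 10}]')

fltr = json_data[2:]  # every filter, as a list (json_data[2] kept only the first)

# List concatenation keeps each filter a separate top-level element of the
# rewritten REQ, instead of nesting the list as one element.
rewritten = json.dumps([json_data[0], "new-sub-id"] + fltr)
print(rewritten)  # ["REQ", "new-sub-id", {"kinds": [1]}, {"kinds": [4], "limit": 10}]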

tasks.py
@@ -66,13 +66,15 @@ async def subscribe_events():
 
         return
 
-    t = threading.Thread(
-        target=nostr.client.subscribe,
-        args=(
+    def wrap_async_subscribe():
+        asyncio.run(nostr.client.subscribe(
             callback_events,
             callback_notices,
             callback_eose_notices,
-        ),
+        ))
+
+    t = threading.Thread(
+        target=wrap_async_subscribe,
         name="Nostr-event-subscription",
         daemon=True,
     )
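
Since subscribe is now a coroutine, handing it straight to threading.Thread would no longer work; the wrapper starts a dedicated event loop inside the thread via asyncio.run. The pattern in isolation (the subscribe body below is a hypothetical stand-in for nostr.client.subscribe):

import asyncio
import threading

async def subscribe() -> None:
    # Stand-in for the real subscription coroutine.
    while True:
        await asyncio.sleep(0.5)

def wrap_async_subscribe() -> None:
    # threading.Thread cannot execute a coroutine function directly; calling
    # one merely creates a coroutine object. asyncio.run() gives the
    # coroutine its own event loop inside this thread.
    asyncio.run(subscribe())

t = threading.Thread(target=wrap_async_subscribe, name="example-subscription", daemon=True)
t.start()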