Performance improvements (#19)

* fix: increase the wait time for re-connecting to a relay

* fix: blocking sleep

* fix: remove blocking sleep

* fix: allow multiple filters per request
Vlad Stan authored 2023-09-12 15:06:28 +03:00, committed by GitHub
parent 9d9fbc0189
commit e6624f76bd
5 changed files with 24 additions and 18 deletions

View file

@@ -1,4 +1,4 @@
-import time
+import asyncio
 from typing import List
 from ..relay_manager import RelayManager
@@ -21,7 +21,7 @@ class NostrClient:
     def close(self):
         self.relay_manager.close_connections()
 
-    def subscribe(
+    async def subscribe(
         self,
         callback_events_func=None,
         callback_notices_func=None,
@@ -41,4 +41,4 @@ class NostrClient:
                 if callback_eosenotices_func:
                     callback_eosenotices_func(event_msg)
 
-            time.sleep(0.1)
+            await asyncio.sleep(0.5)
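
The change above turns NostrClient.subscribe into a coroutine and swaps the blocking time.sleep(0.1) for await asyncio.sleep(0.5), so the polling loop yields to the event loop instead of stalling its thread. A minimal, self-contained sketch of that pattern (poll_messages and the list-based pool are illustrative, not the extension's actual API):

import asyncio

async def poll_messages(pool: list):
    # Non-blocking poll loop: await asyncio.sleep yields to the event loop,
    # whereas time.sleep would stall every coroutine sharing this thread.
    while True:
        while pool:
            print("handling:", pool.pop())
        await asyncio.sleep(0.5)

async def main():
    pool = ["event-1", "event-2"]
    poller = asyncio.create_task(poll_messages(pool))
    await asyncio.sleep(1)   # other coroutines keep running while the poller waits
    poller.cancel()

if __name__ == "__main__":
    asyncio.run(main())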

View file

@@ -1,3 +1,4 @@
+import asyncio
 import json
 import time
 from queue import Queue
@@ -95,7 +96,7 @@ class Relay:
             json_str = json.dumps(["REQ", s["id"], s["filters"][0]])
             self.publish(json_str)
 
-    def queue_worker(self):
+    async def queue_worker(self):
         while True:
             if self.connected:
                 try:
@@ -105,7 +106,7 @@ class Relay:
                 except:
                     pass
             else:
-                time.sleep(0.1)
+                await asyncio.sleep(1)
 
             if self.shutdown:
                 logger.warning(f"Closing queue worker for '{self.url}'.")
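
queue_worker above becomes a coroutine as well: it drains the relay's outgoing queue while connected, waits with await asyncio.sleep(1) while disconnected, and checks self.shutdown on every pass. A rough sketch of that shape, assuming a simple stand-in class (OutgoingQueue is illustrative, not the extension's Relay):

import asyncio
from queue import Empty, Queue

class OutgoingQueue:
    def __init__(self):
        self.queue: Queue = Queue()
        self.connected = False
        self.shutdown = False

    def publish(self, message: str):
        print("send:", message)

    async def queue_worker(self):
        while True:
            if self.connected:
                try:
                    # get_nowait keeps this loop responsive; the real worker
                    # can afford a short blocking timeout because it runs on
                    # its own per-thread event loop (see the RelayManager
                    # diff below).
                    self.publish(self.queue.get_nowait())
                except Empty:
                    await asyncio.sleep(0.1)
            else:
                await asyncio.sleep(1)

            if self.shutdown:
                return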

View file

@@ -1,4 +1,5 @@
+import asyncio
 import ssl
 import threading
 import time
@@ -95,20 +96,22 @@ class RelayManager:
         )
         self.threads[relay.url].start()
 
+        def wrap_async_queue_worker():
+            asyncio.run(relay.queue_worker())
+
         self.queue_threads[relay.url] = threading.Thread(
-            target=relay.queue_worker,
+            target=wrap_async_queue_worker,
             name=f"{relay.url}-queue",
            daemon=True,
         )
         self.queue_threads[relay.url].start()
 
     def _restart_relay(self, relay: Relay):
-        if relay.error_threshold_reached:
-            time_since_last_error = time.time() - relay.last_error_date
-            if time_since_last_error < 60 * 60 * 2:  # last day
-                return
-        relay.error_counter = 0
-        relay.error_list = []
+        time_since_last_error = time.time() - relay.last_error_date
+
+        min_wait_time = min(60 * relay.error_counter, 60 * 60 * 24)  # try at least once a day
+        if time_since_last_error < min_wait_time:
+            return
 
         logger.info(f"Restarting connection to relay '{relay.url}'")
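
Two things change above: the queue worker coroutine gets its own event loop on the queue thread via asyncio.run, and the reconnect gate replaces the fixed two-hour wait with a wait that grows with the relay's error count but is capped at 24 hours. A small worked sketch of that wait formula (restart_wait_seconds is an illustrative name):

def restart_wait_seconds(error_counter: int) -> int:
    # One minute per recorded error, but never more than a day,
    # so even a persistently failing relay is retried at least once every 24 hours.
    return min(60 * error_counter, 60 * 60 * 24)

print(restart_wait_seconds(1))     # 60     -> retry after 1 minute
print(restart_wait_seconds(30))    # 1800   -> retry after 30 minutes
print(restart_wait_seconds(5000))  # 86400  -> capped at 24 hours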

View file

@@ -170,13 +170,13 @@ class NostrRouter:
         subscription_id = json_data[1]
         subscription_id_rewritten = urlsafe_short_hash()
         self.original_subscription_ids[subscription_id_rewritten] = subscription_id
-        fltr = json_data[2]
+        fltr = json_data[2:]
         filters = self._marshall_nostr_filters(fltr)
 
         nostr.client.relay_manager.add_subscription(
             subscription_id_rewritten, filters
         )
-        request_rewritten = json.dumps([json_data[0], subscription_id_rewritten, fltr])
+        request_rewritten = json.dumps([json_data[0], subscription_id_rewritten] + fltr)
 
         self.subscriptions.append(subscription_id_rewritten)
         nostr.client.relay_manager.publish_message(request_rewritten)
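
The router change above keeps every element after the subscription id (json_data[2:]) rather than only the first filter, so a REQ carrying several filters (["REQ", <sub_id>, <filter>, <filter>, ...] per NIP-01) is forwarded intact. A small sketch of the rewrite with a hypothetical rewritten id:

import json

# A client request with two filters (NIP-01 allows any number after the id).
json_data = ["REQ", "client-sub-1", {"kinds": [1], "limit": 10}, {"kinds": [4]}]

subscription_id_rewritten = "abc123"   # hypothetical rewritten id

fltr = json_data[2:]                   # keep *all* filters, not just the first
request_rewritten = json.dumps([json_data[0], subscription_id_rewritten] + fltr)

print(request_rewritten)
# ["REQ", "abc123", {"kinds": [1], "limit": 10}, {"kinds": [4]}]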

View file

@@ -66,13 +66,15 @@ async def subscribe_events():
         return
 
-    t = threading.Thread(
-        target=nostr.client.subscribe,
-        args=(
+    def wrap_async_subscribe():
+        asyncio.run(nostr.client.subscribe(
             callback_events,
             callback_notices,
             callback_eose_notices,
-        ),
+        ))
+
+    t = threading.Thread(
+        target=wrap_async_subscribe,
         name="Nostr-event-subscription",
         daemon=True,
     )
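
Both wrappers introduced in this commit (wrap_async_queue_worker and wrap_async_subscribe) rely on the same pattern: asyncio.run gives the coroutine its own event loop on a dedicated daemon thread, so the now-async workers can still be launched from synchronous startup code. A minimal, self-contained sketch of that pattern with illustrative names:

import asyncio
import threading

async def subscribe(on_event):
    # Stand-in for a long-running async subscription loop.
    for i in range(3):
        on_event(f"event-{i}")
        await asyncio.sleep(0.1)

def wrap_async_subscribe():
    # asyncio.run creates and tears down an event loop for this thread only.
    asyncio.run(subscribe(print))

t = threading.Thread(
    target=wrap_async_subscribe,
    name="event-subscription",
    daemon=True,
)
t.start()
t.join()   # wait here only so the example terminates cleanly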