From 7e54d9a20982674cd401b4f145ba84eb5d50241e Mon Sep 17 00:00:00 2001
From: Yonle
Date: Sat, 11 May 2024 19:04:15 +0700
Subject: [PATCH] bouncer: fix duplicates being forwarded to client.

Signed-off-by: Yonle
---
 config.js.example | 5 +++--
 worker_bouncer.js | 4 ++--
 2 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/config.js.example b/config.js.example
index 1dff7d5..ac1afb7 100644
--- a/config.js.example
+++ b/config.js.example
@@ -60,8 +60,9 @@ module.exports = {
   // Maximum Known Events
   // Used for knowing what events has been forwarded to client in order to prevent duplicates to be forwarded.
   //
-  // Setting as 0 will store known events to memory without limits.
-  max_known_events: 1000,
+  // Setting this to 0 will store known events in memory without limit until the subscription is closed.
+  // Reduce this value if memory usage is high, but don't set it too low or duplicates will be forwarded to the client.
+  max_known_events: 0,

   // Wait for every connected relays send EOSE.
   // Could improve accuracy on received events.
diff --git a/worker_bouncer.js b/worker_bouncer.js
index 57f5c03..d39fed2 100644
--- a/worker_bouncer.js
+++ b/worker_bouncer.js
@@ -318,7 +318,7 @@ function newConn(addr, id, reconn_t = 0) {
     if (!client.subalias.hasOwnProperty(data[1])) return;
     data[1] = client.subalias[data[1]];

-    if (client.events[data[1]].hasOwnProperty(data[2]?.id)) return; // No need to transmit once it has been transmitted before.
+    if (client.events[data[1]].has(data[2]?.id)) return; // No need to transmit once it has been transmitted before.
     if (!relay.isCache) bc(["EVENT", data[2]], id, true); // store to cache relay
     const filter = client.mergedFilters[data[1]];
     if (client.pause_subs.has(data[1]) && (filter.since > data[2].created_at) && !relay.isCache) return;
@@ -334,7 +334,7 @@ function newConn(addr, id, reconn_t = 0) {
     if (!relay.isLoadBalancer) client.events[data[1]].add(data[2]?.id);
     parentPort.postMessage({ type: "upstream_msg", id, data: JSON.stringify(data) });

-    if (max_known_events && client.events[data[1]].size > max_known_events)
+    if (max_known_events && client.events[data[1]].size >= max_known_events)
       client.events[data[1]].delete(client.events[data[1]].values().next().value);

     stats._global.rx++;
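
Note on the first worker_bouncer.js hunk: the surrounding .add(), .size and .delete() calls show that client.events[data[1]] is a Set of event IDs, so membership has to be tested with Set.prototype.has(). hasOwnProperty() only checks own properties of the Set object itself and therefore always returned false for stored IDs, which is why duplicate events slipped through to the client. A minimal standalone sketch (the variable names below are illustrative, not taken from the patch):

  const seen = new Set();
  seen.add("eventid1");

  seen.hasOwnProperty("eventid1"); // false - stored values are not own properties of the Set
  seen.has("eventid1");            // true  - correct membership test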
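
The second hunk tightens the eviction check for the bounded known-events buffer. Sets iterate in insertion order, so values().next().value is the oldest remembered ID; deleting it keeps per-subscription memory at no more than max_known_events entries, and switching from > to >= trims the buffer as soon as the limit is reached rather than one insert later. A rough sketch of the pattern under an assumed limit of 3 (remember() is a hypothetical helper, not part of the bouncer):

  const max_known_events = 3; // assumed small limit for illustration
  const known = new Set();

  function remember(id) {
    known.add(id);
    if (max_known_events && known.size >= max_known_events)
      known.delete(known.values().next().value); // evict the oldest known ID
  }

  ["a", "b", "c", "d"].forEach(remember);
  console.log([...known]); // [ 'c', 'd' ] - the oldest IDs were evicted

With the new default of max_known_events: 0 the buffer is unbounded and only released when the subscription closes, trading memory for guaranteed deduplication.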