From 624e1a85ce2142b3b553633050e6435beb01c6ce Mon Sep 17 00:00:00 2001
From: Tim Abbott
Date: Fri, 2 Feb 2024 13:11:41 -0800
Subject: [PATCH] message_fetch: Add small backfill delay.

This should help reduce the risk of hitting rate limits when users
have a very large number of messages to fetch via this mechanism.

Inline the `messages` variable that was only used in one place while
we're touching this.
---
 web/src/message_fetch.js        | 26 ++++++++++++++++----------
 web/tests/message_fetch.test.js |  6 ++++++
 2 files changed, 22 insertions(+), 10 deletions(-)

diff --git a/web/src/message_fetch.js b/web/src/message_fetch.js
index 9eb6b23317..88067fbdac 100644
--- a/web/src/message_fetch.js
+++ b/web/src/message_fetch.js
@@ -38,6 +38,9 @@ const consts = {
     narrowed_view_forward_batch_size: 100,
     recent_view_fetch_more_batch_size: 1000,
     catch_up_batch_size: 1000,
+    // Delay in milliseconds after processing a catch-up request
+    // before sending the next one.
+    catch_up_backfill_delay: 150,
 };
 
 function process_result(data, opts) {
@@ -563,16 +566,19 @@ export function initialize(home_view_loaded) {
 
         // If we fall through here, we need to keep fetching more data, and
         // we'll call back to the function we're in.
-        const messages = data.messages;
-        const latest_id = messages.at(-1).id;
-
-        load_messages({
-            anchor: latest_id,
-            num_before: 0,
-            num_after: consts.catch_up_batch_size,
-            msg_list: message_lists.home,
-            cont: load_more,
-        });
+        //
+        // But we do it with a bit of delay, to reduce risk that we
+        // hit rate limits with these backfills.
+        const latest_id = data.messages.at(-1).id;
+        setTimeout(() => {
+            load_messages({
+                anchor: latest_id,
+                num_before: 0,
+                num_after: consts.catch_up_batch_size,
+                msg_list: message_lists.home,
+                cont: load_more,
+            });
+        }, consts.catch_up_backfill_delay);
     }
 
     let anchor;
diff --git a/web/tests/message_fetch.test.js b/web/tests/message_fetch.test.js
index 80468b36b3..ae2c55a841 100644
--- a/web/tests/message_fetch.test.js
+++ b/web/tests/message_fetch.test.js
@@ -222,6 +222,12 @@ function forward_fill_step() {
     let fetch;
 
     self.prep = () => {
+        /* Don't wait for the timeout before recursively calling `load_messages`. */
+        const expected_delay = 150;
+        set_global("setTimeout", (f, delay) => {
+            assert.equal(delay, expected_delay);
+            f();
+        });
         fetch = config_fake_channel({
             expected_opts_data: initialize_data.forward_fill.req,
         });