message_fetch: Add small backfill delay.

This should help reduce the risk of hitting rate limits when users
have a very large number of messages to fetch via this mechanism.

Inline the `messages` variable that was only used in one place while
we're touching this.
Author: Tim Abbott
Date: 2024-02-02 13:11:41 -08:00
Parent: 370afd6464
Commit: 624e1a85ce
2 changed files with 22 additions and 10 deletions

The change to the web client's `message_fetch` module:

@@ -38,6 +38,9 @@ const consts = {
     narrowed_view_forward_batch_size: 100,
     recent_view_fetch_more_batch_size: 1000,
     catch_up_batch_size: 1000,
+    // Delay in milliseconds after processing a catch-up request
+    // before sending the next one.
+    catch_up_backfill_delay: 150,
 };
 
 function process_result(data, opts) {
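
For scale: with `catch_up_batch_size` at 1,000 messages per request, a client
that is 100,000 messages behind needs 100 requests, so the new pause adds
roughly 100 × 150ms = 15 seconds to the total catch-up time while spreading
those requests out.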
@@ -563,16 +566,19 @@ export function initialize(home_view_loaded) {
         // If we fall through here, we need to keep fetching more data, and
         // we'll call back to the function we're in.
-        const messages = data.messages;
-        const latest_id = messages.at(-1).id;
-        load_messages({
-            anchor: latest_id,
-            num_before: 0,
-            num_after: consts.catch_up_batch_size,
-            msg_list: message_lists.home,
-            cont: load_more,
-        });
+        //
+        // But we do it with a bit of delay, to reduce risk that we
+        // hit rate limits with these backfills.
+        const latest_id = data.messages.at(-1).id;
+        setTimeout(() => {
+            load_messages({
+                anchor: latest_id,
+                num_before: 0,
+                num_after: consts.catch_up_batch_size,
+                msg_list: message_lists.home,
+                cont: load_more,
+            });
+        }, consts.catch_up_backfill_delay);
     }
 
     let anchor;
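
Taken together with the `cont: load_more` wiring, this turns the catch-up
fetch into a self-throttling loop: each response schedules the next request
150ms later until the server signals the client is caught up. Condensed, the
loop now looks roughly like this (a sketch; the function's other exit paths
are omitted, and `found_newest` is assumed to be the response flag that marks
the end of the backfill):

    function load_more(data) {
        if (data.found_newest) {
            return; // Caught up; stop the backfill loop.
        }
        const latest_id = data.messages.at(-1).id;
        setTimeout(() => {
            load_messages({
                anchor: latest_id,
                num_before: 0,
                num_after: consts.catch_up_batch_size,
                msg_list: message_lists.home,
                cont: load_more, // Re-enter this function on the next response.
            });
        }, consts.catch_up_backfill_delay);
    }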

And the matching update to its test suite:

@@ -222,6 +222,12 @@ function forward_fill_step() {
     let fetch;
     self.prep = () => {
+        /* Don't wait for the timeout before recursively calling `load_messages`. */
+        const expected_delay = 150;
+        set_global("setTimeout", (f, delay) => {
+            assert.equal(delay, expected_delay);
+            f();
+        });
         fetch = config_fake_channel({
             expected_opts_data: initialize_data.forward_fill.req,
         });
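
The stubbed `setTimeout` keeps the test synchronous while still checking the
new contract: it asserts that the code under test requested the expected
150ms delay (matching `catch_up_backfill_delay`) and then invokes the
callback immediately, so the recursive `load_messages` call still happens
inline within the test step.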