Mirror of https://github.com/zulip/zulip.git
refactor: Move all_topics_in_cache.
This is a pure code move. The diff is a bit cluttered by the need to add with_field when we moved the test.
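Because the moved test now lives alongside the stream_topic_history tests, it can no longer assign message_list.all directly; instead it wraps the affected assertions in with_field from ../zjsunit/namespace. A rough sketch of that pattern is below (inferred from the diff that follows; message_list_all here is a stand-in object, not the full stub the test uses):

const {with_field, zrequire} = require("../zjsunit/namespace");

const message_list = zrequire("message_list");

// Stand-in for message_list.all; the real stub in the test below also
// provides data.fetch_status and first().
const message_list_all = {
    empty: () => false,
};

// As used here, with_field temporarily installs message_list_all as
// message_list.all, runs the callback, and then restores the original
// value, so later tests are not affected by the override.
with_field(message_list, "all", message_list_all, () => {
    // assertions that read message_list.all go here
});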
@@ -13,7 +13,6 @@ const color_data = zrequire("color_data");
 const stream_topic_history = zrequire("stream_topic_history");
 const people = zrequire("people");
 const stream_data = zrequire("stream_data");
-const message_list = zrequire("message_list");
 const settings_config = zrequire("settings_config");
 
 const me = {
@@ -809,37 +808,3 @@ test("get_invite_stream_data", () => {
     });
     assert.deepEqual(stream_data.get_invite_stream_data(), expected_list);
 });
-
-test("all_topics_in_cache", (override) => {
-    // Add a new stream with first_message_id set.
-    const general = {
-        name: "general",
-        stream_id: 21,
-        first_message_id: null,
-    };
-    const messages = [
-        {id: 1, stream_id: 21},
-        {id: 2, stream_id: 21},
-        {id: 3, stream_id: 21},
-    ];
-    const sub = stream_data.create_sub_from_server_data(general);
-
-    assert.equal(stream_data.all_topics_in_cache(sub), false);
-
-    message_list.all.data.clear();
-    message_list.all.data.add_messages(messages);
-
-    let has_found_newest = false;
-
-    override(message_list.all.data.fetch_status, "has_found_newest", () => has_found_newest);
-
-    assert.equal(stream_data.all_topics_in_cache(sub), false);
-    has_found_newest = true;
-    assert.equal(stream_data.all_topics_in_cache(sub), true);
-
-    sub.first_message_id = 0;
-    assert.equal(stream_data.all_topics_in_cache(sub), false);
-
-    sub.first_message_id = 2;
-    assert.equal(stream_data.all_topics_in_cache(sub), true);
-});
@@ -2,13 +2,13 @@
 
 const {strict: assert} = require("assert");
 
-const {mock_esm, zrequire} = require("../zjsunit/namespace");
+const {mock_esm, with_field, zrequire} = require("../zjsunit/namespace");
 const {run_test} = require("../zjsunit/test");
 
 const channel = mock_esm("../../static/js/channel");
-const message_list = mock_esm("../../static/js/message_list");
 const message_util = mock_esm("../../static/js/message_util");
 
+const message_list = zrequire("message_list");
 const unread = zrequire("unread");
 const stream_data = zrequire("stream_data");
 const stream_topic_history = zrequire("stream_topic_history");
@@ -128,7 +128,7 @@ test("is_complete_for_stream_id", () => {
     };
     stream_data.add_sub(sub);
 
-    message_list.all = {
+    const message_list_all = {
         empty: () => false,
         data: {
             fetch_status: {
@@ -138,6 +138,7 @@ test("is_complete_for_stream_id", () => {
         first: () => ({id: 5}),
     };
 
+    with_field(message_list, "all", message_list_all, () => {
     assert.equal(stream_topic_history.is_complete_for_stream_id(sub.stream_id), true);
 
     // Now simulate a more recent message id.
@@ -153,6 +154,7 @@ test("is_complete_for_stream_id", () => {
 
     assert.equal(stream_topic_history.is_complete_for_stream_id(sub.stream_id), false);
     });
+});
 
 test("server_history", () => {
     const sub = {
@@ -336,3 +338,37 @@ test("server_history_end_to_end", () => {
     });
     assert(on_success_called);
 });
+
+test("all_topics_in_cache", (override) => {
+    // Add a new stream with first_message_id set.
+    const general = {
+        name: "general",
+        stream_id: 21,
+        first_message_id: null,
+    };
+    const messages = [
+        {id: 1, stream_id: 21},
+        {id: 2, stream_id: 21},
+        {id: 3, stream_id: 21},
+    ];
+    const sub = stream_data.create_sub_from_server_data(general);
+
+    assert.equal(stream_topic_history.all_topics_in_cache(sub), false);
+
+    message_list.all.data.clear();
+    message_list.all.data.add_messages(messages);
+
+    let has_found_newest = false;
+
+    override(message_list.all.data.fetch_status, "has_found_newest", () => has_found_newest);
+
+    assert.equal(stream_topic_history.all_topics_in_cache(sub), false);
+    has_found_newest = true;
+    assert.equal(stream_topic_history.all_topics_in_cache(sub), true);
+
+    sub.first_message_id = 0;
+    assert.equal(stream_topic_history.all_topics_in_cache(sub), false);
+
+    sub.first_message_id = 2;
+    assert.equal(stream_topic_history.all_topics_in_cache(sub), true);
+});
@@ -3,7 +3,6 @@ import * as color_data from "./color_data";
 import {FoldDict} from "./fold_dict";
 import * as hash_util from "./hash_util";
 import {i18n} from "./i18n";
-import * as message_list from "./message_list";
 import {page_params} from "./page_params";
 import * as peer_data from "./peer_data";
 import * as people from "./people";
@@ -602,39 +601,6 @@ export function get_invite_only(stream_name) {
     return sub.invite_only;
 }
 
-export function all_topics_in_cache(sub) {
-    // Checks whether this browser's cache of contiguous messages
-    // (used to locally render narrows) in message_list.all has all
-    // messages from a given stream, and thus all historical topics
-    // for it. Because message_list.all is a range, we just need to
-    // compare it to the range of history on the stream.
-
-    // If the cache isn't initialized, it's a clear false.
-    if (message_list.all === undefined || message_list.all.empty()) {
-        return false;
-    }
-
-    // If the cache doesn't have the latest messages, we can't be sure
-    // we have all topics.
-    if (!message_list.all.data.fetch_status.has_found_newest()) {
-        return false;
-    }
-
-    if (sub.first_message_id === null) {
-        // If the stream has no message history, we have it all
-        // vacuously. This should be a very rare condition, since
-        // stream creation sends a message.
-        return true;
-    }
-
-    // Now, we can just compare the first cached message to the first
-    // message ID in the stream; if it's older, we're good, otherwise,
-    // we might be missing the oldest topics in this stream in our
-    // cache.
-    const first_cached_message = message_list.all.first();
-    return first_cached_message.id <= sub.first_message_id;
-}
-
 export function set_realm_default_streams(realm_default_streams) {
     default_stream_ids.clear();
 
@@ -1,5 +1,6 @@
 import * as channel from "./channel";
 import {FoldDict} from "./fold_dict";
+import * as message_list from "./message_list";
 import * as message_util from "./message_util";
 import * as stream_data from "./stream_data";
 import * as unread from "./unread";
@@ -7,20 +8,46 @@ import * as unread from "./unread";
 const stream_dict = new Map(); // stream_id -> PerStreamHistory object
 const fetched_stream_ids = new Set();
 
+export function all_topics_in_cache(sub) {
+    // Checks whether this browser's cache of contiguous messages
+    // (used to locally render narrows) in message_list.all has all
+    // messages from a given stream, and thus all historical topics
+    // for it. Because message_list.all is a range, we just need to
+    // compare it to the range of history on the stream.
+
+    // If the cache isn't initialized, it's a clear false.
+    if (message_list.all === undefined || message_list.all.empty()) {
+        return false;
+    }
+
+    // If the cache doesn't have the latest messages, we can't be sure
+    // we have all topics.
+    if (!message_list.all.data.fetch_status.has_found_newest()) {
+        return false;
+    }
+
+    if (sub.first_message_id === null) {
+        // If the stream has no message history, we have it all
+        // vacuously. This should be a very rare condition, since
+        // stream creation sends a message.
+        return true;
+    }
+
+    // Now, we can just compare the first cached message to the first
+    // message ID in the stream; if it's older, we're good, otherwise,
+    // we might be missing the oldest topics in this stream in our
+    // cache.
+    const first_cached_message = message_list.all.first();
+    return first_cached_message.id <= sub.first_message_id;
+}
+
 export function is_complete_for_stream_id(stream_id) {
     if (fetched_stream_ids.has(stream_id)) {
         return true;
     }
 
-    /*
-        TODO: We should possibly move all_topics_in_cache
-        from stream_data to here, since the function
-        mostly looks at message_list.all and has little
-        to do with typical stream_data stuff. (We just
-        need sub.first_message_id.)
-    */
     const sub = stream_data.get_sub_by_id(stream_id);
-    const in_cache = stream_data.all_topics_in_cache(sub);
+    const in_cache = all_topics_in_cache(sub);
 
     if (in_cache) {
         /*