tornado: Drop WebReloadClientError logic.

The widening of the time between when a process is marked for
reload (at Tornado startup) and when it sends reload events makes it
unlikely-to-impossible that a single `/` request will span both of
them, and thus hit the WebReloadClientError corner case.

Remove it, as it is not worth the complication. The bad behaviour it
attempts to prevent (a reload right after opening `/`) was always
still possible -- if the `/` request completed right before Tornado
restarted -- so it is not clear the extra machinery was ever
worthwhile.
commit 0079688c49 (parent da6b0b1cc6)
Author:    Alex Vandiver
Date:      2024-02-14 19:27:17 +00:00
Committer: Tim Abbott
3 changed files with 72 additions and 178 deletions
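For context, the pattern this commit deletes looked roughly like the sketch below. This is an illustrative condensation, not the actual Zulip code: `register_queue`, `fetch_state`, `drain_events`, and `apply_catchup_events` are hypothetical stand-ins for `request_event_queue`, `fetch_initial_state_data`, `get_user_events`, and `apply_events` (which take many more arguments in reality). The point is the shape of the removed machinery: a sentinel exception plus a retry loop around registration, triggered when a `web_reload_client` event turned up in the initial catch-up batch.

# Illustrative sketch only; these helper names are hypothetical, not Zulip's API.
from typing import Any, Callable, Dict, List


class WebReloadClientError(Exception):
    """Sentinel raised when a web_reload_client event is seen during catch-up."""


def apply_catchup_events(state: Dict[str, Any], events: List[Dict[str, Any]]) -> None:
    for event in events:
        if event["type"] == "web_reload_client":
            # Old behaviour: abort, so the caller re-registers from scratch.
            raise WebReloadClientError
        state.setdefault("applied_event_types", []).append(event["type"])


def register(
    register_queue: Callable[[], str],
    fetch_state: Callable[[str], Dict[str, Any]],
    drain_events: Callable[[str], List[Dict[str, Any]]],
) -> Dict[str, Any]:
    # Old shape of do_events_register: retry the whole registration whenever
    # a reload event raced with the initial state fetch.
    while True:
        queue_id = register_queue()
        state = fetch_state(queue_id)
        try:
            apply_catchup_events(state, drain_events(queue_id))
        except WebReloadClientError:
            continue  # Tornado restarted mid-registration; start over.
        return state

With this commit, the loop and the exception are gone: a `web_reload_client` event seen during catch-up is simply logged as a warning (see the new `elif` branch in `apply_event` below), since the worst case is that the client keeps running the old JS until its next reload.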


@@ -1,6 +1,7 @@
 # See https://zulip.readthedocs.io/en/latest/subsystems/events-system.html for
 # high-level documentation on how this system works.
 import copy
+import logging
 import time
 from typing import Any, Callable, Collection, Dict, Iterable, Mapping, Optional, Sequence, Set
@@ -91,13 +92,6 @@ from zerver.tornado.django_api import get_user_events, request_event_queue
 from zproject.backends import email_auth_enabled, password_auth_enabled
-class WebReloadClientError(Exception):
-    """Special error for handling web_reload_client events in
-    apply_events.
-    """
 def add_realm_logo_fields(state: Dict[str, Any], realm: Realm) -> None:
     state["realm_logo_url"] = get_realm_logo_url(realm, night=False)
     state["realm_logo_source"] = get_realm_logo_source(realm, night=False)
@@ -712,8 +706,6 @@ def apply_events(
     user_list_incomplete: bool,
 ) -> None:
     for event in events:
-        if event["type"] == "web_reload_client":
-            raise WebReloadClientError
         if fetch_event_types is not None and event["type"] not in fetch_event_types:
             # TODO: continuing here is not, most precisely, correct.
             # In theory, an event of one type, e.g. `realm_user`,
@@ -1523,6 +1515,14 @@ def apply_event(
state["user_topics"].append({x: event[x] for x in fields})
elif event["type"] == "has_zoom_token":
state["has_zoom_token"] = event["value"]
elif event["type"] == "web_reload_client":
# This is an unlikely race, where the queue was created with a
# previous Tornado process, which restarted, and subsequently
# was told by restart-server to tell its old clients to
# reload. We warn, since we do not expect this race to be
# possible, but the worst expected outcome is that the client
# retains the old JS instead of reloading.
logging.warning("Got a web_reload_client event during apply_events")
else:
raise AssertionError("Unexpected event type {}".format(event["type"]))
@@ -1600,68 +1600,57 @@ def do_events_register(
     legacy_narrow = [[nt.operator, nt.operand] for nt in narrow]
-    while True:
-        # Note that we pass event_types, not fetch_event_types here, since
-        # that's what controls which future events are sent.
-        queue_id = request_event_queue(
-            user_profile,
-            user_client,
-            apply_markdown,
-            client_gravatar,
-            slim_presence,
-            queue_lifespan_secs,
-            event_types,
-            all_public_streams,
-            narrow=legacy_narrow,
-            bulk_message_deletion=bulk_message_deletion,
-            stream_typing_notifications=stream_typing_notifications,
-            user_settings_object=user_settings_object,
-            pronouns_field_type_supported=pronouns_field_type_supported,
-            linkifier_url_template=linkifier_url_template,
-            user_list_incomplete=user_list_incomplete,
-        )
+    # Note that we pass event_types, not fetch_event_types here, since
+    # that's what controls which future events are sent.
+    queue_id = request_event_queue(
+        user_profile,
+        user_client,
+        apply_markdown,
+        client_gravatar,
+        slim_presence,
+        queue_lifespan_secs,
+        event_types,
+        all_public_streams,
+        narrow=legacy_narrow,
+        bulk_message_deletion=bulk_message_deletion,
+        stream_typing_notifications=stream_typing_notifications,
+        user_settings_object=user_settings_object,
+        pronouns_field_type_supported=pronouns_field_type_supported,
+        linkifier_url_template=linkifier_url_template,
+        user_list_incomplete=user_list_incomplete,
+    )
-        if queue_id is None:
-            raise JsonableError(_("Could not allocate event queue"))
+    if queue_id is None:
+        raise JsonableError(_("Could not allocate event queue"))
-        ret = fetch_initial_state_data(
-            user_profile,
-            event_types=event_types_set,
-            queue_id=queue_id,
-            client_gravatar=client_gravatar,
-            user_avatar_url_field_optional=user_avatar_url_field_optional,
-            user_settings_object=user_settings_object,
-            slim_presence=slim_presence,
-            include_subscribers=include_subscribers,
-            include_streams=include_streams,
-            pronouns_field_type_supported=pronouns_field_type_supported,
-            linkifier_url_template=linkifier_url_template,
-            user_list_incomplete=user_list_incomplete,
-        )
+    ret = fetch_initial_state_data(
+        user_profile,
+        event_types=event_types_set,
+        queue_id=queue_id,
+        client_gravatar=client_gravatar,
+        user_avatar_url_field_optional=user_avatar_url_field_optional,
+        user_settings_object=user_settings_object,
+        slim_presence=slim_presence,
+        include_subscribers=include_subscribers,
+        include_streams=include_streams,
+        pronouns_field_type_supported=pronouns_field_type_supported,
+        linkifier_url_template=linkifier_url_template,
+        user_list_incomplete=user_list_incomplete,
+    )
-        # Apply events that came in while we were fetching initial data
-        events = get_user_events(user_profile, queue_id, -1)
-        try:
-            apply_events(
-                user_profile,
-                state=ret,
-                events=events,
-                fetch_event_types=fetch_event_types,
-                client_gravatar=client_gravatar,
-                slim_presence=slim_presence,
-                include_subscribers=include_subscribers,
-                linkifier_url_template=linkifier_url_template,
-                user_list_incomplete=user_list_incomplete,
-            )
-        except WebReloadClientError:
-            # This represents a rare race condition, where Tornado
-            # restarted (and sent `restart` events) while we were waiting
-            # for fetch_initial_state_data to return. To avoid the client
-            # needing to reload shortly after loading, we recursively call
-            # do_events_register here.
-            continue
-        else:
-            break
+    # Apply events that came in while we were fetching initial data
+    events = get_user_events(user_profile, queue_id, -1)
+    apply_events(
+        user_profile,
+        state=ret,
+        events=events,
+        fetch_event_types=fetch_event_types,
+        client_gravatar=client_gravatar,
+        slim_presence=slim_presence,
+        include_subscribers=include_subscribers,
+        linkifier_url_template=linkifier_url_template,
+        user_list_incomplete=user_list_incomplete,
+    )
     post_process_state(user_profile, ret, notification_settings_null)


@@ -1,5 +1,5 @@
 import time
-from typing import Any, Callable, Dict, List, Optional
+from typing import Any, Callable, Dict, List
 from unittest import mock
 from urllib.parse import urlsplit
@@ -41,12 +41,8 @@ from zerver.tornado.event_queue import (
     send_web_reload_client_events,
 )
 from zerver.tornado.exceptions import BadEventQueueIdError
-from zerver.tornado.views import get_events, get_events_backend
-from zerver.views.events_register import (
-    _default_all_public_streams,
-    _default_narrow,
-    events_register_backend,
-)
+from zerver.tornado.views import get_events
+from zerver.views.events_register import _default_all_public_streams, _default_narrow
 class EventsEndpointTest(ZulipTestCase):
@@ -1113,28 +1109,7 @@ class ClientDescriptorsTest(ZulipTestCase):
         )
-class WebReloadClientsTest(ZulipTestCase):
-    def tornado_call(
-        self,
-        view_func: Callable[[HttpRequest, UserProfile], HttpResponse],
-        user_profile: UserProfile,
-        post_data: Dict[str, Any],
-        client_name: Optional[str] = None,
-        user_agent: Optional[str] = None,
-    ) -> HttpResponse:
-        meta_data: Optional[Dict[str, Any]] = None
-        if user_agent is not None:
-            meta_data = {"HTTP_USER_AGENT": user_agent}
-        request = HostRequestMock(
-            post_data,
-            user_profile,
-            client_name=client_name,
-            tornado_handler=dummy_handler,
-            meta_data=meta_data,
-        )
-        return view_func(request, user_profile)
+class ReloadWebClientsTest(ZulipTestCase):
     def test_web_reload_clients(self) -> None:
         hamlet = self.example_user("hamlet")
         realm = hamlet.realm
@@ -1172,79 +1147,6 @@ class WebReloadClientsTest(ZulipTestCase):
             ),
         )
-    def test_web_reload_client_event_recursive_call_logic(self) -> None:
-        # This is a test for a subtle corner case; see the comments
-        # around WebReloadClientError for details.
-        hamlet = self.example_user("hamlet")
-        realm = hamlet.realm
-        # Set up an empty event queue
-        clear_client_event_queues_for_testing()
-        queue_data = dict(
-            all_public_streams=False,
-            apply_markdown=True,
-            client_gravatar=True,
-            client_type_name="website",
-            event_types=None,
-            last_connection_time=time.time(),
-            queue_timeout=0,
-            realm_id=realm.id,
-            user_profile_id=hamlet.id,
-        )
-        client = allocate_client_descriptor(queue_data)
-        # Add a reload event to it.
-        send_web_reload_client_events()
-        # Make a second queue after the reload events were sent.
-        second_client = allocate_client_descriptor(queue_data)
-        # Fetch the reload event just sent above, without removing it
-        # from the queue. We will use this as a mock return value in
-        # get_user_events.
-        reload_event = orjson.loads(
-            self.tornado_call(
-                get_events_backend,
-                hamlet,
-                post_data={
-                    "queue_id": client.event_queue.id,
-                    "last_event_id": -1,
-                    "dont_block": "true",
-                    "user_profile_id": hamlet.id,
-                    "secret": settings.SHARED_SECRET,
-                    "client": "internal",
-                },
-                client_name="internal",
-            ).content
-        )["events"]
-        # Now the tricky part: We call events_register_backend,
-        # arranging it so that the first `get_user_events` call
-        # returns our reload event (triggering the recursive
-        # behavior), but the second (with a new queue) returns no
-        # events.
-        #
-        # Because get_user_events always returns [] in tests, we need
-        # to mock its return value as well; in an ideal world, we
-        # would only need to mock client / second_client.
-        with mock.patch(
-            "zerver.lib.events.request_event_queue",
-            side_effect=[client.event_queue.id, second_client.event_queue.id],
-        ), mock.patch("zerver.lib.events.get_user_events", side_effect=[reload_event, []]):
-            self.tornado_call(
-                events_register_backend,
-                hamlet,
-                {
-                    "queue_id": client.event_queue.id,
-                    "user_client": "website",
-                    "last_event_id": -1,
-                    "dont_block": orjson.dumps(True).decode(),
-                },
-                client_name="website",
-                user_agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64)",
-            )
 class FetchQueriesTest(ZulipTestCase):
     def test_queries(self) -> None:


@@ -200,12 +200,7 @@ from zerver.lib.event_schema import (
     check_user_status,
     check_user_topic,
 )
-from zerver.lib.events import (
-    WebReloadClientError,
-    apply_events,
-    fetch_initial_state_data,
-    post_process_state,
-)
+from zerver.lib.events import apply_events, fetch_initial_state_data, post_process_state
 from zerver.lib.markdown import render_message_markdown
 from zerver.lib.mention import MentionBackend, MentionData
 from zerver.lib.muted_users import get_mute_object
@@ -3468,8 +3463,16 @@ class NormalActionsTest(BaseAction):
             num_events=0,
             state_change_expected=False,
         )
-        with self.assertRaises(WebReloadClientError):
-            self.verify_action(lambda: send_web_reload_client_events(), client_is_old=True)
+        with self.assertLogs(level="WARNING") as logs:
+            self.verify_action(
+                lambda: send_web_reload_client_events(),
+                client_is_old=True,
+                num_events=1,
+                state_change_expected=False,
+            )
+        self.assertEqual(
+            logs.output, ["WARNING:root:Got a web_reload_client event during apply_events"]
+        )
     def test_display_setting_event_not_sent(self) -> None:
         events = self.verify_action(