mirror of
https://github.com/zulip/zulip.git
synced 2025-11-03 05:23:35 +00:00
events: Add support for fetch_event_types option to events_register.
This new feature makes it possible to request a different set of initial data from the event_types an API client is subscribing to. It is primarily useful for mobile apps, where bandwidth constraints might mean one wants to subscribe to events for a broader set of data than is initially fetched, planning to fetch the current state in future requests.
This commit is contained in:
@@ -170,9 +170,20 @@ def fetch_initial_state_data(user_profile, event_types, queue_id,
|
||||
|
||||
return state
|
||||
|
||||
def apply_events(state, events, user_profile, include_subscribers=True):
|
||||
# type: (Dict[str, Any], Iterable[Dict[str, Any]], UserProfile, bool) -> None
|
||||
def apply_events(state, events, user_profile, include_subscribers=True,
|
||||
fetch_event_types=None):
|
||||
# type: (Dict[str, Any], Iterable[Dict[str, Any]], UserProfile, bool, Optional[Iterable[str]]) -> None
|
||||
for event in events:
|
||||
if fetch_event_types is not None and event['type'] not in fetch_event_types:
|
||||
# TODO: continuing here is not, most precisely, correct.
|
||||
# In theory, an event of one type, e.g. `realm_user`,
|
||||
# could modify state that doesn't come from that
|
||||
# `fetch_event_types` value, e.g. the `our_person` part of
|
||||
# that code path. But it should be extremely rare, and
|
||||
# fixing that will require a nontrivial refactor of
|
||||
# `apply_event`. For now, be careful in your choice of
|
||||
# `fetch_event_types`.
|
||||
continue
|
||||
apply_event(state, event, user_profile, include_subscribers)
|
||||
|
||||
def apply_event(state, event, user_profile, include_subscribers):
|
||||
@@ -419,20 +430,27 @@ def apply_event(state, event, user_profile, include_subscribers):
|
||||
|
||||
def do_events_register(user_profile, user_client, apply_markdown=True,
|
||||
event_types=None, queue_lifespan_secs=0, all_public_streams=False,
|
||||
include_subscribers=True, narrow=[]):
|
||||
# type: (UserProfile, Client, bool, Optional[Iterable[str]], int, bool, bool, Iterable[Sequence[Text]]) -> Dict[str, Any]
|
||||
include_subscribers=True, narrow=[], fetch_event_types=None):
|
||||
# type: (UserProfile, Client, bool, Optional[Iterable[str]], int, bool, bool, Iterable[Sequence[Text]], Optional[Iterable[str]]) -> Dict[str, Any]
|
||||
|
||||
# Technically we don't need to check this here because
|
||||
# build_narrow_filter will check it, but it's nicer from an error
|
||||
# handling perspective to do it before contacting Tornado
|
||||
check_supported_events_narrow_filter(narrow)
|
||||
|
||||
# Note that we pass event_types, not fetch_event_types here, since
|
||||
# that's what controls which future events are sent.
|
||||
queue_id = request_event_queue(user_profile, user_client, apply_markdown,
|
||||
queue_lifespan_secs, event_types, all_public_streams,
|
||||
narrow=narrow)
|
||||
|
||||
if queue_id is None:
|
||||
raise JsonableError(_("Could not allocate event queue"))
|
||||
if event_types is not None:
|
||||
event_types_set = set(event_types) # type: Optional[Set[str]]
|
||||
|
||||
if fetch_event_types is not None:
|
||||
event_types_set = set(fetch_event_types) # type: Optional[Set[str]]
|
||||
elif event_types is not None:
|
||||
event_types_set = set(event_types)
|
||||
else:
|
||||
event_types_set = None
|
||||
|
||||
@@ -441,7 +459,8 @@ def do_events_register(user_profile, user_client, apply_markdown=True,
|
||||
|
||||
# Apply events that came in while we were fetching initial data
|
||||
events = get_user_events(user_profile, queue_id, -1)
|
||||
apply_events(ret, events, user_profile, include_subscribers=include_subscribers)
|
||||
apply_events(ret, events, user_profile, include_subscribers=include_subscribers,
|
||||
fetch_event_types=fetch_event_types)
|
||||
if len(events) > 0:
|
||||
ret['last_event_id'] = events[-1]['id']
|
||||
else:
|
||||
|
||||
@@ -138,6 +138,51 @@ class EventsEndpointTest(ZulipTestCase):
|
||||
self.assertEqual(result_dict['pointer'], 15)
|
||||
self.assertEqual(result_dict['queue_id'], '15:12')
|
||||
|
||||
# Now test with `fetch_event_types` not matching the event
|
||||
with mock.patch('zerver.lib.events.request_event_queue', return_value='15:13'):
|
||||
with mock.patch('zerver.lib.events.get_user_events',
|
||||
return_value=[{
|
||||
'id': 6,
|
||||
'type': 'pointer',
|
||||
'pointer': 15,
|
||||
}]):
|
||||
result = self.client_post('/json/register',
|
||||
dict(event_types=ujson.dumps(['pointer']),
|
||||
fetch_event_types=ujson.dumps(['message'])),
|
||||
**self.api_auth(email))
|
||||
self.assert_json_success(result)
|
||||
result_dict = ujson.loads(result.content)
|
||||
self.assertEqual(result_dict['last_event_id'], 6)
|
||||
# Check that the message event types data is in there
|
||||
self.assertIn('max_message_id', result_dict)
|
||||
# Check that the pointer event types data is not in there
|
||||
self.assertNotIn('pointer', result_dict)
|
||||
self.assertEqual(result_dict['queue_id'], '15:13')
|
||||
|
||||
# Now test with `fetch_event_types` matching the event
|
||||
with mock.patch('zerver.lib.events.request_event_queue', return_value='15:13'):
|
||||
with mock.patch('zerver.lib.events.get_user_events',
|
||||
return_value=[{
|
||||
'id': 6,
|
||||
'type': 'pointer',
|
||||
'pointer': 15,
|
||||
}]):
|
||||
result = self.client_post('/json/register',
|
||||
dict(fetch_event_types=ujson.dumps(['pointer']),
|
||||
event_types=ujson.dumps(['message'])),
|
||||
**self.api_auth(email))
|
||||
self.assert_json_success(result)
|
||||
result_dict = ujson.loads(result.content)
|
||||
self.assertEqual(result_dict['last_event_id'], 6)
|
||||
# Check that we didn't fetch the messages data
|
||||
self.assertNotIn('max_message_id', result_dict)
|
||||
# Check that the pointer data is in there, and is correctly
|
||||
# updated (preserving our atomicity guarantee), though of
|
||||
# course any future pointer events won't be distributed
|
||||
self.assertIn('pointer', result_dict)
|
||||
self.assertEqual(result_dict['pointer'], 15)
|
||||
self.assertEqual(result_dict['queue_id'], '15:13')
|
||||
|
||||
def test_tornado_endpoint(self):
|
||||
# type: () -> None
|
||||
|
||||
|
||||
@@ -30,13 +30,15 @@ def events_register_backend(request, user_profile,
|
||||
all_public_streams=REQ(default=None, validator=check_bool),
|
||||
include_subscribers=REQ(default=False, validator=check_bool),
|
||||
event_types=REQ(validator=check_list(check_string), default=None),
|
||||
fetch_event_types=REQ(validator=check_list(check_string), default=None),
|
||||
narrow=REQ(validator=check_list(check_list(check_string, length=2)), default=[]),
|
||||
queue_lifespan_secs=REQ(converter=int, default=0)):
|
||||
# type: (HttpRequest, UserProfile, bool, Optional[bool], bool, Optional[Iterable[str]], Iterable[Sequence[Text]], int) -> HttpResponse
|
||||
# type: (HttpRequest, UserProfile, bool, Optional[bool], bool, Optional[Iterable[str]], Optional[Iterable[str]], Iterable[Sequence[Text]], int) -> HttpResponse
|
||||
all_public_streams = _default_all_public_streams(user_profile, all_public_streams)
|
||||
narrow = _default_narrow(user_profile, narrow)
|
||||
|
||||
ret = do_events_register(user_profile, request.client, apply_markdown,
|
||||
event_types, queue_lifespan_secs, all_public_streams,
|
||||
narrow=narrow, include_subscribers=include_subscribers)
|
||||
narrow=narrow, include_subscribers=include_subscribers,
|
||||
fetch_event_types=fetch_event_types)
|
||||
return json_success(ret)
|
||||
|
||||
Reference in New Issue
Block a user