topics history: Simplify data in /topics endpoint.

In anticipation of having all unread message ids available to the
web app in page_params (via a separate effort), we are simplifying
the /topics endpoint to no longer return unread counts.

Instead, we return a list of tiny dictionaries with these fields:

    name - name of the topic
    max_id - max message id for the topic (aka most recent)

The items in the list are ordered most-recent-topic-first.
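
For illustration only (these topic names and message ids are made up,
not taken from the commit), a successful response from the endpoint
exercised in the tests below, GET /json/users/me/<stream_id>/topics,
now has roughly this shape:

    {
        "result": "success",
        "msg": "",
        "topics": [
            {"name": "lunch", "max_id": 30},
            {"name": "Denmark", "max_id": 18}
        ]
    }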
Author:    Steve Howell (2017-07-20 15:40:11 -04:00)
Committer: Tim Abbott
Parent:    466757c3f1
Commit:    69fd967c5d
3 changed files with 60 additions and 59 deletions


@@ -174,49 +174,42 @@ def realm_user_count(realm):
     return UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False).count()
 
 def get_topic_history_for_stream(user_profile, recipient):
-    # type: (UserProfile, Recipient) -> List[Tuple[str, int]]
+    # type: (UserProfile, Recipient) -> List[Dict[str, Any]]
 
     # We tested the below query on some large prod datasets, and we never
     # saw more than 50ms to execute it, so we think that's acceptable,
     # but we will monitor it, and we may later optimize it further.
     query = '''
-    SELECT topic, read, count(*)
-    FROM (
-        SELECT
-            ("zerver_usermessage"."flags" & 1) as read,
-            "zerver_message"."subject" as topic,
-            "zerver_message"."id" as message_id
-        FROM "zerver_usermessage"
-        INNER JOIN "zerver_message" ON (
-            "zerver_usermessage"."message_id" = "zerver_message"."id"
-        ) WHERE (
-            "zerver_usermessage"."user_profile_id" = %s AND
-            "zerver_message"."recipient_id" = %s
-        ) ORDER BY "zerver_usermessage"."message_id" DESC
-    ) messages_for_stream
-    GROUP BY topic, read
-    ORDER BY max(message_id) desc
+    SELECT
+        "zerver_message"."subject" as topic,
+        max("zerver_message".id) as max_message_id
+    FROM "zerver_message"
+    INNER JOIN "zerver_usermessage" ON (
+        "zerver_usermessage"."message_id" = "zerver_message"."id"
+    )
+    WHERE (
+        "zerver_usermessage"."user_profile_id" = %s AND
+        "zerver_message"."recipient_id" = %s
+    )
+    GROUP BY (
+        "zerver_message"."subject"
+    )
+    ORDER BY max("zerver_message".id) DESC
     '''
     cursor = connection.cursor()
     cursor.execute(query, [user_profile.id, recipient.id])
     rows = cursor.fetchall()
     cursor.close()
 
-    topic_names = dict() # type: Dict[str, str]
-    topic_counts = dict() # type: Dict[str, int]
-    topics = []
-    for row in rows:
-        topic_name, read, count = row
-        if topic_name.lower() not in topic_names:
-            topic_names[topic_name.lower()] = topic_name
-        topic_name = topic_names[topic_name.lower()]
-        if topic_name not in topic_counts:
-            topic_counts[topic_name] = 0
-            topics.append(topic_name)
-        if not read:
-            topic_counts[topic_name] += count
+    canonical_topic_names = set() # type: Set[str]
+    history = []
+    for (topic_name, max_message_id) in rows:
+        canonical_name = topic_name.lower()
+        if canonical_name in canonical_topic_names:
+            continue
+        canonical_topic_names.add(canonical_name)
+        history.append(dict(
+            name=topic_name,
+            max_id=max_message_id))
 
-    history = [(topic, topic_counts[topic]) for topic in topics]
     return history
 
 def send_signup_message(sender, signups_stream, user_profile,
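
A minimal standalone sketch of the case-insensitive dedup step above
(hypothetical helper name and sample rows, not actual Zulip code): the
query returns one row per exact subject spelling, ordered newest-first,
so the loop keeps the most recently used spelling of each topic along
with its max message id.

    def dedupe_topic_rows(rows):
        # rows: (topic_name, max_message_id) tuples, ordered so that the
        # most recently active spelling of each topic comes first
        seen = set()
        history = []
        for topic_name, max_message_id in rows:
            canonical = topic_name.lower()
            if canonical in seen:
                continue
            seen.add(canonical)
            history.append(dict(name=topic_name, max_id=max_message_id))
        return history

    print(dedupe_topic_rows([('Topic2', 40), ('toPIc1', 35), ('topic2', 20)]))
    # [{'name': 'Topic2', 'max_id': 40}, {'name': 'toPIc1', 'max_id': 35}]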


@@ -70,8 +70,8 @@ class TopicHistoryTest(ZulipTestCase):
         stream = get_stream(stream_name, user_profile.realm)
         recipient = get_recipient(Recipient.STREAM, stream.id)
 
-        def create_test_message(topic, read, starred=False):
-            # type: (str, bool, bool) -> None
+        def create_test_message(topic):
+            # type: (str) -> int
             hamlet = self.example_user('hamlet')
             message = Message.objects.create(
@@ -82,28 +82,31 @@ class TopicHistoryTest(ZulipTestCase):
                 pub_date=timezone_now(),
                 sending_client=get_client('whatever'),
             )
-            flags = 0
-            if read:
-                flags |= UserMessage.flags.read
-            # use this to make sure our query isn't confused
-            # by other flags
-            if starred:
-                flags |= UserMessage.flags.starred
             UserMessage.objects.create(
                 user_profile=user_profile,
                 message=message,
-                flags=flags,
+                flags=0,
             )
-        create_test_message('topic2', read=False)
-        create_test_message('toPIc1', read=False, starred=True)
-        create_test_message('topic2', read=False)
-        create_test_message('topic2', read=True)
-        create_test_message('topic2', read=False, starred=True)
-        create_test_message('Topic2', read=False)
-        create_test_message('already_read', read=True)
+            return message.id
+
+        # our most recent topics are topic0, topic1, topic2
+        # Create old messages with strange spellings.
+        create_test_message('topic2')
+        create_test_message('toPIc1')
+        create_test_message('toPIc0')
+        create_test_message('topic2')
+        create_test_message('topic2')
+        create_test_message('Topic2')
+
+        # Create new messages
+        topic2_msg_id = create_test_message('topic2')
+        create_test_message('topic1')
+        create_test_message('topic1')
+        topic1_msg_id = create_test_message('topic1')
+        topic0_msg_id = create_test_message('topic0')
 
         endpoint = '/json/users/me/%d/topics' % (stream.id,)
         result = self.client_get(endpoint, dict())
@@ -112,10 +115,18 @@ class TopicHistoryTest(ZulipTestCase):
         # We only look at the most recent three topics, because
         # the prior fixture data may be unreliable.
-        self.assertEqual(history[:3], [
-            [u'already_read', 0],
-            [u'Topic2', 4],
-            [u'toPIc1', 1],
+        history = history[:3]
+
+        self.assertEqual([topic['name'] for topic in history], [
+            'topic0',
+            'topic1',
+            'topic2',
+        ])
+
+        self.assertEqual([topic['max_id'] for topic in history], [
+            topic0_msg_id,
+            topic1_msg_id,
+            topic2_msg_id,
         ])
 
     def test_bad_stream_id(self):


@@ -377,9 +377,6 @@ def get_topics_backend(request, user_profile,
         recipient=recipient,
     )
 
-    # Our data structure here is a list of tuples of
-    # (topic name, unread count), and it's reverse chronological,
-    # so the most recent topic is the first element of the list.
     return json_success(dict(topics=result))
 
 @authenticated_json_post_view
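
With unread counts gone from this payload, per-topic unread counts are
expected to come from the unread message ids the web app will get in
page_params (the separate effort mentioned in the commit message). As a
rough sketch under that assumption (the field names below are
illustrative, not an API this commit defines), counting reduces to a
length check per (stream, topic):

    # Hypothetical page_params-style data; shape and names are assumptions.
    unread_msgs = [
        {'stream_id': 5, 'topic': 'lunch', 'unread_message_ids': [27, 29, 30]},
        {'stream_id': 5, 'topic': 'Denmark', 'unread_message_ids': [18]},
    ]

    unread_count_by_topic = {
        (row['stream_id'], row['topic'].lower()): len(row['unread_message_ids'])
        for row in unread_msgs
    }
    print(unread_count_by_topic)  # {(5, 'lunch'): 3, (5, 'denmark'): 1}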