python: Reformat with Black, except quotes.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
Author: Anders Kaseorg
Date: 2021-02-11 23:19:30 -08:00
Committed by: Tim Abbott
Parent: 5028c081cb
Commit: 11741543da
817 changed files with 44952 additions and 24860 deletions
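
The "except quotes" in the commit title indicates that Black's string (quote) normalization was left off, so layout (spacing, line wrapping, trailing commas) is normalized throughout the diff below while existing quote characters in string literals are preserved. A minimal sketch of that behavior, assuming the black package is installed (illustrative only, not part of this commit):

# Illustrative sketch: reformat a snippet the way this commit does, with
# Black's string normalization turned off so existing quotes are kept.
import black

src = (
    "def message_to_dict_json(message: Message, realm_id: Optional[int]=None) ->bytes:\n"
    "    return b''\n"
)

# string_normalization=False is the "except quotes" part; other options use Black's defaults.
mode = black.FileMode(string_normalization=False)
print(black.format_str(src, mode=mode))
# The output fixes spacing ("= None", "-> bytes") but leaves b'' single-quoted.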


@@ -55,6 +55,7 @@ from zerver.models import (
RealmAlertWord = Dict[int, List[str]]
class RawReactionRow(TypedDict):
emoji_code: str
emoji_name: str
@@ -64,6 +65,7 @@ class RawReactionRow(TypedDict):
user_profile__full_name: str
user_profile__id: int
class RawUnreadMessagesResult(TypedDict):
pm_dict: Dict[int, Any]
stream_dict: Dict[int, Any]
@@ -72,6 +74,7 @@ class RawUnreadMessagesResult(TypedDict):
muted_stream_ids: List[int]
unmuted_stream_msgs: Set[int]
class UnreadMessagesResult(TypedDict):
pms: List[Dict[str, Any]]
streams: List[Dict[str, Any]]
@@ -79,6 +82,7 @@ class UnreadMessagesResult(TypedDict):
mentions: List[int]
count: int
@dataclass
class SendMessageRequest:
message: Message
@@ -102,17 +106,20 @@ class SendMessageRequest:
deliver_at: Optional[datetime.datetime] = None
delivery_type: Optional[str] = None
# We won't try to fetch more unread message IDs from the database than
# this limit. The limit is super high, in large part because it means
# client-side code mostly doesn't need to think about the case that a
# user has more older unread messages that were cut off.
MAX_UNREAD_MESSAGES = 50000
def truncate_content(content: str, max_length: int, truncation_message: str) -> str:
if len(content) > max_length:
content = content[:max_length - len(truncation_message)] + truncation_message
content = content[: max_length - len(truncation_message)] + truncation_message
return content
def normalize_body(body: str) -> str:
body = body.rstrip()
if len(body) == 0:
@@ -121,15 +128,19 @@ def normalize_body(body: str) -> str:
raise JsonableError(_("Message must not contain null bytes"))
return truncate_content(body, MAX_MESSAGE_LENGTH, "\n[message truncated]")
def truncate_topic(topic: str) -> str:
return truncate_content(topic, MAX_TOPIC_NAME_LENGTH, "...")
def messages_for_ids(message_ids: List[int],
user_message_flags: Dict[int, List[str]],
search_fields: Dict[int, Dict[str, str]],
apply_markdown: bool,
client_gravatar: bool,
allow_edit_history: bool) -> List[Dict[str, Any]]:
def messages_for_ids(
message_ids: List[int],
user_message_flags: Dict[int, List[str]],
search_fields: Dict[int, Dict[str, str]],
apply_markdown: bool,
client_gravatar: bool,
allow_edit_history: bool,
) -> List[Dict[str, Any]]:
cache_transformer = MessageDict.build_dict_from_raw_db_row
id_fetcher = lambda row: row['id']
@@ -141,7 +152,8 @@ def messages_for_ids(message_ids: List[int],
id_fetcher=id_fetcher,
cache_transformer=cache_transformer,
extractor=extract_message_dict,
setter=stringify_message_dict)
setter=stringify_message_dict,
)
message_list: List[Dict[str, Any]] = []
@@ -160,8 +172,10 @@ def messages_for_ids(message_ids: List[int],
return message_list
def sew_messages_and_reactions(messages: List[Dict[str, Any]],
reactions: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
def sew_messages_and_reactions(
messages: List[Dict[str, Any]], reactions: List[Dict[str, Any]]
) -> List[Dict[str, Any]]:
"""Given a iterable of messages and reactions stitch reactions
into messages.
"""
@@ -173,14 +187,14 @@ def sew_messages_and_reactions(messages: List[Dict[str, Any]],
converted_messages = {message['id']: message for message in messages}
for reaction in reactions:
converted_messages[reaction['message_id']]['reactions'].append(
reaction)
converted_messages[reaction['message_id']]['reactions'].append(reaction)
return list(converted_messages.values())
def sew_messages_and_submessages(messages: List[Dict[str, Any]],
submessages: List[Dict[str, Any]]) -> None:
def sew_messages_and_submessages(
messages: List[Dict[str, Any]], submessages: List[Dict[str, Any]]
) -> None:
# This is super similar to sew_messages_and_reactions.
for message in messages:
message['submessages'] = []
@@ -193,16 +207,20 @@ def sew_messages_and_submessages(messages: List[Dict[str, Any]],
message = message_dict[message_id]
message['submessages'].append(submessage)
def extract_message_dict(message_bytes: bytes) -> Dict[str, Any]:
return orjson.loads(zlib.decompress(message_bytes))
def stringify_message_dict(message_dict: Dict[str, Any]) -> bytes:
return zlib.compress(orjson.dumps(message_dict))
@cache_with_key(to_dict_cache_key, timeout=3600*24)
def message_to_dict_json(message: Message, realm_id: Optional[int]=None) -> bytes:
@cache_with_key(to_dict_cache_key, timeout=3600 * 24)
def message_to_dict_json(message: Message, realm_id: Optional[int] = None) -> bytes:
return MessageDict.to_dict_uncached([message], realm_id)[message.id]
def save_message_rendered_content(message: Message, content: str) -> str:
rendered_content = render_markdown(message, content, realm=message.get_realm())
message.rendered_content = rendered_content
@@ -210,6 +228,7 @@ def save_message_rendered_content(message: Message, content: str) -> str:
message.save_rendered_content()
return rendered_content
class MessageDict:
"""MessageDict is the core class responsible for marshalling Message
objects obtained from the database into a format that can be sent
@@ -231,13 +250,14 @@ class MessageDict:
dictionaries.
"""
@staticmethod
def wide_dict(message: Message, realm_id: Optional[int]=None) -> Dict[str, Any]:
'''
def wide_dict(message: Message, realm_id: Optional[int] = None) -> Dict[str, Any]:
"""
The next two lines get the cacheable field related
to our message object, with the side effect of
populating the cache.
'''
"""
json = message_to_dict_json(message, realm_id)
obj = extract_message_dict(json)
@@ -253,15 +273,17 @@ class MessageDict:
return obj
@staticmethod
def post_process_dicts(objs: List[Dict[str, Any]], apply_markdown: bool, client_gravatar: bool) -> None:
'''
def post_process_dicts(
objs: List[Dict[str, Any]], apply_markdown: bool, client_gravatar: bool
) -> None:
"""
NOTE: This function mutates the objects in
the `objs` list, rather than making
shallow copies. It might be safer to
make shallow copies here, but performance
is somewhat important here, as we are
often fetching hundreds of messages.
'''
"""
MessageDict.bulk_hydrate_sender_info(objs)
MessageDict.bulk_hydrate_recipient_info(objs)
@@ -269,16 +291,18 @@ class MessageDict:
MessageDict.finalize_payload(obj, apply_markdown, client_gravatar, skip_copy=True)
@staticmethod
def finalize_payload(obj: Dict[str, Any],
apply_markdown: bool,
client_gravatar: bool,
keep_rendered_content: bool=False,
skip_copy: bool=False) -> Dict[str, Any]:
'''
def finalize_payload(
obj: Dict[str, Any],
apply_markdown: bool,
client_gravatar: bool,
keep_rendered_content: bool = False,
skip_copy: bool = False,
) -> Dict[str, Any]:
"""
By default, we make a shallow copy of the incoming dict to avoid
mutation-related bugs. Code paths that are passing a unique object
can pass skip_copy=True to avoid this extra work.
'''
"""
if not skip_copy:
obj = copy.copy(obj)
@@ -302,7 +326,9 @@ class MessageDict:
return obj
@staticmethod
def sew_submessages_and_reactions_to_msgs(messages: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
def sew_submessages_and_reactions_to_msgs(
messages: List[Dict[str, Any]]
) -> List[Dict[str, Any]]:
msg_ids = [msg['id'] for msg in messages]
submessages = SubMessage.get_raw_db_rows(msg_ids)
sew_messages_and_submessages(messages, submessages)
@@ -311,14 +337,17 @@ class MessageDict:
return sew_messages_and_reactions(messages, reactions)
@staticmethod
def to_dict_uncached(messages: List[Message], realm_id: Optional[int]=None) -> Dict[int, bytes]:
def to_dict_uncached(
messages: List[Message], realm_id: Optional[int] = None
) -> Dict[int, bytes]:
messages_dict = MessageDict.to_dict_uncached_helper(messages, realm_id)
encoded_messages = {msg['id']: stringify_message_dict(msg) for msg in messages_dict}
return encoded_messages
@staticmethod
def to_dict_uncached_helper(messages: List[Message],
realm_id: Optional[int]=None) -> List[Dict[str, Any]]:
def to_dict_uncached_helper(
messages: List[Message], realm_id: Optional[int] = None
) -> List[Dict[str, Any]]:
# Near duplicate of the build_message_dict + get_raw_db_rows
# code path that accepts already fetched Message objects
# rather than message IDs.
@@ -332,23 +361,26 @@ class MessageDict:
return Stream.objects.get(id=message.recipient.type_id).realm_id
return message.sender.realm_id
message_rows = [{
'id': message.id,
DB_TOPIC_NAME: message.topic_name(),
"date_sent": message.date_sent,
"last_edit_time": message.last_edit_time,
"edit_history": message.edit_history,
"content": message.content,
"rendered_content": message.rendered_content,
"rendered_content_version": message.rendered_content_version,
"recipient_id": message.recipient.id,
"recipient__type": message.recipient.type,
"recipient__type_id": message.recipient.type_id,
"rendering_realm_id": get_rendering_realm_id(message),
"sender_id": message.sender.id,
"sending_client__name": message.sending_client.name,
"sender__realm_id": message.sender.realm_id,
} for message in messages]
message_rows = [
{
'id': message.id,
DB_TOPIC_NAME: message.topic_name(),
"date_sent": message.date_sent,
"last_edit_time": message.last_edit_time,
"edit_history": message.edit_history,
"content": message.content,
"rendered_content": message.rendered_content,
"rendered_content_version": message.rendered_content_version,
"recipient_id": message.recipient.id,
"recipient__type": message.recipient.type,
"recipient__type_id": message.recipient.type_id,
"rendering_realm_id": get_rendering_realm_id(message),
"sender_id": message.sender.id,
"sending_client__name": message.sending_client.name,
"sender__realm_id": message.sender.realm_id,
}
for message in messages
]
MessageDict.sew_submessages_and_reactions_to_msgs(message_rows)
return [MessageDict.build_dict_from_raw_db_row(row) for row in message_rows]
@@ -378,60 +410,61 @@ class MessageDict:
@staticmethod
def build_dict_from_raw_db_row(row: Dict[str, Any]) -> Dict[str, Any]:
'''
"""
row is a row from a .values() call, and it needs to have
all the relevant fields populated
'''
"""
return MessageDict.build_message_dict(
message_id = row['id'],
last_edit_time = row['last_edit_time'],
edit_history = row['edit_history'],
content = row['content'],
topic_name = row[DB_TOPIC_NAME],
date_sent = row['date_sent'],
rendered_content = row['rendered_content'],
rendered_content_version = row['rendered_content_version'],
sender_id = row['sender_id'],
sender_realm_id = row['sender__realm_id'],
sending_client_name = row['sending_client__name'],
rendering_realm_id = row.get('rendering_realm_id', row['sender__realm_id']),
recipient_id = row['recipient_id'],
recipient_type = row['recipient__type'],
recipient_type_id = row['recipient__type_id'],
message_id=row['id'],
last_edit_time=row['last_edit_time'],
edit_history=row['edit_history'],
content=row['content'],
topic_name=row[DB_TOPIC_NAME],
date_sent=row['date_sent'],
rendered_content=row['rendered_content'],
rendered_content_version=row['rendered_content_version'],
sender_id=row['sender_id'],
sender_realm_id=row['sender__realm_id'],
sending_client_name=row['sending_client__name'],
rendering_realm_id=row.get('rendering_realm_id', row['sender__realm_id']),
recipient_id=row['recipient_id'],
recipient_type=row['recipient__type'],
recipient_type_id=row['recipient__type_id'],
reactions=row['reactions'],
submessages=row['submessages'],
)
@staticmethod
def build_message_dict(
message_id: int,
last_edit_time: Optional[datetime.datetime],
edit_history: Optional[str],
content: str,
topic_name: str,
date_sent: datetime.datetime,
rendered_content: Optional[str],
rendered_content_version: Optional[int],
sender_id: int,
sender_realm_id: int,
sending_client_name: str,
rendering_realm_id: int,
recipient_id: int,
recipient_type: int,
recipient_type_id: int,
reactions: List[RawReactionRow],
submessages: List[Dict[str, Any]],
message_id: int,
last_edit_time: Optional[datetime.datetime],
edit_history: Optional[str],
content: str,
topic_name: str,
date_sent: datetime.datetime,
rendered_content: Optional[str],
rendered_content_version: Optional[int],
sender_id: int,
sender_realm_id: int,
sending_client_name: str,
rendering_realm_id: int,
recipient_id: int,
recipient_type: int,
recipient_type_id: int,
reactions: List[RawReactionRow],
submessages: List[Dict[str, Any]],
) -> Dict[str, Any]:
obj = dict(
id = message_id,
sender_id = sender_id,
content = content,
recipient_type_id = recipient_type_id,
recipient_type = recipient_type,
recipient_id = recipient_id,
timestamp = datetime_to_timestamp(date_sent),
client = sending_client_name)
id=message_id,
sender_id=sender_id,
content=content,
recipient_type_id=recipient_type_id,
recipient_type=recipient_type,
recipient_id=recipient_id,
timestamp=datetime_to_timestamp(date_sent),
client=sending_client_name,
)
obj[TOPIC_NAME] = topic_name
obj['sender_realm_id'] = sender_realm_id
@@ -446,7 +479,9 @@ class MessageDict:
assert edit_history is not None
obj['edit_history'] = orjson.loads(edit_history)
if Message.need_to_render_content(rendered_content, rendered_content_version, markdown_version):
if Message.need_to_render_content(
rendered_content, rendered_content_version, markdown_version
):
# We really shouldn't be rendering objects in this method, but there is
# a scenario where we upgrade the version of Markdown and fail to run
# management commands to re-render historical messages, and then we
@@ -467,26 +502,26 @@ class MessageDict:
if rendered_content is not None:
obj['rendered_content'] = rendered_content
else:
obj['rendered_content'] = ('<p>[Zulip note: Sorry, we could not ' +
'understand the formatting of your message]</p>')
obj['rendered_content'] = (
'<p>[Zulip note: Sorry, we could not '
+ 'understand the formatting of your message]</p>'
)
if rendered_content is not None:
obj['is_me_message'] = Message.is_status_message(content, rendered_content)
else:
obj['is_me_message'] = False
obj['reactions'] = [ReactionDict.build_dict_from_raw_db_row(reaction)
for reaction in reactions]
obj['reactions'] = [
ReactionDict.build_dict_from_raw_db_row(reaction) for reaction in reactions
]
obj['submessages'] = submessages
return obj
@staticmethod
def bulk_hydrate_sender_info(objs: List[Dict[str, Any]]) -> None:
sender_ids = list({
obj['sender_id']
for obj in objs
})
sender_ids = list({obj['sender_id'] for obj in objs})
if not sender_ids:
return
@@ -504,10 +539,7 @@ class MessageDict:
rows = query_for_ids(query, sender_ids, 'zerver_userprofile.id')
sender_dict = {
row['id']: row
for row in rows
}
sender_dict = {row['id']: row for row in rows}
for obj in objs:
sender_id = obj['sender_id']
@@ -522,12 +554,12 @@ class MessageDict:
@staticmethod
def hydrate_recipient_info(obj: Dict[str, Any], display_recipient: DisplayRecipientT) -> None:
'''
"""
This method hydrates recipient info with things
like full names and emails of senders. Eventually
our clients should be able to hydrate these fields
themselves with info they already have on users.
'''
"""
recipient_type = obj['recipient_type']
recipient_type_id = obj['recipient_type_id']
@@ -569,7 +601,8 @@ class MessageDict:
obj['recipient_id'],
obj['recipient_type'],
obj['recipient_type_id'],
) for obj in objs
)
for obj in objs
}
display_recipients = bulk_fetch_display_recipients(recipient_tuples)
@@ -594,26 +627,33 @@ class MessageDict:
client_gravatar=client_gravatar,
)
class ReactionDict:
@staticmethod
def build_dict_from_raw_db_row(row: RawReactionRow) -> Dict[str, Any]:
return {'emoji_name': row['emoji_name'],
'emoji_code': row['emoji_code'],
'reaction_type': row['reaction_type'],
# TODO: We plan to remove this redundant user dictionary once
# clients are updated to support accessing user_id. See
# https://github.com/zulip/zulip/pull/14711 for details.
#
# When we do that, we can likely update the `.values()` query to
# not fetch the extra user_profile__* fields from the database
# as a small performance optimization.
'user': {'email': row['user_profile__email'],
'id': row['user_profile__id'],
'full_name': row['user_profile__full_name']},
'user_id': row['user_profile__id']}
return {
'emoji_name': row['emoji_name'],
'emoji_code': row['emoji_code'],
'reaction_type': row['reaction_type'],
# TODO: We plan to remove this redundant user dictionary once
# clients are updated to support accessing user_id. See
# https://github.com/zulip/zulip/pull/14711 for details.
#
# When we do that, we can likely update the `.values()` query to
# not fetch the extra user_profile__* fields from the database
# as a small performance optimization.
'user': {
'email': row['user_profile__email'],
'id': row['user_profile__id'],
'full_name': row['user_profile__full_name'],
},
'user_id': row['user_profile__id'],
}
def access_message(user_profile: UserProfile, message_id: int) -> Tuple[Message, Optional[UserMessage]]:
def access_message(
user_profile: UserProfile, message_id: int
) -> Tuple[Message, Optional[UserMessage]]:
"""You can access a message by ID in our APIs that either:
(1) You received or have previously accessed via starring
(aka have a UserMessage row for).
@@ -633,8 +673,10 @@ def access_message(user_profile: UserProfile, message_id: int) -> Tuple[Message,
return (message, user_message)
raise JsonableError(_("Invalid message(s)"))
def has_message_access(user_profile: UserProfile, message: Message,
user_message: Optional[UserMessage]) -> bool:
def has_message_access(
user_profile: UserProfile, message: Message, user_message: Optional[UserMessage]
) -> bool:
if user_message is None:
if message.recipient.type != Recipient.STREAM:
# You can't access private messages you didn't receive
@@ -654,15 +696,17 @@ def has_message_access(user_profile: UserProfile, message: Message,
# This stream is an invite-only stream where message
# history is available to subscribers. So we check if
# you're subscribed.
if not Subscription.objects.filter(user_profile=user_profile, active=True,
recipient=message.recipient).exists():
if not Subscription.objects.filter(
user_profile=user_profile, active=True, recipient=message.recipient
).exists():
return False
# You are subscribed, so let this fall through to the public stream case.
elif user_profile.is_guest:
# Guest users don't get automatic access to public stream messages
if not Subscription.objects.filter(user_profile=user_profile, active=True,
recipient=message.recipient).exists():
if not Subscription.objects.filter(
user_profile=user_profile, active=True, recipient=message.recipient
).exists():
return False
else:
# Otherwise, the message was sent to a public stream in
@@ -671,6 +715,7 @@ def has_message_access(user_profile: UserProfile, message: Message,
return True
def bulk_access_messages(user_profile: UserProfile, messages: Sequence[Message]) -> List[Message]:
filtered_messages = []
@@ -680,9 +725,11 @@ def bulk_access_messages(user_profile: UserProfile, messages: Sequence[Message])
filtered_messages.append(message)
return filtered_messages
def bulk_access_messages_expect_usermessage(
user_profile_id: int, message_ids: Sequence[int]) -> List[int]:
'''
user_profile_id: int, message_ids: Sequence[int]
) -> List[int]:
"""
Like bulk_access_messages, but faster and potentially stricter.
Returns a subset of `message_ids` containing only messages the
@@ -694,21 +741,24 @@ def bulk_access_messages_expect_usermessage(
the user has access (e.g. because it went to a public stream.)
See also: `access_message`, `bulk_access_messages`.
'''
"""
return UserMessage.objects.filter(
user_profile_id=user_profile_id,
message_id__in=message_ids,
).values_list('message_id', flat=True)
def render_markdown(message: Message,
content: str,
realm: Optional[Realm]=None,
realm_alert_words_automaton: Optional[ahocorasick.Automaton]=None,
mention_data: Optional[MentionData]=None,
email_gateway: bool=False) -> str:
'''
def render_markdown(
message: Message,
content: str,
realm: Optional[Realm] = None,
realm_alert_words_automaton: Optional[ahocorasick.Automaton] = None,
mention_data: Optional[MentionData] = None,
email_gateway: bool = False,
) -> str:
"""
This is basically just a wrapper for do_render_markdown.
'''
"""
if realm is None:
realm = message.get_realm()
@@ -730,14 +780,17 @@ def render_markdown(message: Message,
return rendered_content
def do_render_markdown(message: Message,
content: str,
realm: Realm,
sent_by_bot: bool,
translate_emoticons: bool,
realm_alert_words_automaton: Optional[ahocorasick.Automaton]=None,
mention_data: Optional[MentionData]=None,
email_gateway: bool=False) -> str:
def do_render_markdown(
message: Message,
content: str,
realm: Realm,
sent_by_bot: bool,
translate_emoticons: bool,
realm_alert_words_automaton: Optional[ahocorasick.Automaton] = None,
mention_data: Optional[MentionData] = None,
email_gateway: bool = False,
) -> str:
"""Return HTML for given Markdown. Markdown may add properties to the
message object such as `mentions_user_ids`, `mentions_user_group_ids`, and
`mentions_wildcard`. These are only on this Django object and are not
@@ -764,9 +817,12 @@ def do_render_markdown(message: Message,
)
return rendered_content
def huddle_users(recipient_id: int) -> str:
display_recipient: DisplayRecipientT = get_display_recipient_by_id(
recipient_id, Recipient.HUDDLE, None,
recipient_id,
Recipient.HUDDLE,
None,
)
# str is for streams.
@@ -776,9 +832,10 @@ def huddle_users(recipient_id: int) -> str:
user_ids = sorted(user_ids)
return ','.join(str(uid) for uid in user_ids)
def aggregate_message_dict(input_dict: Dict[int, Dict[str, Any]],
lookup_fields: List[str],
collect_senders: bool) -> List[Dict[str, Any]]:
def aggregate_message_dict(
input_dict: Dict[int, Dict[str, Any]], lookup_fields: List[str], collect_senders: bool
) -> List[Dict[str, Any]]:
lookup_dict: Dict[Tuple[Any, ...], Dict[str, Any]] = {}
'''
@@ -838,56 +895,75 @@ def aggregate_message_dict(input_dict: Dict[int, Dict[str, Any]],
return [lookup_dict[k] for k in sorted_keys]
def get_inactive_recipient_ids(user_profile: UserProfile) -> List[int]:
rows = get_stream_subscriptions_for_user(user_profile).filter(
active=False,
).values(
'recipient_id',
rows = (
get_stream_subscriptions_for_user(user_profile)
.filter(
active=False,
)
.values(
'recipient_id',
)
)
inactive_recipient_ids = [
row['recipient_id']
for row in rows]
inactive_recipient_ids = [row['recipient_id'] for row in rows]
return inactive_recipient_ids
def get_muted_stream_ids(user_profile: UserProfile) -> List[int]:
rows = get_stream_subscriptions_for_user(user_profile).filter(
active=True,
is_muted=True,
).values(
'recipient__type_id',
rows = (
get_stream_subscriptions_for_user(user_profile)
.filter(
active=True,
is_muted=True,
)
.values(
'recipient__type_id',
)
)
muted_stream_ids = [
row['recipient__type_id']
for row in rows]
muted_stream_ids = [row['recipient__type_id'] for row in rows]
return muted_stream_ids
def get_starred_message_ids(user_profile: UserProfile) -> List[int]:
return list(UserMessage.objects.filter(
user_profile=user_profile,
).extra(
where=[UserMessage.where_starred()],
).order_by(
'message_id',
).values_list('message_id', flat=True)[0:10000])
return list(
UserMessage.objects.filter(
user_profile=user_profile,
)
.extra(
where=[UserMessage.where_starred()],
)
.order_by(
'message_id',
)
.values_list('message_id', flat=True)[0:10000]
)
def get_raw_unread_data(user_profile: UserProfile) -> RawUnreadMessagesResult:
excluded_recipient_ids = get_inactive_recipient_ids(user_profile)
user_msgs = UserMessage.objects.filter(
user_profile=user_profile,
).exclude(
message__recipient_id__in=excluded_recipient_ids,
).extra(
where=[UserMessage.where_unread()],
).values(
'message_id',
'message__sender_id',
MESSAGE__TOPIC,
'message__recipient_id',
'message__recipient__type',
'message__recipient__type_id',
'flags',
).order_by("-message_id")
user_msgs = (
UserMessage.objects.filter(
user_profile=user_profile,
)
.exclude(
message__recipient_id__in=excluded_recipient_ids,
)
.extra(
where=[UserMessage.where_unread()],
)
.values(
'message_id',
'message__sender_id',
MESSAGE__TOPIC,
'message__recipient_id',
'message__recipient__type',
'message__recipient__type_id',
'flags',
)
.order_by("-message_id")
)
# Limit unread messages for performance reasons.
user_msgs = list(user_msgs[:MAX_UNREAD_MESSAGES])
@@ -895,9 +971,9 @@ def get_raw_unread_data(user_profile: UserProfile) -> RawUnreadMessagesResult:
rows = list(reversed(user_msgs))
return extract_unread_data_from_um_rows(rows, user_profile)
def extract_unread_data_from_um_rows(
rows: List[Dict[str, Any]],
user_profile: Optional[UserProfile]
rows: List[Dict[str, Any]], user_profile: Optional[UserProfile]
) -> RawUnreadMessagesResult:
pm_dict: Dict[int, Any] = {}
@@ -997,6 +1073,7 @@ def extract_unread_data_from_um_rows(
return raw_unread_messages
def aggregate_unread_data(raw_data: RawUnreadMessagesResult) -> UnreadMessagesResult:
pm_dict = raw_data['pm_dict']
@@ -1037,22 +1114,23 @@ def aggregate_unread_data(raw_data: RawUnreadMessagesResult) -> UnreadMessagesRe
streams=stream_objects,
huddles=huddle_objects,
mentions=mentions,
count=count)
count=count,
)
return result
def apply_unread_message_event(user_profile: UserProfile,
state: RawUnreadMessagesResult,
message: Dict[str, Any],
flags: List[str]) -> None:
def apply_unread_message_event(
user_profile: UserProfile,
state: RawUnreadMessagesResult,
message: Dict[str, Any],
flags: List[str],
) -> None:
message_id = message['id']
if message['type'] == 'stream':
message_type = 'stream'
elif message['type'] == 'private':
others = [
recip for recip in message['display_recipient']
if recip['id'] != user_profile.id
]
others = [recip for recip in message['display_recipient'] if recip['id'] != user_profile.id]
if len(others) <= 1:
message_type = 'private'
else:
@@ -1105,8 +1183,8 @@ def apply_unread_message_event(user_profile: UserProfile,
if message_id in state['unmuted_stream_msgs']:
state['mentions'].add(message_id)
def remove_message_id_from_unread_mgs(state: RawUnreadMessagesResult,
message_id: int) -> None:
def remove_message_id_from_unread_mgs(state: RawUnreadMessagesResult, message_id: int) -> None:
# The opposite of apply_unread_message_event; removes a read or
# deleted message from a raw_unread_msgs data structure.
state['pm_dict'].pop(message_id, None)
@@ -1115,32 +1193,44 @@ def remove_message_id_from_unread_mgs(state: RawUnreadMessagesResult,
state['unmuted_stream_msgs'].discard(message_id)
state['mentions'].discard(message_id)
def estimate_recent_messages(realm: Realm, hours: int) -> int:
stat = COUNT_STATS['messages_sent:is_bot:hour']
d = timezone_now() - datetime.timedelta(hours=hours)
return RealmCount.objects.filter(property=stat.property, end_time__gt=d,
realm=realm).aggregate(Sum('value'))['value__sum'] or 0
return (
RealmCount.objects.filter(property=stat.property, end_time__gt=d, realm=realm).aggregate(
Sum('value')
)['value__sum']
or 0
)
def get_first_visible_message_id(realm: Realm) -> int:
return realm.first_visible_message_id
def maybe_update_first_visible_message_id(realm: Realm, lookback_hours: int) -> None:
recent_messages_count = estimate_recent_messages(realm, lookback_hours)
if realm.message_visibility_limit is not None and recent_messages_count > 0:
update_first_visible_message_id(realm)
def update_first_visible_message_id(realm: Realm) -> None:
if realm.message_visibility_limit is None:
realm.first_visible_message_id = 0
else:
try:
first_visible_message_id = Message.objects.filter(sender__realm=realm).values('id').\
order_by('-id')[realm.message_visibility_limit - 1]["id"]
first_visible_message_id = (
Message.objects.filter(sender__realm=realm)
.values('id')
.order_by('-id')[realm.message_visibility_limit - 1]["id"]
)
except IndexError:
first_visible_message_id = 0
realm.first_visible_message_id = first_visible_message_id
realm.save(update_fields=["first_visible_message_id"])
def get_last_message_id() -> int:
# We generally use this function to populate RealmAuditLog, and
# the max id here is actually systemwide, not per-realm. I
@@ -1153,9 +1243,10 @@ def get_last_message_id() -> int:
last_id = -1
return last_id
def get_recent_conversations_recipient_id(user_profile: UserProfile,
recipient_id: int,
sender_id: int) -> int:
def get_recent_conversations_recipient_id(
user_profile: UserProfile, recipient_id: int, sender_id: int
) -> int:
"""Helper for doing lookups of the recipient_id that
get_recent_private_conversations would have used to record that
message in its data structure.
@@ -1165,6 +1256,7 @@ def get_recent_conversations_recipient_id(user_profile: UserProfile,
return UserProfile.objects.values_list('recipient_id', flat=True).get(id=sender_id)
return recipient_id
def get_recent_private_conversations(user_profile: UserProfile) -> Dict[int, Dict[str, Any]]:
"""This function uses some carefully optimized SQL queries, designed
to use the UserMessage index on private_messages. It is
@@ -1198,7 +1290,8 @@ def get_recent_private_conversations(user_profile: UserProfile) -> Dict[int, Dic
recipient_map = {}
my_recipient_id = user_profile.recipient_id
query = SQL('''
query = SQL(
'''
SELECT
subquery.recipient_id, MAX(subquery.message_id)
FROM (
@@ -1233,14 +1326,18 @@ def get_recent_private_conversations(user_profile: UserProfile) -> Dict[int, Dic
LIMIT %(conversation_limit)s)
) AS subquery
GROUP BY subquery.recipient_id
''')
'''
)
with connection.cursor() as cursor:
cursor.execute(query, {
"user_profile_id": user_profile.id,
"conversation_limit": RECENT_CONVERSATIONS_LIMIT,
"my_recipient_id": my_recipient_id,
})
cursor.execute(
query,
{
"user_profile_id": user_profile.id,
"conversation_limit": RECENT_CONVERSATIONS_LIMIT,
"my_recipient_id": my_recipient_id,
},
)
rows = cursor.fetchall()
# The resulting rows will be (recipient_id, max_message_id)
@@ -1254,10 +1351,11 @@ def get_recent_private_conversations(user_profile: UserProfile) -> Dict[int, Dic
)
# Now we need to map all the recipient_id objects to lists of user IDs
for (recipient_id, user_profile_id) in Subscription.objects.filter(
recipient_id__in=recipient_map.keys()).exclude(
user_profile_id=user_profile.id).values_list(
"recipient_id", "user_profile_id"):
for (recipient_id, user_profile_id) in (
Subscription.objects.filter(recipient_id__in=recipient_map.keys())
.exclude(user_profile_id=user_profile.id)
.values_list("recipient_id", "user_profile_id")
):
recipient_map[recipient_id]['user_ids'].append(user_profile_id)
# Sort to prevent test flakes and client bugs.
@@ -1266,6 +1364,7 @@ def get_recent_private_conversations(user_profile: UserProfile) -> Dict[int, Dic
return recipient_map
def wildcard_mention_allowed(sender: UserProfile, stream: Stream) -> bool:
realm = sender.realm