Add PEP-484 type annotations to zerver/lib/.

Tim Abbott
2016-01-25 14:42:16 -08:00
parent d8f7d89fb4
commit 2059f650ab
17 changed files with 62 additions and 47 deletions
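
Since Zulip still supported Python 2 at this point, every annotation in this commit uses the comment syntax from PEP 484 rather than inline annotations: a variable's type rides in a "# type:" comment on its assignment, and a function signature goes in a "# type: (...) -> ..." comment on the first line of the body. A minimal sketch of both forms, using illustrative names that do not appear in the commit:

    from typing import Dict, List

    def bucket_by_length(words):
        # type: (List[str]) -> Dict[int, List[str]]
        # The signature comment lists parameter types in order; 'self'
        # would be omitted for methods.
        result = {}  # type: Dict[int, List[str]]
        for word in words:
            result.setdefault(len(word), []).append(word)
        return result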

View File

@@ -1,5 +1,6 @@
from __future__ import absolute_import
from __future__ import print_function
+from typing import *
from django.conf import settings
from django.core import validators
@@ -575,7 +576,7 @@ def do_send_messages(messages):
message['message'].update_calculated_fields()
# Save the message receipts in the database
-user_message_flags = defaultdict(dict)
+user_message_flags = defaultdict(dict) # type: Dict[int, Dict[int, List[str]]]
with transaction.atomic():
Message.objects.bulk_create([message['message'] for message in messages])
ums = []
@@ -1042,7 +1043,7 @@ def bulk_get_subscriber_user_ids(stream_dicts, user_profile, sub_dict):
user_profile__is_active=True,
active=True).values("user_profile_id", "recipient__type_id")
-result = dict((stream["id"], []) for stream in stream_dicts)
+result = dict((stream["id"], []) for stream in stream_dicts) # type: Dict[int, List[int]]
for sub in subscriptions:
result[sub["recipient__type_id"]].append(sub["user_profile_id"])
@@ -1114,7 +1115,7 @@ def get_subscribers_to_streams(streams):
arrays of all the streams within 'streams' to which that user is
subscribed.
"""
-subscribes_to = {}
+subscribes_to = {} # type: Dict[str, List[Stream]]
for stream in streams:
try:
subscribers = get_subscribers(stream)
@@ -1160,7 +1161,7 @@ def bulk_add_subscriptions(streams, users):
for stream in streams:
stream_map[recipients_map[stream.id].id] = stream
-subs_by_user = defaultdict(list)
+subs_by_user = defaultdict(list) # type: Dict[int, List[Subscription]]
all_subs_query = Subscription.objects.select_related("user_profile")
for sub in all_subs_query.filter(user_profile__in=users,
recipient__type=Recipient.STREAM):
@@ -1222,8 +1223,8 @@ def bulk_add_subscriptions(streams, users):
user_profile__is_active=True,
active=True).select_related('recipient', 'user_profile')
-all_subs_by_stream = defaultdict(list)
-emails_by_stream = defaultdict(list)
+all_subs_by_stream = defaultdict(list) # type: Dict[int, List[UserProfile]]
+emails_by_stream = defaultdict(list) # type: Dict[int, List[str]]
for sub in all_subs:
all_subs_by_stream[sub.recipient.type_id].append(sub.user_profile)
emails_by_stream[sub.recipient.type_id].append(sub.user_profile.email)
@@ -1233,7 +1234,7 @@ def bulk_add_subscriptions(streams, users):
return []
return emails_by_stream[stream.id]
-sub_tuples_by_user = defaultdict(list)
+sub_tuples_by_user = defaultdict(list) # type: Dict[int, List[Tuple[Subscription, Stream]]]
new_streams = set()
for (sub, stream) in subs_to_add + subs_to_activate:
sub_tuples_by_user[sub.user_profile.id].append((sub, stream))
@@ -1336,7 +1337,7 @@ def bulk_remove_subscriptions(users, streams):
for stream in streams:
stream_map[recipients_map[stream.id].id] = stream
-subs_by_user = dict((user_profile.id, []) for user_profile in users)
+subs_by_user = dict((user_profile.id, []) for user_profile in users) # type: Dict[int, List[Subscription]]
for sub in Subscription.objects.select_related("user_profile").filter(user_profile__in=users,
recipient__in=list(recipients_map.values()),
active=True):
@@ -1369,7 +1370,7 @@ def bulk_remove_subscriptions(users, streams):
for stream in new_vacant_streams])
send_event(event, active_user_ids(user_profile.realm))
-streams_by_user = defaultdict(list)
+streams_by_user = defaultdict(list) # type: Dict[int, List[Stream]]
for (sub, stream) in subs_to_deactivate:
streams_by_user[sub.user_profile_id].append(stream)
@@ -2786,7 +2787,7 @@ def do_invite_users(user_profile, invitee_emails, streams):
skipped = []
ret_error = None
-ret_error_data = {}
+ret_error_data = {} # type: Dict[str, List[Tuple[str, str]]]
for email in invitee_emails:
if email == '':
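
The same pattern repeats throughout this file: an empty dict, list, or defaultdict gives the checker nothing to infer element types from, so each one carries an explicit # type: comment. Because defaultdict subclasses dict, a plain Dict annotation suffices. A reduced sketch, with key and value types that are illustrative rather than taken from the commit:

    from collections import defaultdict
    from typing import Dict, List

    # Without the comment, mypy cannot tell what this empty defaultdict
    # will eventually hold.
    subs_by_user = defaultdict(list)  # type: Dict[int, List[str]]
    subs_by_user[42].append("announce")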

View File

@@ -1,6 +1,7 @@
from __future__ import absolute_import
# Zulip's main markdown implementation. See docs/markdown.md for
# detailed documentation on our markdown syntax.
+from typing import *
import codecs
import markdown
@@ -561,7 +562,7 @@ class Emoji(markdown.inlinepatterns.Pattern):
orig_syntax = match.group("syntax")
name = orig_syntax[1:-1]
-realm_emoji = {}
+realm_emoji = {} # type: Dict[str, str]
if db_data is not None:
realm_emoji = db_data['emoji']
@@ -992,7 +993,7 @@ def make_md_engine(key, opts):
def subject_links(domain, subject):
from zerver.models import get_realm, RealmFilter, realm_filters_for_domain
-matches = []
+matches = [] # type: List[str]
try:
realm_filters = realm_filters_for_domain(domain)
@@ -1048,12 +1049,12 @@ def _sanitize_for_log(md):
# Filters such as UserMentionPattern need a message, but python-markdown
# provides no way to pass extra params through to a pattern. Thus, a global.
-current_message = None
+current_message = None # type: Any # Should be Message but bugdown doesn't import models.py.
# We avoid doing DB queries in our markdown thread to avoid the overhead of
# opening a new DB connection. These connections tend to live longer than the
# threads themselves, as well.
-db_data = None
+db_data = None # type: Dict[str, Any]
def do_convert(md, realm_domain=None, message=None):
"""Convert Markdown to HTML, with Zulip-specific settings and hacks."""

View File

@@ -1,4 +1,5 @@
from __future__ import absolute_import
+from typing import *
# This file needs to be different from cache.py because cache.py
# cannot import anything from zerver.models or we'd have an import
@@ -75,7 +76,7 @@ cache_fillers = {
def fill_remote_cache(cache):
remote_cache_time_start = get_remote_cache_time()
remote_cache_requests_start = get_remote_cache_requests()
-items_for_remote_cache = {}
+items_for_remote_cache = {} # type: Dict[str, Any]
(objects, items_filler, timeout, batch_size) = cache_fillers[cache]
count = 0
for obj in objects():

View File

@@ -29,7 +29,7 @@ class TimeTrackingConnection(connection):
"""A psycopg2 connection class that uses TimeTrackingCursors."""
def __init__(self, *args, **kwargs):
-self.queries = []
+self.queries = [] # type: List[Dict[str, str]]
super(TimeTrackingConnection, self).__init__(*args, **kwargs)
def cursor(self, name=None):

View File

@@ -1,4 +1,5 @@
from __future__ import absolute_import
+from typing import *
from collections import defaultdict
import datetime
@@ -40,8 +41,8 @@ def gather_hot_conversations(user_profile, stream_messages):
# Returns a list of dictionaries containing the templating
# information for each hot conversation.
-conversation_length = defaultdict(int)
-conversation_diversity = defaultdict(set)
+conversation_length = defaultdict(int) # type: Dict[Tuple[int, str], int]
+conversation_diversity = defaultdict(set) # type: Dict[Tuple[int, str], Set[str]]
for user_message in stream_messages:
if not user_message.message.sent_by_human():
# Don't include automated messages in the count.
@@ -99,7 +100,7 @@ def gather_new_users(user_profile, threshold):
# Gather information on users in the realm who have recently
# joined.
if user_profile.realm.domain == "mit.edu":
-new_users = []
+new_users = [] # type: List[UserProfile]
else:
new_users = list(UserProfile.objects.filter(
realm=user_profile.realm, date_joined__gt=threshold,
@@ -110,7 +111,7 @@ def gather_new_users(user_profile, threshold):
def gather_new_streams(user_profile, threshold):
if user_profile.realm.domain == "mit.edu":
-new_streams = []
+new_streams = [] # type: List[Stream]
else:
new_streams = list(get_active_streams(user_profile.realm).filter(
invite_only=False, date_created__gt=threshold))

View File

@@ -235,7 +235,7 @@ def find_emailgateway_recipient(message):
# it is more accurate, so try to find the most-accurate
# recipient list in descending priority order
recipient_headers = ["X-Gm-Original-To", "Delivered-To", "To"]
-recipients = []
+recipients = [] # type: List[str]
for recipient_header in recipient_headers:
r = message.get_all(recipient_header, None)
if r:

View File

@@ -1,4 +1,5 @@
from __future__ import absolute_import
+from typing import *
from django.conf import settings
from django.utils.timezone import now
@@ -58,8 +59,8 @@ class ClientDescriptor(object):
self.user_profile_id = user_profile_id
self.user_profile_email = user_profile_email
self.realm_id = realm_id
-self.current_handler_id = None
-self.current_client_name = None
+self.current_handler_id = None # type: int
+self.current_client_name = None # type: str
self.event_queue = event_queue
self.queue_timeout = lifespan_secs
self.event_types = event_types
@@ -192,7 +193,7 @@ class ClientDescriptor(object):
do_gc_event_queues([self.event_queue.id], [self.user_profile_id],
[self.realm_id])
-descriptors_by_handler_id = {}
+descriptors_by_handler_id = {} # type: Dict[int, ClientDescriptor]
def get_descriptor_by_handler_id(handler_id):
return descriptors_by_handler_id.get(handler_id)
@@ -213,10 +214,11 @@ def compute_full_event_type(event):
class EventQueue(object):
def __init__(self, id):
-self.queue = deque()
+# type: (Any) -> None
+self.queue = deque() # type: deque[Dict[str, str]]
self.next_event_id = 0
self.id = id
-self.virtual_events = {}
+self.virtual_events = {} # type: Dict[str, Dict[str, str]]
def to_dict(self):
# If you add a new key to this dict, make sure you add appropriate
@@ -296,7 +298,7 @@ class EventQueue(object):
return contents
# maps queue ids to client descriptors
-clients = {} # type: Dict[int, ClientDescriptor]
+clients = {} # type: Dict[str, ClientDescriptor]
# maps user id to list of client descriptors
user_clients = {} # type: Dict[int, List[ClientDescriptor]]
# maps realm id to list of client descriptors with all_public_streams=True
@@ -432,7 +434,7 @@ def setup_event_queue():
atexit.register(dump_event_queues)
# Make sure we dump event queues even if we exit via signal
signal.signal(signal.SIGTERM, lambda signum, stack: sys.exit(1))
-tornado.autoreload.add_reload_hook(dump_event_queues)
+tornado.autoreload.add_reload_hook(dump_event_queues) # type: ignore # TODO: Fix missing tornado.autoreload stub
try:
os.rename(settings.JSON_PERSISTENT_QUEUE_FILENAME, "/var/tmp/event_queues.json.last")
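
Two further conventions appear in this file. A method's signature annotation sits in a # type: comment on the first line of its body with self omitted, and a call into a library that ships no stubs gets a line-scoped # type: ignore with a TODO recording why. A sketch of the first convention, with an illustrative class name; it uses typing.Deque, which presumably did not yet exist when the commit wrote deque[...] directly in the comment:

    from collections import deque
    from typing import Deque, Dict

    class EventQueueSketch(object):
        def __init__(self, id):
            # type: (str) -> None
            # 'self' is omitted from the signature comment.
            self.queue = deque()  # type: Deque[Dict[str, str]]
            self.next_event_id = 0
            self.id = id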

View File

@@ -1,8 +1,9 @@
import logging
from zerver.middleware import async_request_restart
+from typing import *
current_handler_id = 0
-handlers = {}
+handlers = {} # type: Dict[int, Any] # TODO: Should be AsyncDjangoHandler but we don't import runtornado.py.
def get_handler_by_id(handler_id):
return handlers[handler_id]

View File

@@ -1,4 +1,6 @@
from __future__ import print_function
+from typing import *
from confirmation.models import Confirmation
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
@@ -7,7 +9,7 @@ from zerver.decorator import statsd_increment, uses_mandrill
from zerver.models import Recipient, ScheduledJob, UserMessage, \
Stream, get_display_recipient, get_user_profile_by_email, \
get_user_profile_by_id, receives_offline_notifications, \
-get_context_for_message
+get_context_for_message, Message
import datetime
import re
@@ -58,7 +60,7 @@ def build_message_list(user_profile, messages):
The messages are collapsed into per-recipient and per-sender blocks, like
our web interface
"""
-messages_to_render = []
+messages_to_render = [] # type: List[Dict[str, Any]]
def sender_string(message):
sender = ''
@@ -324,7 +326,7 @@ def handle_missedmessage_emails(user_profile_id, missed_email_events):
if not messages:
return
-messages_by_recipient_subject = defaultdict(list)
+messages_by_recipient_subject = defaultdict(list) # type: Dict[Tuple[int, str], List[Message]]
for msg in messages:
messages_by_recipient_subject[(msg.recipient_id, msg.subject)].append(msg)
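
Note the import change above: Message joins the runtime import list solely so the name resolves when mypy reads the # type comment that mentions it. Type comments are invisible to the interpreter, but every name inside one must still be in scope for the checker. A reduced illustration with a stand-in class:

    from collections import defaultdict
    from typing import Dict, List, Tuple

    class Message(object):
        # Stand-in for zerver.models.Message, which the real file imports.
        pass

    # The comment is never executed, yet mypy resolves 'Message' here.
    by_subject = defaultdict(list)  # type: Dict[Tuple[int, str], List[Message]]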

View File

@@ -1,5 +1,6 @@
from __future__ import absolute_import
from __future__ import print_function
+from typing import *
import os
import pty
@@ -7,7 +8,8 @@ import sys
import errno
def run_parallel(job, data, threads=6):
-pids = {}
+# type: (Any, Iterable[Any], int) -> Generator[Tuple[int, Any], None, None]
+pids = {} # type: Dict[int, Any]
def wait_for_one():
while True:
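
run_parallel's new signature comment shows how generators are spelled: Generator[YieldType, SendType, ReturnType], so a generator that only yields results puts None in the last two slots. A minimal runnable sketch of the same shape, with illustrative names:

    from typing import Generator, Tuple

    def finished_jobs(n):
        # type: (int) -> Generator[Tuple[int, int], None, None]
        # Only yields; nothing is sent in and nothing is returned,
        # hence the two None parameters.
        for i in range(n):
            yield (i, i * i)

    for status in finished_jobs(3):
        print(status)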

View File

@@ -11,6 +11,7 @@ import atexit
from collections import defaultdict
from zerver.lib.utils import statsd
+from typing import *
# This simple queuing library doesn't expose much of the power of
# rabbitmq/pika's queuing system; its purpose is to just provide an
@@ -19,9 +20,9 @@ from zerver.lib.utils import statsd
class SimpleQueueClient(object):
def __init__(self):
self.log = logging.getLogger('zulip.queue')
-self.queues = set()
-self.channel = None
-self.consumers = defaultdict(set)
+self.queues = set() # type: Set[str]
+self.channel = None # type: Any
+self.consumers = defaultdict(set) # type: Dict[str, Set[Any]]
self._connect()
def _connect(self):
@@ -156,7 +157,7 @@ class TornadoQueueClient(SimpleQueueClient):
# https://pika.readthedocs.org/en/0.9.8/examples/asynchronous_consumer_example.html
def __init__(self):
super(TornadoQueueClient, self).__init__()
-self._on_open_cbs = []
+self._on_open_cbs = [] # type: List[Callable[[], None]]
def _connect(self, on_open_cb = None):
self.log.info("Beginning TornadoQueueClient connection")
@@ -230,7 +231,7 @@ class TornadoQueueClient(SimpleQueueClient):
lambda: self.channel.basic_consume(wrapped_consumer, queue=queue_name,
consumer_tag=self._generate_ctag(queue_name)))
-queue_client = None
+queue_client = None # type: SimpleQueueClient
def get_queue_client():
global queue_client
if queue_client is None:
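
queue_client is initialized to None yet annotated as SimpleQueueClient rather than Optional[SimpleQueueClient]: the mypy of early 2016 did not yet enforce strict Optional checking by default, so None passed as a member of every type. Under a modern checker the same lazy-singleton pattern would be written roughly as follows (a sketch, not the commit's code):

    from typing import Optional

    class SimpleQueueClient(object):
        pass

    queue_client = None  # type: Optional[SimpleQueueClient]

    def get_queue_client():
        # type: () -> SimpleQueueClient
        # The None check narrows the type, so the declared return
        # type is satisfied.
        global queue_client
        if queue_client is None:
            queue_client = SimpleQueueClient()
        return queue_client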

View File

@@ -1,4 +1,5 @@
from __future__ import absolute_import
+from typing import *
from django.conf import settings
from django.utils.importlib import import_module
@@ -79,7 +80,7 @@ class SocketConnection(sockjs.tornado.SockJSConnection):
self.authenticated = False
self.session.user_profile = None
-self.close_info = None
+self.close_info = None # type: CloseErrorInfo
self.did_close = False
try:
@@ -226,7 +227,7 @@ class SocketConnection(sockjs.tornado.SockJSConnection):
self.did_close = True
def fake_message_sender(event):
-log_data = dict()
+log_data = dict() # type: Dict[str, Any]
record_request_start_data(log_data)
req = event['request']

View File

@@ -166,7 +166,7 @@ class DummyObject(object):
class DummyTornadoRequest(object):
def __init__(self):
self.connection = DummyObject()
-self.connection.stream = DummyStream()
+self.connection.stream = DummyStream() # type: ignore # monkey-patching here
class DummyHandler(object):
def __init__(self, assert_callback):
@@ -202,7 +202,7 @@ class POSTRequestMock(object):
self.user = user_profile
self._tornado_handler = DummyHandler(assert_callback)
self.session = DummySession()
-self._log_data = {}
+self._log_data = {} # type: Dict[str, Any]
self.META = {'PATH_INFO': 'test'}
class AuthedTestCase(TestCase):
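
The # type: ignore above suppresses mypy on a single line: the test doubles deliberately attach attributes that the declared classes never define. A reduced version of the pattern, with hypothetical class names:

    class DummyStream(object):
        pass

    class DummyConnection(object):
        pass

    conn = DummyConnection()
    # mypy would flag an unknown attribute here; the trailing comment
    # scopes the suppression to exactly this deliberate monkey-patch.
    conn.stream = DummyStream()  # type: ignore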

View File

@@ -1,4 +1,5 @@
from __future__ import absolute_import
+from typing import *
import sys
import time
@@ -33,8 +34,8 @@ def timeout(timeout, func, *args, **kwargs):
class TimeoutThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
-self.result = None
-self.exc_info = None
+self.result = None # type: Any
+self.exc_info = None # type: Tuple[type, BaseException, Any]
# Don't block the whole program from exiting
# if this is the only thread left.
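
exc_info is annotated with the shape of sys.exc_info(): the exception class, the exception instance, and the traceback (Any here, standing in for the traceback object). A sketch of capturing it under that annotation, with Optional added for a modern checker:

    import sys
    from typing import Any, Optional, Tuple

    exc_info = None  # type: Optional[Tuple[Any, Any, Any]]

    try:
        1 / 0
    except ZeroDivisionError:
        # sys.exc_info() returns (exception class, instance, traceback).
        exc_info = sys.exc_info()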

View File

@@ -1,5 +1,6 @@
from __future__ import absolute_import
from __future__ import division
+from typing import *
import logging
import time
@@ -37,7 +38,7 @@ except:
class InstrumentedPoll(object):
def __init__(self):
self._underlying = orig_poll_impl()
-self._times = []
+self._times = [] # type: List[Tuple[float, float]]
self._last_print = 0.0
# Python won't let us subclass e.g. select.epoll, so instead

View File

@@ -11,7 +11,7 @@ class SourceMap(object):
def __init__(self, sourcemap_dir):
self._dir = sourcemap_dir
-self._indices = {}
+self._indices = {} # type: Dict[str, sourcemap.SourceMapDecoder]
def _index_for(self, minified_src):
'''Return the source map index for minified_src, loading it if not

View File

@@ -59,7 +59,7 @@ class QueueProcessingWorker(object):
queue_name = None
def __init__(self):
-self.q = None
+self.q = None # type: SimpleQueueClient
if self.queue_name is None:
raise WorkerDeclarationException("Queue worker declared without queue_name")