Fix annotations related to make_safe_digest and hashes.

Author: Eklavya Sharma
Date: 2016-06-12 17:52:20 +05:30
Committed by: Tim Abbott
Parent: 9020177418
Commit: 2841aa642d
4 changed files with 15 additions and 14 deletions

View File

@@ -9,7 +9,7 @@ if False:
     from six import text_type

 def gravatar_hash(email):
-    # type: (str) -> str
+    # type: (text_type) -> text_type
     """Compute the Gravatar hash for an email address."""
     # Non-ASCII characters aren't permitted by the currently active e-mail
     # RFCs. However, the IETF has published https://tools.ietf.org/html/rfc4952,
@@ -19,7 +19,7 @@ def gravatar_hash(email):
     return make_safe_digest(email.lower(), hashlib.md5)

 def user_avatar_hash(email):
-    # type: (str) -> str
+    # type: (text_type) -> text_type
     # Salting the user_key may be overkill, but it prevents us from
     # basically mimicking Gravatar's hashing scheme, which could lead
     # to some abuse scenarios like folks using us as a free Gravatar
@@ -35,17 +35,17 @@ def avatar_url(user_profile):
     )

 def get_avatar_url(avatar_source, email):
-    # type: (str, str) -> str
-    if avatar_source == 'U':
+    # type: (text_type, text_type) -> text_type
+    if avatar_source == u'U':
         hash_key = user_avatar_hash(email)
         if settings.LOCAL_UPLOADS_DIR is not None:
             # ?x=x allows templates to append additional parameters with &s
-            return "/user_avatars/%s.png?x=x" % (hash_key)
+            return u"/user_avatars/%s.png?x=x" % (hash_key)
         else:
             bucket = settings.S3_AVATAR_BUCKET
-            return "https://%s.s3.amazonaws.com/%s?x=x" % (bucket, hash_key)
+            return u"https://%s.s3.amazonaws.com/%s?x=x" % (bucket, hash_key)
     elif settings.ENABLE_GRAVATAR:
         hash_key = gravatar_hash(email)
-        return "https://secure.gravatar.com/avatar/%s?d=identicon" % (hash_key,)
+        return u"https://secure.gravatar.com/avatar/%s?d=identicon" % (hash_key,)
     else:
         return settings.DEFAULT_AVATAR_URI+'?x=x'
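Side note on why the text_type annotations above hold: both avatar hash helpers lower-case the email and route it through make_safe_digest(), which encodes to UTF-8 before hashing. A minimal standalone sketch of the Gravatar case (not part of this commit; gravatar_hash_sketch is an illustrative name):

import hashlib

def gravatar_hash_sketch(email):
    # type: (text_type) -> str
    # md5 (like sha1) only accepts bytes, so the unicode email is encoded
    # to UTF-8 first; hexdigest() is always a 32-character ASCII hex string.
    return hashlib.md5(email.lower().encode('utf-8')).hexdigest()

print(gravatar_hash_sketch(u'User@example.com'))  # same digest as u'user@example.com'

In the real code the digest is additionally wrapped in force_text() (see the make_safe_digest() hunk further down), which is what lets the return type be text_type on Python 2 as well as Python 3.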

View File

@@ -27,7 +27,7 @@ import os
 import re
 import six
 from PIL import Image, ImageOps
-from six import text_type
+from six import binary_type, text_type
 from six.moves import cStringIO as StringIO
 import random
 import logging
@@ -102,7 +102,7 @@ class ZulipUploadBackend(object):
 ### S3

 def get_bucket(conn, bucket_name):
-    # type: (S3Connection, str) -> Bucket
+    # type: (S3Connection, text_type) -> Bucket
     # Calling get_bucket() with validate=True can apparently lead
     # to expensive S3 bills:
     # http://www.appneta.com/blog/s3-list-get-bucket-default/
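For context on the comment above: boto's get_bucket() validates the bucket with an extra listing request by default, which the linked post flags as a cost trap. A rough sketch of the pattern, assuming the boto 2 API (names here are illustrative, not the exact Zulip code):

from boto.s3.connection import S3Connection

def get_bucket_sketch(conn, bucket_name):
    # type: (S3Connection, text_type) -> Bucket
    # validate=False skips the extra listing request boto would otherwise
    # issue just to confirm the bucket exists.
    return conn.get_bucket(bucket_name, validate=False)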
@@ -121,7 +121,7 @@ def upload_image_to_s3(
         user_profile,
         contents,
     ):
-    # type: (str, str, str, UserProfile, str) -> None
+    # type: (text_type, text_type, text_type, UserProfile, text_type) -> None

     conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
     bucket = get_bucket(conn, bucket_name)
@@ -234,7 +234,7 @@ def mkdirs(path):
         os.makedirs(dirname)

 def write_local_file(type, path, file_data):
-    # type: (str, str, str) -> None
+    # type: (text_type, text_type, binary_type) -> None
     file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path)
     mkdirs(file_path)
     with open(file_path, 'wb') as f:
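write_local_file() is the one annotation here where binary_type (bytes) is the right choice rather than text_type: the target file is opened with 'wb', so the payload must already be bytes. A standalone sketch under that assumption (the directory names are made up):

import os

def write_local_file_sketch(base_dir, subdir, path, file_data):
    # type: (str, str, str, bytes) -> None
    # Path components are text; the payload is bytes because the file is
    # opened in binary mode.
    file_path = os.path.join(base_dir, subdir, path)
    dirname = os.path.dirname(file_path)
    if not os.path.isdir(dirname):
        os.makedirs(dirname)
    with open(file_path, 'wb') as f:
        f.write(file_data)

write_local_file_sketch('/tmp/zulip-uploads', 'avatars', 'example.png', b'\x89PNG')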

View File

@@ -11,6 +11,7 @@ from time import sleep
 from django.conf import settings
 from six.moves import range
+from zerver.lib.str_utils import force_text

 T = TypeVar('T')
@@ -85,13 +86,13 @@ def run_in_batches(all_list, batch_size, callback, sleep_time = 0, logger = None
         sleep(sleep_time)

 def make_safe_digest(string, hash_func=hashlib.sha1):
-    # type: (text_type, Callable[[binary_type], Any]) -> str
+    # type: (text_type, Callable[[binary_type], Any]) -> text_type
     """
     return a hex digest of `string`.
     """
     # hashlib.sha1, md5, etc. expect bytes, so non-ASCII strings must
     # be encoded.
-    return hash_func(string.encode('utf-8')).hexdigest()
+    return force_text(hash_func(string.encode('utf-8')).hexdigest())

 def log_statsd_event(name):
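The force_text() wrapper is what makes the new -> text_type return annotation hold on Python 2 as well, where hexdigest() returns a byte str. A minimal sketch of the same idea without the zerver.lib.str_utils helper (assuming only hashlib; the sketch name is illustrative):

import hashlib

def make_safe_digest_sketch(string, hash_func=hashlib.sha1):
    # type: (text_type, Callable[[binary_type], Any]) -> text_type
    # Hash functions want bytes, so encode the input; hexdigest() is
    # ASCII-only, so decoding it back to text is always safe.
    digest = hash_func(string.encode('utf-8')).hexdigest()
    if isinstance(digest, bytes):  # Python 2: hexdigest() returns a byte str
        digest = digest.decode('ascii')
    return digest

print(make_safe_digest_sketch(u'zulip'))  # 40-character sha1 hex digest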

View File

@@ -1312,7 +1312,7 @@ class Huddle(models.Model):
     huddle_hash = models.CharField(max_length=40, db_index=True, unique=True)

 def get_huddle_hash(id_list):
-    # type: (List[int]) -> str
+    # type: (List[int]) -> text_type
     id_list = sorted(set(id_list))
     hash_key = ",".join(str(x) for x in id_list)
     return make_safe_digest(hash_key)
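A sanity check on max_length=40 above: make_safe_digest() defaults to sha1, whose hex digest is exactly 40 characters, so the huddle hash always fits the column, and sorting the deduplicated IDs makes the hash independent of argument order. A standalone sketch of the same computation (without the make_safe_digest wrapper):

import hashlib

def get_huddle_hash_sketch(id_list):
    # type: (List[int]) -> str
    # Deduplicate and sort so the same set of users always hashes the same way.
    id_list = sorted(set(id_list))
    hash_key = ",".join(str(x) for x in id_list)
    return hashlib.sha1(hash_key.encode('utf-8')).hexdigest()

assert len(get_huddle_hash_sketch([3, 1, 2, 2])) == 40
assert get_huddle_hash_sketch([3, 1, 2]) == get_huddle_hash_sketch([2, 3, 1])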