mypy: strict optional fixes.

commit 1761a3b1c1 (parent 2f227a97d3)
Author:    Christian Hudon
Date:      2017-05-24 12:28:26 -07:00
Committer: Tim Abbott

4 changed files with 7 additions and 4 deletions

@@ -103,11 +103,11 @@ def der_encode_octet_string(val):
     return der_encode_tlv(0x04, val)
 
 def der_encode_sequence(tlvs, tagged=True):
-    # type: (List[bytes], Optional[bool]) -> bytes
+    # type: (List[Optional[bytes]], Optional[bool]) -> bytes
     body = []
     for i, tlv in enumerate(tlvs):
         # Missing optional elements represented as None.
-        if not tlv:
+        if tlv is None:
             continue
         if tagged:
             # Assume kerberos-style explicit tagging of components.
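
Under mypy's strict-optional checking, a value typed Optional[bytes] must be narrowed to bytes before it is used as bytes; an explicit `is None` check does that unambiguously, and it also matches the stated convention that missing elements are None (an empty byte string is falsy but not missing). A minimal standalone sketch of the pattern, with hypothetical names and not the Zulip code, assuming mypy is run with --strict-optional:

from typing import List, Optional

def join_present(items):
    # type: (List[Optional[bytes]]) -> bytes
    present = []  # type: List[bytes]
    for item in items:
        # Missing elements are represented as None; skip them explicitly.
        if item is None:
            continue
        # After the `is None` check, mypy treats item as bytes, not Optional[bytes].
        present.append(item)
    return b''.join(present)

print(join_present([b'a', None, b'c']))  # b'ac'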

@@ -203,6 +203,7 @@ class MessageDict(object):
         if last_edit_time is not None:
             obj['last_edit_timestamp'] = datetime_to_timestamp(last_edit_time)
+            assert edit_history is not None
             obj['edit_history'] = ujson.loads(edit_history)
         if apply_markdown:
@@ -220,6 +221,7 @@ class MessageDict(object):
         # TODO: see #1379 to eliminate bugdown dependencies
         message = Message.objects.select_related().get(id=message_id)
+        assert message is not None  # Hint for mypy.
         # It's unfortunate that we need to have side effects on the message
         # in some cases.
         rendered_content = render_markdown(message, content, realm=message.get_realm())
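
Both hunks above use the same idiom: `assert x is not None` both documents the invariant and lets mypy narrow an Optional value to its non-None type on the lines that follow. A minimal sketch with hypothetical names (standard-library json stands in for ujson here), again assuming --strict-optional:

from typing import Any, List, Optional
import json

def parse_edit_history(edit_history):
    # type: (Optional[str]) -> List[Any]
    # Without the assert, strict-optional mypy rejects the json.loads() call,
    # because edit_history may be None.  At runtime the assert raises
    # AssertionError if that invariant is ever violated.
    assert edit_history is not None  # Hint for mypy: Optional[str] -> str below.
    return json.loads(edit_history)

print(parse_edit_history('[{"timestamp": 1495654106}]'))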

@@ -163,6 +163,7 @@ def query_chunker(queries, id_collector=None, chunk_size=1000, db_chunk_size=Non
         q = q.order_by('id')
         min_id = -1
         while True:
+            assert db_chunk_size is not None  # Hint for mypy, but also workaround for mypy bug #3442.
             rows = list(q.filter(id__gt=min_id)[0:db_chunk_size])
             if len(rows) == 0:
                 break
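
A parameter that defaults to None is Optional as far as mypy is concerned, so it has to be shown to be non-None where an int is required; per the comment in the diff, the assert inside the loop restates that fact as a hint (and as a workaround for the mypy issue the comment cites, #3442). A standalone sketch of the shape of this code, with hypothetical names and simplified chunking logic:

from typing import List, Optional

def chunk_rows(rows, db_chunk_size=None):
    # type: (List[int], Optional[int]) -> List[List[int]]
    if db_chunk_size is None:
        db_chunk_size = 100
    chunks = []  # type: List[List[int]]
    min_index = 0
    while True:
        assert db_chunk_size is not None  # Hint for mypy inside the loop.
        chunk = rows[min_index:min_index + db_chunk_size]
        if len(chunk) == 0:
            break
        chunks.append(chunk)
        min_index += db_chunk_size
    return chunks

print(chunk_rows(list(range(5)), db_chunk_size=2))  # [[0, 1], [2, 3], [4]]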

@@ -192,9 +192,9 @@ def get_commits_content(commits_data, is_truncated=False):
     commits_content = u''
     for commit in commits_data[:COMMITS_LIMIT]:
         commits_content += COMMIT_ROW_TEMPLATE.format(
-            commit_short_sha=get_short_sha(commit.get('sha')),
+            commit_short_sha=get_short_sha(commit['sha']),
             commit_url=commit.get('url'),
-            commit_msg=commit.get('message').partition('\n')[0]
+            commit_msg=commit['message'].partition('\n')[0]
         )
 
     if len(commits_data) > COMMITS_LIMIT:
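
This change works because, under strict optional, Dict[str, str].get(key) is typed as returning Optional[str], so calling .partition() on the result is an error, while indexing with commit['message'] keeps the type as plain str (and raises KeyError rather than silently yielding None if the key is missing). A minimal standalone sketch, not the webhook code itself:

from typing import Dict

def first_line(commit):
    # type: (Dict[str, str]) -> str
    # commit.get('message').partition('\n')[0] is rejected by strict-optional
    # mypy: .get() may return None.  Indexing requires the key to be present
    # and keeps the result typed as str.
    return commit['message'].partition('\n')[0]

print(first_line({'sha': 'abc123f', 'message': 'Fix a bug\n\nLonger description.'}))  # Fix a bug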