Mirror of https://github.com/zulip/zulip.git (synced 2025-10-23 16:14:02 +00:00)

Compare commits: shared-0.0...3.1 (8 commits)
Commits:
- 31f7006309
- d8b966e528
- 444359ebd3
- c78bdd6330
- f4e02f0e80
- 77234ef40b
- 00f9cd672b
- c33a7dfff4
@@ -10,7 +10,7 @@ def clear_duplicate_counts(apps: StateApps, schema_editor: DatabaseSchemaEditor)
     The backstory is that Django's unique_together indexes do not properly
     handle the subgroup=None corner case (allowing duplicate rows that have a
     subgroup of None), which meant that in race conditions, rather than updating
-    an existing row for the property/realm/time with subgroup=None, Django would
+    an existing row for the property/(realm, stream, user)/time with subgroup=None, Django would
     create a duplicate row.

     In the next migration, we'll add a proper constraint to fix this bug, but
@@ -20,26 +20,32 @@ def clear_duplicate_counts(apps: StateApps, schema_editor: DatabaseSchemaEditor)
     this means deleting the extra rows, but for LoggingCountStat objects, we need to
     additionally combine the sums.
     """
-    RealmCount = apps.get_model('analytics', 'RealmCount')
+    count_tables = dict(realm=apps.get_model('analytics', 'RealmCount'),
+                        user=apps.get_model('analytics', 'UserCount'),
+                        stream=apps.get_model('analytics', 'StreamCount'),
+                        installation=apps.get_model('analytics', 'InstallationCount'))

-    realm_counts = RealmCount.objects.filter(subgroup=None).values(
-        'realm_id', 'property', 'end_time').annotate(
-        Count('id'), Sum('value')).filter(id__count__gt=1)
+    for name, count_table in count_tables.items():
+        value = [name, 'property', 'end_time']
+        if name == 'installation':
+            value = ['property', 'end_time']
+        counts = count_table.objects.filter(subgroup=None).values(*value).annotate(
+            Count('id'), Sum('value')).filter(id__count__gt=1)

-    for realm_count in realm_counts:
-        realm_count.pop('id__count')
-        total_value = realm_count.pop('value__sum')
-        duplicate_counts = list(RealmCount.objects.filter(**realm_count))
-        first_count = duplicate_counts[0]
-        if realm_count['property'] in ["invites_sent::day", "active_users_log:is_bot:day"]:
-            # For LoggingCountStat objects, the right fix is to combine the totals;
-            # for other CountStat objects, we expect the duplicates to have the same value.
-            # And so all we need to do is delete them.
-            first_count.value = total_value
-            first_count.save()
-        to_cleanup = duplicate_counts[1:]
-        for duplicate_count in to_cleanup:
-            duplicate_count.delete()
+        for count in counts:
+            count.pop('id__count')
+            total_value = count.pop('value__sum')
+            duplicate_counts = list(count_table.objects.filter(**count))
+            first_count = duplicate_counts[0]
+            if count['property'] in ["invites_sent::day", "active_users_log:is_bot:day"]:
+                # For LoggingCountStat objects, the right fix is to combine the totals;
+                # for other CountStat objects, we expect the duplicates to have the same value.
+                # And so all we need to do is delete them.
+                first_count.value = total_value
+                first_count.save()
+            to_cleanup = duplicate_counts[1:]
+            for duplicate_count in to_cleanup:
+                duplicate_count.delete()


 class Migration(migrations.Migration):
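The docstring above notes that the follow-up migration adds a proper constraint for the `subgroup=None` case. As a hedged illustration only (not the actual follow-up migration; the app, model, and constraint names below are made up), such a rule can be expressed in Django as a conditional `UniqueConstraint`:

```python
# Hypothetical sketch: a conditional unique constraint covering the
# subgroup=None rows that unique_together fails to deduplicate.
# App, model, and constraint names here are illustrative, not Zulip's.
from django.db import migrations, models
from django.db.models import Q


class Migration(migrations.Migration):
    dependencies = [('analytics', '0015_clear_duplicate_counts')]

    operations = [
        migrations.AddConstraint(
            model_name='realmcount',
            constraint=models.UniqueConstraint(
                fields=['realm', 'property', 'end_time'],
                condition=Q(subgroup__isnull=True),
                name='unique_realm_count_null_subgroup',
            ),
        ),
    ]
```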
@@ -7,6 +7,26 @@ All notable changes to the Zulip server are documented in this file.
 This section lists notable unreleased changes; it is generally updated
 in bursts.

+### 3.1 -- July 30, 2020
+
+- Removed unused `short_name` field from the User model. This field
+  had no purpose and could leak the local part of email addresses
+  when email address visibility was restricted.
+- Fixed a bug where loading spinners would sometimes not be displayed.
+- Fixed incoming email gateway exception with unstructured headers.
+- Fixed AlertWords not being included in data import/export.
+- Fixed Twitter previews not including a clear link to the tweet.
+- Fixed compose box incorrectly opening after uploading a file in a
+  message edit widget.
+- Fixed exception in SAML integration with encrypted assertions.
+- Fixed an analytics migration bug that could cause upgrading from 2.x
+  releases to fail.
+- Added a Thinkst Canary integration (and renamed the old one, which
+  was actually an integration for canarytokens.org).
+- Reformatted the frontend codebase using prettier. This change was
+  included in this maintenance release to ensure backporting patches
+  from master remains easy.
+
 ### 3.0 -- July 16, 2020

 #### Highlights
@@ -339,11 +339,11 @@ working correctly.
 ```
 apt remove upstart -y
 /home/zulip/deployments/current/scripts/zulip-puppet-apply -f
-pg_dropcluster 9.5 main --stop
+pg_dropcluster 11 main --stop
 systemctl stop postgresql
-pg_upgradecluster -m upgrade 9.3 main
-pg_dropcluster 9.3 main
-apt remove postgresql-9.3
+pg_upgradecluster -m upgrade 9.6 main
+pg_dropcluster 9.6 main
+apt remove postgresql-9.6
 systemctl start postgresql
 service memcached restart
 ```
@@ -228,7 +228,7 @@ exports.setup_upload = function (config) {
     }
     const split_uri = uri.split("/");
    const filename = split_uri[split_uri.length - 1];
-    if (!compose_state.composing()) {
+    if (config.mode === "compose" && !compose_state.composing()) {
        compose_actions.start("stream");
    }
    const absolute_uri = exports.make_upload_absolute(uri);
@@ -2133,8 +2133,6 @@ div.topic_edit_spinner .loading_indicator_spinner {
 }

 #do_delete_message_spinner {
-    display: none;
-    width: 0;
     margin: 0 auto;
 }

@@ -1,6 +1,6 @@
 import os

-ZULIP_VERSION = "4.0-dev+git"
+ZULIP_VERSION = "3.1"
 # Add information on number of commits and commit hash to version, if available
 zulip_git_version_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'zulip-git-version')
 if os.path.exists(zulip_git_version_file):
@@ -10,7 +10,7 @@ if os.path.exists(zulip_git_version_file):
     ZULIP_VERSION = version

 LATEST_MAJOR_VERSION = "3.0"
-LATEST_RELEASE_VERSION = "3.0"
+LATEST_RELEASE_VERSION = "3.1"
 LATEST_RELEASE_ANNOUNCEMENT = "https://blog.zulip.org/2020/07/16/zulip-3-0-released/"
 LATEST_DESKTOP_VERSION = "5.3.0"

@@ -1,5 +1,6 @@
 import logging
 import re
+from email.headerregistry import AddressHeader
 from email.message import EmailMessage
 from typing import Dict, List, Optional, Tuple

@@ -310,9 +311,14 @@ def find_emailgateway_recipient(message: EmailMessage) -> str:

     for header_name in recipient_headers:
         for header_value in message.get_all(header_name, []):
-            for addr in header_value.addresses:
-                if match_email_re.match(addr.addr_spec):
-                    return addr.addr_spec
+            if isinstance(header_value, AddressHeader):
+                emails = [addr.addr_spec for addr in header_value.addresses]
+            else:
+                emails = [str(header_value)]
+
+            for email in emails:
+                if match_email_re.match(email):
+                    return email

     raise ZulipEmailForwardError("Missing recipient in mirror email")
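For context on why the `isinstance` check above matters: with Python's modern email policy, registered address headers (To, CC, etc.) parse into `AddressHeader` objects exposing `.addresses`, while unregistered delivery headers such as Envelope-To come back as unstructured headers with no `.addresses` attribute, which is what broke the old loop. A minimal standalone sketch (not Zulip code; addresses are placeholders):

```python
# Minimal standalone sketch: To parses into an AddressHeader, while an
# unregistered header like Envelope-To falls back to an unstructured
# header that has no .addresses attribute.
from email.headerregistry import AddressHeader
from email.message import EmailMessage

msg = EmailMessage()
msg['To'] = "foo-mailinglist@example.com"
msg['Envelope-To'] = "denmark.token@example.com"

for name in ("To", "Envelope-To"):
    header_value = msg[name]
    if isinstance(header_value, AddressHeader):
        print(name, [addr.addr_spec for addr in header_value.addresses])
    else:
        print(name, str(header_value))
```

The test below exercises exactly this situation by putting the stream address on an Envelope-To header.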
@@ -232,6 +232,36 @@ class TestStreamEmailMessagesSuccess(ZulipTestCase):
         self.assertEqual(get_display_recipient(message.recipient), stream.name)
         self.assertEqual(message.topic_name(), incoming_valid_message['Subject'])

+    # Test receiving an email with the address on an UnstructuredHeader
+    # (e.g. Envelope-To) instead of an AddressHeader (e.g. To).
+    # https://github.com/zulip/zulip/issues/15864
+    def test_receive_stream_email_messages_other_header_success(self) -> None:
+        user_profile = self.example_user('hamlet')
+        self.login_user(user_profile)
+        self.subscribe(user_profile, "Denmark")
+        stream = get_stream("Denmark", user_profile.realm)
+
+        stream_to_address = encode_email_address(stream)
+
+        incoming_valid_message = EmailMessage()
+        incoming_valid_message.set_content('TestStreamEmailMessages Body')
+
+        incoming_valid_message['Subject'] = 'TestStreamEmailMessages Subject'
+        incoming_valid_message['From'] = self.example_email('hamlet')
+        # Simulate a mailing list
+        incoming_valid_message['To'] = "foo-mailinglist@example.com"
+        incoming_valid_message['Envelope-To'] = stream_to_address
+        incoming_valid_message['Reply-to'] = self.example_email('othello')
+
+        process_message(incoming_valid_message)
+
+        # Hamlet is subscribed to this stream so should see the email message from Othello.
+        message = most_recent_message(user_profile)
+
+        self.assertEqual(message.content, "TestStreamEmailMessages Body")
+        self.assertEqual(get_display_recipient(message.recipient), stream.name)
+        self.assertEqual(message.topic_name(), incoming_valid_message['Subject'])
+
     def test_receive_stream_email_messages_blank_subject_success(self) -> None:
         user_profile = self.example_user('hamlet')
         self.login_user(user_profile)
@@ -38,6 +38,7 @@ from jwt.exceptions import PyJWTError
 from lxml.etree import XMLSyntaxError
 from onelogin.saml2.errors import OneLogin_Saml2_Error
 from onelogin.saml2.response import OneLogin_Saml2_Response
+from onelogin.saml2.settings import OneLogin_Saml2_Settings
 from requests import HTTPError
 from social_core.backends.apple import AppleIdAuth
 from social_core.backends.azuread import AzureADOAuth2
@@ -1774,8 +1775,7 @@ class SAMLAuthBackend(SocialAuthMixin, SAMLAuth):

         return data

-    @classmethod
-    def get_issuing_idp(cls, SAMLResponse: str) -> Optional[str]:
+    def get_issuing_idp(self, SAMLResponse: str) -> Optional[str]:
         """
         Given a SAMLResponse, returns which of the configured IdPs is declared as the issuer.
         This value MUST NOT be trusted as the true issuer!
@@ -1786,11 +1786,12 @@ class SAMLAuthBackend(SocialAuthMixin, SAMLAuth):
         of the configured IdPs' information to use for parsing and validating the response.
         """
         try:
-            resp = OneLogin_Saml2_Response(settings={}, response=SAMLResponse)
+            config = self.generate_saml_config()
+            saml_settings = OneLogin_Saml2_Settings(config, sp_validation_only=True)
+            resp = OneLogin_Saml2_Response(settings=saml_settings, response=SAMLResponse)
             issuers = resp.get_issuers()
-        except cls.SAMLRESPONSE_PARSING_EXCEPTIONS:
-            logger = logging.getLogger(f"zulip.auth.{cls.name}")
-            logger.info("Error while parsing SAMLResponse:", exc_info=True)
+        except self.SAMLRESPONSE_PARSING_EXCEPTIONS:
+            self.logger.info("Error while parsing SAMLResponse:", exc_info=True)
             return None

         for idp_name, idp_config in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.items():
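This appears to be the "exception in SAML integration with encrypted assertions" fix from the changelog: presumably an encrypted assertion can only be decrypted with the SP key material carried by a real `OneLogin_Saml2_Settings` object, which the bare `settings={}` dict in the old code could not provide. A rough usage sketch of the new parsing pattern (illustrative only; `sp_config` and `saml_response_b64` are placeholders, not Zulip identifiers):

```python
# Rough sketch only; sp_config and saml_response_b64 are placeholders.
# Building a real settings object (which carries the SP certificate/key)
# lets python3-saml parse responses whose assertions are encrypted.
from typing import List

from onelogin.saml2.response import OneLogin_Saml2_Response
from onelogin.saml2.settings import OneLogin_Saml2_Settings


def issuers_of(sp_config: dict, saml_response_b64: str) -> List[str]:
    saml_settings = OneLogin_Saml2_Settings(sp_config, sp_validation_only=True)
    resp = OneLogin_Saml2_Response(settings=saml_settings, response=saml_response_b64)
    return resp.get_issuers()
```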