deliver_scheduled_emails: Use a queue, instead of infinite retries.

`deliver_scheduled_emails` tries to deliver the email synchronously,
and if delivery fails, it retries after 10 seconds.  Since it does not
track retries, and always picks the earliest-scheduled message that is
due, a persistent failure with that message blocks all forward
progress, and the worker retries it indefinitely.  This can result in
excessive network or email-delivery charges from the remote SMTP
server.

Switch to delivering emails via a new queue worker.  The
`deliver_scheduled_emails` job now serves only to pull deferred jobs
out of the table once they are due, insert them into RabbitMQ, and
then delete them.  This limits the potential for head-of-queue
failures to failures inserting into RabbitMQ, which is more reasonable
than failures speaking to a complex external system we do not control.
Retries and any connections to the SMTP server are left to the
RabbitMQ consumer.
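As a rough sketch of the new division of labor (illustrative only -- the actual change below uses `queue_event_on_commit` and the new `deferred_email_senders` worker; `publish_to_rabbitmq`, `data`, and `delete_row` here are hypothetical stand-ins):

```python
# Illustrative sketch, not Zulip's implementation: the scheduler job only
# publishes and deletes, while the RabbitMQ consumer owns SMTP and retries.
from collections.abc import Callable
from typing import Any

from django.db import transaction


def enqueue_due_email(
    data: dict[str, Any],
    delete_row: Callable[[], None],
    publish_to_rabbitmq: Callable[[str, dict[str, Any]], None],
) -> None:
    # The scheduler never opens an SMTP connection, so the only remaining
    # head-of-queue failure is the RabbitMQ insert itself.
    transaction.on_commit(lambda: publish_to_rabbitmq("deferred_email_senders", data))
    delete_row()


def deferred_email_consumer(event: dict[str, Any], send_one: Callable[..., None]) -> None:
    # SMTP connections and retry bookkeeping live in the queue consumer, so
    # a persistently failing message no longer stalls the scheduler.
    send_one(**event)
```

Because the publish is registered as an on-commit hook, the test changes below wrap calls in `captureOnCommitCallbacks(execute=True)` to force the enqueued event to be processed.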

We build a new RabbitMQ queue, rather than reuse the existing
`email_senders` queue, because that queue is expected to stay
reasonably low-latency, for things like missed-message notifications.
The `send_future_email` codepath, which inserts into ScheduledEmails,
is also (ab)used for digest emails, which are extremely bursty in
their frequency -- and a large burst could significantly delay emails
behind it in the queue.

The new queue is explicitly reserved for messages which were not
initiated by user actions (e.g., invitation reminders, digests,
new-account follow-ups), and which are thus not latency-sensitive.

Fixes: #32463.
Author: Alex Vandiver (2023-03-03 15:39:28 +00:00), committed by Tim Abbott
Parent: 7fde5fd0a4, commit: c5200e8b05
13 changed files with 130 additions and 105 deletions

View File

@@ -388,6 +388,12 @@ define service {
check_command check_rabbitmq_consumers!email_senders
}
define service {
use rabbitmq-consumer-service
service_description Check RabbitMQ deferred_email_senders consumers
check_command check_rabbitmq_consumers!deferred_email_senders
}
define service {
use rabbitmq-consumer-service
service_description Check RabbitMQ embed_links consumers

View File

@@ -151,6 +151,7 @@ class zulip::app_frontend_base {
'embed_links',
'embedded_bots',
'email_senders',
'deferred_email_senders',
'missedmessage_emails',
'missedmessage_mobile_notifications',
'outgoing_webhooks',

View File

@@ -14,6 +14,7 @@ from scripts.lib.zulip_tools import atomic_nagios_write, get_config, get_config_
normal_queues = [
"deferred_work",
"deferred_email_senders",
"digest_emails",
"email_mirror",
"email_senders",
@@ -49,7 +50,7 @@ MAX_SECONDS_TO_CLEAR: defaultdict[str, int] = defaultdict(
digest_emails=1200,
missedmessage_mobile_notifications=120,
embed_links=60,
email_senders=240,
deferred_email_senders=240,
)
CRITICAL_SECONDS_TO_CLEAR: defaultdict[str, int] = defaultdict(
lambda: 60,
@@ -57,7 +58,7 @@ CRITICAL_SECONDS_TO_CLEAR: defaultdict[str, int] = defaultdict(
missedmessage_mobile_notifications=180,
digest_emails=1800,
embed_links=90,
email_senders=300,
deferred_email_senders=300,
)

View File

@@ -76,7 +76,6 @@ not_yet_fully_covered = [
"zerver/lib/markdown/nested_code_blocks.py",
# Workers should get full coverage; many have it already
"zerver/worker/deferred_work.py",
"zerver/worker/email_senders.py",
"zerver/worker/invites.py",
"zerver/worker/missedmessage_emails.py",
# Worker-associated files; lower-priority to get testing on

View File

@@ -34,6 +34,7 @@ from django.utils.translation import override as override_language
from confirmation.models import generate_key
from zerver.lib.logging_util import log_to_file
from zerver.lib.queue import queue_event_on_commit
from zerver.models import Realm, ScheduledEmail, UserProfile
from zerver.models.scheduled_jobs import EMAIL_TYPES
from zerver.models.users import get_user_profile_by_id
@@ -487,7 +488,7 @@ def handle_send_email_format_changes(job: dict[str, Any]) -> None:
del job["to_user_id"]
def deliver_scheduled_emails(email: ScheduledEmail) -> None:
def queue_scheduled_emails(email: ScheduledEmail) -> None:
data = orjson.loads(email.data)
user_ids = list(email.users.values_list("id", flat=True))
if not user_ids and not email.address:
@@ -505,8 +506,7 @@ def deliver_scheduled_emails(email: ScheduledEmail) -> None:
data["to_user_ids"] = user_ids
if email.address is not None:
data["to_emails"] = [email.address]
handle_send_email_format_changes(data)
send_email(**data)
queue_event_on_commit("deferred_email_senders", data)
email.delete()

View File

@@ -16,7 +16,7 @@ from typing_extensions import override
from zerver.lib.logging_util import log_to_file
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.send_email import EmailNotDeliveredError, deliver_scheduled_emails
from zerver.lib.send_email import EmailNotDeliveredError, queue_scheduled_emails
from zerver.models import ScheduledEmail
## Setup ##
@@ -47,7 +47,7 @@ Usage: ./manage.py deliver_scheduled_emails
)
if job:
try:
deliver_scheduled_emails(job)
queue_scheduled_emails(job)
except EmailNotDeliveredError:
logger.warning("%r not delivered", job)
else:

View File

@@ -17,7 +17,7 @@ from zerver.lib.email_notifications import (
send_account_registered_email,
)
from zerver.lib.send_email import (
deliver_scheduled_emails,
queue_scheduled_emails,
send_custom_email,
send_custom_server_email,
)
@@ -490,7 +490,8 @@ class TestFollowupEmails(ZulipTestCase):
"zerver/emails/onboarding_team_to_zulip",
)
deliver_scheduled_emails(scheduled_emails[0])
with self.captureOnCommitCallbacks(execute=True):
queue_scheduled_emails(scheduled_emails[0])
from django.core.mail import outbox
self.assert_length(outbox, 1)
@@ -525,7 +526,8 @@ class TestFollowupEmails(ZulipTestCase):
"zerver/emails/onboarding_team_to_zulip",
)
deliver_scheduled_emails(scheduled_emails[0])
with self.captureOnCommitCallbacks(execute=True):
queue_scheduled_emails(scheduled_emails[0])
from django.core.mail import outbox
self.assert_length(outbox, 1)

View File

@@ -50,7 +50,7 @@ from zerver.actions.users import change_user_is_active
from zerver.context_processors import common_context
from zerver.lib.create_user import create_user
from zerver.lib.default_streams import get_slim_realm_default_streams
from zerver.lib.send_email import FromAddress, deliver_scheduled_emails, send_future_email
from zerver.lib.send_email import FromAddress, queue_scheduled_emails, send_future_email
from zerver.lib.streams import ensure_stream
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import find_key_by_email
@@ -1838,7 +1838,8 @@ so we didn't send them an invitation. We did send invitations to everyone else!"
self.assert_length(email_jobs_to_deliver, 1)
email_count = len(mail.outbox)
for job in email_jobs_to_deliver:
deliver_scheduled_emails(job)
with self.captureOnCommitCallbacks(execute=True):
queue_scheduled_emails(job)
self.assert_length(mail.outbox, email_count + 1)
self.assertEqual(self.email_envelope_from(mail.outbox[-1]), settings.NOREPLY_EMAIL_ADDRESS)
self.assertIn(FromAddress.NOREPLY, self.email_display_from(mail.outbox[-1]))

View File

@@ -32,7 +32,7 @@ from zerver.models.streams import get_stream
from zerver.tornado.event_queue import build_offline_notification
from zerver.worker import base as base_worker
from zerver.worker.email_mirror import MirrorWorker
from zerver.worker.email_senders import EmailSendingWorker
from zerver.worker.email_senders import ImmediateEmailSenderWorker
from zerver.worker.embed_links import FetchLinksEmbedData
from zerver.worker.missedmessage_emails import MissedMessageWorker
from zerver.worker.missedmessage_mobile_notifications import PushNotificationsWorker
@@ -695,7 +695,7 @@ class WorkerTest(ZulipTestCase):
fake_client.enqueue(queue_name, event)
with simulated_queue_client(fake_client):
worker = EmailSendingWorker()
worker = ImmediateEmailSenderWorker()
worker.setup()
with (
patch("zerver.lib.send_email.build_email", side_effect=EmailNotDeliveredError),

View File

@@ -37,11 +37,7 @@ from zerver.lib.bulk_create import create_users
from zerver.lib.create_user import copy_default_settings
from zerver.lib.events import do_events_register
from zerver.lib.exceptions import JsonableError
from zerver.lib.send_email import (
clear_scheduled_emails,
deliver_scheduled_emails,
send_future_email,
)
from zerver.lib.send_email import clear_scheduled_emails, queue_scheduled_emails, send_future_email
from zerver.lib.stream_topic import StreamTopicTarget
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import (
@@ -2155,7 +2151,8 @@ class ActivateTest(ZulipTestCase):
)
self.assertEqual(ScheduledEmail.objects.count(), 1)
email = ScheduledEmail.objects.all().first()
deliver_scheduled_emails(assert_is_not_none(email))
with self.captureOnCommitCallbacks(execute=True):
queue_scheduled_emails(assert_is_not_none(email))
from django.core.mail import outbox
self.assert_length(outbox, 1)
@@ -2186,8 +2183,11 @@ class ActivateTest(ZulipTestCase):
email_id = email.id
scheduled_at = email.scheduled_timestamp
with self.assertLogs("zulip.send_email", level="INFO") as info_log:
deliver_scheduled_emails(email)
with (
self.assertLogs("zulip.send_email", level="INFO") as info_log,
self.captureOnCommitCallbacks(execute=True),
):
queue_scheduled_emails(email)
from django.core.mail import outbox
self.assert_length(outbox, 0)

View File

@@ -0,0 +1,8 @@
# Documented in https://zulip.readthedocs.io/en/latest/subsystems/queuing.html
from zerver.worker.base import assign_queue
from zerver.worker.email_senders_base import EmailSendingWorker
@assign_queue("deferred_email_senders")
class DeferredEmailSenderWorker(EmailSendingWorker):
pass

View File

@@ -1,86 +1,8 @@
# Documented in https://zulip.readthedocs.io/en/latest/subsystems/queuing.html
import copy
import logging
import socket
from collections.abc import Callable
from functools import wraps
from typing import Any
from django.core.mail.backends.base import BaseEmailBackend
from typing_extensions import override
from zerver.lib.queue import retry_event
from zerver.lib.send_email import (
EmailNotDeliveredError,
handle_send_email_format_changes,
initialize_connection,
send_email,
)
from zerver.models import Realm
from zerver.worker.base import ConcreteQueueWorker, LoopQueueProcessingWorker, assign_queue
logger = logging.getLogger(__name__)
# If you change the function on which this decorator is used be careful that the new
# function doesn't delete the "failed_tries" attribute of "data" which is needed for
# "retry_event" to work correctly; see EmailSendingWorker for an example with deepcopy.
def retry_send_email_failures(
func: Callable[[ConcreteQueueWorker, dict[str, Any]], None],
) -> Callable[[ConcreteQueueWorker, dict[str, Any]], None]:
@wraps(func)
def wrapper(worker: ConcreteQueueWorker, data: dict[str, Any]) -> None:
try:
func(worker, data)
except (socket.gaierror, TimeoutError, EmailNotDeliveredError) as e:
error_class_name = type(e).__name__
def on_failure(event: dict[str, Any]) -> None:
logging.exception(
"Event %r failed due to exception %s", event, error_class_name, stack_info=True
)
retry_event(worker.queue_name, data, on_failure)
return wrapper
from zerver.worker.base import assign_queue
from zerver.worker.email_senders_base import EmailSendingWorker
@assign_queue("email_senders")
class EmailSendingWorker(LoopQueueProcessingWorker):
def __init__(
self,
threaded: bool = False,
disable_timeout: bool = False,
worker_num: int | None = None,
) -> None:
super().__init__(threaded, disable_timeout, worker_num)
self.connection: BaseEmailBackend | None = None
@retry_send_email_failures
def send_email(self, event: dict[str, Any]) -> None:
# Copy the event, so that we don't pass the `failed_tries'
# data to send_email (which neither takes that
# argument nor needs that data).
copied_event = copy.deepcopy(event)
if "failed_tries" in copied_event:
del copied_event["failed_tries"]
handle_send_email_format_changes(copied_event)
if "realm_id" in copied_event:
# "realm" does not serialize over the queue, so we send the realm_id
copied_event["realm"] = Realm.objects.get(id=copied_event["realm_id"])
del copied_event["realm_id"]
self.connection = initialize_connection(self.connection)
send_email(**copied_event, connection=self.connection)
@override
def consume_batch(self, events: list[dict[str, Any]]) -> None:
for event in events:
self.send_email(event)
@override
def stop(self) -> None:
try:
if self.connection is not None:
self.connection.close()
finally:
super().stop()
class ImmediateEmailSenderWorker(EmailSendingWorker):
pass

View File

@@ -0,0 +1,85 @@
# Documented in https://zulip.readthedocs.io/en/latest/subsystems/queuing.html
import copy
import logging
import socket
from collections.abc import Callable
from functools import wraps
from typing import Any
from django.core.mail.backends.base import BaseEmailBackend
from typing_extensions import override
from zerver.lib.queue import retry_event
from zerver.lib.send_email import (
EmailNotDeliveredError,
handle_send_email_format_changes,
initialize_connection,
send_email,
)
from zerver.models import Realm
from zerver.worker.base import ConcreteQueueWorker, LoopQueueProcessingWorker
logger = logging.getLogger(__name__)
# If you change the function on which this decorator is used be careful that the new
# function doesn't delete the "failed_tries" attribute of "data" which is needed for
# "retry_event" to work correctly; see EmailSendingWorker for an example with deepcopy.
def retry_send_email_failures(
func: Callable[[ConcreteQueueWorker, dict[str, Any]], None],
) -> Callable[[ConcreteQueueWorker, dict[str, Any]], None]:
@wraps(func)
def wrapper(worker: ConcreteQueueWorker, data: dict[str, Any]) -> None:
try:
func(worker, data)
except (socket.gaierror, TimeoutError, EmailNotDeliveredError) as e:
error_class_name = type(e).__name__
def on_failure(event: dict[str, Any]) -> None:
logging.exception(
"Event %r failed due to exception %s", event, error_class_name, stack_info=True
)
retry_event(worker.queue_name, data, on_failure)
return wrapper
class EmailSendingWorker(LoopQueueProcessingWorker):
def __init__(
self,
threaded: bool = False,
disable_timeout: bool = False,
worker_num: int | None = None,
) -> None:
super().__init__(threaded, disable_timeout, worker_num)
self.connection: BaseEmailBackend | None = None
@retry_send_email_failures
def send_email(self, event: dict[str, Any]) -> None:
# Copy the event, so that we don't pass the `failed_tries'
# data to send_email (which neither takes that
# argument nor needs that data).
copied_event = copy.deepcopy(event)
if "failed_tries" in copied_event:
del copied_event["failed_tries"]
handle_send_email_format_changes(copied_event)
if "realm_id" in copied_event:
# "realm" does not serialize over the queue, so we send the realm_id
copied_event["realm"] = Realm.objects.get(id=copied_event["realm_id"])
del copied_event["realm_id"]
self.connection = initialize_connection(self.connection)
send_email(**copied_event, connection=self.connection)
@override
def consume_batch(self, events: list[dict[str, Any]]) -> None:
for event in events:
self.send_email(event)
@override
def stop(self) -> None: # nocoverage
try:
if self.connection is not None:
self.connection.close()
finally:
super().stop()