queue_processors: Remove the slow_queries queue.

While this functionality to post slow queries to a Zulip stream was
very useful in the early days of Zulip, when there were only a few
hundred accounts, it's long since been useless, because (1) the total
request volume on larger Zulip servers run by Zulip developers makes
such a stream far too noisy to follow, and (2) other server operators
don't want real-time notifications of slow backend queries.  The right
structure for this is just a log file.

We get rid of the queue and replace it with a "zulip.slow_queries"
logger, which will still log to /var/log/zulip/slow_queries.log for
ease of access to this information and propagate to the other logging
handlers.  Reducing the number of queues is good for lowering Zulip's
memory footprint and restart performance, since in most configurations
we run at least one dedicated queue worker process for each queue.
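The settings change that wires up this logger is not part of the excerpt below; purely as a rough sketch, a dedicated logger of this shape could be declared in Django's LOGGING configuration (the "slow_queries_file" handler and "default" formatter names are assumptions made for this sketch, not Zulip's actual settings keys):

    # Illustrative Django LOGGING fragment for the new logger; handler and
    # formatter names are assumptions, not Zulip's real settings keys.
    LOGGING = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "default": {"format": "%(asctime)s %(levelname)-8s %(message)s"},
        },
        "handlers": {
            "slow_queries_file": {
                "class": "logging.handlers.WatchedFileHandler",
                "filename": "/var/log/zulip/slow_queries.log",
                "formatter": "default",
            },
        },
        "loggers": {
            "zulip.slow_queries": {
                "level": "INFO",
                "handlers": ["slow_queries_file"],
                # Keep propagating so slow-query entries also reach the
                # server's other logging handlers, as described above.
                "propagate": True,
            },
        },
    }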
Mateusz Mandera authored on 2020-05-08 16:37:58 +02:00; committed by Tim Abbott
parent 180c16c80e
commit dd40649e04
12 changed files with 33 additions and 138 deletions

@@ -27,7 +27,7 @@ from zerver.lib.push_notifications import handle_push_notification, handle_remov
     initialize_push_notifications, clear_push_device_tokens
 from zerver.lib.actions import do_send_confirmation_email, \
     do_update_user_activity, do_update_user_activity_interval, do_update_user_presence, \
-    internal_send_stream_message, internal_send_private_message, notify_realm_export, \
+    internal_send_private_message, notify_realm_export, \
     render_incoming_message, do_update_embedded_data, do_mark_stream_messages_as_read
 from zerver.lib.url_preview import preview as url_preview
 from zerver.lib.digest import handle_digest_email
@@ -39,7 +39,7 @@ from zerver.lib.streams import access_stream_by_id
 from zerver.lib.db import reset_queries
 from zerver.context_processors import common_context
 from zerver.lib.outgoing_webhook import do_rest_call, get_outgoing_webhook_service_handler
-from zerver.models import get_bot_services, get_stream, RealmAuditLog
+from zerver.models import get_bot_services, RealmAuditLog
 from zulip_bots.lib import ExternalBotHandler, extract_query_without_mention
 from zerver.lib.bot_lib import EmbeddedBotHandler, get_bot_handler, EmbeddedBotQuitException
 from zerver.lib.exceptions import RateLimited
@@ -494,44 +494,6 @@ class ErrorReporter(QueueProcessingWorker):
         if settings.ERROR_REPORTING:
             do_report_error(event['report']['host'], event['type'], event['report'])
 
-@assign_queue('slow_queries', queue_type="loop")
-class SlowQueryWorker(LoopQueueProcessingWorker):
-    # Sleep 1 minute between checking the queue unconditionally,
-    # regardless of whether anything is in the queue.
-    sleep_delay = 60 * 1
-    sleep_only_if_empty = False
-
-    def consume_batch(self, slow_query_events: List[Dict[str, Any]]) -> None:
-        for event in slow_query_events:
-            logging.info("Slow query: %s", event["query"])
-
-        if settings.SLOW_QUERY_LOGS_STREAM is None:
-            return
-
-        if settings.ERROR_BOT is None:
-            return
-
-        if len(slow_query_events) > 0:
-            topic = "%s: slow queries" % (settings.EXTERNAL_HOST,)
-
-            content = ""
-            for event in slow_query_events:
-                content += " %s\n" % (event["query"],)
-
-            error_bot = get_system_bot(settings.ERROR_BOT)
-            realm = error_bot.realm
-            errors_stream = get_stream(
-                settings.SLOW_QUERY_LOGS_STREAM,
-                realm
-            )
-            internal_send_stream_message(
-                realm,
-                error_bot,
-                errors_stream,
-                topic,
-                content
-            )
 
 @assign_queue('digest_emails')
 class DigestWorker(QueueProcessingWorker):  # nocoverage
     # Who gets a digest is entirely determined by the enqueue_digest_emails
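For context on the replacement path (not part of the diff above): with the queue and SlowQueryWorker gone, code that previously published a slow_queries event can write to the new logger directly. A minimal sketch, with the helper name and the five-second threshold being assumptions chosen for illustration:

    import logging

    # Logger name taken from the commit message above.
    slow_query_logger = logging.getLogger("zulip.slow_queries")

    def maybe_log_slow_query(time_delta: float, request_description: str) -> None:
        # Hypothetical helper: instead of enqueueing to the removed
        # "slow_queries" queue, log the request; the handler for this
        # logger appends to /var/log/zulip/slow_queries.log and the
        # record also propagates to the other configured handlers.
        if time_delta >= 5.0:  # threshold chosen only for illustration
            slow_query_logger.info("%.3fs %s", time_delta, request_description)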