Mirror of https://github.com/zulip/zulip.git (synced 2025-11-02 21:13:36 +00:00)
delete_in_topic: Split up the deletion into batches.
Fixes #22821.

As explained in the comment in the code: Topics can be large enough that
this request will inevitably time out. In such a case, it's good for some
progress to be accomplished, so that full deletion can be achieved by
repeating the request. For that purpose, we delete messages in atomic
batches, committing after each batch.

The additional perk is that the ordering of messages should prevent some
hypothetical deadlocks - ref #19054.
Committed by: Tim Abbott
Parent: cf2f14f04c
Commit: 940830055b
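The batching pattern the commit message describes, reduced to its essentials: lock and delete a bounded, consistently ordered slice of rows in its own transaction, commit, and repeat until nothing is left. The fixed ordering (descending id) is what addresses the deadlock concern, since concurrent deleters then acquire row locks in the same order. This is only a sketch under assumptions: BATCH_SIZE and delete_batch are placeholders standing in for Zulip's RETENTION_STREAM_MESSAGE_BATCH_SIZE and do_delete_messages.

# Minimal sketch of the batched-deletion pattern, not Zulip's actual code.
# BATCH_SIZE and delete_batch stand in for RETENTION_STREAM_MESSAGE_BATCH_SIZE
# and do_delete_messages.
from django.db import transaction

BATCH_SIZE = 100  # assumed value for illustration


def delete_in_batches(message_queryset, delete_batch):
    while True:
        # durable=True makes each iteration its own top-level transaction,
        # so every completed batch stays committed even if a later batch
        # fails or the request times out.
        with transaction.atomic(durable=True):
            # Locking rows in a fixed order (descending id) means concurrent
            # deleters wait on each other instead of deadlocking.
            batch = message_queryset.order_by("-id")[:BATCH_SIZE].select_for_update(
                of=("self",)
            )
            if not batch:
                break
            delete_batch(batch)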
@@ -55,6 +55,7 @@ from zerver.lib.exceptions import (
 from zerver.lib.mention import MentionBackend, silent_mention_syntax_for_user
 from zerver.lib.request import REQ, has_request_variables
 from zerver.lib.response import json_success
+from zerver.lib.retention import STREAM_MESSAGE_BATCH_SIZE as RETENTION_STREAM_MESSAGE_BATCH_SIZE
 from zerver.lib.retention import parse_message_retention_days
 from zerver.lib.streams import (
     StreamDict,
@@ -848,7 +849,6 @@ def get_topics_backend(
     return json_success(request, data=dict(topics=result))


-@transaction.atomic
 @require_realm_admin
 @has_request_variables
 def delete_in_topic(
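Dropping the view-level @transaction.atomic decorator above is what makes the per-batch transactions in the next hunk possible: Django refuses to open an atomic(durable=True) block inside another atomic block, because a nested block cannot guarantee its changes are committed immediately. A hedged illustration of that constraint, with placeholder view names rather than Zulip's:

from django.db import transaction


@transaction.atomic  # the whole request runs in one outer transaction
def batched_view_with_outer_atomic(request):
    # Raises RuntimeError: a durable atomic block cannot be nested
    # within another atomic block.
    with transaction.atomic(durable=True):
        ...


def batched_view(request):
    # With no outer transaction, each durable block commits when it exits,
    # which is what lets partial progress survive a timeout.
    with transaction.atomic(durable=True):
        ...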
@@ -867,9 +867,20 @@ def delete_in_topic(
         ).values_list("message_id", flat=True)
         messages = messages.filter(id__in=deletable_message_ids)

-    messages = messages.select_for_update(of=("self",))
-
-    do_delete_messages(user_profile.realm, messages)
+    # Topics can be large enough that this request will inevitably time out.
+    # In such a case, it's good for some progress to be accomplished, so that
+    # full deletion can be achieved by repeating the request. For that purpose,
+    # we delete messages in atomic batches, committing after each batch.
+    # TODO: Ideally this should be moved to the deferred_work queue.
+    batch_size = RETENTION_STREAM_MESSAGE_BATCH_SIZE
+    while True:
+        with transaction.atomic(durable=True):
+            messages_to_delete = messages.order_by("-id")[0:batch_size].select_for_update(
+                of=("self",)
+            )
+            if not messages_to_delete:
+                break
+            do_delete_messages(user_profile.realm, messages_to_delete)

     return json_success(request)
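A hedged client-side sketch of "repeating the request" as described in the comment above: keep calling the topic-deletion endpoint until one request finishes inside the timeout. The endpoint path and basic-auth scheme follow Zulip's documented REST API, but the site, credentials, stream id, topic, and retry policy here are placeholders.

# Client-side sketch only; all values below are placeholders.
import requests

SITE = "https://chat.example.com"
EMAIL = "admin@example.com"
API_KEY = "secret-api-key"
STREAM_ID = 42
TOPIC_NAME = "old topic"


def delete_topic(max_attempts: int = 20) -> None:
    for _ in range(max_attempts):
        try:
            response = requests.post(
                f"{SITE}/api/v1/streams/{STREAM_ID}/delete_topic",
                auth=(EMAIL, API_KEY),
                data={"topic_name": TOPIC_NAME},
                timeout=60,
            )
            response.raise_for_status()
            return  # the request completed, so the topic is fully deleted
        except requests.Timeout:
            # The server deleted some batches before the timeout; the next
            # request resumes where the last one stopped.
            continue
    raise RuntimeError("Topic deletion did not finish; retry later.")


if __name__ == "__main__":
    delete_topic()

In practice a client would also treat gateway timeouts (HTTP 504) as retryable; raise_for_status turns those into HTTPError rather than requests.Timeout.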