transaction: Add durable=True to the outermost db transactions.

This commit adds `durable=True` to the outermost db transactions
created in the following:
* confirm_email_change
* handle_upload_pre_finish_hook
* deliver_scheduled_emails
* restore_data_from_archive
* do_change_realm_subdomain
* do_create_realm
* do_deactivate_realm
* do_reactivate_realm
* do_delete_user
* do_delete_user_preserving_messages
* create_stripe_customer
* process_initial_upgrade
* do_update_plan
* request_sponsorship
* upload_message_attachment
* register_remote_server
* do_soft_deactivate_users
* maybe_send_batched_emails

This helps avoid creating unintended savepoints in the future.

This is part of our plan to explicitly mark all the
transaction.atomic calls with either 'savepoint=False' or
'durable=True', as required.

* 'savepoint=True' is reserved for special cases where a nested savepoint is intentionally required.
This commit is contained in:
Prakhar Pratyush
2024-11-04 10:49:11 +05:30
committed by Tim Abbott
parent 174a458928
commit 9c9866461a
12 changed files with 18 additions and 18 deletions

View File

@@ -1094,7 +1094,7 @@ class BillingSession(ABC):
metadata=stripe_customer_data.metadata,
)
event_time = timestamp_to_datetime(stripe_customer.created)
with transaction.atomic():
with transaction.atomic(durable=True):
self.write_to_audit_log(BillingSessionEventType.STRIPE_CUSTOMER_CREATED, event_time)
customer = self.update_or_create_customer(stripe_customer.id)
return customer
@@ -1793,7 +1793,7 @@ class BillingSession(ABC):
# TODO: The correctness of this relies on user creation, deactivation, etc being
# in a transaction.atomic() with the relevant RealmAuditLog entries
with transaction.atomic():
with transaction.atomic(durable=True):
# We get the current license count here in case the number of billable
# licenses has changed since the upgrade process began.
current_licenses_count = self.get_billable_licenses_for_customer(
@@ -2912,7 +2912,7 @@ class BillingSession(ABC):
if status is not None:
if status == CustomerPlan.ACTIVE:
assert plan.status < CustomerPlan.LIVE_STATUS_THRESHOLD
with transaction.atomic():
with transaction.atomic(durable=True):
# Switch to a different plan was cancelled. We end the next plan
# and set the current one as active.
if plan.status == CustomerPlan.SWITCH_PLAN_TIER_AT_PLAN_END:
@@ -3412,7 +3412,7 @@ class BillingSession(ABC):
raise BillingError("Form validation error", message=message)
request_context = self.get_sponsorship_request_session_specific_context()
with transaction.atomic():
with transaction.atomic(durable=True):
# Ensures customer is created first before updating sponsorship status.
self.update_customer_sponsorship_status(True)
sponsorship_request = ZulipSponsorshipRequest(

View File

@@ -61,7 +61,7 @@ def do_change_realm_subdomain(
# deleting, clear that state.
realm.demo_organization_scheduled_deletion_date = None
realm.string_id = new_subdomain
with transaction.atomic():
with transaction.atomic(durable=True):
realm.save(update_fields=["string_id", "demo_organization_scheduled_deletion_date"])
RealmAuditLog.objects.create(
realm=realm,
@@ -232,7 +232,7 @@ def do_create_realm(
# is required to do so correctly.
kwargs["push_notifications_enabled"] = sends_notifications_directly()
with transaction.atomic():
with transaction.atomic(durable=True):
realm = Realm(string_id=string_id, name=name, **kwargs)
if is_demo_organization:
realm.demo_organization_scheduled_deletion_date = realm.date_created + timedelta(

View File

@@ -516,7 +516,7 @@ def do_deactivate_realm(
if settings.BILLING_ENABLED:
from corporate.lib.stripe import RealmBillingSession
with transaction.atomic():
with transaction.atomic(durable=True):
realm.deactivated = True
realm.save(update_fields=["deactivated"])
@@ -575,7 +575,7 @@ def do_reactivate_realm(realm: Realm) -> None:
return
realm.deactivated = False
with transaction.atomic():
with transaction.atomic(durable=True):
realm.save(update_fields=["deactivated"])
event_time = timezone_now()

View File

@@ -80,7 +80,7 @@ def do_delete_user(user_profile: UserProfile, *, acting_user: UserProfile | None
date_joined = user_profile.date_joined
personal_recipient = user_profile.recipient
with transaction.atomic():
with transaction.atomic(durable=True):
user_profile.delete()
# Recipient objects don't get deleted through CASCADE, so we need to handle
# the user's personal recipient manually. This will also delete all Messages pointing
@@ -180,7 +180,7 @@ def do_delete_user_preserving_messages(user_profile: UserProfile) -> None:
realm = user_profile.realm
date_joined = user_profile.date_joined
with transaction.atomic():
with transaction.atomic(durable=True):
# The strategy is that before calling user_profile.delete(), we need to
# reassign Messages sent by the user to a dummy user, so that they don't
# get affected by CASCADE. We cannot yet create a dummy user with .id

View File

@@ -596,7 +596,7 @@ def restore_data_from_archive(archive_transaction: ArchiveTransaction) -> int:
# so that when we log "Finished", the process has indeed finished - and that happens only after
# leaving the atomic block - Django does work committing the changes to the database when
# the block ends.
with transaction.atomic():
with transaction.atomic(durable=True):
msg_ids = restore_messages_from_archive(archive_transaction.id)
restore_models_with_message_key_from_archive(archive_transaction.id)
restore_attachments_from_archive(archive_transaction.id)

View File

@@ -277,7 +277,7 @@ def do_soft_deactivate_users(
(user_batch, users) = (users[0:BATCH_SIZE], users[BATCH_SIZE:])
if len(user_batch) == 0:
break
with transaction.atomic():
with transaction.atomic(durable=True):
realm_logs = []
for user in user_batch:
do_soft_deactivate_user(user)

View File

@@ -158,7 +158,7 @@ def upload_message_attachment(
str(target_realm.id), sanitize_name(uploaded_file_name)
)
with transaction.atomic():
with transaction.atomic(durable=True):
upload_backend.upload_message_attachment(
path_id,
uploaded_file_name,

View File

@@ -37,7 +37,7 @@ Usage: ./manage.py deliver_scheduled_emails
def handle(self, *args: Any, **options: Any) -> None:
try:
while True:
with transaction.atomic():
with transaction.atomic(durable=True):
job = (
ScheduledEmail.objects.filter(scheduled_timestamp__lte=timezone_now())
.prefetch_related("users")

View File

@@ -194,7 +194,7 @@ def handle_upload_pre_finish_hook(
StorageClass=settings.S3_UPLOADS_STORAGE_CLASS,
)
with transaction.atomic():
with transaction.atomic(durable=True):
create_attachment(
filename,
path_id,

View File

@@ -100,7 +100,7 @@ def confirm_email_change(request: HttpRequest, confirmation_key: str) -> HttpRes
assert isinstance(email_change_object, EmailChangeStatus)
new_email = email_change_object.new_email
old_email = email_change_object.old_email
with transaction.atomic():
with transaction.atomic(durable=True):
user_profile = UserProfile.objects.select_for_update().get(
id=email_change_object.user_profile_id
)

View File

@@ -216,7 +216,7 @@ class MissedMessageWorker(QueueProcessingWorker):
def maybe_send_batched_emails(self) -> None:
current_time = timezone_now()
with transaction.atomic():
with transaction.atomic(durable=True):
events_to_process = ScheduledMessageNotificationEmail.objects.filter(
scheduled_timestamp__lte=current_time
).select_for_update()

View File

@@ -211,7 +211,7 @@ def register_remote_server(
_("A server with hostname {hostname} already exists").format(hostname=hostname)
)
with transaction.atomic():
with transaction.atomic(durable=True):
if remote_server is None:
created = True
remote_server = RemoteZulipServer.objects.create(