Mirror of https://github.com/zulip/zulip.git (synced 2025-11-16 20:02:15 +00:00)
Commit 4a43856ba7 (parent 5eeb616666), committed by Tim Abbott
@@ -18,11 +18,11 @@
         {{/if}}
     </td>
     <td class="actions">
-        {{#unless time_deleted}}
+        {{#if url}}
         <button class="button rounded small delete btn-danger" data-export-id="{{id}}">
             <i class="fa fa-trash-o" aria-hidden="true"></i>
         </button>
-        {{/unless}}
+        {{/if}}
     </td>
 </tr>
 {{/with}}
@@ -2373,24 +2373,25 @@ def get_realm_exports_serialized(user: UserProfile) -> List[Dict[str, Any]]:
     )
     exports_dict = {}
     for export in all_exports:
-        pending = True
         export_url = None
         deleted_timestamp = None
         failed_timestamp = None
         acting_user = export.acting_user
 
+        export_data = {}
         if export.extra_data is not None:
-            pending = False
-
             export_data = orjson.loads(export.extra_data)
-            deleted_timestamp = export_data.get("deleted_timestamp")
-            failed_timestamp = export_data.get("failed_timestamp")
-            export_path = export_data.get("export_path")
 
-            if export_path and not deleted_timestamp:
-                export_url = zerver.lib.upload.upload_backend.get_export_tarball_url(
-                    user.realm, export_path
-                )
+        deleted_timestamp = export_data.get("deleted_timestamp")
+        failed_timestamp = export_data.get("failed_timestamp")
+        export_path = export_data.get("export_path")
+
+        pending = deleted_timestamp is None and failed_timestamp is None and export_path is None
+
+        if export_path is not None and not deleted_timestamp:
+            export_url = zerver.lib.upload.upload_backend.get_export_tarball_url(
+                user.realm, export_path
+            )
 
         assert acting_user is not None
         exports_dict[export.id] = dict(
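
Note: the net effect of this hunk is that an export's status is now derived entirely from the keys present in extra_data, with "pending" computed rather than tracked as a flag. A minimal standalone sketch of that derivation follows; ExportStatus and summarize_export are invented names, json stands in for orjson, and this is not Zulip code.

# Illustrative sketch (not Zulip code): deriving an export's status from the
# audit log's extra_data payload, mirroring the logic in the hunk above.
import json
from enum import Enum
from typing import Any, Dict, Optional


class ExportStatus(Enum):
    PENDING = "pending"
    FAILED = "failed"
    DELETED = "deleted"
    AVAILABLE = "available"


def summarize_export(extra_data: Optional[str]) -> Dict[str, Any]:
    # A missing extra_data blob just means the worker has not written anything yet.
    data: Dict[str, Any] = json.loads(extra_data) if extra_data is not None else {}
    deleted_timestamp = data.get("deleted_timestamp")
    failed_timestamp = data.get("failed_timestamp")
    export_path = data.get("export_path")

    # Pending means none of the terminal markers are present.
    if deleted_timestamp is None and failed_timestamp is None and export_path is None:
        status = ExportStatus.PENDING
    elif deleted_timestamp is not None:
        status = ExportStatus.DELETED
    elif failed_timestamp is not None:
        status = ExportStatus.FAILED
    else:
        status = ExportStatus.AVAILABLE
    return {"status": status, "export_path": export_path}


assert summarize_export(None)["status"] is ExportStatus.PENDING
assert summarize_export('{"failed_timestamp": 1.0}')["status"] is ExportStatus.FAILED
assert summarize_export('{"export_path": "/exports/a.tar.gz"}')["status"] is ExportStatus.AVAILABLE
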
@@ -1,4 +1,5 @@
 import os
+from typing import Optional, Set
 from unittest.mock import patch
 
 import botocore.exceptions
@@ -16,7 +17,7 @@ from zerver.lib.test_helpers import (
     stdout_suppressed,
     use_s3_backend,
 )
-from zerver.models import RealmAuditLog
+from zerver.models import Realm, RealmAuditLog
 from zerver.views.realm_export import export_realm
 
 
@@ -117,18 +118,48 @@ class RealmExportTest(ZulipTestCase):
         tarball_path = create_dummy_file("test-export.tar.gz")
 
         # Test the export logic.
-        with patch("zerver.lib.export.do_export_realm", return_value=tarball_path) as mock_export:
+        def fake_export_realm(
+            realm: Realm,
+            output_dir: str,
+            threads: int,
+            exportable_user_ids: Optional[Set[int]] = None,
+            public_only: bool = False,
+            consent_message_id: Optional[int] = None,
+            export_as_active: Optional[bool] = None,
+        ) -> str:
+            self.assertEqual(realm, admin.realm)
+            self.assertEqual(public_only, True)
+            self.assertTrue(os.path.basename(output_dir).startswith("zulip-export-"))
+            self.assertEqual(threads, 6)
+
+            # Check that the export shows up as in progress
+            result = self.client_get("/json/export/realm")
+            response_dict = self.assert_json_success(result)
+            export_dict = response_dict["exports"]
+            self.assert_length(export_dict, 1)
+            id = export_dict[0]["id"]
+            self.assertEqual(export_dict[0]["pending"], True)
+            self.assertIsNone(export_dict[0]["export_url"])
+            self.assertIsNone(export_dict[0]["deleted_timestamp"])
+            self.assertIsNone(export_dict[0]["failed_timestamp"])
+            self.assertEqual(export_dict[0]["acting_user_id"], admin.id)
+
+            # While the export is in progress, we can't delete it
+            result = self.client_delete(f"/json/export/realm/{id}")
+            self.assert_json_error(result, "Export still in progress")
+
+            return tarball_path
+
+        with patch(
+            "zerver.lib.export.do_export_realm", side_effect=fake_export_realm
+        ) as mock_export:
             with stdout_suppressed(), self.assertLogs(level="INFO") as info_logs:
                 with self.captureOnCommitCallbacks(execute=True):
                     result = self.client_post("/json/export/realm")
         self.assertTrue("INFO:root:Completed data export for zulip in " in info_logs.output[0])
+        mock_export.assert_called_once()
         self.assert_json_success(result)
         self.assertFalse(os.path.exists(tarball_path))
-        args = mock_export.call_args_list[0][1]
-        self.assertEqual(args["realm"], admin.realm)
-        self.assertEqual(args["public_only"], True)
-        self.assertTrue(os.path.basename(args["output_dir"]).startswith("zulip-export-"))
-        self.assertEqual(args["threads"], 6)
 
         # Get the entry and test that iago initiated it.
         audit_log_entry = RealmAuditLog.objects.filter(
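
Note: the key change here is replacing return_value with side_effect. The fake implementation runs in the middle of the POST request, so the test can assert on the in-progress state and the call arguments directly, rather than inspecting call_args afterwards. A minimal, self-contained analogue of the pattern follows; expensive_export and run_export are invented stand-ins, not Zulip code.

# Minimal standalone analogue (not Zulip code) of the side_effect pattern used
# above: the fake runs while the patched call is in flight, so the test can
# check arguments and intermediate state instead of inspecting call_args later.
import unittest
from unittest.mock import patch


def expensive_export(path: str) -> str:
    raise NotImplementedError("always patched in tests")


def run_export(path: str) -> dict:
    # Stand-in for the view/worker: "pending" until expensive_export returns.
    status = {"pending": True, "tarball": None}
    status["tarball"] = expensive_export(path)
    status["pending"] = False
    return status


class ExportPatchTest(unittest.TestCase):
    def test_in_progress_assertions(self) -> None:
        def fake_export(path: str) -> str:
            # Runs in the middle of run_export(), before "pending" is cleared;
            # argument checks here replace the old call_args_list inspection.
            self.assertEqual(path, "/tmp/export")
            return "tarball.tar.gz"

        with patch(f"{__name__}.expensive_export", side_effect=fake_export) as mock_export:
            status = run_export("/tmp/export")
        mock_export.assert_called_once()
        self.assertEqual(status, {"pending": False, "tarball": "tarball.tar.gz"})


if __name__ == "__main__":
    unittest.main()
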
@@ -201,12 +232,17 @@ class RealmExportTest(ZulipTestCase):
         response_dict = self.assert_json_success(result)
         export_dict = response_dict["exports"]
         self.assert_length(export_dict, 1)
+        export_id = export_dict[0]["id"]
         self.assertEqual(export_dict[0]["pending"], False)
         self.assertIsNone(export_dict[0]["export_url"])
         self.assertIsNone(export_dict[0]["deleted_timestamp"])
         self.assertIsNotNone(export_dict[0]["failed_timestamp"])
         self.assertEqual(export_dict[0]["acting_user_id"], admin.id)
 
+        # Check that we can't delete it
+        result = self.client_delete(f"/json/export/realm/{export_id}")
+        self.assert_json_error(result, "Export failed, nothing to delete")
+
     def test_realm_export_rate_limited(self) -> None:
         admin = self.example_user("iago")
         self.login_user(admin)
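
Note: for orientation, the export entry these assertions exercise looks roughly like the following for a failed export; only the keys asserted above are shown, and the concrete values are made up.

# Illustrative entry for a failed export, limited to the keys asserted above;
# the concrete values here are made up.
failed_export_entry = {
    "id": 42,
    "acting_user_id": 11,            # the admin who requested the export
    "pending": False,                # failure is a terminal state
    "export_url": None,              # no tarball was produced
    "deleted_timestamp": None,
    "failed_timestamp": 1700000000.0,
}
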
@@ -14,7 +14,6 @@ from zerver.lib.exceptions import JsonableError
 from zerver.lib.export import get_realm_exports_serialized
 from zerver.lib.queue import queue_json_publish
 from zerver.lib.response import json_success
-from zerver.lib.utils import assert_is_not_none
 from zerver.models import RealmAuditLog, UserProfile
 
 
@@ -104,8 +103,14 @@ def delete_realm_export(request: HttpRequest, user: UserProfile, export_id: int)
     except RealmAuditLog.DoesNotExist:
         raise JsonableError(_("Invalid data export ID"))
 
-    export_data = orjson.loads(assert_is_not_none(audit_log_entry.extra_data))
-    if "deleted_timestamp" in export_data:
+    export_data = {}
+    if audit_log_entry.extra_data is not None:
+        export_data = orjson.loads(audit_log_entry.extra_data)
+    if export_data.get("deleted_timestamp") is not None:
         raise JsonableError(_("Export already deleted"))
+    if export_data.get("export_path") is None:
+        if export_data.get("failed_timestamp") is not None:
+            raise JsonableError(_("Export failed, nothing to delete"))
+        raise JsonableError(_("Export still in progress"))
     do_delete_realm_export(user, audit_log_entry)
     return json_success(request)
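
Note: the rewritten view now tolerates a missing extra_data payload and distinguishes three non-deletable states in a fixed order: already deleted, failed, and still in progress. A self-contained sketch of that validation order follows; DeleteError and check_deletable are invented names, json stands in for orjson, and this is not the actual Zulip view.

# Illustrative sketch (not the Zulip view): the validation order enforced by
# the rewritten delete handler, with json standing in for orjson.
import json
from typing import Optional


class DeleteError(Exception):
    pass


def check_deletable(extra_data: Optional[str]) -> None:
    data = json.loads(extra_data) if extra_data is not None else {}
    if data.get("deleted_timestamp") is not None:
        raise DeleteError("Export already deleted")
    if data.get("export_path") is None:
        # No tarball exists: either the export failed outright, or the
        # worker simply has not finished yet.
        if data.get("failed_timestamp") is not None:
            raise DeleteError("Export failed, nothing to delete")
        raise DeleteError("Export still in progress")
    # Otherwise a tarball exists and deletion may proceed.


for payload, expected in [
    ('{"deleted_timestamp": 1.0}', "Export already deleted"),
    ('{"failed_timestamp": 1.0}', "Export failed, nothing to delete"),
    (None, "Export still in progress"),
]:
    try:
        check_deletable(payload)
    except DeleteError as err:
        assert str(err) == expected
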
@@ -1063,6 +1063,10 @@ class DeferredWorker(QueueProcessingWorker):
             output_dir = tempfile.mkdtemp(prefix="zulip-export-")
             export_event = RealmAuditLog.objects.get(id=event["id"])
             user_profile = get_user_profile_by_id(event["user_profile_id"])
+            extra_data = {}
+            if export_event.extra_data is not None:
+                extra_data = orjson.loads(export_event.extra_data)
+
             logger.info(
                 "Starting realm export for realm %s into %s, initiated by user_profile_id %s",
                 realm.string_id,
@@ -1079,11 +1083,8 @@ class DeferredWorker(QueueProcessingWorker):
                     public_only=True,
                 )
             except Exception:
-                export_event.extra_data = orjson.dumps(
-                    dict(
-                        failed_timestamp=timezone_now().timestamp(),
-                    )
-                ).decode()
+                extra_data["failed_timestamp"] = timezone_now().timestamp()
+                export_event.extra_data = orjson.dumps(extra_data).decode()
                 export_event.save(update_fields=["extra_data"])
                 logging.exception(
                     "Data export for %s failed after %s",
@@ -1097,11 +1098,8 @@ class DeferredWorker(QueueProcessingWorker):
             assert public_url is not None
 
             # Update the extra_data field now that the export is complete.
-            export_event.extra_data = orjson.dumps(
-                dict(
-                    export_path=urllib.parse.urlparse(public_url).path,
-                )
-            ).decode()
+            extra_data["export_path"] = urllib.parse.urlparse(public_url).path
+            export_event.extra_data = orjson.dumps(extra_data).decode()
             export_event.save(update_fields=["extra_data"])
 
             # Send a private message notification letting the user who
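
Note: taken together, the worker hunks switch extra_data handling to read-modify-write: the existing payload is loaded once at the start, and failed_timestamp or export_path is merged into it instead of replacing the whole blob. A self-contained sketch of that lifecycle follows; FakeAuditLogRow and run_export_job are invented names, json and time stand in for orjson and timezone_now, and this is not Zulip code.

# Illustrative sketch (not Zulip code): read-modify-write of the JSON
# extra_data payload so new markers merge with whatever was already recorded.
import json
import time
from dataclasses import dataclass
from typing import Callable, Optional


@dataclass
class FakeAuditLogRow:
    # Stand-in for the RealmAuditLog row the worker updates.
    extra_data: Optional[str] = None


def run_export_job(row: FakeAuditLogRow, do_export: Callable[[], str]) -> None:
    # Seed from the existing payload (if any) so earlier keys survive.
    extra_data = json.loads(row.extra_data) if row.extra_data is not None else {}
    try:
        public_path = do_export()
    except Exception:
        extra_data["failed_timestamp"] = time.time()
        row.extra_data = json.dumps(extra_data)
        return
    extra_data["export_path"] = public_path
    row.extra_data = json.dumps(extra_data)


def boom() -> str:
    raise RuntimeError("export failed")


row = FakeAuditLogRow(extra_data='{"requested": true}')
run_export_job(row, lambda: "/exports/realm.tar.gz")
assert json.loads(row.extra_data) == {"requested": True, "export_path": "/exports/realm.tar.gz"}

failed_row = FakeAuditLogRow()
run_export_job(failed_row, boom)
assert "failed_timestamp" in json.loads(failed_row.extra_data)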