import: Import RealmAuditLog data when `zerver_realmauditlog` is missing.

* If `zerver_realmauditlog` is present in the exported data,
  `RealmAuditLog` would be imported normally.

* If it is not present, the `create_subscription_events`
  function would create the `subscription_created`
  events for RealmAuditLog. The reason this function
  lives in the `import_realm` module and not in the individual
  export tool scripts (like Slack's) is that it is
  common to all export tools.

This fixes #9846 for users who have not already done an import of
their organization from Slack.

Fixes #9846.
This commit is contained in:
Rhea Parekh
2018-07-06 00:58:21 +05:30
committed by Tim Abbott
parent 70b4794816
commit 4bbccd8287
2 changed files with 62 additions and 14 deletions

View File

@@ -9,7 +9,8 @@ from boto.s3.connection import S3Connection
from boto.s3.key import Key
from django.conf import settings
from django.db import connection
from django.utils.timezone import utc as timezone_utc
from django.db.models import Max
from django.utils.timezone import utc as timezone_utc, now as timezone_now
from typing import Any, Dict, List, Optional, Set, Tuple, \
Iterable
@@ -95,6 +96,42 @@ def fix_upload_links(data: TableData, message_table: TableName) -> None:
if message['rendered_content']:
message['rendered_content'] = message['rendered_content'].replace(key, value)
def create_subscription_events(data: TableData, table: TableName) -> None:
    """
    When the export data doesn't contain the table `zerver_realmauditlog`,
    this function creates RealmAuditLog objects for `subscription_created`
    type events for all the existing Stream subscriptions.

    This is needed for all the export tools which do not include the
    table `zerver_realmauditlog` (Slack, Gitter, etc.) because the appropriate
    data about when a user was subscribed is not exported by the third-party
    service.

    :param data: the already-imported export data; `data[table]` holds the
        `zerver_subscription` rows (dicts with `recipient_id` and
        `user_profile_id` keys referencing already-imported rows).
    :param table: the table name to read subscription rows from
        (expected to be 'zerver_subscription').
    """
    all_subscription_logs = []

    # Mirrors bulk_add_subscriptions in lib/actions: stamp every log
    # entry with the highest message id seen so far (-1 for an empty
    # realm) and a single shared event time.
    event_last_message_id = Message.objects.aggregate(Max('id'))['id__max']
    if event_last_message_id is None:
        event_last_message_id = -1
    event_time = timezone_now()

    # Fetch all referenced rows up front with in_bulk rather than
    # issuing three .get() queries per subscription row (avoids N+1
    # query behavior on large imports).  A missing id here would raise
    # KeyError below, which (like DoesNotExist) aborts the import.
    recipients = Recipient.objects.in_bulk(
        {item['recipient_id'] for item in data[table]})
    streams = Stream.objects.in_bulk(
        {recipient.type_id for recipient in recipients.values()
         if recipient.type == Recipient.STREAM})
    users = UserProfile.objects.in_bulk(
        {item['user_profile_id'] for item in data[table]})

    for item in data[table]:
        recipient = recipients[item['recipient_id']]
        # Only stream subscriptions get subscription_created events;
        # personal/huddle recipients are skipped.
        if recipient.type != Recipient.STREAM:
            continue
        stream = streams[recipient.type_id]
        user = users[item['user_profile_id']]
        all_subscription_logs.append(RealmAuditLog(realm=user.realm,
                                                   acting_user=user,
                                                   modified_user=user,
                                                   modified_stream=stream,
                                                   event_last_message_id=event_last_message_id,
                                                   event_time=event_time,
                                                   event_type=RealmAuditLog.SUBSCRIPTION_CREATED))
    # One INSERT for all log rows instead of one per subscription.
    RealmAuditLog.objects.bulk_create(all_subscription_logs)
def current_table_ids(data: TableData, table: TableName) -> List[int]:
"""
Returns the ids present in the current table
@@ -512,6 +549,21 @@ def do_import_realm(import_dir: Path, subdomain: str) -> Realm:
update_model_ids(Subscription, data, 'zerver_subscription', 'subscription')
bulk_import_model(data, Subscription, 'zerver_subscription')
if 'zerver_realmauditlog' in data:
fix_datetime_fields(data, 'zerver_realmauditlog')
re_map_foreign_keys(data, 'zerver_realmauditlog', 'realm', related_table="realm")
re_map_foreign_keys(data, 'zerver_realmauditlog', 'modified_user',
related_table='user_profile')
re_map_foreign_keys(data, 'zerver_realmauditlog', 'acting_user',
related_table='user_profile')
re_map_foreign_keys(data, 'zerver_realmauditlog', 'modified_stream',
related_table="stream")
update_model_ids(RealmAuditLog, data, 'zerver_realmauditlog',
related_table="realmauditlog")
bulk_import_model(data, RealmAuditLog, 'zerver_realmauditlog')
else:
create_subscription_events(data, 'zerver_subscription')
fix_datetime_fields(data, 'zerver_userpresence')
re_map_foreign_keys(data, 'zerver_userpresence', 'user_profile', related_table="user_profile")
re_map_foreign_keys(data, 'zerver_userpresence', 'client', related_table='client')
@@ -543,18 +595,6 @@ def do_import_realm(import_dir: Path, subdomain: str) -> Realm:
related_table="customprofilefieldvalue")
bulk_import_model(data, CustomProfileFieldValue, 'zerver_customprofilefieldvalue')
fix_datetime_fields(data, 'zerver_realmauditlog')
re_map_foreign_keys(data, 'zerver_realmauditlog', 'realm', related_table="realm")
re_map_foreign_keys(data, 'zerver_realmauditlog', 'modified_user',
related_table='user_profile')
re_map_foreign_keys(data, 'zerver_realmauditlog', 'acting_user',
related_table='user_profile')
re_map_foreign_keys(data, 'zerver_realmauditlog', 'modified_stream',
related_table="stream")
update_model_ids(RealmAuditLog, data, 'zerver_realmauditlog',
related_table="realmauditlog")
bulk_import_model(data, RealmAuditLog, 'zerver_realmauditlog')
# Import uploaded files and avatars
import_uploads(os.path.join(import_dir, "avatars"), processing_avatars=True)
import_uploads(os.path.join(import_dir, "uploads"))

View File

@@ -36,6 +36,7 @@ from zerver.lib.test_classes import (
from zerver.models import (
Realm,
get_realm,
RealmAuditLog,
)
from zerver.lib import mdiff
@@ -539,7 +540,14 @@ class SlackImporter(ZulipTestCase):
# test import of the converted slack data into an existing database
do_import_realm(output_dir, test_realm_subdomain)
self.assertTrue(get_realm(test_realm_subdomain).name, test_realm_subdomain)
realm = get_realm(test_realm_subdomain)
self.assertTrue(realm.name, test_realm_subdomain)
# test RealmAuditLog
realmauditlog = RealmAuditLog.objects.filter(realm=realm)
realmauditlog_event_type = {log.event_type for log in realmauditlog}
self.assertEqual(realmauditlog_event_type, {'subscription_created'})
Realm.objects.filter(name=test_realm_subdomain).delete()
remove_folder(output_dir)