utils: Add process_list_in_batches().
@@ -23,7 +23,7 @@ from zerver.lib.message import save_message_rendered_content
 from zerver.lib.bugdown import version as bugdown_version
 from zerver.lib.upload import random_name, sanitize_name, \
     S3UploadBackend, LocalUploadBackend, guess_type
-from zerver.lib.utils import generate_api_key
+from zerver.lib.utils import generate_api_key, process_list_in_batches
 from zerver.models import UserProfile, Realm, Client, Huddle, Stream, \
     UserMessage, Subscription, Message, RealmEmoji, \
     RealmDomain, Recipient, get_user_profile_by_id, \
@@ -435,15 +435,13 @@ def bulk_import_user_message_data(data: TableData, dump_file_id: int) -> None:
         ]
         bulk_insert_ums(ums)
 
-    offset = 0
     chunk_size = 10000
 
-    while True:
-        items = lst[offset:offset+chunk_size]
-        if not items:
-            break
-        process_batch(items)
-        offset += chunk_size
+    process_list_in_batches(
+        lst=lst,
+        chunk_size=chunk_size,
+        process_batch=process_batch,
+    )
 
     logging.info("Successfully imported %s from %s[%s]." % (model, table, dump_file_id))
 
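For readers following the call-site change above, here is a minimal sketch (not part of the commit) of the same keyword-argument calling pattern that replaces the manual offset loop. The names `demo_rows`, `seen_batches`, and `record_batch` are made up for illustration, and the import assumes this runs inside a Zulip checkout where `zerver.lib.utils` is available.

# Minimal sketch: exercising the calling pattern used in
# bulk_import_user_message_data above, with made-up data.
from typing import Any, Dict, List

from zerver.lib.utils import process_list_in_batches

demo_rows: List[Dict[str, Any]] = [{'id': i} for i in range(25)]  # made-up data
seen_batches: List[int] = []

def record_batch(items: List[Dict[str, Any]]) -> None:
    # Stand-in for the real process_batch closure (which bulk-inserts
    # UserMessage rows); here we only record how many items each batch has.
    seen_batches.append(len(items))

process_list_in_batches(
    lst=demo_rows,
    chunk_size=10,
    process_batch=record_batch,
)

assert seen_batches == [10, 10, 5]  # the final partial batch is still processed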
@@ -177,6 +177,18 @@ def query_chunker(queries: List[Any],
 
         yield [row for row_id, i, row in tup_chunk]
 
+def process_list_in_batches(lst: List[Any],
+                            chunk_size: int,
+                            process_batch: Callable[[List[Any]], None]) -> None:
+    offset = 0
+
+    while True:
+        items = lst[offset:offset+chunk_size]
+        if not items:
+            break
+        process_batch(items)
+        offset += chunk_size
+
 def split_by(array: List[Any], group_size: int, filler: Any) -> List[List[Any]]:
     """
     Group elements into list of size `group_size` and fill empty cells with
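The loop added above has two edge cases worth spelling out: an empty list never invokes process_batch, and a list shorter than chunk_size is handed over as a single batch. Below is a quick standalone check of that behavior; the function body is copied verbatim from the hunk above so it runs without a Zulip checkout, and the name `calls` is made up for the example.

# Standalone check of the batching semantics of process_list_in_batches.
from typing import Any, Callable, List

def process_list_in_batches(lst: List[Any],
                            chunk_size: int,
                            process_batch: Callable[[List[Any]], None]) -> None:
    offset = 0

    while True:
        items = lst[offset:offset+chunk_size]
        if not items:
            break
        process_batch(items)
        offset += chunk_size

calls: List[List[int]] = []

# An empty list results in no calls at all.
process_list_in_batches(lst=[], chunk_size=3, process_batch=calls.append)
assert calls == []

# A list shorter than chunk_size is processed as one batch.
process_list_in_batches(lst=[1, 2], chunk_size=3, process_batch=calls.append)
assert calls == [[1, 2]]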