Files
zulip/zerver/migrations/0239_usermessage_copy_id_to_bigint_id.py
Anders Kaseorg 69730a78cc python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:

import re
import sys

last_filename = None
last_row = None
lines = []

for msg in sys.stdin:
    m = re.match(
        r"\x1b\[35mflake8    \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
    )
    if m:
        filename, row_str, col_str, err = m.groups()
        row, col = int(row_str), int(col_str)

        if filename == last_filename:
            assert last_row != row
        else:
            if last_filename is not None:
                with open(last_filename, "w") as f:
                    f.writelines(lines)

            with open(filename) as f:
                lines = f.readlines()
            last_filename = filename
        last_row = row

        line = lines[row - 1]
        if err in ["C812", "C815"]:
            lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
        elif err in ["C819"]:
            assert line[col - 2] == ","
            lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")

if last_filename is not None:
    with open(last_filename, "w") as f:
        f.writelines(lines)

Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-06-11 16:04:12 -07:00

78 lines
2.7 KiB
Python

# Generated by Django 1.11.23 on 2019-08-21 21:43
import time
from django.db import connection, migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models import Min
from psycopg2.sql import SQL
# Number of UserMessage rows copied per UPDATE statement.  Kept modest so each
# batch holds row locks only briefly on what may be a very large table.
BATCH_SIZE = 10000
def sql_copy_id_to_bigint_id(id_range_lower_bound: int, id_range_upper_bound: int) -> None:
    """Copy UserMessage.id into bigint_id for every row whose id lies in the
    given range (both bounds inclusive, per SQL BETWEEN), using one raw UPDATE.
    """
    update_statement = SQL("""
UPDATE zerver_usermessage
SET bigint_id = id
WHERE id BETWEEN %(lower_bound)s AND %(upper_bound)s
""")
    bind_params = {
        "lower_bound": id_range_lower_bound,
        "upper_bound": id_range_upper_bound,
    }
    with connection.cursor() as cursor:
        cursor.execute(update_statement, bind_params)
def copy_id_to_bigid(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill UserMessage.bigint_id = id in batches of BATCH_SIZE rows.

    Runs without a wrapping transaction (the migration sets atomic = False),
    sleeping briefly between batches to limit lock contention on what may be
    a very large table.
    """
    UserMessage = apps.get_model('zerver', 'UserMessage')
    if not UserMessage.objects.exists():
        # Nothing to do
        return

    # TODO: is the below lookup fast enough, considering there's no index on bigint_id?
    first_uncopied_id = UserMessage.objects.filter(bigint_id__isnull=True,
                                                   ).aggregate(Min('id'))['id__min']
    if first_uncopied_id is None:
        # Rows exist, but every bigint_id is already populated (e.g. a rerun
        # after a partial earlier attempt); aggregate(Min) returned None, and
        # the arithmetic below would raise TypeError.
        return

    # Note: the below id can fall in a segment
    # where bigint_id = id already, but it's not a big problem
    # this will just do some redundant UPDATEs.
    last_id = UserMessage.objects.latest("id").id

    id_range_lower_bound = first_uncopied_id
    id_range_upper_bound = first_uncopied_id + BATCH_SIZE
    while id_range_upper_bound <= last_id:
        sql_copy_id_to_bigint_id(id_range_lower_bound, id_range_upper_bound)
        id_range_lower_bound = id_range_upper_bound + 1
        id_range_upper_bound = id_range_lower_bound + BATCH_SIZE
        time.sleep(0.1)

    # Copy the final partial batch.  The comparison must be >=, not >:
    # after the loop, id_range_lower_bound can equal last_id exactly (one
    # remaining row), and a strict > would skip that row, leaving its
    # bigint_id NULL.
    if last_id >= id_range_lower_bound:
        sql_copy_id_to_bigint_id(id_range_lower_bound, last_id)
class Migration(migrations.Migration):
    # CREATE UNIQUE INDEX CONCURRENTLY (below) cannot run inside a
    # transaction block, so this migration must be non-atomic.
    atomic = False

    dependencies = [
        ('zerver', '0238_usermessage_bigint_id'),
    ]

    operations = [
        # Install a trigger that mirrors id into bigint_id for every row
        # inserted while (and after) the backfill below is running, so no
        # new row is left with a NULL bigint_id.
        migrations.RunSQL("""
CREATE FUNCTION zerver_usermessage_bigint_id_to_id_trigger_function()
RETURNS trigger AS $$
BEGIN
NEW.bigint_id = NEW.id;
RETURN NEW;
END
$$ LANGUAGE 'plpgsql';
CREATE TRIGGER zerver_usermessage_bigint_id_to_id_trigger
BEFORE INSERT ON zerver_usermessage
FOR EACH ROW
EXECUTE PROCEDURE zerver_usermessage_bigint_id_to_id_trigger_function();
"""),
        # Backfill existing rows in batches; elidable so squashed migrations
        # on fresh installs can skip it.
        migrations.RunPython(copy_id_to_bigid, elidable=True),
        # Build the unique index without blocking concurrent writes.
        migrations.RunSQL("""
CREATE UNIQUE INDEX CONCURRENTLY zerver_usermessage_bigint_id_idx ON zerver_usermessage (bigint_id);
"""),
    ]