ruff: Fix UP006 Use list instead of List for type annotation.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
Authored by Anders Kaseorg on 2024-07-11 17:30:17 -07:00;
committed by Tim Abbott.
parent c2214b3904
commit e08a24e47f
457 changed files with 3588 additions and 3857 deletions

View File

@@ -2,7 +2,7 @@ import logging
import time
from collections import OrderedDict, defaultdict
from datetime import datetime, timedelta
from typing import Callable, Dict, List, Optional, Sequence, Tuple, Type, Union
from typing import Callable, Optional, Sequence, Union
from django.conf import settings
from django.db import connection, models
@@ -82,7 +82,7 @@ class CountStat:
class LoggingCountStat(CountStat):
def __init__(self, property: str, output_table: Type[BaseCount], frequency: str) -> None:
def __init__(self, property: str, output_table: type[BaseCount], frequency: str) -> None:
CountStat.__init__(self, property, DataCollector(output_table, None), frequency)
@@ -102,7 +102,7 @@ class DependentCountStat(CountStat):
class DataCollector:
def __init__(
self,
output_table: Type[BaseCount],
output_table: type[BaseCount],
pull_function: Optional[Callable[[str, datetime, datetime, Optional[Realm]], int]],
) -> None:
self.output_table = output_table
@@ -311,8 +311,8 @@ def do_increment_logging_stat(
return
table = stat.data_collector.output_table
id_args: Dict[str, Union[int, None]] = {}
conflict_args: List[str] = []
id_args: dict[str, Union[int, None]] = {}
conflict_args: list[str] = []
if table == RealmCount:
assert isinstance(model_object_for_bucket, Realm)
id_args = {"realm_id": model_object_for_bucket.id}
@@ -425,7 +425,7 @@ def do_drop_single_stat(property: str) -> None:
## DataCollector-level operations ##
QueryFn: TypeAlias = Callable[[Dict[str, Composable]], Composable]
QueryFn: TypeAlias = Callable[[dict[str, Composable]], Composable]
def do_pull_by_sql_query(
@@ -433,7 +433,7 @@ def do_pull_by_sql_query(
start_time: datetime,
end_time: datetime,
query: QueryFn,
group_by: Optional[Tuple[Type[models.Model], str]],
group_by: Optional[tuple[type[models.Model], str]],
) -> int:
if group_by is None:
subgroup: Composable = SQL("NULL")
@@ -467,9 +467,9 @@ def do_pull_by_sql_query(
def sql_data_collector(
output_table: Type[BaseCount],
output_table: type[BaseCount],
query: QueryFn,
group_by: Optional[Tuple[Type[models.Model], str]],
group_by: Optional[tuple[type[models.Model], str]],
) -> DataCollector:
def pull_function(
property: str, start_time: datetime, end_time: datetime, realm: Optional[Realm] = None
@@ -533,7 +533,7 @@ def do_pull_minutes_active(
.values_list("user_profile_id", "user_profile__realm_id", "start", "end")
)
seconds_active: Dict[Tuple[int, int], float] = defaultdict(float)
seconds_active: dict[tuple[int, int], float] = defaultdict(float)
for user_id, realm_id, interval_start, interval_end in user_activity_intervals:
if realm is None or realm.id == realm_id:
start = max(start_time, interval_start)
@@ -817,7 +817,7 @@ count_stream_by_realm_query = lambda kwargs: SQL(
).format(**kwargs)
def get_count_stats(realm: Optional[Realm] = None) -> Dict[str, CountStat]:
def get_count_stats(realm: Optional[Realm] = None) -> dict[str, CountStat]:
## CountStat declarations ##
count_stats_ = [

View File

@@ -1,6 +1,5 @@
from math import sqrt
from random import Random
from typing import List
from analytics.lib.counts import CountStat
@@ -16,7 +15,7 @@ def generate_time_series_data(
frequency: str = CountStat.DAY,
partial_sum: bool = False,
random_seed: int = 26,
) -> List[int]:
) -> list[int]:
"""
Generate semi-realistic looking time series data for testing analytics graphs.

View File

@@ -1,5 +1,5 @@
from datetime import datetime, timedelta
from typing import List, Optional
from typing import Optional
from analytics.lib.counts import CountStat
from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC
@@ -11,7 +11,7 @@ from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC
# and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
def time_range(
start: datetime, end: datetime, frequency: str, min_length: Optional[int]
) -> List[datetime]:
) -> list[datetime]:
verify_UTC(start)
verify_UTC(end)
if frequency == CountStat.HOUR:

View File

@@ -1,5 +1,5 @@
from datetime import timedelta
from typing import Any, Dict, List, Mapping, Type, Union
from typing import Any, Mapping, Union
from django.core.files.uploadedfile import UploadedFile
from django.utils.timezone import now as timezone_now
@@ -53,7 +53,7 @@ class Command(ZulipBaseCommand):
spikiness: float,
holiday_rate: float = 0,
partial_sum: bool = False,
) -> List[int]:
) -> list[int]:
self.random_seed += 1
return generate_time_series_data(
days=self.DAYS_OF_DATA,
@@ -147,18 +147,18 @@ class Command(ZulipBaseCommand):
with open(IMAGE_FILE_PATH, "rb") as fp:
upload_message_attachment_from_request(UploadedFile(fp), shylock)
FixtureData: TypeAlias = Mapping[Union[str, int, None], List[int]]
FixtureData: TypeAlias = Mapping[Union[str, int, None], list[int]]
def insert_fixture_data(
stat: CountStat,
fixture_data: FixtureData,
table: Type[BaseCount],
table: type[BaseCount],
) -> None:
end_times = time_range(
last_end_time, last_end_time, stat.frequency, len(next(iter(fixture_data.values())))
)
if table == InstallationCount:
id_args: Dict[str, Any] = {}
id_args: dict[str, Any] = {}
if table == RealmCount:
id_args = {"realm": realm}
if table == UserCount:
@@ -330,7 +330,7 @@ class Command(ZulipBaseCommand):
"true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3),
}
insert_fixture_data(stat, realm_data, RealmCount)
stream_data: Mapping[Union[int, str, None], List[int]] = {
stream_data: Mapping[Union[int, str, None], list[int]] = {
"false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4),
"true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2),
}

View File

@@ -2,7 +2,7 @@ import hashlib
import time
from argparse import ArgumentParser
from datetime import timezone
from typing import Any, Dict
from typing import Any
from django.conf import settings
from django.utils.dateparse import parse_datetime
@@ -43,7 +43,7 @@ class Command(ZulipBaseCommand):
def handle(self, *args: Any, **options: Any) -> None:
self.run_update_analytics_counts(options)
def run_update_analytics_counts(self, options: Dict[str, Any]) -> None:
def run_update_analytics_counts(self, options: dict[str, Any]) -> None:
# installation_epoch relies on there being at least one realm; we
# shouldn't run the analytics code if that condition isn't satisfied
if not Realm.objects.exists():

View File

@@ -1,6 +1,6 @@
from contextlib import AbstractContextManager, ExitStack, contextmanager
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, Iterator, List, Optional, Tuple, Type
from typing import Any, Iterator, Optional
from unittest import mock
import time_machine
@@ -132,7 +132,7 @@ class AnalyticsTestCase(ZulipTestCase):
kwargs[key] = kwargs.get(key, value)
kwargs["delivery_email"] = kwargs["email"]
with time_machine.travel(kwargs["date_joined"], tick=False):
pass_kwargs: Dict[str, Any] = {}
pass_kwargs: dict[str, Any] = {}
if kwargs["is_bot"]:
pass_kwargs["bot_type"] = UserProfile.DEFAULT_BOT
pass_kwargs["bot_owner"] = None
@@ -158,7 +158,7 @@ class AnalyticsTestCase(ZulipTestCase):
)
return user
def create_stream_with_recipient(self, **kwargs: Any) -> Tuple[Stream, Recipient]:
def create_stream_with_recipient(self, **kwargs: Any) -> tuple[Stream, Recipient]:
self.name_counter += 1
defaults = {
"name": f"stream name {self.name_counter}",
@@ -174,7 +174,7 @@ class AnalyticsTestCase(ZulipTestCase):
stream.save(update_fields=["recipient"])
return stream, recipient
def create_huddle_with_recipient(self, **kwargs: Any) -> Tuple[DirectMessageGroup, Recipient]:
def create_huddle_with_recipient(self, **kwargs: Any) -> tuple[DirectMessageGroup, Recipient]:
self.name_counter += 1
defaults = {"huddle_hash": f"hash{self.name_counter}"}
for key, value in defaults.items():
@@ -224,7 +224,7 @@ class AnalyticsTestCase(ZulipTestCase):
# kwargs should only ever be a UserProfile or Stream.
def assert_table_count(
self,
table: Type[BaseCount],
table: type[BaseCount],
value: int,
property: Optional[str] = None,
subgroup: Optional[str] = None,
@@ -246,7 +246,7 @@ class AnalyticsTestCase(ZulipTestCase):
self.assertEqual(queryset.values_list("value", flat=True)[0], value)
def assertTableState(
self, table: Type[BaseCount], arg_keys: List[str], arg_values: List[List[object]]
self, table: type[BaseCount], arg_keys: list[str], arg_values: list[list[object]]
) -> None:
"""Assert that the state of a *Count table is what it should be.
@@ -276,7 +276,7 @@ class AnalyticsTestCase(ZulipTestCase):
"value": 1,
}
for values in arg_values:
kwargs: Dict[str, Any] = {}
kwargs: dict[str, Any] = {}
for i in range(len(values)):
kwargs[arg_keys[i]] = values[i]
for key, value in defaults.items():
@@ -1619,7 +1619,7 @@ class TestLoggingCountStats(AnalyticsTestCase):
def invite_context(
too_many_recent_realm_invites: bool = False, failure: bool = False
) -> Iterator[None]:
managers: List[AbstractContextManager[Any]] = [
managers: list[AbstractContextManager[Any]] = [
mock.patch(
"zerver.actions.invites.too_many_recent_realm_invites", return_value=False
),

View File

@@ -1,5 +1,5 @@
from datetime import datetime, timedelta, timezone
from typing import List, Optional
from typing import Optional
from django.utils.timezone import now as timezone_now
from typing_extensions import override
@@ -84,11 +84,11 @@ class TestGetChartData(ZulipTestCase):
ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(4)
]
def data(self, i: int) -> List[int]:
def data(self, i: int) -> list[int]:
return [0, 0, i, 0]
def insert_data(
self, stat: CountStat, realm_subgroups: List[Optional[str]], user_subgroups: List[str]
self, stat: CountStat, realm_subgroups: list[Optional[str]], user_subgroups: list[str]
) -> None:
if stat.frequency == CountStat.HOUR:
insert_time = self.end_times_hour[2]
@@ -605,7 +605,7 @@ class TestGetChartData(ZulipTestCase):
class TestGetChartDataHelpers(ZulipTestCase):
def test_sort_by_totals(self) -> None:
empty: List[int] = []
empty: list[int] = []
value_arrays = {"c": [0, 1], "a": [9], "b": [1, 1, 1], "d": empty}
self.assertEqual(sort_by_totals(value_arrays), ["a", "b", "c", "d"])

View File

@@ -1,4 +1,4 @@
from typing import List, Union
from typing import Union
from django.conf import settings
from django.conf.urls import include
@@ -16,7 +16,7 @@ from analytics.views.stats import (
)
from zerver.lib.rest import rest_path
i18n_urlpatterns: List[Union[URLPattern, URLResolver]] = [
i18n_urlpatterns: list[Union[URLPattern, URLResolver]] = [
# Server admin (user_profile.is_staff) visible stats pages
path("stats/realm/<realm_str>/", stats_for_realm),
path("stats/installation", stats_for_installation),

View File

@@ -1,7 +1,7 @@
import logging
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar, Union, cast
from typing import Any, Optional, TypeVar, Union, cast
from django.conf import settings
from django.db.models import QuerySet
@@ -260,10 +260,10 @@ def get_chart_data(
stream: Optional[Stream] = None,
) -> HttpResponse:
TableType: TypeAlias = Union[
Type["RemoteInstallationCount"],
Type[InstallationCount],
Type["RemoteRealmCount"],
Type[RealmCount],
type["RemoteInstallationCount"],
type[InstallationCount],
type["RemoteRealmCount"],
type[RealmCount],
]
if for_installation:
if remote:
@@ -282,7 +282,7 @@ def get_chart_data(
aggregate_table = RealmCount
tables: Union[
Tuple[TableType], Tuple[TableType, Type[UserCount]], Tuple[TableType, Type[StreamCount]]
tuple[TableType], tuple[TableType, type[UserCount]], tuple[TableType, type[StreamCount]]
]
if chart_name == "number_of_humans":
@@ -292,7 +292,7 @@ def get_chart_data(
COUNT_STATS["active_users_audit:is_bot:day"],
]
tables = (aggregate_table,)
subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = {
subgroup_to_label: dict[CountStat, dict[Optional[str], str]] = {
stats[0]: {None: "_1day"},
stats[1]: {None: "_15day"},
stats[2]: {"false": "all_time"},
@@ -372,7 +372,7 @@ def get_chart_data(
assert server is not None
assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
aggregate_table_remote = cast(
Union[Type[RemoteInstallationCount], Type[RemoteRealmCount]], aggregate_table
Union[type[RemoteInstallationCount], type[RemoteRealmCount]], aggregate_table
) # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
if not aggregate_table_remote.objects.filter(server=server).exists():
raise JsonableError(
@@ -418,7 +418,7 @@ def get_chart_data(
assert len({stat.frequency for stat in stats}) == 1
end_times = time_range(start, end, stats[0].frequency, min_length)
data: Dict[str, Any] = {
data: dict[str, Any] = {
"end_times": [int(end_time.timestamp()) for end_time in end_times],
"frequency": stats[0].frequency,
}
@@ -471,7 +471,7 @@ def get_chart_data(
return json_success(request, data=data)
def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
def sort_by_totals(value_arrays: dict[str, list[int]]) -> list[str]:
totals = sorted(((sum(values), label) for label, values in value_arrays.items()), reverse=True)
return [label for total, label in totals]
@@ -482,10 +482,10 @@ def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
# understanding the realm's traffic and the user's traffic. This function
# tries to rank the clients so that taking the first N elements of the
# sorted list has a reasonable chance of doing so.
def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
def sort_client_labels(data: dict[str, dict[str, list[int]]]) -> list[str]:
realm_order = sort_by_totals(data["everyone"])
user_order = sort_by_totals(data["user"])
label_sort_values: Dict[str, float] = {label: i for i, label in enumerate(realm_order)}
label_sort_values: dict[str, float] = {label: i for i, label in enumerate(realm_order)}
for i, label in enumerate(user_order):
label_sort_values[label] = min(i - 0.1, label_sort_values.get(label, i))
return [label for label, sort_value in sorted(label_sort_values.items(), key=lambda x: x[1])]
@@ -494,7 +494,7 @@ def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
CountT = TypeVar("CountT", bound=BaseCount)
def table_filtered_to_id(table: Type[CountT], key_id: int) -> QuerySet[CountT]:
def table_filtered_to_id(table: type[CountT], key_id: int) -> QuerySet[CountT]:
if table == RealmCount:
return table._default_manager.filter(realm_id=key_id)
elif table == UserCount:
@@ -535,8 +535,8 @@ def client_label_map(name: str) -> str:
return name
def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[int]]:
mapped_arrays: Dict[str, List[int]] = {}
def rewrite_client_arrays(value_arrays: dict[str, list[int]]) -> dict[str, list[int]]:
mapped_arrays: dict[str, list[int]] = {}
for label, array in value_arrays.items():
mapped_label = client_label_map(label)
if mapped_label in mapped_arrays:
@@ -549,18 +549,18 @@ def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[
def get_time_series_by_subgroup(
stat: CountStat,
table: Type[BaseCount],
table: type[BaseCount],
key_id: int,
end_times: List[datetime],
subgroup_to_label: Dict[Optional[str], str],
end_times: list[datetime],
subgroup_to_label: dict[Optional[str], str],
include_empty_subgroups: bool,
) -> Dict[str, List[int]]:
) -> dict[str, list[int]]:
queryset = (
table_filtered_to_id(table, key_id)
.filter(property=stat.property)
.values_list("subgroup", "end_time", "value")
)
value_dicts: Dict[Optional[str], Dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
value_dicts: dict[Optional[str], dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
for subgroup, end_time, value in queryset:
value_dicts[subgroup][end_time] = value
value_arrays = {}