commit 19b8cde27f
parent 5bad79dd5b
Author: Anders Kaseorg <anders@zulip.com>
Committed by: Tim Abbott
Date: 2024-12-20 12:18:06 -08:00

    ruff: Fix PLC0206 Extracting value from dictionary without calling .items().

    Signed-off-by: Anders Kaseorg <anders@zulip.com>

18 changed files with 87 additions and 117 deletions
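PLC0206 flags loops that iterate over a dict's keys and then index back into the dict to fetch each value; rewriting with `.items()` yields each key/value pair directly and avoids the repeated lookup. A minimal sketch of the before/after pattern this commit applies throughout (the `counts` dict here is hypothetical):

    counts = {"realm-a": 3, "realm-b": 7}

    # Flagged by PLC0206: the value is re-fetched by key on every iteration.
    for string_id in counts:
        print(string_id, counts[string_id])

    # Preferred: .items() yields each (key, value) pair in one step.
    for string_id, realm_count in counts.items():
        print(string_id, realm_count)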

View File

@@ -79,8 +79,8 @@ def get_realm_day_counts() -> dict[str, dict[str, Markup]]:
         return Markup('<td class="number {good_bad}">{cnt}</td>').format(good_bad=good_bad, cnt=cnt)

     result = {}
-    for string_id in counts:
-        raw_cnts = [counts[string_id].get(age, 0) for age in range(8)]
+    for string_id, realm_counts in counts.items():
+        raw_cnts = [realm_counts.get(age, 0) for age in range(8)]
         min_cnt = min(raw_cnts[1:])
         max_cnt = max(raw_cnts[1:])

View File

@@ -68,9 +68,9 @@ def main() -> None:
     metrics = metrics_file["metrics"]

     while True:
-        for metric_id in metrics:
+        for metric_id, query in metrics.items():
             try:
-                update_metric(metric_id, metrics.get(metric_id), page_id, oauth_token)
+                update_metric(metric_id, query, page_id, oauth_token)
             except Exception as e:
                 logging.exception(e)
         time.sleep(30)

View File

@@ -54,7 +54,7 @@ for line in output.split("\n"):

 now = int(time.time())

-for queue_name in consumers:
+for queue_name, count in consumers.items():
     target_count = 1
     if queue_name == "notify_tornado":
         target_count = TORNADO_PROCESSES
@@ -67,6 +67,6 @@ for queue_name in consumers:
     atomic_nagios_write(
         "check-rabbitmq-consumers-" + queue_name,
-        "critical" if consumers[queue_name] < target_count else "ok",
-        f"queue {queue_name} has {consumers[queue_name]} consumers, needs {target_count}",
+        "critical" if count < target_count else "ok",
+        f"queue {queue_name} has {count} consumers, needs {target_count}",
    )

View File

@@ -212,17 +212,17 @@ def update_for_legacy_stream_translations(
 ) -> None:
     number_of_updates = 0
     updated_translations: dict[str, str] = {}
-    for line in current:
+    for line, translation in current.items():
         # If the string has a legacy string mapped and see if it's
         # not currently translated (e.g. an empty string), then use
         # the legacy translated string (which might be an empty string).
-        if line in LEGACY_STRINGS_MAP and current[line] == "":
+        if line in LEGACY_STRINGS_MAP and translation == "":
             legacy_string = LEGACY_STRINGS_MAP[line]
             if legacy_string in legacy:
                 updated_translations[line] = legacy[legacy_string]
                 number_of_updates += 1
         else:
-            updated_translations[line] = current[line]
+            updated_translations[line] = translation

     # Only replace file content if we've made any updates for legacy
     # translated strings.

View File

@@ -25,12 +25,14 @@ langs = {
     for alias in aliases
 }

-for name in priorities:
-    if langs.get(name) is None:
-        langs[name] = {
-            "priority": priorities[name],
-            "pretty_name": name,
-        }
+langs |= {
+    name: {
+        "priority": priority,
+        "pretty_name": name,
+    }
+    for name, priority in priorities.items()
+    if name not in langs
+}

 with open(OUT_PATH, "w") as f:
     json.dump({"langs": langs}, f)
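One subtlety in this rewrite: the old guard `langs.get(name) is None` is also true when a key is present but maps to `None`, whereas the new `if name not in langs` tests only key absence. The two agree here because every value in `langs` is a dict built from the aliases above; a small sketch of the general difference, using a throwaway dict:

    d = {"present": None}

    print(d.get("present") is None)  # True — the key exists, but its value is None
    print("present" not in d)        # False — the key is present
    print("absent" not in d)         # True — the key is genuinely missing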

View File

@@ -87,8 +87,8 @@ def generate_emoji_catalog(

     # Sort the emojis according to iamcal's sort order. This sorting determines the
     # order in which emojis will be displayed in emoji picker.
-    for category in emoji_catalog:
-        emoji_catalog[category].sort(key=lambda emoji_code: sort_order[emoji_code])
+    for emoji_codes in emoji_catalog.values():
+        emoji_codes.sort(key=lambda emoji_code: sort_order[emoji_code])

     return dict(emoji_catalog)
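When the loop body never uses the key, the PLC0206 fix is `.values()` rather than `.items()`, as above. Mutating each value in place (here, sorting a list) is safe during iteration because the dict's key set never changes; a quick sketch with made-up data:

    catalog = {"Smileys": [3, 1, 2], "Animals": [9, 7, 8]}
    sort_order = {n: n for n in range(10)}

    # Sorting each list in place does not add or remove dict keys,
    # so iterating over .values() while mutating is safe.
    for emoji_codes in catalog.values():
        emoji_codes.sort(key=lambda code: sort_order[code])

    print(catalog)  # {'Smileys': [1, 2, 3], 'Animals': [7, 8, 9]}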

View File

@@ -140,10 +140,8 @@ def generate_emoji_code_to_emoji_names_maps() -> None:
         else:
             reverse_unified_reactions_map[emoji_code] = [name]

-    for emoji_code in reverse_unified_reactions_map:
-        emoji_code_to_gemoji_names[emoji_code] = ", ".join(
-            reverse_unified_reactions_map[emoji_code]
-        )
+    for emoji_code, names in reverse_unified_reactions_map.items():
+        emoji_code_to_gemoji_names[emoji_code] = ", ".join(names)

     # Prepare iamcal names map.
     for emoji_dict in EMOJI_DATA:

View File

@@ -447,11 +447,11 @@ def run_tests_via_node_js() -> int:
 def check_line_coverage(
     fn: str, line_coverage: dict[Any, Any], line_mapping: dict[Any, Any], log: bool = True
 ) -> bool:
-    missing_lines = []
-    for line in line_coverage:
-        if line_coverage[line] == 0:
-            actual_line = line_mapping[line]
-            missing_lines.append(str(actual_line["start"]["line"]))
+    missing_lines = [
+        str(line_mapping[line]["start"]["line"])
+        for line, coverage in line_coverage.items()
+        if coverage == 0
+    ]
     if missing_lines:
         if log:
             print_error(f"{fn} no longer has complete node test coverage")
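Once `.items()` exposes both the line key and its hit count, the append-in-a-loop form collapses into a filtered list comprehension, as above. A self-contained sketch with invented coverage data shows the equivalence:

    line_coverage = {1: 5, 2: 0, 3: 2, 4: 0}
    line_mapping = {n: {"start": {"line": n + 100}} for n in line_coverage}

    # Keep the mapped location of every line whose hit count is zero.
    missing_lines = [
        str(line_mapping[line]["start"]["line"])
        for line, coverage in line_coverage.items()
        if coverage == 0
    ]
    print(missing_lines)  # ['102', '104']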

View File

@@ -267,11 +267,11 @@ def get_realm_authentication_methods_for_page_params_api(
     from corporate.models import CustomerPlan

-    for backend_name in result_dict:
+    for backend_name, backend_result in result_dict.items():
         available_for = AUTH_BACKEND_NAME_MAP[backend_name].available_for_cloud_plans

         if available_for is not None and realm.plan_type not in available_for:
-            result_dict[backend_name]["available"] = False
+            backend_result["available"] = False

             required_upgrade_plan_number = min(
                 set(available_for).intersection({Realm.PLAN_TYPE_STANDARD, Realm.PLAN_TYPE_PLUS})
@@ -286,11 +286,11 @@ def get_realm_authentication_methods_for_page_params_api(
                     CustomerPlan.TIER_CLOUD_PLUS
                 )

-            result_dict[backend_name]["unavailable_reason"] = _(
+            backend_result["unavailable_reason"] = _(
                 "You need to upgrade to the {required_upgrade_plan_name} plan to use this authentication method."
             ).format(required_upgrade_plan_name=required_upgrade_plan_name)
         else:
-            result_dict[backend_name]["available"] = True
+            backend_result["available"] = True

     return result_dict
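This rewrite stays behavior-preserving because `backend_result` is the same nested dict object stored in `result_dict`, so writes through it mutate `result_dict` exactly as the old `result_dict[backend_name][...]` assignments did. A sketch of the aliasing, with a toy dict:

    result_dict = {"email": {"available": True}}

    for backend_name, backend_result in result_dict.items():
        # backend_result aliases result_dict[backend_name]; writing
        # through it updates the outer dict in place.
        backend_result["available"] = False

    print(result_dict)  # {'email': {'available': False}}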

View File

@@ -160,8 +160,7 @@ def convert_channel_data(
         channel_members_map[channel_name] = []
         channel_admins_map[channel_name] = []

-    for username in user_data_map:
-        user_dict = user_data_map[username]
+    for username, user_dict in user_data_map.items():
         teams = user_dict["teams"]
         if user_dict["teams"] is None:
             continue
@@ -844,8 +843,7 @@ def label_mirror_dummy_users(

 def reset_mirror_dummy_users(username_to_user: dict[str, dict[str, Any]]) -> None:
-    for username in username_to_user:
-        user = username_to_user[username]
+    for user in username_to_user.values():
         user["is_mirror_dummy"] = False

View File

@@ -64,8 +64,7 @@ def process_users(
     realm_owners: list[int] = []
     bots: list[int] = []

-    for rc_user_id in user_id_to_user_map:
-        user_dict = user_id_to_user_map[rc_user_id]
+    for rc_user_id, user_dict in user_id_to_user_map.items():
         is_mirror_dummy = False
         is_bot = False
         is_active = True
@@ -166,9 +165,7 @@ def convert_channel_data(
 ) -> list[ZerverFieldsT]:
     streams = []

-    for rc_room_id in room_id_to_room_map:
-        channel_dict = room_id_to_room_map[rc_room_id]
-
+    for rc_room_id, channel_dict in room_id_to_room_map.items():
         date_created = float(channel_dict["ts"].timestamp())
         stream_id = stream_id_mapper.get(rc_room_id)
         invite_only = channel_dict["t"] == "p"
@@ -214,9 +211,7 @@ def convert_stream_subscription_data(
 ) -> None:
     stream_members_map: dict[int, set[int]] = {}

-    for rc_user_id in user_id_to_user_map:
-        user_dict = user_id_to_user_map[rc_user_id]
-
+    for rc_user_id, user_dict in user_id_to_user_map.items():
         if not user_dict.get("__rooms"):
             continue
@@ -249,11 +244,10 @@ def convert_direct_message_group_data(
 ) -> list[ZerverFieldsT]:
     zerver_direct_message_group: list[ZerverFieldsT] = []

-    for rc_direct_message_group_id in direct_message_group_id_to_direct_message_group_map:
-        direct_message_group_dict = direct_message_group_id_to_direct_message_group_map[
-            rc_direct_message_group_id
-        ]
-
+    for (
+        rc_direct_message_group_id,
+        direct_message_group_dict,
+    ) in direct_message_group_id_to_direct_message_group_map.items():
         direct_message_group_id = direct_message_group_id_mapper.get(rc_direct_message_group_id)
         direct_message_group = build_direct_message_group(
             direct_message_group_id, len(direct_message_group_dict["uids"])

View File

@@ -238,10 +238,10 @@ def cache_set_many(
     items: dict[str, Any], cache_name: str | None = None, timeout: int | None = None
 ) -> None:
     new_items = {}
-    for key in items:
+    for key, item in items.items():
         new_key = KEY_PREFIX + key
         validate_cache_key(new_key)
-        new_items[new_key] = items[key]
+        new_items[new_key] = item
     items = new_items
     remote_cache_stats_start()
     get_cache_backend(cache_name).set_many(items, timeout=timeout)

View File

@@ -184,80 +184,64 @@ class APIReturnValuesTablePreprocessor(Preprocessor):
     def render_table(self, return_values: dict[str, Any], spacing: int) -> list[str]:
         IGNORE = ["result", "msg", "ignored_parameters_unsupported"]
         ans = []
-        for return_value in return_values:
+        for return_value, schema in return_values.items():
             if return_value in IGNORE:
                 continue
-            if "oneOf" in return_values[return_value]:
+            if "oneOf" in schema:
                 # For elements using oneOf there are two descriptions. The first description
                 # should be at level with the oneOf and should contain the basic non-specific
                 # description of the endpoint. Then for each element of oneOf there is a
                 # specialized description for that particular case. The description used
                 # right below is the main description.
-                data_type = generate_data_type(return_values[return_value])
+                data_type = generate_data_type(schema)
                 ans.append(
-                    self.render_desc(
-                        return_values[return_value]["description"], spacing, data_type, return_value
-                    )
+                    self.render_desc(schema["description"], spacing, data_type, return_value)
                 )
-                ans += self.render_oneof_block(return_values[return_value], spacing + 4)
+                ans += self.render_oneof_block(schema, spacing + 4)
                 continue
-            description = return_values[return_value]["description"]
-            data_type = generate_data_type(return_values[return_value])
-            check_deprecated_consistency(
-                return_values[return_value].get("deprecated", False), description
-            )
+            description = schema["description"]
+            data_type = generate_data_type(schema)
+            check_deprecated_consistency(schema.get("deprecated", False), description)
             ans.append(self.render_desc(description, spacing, data_type, return_value))
-            if "properties" in return_values[return_value]:
-                ans += self.render_table(return_values[return_value]["properties"], spacing + 4)
-            if return_values[return_value].get("additionalProperties", False):
-                data_type = generate_data_type(return_values[return_value]["additionalProperties"])
+            if "properties" in schema:
+                ans += self.render_table(schema["properties"], spacing + 4)
+            if schema.get("additionalProperties", False):
+                data_type = generate_data_type(schema["additionalProperties"])
                 ans.append(
                     self.render_desc(
-                        return_values[return_value]["additionalProperties"]["description"],
+                        schema["additionalProperties"]["description"],
                         spacing + 4,
                         data_type,
                     )
                 )
-                if "properties" in return_values[return_value]["additionalProperties"]:
+                if "properties" in schema["additionalProperties"]:
                     ans += self.render_table(
-                        return_values[return_value]["additionalProperties"]["properties"],
+                        schema["additionalProperties"]["properties"],
                         spacing + 8,
                     )
-                elif "oneOf" in return_values[return_value]["additionalProperties"]:
-                    ans += self.render_oneof_block(
-                        return_values[return_value]["additionalProperties"], spacing + 8
-                    )
-                elif return_values[return_value]["additionalProperties"].get(
-                    "additionalProperties", False
-                ):
+                elif "oneOf" in schema["additionalProperties"]:
+                    ans += self.render_oneof_block(schema["additionalProperties"], spacing + 8)
+                elif schema["additionalProperties"].get("additionalProperties", False):
                     data_type = generate_data_type(
-                        return_values[return_value]["additionalProperties"]["additionalProperties"]
+                        schema["additionalProperties"]["additionalProperties"]
                    )
                     ans.append(
                         self.render_desc(
-                            return_values[return_value]["additionalProperties"][
-                                "additionalProperties"
-                            ]["description"],
+                            schema["additionalProperties"]["additionalProperties"]["description"],
                             spacing + 8,
                             data_type,
                         )
                     )
                     ans += self.render_table(
-                        return_values[return_value]["additionalProperties"]["additionalProperties"][
-                            "properties"
-                        ],
+                        schema["additionalProperties"]["additionalProperties"]["properties"],
                         spacing + 12,
                     )
-            if "items" in return_values[return_value]:
-                if "properties" in return_values[return_value]["items"]:
-                    ans += self.render_table(
-                        return_values[return_value]["items"]["properties"], spacing + 4
-                    )
-                elif "oneOf" in return_values[return_value]["items"]:
-                    ans += self.render_oneof_block(
-                        return_values[return_value]["items"], spacing + 4
-                    )
+            if "items" in schema:
+                if "properties" in schema["items"]:
+                    ans += self.render_table(schema["items"]["properties"], spacing + 4)
+                elif "oneOf" in schema["items"]:
+                    ans += self.render_oneof_block(schema["items"], spacing + 4)

         return ans

     def generate_event_strings(self, event_data: EventData) -> list[str]:

View File

@@ -55,16 +55,15 @@ def create_role_based_system_groups(
             # failure, and had already processed this realm.
             continue

-        role_system_groups_dict = {}
-        for role in SYSTEM_USER_GROUP_ROLE_MAP:
-            user_group_params = SYSTEM_USER_GROUP_ROLE_MAP[role]
-            user_group = UserGroup(
+        role_system_groups_dict = {
+            role: UserGroup(
                 name=user_group_params["name"],
                 description=user_group_params["description"],
                 realm=realm,
                 is_system_group=True,
             )
-            role_system_groups_dict[role] = user_group
+            for role, user_group_params in SYSTEM_USER_GROUP_ROLE_MAP.items()
+        }

         full_members_system_group = UserGroup(
             name="@role:fullmembers",

View File

@@ -64,16 +64,15 @@ def create_role_based_system_groups_for_internal_realms(
         # failure, and had already created groups.
         return

-    role_system_groups_dict = {}
-    for role in SYSTEM_USER_GROUP_ROLE_MAP:
-        user_group_params = SYSTEM_USER_GROUP_ROLE_MAP[role]
-        user_group = UserGroup(
+    role_system_groups_dict = {
+        role: UserGroup(
             name=user_group_params["name"],
             description=user_group_params["description"],
             realm=realm,
             is_system_group=True,
         )
-        role_system_groups_dict[role] = user_group
+        for role, user_group_params in SYSTEM_USER_GROUP_ROLE_MAP.items()
+    }

     full_members_system_group = UserGroup(
         name="@role:fullmembers",

View File

@@ -481,11 +481,11 @@ class BaseAction(ZulipTestCase):
                 print(json.dumps(event, indent=4))

             print("\nMISMATCHES:\n")
-            for k in state1:
-                if state1[k] != state2[k]:
+            for k, v1 in state1.items():
+                if v1 != state2[k]:
                     print("\nkey = " + k)
                     try:
-                        self.assertEqual({k: state1[k]}, {k: state2[k]})
+                        self.assertEqual({k: v1}, {k: state2[k]})
                     except AssertionError as e:
                         print(e)
                         print(

View File

@@ -752,7 +752,6 @@ class PermissionTest(ZulipTestCase):
     def test_admin_user_can_change_profile_data(self) -> None:
         realm = get_realm("zulip")
         self.login("iago")
-        new_profile_data = []
         cordelia = self.example_user("cordelia")

         # Setting editable_by_user to false shouldn't affect admin's ability
@@ -780,14 +779,13 @@ class PermissionTest(ZulipTestCase):
             "Pronouns": "she/her",
         }
-        for field_name in fields:
-            field = CustomProfileField.objects.get(name=field_name, realm=realm)
-            new_profile_data.append(
-                {
-                    "id": field.id,
-                    "value": fields[field_name],
-                }
-            )
+        new_profile_data = [
+            {
+                "id": CustomProfileField.objects.get(name=field_name, realm=realm).id,
+                "value": value,
+            }
+            for field_name, value in fields.items()
+        ]
         result = self.client_patch(
             f"/json/users/{cordelia.id}", {"profile_data": orjson.dumps(new_profile_data).decode()}

View File

@@ -237,9 +237,7 @@ class MissedMessageWorker(QueueProcessingWorker):
                 trigger=event.trigger, mentioned_user_group_id=event.mentioned_user_group_id
             )

-        for user_profile_id in events_by_recipient:
-            events = events_by_recipient[user_profile_id]
-
+        for user_profile_id, events in events_by_recipient.items():
             logging.info(
                 "Batch-processing %s missedmessage_emails events for user %s",
                 len(events),