python: Normalize quotes with Black.
Signed-off-by: Anders Kaseorg <anders@zulip.com>
commit 6e4c3e41dc
parent 11741543da
committed by Tim Abbott
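Black's string normalization rewrites single-quoted Python literals to double quotes unless the switch would require adding backslash escapes; that is the only change in every hunk below. A minimal sketch of the effect, assuming Black is installed and run over the changed file (the second string is an illustrative example, not a line from this diff):

    # Before running `black <file>`
    logger = logging.getLogger('zulip.requests')
    note = 'He said "hello"'

    # After
    logger = logging.getLogger("zulip.requests")
    note = 'He said "hello"'  # unchanged: double-quoting would force \" escapes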
@@ -34,16 +34,16 @@ from zerver.lib.types import ViewFuncT
 from zerver.lib.utils import statsd
 from zerver.models import Realm, flush_per_request_caches, get_realm
 
-logger = logging.getLogger('zulip.requests')
-slow_query_logger = logging.getLogger('zulip.slow_queries')
+logger = logging.getLogger("zulip.requests")
+slow_query_logger = logging.getLogger("zulip.slow_queries")
 
 
 def record_request_stop_data(log_data: MutableMapping[str, Any]) -> None:
-    log_data['time_stopped'] = time.time()
-    log_data['remote_cache_time_stopped'] = get_remote_cache_time()
-    log_data['remote_cache_requests_stopped'] = get_remote_cache_requests()
-    log_data['markdown_time_stopped'] = get_markdown_time()
-    log_data['markdown_requests_stopped'] = get_markdown_requests()
+    log_data["time_stopped"] = time.time()
+    log_data["remote_cache_time_stopped"] = get_remote_cache_time()
+    log_data["remote_cache_requests_stopped"] = get_remote_cache_requests()
+    log_data["markdown_time_stopped"] = get_markdown_time()
+    log_data["markdown_requests_stopped"] = get_markdown_requests()
     if settings.PROFILE_ALL_REQUESTS:
         log_data["prof"].disable()
 
@@ -55,11 +55,11 @@ def async_request_timer_stop(request: HttpRequest) -> None:
 def record_request_restart_data(log_data: MutableMapping[str, Any]) -> None:
     if settings.PROFILE_ALL_REQUESTS:
         log_data["prof"].enable()
-    log_data['time_restarted'] = time.time()
-    log_data['remote_cache_time_restarted'] = get_remote_cache_time()
-    log_data['remote_cache_requests_restarted'] = get_remote_cache_requests()
-    log_data['markdown_time_restarted'] = get_markdown_time()
-    log_data['markdown_requests_restarted'] = get_markdown_requests()
+    log_data["time_restarted"] = time.time()
+    log_data["remote_cache_time_restarted"] = get_remote_cache_time()
+    log_data["remote_cache_requests_restarted"] = get_remote_cache_requests()
+    log_data["markdown_time_restarted"] = get_markdown_time()
+    log_data["markdown_requests_restarted"] = get_markdown_requests()
 
 
 def async_request_timer_restart(request: HttpRequest) -> None:
@@ -76,11 +76,11 @@ def record_request_start_data(log_data: MutableMapping[str, Any]) -> None:
         log_data["prof"].enable()
 
     reset_queries()
-    log_data['time_started'] = time.time()
-    log_data['remote_cache_time_start'] = get_remote_cache_time()
-    log_data['remote_cache_requests_start'] = get_remote_cache_requests()
-    log_data['markdown_time_start'] = get_markdown_time()
-    log_data['markdown_requests_start'] = get_markdown_requests()
+    log_data["time_started"] = time.time()
+    log_data["remote_cache_time_start"] = get_remote_cache_time()
+    log_data["remote_cache_requests_start"] = get_remote_cache_requests()
+    log_data["markdown_time_start"] = get_markdown_time()
+    log_data["markdown_requests_start"] = get_markdown_requests()
 
 
 def timedelta_ms(timedelta: float) -> float:
@@ -103,31 +103,31 @@ def is_slow_query(time_delta: float, path: str) -> bool:
     )
     if is_exempt:
         return time_delta >= 5
-    if 'webathena_kerberos' in path:
+    if "webathena_kerberos" in path:
         return time_delta >= 10
     return True
 
 
 statsd_blacklisted_requests = [
-    'do_confirm',
-    'signup_send_confirm',
-    'new_realm_send_confirm',
-    'eventslast_event_id',
-    'webreq.content',
-    'avatar',
-    'user_uploads',
-    'password.reset',
-    'static',
-    'json.bots',
-    'json.users',
-    'json.streams',
-    'accounts.unsubscribe',
-    'apple-touch-icon',
-    'emoji',
-    'json.bots',
-    'upload_file',
-    'realm_activity',
-    'user_activity',
+    "do_confirm",
+    "signup_send_confirm",
+    "new_realm_send_confirm",
+    "eventslast_event_id",
+    "webreq.content",
+    "avatar",
+    "user_uploads",
+    "password.reset",
+    "static",
+    "json.bots",
+    "json.users",
+    "json.streams",
+    "accounts.unsubscribe",
+    "apple-touch-icon",
+    "emoji",
+    "json.bots",
+    "upload_file",
+    "realm_activity",
+    "user_activity",
 ]
 
 
@@ -146,50 +146,50 @@ def write_log_line(
     if error_content is not None:
         error_content_iter = (error_content,)
 
-    if settings.STATSD_HOST != '':
+    if settings.STATSD_HOST != "":
         # For statsd timer name
-        if path == '/':
-            statsd_path = 'webreq'
+        if path == "/":
+            statsd_path = "webreq"
         else:
-            statsd_path = "webreq.{}".format(path[1:].replace('/', '.'))
+            statsd_path = "webreq.{}".format(path[1:].replace("/", "."))
         # Remove non-ascii chars from path (there should be none, if there are it's
         # because someone manually entered a nonexistent path), as UTF-8 chars make
         # statsd sad when it sends the key name over the socket
-        statsd_path = statsd_path.encode('ascii', errors='ignore').decode("ascii")
+        statsd_path = statsd_path.encode("ascii", errors="ignore").decode("ascii")
         # TODO: This could probably be optimized to use a regular expression rather than a loop.
         suppress_statsd = any(
             blacklisted in statsd_path for blacklisted in statsd_blacklisted_requests
         )
     else:
         suppress_statsd = True
-        statsd_path = ''
+        statsd_path = ""
 
     time_delta = -1
     # A time duration of -1 means the StartLogRequests middleware
     # didn't run for some reason
     optional_orig_delta = ""
-    if 'time_started' in log_data:
-        time_delta = time.time() - log_data['time_started']
-    if 'time_stopped' in log_data:
+    if "time_started" in log_data:
+        time_delta = time.time() - log_data["time_started"]
+    if "time_stopped" in log_data:
         orig_time_delta = time_delta
-        time_delta = (log_data['time_stopped'] - log_data['time_started']) + (
-            time.time() - log_data['time_restarted']
+        time_delta = (log_data["time_stopped"] - log_data["time_started"]) + (
+            time.time() - log_data["time_restarted"]
         )
         optional_orig_delta = f" (lp: {format_timedelta(orig_time_delta)})"
     remote_cache_output = ""
-    if 'remote_cache_time_start' in log_data:
-        remote_cache_time_delta = get_remote_cache_time() - log_data['remote_cache_time_start']
+    if "remote_cache_time_start" in log_data:
+        remote_cache_time_delta = get_remote_cache_time() - log_data["remote_cache_time_start"]
         remote_cache_count_delta = (
-            get_remote_cache_requests() - log_data['remote_cache_requests_start']
+            get_remote_cache_requests() - log_data["remote_cache_requests_start"]
         )
-        if 'remote_cache_requests_stopped' in log_data:
+        if "remote_cache_requests_stopped" in log_data:
             # (now - restarted) + (stopped - start) = (now - start) + (stopped - restarted)
             remote_cache_time_delta += (
-                log_data['remote_cache_time_stopped'] - log_data['remote_cache_time_restarted']
+                log_data["remote_cache_time_stopped"] - log_data["remote_cache_time_restarted"]
             )
             remote_cache_count_delta += (
-                log_data['remote_cache_requests_stopped']
-                - log_data['remote_cache_requests_restarted']
+                log_data["remote_cache_requests_stopped"]
+                - log_data["remote_cache_requests_restarted"]
             )
 
     if remote_cache_time_delta > 0.005:
@@ -202,20 +202,20 @@ def write_log_line(
         statsd.incr(f"{statsd_path}.remote_cache.querycount", remote_cache_count_delta)
 
     startup_output = ""
-    if 'startup_time_delta' in log_data and log_data["startup_time_delta"] > 0.005:
+    if "startup_time_delta" in log_data and log_data["startup_time_delta"] > 0.005:
         startup_output = " (+start: {})".format(format_timedelta(log_data["startup_time_delta"]))
 
     markdown_output = ""
-    if 'markdown_time_start' in log_data:
-        markdown_time_delta = get_markdown_time() - log_data['markdown_time_start']
-        markdown_count_delta = get_markdown_requests() - log_data['markdown_requests_start']
-        if 'markdown_requests_stopped' in log_data:
+    if "markdown_time_start" in log_data:
+        markdown_time_delta = get_markdown_time() - log_data["markdown_time_start"]
+        markdown_count_delta = get_markdown_requests() - log_data["markdown_requests_start"]
+        if "markdown_requests_stopped" in log_data:
             # (now - restarted) + (stopped - start) = (now - start) + (stopped - restarted)
             markdown_time_delta += (
-                log_data['markdown_time_stopped'] - log_data['markdown_time_restarted']
+                log_data["markdown_time_stopped"] - log_data["markdown_time_restarted"]
             )
             markdown_count_delta += (
-                log_data['markdown_requests_stopped'] - log_data['markdown_requests_restarted']
+                log_data["markdown_requests_stopped"] - log_data["markdown_requests_restarted"]
             )
 
     if markdown_time_delta > 0.005:
@@ -231,7 +231,7 @@ def write_log_line(
     db_time_output = ""
     queries = connection.connection.queries if connection.connection is not None else []
     if len(queries) > 0:
-        query_time = sum(float(query.get('time', 0)) for query in queries)
+        query_time = sum(float(query.get("time", 0)) for query in queries)
         db_time_output = f" (db: {format_timedelta(query_time)}/{len(queries)}q)"
 
     if not suppress_statsd:
@@ -240,13 +240,13 @@ def write_log_line(
         statsd.incr(f"{statsd_path}.dbq", len(queries))
         statsd.timing(f"{statsd_path}.total", timedelta_ms(time_delta))
 
-    if 'extra' in log_data:
-        extra_request_data = " {}".format(log_data['extra'])
+    if "extra" in log_data:
+        extra_request_data = " {}".format(log_data["extra"])
     else:
         extra_request_data = ""
     logger_client = f"({requestor_for_logs} via {client_name})"
-    logger_timing = f'{format_timedelta(time_delta):>5}{optional_orig_delta}{remote_cache_output}{markdown_output}{db_time_output}{startup_output} {path}'
-    logger_line = f'{remote_ip:<15} {method:<7} {status_code:3} {logger_timing}{extra_request_data} {logger_client}'
+    logger_timing = f"{format_timedelta(time_delta):>5}{optional_orig_delta}{remote_cache_output}{markdown_output}{db_time_output}{startup_output} {path}"
+    logger_line = f"{remote_ip:<15} {method:<7} {status_code:3} {logger_timing}{extra_request_data} {logger_client}"
     if status_code in [200, 304] and method == "GET" and path.startswith("/static"):
         logger.debug(logger_line)
     else:
@@ -265,14 +265,14 @@ def write_log_line(
         assert error_content_iter is not None
         error_content_list = list(error_content_iter)
         if not error_content_list:
-            error_data = ''
+            error_data = ""
         elif isinstance(error_content_list[0], str):
-            error_data = ''.join(error_content_list)
+            error_data = "".join(error_content_list)
         elif isinstance(error_content_list[0], bytes):
-            error_data = repr(b''.join(error_content_list))
+            error_data = repr(b"".join(error_content_list))
         if len(error_data) > 200:
             error_data = "[content more than 200 characters]"
-        logger.info('status=%3d, data=%s, uid=%s', status_code, error_data, requestor_for_logs)
+        logger.info("status=%3d, data=%s, uid=%s", status_code, error_data, requestor_for_logs)
 
 
 class RequestContext(MiddlewareMixin):
@@ -330,16 +330,16 @@ class LogRequests(MiddlewareMixin):
             # intends to block, so we stop here to avoid unnecessary work.
             return response
 
-        remote_ip = request.META['REMOTE_ADDR']
+        remote_ip = request.META["REMOTE_ADDR"]
 
         # Get the requestor's identifier and client, if available.
         try:
            requestor_for_logs = request._requestor_for_logs
        except Exception:
-            if hasattr(request, 'user') and hasattr(request.user, 'format_requestor_for_logs'):
+            if hasattr(request, "user") and hasattr(request.user, "format_requestor_for_logs"):
                 requestor_for_logs = request.user.format_requestor_for_logs()
             else:
-                requestor_for_logs = "unauth@{}".format(get_subdomain(request) or 'root')
+                requestor_for_logs = "unauth@{}".format(get_subdomain(request) or "root")
         try:
             client = request.client.name
         except Exception:
@@ -377,7 +377,7 @@ class JsonErrorHandler(MiddlewareMixin):
         self, request: HttpRequest, exception: Exception
     ) -> Optional[HttpResponse]:
         if isinstance(exception, MissingAuthenticationError):
-            if 'text/html' in request.META.get('HTTP_ACCEPT', ''):
+            if "text/html" in request.META.get("HTTP_ACCEPT", ""):
                 # If this looks like a request from a top-level page in a
                 # browser, send the user to the login page.
                 #
@@ -385,13 +385,13 @@ class JsonErrorHandler(MiddlewareMixin):
                 # execute the likely intent for intentionally visiting
                 # an API endpoint without authentication in a browser,
                 # but that's an unlikely to be done intentionally often.
-                return HttpResponseRedirect(f'{settings.HOME_NOT_LOGGED_IN}?next={request.path}')
+                return HttpResponseRedirect(f"{settings.HOME_NOT_LOGGED_IN}?next={request.path}")
             if request.path.startswith("/api"):
                 # For API routes, ask for HTTP basic auth (email:apiKey).
                 return json_unauthorized()
             else:
                 # For /json routes, ask for session authentication.
-                return json_unauthorized(www_authenticate='session')
+                return json_unauthorized(www_authenticate="session")
 
         if isinstance(exception, JsonableError):
             return json_response_from_error(exception)
@@ -419,7 +419,7 @@ class TagRequests(MiddlewareMixin):
 class CsrfFailureError(JsonableError):
     http_status_code = 403
     code = ErrorCode.CSRF_FAILED
-    data_fields = ['reason']
+    data_fields = ["reason"]
 
     def __init__(self, reason: str) -> None:
         self.reason: str = reason
@@ -445,16 +445,16 @@ class LocaleMiddleware(DjangoLocaleMiddleware):
         # https://code.djangoproject.com/ticket/32005
         language = translation.get_language()
         language_from_path = translation.get_language_from_path(request.path_info)
-        urlconf = getattr(request, 'urlconf', settings.ROOT_URLCONF)
+        urlconf = getattr(request, "urlconf", settings.ROOT_URLCONF)
         i18n_patterns_used, _ = is_language_prefix_patterns_used(urlconf)
         if not (i18n_patterns_used and language_from_path):
-            patch_vary_headers(response, ('Accept-Language',))
-            response.setdefault('Content-Language', language)
+            patch_vary_headers(response, ("Accept-Language",))
+            response.setdefault("Content-Language", language)
 
         # An additional responsibility of our override of this middleware is to save the user's language
         # preference in a cookie. That determination is made by code handling the request
         # and saved in the _set_language flag so that it can be used here.
-        if hasattr(request, '_set_language'):
+        if hasattr(request, "_set_language"):
             response.set_cookie(settings.LANGUAGE_COOKIE_NAME, request._set_language)
 
         return response
@@ -466,21 +466,21 @@ class RateLimitMiddleware(MiddlewareMixin):
     ) -> None:
         # The limit on the action that was requested is the minimum of the limits that get applied:
         limit = min(result.entity.max_api_calls() for result in rate_limit_results)
-        response['X-RateLimit-Limit'] = str(limit)
+        response["X-RateLimit-Limit"] = str(limit)
         # Same principle applies to remaining API calls:
         remaining_api_calls = min(result.remaining for result in rate_limit_results)
-        response['X-RateLimit-Remaining'] = str(remaining_api_calls)
+        response["X-RateLimit-Remaining"] = str(remaining_api_calls)
 
         # The full reset time is the maximum of the reset times for the limits that get applied:
         reset_time = time.time() + max(result.secs_to_freedom for result in rate_limit_results)
-        response['X-RateLimit-Reset'] = str(int(reset_time))
+        response["X-RateLimit-Reset"] = str(int(reset_time))
 
     def process_response(self, request: HttpRequest, response: HttpResponse) -> HttpResponse:
         if not settings.RATE_LIMITING:
             return response
 
         # Add X-RateLimit-*** headers
-        if hasattr(request, '_ratelimits_applied'):
+        if hasattr(request, "_ratelimits_applied"):
             self.set_response_headers(response, request._ratelimits_applied)
 
         return response
@@ -531,14 +531,14 @@ class SetRemoteAddrFromForwardedFor(MiddlewareMixin):
 
     def process_request(self, request: HttpRequest) -> None:
         try:
-            real_ip = request.META['HTTP_X_FORWARDED_FOR']
+            real_ip = request.META["HTTP_X_FORWARDED_FOR"]
         except KeyError:
             return None
         else:
             # HTTP_X_FORWARDED_FOR can be a comma-separated list of IPs.
             # For NGINX reverse proxy servers, the client's IP will be the first one.
             real_ip = real_ip.split(",")[0].strip()
-            request.META['REMOTE_ADDR'] = real_ip
+            request.META["REMOTE_ADDR"] = real_ip
 
 
 def alter_content(request: HttpRequest, content: bytes) -> bytes: