Log the time spent querying memcached when it exceeds 5ms.

(imported from commit a4de15026d24526a446b724500d1194dce824d1a)
This commit is contained in:
Tim Abbott
2013-04-18 18:00:33 -04:00
parent 5e22778843
commit 66b3c1fbff
2 changed files with 46 additions and 1 deletion

View File

@@ -6,6 +6,17 @@ from django.core.cache import cache as djcache
from django.core.cache import get_cache from django.core.cache import get_cache
from zephyr.lib.utils import statsd, statsd_key, make_safe_digest from zephyr.lib.utils import statsd, statsd_key, make_safe_digest
import time
memcached_total_time = 0
memcached_total_requests = 0
def get_memcached_time():
return memcached_total_time
def get_memcached_requests():
return memcached_total_requests
def cache_with_key(keyfunc, cache_name=None, timeout=None): def cache_with_key(keyfunc, cache_name=None, timeout=None):
"""Decorator which applies Django caching to a function. """Decorator which applies Django caching to a function.
@@ -18,13 +29,19 @@ def cache_with_key(keyfunc, cache_name=None, timeout=None):
def decorator(func): def decorator(func):
@wraps(func) @wraps(func)
def func_with_caching(*args, **kwargs): def func_with_caching(*args, **kwargs):
global memcached_total_time
global memcached_total_requests
memcached_start = time.time()
if cache_name is None: if cache_name is None:
cache_backend = djcache cache_backend = djcache
else: else:
cache_backend = get_cache(cache_name) cache_backend = get_cache(cache_name)
key = keyfunc(*args, **kwargs) key = keyfunc(*args, **kwargs)
val = cache_backend.get(key) val = cache_backend.get(key)
memcached_total_requests += 1
memcached_total_time += (time.time() - memcached_start)
if val is not None: if val is not None:
statsd.incr("cache.%s.hit" % (statsd_key(key),)) statsd.incr("cache.%s.hit" % (statsd_key(key),))
@@ -37,7 +54,13 @@ def cache_with_key(keyfunc, cache_name=None, timeout=None):
return val[0] return val[0]
val = func(*args, **kwargs) val = func(*args, **kwargs)
memcached_start = time.time()
cache_backend.set(key, (val,), timeout=timeout) cache_backend.set(key, (val,), timeout=timeout)
memcached_total_requests += 1
memcached_total_time += (time.time() - memcached_start)
return val return val
return func_with_caching return func_with_caching

View File

@@ -5,6 +5,7 @@ from zephyr.decorator import RequestVariableMissingError, RequestVariableConvers
from zephyr.lib.response import json_error from zephyr.lib.response import json_error
from django.db import connection from django.db import connection
from zephyr.lib.utils import statsd from zephyr.lib.utils import statsd
from zephyr.lib.cache import get_memcached_time, get_memcached_requests
import logging import logging
import time import time
@@ -13,13 +14,19 @@ logger = logging.getLogger('humbug.requests')
def async_request_stop(request): def async_request_stop(request):
request._time_stopped = time.time() request._time_stopped = time.time()
request._memcached_time_stopped = get_memcached_time()
request._memcached_requests_stopped = get_memcached_requests()
def async_request_restart(request): def async_request_restart(request):
request._time_restarted = time.time() request._time_restarted = time.time()
request._memcached_time_restarted = get_memcached_time()
request._memcached_requests_restarted = get_memcached_requests()
class LogRequests(object): class LogRequests(object):
def process_request(self, request): def process_request(self, request):
request._time_started = time.time() request._time_started = time.time()
request._memcached_time_start = get_memcached_time()
request._memcached_requests_start = get_memcached_requests()
def process_response(self, request, response): def process_response(self, request, response):
def timedelta_ms(timedelta): def timedelta_ms(timedelta):
@@ -53,6 +60,20 @@ class LogRequests(object):
time_delta = ((request._time_stopped - request._time_started) + time_delta = ((request._time_stopped - request._time_started) +
(time.time() - request._time_restarted)) (time.time() - request._time_restarted))
optional_orig_delta = " (lp: %s)" % (format_timedelta(orig_time_delta),) optional_orig_delta = " (lp: %s)" % (format_timedelta(orig_time_delta),)
memcached_output = ""
if hasattr(request, '_memcached_time_start'):
memcached_time_delta = get_memcached_time() - request._memcached_time_start
memcached_count_delta = get_memcached_requests() - request._memcached_requests_start
if hasattr(request, "_memcached_requests_stopped"):
# (now - restarted) + (stopped - start) = (now - start) + (stopped - restarted)
memcached_time_delta += (request._memcached_time_stopped -
request._memcached_time_restarted)
memcached_count_delta += (request._memcached_requests_stopped -
request._memcached_requests_restarted)
if (memcached_time_delta > 0.005):
memcached_output = " (mem: %s/%s)" % (format_timedelta(memcached_time_delta),
memcached_count_delta)
# Get the amount of time spent doing database queries # Get the amount of time spent doing database queries
db_time_output = "" db_time_output = ""
@@ -76,9 +97,10 @@ class LogRequests(object):
except Exception: except Exception:
client = "?" client = "?"
logger.info('%-15s %-7s %3d %5s%s%s %s (%s via %s)' % logger.info('%-15s %-7s %3d %5s%s%s%s %s (%s via %s)' %
(remote_ip, request.method, response.status_code, (remote_ip, request.method, response.status_code,
format_timedelta(time_delta), optional_orig_delta, format_timedelta(time_delta), optional_orig_delta,
memcached_output,
db_time_output, request.get_full_path(), email, client)) db_time_output, request.get_full_path(), email, client))
# Log some additional data whenever we return certain 40x errors # Log some additional data whenever we return certain 40x errors