Add a bunch of statsd logging

(imported from commit 8c17a76fed08c34abab3cc45d76a0fb30774d73a)
commit 652b821d64
parent 46415d4984
Author: Leo Franchi
Date:   2013-04-16 16:58:21 -04:00
5 changed files with 53 additions and 8 deletions

View File

@@ -24,8 +24,10 @@ from zephyr.lib.create_user import create_user
 from zephyr.lib import bugdown
 from zephyr.lib.cache import cache_with_key, user_profile_by_id_cache_key, \
     user_profile_by_email_cache_key, status_dict_cache_key
-from zephyr.decorator import get_user_profile_by_email, json_to_list, JsonableError
+from zephyr.decorator import get_user_profile_by_email, json_to_list, JsonableError, \
+    statsd_increment
 from zephyr.lib.event_queue import request_event_queue, get_user_events
+from zephyr.lib.utils import log_statsd_event, statsd
 import confirmation.settings
@@ -559,6 +561,7 @@ def get_default_subs(user_profile):
     return [default.stream for default in
             DefaultStream.objects.filter(realm=user_profile.realm)]

+@statsd_increment('user_activity')
 @transaction.commit_on_success
 def do_update_user_activity(user_profile, client, query, log_time):
     try:
@@ -593,6 +596,7 @@ def send_presence_changed(user_profile, presence):
                                               is_active=True)])
     tornado_callbacks.send_notification(notice)

+@statsd_increment('user_presence')
 @transaction.commit_on_success
 def do_update_user_presence(user_profile, client, log_time, status):
     try:
@@ -633,6 +637,8 @@ def update_message_flags(user_profile, operation, flag, messages, all):
     rest_until = None
     if all:
+        log_statsd_event('bankruptcy')
+
         # Do the first 450 message updates in-process, as this is a
         # bankruptcy request and the user is about to reload. We don't
         # want them to see a bunch of unread messages while we go about
@@ -707,6 +713,8 @@ def process_update_message_flags(event):
     elif op == 'remove':
         msgs.update(flags=F('flags').bitand(~flagattr))
+
+    statsd.incr("flags.%s.%s" % (flag, op), len(msgs))
     return True

 def update_flags_externally(op, flag, user_profile, until_id):
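
The statsd_increment decorator and log_statsd_event helper used above come from zephyr.decorator and zephyr.lib.utils respectively; their implementations are not part of this diff. A minimal sketch of what they might look like, assuming the same shared statsd client object:

# Sketch, not part of this commit: plausible implementations of the two
# helpers the hunks above call into.
from functools import wraps

def statsd_increment(counter, val=1):
    # Bump a statsd counter each time the decorated function completes.
    def wrapper(func):
        @wraps(func)
        def wrapped_func(*args, **kwargs):
            ret = func(*args, **kwargs)
            statsd.incr(counter, val)
            return ret
        return wrapped_func
    return wrapper

def log_statsd_event(name):
    # Record a one-off named event, e.g. events.bankruptcy.
    statsd.incr("events.%s" % (name,))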

View File

@@ -5,6 +5,8 @@ from django.core.cache import get_cache
 from utils import make_safe_digest

+from zephyr.lib.utils import statsd, statsd_key
+
 def cache_with_key(keyfunc, cache_name=None, timeout=None):
     """Decorator which applies Django caching to a function.
@@ -24,6 +26,11 @@ def cache_with_key(keyfunc, cache_name=None, timeout=None):
             key = keyfunc(*args, **kwargs)
             val = cache_backend.get(key)

+            if val is not None:
+                statsd.incr("cache.%s.hit" % (statsd_key(key),))
+            else:
+                statsd.incr("cache.%s.miss" % (statsd_key(key),))
+
             # Values are singleton tuples so that we can distinguish
             # a result of None from a missing key.
             if val is not None:
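
statsd_key is also imported from zephyr.lib.utils without being shown here; judging by the call sites it must normalize an arbitrary cache key into a statsd-safe bucket name, so that per-user keys aggregate into one hit/miss counter. A hypothetical version:

# Hypothetical sketch of statsd_key; the real implementation in
# zephyr.lib.utils is not shown in this diff.
import re

def statsd_key(key):
    # Keep only the key's prefix (e.g. "user_profile_by_email" from
    # "user_profile_by_email:<hash>") so every user hits the same bucket,
    # then strip characters statsd would treat as separators.
    prefix = key.split(':')[0]
    return re.sub(r'[^A-Za-z0-9_]', '_', prefix)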

View File

@@ -8,6 +8,8 @@ import threading
 import atexit
 from collections import defaultdict

+from zephyr.lib.utils import statsd
+
 # This simple queuing library doesn't expose much of the power of
 # rabbitmq/pika's queuing system; its purpose is to just provide an
 # interface for external files to put things into queues and take them
@@ -67,12 +69,16 @@ class SimpleQueueClient(object):
             callback()

     def publish(self, queue_name, body):
-        self.ensure_queue(queue_name,
-                          lambda: self.channel.basic_publish(
-                              exchange='',
-                              routing_key=queue_name,
-                              properties=pika.BasicProperties(delivery_mode=2),
-                              body=body))
+        def do_publish():
+            self.channel.basic_publish(
+                exchange='',
+                routing_key=queue_name,
+                properties=pika.BasicProperties(delivery_mode=2),
+                body=body)
+            statsd.incr("rabbitmq.publish.%s" % (queue_name,))
+
+        self.ensure_queue(queue_name, do_publish)

     def json_publish(self, queue_name, body):
         try:
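
The switch from a lambda to a named do_publish closure is what makes room for the counter: a lambda body is limited to a single expression, so publishing and then incrementing requires a def. With this change every publish bumps a per-queue counter, for example (hypothetical queue name):

# Hypothetical usage: each publish now also increments
# rabbitmq.publish.<queue_name> in statsd.
client = SimpleQueueClient()
client.publish('user_activity', '{"query": "get_old_messages"}')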

View File

@@ -2,6 +2,7 @@ from django.conf import settings
 from decorator import RequestVariableMissingError, RequestVariableConversionError
 from zephyr.lib.response import json_error
 from django.db import connection
+from zephyr.lib.utils import statsd

 import logging
 import time
@@ -13,10 +14,20 @@ class LogRequests(object):
         request._time_started = time.time()

     def process_response(self, request, response):
+        def timedelta_ms(timedelta):
+            return timedelta * 1000
+
         def format_timedelta(timedelta):
             if (timedelta >= 1):
                 return "%.1fs" % (timedelta)
-            return "%.0fms" % (timedelta * 1000,)
+            return "%.0fms" % (timedelta_ms(timedelta),)
+
+        # For statsd timer name
+        if request.get_full_path() == '/':
+            statsd_path = 'webreq'
+        else:
+            statsd_path = "webreq.%s" % (request.get_full_path()[1:].replace('/', '.'),)

         # The reverse proxy might have sent us the real external IP
         remote_ip = request.META.get('HTTP_X_REAL_IP')
@@ -42,6 +53,11 @@ class LogRequests(object):
             db_time_output = " (db: %s/%sq)" % (format_timedelta(query_time),
                                                 len(connection.queries))

+        # Log ms, db ms, and num queries to statsd
+        statsd.timing("%s.dbtime" % (statsd_path,), timedelta_ms(time_delta))
+        statsd.incr("%s.dbq" % (statsd_path, ), len(connection.queries))
+        statsd.timing("%s.total" % (statsd_path,), timedelta_ms(time_delta))
+
         # Get the requestor's email address and client, if available.
         try:
             email = request._email
@@ -63,6 +79,7 @@ class LogRequests(object):
                 if len(content) > 100:
                     content = "[content more than 100 characters]"
             logger.info('status=%3d, data=%s, uid=%s' % (response.status_code, content, email))
+
         return response

 class JsonErrorHandler(object):
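
The statsd_path logic maps each URL onto a dotted timer name: the root path becomes webreq, and any other path becomes webreq. plus the path with slashes turned into dots. Restated standalone for illustration:

# Standalone restatement of the statsd_path mapping above.
def statsd_path_for(full_path):
    if full_path == '/':
        return 'webreq'
    return "webreq.%s" % (full_path[1:].replace('/', '.'),)

assert statsd_path_for('/') == 'webreq'
assert statsd_path_for('/json/get_old_messages') == 'webreq.json.get_old_messages'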

View File

@@ -45,10 +45,12 @@ from zephyr.lib.timestamp import datetime_to_timestamp
 from zephyr.lib.cache import cache_with_key
 from zephyr.lib.unminify import SourceMap
 from zephyr.lib.queue import queue_json_publish
+from zephyr.lib.utils import statsd
 from zephyr import tornado_callbacks

 from confirmation.models import Confirmation

 import datetime
 import simplejson
 import re
@@ -129,6 +131,7 @@ def send_signup_message(sender, signups_stream, user_profile, internal=False):
     )

 def notify_new_user(user_profile, internal=False):
+    statsd.gauge('users.signup', 1, delta=True)
     send_signup_message("humbug+signups@humbughq.com", "signups", user_profile, internal)

 class PrincipalError(JsonableError):
@@ -477,6 +480,8 @@ def home(request):
         max_message_id = register_ret['max_message_id']
     ))

+    statsd.incr('views.home')
+
     try:
         isnt_mit(user_profile.email)
         show_invites = True
@@ -722,6 +727,8 @@ def get_old_messages_backend(request, user_profile,
     message_list = [dict(umessage.message.to_dict(apply_markdown),
                          **umessage.flags_dict())
                     for umessage in messages]
+
+    statsd.incr('loaded_old_messages', len(message_list))
     ret = {'messages': message_list,
            "result": "success",
            "msg": ""}