Mirror of https://github.com/zulip/zulip.git (synced 2025-10-23 04:52:12 +00:00)

Compare commits
114 Commits (SHA1)

726ab9c4fa
b7c87a4d82
1bf0a92eb6
63eece23a9
1cdec46aa9
c47039dfb6
bccec80062
6e287db98e
9489d2d850
226a96bdb8
a4bf15bbc7
2bb3af1ade
5797f013b3
1195841dfb
dca727f178
77d5a37539
2c16b9905d
3c5e69aa0d
32af9a95d5
3eeaafed73
e9e25fd319
a1f62a66ae
d52a5a1d2d
f2022834bd
54f981bfeb
d6daf11e03
4e5bb844b4
9230d3304f
cd07bcdd29
89cbe7f53b
a41cbad45c
633c91facf
e8acffcb8a
342c7d946a
f95c807100
d11f32909c
d232ce0078
4f7c463257
2eb713552f
2da9d25a9a
388565c7bd
a88643313f
6d7a3cf4d7
8f1ac265e4
38af195ab4
e63245eee3
cb86bb7d84
b1d5bb4ae4
1c0653136c
3db5a03f5c
967ff6f770
67cb813740
5f2c3cd835
7b897cac77
94395c6dd8
38cd21b1b4
e074165c1b
abe645f1d3
064f5ccf8f
19fbf9c3cf
eb7e44f7ed
b08824d92f
d9a1617d00
6f2903dd29
afd06bdb46
57cdef11c4
8020247b79
01adbc3a2e
8b739e4d0b
1fb079d63b
95482235ad
f8d74fab08
049b83f0bb
bffa709ec8
ddae999601
57cd185366
d39a7ea429
088f8745d1
f32f02da8b
76d6d69568
4a1e98f574
487632b454
848276ee3b
d740b1ae19
48d8b90863
eeeb947187
507cf1d322
f3f90bb527
46d6541958
13eaa49a42
1157aef8b3
65eb125d61
713d6739ec
70c0c7a83f
c1ee7692d6
ad336800d0
e9e3eafdde
df68a3e963
faaf84bb01
c082547021
d6c7199ce1
29b3dd0852
0ffc42083e
019e5a17f0
177673c84e
f6c1a31988
870cd00f5f
7db599deaa
84d2be5e0c
d360833d7f
bc3db1701b
e8aca7b723
7a72390710
3ffe4ca3e5
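In a full clone of zulip/zulip, the same range can be listed with git. A
minimal sketch (which of 726ab9c4fa and 3ffe4ca3e5 is the newer endpoint is an
assumption of mine, not stated by this view; swap them if the range comes back
empty):

    git log --oneline 3ffe4ca3e5..726ab9c4fa   # the 114 commits being compared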
(deleted file)
@@ -1,5 +0,0 @@
> 0.15%
> 0.15% in US
last 2 versions
Firefox ESR
not dead and supports async-functions
.circleci/config.yml (new file, 161 lines)
@@ -0,0 +1,161 @@
# See https://zulip.readthedocs.io/en/latest/testing/continuous-integration.html for
# high-level documentation on our CircleCI setup.
# See CircleCI upstream's docs on this config format:
#   https://circleci.com/docs/2.0/language-python/
#
version: 2
aliases:
  - &create_cache_directories
    run:
      name: create cache directories
      command: |
        dirs=(/srv/zulip-{npm,venv}-cache)
        sudo mkdir -p "${dirs[@]}"
        sudo chown -R circleci "${dirs[@]}"

  - &restore_cache_package_json
    restore_cache:
      keys:
        - v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}

  - &restore_cache_requirements
    restore_cache:
      keys:
        - v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor.txt" }}-{{ checksum "requirements/dev.txt" }}

  - &install_dependencies
    run:
      name: install dependencies
      command: |
        sudo apt-get update
        # Install moreutils so we can use `ts` and `mispipe` in the following.
        sudo apt-get install -y moreutils

        # CircleCI sets the following in Git config at clone time:
        #   url.ssh://git@github.com.insteadOf https://github.com
        # This breaks the Git clones in the NVM `install.sh` we run
        # in `install-node`.
        # TODO: figure out why that breaks, and whether we want it.
        #   (Is it an optimization?)
        rm -f /home/circleci/.gitconfig

        # This is the main setup job for the test suite
        mispipe "tools/ci/setup-backend" ts

        # Cleaning caches is mostly unnecessary in Circle, because
        # most builds don't get to write to the cache.
        # mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0" ts

  - &save_cache_package_json
    save_cache:
      paths:
        - /srv/zulip-npm-cache
      key: v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}

  - &save_cache_requirements
    save_cache:
      paths:
        - /srv/zulip-venv-cache
      key: v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor.txt" }}-{{ checksum "requirements/dev.txt" }}
  # TODO: in Travis we also cache ~/zulip-emoji-cache, ~/node, ~/misc

  - &run_backend_tests
    run:
      name: run backend tests
      command: |
        . /srv/zulip-py3-venv/bin/activate
        mispipe ./tools/ci/backend ts

  - &run_frontend_tests
    run:
      name: run frontend tests
      command: |
        . /srv/zulip-py3-venv/bin/activate
        mispipe ./tools/ci/frontend ts

  - &upload_coverage_report
    run:
      name: upload coverage report
      command: |
        . /srv/zulip-py3-venv/bin/activate
        pip install codecov && codecov \
          || echo "Error in uploading coverage reports to codecov.io."

jobs:
  "trusty-python-3.4":
    docker:
      # This is built from tools/circleci/images/trusty/Dockerfile .
      - image: gregprice/circleci:trusty-python-5.test

    working_directory: ~/zulip

    steps:
      - checkout

      - *create_cache_directories
      - *restore_cache_package_json
      - *restore_cache_requirements
      - *install_dependencies
      - *save_cache_package_json
      - *save_cache_requirements
      - *run_backend_tests
      - *run_frontend_tests
      - *upload_coverage_report

      # - store_artifacts:  # TODO
      #     path: var/casper/
      #     # also /tmp/zulip-test-event-log/
      #     destination: test-reports

  "xenial-python-3.5":
    docker:
      # This is built from tools/circleci/images/xenial/Dockerfile .
      - image: gregprice/circleci:xenial-python-4.test

    working_directory: ~/zulip

    steps:
      - checkout
      - *create_cache_directories
      - *restore_cache_package_json
      - *restore_cache_requirements
      - *install_dependencies
      - *save_cache_package_json
      - *save_cache_requirements
      - *run_backend_tests
      - *upload_coverage_report

  "bionic-python-3.6":
    docker:
      # This is built from tools/circleci/images/bionic/Dockerfile .
      - image: gregprice/circleci:bionic-python-1.test

    working_directory: ~/zulip

    steps:
      - checkout

      - *create_cache_directories

      - run:
          name: do Bionic hack
          command: |
            # Temporary hack till `sudo service redis-server start` gets fixed in Bionic. See
            # https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
            redis-server --daemonize yes

      - *restore_cache_package_json
      - *restore_cache_requirements
      - *install_dependencies
      - *save_cache_package_json
      - *save_cache_requirements
      - *run_backend_tests
      - *upload_coverage_report

workflows:
  version: 2
  build:
    jobs:
      - "trusty-python-3.4"
      - "xenial-python-3.5"
      - "bionic-python-3.6"
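Several steps in this config wrap commands as `mispipe "<command>" ts`. Both
tools come from the moreutils package installed above: `ts` prefixes each
output line with a timestamp, and `mispipe` exits with the first command's
status rather than the pipe's last, so a failing setup step still fails the CI
step. A sketch of the behavior (illustrative, not from the repo):

    mispipe "echo ok; false" ts   # prints a timestamped line, then exits 1
    # plain `{ echo ok; false; } | ts` would exit 0, hiding the failure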
@@ -5,8 +5,6 @@ coverage:
     project:
       default:
         target: auto
-        # Codecov has the tendency to report a lot of false negatives,
-        # so we basically suppress comments completely.
-        threshold: 50%
+        threshold: 0.50
         base: auto
     patch: off
(deleted file; a codespell-style ignore wordlist, so the entries are intentionally non-words)
@@ -1,18 +0,0 @@
te
ans
pullrequest
ist
cros
wit
nwe
circularly
ned
ba
ressemble
ser
sur
hel
fpr
alls
nd
ot
@@ -3,22 +3,23 @@ root = true
 [*]
 end_of_line = lf
 charset = utf-8
-indent_size = 4
-indent_style = space
-insert_final_newline = true
 trim_trailing_whitespace = true
+insert_final_newline = true
 
-binary_next_line = true # for shfmt
-switch_case_indent = true # for shfmt
+[*.{sh,py,pyi,js,json,yml,xml,css,md,markdown,handlebars,html}]
+indent_style = space
+indent_size = 4
 
-[{*.{js,json,ts},check-openapi}]
-max_line_length = 100
-
-[*.{py,pyi}]
+[*.{py}]
 max_line_length = 110
 
-[*.{md,svg,rb,pp,yaml,yml}]
+[*.{js}]
+max_line_length = 120
+
+[*.{svg,rb,pp,pl}]
+indent_style = space
 indent_size = 2
 
-[package.json]
-indent_size = 2
+[*.{cfg}]
+indent_style = space
+indent_size = 8
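The two shfmt keys on the removed side are read by shfmt directly from
.editorconfig. As a sketch, the equivalent explicit flags (shfmt's -bn, -ci,
-i, and -d; the script path is a hypothetical example):

    shfmt -bn -ci -i 4 -d some-script.sh   # -d prints a diff instead of rewriting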
@@ -1,14 +1,2 @@
-# This is intended for generated files and vendored third-party files.
-# For our source code, instead of adding files here, consider using
-# specific eslint-disable comments in the files themselves.
-
-/docs/_build
-/static/generated
-/static/third
-/static/webpack-bundles
-/var/*
-!/var/puppeteer
-/var/puppeteer/*
-!/var/puppeteer/test_credentials.d.ts
-/zulip-current-venv
-/zulip-py3-venv
+static/js/blueslip.js
+static/webpack-bundles
.eslintrc.json (651 lines changed)
@@ -1,276 +1,411 @@
 {
     "env": {
-        "es2020": true,
-        "node": true
+        "node": true,
+        "es6": true
     },
-    "extends": [
-        "eslint:recommended",
-        "plugin:import/errors",
-        "plugin:import/warnings",
-        "plugin:no-jquery/recommended",
-        "plugin:no-jquery/deprecated",
-        "plugin:unicorn/recommended",
-        "prettier"
-    ],
-    "parser": "@babel/eslint-parser",
     "parserOptions": {
-        "warnOnUnsupportedTypeScriptVersion": false,
-        "sourceType": "unambiguous"
+        "sourceType": "module"
     },
-    "plugins": ["formatjs", "no-jquery"],
-    "settings": {
-        "additionalFunctionNames": ["$t", "$t_html"],
-        "no-jquery": {
-            "collectionReturningPlugins": {
-                "expectOne": "always"
-            },
-            "variablePattern": "^\\$(?!t$|t_html$)."
-        }
-    },
+    "globals": {
+        "$": false,
+        "ClipboardJS": false,
+        "Dict": false,
+        "FetchStatus": false,
+        "Filter": false,
+        "Handlebars": false,
+        "LightboxCanvas": false,
+        "MessageListData": false,
+        "MessageListView": false,
+        "PerfectScrollbar": false,
+        "Plotly": false,
+        "SockJS": false,
+        "Socket": false,
+        "Sortable": false,
+        "WinChan": false,
+        "XDate": false,
+        "_": false,
+        "activity": false,
+        "admin": false,
+        "alert_words": false,
+        "alert_words_ui": false,
+        "attachments_ui": false,
+        "avatar": false,
+        "billing": false,
+        "blueslip": false,
+        "bot_data": false,
+        "bridge": false,
+        "buddy_data": false,
+        "buddy_list": false,
+        "channel": false,
+        "click_handlers": false,
+        "color_data": false,
+        "colorspace": false,
+        "common": false,
+        "components": false,
+        "compose": false,
+        "compose_actions": false,
+        "compose_fade": false,
+        "compose_pm_pill": false,
+        "compose_state": false,
+        "compose_ui": false,
+        "composebox_typeahead": false,
+        "condense": false,
+        "confirm_dialog": false,
+        "copy_and_paste": false,
+        "csrf_token": false,
+        "current_msg_list": true,
+        "drafts": false,
+        "echo": false,
+        "emoji": false,
+        "emoji_codes": false,
+        "emoji_picker": false,
+        "favicon": false,
+        "feature_flags": false,
+        "feedback_widget": false,
+        "fenced_code": false,
+        "flatpickr": false,
+        "floating_recipient_bar": false,
+        "gear_menu": false,
+        "hash_util": false,
+        "hashchange": false,
+        "helpers": false,
+        "home_msg_list": false,
+        "hotspots": false,
+        "i18n": false,
+        "info_overlay": false,
+        "input_pill": false,
+        "invite": false,
+        "jQuery": false,
+        "katex": false,
+        "keydown_util": false,
+        "lightbox": false,
+        "list_cursor": false,
+        "list_render": false,
+        "list_util": false,
+        "loading": false,
+        "localStorage": false,
+        "local_message": false,
+        "localstorage": false,
+        "markdown": false,
+        "marked": false,
+        "md5": false,
+        "message_edit": false,
+        "message_events": false,
+        "message_fetch": false,
+        "message_flags": false,
+        "message_list": false,
+        "message_live_update": false,
+        "message_scroll": false,
+        "message_store": false,
+        "message_util": false,
+        "message_viewport": false,
+        "moment": false,
+        "muting": false,
+        "muting_ui": false,
+        "narrow": false,
+        "narrow_state": false,
+        "navigate": false,
+        "night_mode": false,
+        "notifications": false,
+        "overlays": false,
+        "padded_widget": false,
+        "page_params": false,
+        "panels": false,
+        "people": false,
+        "pm_conversations": false,
+        "pm_list": false,
+        "pointer": false,
+        "popovers": false,
+        "presence": false,
+        "pygments_data": false,
+        "reactions": false,
+        "realm_icon": false,
+        "realm_logo": false,
+        "realm_night_logo": false,
+        "recent_senders": false,
+        "reload": false,
+        "reload_state": false,
+        "reminder": false,
+        "resize": false,
+        "rows": false,
+        "rtl": false,
+        "run_test": false,
+        "schema": false,
+        "scroll_bar": false,
+        "scroll_util": false,
+        "search": false,
+        "search_pill": false,
+        "search_pill_widget": false,
+        "search_suggestion": false,
+        "search_util": false,
+        "sent_messages": false,
+        "server_events": false,
+        "server_events_dispatch": false,
+        "settings": false,
+        "settings_account": false,
+        "settings_bots": false,
+        "settings_display": false,
+        "settings_emoji": false,
+        "settings_linkifiers": false,
+        "settings_invites": false,
+        "settings_muting": false,
+        "settings_notifications": false,
+        "settings_org": false,
+        "settings_panel_menu": false,
+        "settings_profile_fields": false,
+        "settings_sections": false,
+        "settings_streams": false,
+        "settings_toggle": false,
+        "settings_ui": false,
+        "settings_user_groups": false,
+        "settings_users": false,
+        "starred_messages": false,
+        "stream_color": false,
+        "stream_create": false,
+        "stream_data": false,
+        "stream_edit": false,
+        "stream_events": false,
+        "stream_list": false,
+        "stream_muting": false,
+        "stream_popover": false,
+        "stream_sort": false,
+        "StripeCheckout": false,
+        "submessage": false,
+        "subs": false,
+        "tab_bar": false,
+        "templates": false,
+        "tictactoe_widget": false,
+        "timerender": false,
+        "toMarkdown": false,
+        "todo_widget": false,
+        "top_left_corner": false,
+        "topic_data": false,
+        "topic_generator": false,
+        "topic_list": false,
+        "topic_zoom": false,
+        "transmit": false,
+        "tutorial": false,
+        "typeahead_helper": false,
+        "typing": false,
+        "typing_data": false,
+        "typing_events": false,
+        "typing_status": false,
+        "ui": false,
+        "ui_init": false,
+        "ui_report": false,
+        "ui_util": false,
+        "unread": false,
+        "unread_ops": false,
+        "unread_ui": false,
+        "upgrade": false,
+        "upload": false,
+        "upload_widget": false,
+        "user_events": false,
+        "user_groups": false,
+        "user_pill": false,
+        "user_search": false,
+        "user_status": false,
+        "user_status_ui": false,
+        "util": false,
+        "poll_widget": false,
+        "widgetize": false,
+        "zcommand": false,
+        "zform": false,
+        "zxcvbn": false
+    },
-    "reportUnusedDisableDirectives": true,
+    "plugins": [
+        "eslint-plugin-empty-returns"
+    ],
     "rules": {
-        "array-callback-return": "error",
-        "arrow-body-style": "error",
-        "block-scoped-var": "error",
-        "consistent-return": "error",
-        "curly": "error",
-        "dot-notation": "error",
-        "eqeqeq": "error",
-        "formatjs/enforce-default-message": ["error", "literal"],
-        "formatjs/enforce-placeholders": [
-            "error",
-            {"ignoreList": ["b", "code", "em", "i", "kbd", "p", "strong"]}
-        ],
-        "formatjs/no-id": "error",
-        "guard-for-in": "error",
-        "import/extensions": "error",
-        "import/first": "error",
-        "import/newline-after-import": "error",
-        "import/no-self-import": "error",
-        "import/no-useless-path-segments": "error",
-        "import/order": [
-            "error",
-            {
-                "alphabetize": {"order": "asc"},
-                "newlines-between": "always"
-            }
-        ],
+        "array-bracket-spacing": "error",
+        "arrow-spacing": [ "error", { "before": true, "after": true } ],
+        "block-scoped-var": 2,
+        "brace-style": [ "error", "1tbs", { "allowSingleLine": true } ],
+        "camelcase": 0,
+        "comma-dangle": [ "error",
+            {
+                "arrays": "always-multiline",
+                "objects": "always-multiline",
+                "imports": "always-multiline",
+                "exports": "always-multiline",
+                "functions": "never"
+            }
+        ],
-        "import/unambiguous": "error",
-        "lines-around-directive": "error",
-        "new-cap": "error",
-        "no-alert": "error",
-        "no-array-constructor": "error",
-        "no-bitwise": "error",
-        "no-caller": "error",
-        "no-catch-shadow": "error",
-        "no-constant-condition": ["error", {"checkLoops": false}],
-        "no-div-regex": "error",
-        "no-duplicate-imports": "error",
-        "no-else-return": "error",
-        "no-eq-null": "error",
-        "no-eval": "error",
-        "no-implicit-coercion": "error",
-        "no-implied-eval": "error",
-        "no-inner-declarations": "off",
-        "no-iterator": "error",
-        "no-jquery/no-parse-html-literal": "error",
-        "no-label-var": "error",
-        "no-labels": "error",
-        "no-loop-func": "error",
-        "no-multi-str": "error",
-        "no-native-reassign": "error",
-        "no-new-func": "error",
-        "no-new-object": "error",
-        "no-new-wrappers": "error",
-        "no-octal-escape": "error",
-        "no-plusplus": "error",
-        "no-proto": "error",
-        "no-return-assign": "error",
-        "no-script-url": "error",
-        "no-self-compare": "error",
-        "no-sync": "error",
-        "no-throw-literal": "error",
-        "no-undef-init": "error",
-        "no-unneeded-ternary": ["error", {"defaultAssignment": false}],
-        "no-unused-expressions": "error",
-        "no-use-before-define": ["error", {"functions": false}],
-        "no-useless-concat": "error",
-        "no-useless-constructor": "error",
-        "no-var": "error",
-        "object-shorthand": "error",
-        "one-var": ["error", "never"],
-        "prefer-arrow-callback": "error",
-        "prefer-const": [
-            "error"
-        ],
+        "comma-spacing": [ "error",
+            {
+                "before": false,
+                "after": true
+            }
+        ],
+        "complexity": [ 0, 4 ],
+        "curly": 2,
+        "dot-notation": [ "error", { "allowKeywords": true } ],
+        "empty-returns/main": "error",
+        "eol-last": [ "error", "always" ],
+        "eqeqeq": 2,
+        "func-style": [ "off", "expression" ],
+        "guard-for-in": 2,
+        "indent": ["error", 4, {
+            "ArrayExpression": "first",
+            "outerIIFEBody": 0,
+            "ObjectExpression": "first",
+            "SwitchCase": 0,
+            "CallExpression": {"arguments": "first"},
+            "FunctionExpression": {"parameters": "first"},
+            "FunctionDeclaration": {"parameters": "first"}
+        }],
+        "key-spacing": [ "error",
+            {
+                "beforeColon": false,
+                "afterColon": true
+            }
+        ],
+        "keyword-spacing": [ "error",
+            {
+                "before": true,
+                "after": true,
+                "overrides": {
+                    "return": { "after": true },
+                    "throw": { "after": true },
+                    "case": { "after": true }
+                }
+            }
+        ],
+        "max-depth": [ 0, 4 ],
+        "max-len": [ "error", 100, 2,
+            {
+                "ignoreUrls": true,
+                "ignoreComments": false,
+                "ignoreRegExpLiterals": true,
+                "ignoreStrings": true,
+                "ignoreTemplateLiterals": true
+            }
+        ],
+        "max-params": [ 0, 3 ],
+        "max-statements": [ 0, 10 ],
+        "new-cap": [ "error",
+            {
+                "newIsCap": true,
+                "capIsNew": false
+            }
+        ],
+        "new-parens": 2,
+        "newline-per-chained-call": 0,
+        "no-alert": 2,
+        "no-array-constructor": "error",
+        "no-bitwise": 2,
+        "no-caller": 2,
+        "no-case-declarations": "error",
+        "no-catch-shadow": 2,
+        "no-console": 0,
+        "no-const-assign": "error",
+        "no-control-regex": 2,
+        "no-debugger": 2,
+        "no-delete-var": 2,
+        "no-div-regex": 2,
+        "no-dupe-class-members": "error",
+        "no-dupe-keys": 2,
+        "no-duplicate-imports": "error",
+        "no-else-return": 2,
+        "no-empty": 2,
+        "no-empty-character-class": 2,
+        "no-eq-null": 2,
+        "no-eval": 2,
+        "no-ex-assign": 2,
+        "no-extra-parens": ["error", "all"],
+        "no-extra-semi": 2,
+        "no-fallthrough": 2,
+        "no-floating-decimal": 2,
+        "no-func-assign": 2,
+        "no-implied-eval": 2,
+        "no-iterator": "error",
+        "no-label-var": 2,
+        "no-labels": 2,
+        "no-loop-func": 2,
+        "no-mixed-requires": [ 0, false ],
+        "no-multi-str": 2,
+        "no-native-reassign": 2,
+        "no-nested-ternary": 0,
+        "no-new-func": "error",
+        "no-new-object": 2,
+        "no-new-wrappers": 2,
+        "no-obj-calls": 2,
+        "no-octal": 2,
+        "no-octal-escape": 2,
+        "no-param-reassign": 0,
+        "no-plusplus": 2,
+        "no-proto": 2,
+        "no-redeclare": 2,
+        "no-regex-spaces": 2,
+        "no-restricted-syntax": 0,
+        "no-return-assign": 2,
+        "no-script-url": 2,
+        "no-self-compare": 2,
+        "no-shadow": 0,
+        "no-sync": 2,
+        "no-ternary": 0,
+        "no-trailing-spaces": 2,
+        "no-undef": "error",
+        "no-undef-init": 2,
+        "no-underscore-dangle": 0,
+        "no-unneeded-ternary": [ "error", { "defaultAssignment": false } ],
+        "no-unreachable": 2,
+        "no-unused-expressions": 2,
+        "no-unused-vars": [ "error",
+            {
+                "vars": "local",
+                "args": "after-used",
+                "varsIgnorePattern": "print_elapsed_time|check_duplicate_ids"
+            }
+        ],
+        "no-use-before-define": 2,
+        "no-useless-constructor": "error",
+        // The Zulip codebase complies partially with the "no-useless-escape"
+        // rule; only regex expressions haven't been updated yet.
+        // Updated regex expressions are currently being tested in casper
+        // files and will decide about a potential future enforcement of this rule.
+        "no-useless-escape": 0,
+        "space-unary-ops": 2,
+        "no-whitespace-before-property": 2,
+        "no-with": 2,
+        "one-var": [ "error", "never" ],
+        "padded-blocks": 0,
+        "prefer-const": [ "error",
+            {
+                "destructuring": "any",
+                "ignoreReadBeforeAssign": true
+            }
+        ],
-        "radix": "error",
-        "sort-imports": ["error", {"ignoreDeclarationSort": true}],
-        "spaced-comment": ["error", "always", {"markers": ["/"]}],
-        "strict": "error",
-        "unicorn/consistent-function-scoping": "off",
-        "unicorn/explicit-length-check": "off",
-        "unicorn/filename-case": "off",
-        "unicorn/no-await-expression-member": "off",
-        "unicorn/no-nested-ternary": "off",
-        "unicorn/no-null": "off",
-        "unicorn/no-process-exit": "off",
-        "unicorn/no-useless-undefined": "off",
-        "unicorn/number-literal-case": "off",
-        "unicorn/numeric-separators-style": "off",
-        "unicorn/prefer-module": "off",
-        "unicorn/prefer-node-protocol": "off",
-        "unicorn/prefer-spread": "off",
-        "unicorn/prefer-ternary": "off",
-        "unicorn/prevent-abbreviations": "off",
-        "valid-typeof": ["error", {"requireStringLiterals": true}],
-        "yoda": "error"
-    },
-    "overrides": [
-        {
-            "files": ["frontend_tests/node_tests/**", "frontend_tests/zjsunit/**"],
-            "rules": {
-                "no-jquery/no-selector-prop": "off"
-            }
-        },
+        "quote-props": [ "error", "as-needed",
+            {
+                "keywords": false,
+                "unnecessary": true,
+                "numbers": false
+            }
+        ],
-        {
-            "files": ["frontend_tests/puppeteer_lib/**", "frontend_tests/puppeteer_tests/**"],
-            "globals": {
-                "$": false,
-                "zulip_test": false
-            }
-        },
+        "quotes": [ 0, "single" ],
+        "radix": 2,
+        "semi": 2,
+        "semi-spacing": [2, {"before": false, "after": true}],
+        "space-before-blocks": 2,
+        "space-before-function-paren": [ "error",
+            {
+                "anonymous": "always",
+                "named": "never",
+                "asyncArrow": "always"
+            }
+        ],
-        {
-            "files": ["static/js/**"],
-            "globals": {
-                "StripeCheckout": false
-            }
-        },
-        {
-            "files": ["**/*.ts"],
-            "extends": [
-                "plugin:@typescript-eslint/recommended-requiring-type-checking",
-                "plugin:import/typescript"
-            ],
-            "parserOptions": {
-                "project": "tsconfig.json"
-            },
-            "settings": {
-                "import/resolver": {
-                    "node": {
-                        "extensions": [".ts", ".d.ts", ".js"] // https://github.com/import-js/eslint-plugin-import/issues/2267
-                    }
-                }
-            },
-            "globals": {
-                "JQuery": false
-            },
-            "rules": {
-                // Disable base rule to avoid conflict
-                "no-duplicate-imports": "off",
-                "no-unused-vars": "off",
-                "no-useless-constructor": "off",
-                "no-use-before-define": "off",
-
-                "@typescript-eslint/array-type": "error",
-                "@typescript-eslint/consistent-type-assertions": "error",
-                "@typescript-eslint/consistent-type-imports": "error",
-                "@typescript-eslint/explicit-function-return-type": [
-                    "error",
-                    {"allowExpressions": true}
-                ],
-                "@typescript-eslint/member-ordering": "error",
-                "@typescript-eslint/no-duplicate-imports": "off",
-                "@typescript-eslint/no-explicit-any": "off",
-                "@typescript-eslint/no-extraneous-class": "error",
-                "@typescript-eslint/no-non-null-assertion": "off",
-                "@typescript-eslint/no-parameter-properties": "error",
-                "@typescript-eslint/no-unnecessary-qualifier": "error",
-                "@typescript-eslint/no-unused-vars": ["error", {"varsIgnorePattern": "^_"}],
-                "@typescript-eslint/no-unsafe-argument": "off",
-                "@typescript-eslint/no-unsafe-assignment": "off",
-                "@typescript-eslint/no-unsafe-call": "off",
-                "@typescript-eslint/no-unsafe-member-access": "off",
-                "@typescript-eslint/no-unsafe-return": "off",
-                "@typescript-eslint/no-use-before-define": "error",
-                "@typescript-eslint/no-useless-constructor": "error",
-                "@typescript-eslint/prefer-includes": "error",
-                "@typescript-eslint/prefer-string-starts-ends-with": "error",
-                "@typescript-eslint/promise-function-async": "error",
-                "@typescript-eslint/unified-signatures": "error",
-                "no-undef": "error"
-            }
-        },
-        {
-            "files": ["**/*.d.ts"],
-            "rules": {
-                "import/unambiguous": "off"
-            }
-        },
-        {
-            "files": ["frontend_tests/**"],
-            "globals": {
-                "CSS": false,
-                "document": false,
-                "navigator": false,
-                "window": false
-            },
-            "rules": {
-                "formatjs/no-id": "off",
-                "new-cap": "off",
-                "no-sync": "off",
-                "unicorn/prefer-prototype-methods": "off"
-            }
-        },
-        {
-            "files": ["tools/debug-require.js"],
-            "env": {
-                "browser": true,
-                "es2020": false
-            },
-            "rules": {
-                // Don’t require ES features that PhantomJS doesn’t support
-                // TODO: Toggle these settings now that we don't use PhantomJS
-                "no-var": "off",
-                "object-shorthand": "off",
-                "prefer-arrow-callback": "off"
-            }
-        },
-        {
-            "files": ["static/**"],
-            "env": {
-                "browser": true,
-                "node": false
-            },
-            "rules": {
-                "no-console": "error"
-            },
-            "settings": {
-                "import/resolver": "webpack"
-            }
-        },
-        {
-            "files": ["static/shared/**"],
-            "env": {
-                "browser": false,
-                "shared-node-browser": true
-            },
-            "rules": {
-                "import/no-restricted-paths": [
-                    "error",
-                    {
-                        "zones": [
-                            {
-                                "target": "./static/shared",
-                                "from": ".",
-                                "except": ["./node_modules", "./static/shared"]
-                            }
-                        ]
-                    }
-                ]
-            }
-        }
-    ]
+        "space-in-parens": 2,
+        "space-infix-ops": 2,
+        "spaced-comment": 0,
+        "strict": 0,
+        "template-curly-spacing": "error",
+        "unnecessary-strict": 0,
+        "use-isnan": 2,
+        "valid-typeof": [ "error", { "requireStringLiterals": true } ],
+        "wrap-iife": [ "error", "outside", { "functionPrototypeMethods": false } ],
+        "wrap-regex": 0,
+        "yoda": 2
+    }
 }
.gitattributes (20 lines changed)
@@ -1,19 +1,4 @@
-# DIFFS: Noise suppression.
-#
-# Suppress noisy generated files in diffs.
-# (When you actually want to see these diffs, use `git diff -a`.)
-
-# Large test fixtures:
-corporate/tests/stripe_fixtures/*.json -diff
-
-
-# FORMATTING
-
-# Maintain LF (Unix-style) newlines in text files.
 * text=auto eol=lf
-
-# Make sure various media files never get somehow auto-detected as text
-# and then newline-converted.
 *.gif binary
 *.jpg binary
 *.jpeg binary
@@ -26,7 +11,4 @@ corporate/tests/stripe_fixtures/*.json -diff
 *.otf binary
 *.tif binary
 *.ogg binary
-*.bson binary
-*.bmp binary
-*.mp3 binary
-*.pdf binary
+yarn.lock binary
.github/FUNDING.yml (deleted, 3 lines)
@@ -1,3 +0,0 @@
github: zulip
patreon: zulip
open_collective: zulip
.github/pull_request_template.md (7 lines changed)
@@ -1,11 +1,14 @@
-<!-- What's this PR for? (Just a link to an issue is fine.) -->
-
-**Testing plan:** <!-- How have you tested? -->
-
-**GIFs or screenshots:** <!-- If a UI change. See:
+**Testing Plan:** <!-- How have you tested? -->
+
+
+**GIFs or Screenshots:** <!-- If a UI change. See:
 https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html
 -->
 
 
 <!-- Also be sure to make clear, coherent commits:
 https://zulip.readthedocs.io/en/latest/contributing/version-control.html
 -->
.github/workflows/cancel-previous-runs.yml (deleted, 43 lines)
@@ -1,43 +0,0 @@
name: Cancel previous runs
on: [push, pull_request]

defaults:
  run:
    shell: bash

jobs:
  cancel:
    name: Cancel previous runs
    runs-on: ubuntu-latest
    timeout-minutes: 3

    # Don't run this job for zulip/zulip pushes since we
    # want to run those jobs.
    if: ${{ github.event_name != 'push' || github.event.repository.full_name != 'zulip/zulip' }}

    steps:
      # We get workflow IDs from GitHub API so we don't have to maintain
      # a hard-coded list of IDs which need to be updated when a workflow
      # is added or removed. And, workflow IDs are different for other forks
      # so this is required.
      - name: Get workflow IDs.
        id: workflow_ids
        continue-on-error: true # Don't fail this job on failure
        env:
          # This is in <owner>/<repo> format e.g. zulip/zulip
          REPOSITORY: ${{ github.repository }}
        run: |
          workflow_api_url=https://api.github.com/repos/$REPOSITORY/actions/workflows
          curl -fL $workflow_api_url -o workflows.json

          script="const {workflows} = require('./workflows'); \
            const ids = workflows.map(workflow => workflow.id); \
            console.log(ids.join(','));"
          ids=$(node -e "$script")
          echo "::set-output name=ids::$ids"

      - uses: styfle/cancel-workflow-action@0.9.0
        continue-on-error: true # Don't fail this job on failure
        with:
          workflow_id: ${{ steps.workflow_ids.outputs.ids }}
          access_token: ${{ github.token }}
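The node one-liner in that workflow just collects .workflows[].id from the API
response. As a sketch, a jq equivalent (using jq's tostring and join; this was
not part of the workflow itself):

    curl -fL "https://api.github.com/repos/zulip/zulip/actions/workflows" \
        | jq -r '[.workflows[].id | tostring] | join(",")'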
.github/workflows/codeql-analysis.yml (deleted, 27 lines)
@@ -1,27 +0,0 @@
name: "Code scanning"

on:
  push:
    branches-ignore:
      - dependabot/** # https://github.com/github/codeql-action/pull/435
  pull_request: {}

jobs:
  CodeQL:
    if: ${{!github.event.repository.private}}
    runs-on: ubuntu-latest

    steps:
      - name: Check out repository
        uses: actions/checkout@v2

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1

        # Override language selection by uncommenting this and choosing your languages
        # with:
        #   languages: go, javascript, csharp, python, cpp, java

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1
.github/workflows/production-suite.yml (deleted, 269 lines)
@@ -1,269 +0,0 @@
name: Zulip production suite

on:
  push: {}
  pull_request:
    paths:
      - .github/workflows/production-suite.yml
      - "**/migrations/**"
      - babel.config.js
      - manage.py
      - postcss.config.js
      - puppet/**
      - requirements/**
      - scripts/**
      - static/assets/**
      - static/third/**
      - tools/**
      - webpack.config.ts
      - yarn.lock
      - zerver/worker/queue_processors.py
      - zerver/lib/push_notifications.py
      - zerver/decorator.py
      - zproject/**

defaults:
  run:
    shell: bash

jobs:
  production_build:
    # This job builds a release tarball from the current commit, which
    # will be used for all of the following install/upgrade tests.
    name: Debian 10 production build
    runs-on: ubuntu-latest

    # Docker images are built from 'tools/ci/Dockerfile'; the comments at
    # the top explain how to build and upload these images.
    # Debian 10 ships with Python 3.7.3.
    container: zulip/ci:buster
    steps:
      - name: Add required permissions
        run: |
          # The checkout action doesn't clone to ~/zulip or allow
          # us to use the path option to clone outside the current
          # /__w/zulip/zulip directory. Since this directory is owned
          # by root, we need to change its ownership to allow the
          # github user to clone the code here.
          # Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE
          # which is /home/runner/work/.
          sudo chown -R github .

          # This is the GitHub Actions specific cache directory that
          # the current github user must be able to access for the
          # cache action to work. It is owned by root currently.
          sudo chmod -R 0777 /__w/_temp/

      - uses: actions/checkout@v2

      - name: Create cache directories
        run: |
          dirs=(/srv/zulip-{npm,venv,emoji}-cache)
          sudo mkdir -p "${dirs[@]}"
          sudo chown -R github "${dirs[@]}"

      - name: Restore node_modules cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-npm-cache
          key: v1-yarn-deps-buster-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }}
          restore-keys: v1-yarn-deps-buster

      - name: Restore python cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-venv-cache
          key: v1-venv-buster-${{ hashFiles('requirements/dev.txt') }}
          restore-keys: v1-venv-buster

      - name: Restore emoji cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-emoji-cache
          key: v1-emoji-buster-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
          restore-keys: v1-emoji-buster

      - name: Build production tarball
        run: ./tools/ci/production-build

      - name: Upload production build artifacts for install jobs
        uses: actions/upload-artifact@v2
        with:
          name: production-tarball
          path: /tmp/production-build
          retention-days: 14

      - name: Report status
        if: failure()
        env:
          ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
        run: tools/ci/send-failure-message

  production_install:
    # This job installs the server release tarball built above on a
    # range of platforms, and does some basic health checks on the
    # resulting installed Zulip server.
    strategy:
      fail-fast: false
      matrix:
        extra_args: [""]
        include:
          # Docker images are built from 'tools/ci/Dockerfile'; the comments at
          # the top explain how to build and upload these images.
          - docker_image: zulip/ci:focal
            name: Ubuntu 20.04 production install
            os: focal

          - docker_image: zulip/ci:jammy
            name: Ubuntu 22.04 production install
            os: jammy

          - docker_image: zulip/ci:buster
            name: Debian 10 production install with custom db name and user
            os: buster
            extra_args: --test-custom-db

          - docker_image: zulip/ci:bullseye
            name: Debian 11 production install
            os: bullseye

    name: ${{ matrix.name }}
    container:
      image: ${{ matrix.docker_image }}
      options: --init
    runs-on: ubuntu-latest
    needs: production_build

    steps:
      - name: Download built production tarball
        uses: actions/download-artifact@v2
        with:
          name: production-tarball
          path: /tmp

      - name: Add required permissions and setup
        run: |
          # This is the GitHub Actions specific cache directory that
          # the current github user must be able to access for the
          # cache action to work. It is owned by root currently.
          sudo chmod -R 0777 /__w/_temp/

          # Since actions/download-artifact@v2 loses all the permissions
          # of the tarball uploaded by the upload-artifact step, fix those.
          chmod +x /tmp/production-upgrade-pg
          chmod +x /tmp/production-pgroonga
          chmod +x /tmp/production-install
          chmod +x /tmp/production-verify
          chmod +x /tmp/send-failure-message

      - name: Create cache directories
        run: |
          dirs=(/srv/zulip-{npm,venv,emoji}-cache)
          sudo mkdir -p "${dirs[@]}"
          sudo chown -R github "${dirs[@]}"

      - name: Restore node_modules cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-npm-cache
          key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('/tmp/package.json') }}-${{ hashFiles('/tmp/yarn.lock') }}
          restore-keys: v1-yarn-deps-${{ matrix.os }}

      - name: Install production
        run: |
          sudo service rabbitmq-server restart
          sudo /tmp/production-install ${{ matrix.extra_args }}

      - name: Verify install
        run: sudo /tmp/production-verify ${{ matrix.extra_args }}

      - name: Install pgroonga
        if: ${{ matrix.os == 'focal' }}
        run: sudo /tmp/production-pgroonga

      - name: Verify install after installing pgroonga
        if: ${{ matrix.os == 'focal' }}
        run: sudo /tmp/production-verify ${{ matrix.extra_args }}

      - name: Upgrade postgresql
        if: ${{ matrix.os == 'focal' }}
        run: sudo /tmp/production-upgrade-pg

      - name: Verify install after upgrading postgresql
        if: ${{ matrix.os == 'focal' }}
        run: sudo /tmp/production-verify ${{ matrix.extra_args }}

      - name: Report status
        if: failure()
        env:
          ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
        run: /tmp/send-failure-message

  production_upgrade:
    # The production upgrade job starts with a container with a
    # previous Zulip release installed, and attempts to upgrade it to
    # the release tarball built for the current commit being tested.
    #
    # This is intended to catch bugs that result in the upgrade
    # process failing.
    strategy:
      fail-fast: false
      matrix:
        include:
          # Docker images are built from 'tools/ci/Dockerfile'; the comments at
          # the top explain how to build and upload these images.
          - docker_image: zulip/ci:buster-3.4
            name: 3.4 Version Upgrade
            os: buster

          - docker_image: zulip/ci:bullseye-4.11
            name: 4.11 Version Upgrade
            os: bullseye

    name: ${{ matrix.name }}
    container:
      image: ${{ matrix.docker_image }}
      options: --init
    runs-on: ubuntu-latest
    needs: production_build

    steps:
      - name: Download built production tarball
        uses: actions/download-artifact@v2
        with:
          name: production-tarball
          path: /tmp

      - name: Add required permissions and setup
        run: |
          # This is the GitHub Actions specific cache directory that
          # the current github user must be able to access for the
          # cache action to work. It is owned by root currently.
          sudo chmod -R 0777 /__w/_temp/

          # Since actions/download-artifact@v2 loses all the permissions
          # of the tarball uploaded by the upload-artifact step, fix those.
          chmod +x /tmp/production-upgrade
          chmod +x /tmp/production-verify
          chmod +x /tmp/send-failure-message

      - name: Create cache directories
        run: |
          dirs=(/srv/zulip-{npm,venv,emoji}-cache)
          sudo mkdir -p "${dirs[@]}"
          sudo chown -R github "${dirs[@]}"

      - name: Upgrade production
        run: sudo /tmp/production-upgrade

        # TODO: We should be running production-verify here, but it
        # doesn't pass yet.
        #
        # - name: Verify install
        #   run: sudo /tmp/production-verify

      - name: Report status
        if: failure()
        env:
          ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
        run: /tmp/send-failure-message
.github/workflows/update-oneclick-apps.yml (deleted, 24 lines)
@@ -1,24 +0,0 @@
name: Update one click apps
on:
  release:
    types: [published]
jobs:
  update-digitalocean-oneclick-app:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Update DigitalOcean one click app
        env:
          DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }}
          ZULIP_API_KEY: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_API_KEY }}
          ZULIP_EMAIL: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_EMAIL }}
          ZULIP_SITE: https://chat.zulip.org
          ONE_CLICK_ACTION_STREAM: kandra ops
          PYTHON_DIGITALOCEAN_REQUEST_TIMEOUT_SEC: 30
          RELEASE_VERSION: ${{ github.event.release.tag_name }}
        run: |
          export PATH="$HOME/.local/bin:$PATH"
          git clone https://github.com/zulip/marketplace-partners
          pip3 install python-digitalocean zulip fab-classic
          echo $PATH
          python3 tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py
.github/workflows/zulip-ci.yml (deleted, 229 lines)
@@ -1,229 +0,0 @@
# NOTE: Every test in this file should be in `tools/test-all`. If there's a
# reason not to run it there, it should be there as a comment
# explaining why.

name: Zulip CI

on: [push, pull_request]

defaults:
  run:
    shell: bash

jobs:
  tests:
    strategy:
      fail-fast: false
      matrix:
        include_frontend_tests: [false]
        include:
          # Base images are built using `tools/ci/Dockerfile.prod.template`.
          # The comments at the top explain how to build and upload these images.
          # Debian 10 ships with Python 3.7.3.
          - docker_image: zulip/ci:buster
            name: Debian 10 (Python 3.7, backend + frontend)
            os: buster
            include_frontend_tests: true
          # Ubuntu 20.04 ships with Python 3.8.2.
          - docker_image: zulip/ci:focal
            name: Ubuntu 20.04 (Python 3.8, backend)
            os: focal
          # Debian 11 ships with Python 3.9.2.
          - docker_image: zulip/ci:bullseye
            name: Debian 11 (Python 3.9, backend)
            os: bullseye
          # Ubuntu 22.04 ships with Python 3.10.4.
          - docker_image: zulip/ci:jammy
            name: Ubuntu 22.04 (Python 3.10, backend)
            os: jammy

    runs-on: ubuntu-latest
    name: ${{ matrix.name }}
    container: ${{ matrix.docker_image }}
    env:
      # GitHub Actions sets HOME to /github/home, which causes
      # problems later in provision and the frontend tests that run
      # tools/setup/postgresql-init-dev-db, because of the .pgpass
      # location. PostgreSQL (psql) expects .pgpass to be at
      # /home/github/.pgpass, and setting HOME to `/home/github/`
      # ensures it is written there because we write it to ~/.pgpass.
      HOME: /home/github/

    steps:
      - uses: actions/checkout@v2

      - name: Create cache directories
        run: |
          dirs=(/srv/zulip-{npm,venv,emoji}-cache)
          sudo mkdir -p "${dirs[@]}"
          sudo chown -R github "${dirs[@]}"

      - name: Restore node_modules cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-npm-cache
          key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }}
          restore-keys: v1-yarn-deps-${{ matrix.os }}

      - name: Restore python cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-venv-cache
          key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/dev.txt') }}
          restore-keys: v1-venv-${{ matrix.os }}

      - name: Restore emoji cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-emoji-cache
          key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
          restore-keys: v1-emoji-${{ matrix.os }}

      - name: Install dependencies
        run: |
          # This is the main setup job for the test suite
          ./tools/ci/setup-backend --skip-dev-db-build

          # Cleaning caches is mostly unnecessary in GitHub Actions, because
          # most builds don't get to write to the cache.
          # scripts/lib/clean_unused_caches.py --verbose --threshold 0

      - name: Run tools test
        run: |
          source tools/ci/activate-venv
          ./tools/test-tools

      - name: Run Codespell lint
        run: |
          source tools/ci/activate-venv
          ./tools/run-codespell

      - name: Run backend lint
        run: |
          source tools/ci/activate-venv
          echo "Test suite is running under $(python --version)."
          ./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky

      - name: Run frontend lint
        if: ${{ matrix.include_frontend_tests }}
        run: |
          source tools/ci/activate-venv
          ./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky

      - name: Run backend tests
        run: |
          source tools/ci/activate-venv
          ./tools/test-backend --coverage --include-webhooks --no-cov-cleanup --ban-console-output

      - name: Run mypy
        run: |
          source tools/ci/activate-venv
          # We run mypy after the backend tests so we get output from the
          # backend tests, which tend to uncover more serious problems, first.
          ./tools/run-mypy --version
          ./tools/run-mypy

      - name: Run miscellaneous tests
        run: |
          source tools/ci/activate-venv

          # Currently our compiled requirements files will differ for different python versions
          # so we will run test-locked-requirements only for Debian 10.
          # ./tools/test-locked-requirements
          # ./tools/test-run-dev  # https://github.com/zulip/zulip/pull/14233
          #
          # This test has been persistently flaky at like 1% frequency, is slow,
          # and is for a very specific single feature, so we don't run it by default:
          # ./tools/test-queue-worker-reload

          ./tools/test-migrations
          ./tools/setup/optimize-svg --check
          ./tools/setup/generate_integration_bots_avatars.py --check-missing

          # Ban check-database-compatibility.py from transitively
          # relying on static/generated, because it might not be
          # up-to-date at that point in upgrade-zulip-stage-2.
          chmod 000 static/generated
          ./scripts/lib/check-database-compatibility.py
          chmod 755 static/generated

      - name: Run documentation and api tests
        run: |
          source tools/ci/activate-venv
          # In CI, we only test links we control in test-documentation to avoid flakes
          ./tools/test-documentation --skip-external-links
          ./tools/test-help-documentation --skip-external-links
          ./tools/test-api

      - name: Run node tests
        if: ${{ matrix.include_frontend_tests }}
        run: |
          source tools/ci/activate-venv
          # Run the node tests first, since they're fast and deterministic
          ./tools/test-js-with-node --coverage --parallel=1

      - name: Check schemas
        if: ${{ matrix.include_frontend_tests }}
        run: |
          source tools/ci/activate-venv
          # Check that various schemas are consistent. (is fast)
          ./tools/check-schemas

      - name: Check capitalization of strings
        if: ${{ matrix.include_frontend_tests }}
        run: |
          source tools/ci/activate-venv
          ./manage.py makemessages --locale en
          PYTHONWARNINGS=ignore ./tools/check-capitalization --no-generate
          PYTHONWARNINGS=ignore ./tools/check-frontend-i18n --no-generate

      - name: Run puppeteer tests
        if: ${{ matrix.include_frontend_tests }}
        run: |
          source tools/ci/activate-venv
          ./tools/test-js-with-puppeteer

      - name: Check for untracked files
        run: |
          source tools/ci/activate-venv
          # This final check looks for untracked files that may have been
          # created by test-backend or provision.
          untracked="$(git ls-files --exclude-standard --others)"
          if [ -n "$untracked" ]; then
              printf >&2 "Error: untracked files:\n%s\n" "$untracked"
              exit 1
          fi

      - name: Test locked requirements
        if: ${{ matrix.os == 'buster' }}
        run: |
          . /srv/zulip-py3-venv/bin/activate && \
              ./tools/test-locked-requirements

      - name: Upload coverage reports
        # Only upload coverage when both frontend and backend
        # tests are run.
        if: ${{ matrix.include_frontend_tests }}
        uses: codecov/codecov-action@v2
        with:
          files: var/coverage.xml,var/node-coverage/lcov.info

      - name: Store Puppeteer artifacts
        # Upload these on failure, as well
        if: ${{ always() && matrix.include_frontend_tests }}
        uses: actions/upload-artifact@v2
        with:
          name: puppeteer
          path: ./var/puppeteer
          retention-days: 60

      - name: Check development database build
        if: ${{ matrix.os == 'focal' || matrix.os == 'bullseye' || matrix.os == 'jammy' }}
        run: ./tools/ci/setup-backend

      - name: Report status
        if: failure()
        env:
          ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
        run: tools/ci/send-failure-message
.gitignore (24 lines changed)
@@ -27,18 +27,12 @@
 package-lock.json
 
 /.vagrant
-/var/*
-!/var/puppeteer
-/var/puppeteer/*
-!/var/puppeteer/test_credentials.d.ts
+/var
 
 /.dmypy.json
 
 # Generated i18n data
-/locale/en
 /locale/language_options.json
 /locale/language_name_map.json
 /locale/*/mobile.json
+# Dockerfiles generated for CircleCI
+/tools/circleci/images
 
 # Static build
 *.mo
@@ -48,7 +42,6 @@ npm-debug.log
 /staticfiles.json
 /webpack-stats-production.json
 /yarn-error.log
-zulip-git-version
 
 # Test / analysis tools
 .coverage
@@ -70,18 +63,13 @@ zulip.kdev4
 *.kate-swp
 *.sublime-project
 *.sublime-workspace
+.vscode/
 *.DS_Store
-# VS Code. Avoid checking in .vscode in general, while still specifying
-# recommended extensions for working with this repository.
-/.vscode/**/*
-!/.vscode/extensions.json
-# .cache/ is generated by VS Code test runner
+# .cache/ is generated by VSCode's test runner
 .cache/
 .eslintcache
-
-# Core dump files
-core
 
 ## Miscellaneous
 # (Ideally this section is empty.)
+zthumbor/thumbor_local_settings.py
 .transifexrc
.gitlint (4 lines changed)
@@ -1,9 +1,9 @@
 [general]
-ignore=title-trailing-punctuation, body-min-length, body-is-missing
+ignore=title-trailing-punctuation, body-min-length, body-is-missing, title-imperative-mood
 
 extra-path=tools/lib/gitlint-rules.py
 
-[title-match-regex]
+[title-match-regex-allow-exception]
 regex=^(.+:\ )?[A-Z].+\.$
 
 [title-max-length]
.isort.cfg (new file, 10 lines)
@@ -0,0 +1,10 @@
[settings]
line_length = 79
multi_line_output = 2
balanced_wrapping = true
known_third_party = django, ujson, sqlalchemy
known_first_party = zerver, zproject, version, confirmation, zilencer, analytics, frontend_tests, scripts, corporate
sections = FUTURE, STDLIB, THIRDPARTY, FIRSTPARTY, LOCALFOLDER
lines_after_imports = 1
# See the comment related to ioloop_logging for why this is skipped.
skip = zerver/management/commands/runtornado.py
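These settings are picked up automatically when isort runs from the repo root.
A sketch of checking a single file against them (--check-only and --diff are
standard isort flags; the file path is just an example):

    isort --check-only --diff zerver/models.py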
.mailmap (deleted, 73 lines)
@@ -1,73 +0,0 @@
# This file teaches `git log` and friends the canonical names
# and email addresses to use for our contributors.
#
# For details on the format, see:
#   https://git.github.io/htmldocs/gitmailmap.html
#
# Handy commands for examining or adding to this file:
#
#   # shows all names/emails after mapping, sorted:
#   $ git shortlog -es | sort -k2
#
#   # shows raw names/emails, filtered by mapped name:
#   $ git log --format='%an %ae' --author=$NAME | uniq -c

Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net>
Alex Vandiver <alexmv@zulip.com> <github@chmrr.net>
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com>
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@zulip.com>
Alya Abbott <alya@zulip.com> <2090066+alya@users.noreply.github.com>
Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in>
Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com>
Anders Kaseorg <anders@zulip.com> <andersk@mit.edu>
Austin Riba <austin@zulip.com> <austin@m51.io>
BIKI DAS <bikid475@gmail.com>
Brock Whittaker <brock@zulipchat.com> <bjwhitta@asu.edu>
Brock Whittaker <brock@zulipchat.com> <brockwhittaker@Brocks-MacBook.local>
Brock Whittaker <brock@zulipchat.com> <brock@zulipchat.org>
Chris Bobbe <cbobbe@zulip.com> <cbobbe@zulipchat.com>
Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com>
Eeshan Garg <eeshan@zulip.com> <jerryguitarist@gmail.com>
Greg Price <greg@zulip.com> <gnprice@gmail.com>
Greg Price <greg@zulip.com> <greg@zulipchat.com>
Greg Price <greg@zulip.com> <price@mit.edu>
Jai soni <jai_s@me.iitr.ac.in>
Jai soni <jai_s@me.iitr.ac.in> <76561593+jai2201@users.noreply.github.com>
Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com>
Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com>
Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com>
Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com>
Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com>
Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com>
Kevin Scott <kevin.scott.98@gmail.com>
Lauryn Menard <lauryn@zulip.com> <lauryn.menard@gmail.com>
Mateusz Mandera <mateusz.mandera@zulip.com> <mateusz.mandera@protonmail.com>
m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in>
Palash Raghuwanshi <singhpalash0@gmail.com>
Parth <mittalparth22@gmail.com>
Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com>
Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu>
Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com>
Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com>
Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com>
Sayam Samal <samal.sayam@gmail.com>
Scott Feeney <scott@oceanbase.org> <scott@humbughq.com>
Scott Feeney <scott@oceanbase.org> <scott@zulip.com>
Shlok Patel <shlokcpatel2001@gmail.com>
Steve Howell <showell@zulip.com> <showell30@yahoo.com>
Steve Howell <showell@zulip.com> <showell@yahoo.com>
Steve Howell <showell@zulip.com> <showell@zulipchat.com>
Steve Howell <showell@zulip.com> <steve@humbughq.com>
Steve Howell <showell@zulip.com> <steve@zulip.com>
strifel <info@strifel.de>
Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com>
Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com>
Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu>
Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com>
Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com>
Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com>
Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com>
Sahil Batra <sahil@zulip.com> <sahilbatra839@gmail.com>
Yash RE <33805964+YashRE42@users.noreply.github.com> <YashRE42@github.com>
Yash RE <33805964+YashRE42@users.noreply.github.com>
Yogesh Sirsat <yogeshsirsat56@gmail.com>
@@ -1,8 +0,0 @@
/corporate/tests/stripe_fixtures
/locale
/static/third
/templates/**/*.md
/tools/setup/emoji/emoji_map.json
/zerver/tests/fixtures
/zerver/webhooks/*/doc.md
/zerver/webhooks/*/fixtures
@@ -1,15 +0,0 @@
{
    "source_directories": ["."],
    "taint_models_path": [
        "stubs/taint",
        "zulip-py3-venv/lib/pyre_check/taint/"
    ],
    "search_path": [
        "stubs/",
        "zulip-py3-venv/lib/pyre_check/stubs/"
    ],
    "typeshed": "zulip-py3-venv/lib/pyre_check/typeshed/",
    "exclude": [
        "/srv/zulip/zulip-py3-venv/.*"
    ]
}
@@ -1 +0,0 @@
sonar.inclusions=**/*.py,**/*.html
67
.stylelintrc
Normal file
@@ -0,0 +1,67 @@
{
    "rules": {
        # Stylistic rules for CSS.
        "function-comma-space-after": "always",
        "function-comma-space-before": "never",
        "function-max-empty-lines": 0,
        "function-whitespace-after": "always",

        "value-keyword-case": "lower",
        "value-list-comma-newline-after": "always-multi-line",
        "value-list-comma-space-after": "always-single-line",
        "value-list-comma-space-before": "never",
        "value-list-max-empty-lines": 0,

        "unit-case": "lower",
        "property-case": "lower",
        "color-hex-case": "lower",

        "declaration-bang-space-before": "always",
        "declaration-colon-newline-after": "always-multi-line",
        "declaration-colon-space-after": "always-single-line",
        "declaration-colon-space-before": "never",
        "declaration-block-semicolon-newline-after": "always",
        "declaration-block-semicolon-space-before": "never",
        "declaration-block-trailing-semicolon": "always",

        "block-closing-brace-empty-line-before": "never",
        "block-closing-brace-newline-after": "always",
        "block-closing-brace-newline-before": "always",
        "block-opening-brace-newline-after": "always",
        "block-opening-brace-space-before": "always",

        "selector-attribute-brackets-space-inside": "never",
        "selector-attribute-operator-space-after": "never",
        "selector-attribute-operator-space-before": "never",
        "selector-combinator-space-after": "always",
        "selector-combinator-space-before": "always",
        "selector-descendant-combinator-no-non-space": true,
        "selector-pseudo-class-parentheses-space-inside": "never",
        "selector-pseudo-element-case": "lower",
        "selector-pseudo-element-colon-notation": "double",
        "selector-type-case": "lower",
        "selector-list-comma-newline-after": "always",
        "selector-list-comma-space-before": "never",

        "media-feature-colon-space-after": "always",
        "media-feature-colon-space-before": "never",
        "media-feature-name-case": "lower",
        "media-feature-parentheses-space-inside": "never",
        "media-feature-range-operator-space-after": "always",
        "media-feature-range-operator-space-before": "always",
        "media-query-list-comma-newline-after": "always",
        "media-query-list-comma-space-before": "never",

        "at-rule-name-case": "lower",
        "at-rule-name-space-after": "always",
        "at-rule-semicolon-newline-after": "always",
        "at-rule-semicolon-space-before": "never",

        "comment-whitespace-inside": "always",
        "indentation": 4,

        # Limit language features
        "color-no-hex": true,
        "color-named": "never",
    }
}
67
.travis.yml
Normal file
@@ -0,0 +1,67 @@
# See https://zulip.readthedocs.io/en/latest/testing/continuous-integration.html for
# high-level documentation on our Travis CI setup.
dist: trusty
group: deprecated-2017Q4
install:
  # Disable sometimes-broken sources.list in Travis base images
  - sudo rm -vf /etc/apt/sources.list.d/*
  - sudo apt-get update

  # Disable Travis CI's built-in NVM installation
  - mispipe "mv ~/.nvm ~/.travis-nvm-disabled" ts

  # Install codecov, the library for the code coverage reporting tool we use
  # With a retry to minimize impact of transient networking errors.
  - mispipe "pip install codecov" ts || mispipe "pip install codecov" ts

  # This is the main setup job for the test suite
  - mispipe "tools/ci/setup-$TEST_SUITE" ts

  # Clean any caches that are not in use to avoid our cache
  # becoming huge.
  - mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0" ts

script:
  # We unset GEM_PATH here as a hack to work around Travis CI having
  # broken running their system puppet with Ruby. See
  # https://travis-ci.org/zulip/zulip/jobs/240120991 for an example traceback.
  - unset GEM_PATH
  - mispipe "./tools/ci/$TEST_SUITE" ts
cache:
  yarn: true
  apt: false
  directories:
    - $HOME/zulip-venv-cache
    - $HOME/zulip-npm-cache
    - $HOME/zulip-emoji-cache
    - $HOME/node
    - $HOME/misc
env:
  global:
    - BOTO_CONFIG=/nonexistent
language: python
# Our test suites generally run on Python 3.4, the version in
# Ubuntu 14.04 trusty, which is the oldest OS release we support.
matrix:
  include:
    # Travis will actually run the jobs in the order they're listed here;
    # that doesn't seem to be documented, but it's what we see empirically.
    # We only get 4 jobs running at a time, so we try to make the first few
    # the most likely to break.
    - python: "3.4"
      env: TEST_SUITE=production
      # Other suites moved to CircleCI -- see .circleci/.
sudo: required
addons:
  artifacts:
    paths:
      # Casper debugging data (screenshots, etc.) is super useful for
      # debugging test flakes.
      - $(ls var/casper/* | tr "\n" ":")
      - $(ls /tmp/zulip-test-event-log/* | tr "\n" ":")
  postgresql: "9.3"
  apt:
    packages:
      - moreutils
after_success:
  - codecov
20
.tx/config
@@ -3,31 +3,31 @@ host = https://www.transifex.com
lang_map = zh-Hans: zh_Hans, zh-Hant: zh_Hant

[zulip.djangopo]
file_filter = locale/<lang>/LC_MESSAGES/django.po
source_file = locale/en/LC_MESSAGES/django.po
source_file = static/locale/en/LC_MESSAGES/django.po
source_lang = en
type = PO
file_filter = static/locale/<lang>/LC_MESSAGES/django.po

[zulip.translationsjson]
file_filter = locale/<lang>/translations.json
source_file = locale/en/translations.json
source_file = static/locale/en/translations.json
source_lang = en
type = KEYVALUEJSON
file_filter = static/locale/<lang>/translations.json

[zulip.mobile]
file_filter = locale/<lang>/mobile.json
source_file = locale/en/mobile.json
source_file = static/locale/en/mobile.json
source_lang = en
type = KEYVALUEJSON
file_filter = static/locale/<lang>/mobile.json

[zulip-test.djangopo]
file_filter = locale/<lang>/LC_MESSAGES/django.po
source_file = locale/en/LC_MESSAGES/django.po
source_file = static/locale/en/LC_MESSAGES/django.po
source_lang = en
type = PO
file_filter = static/locale/<lang>/LC_MESSAGES/django.po

[zulip-test.translationsjson]
file_filter = locale/<lang>/translations.json
source_file = locale/en/translations.json
source_file = static/locale/en/translations.json
source_lang = en
type = KEYVALUEJSON
file_filter = static/locale/<lang>/translations.json
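As a rough sketch of what these stanzas mean (my own illustration, not the actual tx client implementation): each `file_filter` locates a language's translation file by substituting the language code for the `<lang>` placeholder, after `lang_map` renames codes like `zh-Hans`.

# Hypothetical helper illustrating .tx/config semantics; the real tx
# client does this internally.
LANG_MAP = {"zh-Hans": "zh_Hans", "zh-Hant": "zh_Hant"}  # from lang_map
FILE_FILTER = "locale/<lang>/LC_MESSAGES/django.po"      # from [zulip.djangopo]

def translation_path(lang: str) -> str:
    # Rename the language code if lang_map says so, then fill in <lang>.
    return FILE_FILTER.replace("<lang>", LANG_MAP.get(lang, lang))

print(translation_path("de"))       # -> locale/de/LC_MESSAGES/django.po
print(translation_path("zh-Hans"))  # -> locale/zh_Hans/LC_MESSAGES/django.po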
23
.vscode/extensions.json
vendored
@@ -1,23 +0,0 @@
{
    // Recommended VS Code extensions for zulip/zulip.
    //
    // VS Code prompts a user to install the recommended extensions
    // when a workspace is opened for the first time. The user can
    // also review the list with the 'Extensions: Show Recommended
    // Extensions' command. See
    // https://code.visualstudio.com/docs/editor/extension-marketplace#_workspace-recommended-extensions
    // for more information.
    //
    // Extension identifier format: ${publisher}.${name}.
    // Example: vscode.csharp

    "recommendations": [
        "42crunch.vscode-openapi",
        "dbaeumer.vscode-eslint",
        "esbenp.prettier-vscode",
        "ms-vscode-remote.vscode-remote-extensionpack"
    ],

    // Extensions recommended by VS Code which are not recommended for users of zulip/zulip.
    "unwantedRecommendations": []
}
@@ -14,46 +14,46 @@ This isn't an exhaustive list of things that you can't do. Rather, take it
in the spirit in which it's intended --- a guide to make it easier to enrich
all of us and the technical communities in which we participate.

## Expected behavior
## Expected Behavior

The following behaviors are expected and requested of all community members:

- Participate. In doing so, you contribute to the health and longevity of
* Participate. In doing so, you contribute to the health and longevity of
the community.
- Exercise consideration and respect in your speech and actions.
- Attempt collaboration before conflict. Assume good faith.
- Refrain from demeaning, discriminatory, or harassing behavior and speech.
- Take action or alert community leaders if you notice a dangerous
* Exercise consideration and respect in your speech and actions.
* Attempt collaboration before conflict. Assume good faith.
* Refrain from demeaning, discriminatory, or harassing behavior and speech.
* Take action or alert community leaders if you notice a dangerous
situation, someone in distress, or violations of this code, even if they
seem inconsequential.
- Community event venues may be shared with members of the public; be
* Community event venues may be shared with members of the public; be
respectful to all patrons of these locations.

## Unacceptable behavior
## Unacceptable Behavior

The following behaviors are considered harassment and are unacceptable
within the Zulip community:

- Jokes or derogatory language that singles out members of any race,
* Jokes or derogatory language that singles out members of any race,
ethnicity, culture, national origin, color, immigration status, social and
economic class, educational level, language proficiency, sex, sexual
orientation, gender identity and expression, age, size, family status,
political belief, religion, and mental and physical ability.
- Violence, threats of violence, or violent language directed against
* Violence, threats of violence, or violent language directed against
another person.
- Disseminating or threatening to disseminate another person's personal
* Disseminating or threatening to disseminate another person's personal
information.
- Personal insults of any sort.
- Posting or displaying sexually explicit or violent material.
- Inappropriate photography or recording.
- Deliberate intimidation, stalking, or following (online or in person).
- Unwelcome sexual attention. This includes sexualized comments or jokes,
* Personal insults of any sort.
* Posting or displaying sexually explicit or violent material.
* Inappropriate photography or recording.
* Deliberate intimidation, stalking, or following (online or in person).
* Unwelcome sexual attention. This includes sexualized comments or jokes,
inappropriate touching or groping, and unwelcomed sexual advances.
- Sustained disruption of community events, including talks and
* Sustained disruption of community events, including talks and
presentations.
- Advocating for, or encouraging, any of the behaviors above.
* Advocating for, or encouraging, any of the behaviors above.

## Reporting and enforcement
## Reporting and Enforcement

Harassment and other code of conduct violations reduce the value of the
community for everyone. If someone makes you or anyone else feel unsafe or
@@ -78,7 +78,7 @@ something you can do while a violation is happening, do it. A lot of the
harms of harassment and other violations can be mitigated by the victim
knowing that the other people present are on their side.

All reports will be kept confidential. In some cases, we may determine that a
All reports will be kept confidential. In some cases we may determine that a
public statement will need to be made. In such cases, the identities of all
victims and reporters will remain confidential unless those individuals
instruct us otherwise.
@@ -95,10 +95,11 @@ behavior occurring outside the scope of community activities when such
behavior has the potential to adversely affect the safety and well-being of
community members.

## License and attribution
## License and Attribution

This Code of Conduct is adapted from the
[Citizen Code of Conduct](http://citizencodeofconduct.org/) and the
[Django Code of Conduct](https://www.djangoproject.com/conduct/), and is
under a
[Creative Commons BY-SA](https://creativecommons.org/licenses/by-sa/4.0/)
[Creative Commons BY-SA](http://creativecommons.org/licenses/by-sa/4.0/)
license.
559
CONTRIBUTING.md
@@ -5,359 +5,188 @@ Welcome to the Zulip community!
## Community

The
[Zulip community server](https://zulip.com/development-community/)
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html)
is the primary communication forum for the Zulip community. It is a good
place to start whether you have a question, are a new contributor, are a new
user, or anything else. Please review our
[community norms](https://zulip.com/development-community/#community-norms)
user, or anything else. Make sure to read the
[community norms](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html#community-norms)
before posting. The Zulip community is also governed by a
[code of conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html).

You can subscribe to zulip-devel@googlegroups.com for a lower traffic (~1
email/month) way to hear about things like mentorship opportunities with Google
Code-in, in-person sprints at conferences, and other opportunities to
contribute.

## Ways to contribute

To make a code or documentation contribution, read our
[step-by-step guide](#your-first-codebase-contribution) to getting
started with the Zulip codebase. A small sample of the type of work that
needs doing:

- Bug squashing and feature development on our Python/Django
* Bug squashing and feature development on our Python/Django
[backend](https://github.com/zulip/zulip), web
[frontend](https://github.com/zulip/zulip), React Native
[mobile app](https://github.com/zulip/zulip-mobile), or Electron
[desktop app](https://github.com/zulip/zulip-desktop).
- Building out our
[desktop app](https://github.com/zulip/zulip-electron).
* Building out our
[Python API and bots](https://github.com/zulip/python-zulip-api) framework.
- [Writing an integration](https://zulip.com/api/integrations-overview).
- Improving our [user](https://zulip.com/help/) or
* [Writing an integration](https://zulipchat.com/api/integrations-overview).
* Improving our [user](https://zulipchat.com/help/) or
[developer](https://zulip.readthedocs.io/en/latest/) documentation.
- [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html)
* [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html)
and manually testing pull requests.

**Non-code contributions**: Some of the most valuable ways to contribute
don't require touching the codebase at all. For example, you can:
don't require touching the codebase at all. We list a few of them below:

- [Report issues](#reporting-issues), including both feature requests and
* [Reporting issues](#reporting-issues), including both feature requests and
bug reports.
- [Give feedback](#user-feedback) if you are evaluating or using Zulip.
- [Sponsor Zulip](https://github.com/sponsors/zulip) through the GitHub sponsors program.
- [Translate](https://zulip.readthedocs.io/en/latest/translating/translating.html)
Zulip into your language.
- [Stay connected](#stay-connected) with Zulip, and [help others
find us](#help-others-find-zulip).
* [Giving feedback](#user-feedback) if you are evaluating or using Zulip.
* [Translating](https://zulip.readthedocs.io/en/latest/translating/translating.html)
Zulip.
* [Outreach](#zulip-outreach): Star us on GitHub, upvote us
on product comparison sites, or write for [the Zulip blog](http://blog.zulip.org/).

## Your first codebase contribution
## Your first (codebase) contribution

This section has a step by step guide to starting as a Zulip codebase
contributor. It's long, but don't worry about doing all the steps perfectly;
no one gets it right the first time, and there are a lot of people available
to help.

- First, make an account on the
[Zulip community server](https://zulip.com/development-community/),
* First, make an account on the
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html),
paying special attention to the community norms. If you'd like, introduce
yourself in
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members), using
your name as the topic. Bonus: tell us about your first impressions of
Zulip, and anything that felt confusing/broken as you started using the
product.
- Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor).
- [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html),
* Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor).
* [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html),
getting help in
[#provision help](https://chat.zulip.org/#narrow/stream/21-provision-help)
[#development help](https://chat.zulip.org/#narrow/stream/49-development-help)
if you run into any troubles.
- Familiarize yourself with [using the development environment](https://zulip.readthedocs.io/en/latest/development/using.html).
- Go through the [new application feature
tutorial](https://zulip.readthedocs.io/en/latest/tutorials/new-feature-tutorial.html) to get familiar with
how the Zulip codebase is organized and how to find code in it.
- Read the [Zulip guide to
Git](https://zulip.readthedocs.io/en/latest/git/index.html) if you
are unfamiliar with Git or Zulip's rebase-based Git workflow,
getting help in [#git
help](https://chat.zulip.org/#narrow/stream/44-git-help) if you run
into any troubles. Even Git experts should read the [Zulip-specific
Git tools
page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html).
* Read the
[Zulip guide to Git](https://zulip.readthedocs.io/en/latest/git/index.html)
and do the Git tutorial (coming soon) if you are unfamiliar with
Git, getting help in
[#git help](https://chat.zulip.org/#narrow/stream/44-git-help) if
you run into any troubles. Be sure to check out the
[extremely useful Zulip-specific tools page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html).
* Sign the
[Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/).

### Where to look for an issue
### Picking an issue

Now you're ready to pick your first issue! Zulip has several repositories you
can check out, depending on your interests. There are hundreds of open issues in
the [main Zulip server and web app
repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
alone.
Now, you're ready to pick your first issue! There are hundreds of open issues
in the main codebase alone. This section will help you find an issue to work
on.

You can look through issues tagged with the "help wanted" label, which is used
to indicate the issues that are ready for contributions. Some repositories also
use the "good first issue" label to tag issues that are especially approachable
for new contributors.

- [Server and web app](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
- [Mobile apps](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
- [Desktop app](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
- [Terminal app](https://github.com/zulip/zulip-terminal/issues?q=is%3Aopen+is%3Aissue+label%3A"help+wanted")
- [Python API bindings and bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)

### Picking an issue to work on

There's a lot to learn while making your first pull request, so start small!
Many first contributions have fewer than 10 lines of changes (not counting
changes to tests).

We recommend the following process for finding an issue to work on:

1. Read the description of an issue tagged with the "help wanted" label and make
sure you understand it.
2. If it seems promising, poke around the product
(on [chat.zulip.org](https://chat.zulip.org) or in the development
environment) until you know how the piece being
described fits into the bigger picture. If after some exploration the
description seems confusing or ambiguous, post a question on the GitHub
issue, as others may benefit from the clarification as well.
3. When you find an issue you like, try to get started working on it. See if you
can find the part of the code you'll need to modify (`git grep` is your
friend!) and get some idea of how you'll approach the problem.
4. If you feel lost, that's OK! Go through these steps again with another issue.
There's plenty to work on, and the exploration you do will help you learn
more about the project.

Note that you are _not_ claiming an issue while you are iterating through steps
1-4. _Before you claim an issue_, you should be confident that you will be able to
tackle it effectively.

If the lists of issues are overwhelming, you can post in
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with a
bit about your background and interests, and we'll help you out. The most
important thing to say is whether you're looking for a backend (Python),
frontend (JavaScript and TypeScript), mobile (React Native), desktop (Electron),
documentation (English) or visual design (JavaScript/TypeScript + CSS) issue, and a
bit about your programming experience and available time.

Additional tips for the [main server and web app
repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22):

- We especially recommend browsing recently opened issues, as there are more
likely to be easy ones for you to find.
- All issues are partitioned into areas like
* If you're interested in
[mobile](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue),
[desktop](https://github.com/zulip/zulip-electron/issues?q=is%3Aopen+is%3Aissue),
or
[bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue)
development, check the respective links for open issues, or post in
[#mobile](https://chat.zulip.org/#narrow/stream/48-mobile),
[#desktop](https://chat.zulip.org/#narrow/stream/16-desktop), or
[#integration](https://chat.zulip.org/#narrow/stream/127-integrations).
* For the main server and web repository, start by looking through issues
with the label
[good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A"good+first+issue").
These are smaller projects particularly suitable for a first contribution.
* We also partition all of our issues in the main repo into areas like
admin, compose, emoji, hotkeys, i18n, onboarding, search, etc. Look
through our [list of labels](https://github.com/zulip/zulip/labels), and
click on some of the `area:` labels to see all the issues related to your
areas of interest.
- Avoid issues with the "difficult" label unless you
understand why it is difficult and are highly confident you can resolve the
issue correctly and completely.
* If the lists of issues are overwhelming, post in
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with a
bit about your background and interests, and we'll help you out. The most
important thing to say is whether you're looking for a backend (Python),
frontend (JavaScript), mobile (React Native), desktop (Electron),
documentation (English) or visual design (JavaScript + CSS) issue, and a
bit about your programming experience and available time.

### Claiming an issue
We also welcome suggestions of features that you feel would be valuable or
changes that you feel would make Zulip a better open source project. If you
have a new feature you'd like to add, we recommend you start by posting in
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with the
feature idea and the problem that you're hoping to solve.

#### In the main server and web app repository

After making sure the issue is tagged with a [help
wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
label, post a comment with `@zulipbot claim` to the issue thread.
[Zulipbot](https://github.com/zulip/zulipbot) is a GitHub workflow bot; it will
assign you to the issue and label the issue as "in progress".

New contributors can only claim one issue until their first pull request is
merged. This is to encourage folks to finish ongoing work before starting
something new. If you would like to pick up a new issue while waiting for review
on an almost-ready pull request, you can post a comment to this effect on the
issue you're interested in.

#### In other Zulip repositories

There is no bot for other repositories, so you can simply post a comment saying
that you'd like to work on the issue.

Please follow the same guidelines as described above: find an issue labeled
"help wanted", and only pick up one issue at a time to start with.
Other notes:
* For a first pull request, it's better to aim for a smaller contribution
than a bigger one. Many first contributions have fewer than 10 lines of
changes (not counting changes to tests).
* The full list of issues looking for a contributor can be found with the
[good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
and
[help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
labels.
* For most new contributors, there's a lot to learn while making your first
pull request. It's OK if it takes you a while; that's normal! You'll be
able to work a lot faster as you build experience.

### Working on an issue

You're encouraged to ask questions on how to best implement or debug your
changes -- the Zulip maintainers are excited to answer questions to help you
stay unblocked and working efficiently. You can ask questions in the [Zulip
development community](https://zulip.com/development-community/), or on the
GitHub issue or pull request.
To work on an issue, claim it by adding a comment with `@zulipbot claim` to
the issue thread. [Zulipbot](https://github.com/zulip/zulipbot) is a GitHub
workflow bot; it will assign you to the issue and label the issue as "in
progress". Some additional notes:

To get early feedback on any UI changes, we encourage you to post screenshots of
your work in the [#design
stream](https://chat.zulip.org/#narrow/stream/101-design) in the [Zulip
development community](https://zulip.com/development-community/)
* You can only claim issues with the
[good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
or
[help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
labels. Zulipbot will give you an error if you try to claim an issue
without one of those labels.
* You're encouraged to ask questions on how to best implement or debug your
changes -- the Zulip maintainers are excited to answer questions to help
you stay unblocked and working efficiently. You can ask questions on
chat.zulip.org, or on the GitHub issue or pull request.
* We encourage early pull requests for work in progress. Prefix the title of
work in progress pull requests with `[WIP]`, and remove the prefix when
you think it might be mergeable and want it to be reviewed.
* After updating a PR, add a comment to the GitHub thread mentioning that it
is ready for another review. GitHub only notifies maintainers of the
changes when you post a comment, so if you don't, your PR will likely be
neglected by accident!

For more advice, see [What makes a great Zulip
contributor?](https://zulip.readthedocs.io/en/latest/overview/contributing.html#what-makes-a-great-zulip-contributor)
below.
### And beyond

### Submitting a pull request

When you believe your code is ready, follow the [guide on how to review
code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code)
to review your own work. You can often find things you missed by taking a step
back to look over your work before asking others to do so. Catching mistakes
yourself will help your PRs be merged faster, and folks will appreciate the
quality and professionalism of your work.

Then, submit your changes. Carefully reading our [Git guide][git-guide], and in
particular the section on [making a pull request][git-guide-make-pr],
will help avoid many common mistakes.

Once you are satisfied with the quality of your PR, follow the
[guidelines on asking for a code
review](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#asking-for-a-code-review)
to request a review. If you are not sure what's best, simply post a
comment on the main GitHub thread for your PR clearly indicating that
it is ready for review, and the project maintainers will take a look
and follow up with next steps.

It's OK if your first issue takes you a while; that's normal! You'll be
able to work a lot faster as you build experience.

If it helps your workflow, you can submit a work-in-progress pull
request before your work is ready for review. Simply prefix the title
of work in progress pull requests with `[WIP]`, and then remove the
prefix when you think it's time for someone else to review your work.

[git-guide]: https://zulip.readthedocs.io/en/latest/git/
[git-guide-make-pr]: https://zulip.readthedocs.io/en/latest/git/pull-requests.html

### Stages of a pull request

Your pull request will likely go through several stages of review.

1. If your PR makes user-facing changes, the UI and user experience may be
reviewed early on, without reference to the code. You will get feedback on
any user-facing bugs in the implementation. To minimize the number of review
round-trips, make sure to [thoroughly
test](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#manual-testing)
your own PR prior to asking for review.
2. There may be choices made in the implementation that the reviewer
will ask you to revisit. This process will go more smoothly if you
specifically call attention to the decisions you made while
drafting the PR and any points about which you are uncertain. The
PR description and comments on your own PR are good ways to do this.
3. Oftentimes, seeing an initial implementation will make it clear that the
product design for a feature needs to be revised, or that additional changes
are needed. The reviewer may therefore ask you to amend or change the
implementation. Some changes may be blockers for getting the PR merged, while
others may be improvements that can happen afterwards. Feel free to ask if
it's unclear which type of feedback you're getting. (Follow-ups can be a
great next issue to work on!)
4. In addition to any UI/user experience review, all PRs will go through one or
more rounds of code review. Your code may initially be [reviewed by other
contributors](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html).
This helps us make good use of project maintainers' time, and helps you make
progress on the PR by getting more frequent feedback. A project maintainer
may leave a comment asking someone with expertise in the area you're working
on to review your work.
5. Final code review and integration for server and webapp PRs is generally done
by `@timabbott`.

#### How to help move the review process forward

The key to keeping your review moving through the review process is to:

- Address _all_ the feedback to the best of your ability.
- Make it clear when the requested changes have been made
and you believe it's time for another look.
- Make it as easy as possible to review the changes you made.

In order to do this, when you believe you have addressed the previous round of
feedback on your PR as best you can, post a comment asking reviewers to take
another look. Your comment should make it easy to understand what has been done
and what remains by:

- Summarizing the changes made since the last review you received.
- Highlighting remaining questions or decisions, with links to any relevant
chat.zulip.org threads.
- Providing updated screenshots and information on manual testing if
appropriate.

The easier it is to review your work, the more likely you are to receive quick
feedback.

### Beyond the first issue

To find a second issue to work on, we recommend looking through issues with the same
A great place to look for a second issue is to look for issues with the same
`area:` label as the last issue you resolved. You'll be able to reuse the
work you did learning how that part of the codebase works. Also, the path to
becoming a core developer often involves taking ownership of one of these area
labels.

### Common questions

- **What if somebody is already working on the issue I want to claim?** There
are lots of issues to work on! If somebody else is actively working on the
issue, you can find a different one, or help with
reviewing their work.
- **What if somebody else claims an issue while I'm figuring out whether or not to
work on it?** No worries! You can contribute by providing feedback on
their pull request. If you've made good progress in understanding part of the
codebase, you can also find another "help wanted" issue in the same area to
work on.
- **What if there is already a pull request for the issue I want to work on?**
Start by reviewing the existing work. If you agree with the approach, you can
use the existing pull request (PR) as a starting point for your contribution. If
you think a different approach is needed, you can post a new PR, with a comment that clearly
explains _why_ you decided to start from scratch.
- **Can I come up with my own feature idea and work on it?** We welcome
suggestions of features or other improvements that you feel would be valuable. If you
have a new feature you'd like to add, you can start a conversation [in our
development community](https://zulip.com/development-community/#where-do-i-send-my-message)
explaining the feature idea and the problem that you're hoping to solve.
- **I think my PR is done, but it hasn't been merged yet. What's going on?**
1. **Double-check that you have addressed all the feedback**, including any comments
on [Git commit
discipline](https://zulip.readthedocs.io/en/latest/contributing/version-control.html#commit-discipline).
2. If all the feedback has been addressed, did you [leave a
comment](https://zulip.readthedocs.io/en/latest/overview/contributing.html#how-to-help-move-the-review-process-forward)
explaining that you have done so and **requesting another review**? If not,
it may not be clear to project maintainers or reviewers that your PR is
ready for another look.
3. There may be a pause between initial rounds of review for your PR and final
review by project maintainers. This is normal, and we encourage you to **work
on other issues** while you wait.
4. If you think the PR is ready and haven't seen any updates for a couple
of weeks, it can be helpful to **leave another comment**. Summarize the
overall state of the review process and your work, and indicate that you
are waiting for a review.
5. Finally, **Zulip project maintainers are people too**! They may be busy
with other work, and sometimes they might even take a vacation. ;) It can
occasionally take a few weeks for a PR in the final stages of the review
process to be merged.

## What makes a great Zulip contributor?

Zulip has a lot of experience working with new contributors. In our
experience, these are the best predictors of success:
Zulip runs a lot of [internship programs](#internship-programs), so we have
a lot of experience with new contributors. In our experience, these are the
best predictors of success:

- Posting good questions. It's very hard to answer a general question like, "How
do I do this issue?" When asking for help, explain
your current understanding, including what you've done or tried so far and where
you got stuck. Post tracebacks or other error messages if appropriate. For
more information, check out the ["Getting help" section of our community
guidelines](https://zulip.com/development-community/#getting-help) and
[this essay][good-questions-blog] for some good advice.
- Learning and practicing
* Posting good questions. This generally means explaining your current
understanding, saying what you've done or tried so far, and including
tracebacks or other error messages if appropriate.
* Learning and practicing
[Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/version-control.html#commit-discipline).
- Submitting carefully tested code. See our [detailed guide on how to review
code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code)
(yours or someone else's).
- Posting
* Submitting carefully tested code. This generally means checking your work
through a combination of automated tests and manually clicking around the
UI trying to find bugs in your work. See
[things to look for](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#things-to-look-for)
for additional ideas.
* Posting
[screenshots or GIFs](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html)
for frontend changes.
- Clearly describing what you have implemented and why. For example, if your
implementation differs from the issue description in some way or is a partial
step towards the requirements described in the issue, be sure to call
out those differences.
- Being responsive to feedback on pull requests. This means incorporating or
* Being responsive to feedback on pull requests. This means incorporating or
responding to all suggested changes, and leaving a note if you won't be
able to address things within a few days.
- Being helpful and friendly on the [Zulip community
server](https://zulip.com/development-community/).
* Being helpful and friendly on chat.zulip.org.

[good-questions-blog]: https://jvns.ca/blog/good-questions/

These are also the main criteria we use to select candidates for all
of our outreach programs.
These are also the main criteria we use to select interns for all of our
internship programs.

## Reporting issues

@@ -369,7 +198,7 @@ is, the best place to post issues is
[#issues](https://chat.zulip.org/#narrow/stream/9-issues) (or
[#mobile](https://chat.zulip.org/#narrow/stream/48-mobile) or
[#desktop](https://chat.zulip.org/#narrow/stream/16-desktop)) on the
[Zulip community server](https://zulip.com/development-community/).
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html).
This allows us to interactively figure out what is going on, let you know if
a similar issue has already been opened, and collect any other information
we need. Choose a 2-4 word topic that describes the issue, explain the issue
@@ -378,9 +207,8 @@ and how to reproduce it if known, your browser/OS if relevant, and a
if appropriate.

**Reporting security issues**. Please do not report security issues
publicly, including on public streams on chat.zulip.org. You can
email [security@zulip.com](mailto:security@zulip.com). We create a CVE for every
security issue in our released software.
publicly, including on public streams on chat.zulip.org. You can email
zulip-security@googlegroups.com. We create a CVE for every security issue.

## User feedback

@@ -390,114 +218,125 @@ hear about your experience with the product. If you're not sure what to
write, here are some questions we're always very curious to know the answer
to:

- Evaluation: What is the process by which your organization chose or will
* Evaluation: What is the process by which your organization chose or will
choose a group chat product?
- Pros and cons: What are the pros and cons of Zulip for your organization,
* Pros and cons: What are the pros and cons of Zulip for your organization,
and the pros and cons of other products you are evaluating?
- Features: What are the features that are most important for your
organization? In the best-case scenario, what would your chat solution do
* Features: What are the features that are most important for your
organization? In the best case scenario, what would your chat solution do
for you?
- Onboarding: If you remember it, what was your impression during your first
* Onboarding: If you remember it, what was your impression during your first
few minutes of using Zulip? What did you notice, and how did you feel? Was
there anything that stood out to you as confusing, or broken, or great?
- Organization: What does your organization do? How big is the organization?
* Organization: What does your organization do? How big is the organization?
A link to your organization's website?

You can contact us in the [#feedback stream of the Zulip development
community](https://chat.zulip.org/#narrow/stream/137-feedback) or
by emailing [support@zulip.com](mailto:support@zulip.com).
## Internship programs

## Outreach programs

Zulip participates in [Google Summer of Code
(GSoC)](https://developers.google.com/open-source/gsoc/) every year.
In the past, we've also participated in
[Outreachy](https://www.outreachy.org/), [Google
Code-In](https://developers.google.com/open-source/gci/), and hosted
summer interns from Harvard, MIT, and Stanford.
Zulip runs internship programs with
[Outreachy](https://www.outreachy.org/),
[Google Summer of Code (GSoC)](https://developers.google.com/open-source/gsoc/)
[1], and the
[MIT Externship program](https://alum.mit.edu/students/NetworkwithAlumni/ExternshipProgram),
and has in the past taken summer interns from Harvard, MIT, and
Stanford.

While each third-party program has its own rules and requirements, the
Zulip community approaches all of these programs with these ideas in
mind:

- We try to make the application process as valuable for the applicant as
possible. Expect high-quality code reviews, a supportive community, and
* We try to make the application process as valuable for the applicant as
possible. Expect high quality code reviews, a supportive community, and
publicly viewable patches you can link to from your resume, regardless of
whether you are selected.
- To apply, you'll have to submit at least one pull request to a Zulip
repository. Most students accepted to one of our programs have
* To apply, you'll have to submit at least one pull request to a Zulip
repository. Most students accepted to one of our programs have
several merged pull requests (including at least one larger PR) by
the time of the application deadline.
- The main criteria we use is quality of your best contributions, and
* The main criteria we use is quality of your best contributions, and
the bullets listed at
[What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor).
Because we focus on evaluating your best work, it doesn't hurt your
application to make mistakes in your first few PRs as long as your
work improves.

Most of our outreach program participants end up sticking around the
project long-term, and many have become core team members, maintaining
important parts of the project. We hope you apply!
Zulip also participates in
[Google Code-In](https://developers.google.com/open-source/gci/). Our
selection criteria for Finalists and Grand Prize Winners is the same as our
selection criteria for interns above.

Most of our interns end up sticking around the project long-term, and many
quickly become core team members. We hope you apply!

### Google Summer of Code

The largest outreach program Zulip participates in is GSoC (14
students in 2017; 11 in 2018; 17 in 2019; 18 in 2020; 18 in 2021). While we
don't control how
many slots Google allocates to Zulip, we hope to mentor a similar
number of students in future summers. Check out our [blog
post](https://blog.zulip.com/2021/09/30/google-summer-of-code-2021/) to learn
about the GSoC 2021 experience and our participants' accomplishments.
GSoC is by far the largest of our internship programs (we had 14 GSoC
students in summer 2017). While we don't control how many slots
Google allocates to Zulip, we hope to mentor a similar number of
students in 2018.

If you're reading this well before the application deadline and want
to make your application strong, we recommend getting involved in the
community and fixing issues in Zulip now. Having good contributions
and building a reputation for doing good work is the best way to have
a strong application.

Our [GSoC program page][gsoc-guide] has lots more details on how
Zulip does GSoC, as well as project ideas. Note, however, that the project idea
and building a reputation for doing good work is best way to have a
strong application. About half of Zulip's GSoC students for Summer
2017 had made significant contributions to the project by February
2017, and about half had not. Our
[GSoC project ideas page][gsoc-guide] has lots more details on how
Zulip does GSoC, as well as project ideas (though the project idea
list is maintained only during the GSoC application period, so if
you're looking at some other time of year, the project list is likely
out-of-date.
out-of-date).

In some years, we have also run a Zulip Summer of Code (ZSoC)
program for students who we wanted to accept into GSoC but did not have an
official slot for. Student expectations are the
same as with GSoC, and ZSoC has no separate application process; your
GSoC application is your ZSoC application. If we'd like to select you
We also have in some past years run a Zulip Summer of Code (ZSoC)
program for students who we didn't have enough slots to accept for
GSoC but were able to find funding for. Student expectations are the
same as with GSoC, and it has no separate application process; your
GSoC application is your ZSoC application. If we'd like to select you
for ZSoC, we'll contact you when the GSoC results are announced.

[gsoc-guide]: https://zulip.readthedocs.io/en/latest/contributing/gsoc.html
[gsoc-guide]: https://zulip.readthedocs.io/en/latest/overview/gsoc-ideas.html
[gsoc-faq]: https://developers.google.com/open-source/gsoc/faq

## Stay connected
[1] Formally, [GSoC isn't an internship][gsoc-faq], but it is similar
enough that we're treating it as such for the purposes of this
documentation.

Even if you are not logging into the development community on a regular basis,
you can still stay connected with the project.
## Zulip Outreach

- Follow us [on Twitter](https://twitter.com/zulip).
- Subscribe to [our blog](https://blog.zulip.org/).
- Join or follow the project [on LinkedIn](https://www.linkedin.com/company/zulip-project/).

## Help others find Zulip

Here are some ways you can help others find Zulip:

- Star us on GitHub. There are four main repositories:
**Upvoting Zulip**. Upvotes and reviews make a big difference in the public
perception of projects like Zulip. We've collected a few sites below
where we know Zulip has been discussed. Doing everything in the following
list typically takes about 15 minutes.
* Star us on GitHub. There are four main repositories:
[server/web](https://github.com/zulip/zulip),
[mobile](https://github.com/zulip/zulip-mobile),
[desktop](https://github.com/zulip/zulip-desktop), and
[desktop](https://github.com/zulip/zulip-electron), and
[Python API](https://github.com/zulip/python-zulip-api).
* [Follow us](https://twitter.com/zulip) on Twitter.

- "Like" and retweet [our tweets](https://twitter.com/zulip).
For both of the following, you'll need to make an account on the site if you
don't already have one.

- Upvote and post feedback on Zulip on comparison websites. A couple specific
ones to highlight:
* [Like Zulip](https://alternativeto.net/software/zulip-chat-server/) on
AlternativeTo. We recommend upvoting a couple of other products you like
as well, both to give back to their community, and since single-upvote
accounts are generally given less weight. You can also
[upvote Zulip](https://alternativeto.net/software/slack/) on their page
for Slack.
* [Add Zulip to your stack](https://stackshare.io/zulip) on StackShare, star
it, and upvote the reasons why people like Zulip that you find most
compelling. Again, we recommend adding a few other products that you like
as well.

- [AlternativeTo](https://alternativeto.net/software/zulip-chat-server/). You can also
[upvote Zulip](https://alternativeto.net/software/slack/) on their page
for Slack.
- [Add Zulip to your stack](https://stackshare.io/zulip) on StackShare, star
it, and upvote the reasons why people like Zulip that you find most
compelling.
We have a doc with more detailed instructions and a few other sites, if you
have been using Zulip for a while and want to contribute more.

**Blog posts**. Writing a blog post about your experiences with Zulip, or
about a technical aspect of Zulip can be a great way to spread the word
about Zulip.

We also occasionally [publish](http://blog.zulip.org/) longer form
articles related to Zulip. Our posts typically get tens of thousands
of views, and we always have good ideas for blog posts that we can
outline but don't have time to write. If you are an experienced writer
or copyeditor, send us a portfolio; we'd love to talk!
17 Dockerfile-dev Normal file
@@ -0,0 +1,17 @@
FROM ubuntu:trusty

EXPOSE 9991

RUN apt-get update && apt-get install -y wget

RUN localedef -i en_US -f UTF-8 en_US.UTF-8

RUN useradd -d /home/zulip -m zulip && echo 'zulip ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers

USER zulip

RUN ln -nsf /srv/zulip ~/zulip
RUN echo 'export LC_ALL="en_US.UTF-8" LANG="en_US.UTF-8" LANGUAGE="en_US.UTF-8"' >> ~zulip/.bashrc
RUN echo 'export LC_ALL="en_US.UTF-8" LANG="en_US.UTF-8" LANGUAGE="en_US.UTF-8"' >> ~zulip/.bash_profile

WORKDIR /srv/zulip
@@ -1,15 +1,42 @@
# To build run `docker build -f Dockerfile-postgresql .` from the root of the
# zulip repo.

# Currently the PostgreSQL images do not support automatic upgrading of
# Install build tools and build tsearch_extras for the current postgres
# version. Currently the postgres images do not support automatic upgrading of
# the on-disk data in volumes. So the base image can not currently be upgraded
# without users needing a manual pgdump and restore.
FROM postgres:10
RUN apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install -y \
       postgresql-server-dev-$PG_MAJOR \
       postgresql-server-dev-all \
       git \
       build-essential \
       fakeroot \
       devscripts
RUN git clone https://github.com/zulip/tsearch_extras.git \
    && cd tsearch_extras \
    && echo $PG_MAJOR > debian/pgversions \
    && pg_buildext updatecontrol \
    && debuild -b -uc -us

# Install hunspell, Zulip stop words, and run Zulip database
# Install tsearch_extras, hunspell, zulip stop words, and run zulip database
# init.
FROM groonga/pgroonga:latest-alpine-10-slim
RUN apk add -U --no-cache hunspell-en
RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix
COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/local/share/postgresql/tsearch_data/zulip_english.stop
COPY scripts/setup/create-db.sql /docker-entrypoint-initdb.d/zulip-create-db.sql
COPY scripts/setup/create-pgroonga.sql /docker-entrypoint-initdb.d/zulip-create-pgroonga.sql
FROM postgres:10
ENV TSEARCH_EXTRAS_VERSION=0.4
ENV TSEARCH_EXTRAS_DEB=postgresql-${PG_MAJOR}-tsearch-extras_${TSEARCH_EXTRAS_VERSION}_amd64.deb
COPY --from=0 /${TSEARCH_EXTRAS_DEB} /tmp
COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/share/postgresql/$PG_MAJOR/tsearch_data/zulip_english.stop
COPY scripts/setup/postgres-create-db /docker-entrypoint-initdb.d/postgres-create-db.sh
COPY scripts/setup/pgroonga-debian.asc /tmp
RUN apt-key add /tmp/pgroonga-debian.asc \
    && echo "deb http://packages.groonga.org/debian/ stretch main" > /etc/apt/sources.list.d/zulip.list \
    && apt-get update \
    && DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \
       hunspell-en-us \
       postgresql-${PG_MAJOR}-pgroonga \
    && DEBIAN_FRONTEND=noninteractive dpkg -i /tmp/${TSEARCH_EXTRAS_DEB} \
    && rm /tmp/${TSEARCH_EXTRAS_DEB} \
    && ln -sf /var/cache/postgresql/dicts/en_us.dict "/usr/share/postgresql/$PG_MAJOR/tsearch_data/en_us.dict" \
    && ln -sf /var/cache/postgresql/dicts/en_us.affix "/usr/share/postgresql/$PG_MAJOR/tsearch_data/en_us.affix" \
    && rm -rf /var/lib/apt/lists/*
1 LICENSE
@@ -1,3 +1,4 @@
Copyright 2011-2018 Dropbox, Inc., Kandra Labs, Inc., and contributors

                                 Apache License
                           Version 2.0, January 2004
2 NOTICE
@@ -1,5 +1,3 @@
Copyright 2012–2015 Dropbox, Inc., 2015–2021 Kandra Labs, Inc., and contributors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this project except in compliance with the License.
You may obtain a copy of the License at
121 README.md
@@ -1,81 +1,84 @@
# Zulip overview

[Zulip](https://zulip.com) is an open-source team collaboration tool with unique
[topic-based threading][why-zulip] that combines the best of email and chat to
make remote work productive and delightful. Fortune 500 companies, [leading open
source projects][rust-case-study], and thousands of other organizations use
Zulip every day. Zulip is the only [modern team chat app][features] that is
designed for both live and asynchronous conversations.

Zulip is a powerful, open source group chat application that combines the
immediacy of real-time chat with the productivity benefits of threaded
conversations. Zulip is used by open source projects, Fortune 500 companies,
large standards bodies, and others who need a real-time chat system that
allows users to easily process hundreds or thousands of messages a day. With
over 500 contributors merging over 500 commits a month, Zulip is also the
largest and fastest growing open source group chat project.

Zulip is built by a distributed community of developers from all around the
world, with 74+ people who have each contributed 100+ commits. With
over 1000 contributors merging over 500 commits a month, Zulip is the
largest and fastest growing open source team chat project.

Come find us on the [development community chat](https://zulip.com/development-community/)!

[](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amain)
[](https://codecov.io/gh/zulip/zulip)
[](https://circleci.com/gh/zulip/zulip)
[](https://travis-ci.org/zulip/zulip)
[](https://codecov.io/gh/zulip/zulip)
[][mypy-coverage]
[](https://github.com/psf/black)
[](https://github.com/prettier/prettier)
[](https://github.com/zulip/zulip/releases/latest)
[](https://zulip.readthedocs.io/en/latest/)
[](https://chat.zulip.org)
[](https://twitter.com/zulip)
[](https://github.com/sponsors/zulip)

[mypy-coverage]: https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/
[why-zulip]: https://zulip.com/why-zulip/
[rust-case-study]: https://zulip.com/case-studies/rust/
[features]: https://zulip.com/features/

## Getting started

- **Contributing code**. Check out our [guide for new
  contributors](https://zulip.readthedocs.io/en/latest/overview/contributing.html)
  to get started. We have invested into making Zulip’s code uniquely readable,
  well tested, and easy to modify. Beyond that, we have written an extraordinary
  150K words of documentation on how to contribute to Zulip.

Click on the appropriate link below. If nothing seems to apply,
join us on the
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html)
and tell us what's up!

- **Contributing non-code**. [Report an
  issue](https://zulip.readthedocs.io/en/latest/overview/contributing.html#reporting-issues),
  [translate](https://zulip.readthedocs.io/en/latest/translating/translating.html)
  Zulip into your language, or [give us
  feedback](https://zulip.readthedocs.io/en/latest/overview/contributing.html#user-feedback).
  We'd love to hear from you, whether you've been using Zulip for years, or are just
  trying it out for the first time.

You might be interested in:

- **Checking Zulip out**. The best way to see Zulip in action is to drop by the
  [Zulip community server](https://zulip.com/development-community/). We also
  recommend reading about Zulip's [unique
  approach](https://zulip.com/why-zulip/) to organizing conversations.

* **Contributing code**. Check out our
  [guide for new contributors](https://zulip.readthedocs.io/en/latest/overview/contributing.html)
  to get started. Zulip prides itself on maintaining a clean and
  well-tested codebase, and a stock of hundreds of
  [beginner-friendly issues][beginner-friendly].

- **Running a Zulip server**. Self host Zulip directly on Ubuntu or Debian
  Linux, in [Docker](https://github.com/zulip/docker-zulip), or with prebuilt
  images for [Digital Ocean](https://marketplace.digitalocean.com/apps/zulip) and
  [Render](https://render.com/docs/deploy-zulip).
  Learn more about [self-hosting Zulip](https://zulip.com/self-hosting/).

* **Contributing non-code**.
  [Report an issue](https://zulip.readthedocs.io/en/latest/overview/contributing.html#reporting-issue),
  [translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) Zulip
  into your language,
  [write](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach)
  for the Zulip blog, or
  [give us feedback](https://zulip.readthedocs.io/en/latest/overview/contributing.html#user-feedback). We
  would love to hear from you, even if you're just trying the product out.

- **Using Zulip without setting up a server**. Learn about [Zulip
  Cloud](https://zulip.com/plans/) hosting options. Zulip sponsors free [Zulip
  Cloud Standard](https://zulip.com/plans/) for hundreds of worthy
  organizations, including [fellow open-source
  projects](https://zulip.com/for/open-source/).

* **Supporting Zulip**. Advocate for your organization to use Zulip, write a
  review in the mobile app stores, or
  [upvote Zulip](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach) on
  product comparison sites.

- **Participating in [outreach
  programs](https://zulip.readthedocs.io/en/latest/overview/contributing.html#outreach-programs)**
  like [Google Summer of Code](https://developers.google.com/open-source/gsoc/)
  and [Outreachy](https://www.outreachy.org/).

* **Checking Zulip out**. The best way to see Zulip in action is to drop by
  the
  [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html). We
  also recommend reading Zulip for
  [open source](https://zulipchat.com/for/open-source/), Zulip for
  [companies](https://zulipchat.com/for/companies/), or Zulip for
  [working groups and part time communities](https://zulipchat.com/for/working-groups-and-communities/).

- **Supporting Zulip**. Advocate for your organization to use Zulip, become a
  [sponsor](https://github.com/sponsors/zulip), write a review in the mobile app
  stores, or [help others find
  Zulip](https://zulip.readthedocs.io/en/latest/overview/contributing.html#help-others-find-zulip).

* **Running a Zulip server**. Setting up a server takes just a couple
  of minutes. Zulip runs on Ubuntu 18.04 Bionic, Ubuntu 16.04 Xenial,
  Ubuntu 14.04 Trusty, and Debian 9 Stretch. The installation process is
  [documented here](https://zulip.readthedocs.io/en/stable/production/install.html).
  Commercial support is available; see <https://zulipchat.com/plans>
  for details.

You may also be interested in reading our [blog](https://blog.zulip.org/), and
following us on [Twitter](https://twitter.com/zulip) and
[LinkedIn](https://www.linkedin.com/company/zulip-project/).

* **Using Zulip without setting up a server**. <https://zulipchat.com> offers
  free and commercial hosting.

* **Applying for a Zulip internship**. Zulip runs internship programs with
  [Outreachy](https://www.outreachy.org/),
  [Google Summer of Code](https://developers.google.com/open-source/gsoc/),
  and the
  [MIT Externship program](https://alum.mit.edu/students/NetworkwithAlumni/ExternshipProgram). Zulip
  also participates in
  [Google Code-In](https://developers.google.com/open-source/gci/). More
  information is available
  [here](https://zulip.readthedocs.io/en/latest/overview/contributing.html#internship-programs).

You may also be interested in reading our [blog](http://blog.zulip.org/) or
following us on [twitter](https://twitter.com/zulip).

Zulip is distributed under the
[Apache 2.0](https://github.com/zulip/zulip/blob/main/LICENSE) license.

[beginner-friendly]: https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22
37 SECURITY.md
@@ -1,37 +0,0 @@
# Security policy

## Reporting a vulnerability

We love responsible reports of (potential) security issues in Zulip,
whether in the latest release or our development branch.

Our security contact is security@zulip.com. Reporters should expect a
response within 24 hours.

Please include details on the issue and how you'd like to be credited
in our release notes when we publish the fix.

Our [security model][security-model] document may be a helpful
resource.

## Security announcements

We send security announcements to our [announcement mailing
list](https://groups.google.com/g/zulip-announce). If you are running
Zulip in production, you should subscribe, by clicking "Join group" at
the top of that page.

## Supported versions

Zulip provides security support for the latest major release, in the
form of minor security/maintenance releases.

We work hard to make [upgrades][upgrades] reliable, so that there's no
reason to run older major releases.

See also our documentation on the [Zulip release
lifecycle][release-lifecycle].

[security-model]: https://zulip.readthedocs.io/en/latest/production/security-model.html
[upgrades]: https://zulip.readthedocs.io/en/latest/production/upgrade-or-modify.html#upgrading-to-a-release
[release-lifecycle]: https://zulip.readthedocs.io/en/latest/overview/release-lifecycle.html
222 Vagrantfile vendored
@@ -1,39 +1,102 @@
# -*- mode: ruby -*-

Vagrant.require_version ">= 2.2.6"
VAGRANTFILE_API_VERSION = "2"

def command?(name)
  `which #{name} > /dev/null 2>&1`
  $?.success?
end

if Vagrant::VERSION == "1.8.7" then
  path = `which curl`
  if path.include?('/opt/vagrant/embedded/bin/curl') then
    puts "In Vagrant 1.8.7, curl is broken. Please use Vagrant 2.0.2 "\
         "or run 'sudo rm -f /opt/vagrant/embedded/bin/curl' to fix the "\
         "issue before provisioning. See "\
         "https://github.com/mitchellh/vagrant/issues/7997 "\
         "for reference."
    exit
  end
end

# Workaround: the lxc-config in vagrant-lxc is incompatible with changes in
# LXC 2.1.0, found in Ubuntu 17.10 artful. LXC 2.1.1 (in 18.04 LTS bionic)
# ignores the old config key, so this will only be needed for artful.
#
# vagrant-lxc upstream has an attempted fix:
#   https://github.com/fgrehm/vagrant-lxc/issues/445
# but it didn't work in our testing. This is a temporary issue, so we just
# hack in a fix: we patch the skeleton `lxc-config` file right in the
# distribution of the vagrant-lxc "box" we use. If the user doesn't yet
# have the box (e.g. on first setup), Vagrant would download it but too
# late for us to patch it like this; so we prompt them to explicitly add it
# first and then rerun.
if ['up', 'provision'].include? ARGV[0]
  if command? "lxc-ls"
    LXC_VERSION = `lxc-ls --version`.strip unless defined? LXC_VERSION
    if LXC_VERSION == "2.1.0"
      lxc_config_file = ENV['HOME'] + "/.vagrant.d/boxes/fgrehm-VAGRANTSLASH-trusty64-lxc/1.2.0/lxc/lxc-config"
      if File.file?(lxc_config_file)
        lines = File.readlines(lxc_config_file)
        deprecated_line = "lxc.pivotdir = lxc_putold\n"
        if lines[1] == deprecated_line
          lines[1] = "# #{deprecated_line}"
          File.open(lxc_config_file, 'w') do |f|
            f.puts(lines)
          end
        end
      else
        puts 'You are running LXC 2.1.0, and fgrehm/trusty64-lxc box is incompatible '\
             "with it by default. First add the box by doing:\n"\
             "  vagrant box add https://vagrantcloud.com/fgrehm/trusty64-lxc\n"\
             'Once this command succeeds, do "vagrant up" again.'
        exit
      end
    end
  end
end

# Workaround: Vagrant removed the atlas.hashicorp.com to
# vagrantcloud.com redirect in February 2018. The value of
# DEFAULT_SERVER_URL in Vagrant versions less than 1.9.3 is
# atlas.hashicorp.com, which means that removal broke the fetching and
# updating of boxes (since the old URL doesn't work). See
# https://github.com/hashicorp/vagrant/issues/9442
if Vagrant::DEFAULT_SERVER_URL == "atlas.hashicorp.com"
  Vagrant::DEFAULT_SERVER_URL.replace('https://vagrantcloud.com')
end

Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|

  # For LXC. VirtualBox hosts use a different box, described below.
  config.vm.box = "fgrehm/trusty64-lxc"

Vagrant.configure("2") do |config|
  # The Zulip development environment runs on 9991 on the guest.
  host_port = 9991
  http_proxy = https_proxy = no_proxy = nil
  host_ip_addr = "127.0.0.1"

  # System settings for the virtual machine.
  vm_num_cpus = "2"
  vm_memory = "2048"

  debian_mirror = ""
  vboxadd_version = nil

  config.vm.synced_folder ".", "/vagrant", disabled: true
  config.vm.synced_folder ".", "/srv/zulip"
  if (/darwin/ =~ RUBY_PLATFORM) != nil
    config.vm.synced_folder ".", "/srv/zulip", type: "nfs",
                            linux__nfs_options: ['rw']
    config.vm.network "private_network", type: "dhcp"
  else
    config.vm.synced_folder ".", "/srv/zulip"
  end

  vagrant_config_file = ENV["HOME"] + "/.zulip-vagrant-config"
  vagrant_config_file = ENV['HOME'] + "/.zulip-vagrant-config"
  if File.file?(vagrant_config_file)
    IO.foreach(vagrant_config_file) do |line|
      line.chomp!
      key, value = line.split(nil, 2)
      case key
      when /^([#;]|$)/ # ignore comments
      when /^([#;]|$)/; # ignore comments
      when "HTTP_PROXY"; http_proxy = value
      when "HTTPS_PROXY"; https_proxy = value
      when "NO_PROXY"; no_proxy = value
      when "HOST_PORT"; host_port = value.to_i
      when "HOST_IP_ADDR"; host_ip_addr = value
      when "GUEST_CPUS"; vm_num_cpus = value
      when "GUEST_MEMORY_MB"; vm_memory = value
      when "DEBIAN_MIRROR"; debian_mirror = value
      when "VBOXADD_VERSION"; vboxadd_version = value
      end
    end
  end

@@ -51,58 +114,113 @@ Vagrant.configure("2") do |config|
  elsif !http_proxy.nil? or !https_proxy.nil?
    # This prints twice due to https://github.com/hashicorp/vagrant/issues/7504
    # We haven't figured out a workaround.
    puts "You have specified value for proxy in ~/.zulip-vagrant-config file but did not " \
         "install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install " \
         "vagrant-proxyconf` in a terminal. This error will appear twice."
    puts 'You have specified value for proxy in ~/.zulip-vagrant-config file but did not ' \
         'install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install ' \
         'vagrant-proxyconf` in a terminal. This error will appear twice.'
    exit
  end

  config.vm.network "forwarded_port", guest: 9991, host: host_port, host_ip: host_ip_addr
  config.vm.network "forwarded_port", guest: 9994, host: host_port + 3, host_ip: host_ip_addr
  # Specify Docker provider before VirtualBox provider so it's preferred.
  config.vm.provider "docker" do |d, override|
    d.build_dir = File.join(__dir__, "tools", "setup", "dev-vagrant-docker")
    d.build_args = ["--build-arg", "VAGRANT_UID=#{Process.uid}"]
    if !debian_mirror.empty?
      d.build_args += ["--build-arg", "DEBIAN_MIRROR=#{debian_mirror}"]
    end
  # Specify LXC provider before VirtualBox provider so it's preferred.
  config.vm.provider "lxc" do |lxc|
    if command? "lxc-ls"
      LXC_VERSION = `lxc-ls --version`.strip unless defined? LXC_VERSION
      if LXC_VERSION >= "1.1.0" and LXC_VERSION < "3.0.0"
        # Allow start without AppArmor, otherwise Box will not Start on Ubuntu 14.10
        # see https://github.com/fgrehm/vagrant-lxc/issues/333
        lxc.customize 'aa_allow_incomplete', 1
      end
      if LXC_VERSION >= "3.0.0"
        lxc.customize 'apparmor.allow_incomplete', 1
      end
      if LXC_VERSION >= "2.0.0"
        lxc.backingstore = 'dir'
      end
    end
    d.has_ssh = true
    d.create_args = ["--ulimit", "nofile=1024:65536"]
  end

  config.vm.provider "virtualbox" do |vb, override|
    override.vm.box = "bento/debian-10"
    override.vm.box = "ubuntu/trusty64"
    # It's possible we can get away with just 1.5GB; more testing needed
    vb.memory = vm_memory
    vb.cpus = vm_num_cpus

    if !vboxadd_version.nil?
      override.vbguest.installer = Class.new(VagrantVbguest::Installers::Debian) do
        define_method(:host_version) do |reload = false|
          VagrantVbguest::Version(vboxadd_version)
        end
      end
      override.vbguest.allow_downgrade = true
      override.vbguest.iso_path = "https://download.virtualbox.org/virtualbox/#{vboxadd_version}/VBoxGuestAdditions_#{vboxadd_version}.iso"
    end
    vb.memory = 2048
    vb.cpus = 2
  end

  config.vm.provider "hyperv" do |h, override|
    override.vm.box = "bento/debian-10"
    h.memory = vm_memory
    h.maxmemory = vm_memory
    h.cpus = vm_num_cpus
  config.vm.provider "vmware_fusion" do |vb, override|
    override.vm.box = "puphpet/ubuntu1404-x64"
    vb.vmx["memsize"] = "2048"
    vb.vmx["numvcpus"] = "2"
  end

  config.vm.provider "parallels" do |prl, override|
    override.vm.box = "bento/debian-10"
    prl.memory = vm_memory
    prl.cpus = vm_num_cpus
  end

$provision_script = <<SCRIPT
set -x
set -e
set -o pipefail

# Code should go here, rather than tools/provision, only if it is
# something that we don't want to happen when running provision in a
# development environment not using Vagrant.

# Set the MOTD on the system to have Zulip instructions
sudo rm -f /etc/update-motd.d/*
sudo bash -c 'cat << EndOfMessage > /etc/motd
Welcome to the Zulip development environment! Popular commands:
* tools/provision - Update the development environment
* tools/run-dev.py - Run the development server
* tools/lint - Run the linter (quick and catches many problems)
* tools/test-* - Run tests (use --help to learn about options)

Read https://zulip.readthedocs.io/en/latest/testing/testing.html to learn
how to run individual test suites so that you can get a fast debug cycle.

EndOfMessage'

# If the host is running SELinux remount the /sys/fs/selinux directory as read only,
# needed for apt-get to work.
if [ -d "/sys/fs/selinux" ]; then
  sudo mount -o remount,ro /sys/fs/selinux
fi

# Set default locale, this prevents errors if the user has another locale set.
if ! grep -q 'LC_ALL=en_US.UTF-8' /etc/default/locale; then
  echo "LC_ALL=en_US.UTF-8" | sudo tee -a /etc/default/locale
fi

# Set an environment variable, so that we won't print the virtualenv
# shell warning (it'll be wrong, since the shell is dying anyway)
export SKIP_VENV_SHELL_WARNING=1

# End `set -x`, so that the end of provision doesn't look like an error
# message after a successful run.
set +x

# Check if the zulip directory is writable
if [ ! -w /srv/zulip ]; then
  echo "The vagrant user is unable to write to the zulip directory."
  echo "To fix this, run the following commands on the host machine:"
  # sudo is required since our uid is not 1000
  echo '    vagrant halt -f'
  echo '    rm -rf /PATH/TO/ZULIP/CLONE/.vagrant'
  echo '    sudo chown -R 1000:$(id -g) /PATH/TO/ZULIP/CLONE'
  echo "Replace /PATH/TO/ZULIP/CLONE with the path to where zulip code is cloned."
  echo "You can resume setting up your vagrant environment by running:"
  echo "    vagrant up"
  exit 1
fi
# Provision the development environment
ln -nsf /srv/zulip ~/zulip
/srv/zulip/tools/provision

# Run any custom provision hooks the user has configured
if [ -f /srv/zulip/tools/custom_provision ]; then
  chmod +x /srv/zulip/tools/custom_provision
  /srv/zulip/tools/custom_provision
fi
SCRIPT

  config.vm.provision "shell",
    # We want provision to be run with the permissions of the vagrant user.
    privileged: false,
    path: "tools/setup/vagrant-provision",
    env: { "DEBIAN_MIRROR" => debian_mirror }
    inline: $provision_script
end
File diff suppressed because it is too large
@@ -4,19 +4,11 @@ from typing import List

from analytics.lib.counts import CountStat


def generate_time_series_data(
    days: int = 100,
    business_hours_base: float = 10,
    non_business_hours_base: float = 10,
    growth: float = 1,
    autocorrelation: float = 0,
    spikiness: float = 1,
    holiday_rate: float = 0,
    frequency: str = CountStat.DAY,
    partial_sum: bool = False,
    random_seed: int = 26,
) -> List[int]:
def generate_time_series_data(days: int=100, business_hours_base: float=10,
                              non_business_hours_base: float=10, growth: float=1,
                              autocorrelation: float=0, spikiness: float=1,
                              holiday_rate: float=0, frequency: str=CountStat.DAY,
                              partial_sum: bool=False, random_seed: int=26) -> List[int]:
    """
    Generate semi-realistic looking time series data for testing analytics graphs.

@@ -37,43 +29,35 @@ def generate_time_series_data(
    random_seed -- Seed for random number generator.
    """
    if frequency == CountStat.HOUR:
        length = days * 24
        length = days*24
        seasonality = [non_business_hours_base] * 24 * 7
        for day in range(5):
            for hour in range(8):
                seasonality[24 * day + hour] = business_hours_base
        holidays = []
                seasonality[24*day + hour] = business_hours_base
        holidays = []
        for i in range(days):
            holidays.extend([random() < holiday_rate] * 24)
    elif frequency == CountStat.DAY:
        length = days
        seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [
            24 * non_business_hours_base
        ] * 2
        seasonality = [8*business_hours_base + 16*non_business_hours_base] * 5 + \
                      [24*non_business_hours_base] * 2
        holidays = [random() < holiday_rate for i in range(days)]
    else:
        raise AssertionError(f"Unknown frequency: {frequency}")
        raise AssertionError("Unknown frequency: %s" % (frequency,))
    if length < 2:
        raise AssertionError(
            f"Must be generating at least 2 data points. Currently generating {length}"
        )
    growth_base = growth ** (1.0 / (length - 1))
    values_no_noise = [
        seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)
    ]
        raise AssertionError("Must be generating at least 2 data points. "
                             "Currently generating %s" % (length,))
    growth_base = growth ** (1. / (length-1))
    values_no_noise = [seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)]

    seed(random_seed)
    noise_scalars = [gauss(0, 1)]
    for i in range(1, length):
        noise_scalars.append(
            noise_scalars[-1] * autocorrelation + gauss(0, 1) * (1 - autocorrelation)
        )
        noise_scalars.append(noise_scalars[-1]*autocorrelation + gauss(0, 1)*(1-autocorrelation))

    values = [
        0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
        for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)
    ]
    values = [0 if holiday else int(v + sqrt(v)*noise_scalar*spikiness)
              for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)]
    if partial_sum:
        for i in range(1, length):
            values[i] = values[i - 1] + values[i]
            values[i] = values[i-1] + values[i]
    return [max(v, 0) for v in values]
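A quick sketch of how the generator above is typically driven; the import paths come from the diff itself, but the parameter values here are made up for demonstration.

```python
# Illustrative driver for generate_time_series_data(); the argument
# values are invented, not taken from Zulip's own callers.
from analytics.lib.counts import CountStat
from analytics.lib.fixtures import generate_time_series_data

# 30 days of daily counts: busier on business days, roughly doubling
# over the period, moderately autocorrelated noise, occasional
# zeroed-out "holidays".
values = generate_time_series_data(
    days=30,
    business_hours_base=20,
    non_business_hours_base=5,
    growth=2,
    autocorrelation=0.5,
    spikiness=2,
    holiday_rate=0.05,
    frequency=CountStat.DAY,
)
assert len(values) == 30
assert all(v >= 0 for v in values)  # the final clamp keeps values non-negative
```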
@@ -4,14 +4,12 @@ from typing import List, Optional
from analytics.lib.counts import CountStat
from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC


# If min_length is None, returns end_times from ceiling(start) to floor(end), inclusive.
# If min_length is greater than 0, pads the list to the left.
# So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22],
# and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
def time_range(
    start: datetime, end: datetime, frequency: str, min_length: Optional[int]
) -> List[datetime]:
def time_range(start: datetime, end: datetime, frequency: str,
               min_length: Optional[int]) -> List[datetime]:
    verify_UTC(start)
    verify_UTC(end)
    if frequency == CountStat.HOUR:
@@ -21,11 +19,11 @@ def time_range(
        end = floor_to_day(end)
        step = timedelta(days=1)
    else:
        raise AssertionError(f"Unknown frequency: {frequency}")
        raise AssertionError("Unknown frequency: %s" % (frequency,))

    times = []
    if min_length is not None:
        start = min(start, end - (min_length - 1) * step)
        start = min(start, end - (min_length-1)*step)
    current = end
    while current >= start:
        times.append(current)
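The padding semantics described in the comment above are easiest to see with concrete dates. A minimal sketch, assuming UTC-aware stdlib datetimes are acceptable to verify_UTC (the dates are arbitrary, and only list lengths are checked):

```python
# Sketch of the time_range() padding behavior documented above;
# dates are arbitrary examples.
from datetime import datetime, timezone
from analytics.lib.counts import CountStat
from analytics.lib.time_utils import time_range

start = datetime(2013, 9, 20, tzinfo=timezone.utc)
end = datetime(2013, 9, 22, tzinfo=timezone.utc)

# min_length=None: ceiling(start) .. floor(end), inclusive -> 3 end_times.
assert len(time_range(start, end, CountStat.DAY, None)) == 3

# min_length=5 pads on the left: Sep 18 .. Sep 22 -> 5 end_times.
assert len(time_range(start, end, CountStat.DAY, 5)) == 5
```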
81 analytics/management/commands/analyze_mit.py Normal file
@@ -0,0 +1,81 @@
import datetime
import logging
import time
from typing import Any, Dict

from django.core.management.base import BaseCommand, CommandParser

from zerver.lib.timestamp import timestamp_to_datetime
from zerver.models import Message, Recipient

def compute_stats(log_level: int) -> None:
    logger = logging.getLogger()
    logger.setLevel(log_level)

    one_week_ago = timestamp_to_datetime(time.time()) - datetime.timedelta(weeks=1)
    mit_query = Message.objects.filter(sender__realm__string_id="zephyr",
                                       recipient__type=Recipient.STREAM,
                                       pub_date__gt=one_week_ago)
    for bot_sender_start in ["imap.", "rcmd.", "sys."]:
        mit_query = mit_query.exclude(sender__email__startswith=(bot_sender_start))
    # Filtering for "/" covers tabbott/extra@ and all the daemon/foo bots.
    mit_query = mit_query.exclude(sender__email__contains=("/"))
    mit_query = mit_query.exclude(sender__email__contains=("aim.com"))
    mit_query = mit_query.exclude(
        sender__email__in=["rss@mit.edu", "bash@mit.edu", "apache@mit.edu",
                           "bitcoin@mit.edu", "lp@mit.edu", "clocks@mit.edu",
                           "root@mit.edu", "nagios@mit.edu",
                           "www-data|local-realm@mit.edu"])
    user_counts = {}  # type: Dict[str, Dict[str, int]]
    for m in mit_query.select_related("sending_client", "sender"):
        email = m.sender.email
        user_counts.setdefault(email, {})
        user_counts[email].setdefault(m.sending_client.name, 0)
        user_counts[email][m.sending_client.name] += 1

    total_counts = {}  # type: Dict[str, int]
    total_user_counts = {}  # type: Dict[str, int]
    for email, counts in user_counts.items():
        total_user_counts.setdefault(email, 0)
        for client_name, count in counts.items():
            total_counts.setdefault(client_name, 0)
            total_counts[client_name] += count
            total_user_counts[email] += count

    logging.debug("%40s | %10s | %s" % ("User", "Messages", "Percentage Zulip"))
    top_percents = {}  # type: Dict[int, float]
    for size in [10, 25, 50, 100, 200, len(total_user_counts.keys())]:
        top_percents[size] = 0.0
    for i, email in enumerate(sorted(total_user_counts.keys(),
                                     key=lambda x: -total_user_counts[x])):
        percent_zulip = round(100 - (user_counts[email].get("zephyr_mirror", 0)) * 100. /
                              total_user_counts[email], 1)
        for size in top_percents.keys():
            top_percents.setdefault(size, 0)
            if i < size:
                top_percents[size] += (percent_zulip * 1.0 / size)

        logging.debug("%40s | %10s | %s%%" % (email, total_user_counts[email],
                                              percent_zulip))

    logging.info("")
    for size in sorted(top_percents.keys()):
        logging.info("Top %6s | %s%%" % (size, round(top_percents[size], 1)))

    grand_total = sum(total_counts.values())
    print(grand_total)
    logging.info("%15s | %s" % ("Client", "Percentage"))
    for client in total_counts.keys():
        logging.info("%15s | %s%%" % (client, round(100. * total_counts[client] / grand_total, 1)))

class Command(BaseCommand):
    help = "Compute statistics on MIT Zephyr usage."

    def add_arguments(self, parser: CommandParser) -> None:
        parser.add_argument('--verbose', default=False, action='store_true')

    def handle(self, *args: Any, **options: Any) -> None:
        level = logging.INFO
        if options["verbose"]:
            level = logging.DEBUG
        compute_stats(level)
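A worked example of the percent_zulip calculation above, with made-up numbers: messages sent through the zephyr_mirror client are treated as non-Zulip usage, and everything else counts toward the percentage.

```python
# Made-up numbers illustrating the percent_zulip formula above.
counts = {"zephyr_mirror": 80, "website": 15, "API": 5}
total = sum(counts.values())  # 100 messages in the window
percent_zulip = round(100 - counts.get("zephyr_mirror", 0) * 100.0 / total, 1)
assert percent_zulip == 20.0  # 20% of traffic came from Zulip clients
```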
56 analytics/management/commands/analyze_user_activity.py Normal file
@@ -0,0 +1,56 @@
import datetime
from typing import Any, Dict

from django.core.management.base import BaseCommand, CommandParser
from django.utils.timezone import utc

from zerver.lib.statistics import seconds_usage_between
from zerver.models import UserProfile

def analyze_activity(options: Dict[str, Any]) -> None:
    day_start = datetime.datetime.strptime(options["date"], "%Y-%m-%d").replace(tzinfo=utc)
    day_end = day_start + datetime.timedelta(days=options["duration"])

    user_profile_query = UserProfile.objects.all()
    if options["realm"]:
        user_profile_query = user_profile_query.filter(realm__string_id=options["realm"])

    print("Per-user online duration:\n")
    total_duration = datetime.timedelta(0)
    for user_profile in user_profile_query:
        duration = seconds_usage_between(user_profile, day_start, day_end)

        if duration == datetime.timedelta(0):
            continue

        total_duration += duration
        print("%-*s%s" % (37, user_profile.email, duration,))

    print("\nTotal Duration: %s" % (total_duration,))
    print("\nTotal Duration in minutes: %s" % (total_duration.total_seconds() / 60.,))
    print("Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,))

class Command(BaseCommand):
    help = """Report analytics of user activity on a per-user and realm basis.

This command aggregates user activity data that is collected by each user using Zulip. It attempts
to approximate how much each user has been using Zulip per day, measured by recording each 15 minute
period where some activity has occurred (mouse move or keyboard activity).

It will correctly not count server-initiated reloads in the activity statistics.

The duration flag can be used to control how many days to show usage duration for.

Usage: ./manage.py analyze_user_activity [--realm=zulip] [--date=2013-09-10] [--duration=1]

By default, if no date is selected 2013-09-10 is used. If no realm is provided, information
is shown for all realms."""

    def add_arguments(self, parser: CommandParser) -> None:
        parser.add_argument('--realm', action='store')
        parser.add_argument('--date', action='store', default="2013-09-06")
        parser.add_argument('--duration', action='store', default=1, type=int,
                            help="How many days to show usage information for")

    def handle(self, *args: Any, **options: Any) -> None:
        analyze_activity(options)
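The closing print statements above amount to simple unit conversions; a worked example with an invented total (two hours of measured activity over a one-day window):

```python
# Invented total illustrating the conversions printed above.
import datetime

total_duration = datetime.timedelta(hours=2)
minutes = total_duration.total_seconds() / 60.0  # 120.0 minutes
# Amortizing a one-day sample to a month multiplies by 30.
amortized_minutes = total_duration.total_seconds() * 30.0 / 60.0
assert minutes == 120.0 and amortized_minutes == 3600.0
```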
@@ -1,24 +1,26 @@
import os
import time
from datetime import timedelta
from typing import Any, Dict

from django.core.management.base import BaseCommand
from django.utils.timezone import now as timezone_now

from analytics.models import installation_epoch, \
    last_successful_fill
from analytics.lib.counts import COUNT_STATS, CountStat
from analytics.models import installation_epoch
from zerver.lib.timestamp import TimeZoneNotUTCException, floor_to_day, floor_to_hour, verify_UTC
from zerver.lib.timestamp import floor_to_hour, floor_to_day, verify_UTC, \
    TimezoneNotUTCException
from zerver.models import Realm

import os
import time
from typing import Any, Dict

states = {
    0: "OK",
    1: "WARNING",
    2: "CRITICAL",
    3: "UNKNOWN",
    3: "UNKNOWN"
}

class Command(BaseCommand):
    help = """Checks FillState table.

@@ -26,30 +28,31 @@ class Command(BaseCommand):

    def handle(self, *args: Any, **options: Any) -> None:
        fill_state = self.get_fill_state()
        status = fill_state["status"]
        message = fill_state["message"]
        status = fill_state['status']
        message = fill_state['message']

        state_file_path = "/var/lib/nagios_state/check-analytics-state"
        state_file_tmp = state_file_path + "-tmp"

        with open(state_file_tmp, "w") as f:
            f.write(f"{int(time.time())}|{status}|{states[status]}|{message}\n")
            f.write("%s|%s|%s|%s\n" % (
                int(time.time()), status, states[status], message))
        os.rename(state_file_tmp, state_file_path)

    def get_fill_state(self) -> Dict[str, Any]:
        if not Realm.objects.exists():
            return {"status": 0, "message": "No realms exist, so not checking FillState."}
            return {'status': 0, 'message': 'No realms exist, so not checking FillState.'}

        warning_unfilled_properties = []
        critical_unfilled_properties = []
        for property, stat in COUNT_STATS.items():
            last_fill = stat.last_successful_fill()
            last_fill = last_successful_fill(property)
            if last_fill is None:
                last_fill = installation_epoch()
            try:
                verify_UTC(last_fill)
            except TimeZoneNotUTCException:
                return {"status": 2, "message": f"FillState not in UTC for {property}"}
            except TimezoneNotUTCException:
                return {'status': 2, 'message': 'FillState not in UTC for %s' % (property,)}

            if stat.frequency == CountStat.DAY:
                floor_function = floor_to_day
@@ -61,10 +64,8 @@ class Command(BaseCommand):
                critical_threshold = timedelta(minutes=150)

            if floor_function(last_fill) != last_fill:
                return {
                    "status": 2,
                    "message": f"FillState not on {stat.frequency} boundary for {property}",
                }
                return {'status': 2, 'message': 'FillState not on %s boundary for %s' %
                        (stat.frequency, property)}

            time_to_last_fill = timezone_now() - last_fill
            if time_to_last_fill > critical_threshold:
@@ -73,18 +74,9 @@ class Command(BaseCommand):
                warning_unfilled_properties.append(property)

        if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0:
            return {"status": 0, "message": "FillState looks fine."}
            return {'status': 0, 'message': 'FillState looks fine.'}
        if len(critical_unfilled_properties) == 0:
            return {
                "status": 1,
                "message": "Missed filling {} once.".format(
                    ", ".join(warning_unfilled_properties),
                ),
            }
        return {
            "status": 2,
            "message": "Missed filling {} once. Missed filling {} at least twice.".format(
                ", ".join(warning_unfilled_properties),
                ", ".join(critical_unfilled_properties),
            ),
        }
        return {'status': 1, 'message': 'Missed filling %s once.' %
                (', '.join(warning_unfilled_properties),)}
        return {'status': 2, 'message': 'Missed filling %s once. Missed filling %s at least twice.' %
                (', '.join(warning_unfilled_properties), ', '.join(critical_unfilled_properties))}
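For reference, the state file written by handle() holds a single pipe-delimited line. A sketch of how a monitoring-side check might read it back (the timestamp value is invented):

```python
# Sketch of parsing the pipe-delimited state line written by handle();
# the timestamp is an invented example.
line = "1500000000|0|OK|FillState looks fine.\n"
ts, status, state_name, message = line.rstrip("\n").split("|", 3)
assert int(status) == 0 and state_name == "OK"
assert message == "FillState looks fine."
```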
@@ -1,21 +1,22 @@
import sys
from argparse import ArgumentParser
from typing import Any

from django.core.management.base import BaseCommand, CommandError
from django.core.management.base import BaseCommand

from analytics.lib.counts import do_drop_all_analytics_tables


class Command(BaseCommand):
    help = """Clear analytics tables."""

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument("--force", action="store_true", help="Clear analytics tables.")
        parser.add_argument('--force',
                            action='store_true',
                            help="Clear analytics tables.")

    def handle(self, *args: Any, **options: Any) -> None:
        if options["force"]:
        if options['force']:
            do_drop_all_analytics_tables()
        else:
            raise CommandError(
                "Would delete all data from analytics tables (!); use --force to do so."
            )
            print("Would delete all data from analytics tables (!); use --force to do so.")
            sys.exit(1)
@@ -1,23 +1,29 @@
import sys
from argparse import ArgumentParser
from typing import Any

from django.core.management.base import BaseCommand, CommandError
from django.core.management.base import BaseCommand

from analytics.lib.counts import COUNT_STATS, do_drop_single_stat


class Command(BaseCommand):
    help = """Clear analytics tables."""

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument("--force", action="store_true", help="Actually do it.")
        parser.add_argument("--property", help="The property of the stat to be cleared.")
        parser.add_argument('--force',
                            action='store_true',
                            help="Actually do it.")
        parser.add_argument('--property',
                            type=str,
                            help="The property of the stat to be cleared.")

    def handle(self, *args: Any, **options: Any) -> None:
        property = options["property"]
        property = options['property']
        if property not in COUNT_STATS:
            raise CommandError(f"Invalid property: {property}")
        if not options["force"]:
            raise CommandError("No action taken. Use --force.")
            print("Invalid property: %s" % (property,))
            sys.exit(1)
        if not options['force']:
            print("No action taken. Use --force.")
            sys.exit(1)

        do_drop_single_stat(property)
73 analytics/management/commands/client_activity.py Normal file
@@ -0,0 +1,73 @@
import datetime
from argparse import ArgumentParser
from typing import Any

from django.db.models import Count, QuerySet
from django.utils.timezone import now as timezone_now

from zerver.lib.management import ZulipBaseCommand
from zerver.models import UserActivity

class Command(ZulipBaseCommand):
    help = """Report rough client activity globally, for a realm, or for a user

Usage examples:

./manage.py client_activity --target server
./manage.py client_activity --target realm --realm zulip
./manage.py client_activity --target user --user hamlet@zulip.com --realm zulip"""

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument('--target', dest='target', required=True, type=str,
                            help="'server' will calculate client activity of the entire server. "
                                 "'realm' will calculate client activity of realm. "
                                 "'user' will calculate client activity of the user.")
        parser.add_argument('--user', dest='user', type=str,
                            help="The email address of the user you want to calculate activity.")
        self.add_realm_args(parser)

    def compute_activity(self, user_activity_objects: QuerySet) -> None:
        # Report data from the past week.
        #
        # This is a rough report of client activity because we inconsistently
        # register activity from various clients; think of it as telling you
        # approximately how many people from a group have used a particular
        # client recently. For example, this might be useful to get a sense of
        # how popular different versions of a desktop client are.
        #
        # Importantly, this does NOT tell you anything about the relative
        # volumes of requests from clients.
        threshold = timezone_now() - datetime.timedelta(days=7)
        client_counts = user_activity_objects.filter(
            last_visit__gt=threshold).values("client__name").annotate(
            count=Count('client__name'))

        total = 0
        counts = []
        for client_type in client_counts:
            count = client_type["count"]
            client = client_type["client__name"]
            total += count
            counts.append((count, client))

        counts.sort()

        for count in counts:
            print("%25s %15d" % (count[1], count[0]))
        print("Total:", total)

    def handle(self, *args: Any, **options: str) -> None:
        realm = self.get_realm(options)
        if options["user"] is None:
            if options["target"] == "server" and realm is None:
                # Report global activity.
                self.compute_activity(UserActivity.objects.all())
            elif options["target"] == "realm" and realm is not None:
                self.compute_activity(UserActivity.objects.filter(user_profile__realm=realm))
            else:
                self.print_help("./manage.py", "client_activity")
        elif options["target"] == "user":
            user_profile = self.get_user(options["user"], realm)
            self.compute_activity(UserActivity.objects.filter(user_profile=user_profile))
        else:
            self.print_help("./manage.py", "client_activity")
@@ -1,28 +1,20 @@
|
||||
from datetime import timedelta
|
||||
from typing import Any, Dict, List, Mapping, Type, Union
|
||||
from unittest import mock
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Dict, List, Mapping, Optional, Type
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils.timezone import now as timezone_now
|
||||
|
||||
from analytics.lib.counts import COUNT_STATS, CountStat, do_drop_all_analytics_tables
|
||||
from analytics.lib.counts import COUNT_STATS, \
|
||||
CountStat, do_drop_all_analytics_tables
|
||||
from analytics.lib.fixtures import generate_time_series_data
|
||||
from analytics.lib.time_utils import time_range
|
||||
from analytics.models import (
|
||||
BaseCount,
|
||||
FillState,
|
||||
InstallationCount,
|
||||
RealmCount,
|
||||
StreamCount,
|
||||
UserCount,
|
||||
)
|
||||
from zerver.actions.create_realm import do_create_realm
|
||||
from zerver.actions.users import do_change_user_role
|
||||
from zerver.lib.create_user import create_user
|
||||
from zerver.lib.stream_color import STREAM_ASSIGNMENT_COLORS
|
||||
from analytics.models import BaseCount, FillState, RealmCount, UserCount, \
|
||||
StreamCount, InstallationCount
|
||||
from zerver.lib.actions import do_change_is_admin, STREAM_ASSIGNMENT_COLORS
|
||||
from zerver.lib.timestamp import floor_to_day
|
||||
from zerver.models import Client, Realm, Recipient, Stream, Subscription, UserProfile
|
||||
|
||||
from zerver.models import Realm, UserProfile, Stream, Client, \
|
||||
RealmAuditLog, Recipient, Subscription
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = """Populates analytics tables with randomly generated data."""
|
||||
@@ -30,30 +22,30 @@ class Command(BaseCommand):
|
||||
DAYS_OF_DATA = 100
|
||||
random_seed = 26
|
||||
|
||||
def generate_fixture_data(
|
||||
self,
|
||||
stat: CountStat,
|
||||
business_hours_base: float,
|
||||
non_business_hours_base: float,
|
||||
growth: float,
|
||||
autocorrelation: float,
|
||||
spikiness: float,
|
||||
holiday_rate: float = 0,
|
||||
partial_sum: bool = False,
|
||||
) -> List[int]:
|
||||
def create_user(self, email: str,
|
||||
full_name: str,
|
||||
is_staff: bool,
|
||||
date_joined: datetime,
|
||||
realm: Realm) -> UserProfile:
|
||||
user = UserProfile.objects.create(
|
||||
email=email, full_name=full_name, is_staff=is_staff,
|
||||
realm=realm, short_name=full_name, pointer=-1, last_pointer_updater='none',
|
||||
api_key='42', date_joined=date_joined)
|
||||
RealmAuditLog.objects.create(
|
||||
realm=realm, modified_user=user, event_type=RealmAuditLog.USER_CREATED,
|
||||
event_time=user.date_joined)
|
||||
return user
|
||||
|
||||
def generate_fixture_data(self, stat: CountStat, business_hours_base: float,
|
||||
non_business_hours_base: float, growth: float,
|
||||
autocorrelation: float, spikiness: float,
|
||||
holiday_rate: float=0, partial_sum: bool=False) -> List[int]:
|
||||
self.random_seed += 1
|
||||
return generate_time_series_data(
|
||||
days=self.DAYS_OF_DATA,
|
||||
business_hours_base=business_hours_base,
|
||||
non_business_hours_base=non_business_hours_base,
|
||||
growth=growth,
|
||||
autocorrelation=autocorrelation,
|
||||
spikiness=spikiness,
|
||||
holiday_rate=holiday_rate,
|
||||
frequency=stat.frequency,
|
||||
partial_sum=partial_sum,
|
||||
random_seed=self.random_seed,
|
||||
)
|
||||
days=self.DAYS_OF_DATA, business_hours_base=business_hours_base,
|
||||
non_business_hours_base=non_business_hours_base, growth=growth,
|
||||
autocorrelation=autocorrelation, spikiness=spikiness, holiday_rate=holiday_rate,
|
||||
frequency=stat.frequency, partial_sum=partial_sum, random_seed=self.random_seed)
|
||||
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
# TODO: This should arguably only delete the objects
|
||||
@@ -61,7 +53,7 @@ class Command(BaseCommand):
|
||||
do_drop_all_analytics_tables()
|
||||
|
||||
# This also deletes any objects with this realm as a foreign key
|
||||
Realm.objects.filter(string_id="analytics").delete()
|
||||
Realm.objects.filter(string_id='analytics').delete()
|
||||
|
||||
# Because we just deleted a bunch of objects in the database
# directly (rather than deleting individual objects in Django,
@@ -70,237 +62,163 @@ class Command(BaseCommand):
# memcached in order to ensure deleted objects aren't still
# present in the memcached cache.
from zerver.apps import flush_cache

flush_cache(None)

installation_time = timezone_now() - timedelta(days=self.DAYS_OF_DATA)
last_end_time = floor_to_day(timezone_now())
realm = do_create_realm(
    string_id="analytics", name="Analytics", date_created=installation_time
)

with mock.patch("zerver.lib.create_user.timezone_now", return_value=installation_time):
    shylock = create_user(
        "shylock@analytics.ds",
        "Shylock",
        realm,
        full_name="Shylock",
        role=UserProfile.ROLE_REALM_OWNER,
    )
do_change_user_role(shylock, UserProfile.ROLE_REALM_OWNER, acting_user=None)
stream = Stream.objects.create(name="all", realm=realm, date_created=installation_time)
realm = Realm.objects.create(
    string_id='analytics', name='Analytics', date_created=installation_time)
shylock = self.create_user('shylock@analytics.ds', 'Shylock', True, installation_time, realm)
do_change_is_admin(shylock, True)
stream = Stream.objects.create(
    name='all', realm=realm, date_created=installation_time)
recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
stream.recipient = recipient
stream.save(update_fields=["recipient"])

# Subscribe shylock to the stream to avoid invariant failures.
# TODO: This should use subscribe_users_to_streams from populate_db.
subs = [
    Subscription(
        recipient=recipient,
        user_profile=shylock,
        is_user_active=shylock.is_active,
        color=STREAM_ASSIGNMENT_COLORS[0],
    ),
    Subscription(recipient=recipient,
                 user_profile=shylock,
                 color=STREAM_ASSIGNMENT_COLORS[0]),
]
Subscription.objects.bulk_create(subs)

FixtureData = Mapping[Union[str, int, None], List[int]]

def insert_fixture_data(
    stat: CountStat,
    fixture_data: FixtureData,
    table: Type[BaseCount],
) -> None:
    end_times = time_range(
        last_end_time, last_end_time, stat.frequency, len(list(fixture_data.values())[0])
    )
def insert_fixture_data(stat: CountStat,
                        fixture_data: Mapping[Optional[str], List[int]],
                        table: Type[BaseCount]) -> None:
    end_times = time_range(last_end_time, last_end_time, stat.frequency,
                           len(list(fixture_data.values())[0]))
    if table == InstallationCount:
        id_args: Dict[str, Any] = {}
        id_args = {}  # type: Dict[str, Any]
    if table == RealmCount:
        id_args = {"realm": realm}
        id_args = {'realm': realm}
    if table == UserCount:
        id_args = {"realm": realm, "user": shylock}
        id_args = {'realm': realm, 'user': shylock}
    if table == StreamCount:
        id_args = {"stream": stream, "realm": realm}
        id_args = {'stream': stream, 'realm': realm}

    for subgroup, values in fixture_data.items():
        table.objects.bulk_create(
            table(
                property=stat.property,
                subgroup=subgroup,
                end_time=end_time,
                value=value,
                **id_args,
            )
            for end_time, value in zip(end_times, values)
            if value != 0
        )
        table.objects.bulk_create([
            table(property=stat.property, subgroup=subgroup, end_time=end_time,
                  value=value, **id_args)
            for end_time, value in zip(end_times, values) if value != 0])
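A note on the helper above: it creates one row per non-zero value, pairing each value with an end time produced by time_range. A sketch of the pairing (values are illustrative):

# Illustrative only: with end_times = [t1, t2, t3] and
# fixture_data = {None: [0, 5, 2]}, the bulk_create above inserts
# rows (end_time=t2, value=5) and (end_time=t3, value=2);
# zero-valued points are skipped entirely.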

stat = COUNT_STATS["1day_actives::day"]
realm_data: FixtureData = {
    None: self.generate_fixture_data(stat, 0.08, 0.02, 3, 0.3, 6, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data: FixtureData = {
    None: self.generate_fixture_data(stat, 0.8, 0.2, 4, 0.3, 6, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
    property=stat.property, end_time=last_end_time, state=FillState.DONE
)

stat = COUNT_STATS["7day_actives::day"]
stat = COUNT_STATS['1day_actives::day']
realm_data = {
    None: self.generate_fixture_data(stat, 0.2, 0.07, 3, 0.3, 6, partial_sum=True),
    None: self.generate_fixture_data(stat, .08, .02, 3, .3, 6, partial_sum=True),
}  # type: Mapping[Optional[str], List[int]]
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
    None: self.generate_fixture_data(stat, .8, .2, 4, .3, 6, partial_sum=True),
}  # type: Mapping[Optional[str], List[int]]
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(property=stat.property, end_time=last_end_time,
                         state=FillState.DONE)
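The positional arguments to generate_fixture_data are easier to follow with names attached. An annotated sketch of the first call above; the parameter names are assumptions added for readability, not taken from the diff:

self.generate_fixture_data(
    stat,              # the CountStat being populated
    0.08,              # assumed: base rate during business hours
    0.02,              # assumed: base rate outside business hours
    3,                 # assumed: growth over the simulated window
    0.3,               # assumed: autocorrelation between adjacent points
    6,                 # assumed: spikiness of the series
    partial_sum=True,  # cumulative series, as fits an active-user count
)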

stat = COUNT_STATS['realm_active_humans::day']
realm_data = {
    None: self.generate_fixture_data(stat, .1, .03, 3, .5, 3, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
    None: self.generate_fixture_data(stat, 2, 0.7, 4, 0.3, 6, partial_sum=True),
    None: self.generate_fixture_data(stat, 1, .3, 4, .5, 3, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
    property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
                         state=FillState.DONE)

stat = COUNT_STATS["realm_active_humans::day"]
stat = COUNT_STATS['active_users_audit:is_bot:day']
realm_data = {
    None: self.generate_fixture_data(stat, 0.8, 0.08, 3, 0.5, 3, partial_sum=True),
    'false': self.generate_fixture_data(stat, .1, .03, 3.5, .8, 2, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
    None: self.generate_fixture_data(stat, 1, 0.3, 4, 0.5, 3, partial_sum=True),
    'false': self.generate_fixture_data(stat, 1, .3, 6, .8, 2, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
    property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
                         state=FillState.DONE)

stat = COUNT_STATS["active_users_audit:is_bot:day"]
realm_data = {
    "false": self.generate_fixture_data(stat, 1, 0.2, 3.5, 0.8, 2, partial_sum=True),
    "true": self.generate_fixture_data(stat, 0.3, 0.05, 3, 0.3, 2, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
    "false": self.generate_fixture_data(stat, 3, 1, 4, 0.8, 2, partial_sum=True),
    "true": self.generate_fixture_data(stat, 1, 0.4, 4, 0.8, 2, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
    property=stat.property, end_time=last_end_time, state=FillState.DONE
)

stat = COUNT_STATS["messages_sent:is_bot:hour"]
user_data: FixtureData = {
    "false": self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8, holiday_rate=0.1),
}
stat = COUNT_STATS['messages_sent:is_bot:hour']
user_data = {'false': self.generate_fixture_data(
    stat, 2, 1, 1.5, .6, 8, holiday_rate=.1)}  # type: Mapping[Optional[str], List[int]]
insert_fixture_data(stat, user_data, UserCount)
realm_data = {
    "false": self.generate_fixture_data(stat, 35, 15, 6, 0.6, 4),
    "true": self.generate_fixture_data(stat, 15, 15, 3, 0.4, 2),
}
realm_data = {'false': self.generate_fixture_data(stat, 35, 15, 6, .6, 4),
              'true': self.generate_fixture_data(stat, 15, 15, 3, .4, 2)}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
    "false": self.generate_fixture_data(stat, 350, 150, 6, 0.6, 4),
    "true": self.generate_fixture_data(stat, 150, 150, 3, 0.4, 2),
}
installation_data = {'false': self.generate_fixture_data(stat, 350, 150, 6, .6, 4),
                     'true': self.generate_fixture_data(stat, 150, 150, 3, .4, 2)}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
    property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
                         state=FillState.DONE)

stat = COUNT_STATS["messages_sent:message_type:day"]
stat = COUNT_STATS['messages_sent:message_type:day']
user_data = {
    "public_stream": self.generate_fixture_data(stat, 1.5, 1, 3, 0.6, 8),
    "private_message": self.generate_fixture_data(stat, 0.5, 0.3, 1, 0.6, 8),
    "huddle_message": self.generate_fixture_data(stat, 0.2, 0.2, 2, 0.6, 8),
}
    'public_stream': self.generate_fixture_data(stat, 1.5, 1, 3, .6, 8),
    'private_message': self.generate_fixture_data(stat, .5, .3, 1, .6, 8),
    'huddle_message': self.generate_fixture_data(stat, .2, .2, 2, .6, 8)}
insert_fixture_data(stat, user_data, UserCount)
realm_data = {
    "public_stream": self.generate_fixture_data(stat, 30, 8, 5, 0.6, 4),
    "private_stream": self.generate_fixture_data(stat, 7, 7, 5, 0.6, 4),
    "private_message": self.generate_fixture_data(stat, 13, 5, 5, 0.6, 4),
    "huddle_message": self.generate_fixture_data(stat, 6, 3, 3, 0.6, 4),
}
    'public_stream': self.generate_fixture_data(stat, 30, 8, 5, .6, 4),
    'private_stream': self.generate_fixture_data(stat, 7, 7, 5, .6, 4),
    'private_message': self.generate_fixture_data(stat, 13, 5, 5, .6, 4),
    'huddle_message': self.generate_fixture_data(stat, 6, 3, 3, .6, 4)}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
    "public_stream": self.generate_fixture_data(stat, 300, 80, 5, 0.6, 4),
    "private_stream": self.generate_fixture_data(stat, 70, 70, 5, 0.6, 4),
    "private_message": self.generate_fixture_data(stat, 130, 50, 5, 0.6, 4),
    "huddle_message": self.generate_fixture_data(stat, 60, 30, 3, 0.6, 4),
}
    'public_stream': self.generate_fixture_data(stat, 300, 80, 5, .6, 4),
    'private_stream': self.generate_fixture_data(stat, 70, 70, 5, .6, 4),
    'private_message': self.generate_fixture_data(stat, 130, 50, 5, .6, 4),
    'huddle_message': self.generate_fixture_data(stat, 60, 30, 3, .6, 4)}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
    property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
                         state=FillState.DONE)

website, created = Client.objects.get_or_create(name="website")
old_desktop, created = Client.objects.get_or_create(name="desktop app Linux 0.3.7")
android, created = Client.objects.get_or_create(name="ZulipAndroid")
iOS, created = Client.objects.get_or_create(name="ZulipiOS")
react_native, created = Client.objects.get_or_create(name="ZulipMobile")
API, created = Client.objects.get_or_create(name="API: Python")
zephyr_mirror, created = Client.objects.get_or_create(name="zephyr_mirror")
unused, created = Client.objects.get_or_create(name="unused")
long_webhook, created = Client.objects.get_or_create(name="ZulipLooooooooooongNameWebhook")
website, created = Client.objects.get_or_create(name='website')
old_desktop, created = Client.objects.get_or_create(name='desktop app Linux 0.3.7')
android, created = Client.objects.get_or_create(name='ZulipAndroid')
iOS, created = Client.objects.get_or_create(name='ZulipiOS')
react_native, created = Client.objects.get_or_create(name='ZulipMobile')
API, created = Client.objects.get_or_create(name='API: Python')
zephyr_mirror, created = Client.objects.get_or_create(name='zephyr_mirror')
unused, created = Client.objects.get_or_create(name='unused')
long_webhook, created = Client.objects.get_or_create(name='ZulipLooooooooooongNameWebhook')

stat = COUNT_STATS["messages_sent:client:day"]
stat = COUNT_STATS['messages_sent:client:day']
user_data = {
    website.id: self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8),
    zephyr_mirror.id: self.generate_fixture_data(stat, 0, 0.3, 1.5, 0.6, 8),
}
    website.id: self.generate_fixture_data(stat, 2, 1, 1.5, .6, 8),
    zephyr_mirror.id: self.generate_fixture_data(stat, 0, .3, 1.5, .6, 8)}
insert_fixture_data(stat, user_data, UserCount)
realm_data = {
    website.id: self.generate_fixture_data(stat, 30, 20, 5, 0.6, 3),
    old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, 0.6, 3),
    android.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
    iOS.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
    react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
    API.id: self.generate_fixture_data(stat, 5, 5, 5, 0.6, 3),
    zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, 0.6, 3),
    website.id: self.generate_fixture_data(stat, 30, 20, 5, .6, 3),
    old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, .6, 3),
    android.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3),
    iOS.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3),
    react_native.id: self.generate_fixture_data(stat, 5, 5, 10, .6, 3),
    API.id: self.generate_fixture_data(stat, 5, 5, 5, .6, 3),
    zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, .6, 3),
    unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
    long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
}
    long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3)}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
    website.id: self.generate_fixture_data(stat, 300, 200, 5, 0.6, 3),
    old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, 0.6, 3),
    android.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
    iOS.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
    react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
    API.id: self.generate_fixture_data(stat, 50, 50, 5, 0.6, 3),
    zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, 0.6, 3),
    website.id: self.generate_fixture_data(stat, 300, 200, 5, .6, 3),
    old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, .6, 3),
    android.id: self.generate_fixture_data(stat, 50, 50, 2, .6, 3),
    iOS.id: self.generate_fixture_data(stat, 50, 50, 2, .6, 3),
    react_native.id: self.generate_fixture_data(stat, 5, 5, 10, .6, 3),
    API.id: self.generate_fixture_data(stat, 50, 50, 5, .6, 3),
    zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, .6, 3),
    unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
    long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
}
    long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, .6, 3)}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
    property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
                         state=FillState.DONE)

stat = COUNT_STATS["messages_in_stream:is_bot:day"]
realm_data = {
    "false": self.generate_fixture_data(stat, 30, 5, 6, 0.6, 4),
    "true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3),
}
stat = COUNT_STATS['messages_in_stream:is_bot:day']
realm_data = {'false': self.generate_fixture_data(stat, 30, 5, 6, .6, 4),
              'true': self.generate_fixture_data(stat, 20, 2, 3, .2, 3)}
insert_fixture_data(stat, realm_data, RealmCount)
stream_data: Mapping[Union[int, str, None], List[int]] = {
    "false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4),
    "true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2),
}
stream_data = {'false': self.generate_fixture_data(stat, 10, 7, 5, .6, 4),
               'true': self.generate_fixture_data(stat, 5, 3, 2, .4, 2)}  # type: Mapping[Optional[str], List[int]]
insert_fixture_data(stat, stream_data, StreamCount)
FillState.objects.create(
    property=stat.property, end_time=last_end_time, state=FillState.DONE
)

stat = COUNT_STATS["messages_read::hour"]
user_data = {
    None: self.generate_fixture_data(stat, 7, 3, 2, 0.6, 8, holiday_rate=0.1),
}
insert_fixture_data(stat, user_data, UserCount)
realm_data = {None: self.generate_fixture_data(stat, 50, 35, 6, 0.6, 4)}
insert_fixture_data(stat, realm_data, RealmCount)
FillState.objects.create(
    property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
                         state=FillState.DONE)
152
analytics/management/commands/realm_stats.py
Normal file
@@ -0,0 +1,152 @@
import datetime
from argparse import ArgumentParser
from typing import Any, List

from django.core.management.base import BaseCommand
from django.db.models import Count
from django.utils.timezone import now as timezone_now

from zerver.models import Message, Realm, Recipient, Stream, \
    Subscription, UserActivity, UserMessage, UserProfile, get_realm

MOBILE_CLIENT_LIST = ["Android", "ios"]
HUMAN_CLIENT_LIST = MOBILE_CLIENT_LIST + ["website"]

human_messages = Message.objects.filter(sending_client__name__in=HUMAN_CLIENT_LIST)

class Command(BaseCommand):
    help = "Generate statistics on realm activity."

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
                            help="realm to generate statistics for")

    def active_users(self, realm: Realm) -> List[UserProfile]:
        # Has been active (on the website, for now) in the last 7 days.
        activity_cutoff = timezone_now() - datetime.timedelta(days=7)
        return [activity.user_profile for activity in (
            UserActivity.objects.filter(user_profile__realm=realm,
                                        user_profile__is_active=True,
                                        last_visit__gt=activity_cutoff,
                                        query="/json/users/me/pointer",
                                        client__name="website"))]

    def messages_sent_by(self, user: UserProfile, days_ago: int) -> int:
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender=user, pub_date__gt=sent_time_cutoff).count()

    def total_messages(self, realm: Realm, days_ago: int) -> int:
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return Message.objects.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()

    def human_messages(self, realm: Realm, days_ago: int) -> int:
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()

    def api_messages(self, realm: Realm, days_ago: int) -> int:
        return (self.total_messages(realm, days_ago) - self.human_messages(realm, days_ago))

    def stream_messages(self, realm: Realm, days_ago: int) -> int:
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff,
                                     recipient__type=Recipient.STREAM).count()

    def private_messages(self, realm: Realm, days_ago: int) -> int:
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
            recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.HUDDLE).count()

    def group_private_messages(self, realm: Realm, days_ago: int) -> int:
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
            recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.PERSONAL).count()

    def report_percentage(self, numerator: float, denominator: float, text: str) -> None:
        if not denominator:
            fraction = 0.0
        else:
            fraction = numerator / float(denominator)
        print("%.2f%% of" % (fraction * 100,), text)

    def handle(self, *args: Any, **options: Any) -> None:
        if options['realms']:
            try:
                realms = [get_realm(string_id) for string_id in options['realms']]
            except Realm.DoesNotExist as e:
                print(e)
                exit(1)
        else:
            realms = Realm.objects.all()

        for realm in realms:
            print(realm.string_id)

            user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
            active_users = self.active_users(realm)
            num_active = len(active_users)

            print("%d active users (%d total)" % (num_active, len(user_profiles)))
            streams = Stream.objects.filter(realm=realm).extra(
                tables=['zerver_subscription', 'zerver_recipient'],
                where=['zerver_subscription.recipient_id = zerver_recipient.id',
                       'zerver_recipient.type = 2',
                       'zerver_recipient.type_id = zerver_stream.id',
                       'zerver_subscription.active = true']).annotate(count=Count("name"))
            print("%d streams" % (streams.count(),))

            for days_ago in (1, 7, 30):
                print("In last %d days, users sent:" % (days_ago,))
                sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles]
                for quantity in sorted(sender_quantities, reverse=True):
                    print(quantity, end=' ')
                print("")

                print("%d stream messages" % (self.stream_messages(realm, days_ago),))
                print("%d one-on-one private messages" % (self.private_messages(realm, days_ago),))
                print("%d messages sent via the API" % (self.api_messages(realm, days_ago),))
                print("%d group private messages" % (self.group_private_messages(realm, days_ago),))

            num_notifications_enabled = len([x for x in active_users if x.enable_desktop_notifications])
            self.report_percentage(num_notifications_enabled, num_active,
                                   "active users have desktop notifications enabled")

            num_enter_sends = len([x for x in active_users if x.enter_sends])
            self.report_percentage(num_enter_sends, num_active,
                                   "active users have enter-sends")

            all_message_count = human_messages.filter(sender__realm=realm).count()
            multi_paragraph_message_count = human_messages.filter(
                sender__realm=realm, content__contains="\n\n").count()
            self.report_percentage(multi_paragraph_message_count, all_message_count,
                                   "all messages are multi-paragraph")

            # Starred messages
            starrers = UserMessage.objects.filter(user_profile__in=user_profiles,
                                                  flags=UserMessage.flags.starred).values(
                "user_profile").annotate(count=Count("user_profile"))
            print("%d users have starred %d messages" % (
                len(starrers), sum([elt["count"] for elt in starrers])))

            active_user_subs = Subscription.objects.filter(
                user_profile__in=user_profiles, active=True)

            # Streams not in home view
            non_home_view = active_user_subs.filter(in_home_view=False).values(
                "user_profile").annotate(count=Count("user_profile"))
            print("%d users have %d streams not in home view" % (
                len(non_home_view), sum([elt["count"] for elt in non_home_view])))

            # Code block markup
            markup_messages = human_messages.filter(
                sender__realm=realm, content__contains="~~~").values(
                "sender").annotate(count=Count("sender"))
            print("%d users have used code block markup on %s messages" % (
                len(markup_messages), sum([elt["count"] for elt in markup_messages])))

            # Notifications for stream messages
            notifications = active_user_subs.filter(desktop_notifications=True).values(
                "user_profile").annotate(count=Count("user_profile"))
            print("%d users receive desktop notifications for %d streams" % (
                len(notifications), sum([elt["count"] for elt in notifications])))

            print("")
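A usage sketch for the new command (standard Django management-command invocation; the realm string_ids below are placeholders, not taken from the diff):

# From the Zulip project root:
#   ./manage.py realm_stats                 # report on every realm
#   ./manage.py realm_stats zulip example   # report on specific realms by string_id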
57
analytics/management/commands/stream_stats.py
Normal file
@@ -0,0 +1,57 @@
from argparse import ArgumentParser
from typing import Any

from django.core.management.base import BaseCommand
from django.db.models import Q

from zerver.models import Message, Realm, \
    Recipient, Stream, Subscription, get_realm

class Command(BaseCommand):
    help = "Generate statistics on the streams for a realm."

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
                            help="realm to generate statistics for")

    def handle(self, *args: Any, **options: str) -> None:
        if options['realms']:
            try:
                realms = [get_realm(string_id) for string_id in options['realms']]
            except Realm.DoesNotExist as e:
                print(e)
                exit(1)
        else:
            realms = Realm.objects.all()

        for realm in realms:
            streams = Stream.objects.filter(realm=realm).exclude(Q(name__istartswith="tutorial-"))
            # private stream count
            private_count = 0
            # public stream count
            public_count = 0
            for stream in streams:
                if stream.invite_only:
                    private_count += 1
                else:
                    public_count += 1
            print("------------")
            print(realm.string_id, end=' ')
            print("%10s %d public streams and" % ("(", public_count), end=' ')
            print("%d private streams )" % (private_count,))
            print("------------")
            print("%25s %15s %10s %12s" % ("stream", "subscribers", "messages", "type"))

            for stream in streams:
                if stream.invite_only:
                    stream_type = 'private'
                else:
                    stream_type = 'public'
                print("%25s" % (stream.name,), end=' ')
                recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id)
                print("%10d" % (len(Subscription.objects.filter(recipient=recipient,
                                                                active=True)),), end=' ')
                num_messages = len(Message.objects.filter(recipient=recipient))
                print("%12d" % (num_messages,), end=' ')
                print("%15s" % (stream_type,))
            print("")

@@ -1,13 +1,13 @@
import os
import time
from argparse import ArgumentParser
from datetime import timezone
from typing import Any, Dict

from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils.dateparse import parse_datetime
from django.utils.timezone import now as timezone_now
from django.utils.timezone import utc as timezone_utc

from analytics.lib.counts import COUNT_STATS, logger, process_count_stat
from scripts.lib.zulip_tools import ENDC, WARNING
@@ -15,36 +15,34 @@ from zerver.lib.remote_server import send_analytics_to_remote_server
from zerver.lib.timestamp import floor_to_hour
from zerver.models import Realm


class Command(BaseCommand):
    help = """Fills Analytics tables.

    Run as a cron job that runs every hour."""

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument(
            "--time",
            "-t",
            help="Update stat tables from current state to "
            "--time. Defaults to the current time.",
            default=timezone_now().isoformat(),
        )
        parser.add_argument("--utc", action="store_true", help="Interpret --time in UTC.")
        parser.add_argument(
            "--stat", "-s", help="CountStat to process. If omitted, all stats are processed."
        )
        parser.add_argument(
            "--verbose", action="store_true", help="Print timing information to stdout."
        )
        parser.add_argument('--time', '-t',
                            type=str,
                            help='Update stat tables from current state to'
                            '--time. Defaults to the current time.',
                            default=timezone_now().isoformat())
        parser.add_argument('--utc',
                            action='store_true',
                            help="Interpret --time in UTC.",
                            default=False)
        parser.add_argument('--stat', '-s',
                            type=str,
                            help="CountStat to process. If omitted, all stats are processed.")
        parser.add_argument('--verbose',
                            action='store_true',
                            help="Print timing information to stdout.",
                            default=False)

    def handle(self, *args: Any, **options: Any) -> None:
        try:
            os.mkdir(settings.ANALYTICS_LOCK_DIR)
        except OSError:
            print(
                f"{WARNING}Analytics lock {settings.ANALYTICS_LOCK_DIR} is unavailable;"
                f" exiting.{ENDC}"
            )
            print(WARNING + "Analytics lock %s is unavailable; exiting... " + ENDC)
            return

        try:
@@ -59,38 +57,34 @@ class Command(BaseCommand):
            logger.info("No realms, stopping update_analytics_counts")
            return

        fill_to_time = parse_datetime(options["time"])
        assert fill_to_time is not None
        if options["utc"]:
            fill_to_time = fill_to_time.replace(tzinfo=timezone.utc)
        fill_to_time = parse_datetime(options['time'])
        if options['utc']:
            fill_to_time = fill_to_time.replace(tzinfo=timezone_utc)
        if fill_to_time.tzinfo is None:
            raise ValueError(
                "--time must be time-zone-aware. Maybe you meant to use the --utc option?"
            )
            raise ValueError("--time must be timezone aware. Maybe you meant to use the --utc option?")

        fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc))
        fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone_utc))

        if options["stat"] is not None:
            stats = [COUNT_STATS[options["stat"]]]
        if options['stat'] is not None:
            stats = [COUNT_STATS[options['stat']]]
        else:
            stats = list(COUNT_STATS.values())

        logger.info("Starting updating analytics counts through %s", fill_to_time)
        if options["verbose"]:
        logger.info("Starting updating analytics counts through %s" % (fill_to_time,))
        if options['verbose']:
            start = time.time()
            last = start

        for stat in stats:
            process_count_stat(stat, fill_to_time)
            if options["verbose"]:
                print(f"Updated {stat.property} in {time.time() - last:.3f}s")
            if options['verbose']:
                print("Updated %s in %.3fs" % (stat.property, time.time() - last))
                last = time.time()

        if options["verbose"]:
            print(
                f"Finished updating analytics counts through {fill_to_time} in {time.time() - start:.3f}s"
            )
        logger.info("Finished updating analytics counts through %s", fill_to_time)
        if options['verbose']:
            print("Finished updating analytics counts through %s in %.3fs" %
                  (fill_to_time, time.time() - start))
        logger.info("Finished updating analytics counts through %s" % (fill_to_time,))

        if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS:
            send_analytics_to_remote_server()
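The help string above says this command is meant to run hourly from cron. A sketch of an /etc/cron.d entry (the user and deployment path are illustrative assumptions, not taken from the diff):

# Run at minute 0 of every hour as the zulip user (path is illustrative).
0 * * * * zulip /home/zulip/deployments/current/manage.py update_analytics_counts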
42
analytics/management/commands/user_stats.py
Normal file
@@ -0,0 +1,42 @@
import datetime
from argparse import ArgumentParser
from typing import Any

from django.core.management.base import BaseCommand
from django.utils.timezone import now as timezone_now

from zerver.models import Message, Realm, Stream, UserProfile, get_realm

class Command(BaseCommand):
    help = "Generate statistics on user activity."

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
                            help="realm to generate statistics for")

    def messages_sent_by(self, user: UserProfile, week: int) -> int:
        start = timezone_now() - datetime.timedelta(days=(week + 1)*7)
        end = timezone_now() - datetime.timedelta(days=week*7)
        return Message.objects.filter(sender=user, pub_date__gt=start, pub_date__lte=end).count()

    def handle(self, *args: Any, **options: Any) -> None:
        if options['realms']:
            try:
                realms = [get_realm(string_id) for string_id in options['realms']]
            except Realm.DoesNotExist as e:
                print(e)
                exit(1)
        else:
            realms = Realm.objects.all()

        for realm in realms:
            print(realm.string_id)
            user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
            print("%d users" % (len(user_profiles),))
            print("%d streams" % (len(Stream.objects.filter(realm=realm)),))

            for user_profile in user_profiles:
                print("%35s" % (user_profile.email,), end=' ')
                for week in range(10):
                    print("%5d" % (self.messages_sent_by(user_profile, week)), end=' ')
                print("")
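To make the week-window arithmetic in messages_sent_by concrete, each call counts messages in a half-open seven-day window ending `week` weeks ago:

# week=0 -> now-7d  < pub_date <= now
# week=1 -> now-14d < pub_date <= now-7d
# week=9 -> now-70d < pub_date <= now-63d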
@@ -1,209 +1,110 @@
# -*- coding: utf-8 -*-
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("zerver", "0030_realm_org_type"),
        ('zerver', '0030_realm_org_type'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="Anomaly",
            name='Anomaly',
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
                    ),
                ),
                ("info", models.CharField(max_length=1000)),
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('info', models.CharField(max_length=1000)),
            ],
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name="HuddleCount",
            name='HuddleCount',
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
                    ),
                ),
                (
                    "huddle",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient"
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
                ("property", models.CharField(max_length=40)),
                ("end_time", models.DateTimeField()),
                ("interval", models.CharField(max_length=20)),
                ("value", models.BigIntegerField()),
                (
                    "anomaly",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="analytics.Anomaly",
                        null=True,
                    ),
                ),
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('huddle', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Recipient')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('property', models.CharField(max_length=40)),
                ('end_time', models.DateTimeField()),
                ('interval', models.CharField(max_length=20)),
                ('value', models.BigIntegerField()),
                ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
            ],
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name="InstallationCount",
            name='InstallationCount',
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
                    ),
                ),
                ("property", models.CharField(max_length=40)),
                ("end_time", models.DateTimeField()),
                ("interval", models.CharField(max_length=20)),
                ("value", models.BigIntegerField()),
                (
                    "anomaly",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="analytics.Anomaly",
                        null=True,
                    ),
                ),
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('property', models.CharField(max_length=40)),
                ('end_time', models.DateTimeField()),
                ('interval', models.CharField(max_length=20)),
                ('value', models.BigIntegerField()),
                ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
            ],
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name="RealmCount",
            name='RealmCount',
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
                    ),
                ),
                (
                    "realm",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
                    ),
                ),
                ("property", models.CharField(max_length=40)),
                ("end_time", models.DateTimeField()),
                ("interval", models.CharField(max_length=20)),
                ("value", models.BigIntegerField()),
                (
                    "anomaly",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="analytics.Anomaly",
                        null=True,
                    ),
                ),
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
                ('property', models.CharField(max_length=40)),
                ('end_time', models.DateTimeField()),
                ('interval', models.CharField(max_length=20)),
                ('value', models.BigIntegerField()),
                ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),

            ],
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name="StreamCount",
            name='StreamCount',
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
                    ),
                ),
                (
                    "realm",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
                    ),
                ),
                (
                    "stream",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Stream"
                    ),
                ),
                ("property", models.CharField(max_length=40)),
                ("end_time", models.DateTimeField()),
                ("interval", models.CharField(max_length=20)),
                ("value", models.BigIntegerField()),
                (
                    "anomaly",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="analytics.Anomaly",
                        null=True,
                    ),
                ),
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
                ('stream', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Stream')),
                ('property', models.CharField(max_length=40)),
                ('end_time', models.DateTimeField()),
                ('interval', models.CharField(max_length=20)),
                ('value', models.BigIntegerField()),
                ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
            ],
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name="UserCount",
            name='UserCount',
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
                    ),
                ),
                (
                    "realm",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
                    ),
                ),
                ("property", models.CharField(max_length=40)),
                ("end_time", models.DateTimeField()),
                ("interval", models.CharField(max_length=20)),
                ("value", models.BigIntegerField()),
                (
                    "anomaly",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="analytics.Anomaly",
                        null=True,
                    ),
                ),
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('property', models.CharField(max_length=40)),
                ('end_time', models.DateTimeField()),
                ('interval', models.CharField(max_length=20)),
                ('value', models.BigIntegerField()),
                ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
            ],
            bases=(models.Model,),
        ),
        migrations.AlterUniqueTogether(
            name="usercount",
            unique_together={("user", "property", "end_time", "interval")},
            name='usercount',
            unique_together=set([('user', 'property', 'end_time', 'interval')]),
        ),
        migrations.AlterUniqueTogether(
            name="streamcount",
            unique_together={("stream", "property", "end_time", "interval")},
            name='streamcount',
            unique_together=set([('stream', 'property', 'end_time', 'interval')]),
        ),
        migrations.AlterUniqueTogether(
            name="realmcount",
            unique_together={("realm", "property", "end_time", "interval")},
            name='realmcount',
            unique_together=set([('realm', 'property', 'end_time', 'interval')]),
        ),
        migrations.AlterUniqueTogether(
            name="installationcount",
            unique_together={("property", "end_time", "interval")},
            name='installationcount',
            unique_together=set([('property', 'end_time', 'interval')]),
        ),
        migrations.AlterUniqueTogether(
            name="huddlecount",
            unique_together={("huddle", "property", "end_time", "interval")},
            name='huddlecount',
            unique_together=set([('huddle', 'property', 'end_time', 'interval')]),
        ),
    ]

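Most of the churn in this migration is quote-style reformatting plus the modern set-literal spelling of unique_together; the two spellings are equivalent:

set([('user', 'property', 'end_time', 'interval')]) == {("user", "property", "end_time", "interval")}  # True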
@@ -1,30 +1,30 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0001_initial"),
|
||||
('analytics', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="huddlecount",
|
||||
unique_together=set(),
|
||||
name='huddlecount',
|
||||
unique_together=set([]),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="huddlecount",
|
||||
name="anomaly",
|
||||
model_name='huddlecount',
|
||||
name='anomaly',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="huddlecount",
|
||||
name="huddle",
|
||||
model_name='huddlecount',
|
||||
name='huddle',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="huddlecount",
|
||||
name="user",
|
||||
model_name='huddlecount',
|
||||
name='user',
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name="HuddleCount",
|
||||
name='HuddleCount',
|
||||
),
|
||||
]
|
||||
|
@@ -1,26 +1,21 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0002_remove_huddlecount"),
|
||||
('analytics', '0002_remove_huddlecount'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="FillState",
|
||||
name='FillState',
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
("property", models.CharField(unique=True, max_length=40)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("state", models.PositiveSmallIntegerField()),
|
||||
("last_modified", models.DateTimeField(auto_now=True)),
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('property', models.CharField(unique=True, max_length=40)),
|
||||
('end_time', models.DateTimeField()),
|
||||
('state', models.PositiveSmallIntegerField()),
|
||||
('last_modified', models.DateTimeField(auto_now=True)),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
),
|
||||
|
@@ -1,31 +1,31 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0003_fillstate"),
|
||||
('analytics', '0003_fillstate'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="installationcount",
|
||||
name="subgroup",
|
||||
model_name='installationcount',
|
||||
name='subgroup',
|
||||
field=models.CharField(max_length=16, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="realmcount",
|
||||
name="subgroup",
|
||||
model_name='realmcount',
|
||||
name='subgroup',
|
||||
field=models.CharField(max_length=16, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="streamcount",
|
||||
name="subgroup",
|
||||
model_name='streamcount',
|
||||
name='subgroup',
|
||||
field=models.CharField(max_length=16, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="usercount",
|
||||
name="subgroup",
|
||||
model_name='usercount',
|
||||
name='subgroup',
|
||||
field=models.CharField(max_length=16, null=True),
|
||||
),
|
||||
]
|
||||
|
@@ -1,51 +1,51 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0004_add_subgroup"),
|
||||
('analytics', '0004_add_subgroup'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="installationcount",
|
||||
name="interval",
|
||||
model_name='installationcount',
|
||||
name='interval',
|
||||
field=models.CharField(max_length=8),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="installationcount",
|
||||
name="property",
|
||||
model_name='installationcount',
|
||||
name='property',
|
||||
field=models.CharField(max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="realmcount",
|
||||
name="interval",
|
||||
model_name='realmcount',
|
||||
name='interval',
|
||||
field=models.CharField(max_length=8),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="realmcount",
|
||||
name="property",
|
||||
model_name='realmcount',
|
||||
name='property',
|
||||
field=models.CharField(max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="streamcount",
|
||||
name="interval",
|
||||
model_name='streamcount',
|
||||
name='interval',
|
||||
field=models.CharField(max_length=8),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="streamcount",
|
||||
name="property",
|
||||
model_name='streamcount',
|
||||
name='property',
|
||||
field=models.CharField(max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="usercount",
|
||||
name="interval",
|
||||
model_name='usercount',
|
||||
name='interval',
|
||||
field=models.CharField(max_length=8),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="usercount",
|
||||
name="property",
|
||||
model_name='usercount',
|
||||
name='property',
|
||||
field=models.CharField(max_length=32),
|
||||
),
|
||||
]
|
||||
|
@@ -1,27 +1,27 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0005_alter_field_size"),
|
||||
('analytics', '0005_alter_field_size'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="installationcount",
|
||||
unique_together={("property", "subgroup", "end_time", "interval")},
|
||||
name='installationcount',
|
||||
unique_together=set([('property', 'subgroup', 'end_time', 'interval')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="realmcount",
|
||||
unique_together={("realm", "property", "subgroup", "end_time", "interval")},
|
||||
name='realmcount',
|
||||
unique_together=set([('realm', 'property', 'subgroup', 'end_time', 'interval')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="streamcount",
|
||||
unique_together={("stream", "property", "subgroup", "end_time", "interval")},
|
||||
name='streamcount',
|
||||
unique_together=set([('stream', 'property', 'subgroup', 'end_time', 'interval')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="usercount",
|
||||
unique_together={("user", "property", "subgroup", "end_time", "interval")},
|
||||
name='usercount',
|
||||
unique_together=set([('user', 'property', 'subgroup', 'end_time', 'interval')]),
|
||||
),
|
||||
]
|
||||
|
@@ -1,44 +1,44 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.10.4 on 2017-01-16 20:50
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0006_add_subgroup_to_unique_constraints"),
|
||||
('analytics', '0006_add_subgroup_to_unique_constraints'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="installationcount",
|
||||
unique_together={("property", "subgroup", "end_time")},
|
||||
name='installationcount',
|
||||
unique_together=set([('property', 'subgroup', 'end_time')]),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="installationcount",
|
||||
name="interval",
|
||||
model_name='installationcount',
|
||||
name='interval',
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="realmcount",
|
||||
unique_together={("realm", "property", "subgroup", "end_time")},
|
||||
name='realmcount',
|
||||
unique_together=set([('realm', 'property', 'subgroup', 'end_time')]),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="realmcount",
|
||||
name="interval",
|
||||
model_name='realmcount',
|
||||
name='interval',
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="streamcount",
|
||||
unique_together={("stream", "property", "subgroup", "end_time")},
|
||||
name='streamcount',
|
||||
unique_together=set([('stream', 'property', 'subgroup', 'end_time')]),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="streamcount",
|
||||
name="interval",
|
||||
model_name='streamcount',
|
||||
name='interval',
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="usercount",
|
||||
unique_together={("user", "property", "subgroup", "end_time")},
|
||||
name='usercount',
|
||||
unique_together=set([('user', 'property', 'subgroup', 'end_time')]),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="usercount",
|
||||
name="interval",
|
||||
model_name='usercount',
|
||||
name='interval',
|
||||
),
|
||||
]
|
||||
|
@@ -1,25 +1,25 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.10.5 on 2017-02-01 22:28
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("zerver", "0050_userprofile_avatar_version"),
|
||||
("analytics", "0007_remove_interval"),
|
||||
('zerver', '0050_userprofile_avatar_version'),
|
||||
('analytics', '0007_remove_interval'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterIndexTogether(
|
||||
name="realmcount",
|
||||
index_together={("property", "end_time")},
|
||||
name='realmcount',
|
||||
index_together=set([('property', 'end_time')]),
|
||||
),
|
||||
migrations.AlterIndexTogether(
|
||||
name="streamcount",
|
||||
index_together={("property", "realm", "end_time")},
|
||||
name='streamcount',
|
||||
index_together=set([('property', 'realm', 'end_time')]),
|
||||
),
|
||||
migrations.AlterIndexTogether(
|
||||
name="usercount",
|
||||
index_together={("property", "realm", "end_time")},
|
||||
name='usercount',
|
||||
index_together=set([('property', 'realm', 'end_time')]),
|
||||
),
|
||||
]
|
||||
|
@@ -1,29 +1,26 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import migrations
|
||||
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
|
||||
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
|
||||
from django.db.migrations.state import StateApps
|
||||
|
||||
def delete_messages_sent_to_stream_stat(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
|
||||
UserCount = apps.get_model('analytics', 'UserCount')
|
||||
StreamCount = apps.get_model('analytics', 'StreamCount')
|
||||
RealmCount = apps.get_model('analytics', 'RealmCount')
|
||||
InstallationCount = apps.get_model('analytics', 'InstallationCount')
|
||||
FillState = apps.get_model('analytics', 'FillState')
|
||||
|
||||
def delete_messages_sent_to_stream_stat(
|
||||
apps: StateApps, schema_editor: DatabaseSchemaEditor
|
||||
) -> None:
|
||||
UserCount = apps.get_model("analytics", "UserCount")
|
||||
StreamCount = apps.get_model("analytics", "StreamCount")
|
||||
RealmCount = apps.get_model("analytics", "RealmCount")
|
||||
InstallationCount = apps.get_model("analytics", "InstallationCount")
|
||||
FillState = apps.get_model("analytics", "FillState")
|
||||
|
||||
property = "messages_sent_to_stream:is_bot"
|
||||
property = 'messages_sent_to_stream:is_bot'
|
||||
UserCount.objects.filter(property=property).delete()
|
||||
StreamCount.objects.filter(property=property).delete()
|
||||
RealmCount.objects.filter(property=property).delete()
|
||||
InstallationCount.objects.filter(property=property).delete()
|
||||
FillState.objects.filter(property=property).delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0008_add_count_indexes"),
|
||||
('analytics', '0008_add_count_indexes'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
|
@@ -1,28 +1,25 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import migrations
|
||||
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
|
||||
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
|
||||
from django.db.migrations.state import StateApps
|
||||
|
||||
def clear_message_sent_by_message_type_values(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
|
||||
UserCount = apps.get_model('analytics', 'UserCount')
|
||||
StreamCount = apps.get_model('analytics', 'StreamCount')
|
||||
RealmCount = apps.get_model('analytics', 'RealmCount')
|
||||
InstallationCount = apps.get_model('analytics', 'InstallationCount')
|
||||
FillState = apps.get_model('analytics', 'FillState')
|
||||
|
||||
def clear_message_sent_by_message_type_values(
|
||||
apps: StateApps, schema_editor: DatabaseSchemaEditor
|
||||
) -> None:
|
||||
UserCount = apps.get_model("analytics", "UserCount")
|
||||
StreamCount = apps.get_model("analytics", "StreamCount")
|
||||
RealmCount = apps.get_model("analytics", "RealmCount")
|
||||
InstallationCount = apps.get_model("analytics", "InstallationCount")
|
||||
FillState = apps.get_model("analytics", "FillState")
|
||||
|
||||
property = "messages_sent:message_type:day"
|
||||
property = 'messages_sent:message_type:day'
|
||||
UserCount.objects.filter(property=property).delete()
|
||||
StreamCount.objects.filter(property=property).delete()
|
||||
RealmCount.objects.filter(property=property).delete()
|
||||
InstallationCount.objects.filter(property=property).delete()
|
||||
FillState.objects.filter(property=property).delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [("analytics", "0009_remove_messages_to_stream_stat")]
|
||||
dependencies = [('analytics', '0009_remove_messages_to_stream_stat')]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(clear_message_sent_by_message_type_values),
|
||||
|
@@ -1,14 +1,14 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import migrations
|
||||
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
|
||||
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
|
||||
from django.db.migrations.state import StateApps
|
||||
|
||||
|
||||
def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
|
||||
UserCount = apps.get_model("analytics", "UserCount")
|
||||
StreamCount = apps.get_model("analytics", "StreamCount")
|
||||
RealmCount = apps.get_model("analytics", "RealmCount")
|
||||
InstallationCount = apps.get_model("analytics", "InstallationCount")
|
||||
FillState = apps.get_model("analytics", "FillState")
|
||||
UserCount = apps.get_model('analytics', 'UserCount')
|
||||
StreamCount = apps.get_model('analytics', 'StreamCount')
|
||||
RealmCount = apps.get_model('analytics', 'RealmCount')
|
||||
InstallationCount = apps.get_model('analytics', 'InstallationCount')
|
||||
FillState = apps.get_model('analytics', 'FillState')
|
||||
|
||||
UserCount.objects.all().delete()
|
||||
StreamCount.objects.all().delete()
|
||||
@@ -16,11 +16,10 @@ def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor)
|
||||
InstallationCount.objects.all().delete()
|
||||
FillState.objects.all().delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0010_clear_messages_sent_values"),
|
||||
('analytics', '0010_clear_messages_sent_values'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
|
@@ -1,42 +1,36 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-01-29 08:14
from __future__ import unicode_literals

import django.db.models.deletion
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ("analytics", "0011_clear_analytics_tables"),
        ('analytics', '0011_clear_analytics_tables'),
    ]

    operations = [
        migrations.AlterField(
            model_name="installationcount",
            name="anomaly",
            field=models.ForeignKey(
                null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
            ),
            model_name='installationcount',
            name='anomaly',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
        ),
        migrations.AlterField(
            model_name="realmcount",
            name="anomaly",
            field=models.ForeignKey(
                null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
            ),
            model_name='realmcount',
            name='anomaly',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
        ),
        migrations.AlterField(
            model_name="streamcount",
            name="anomaly",
            field=models.ForeignKey(
                null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
            ),
            model_name='streamcount',
            name='anomaly',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
        ),
        migrations.AlterField(
            model_name="usercount",
            name="anomaly",
            field=models.ForeignKey(
                null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
            ),
            model_name='usercount',
            name='anomaly',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
        ),
    ]
@@ -1,4 +1,6 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-02-02 02:47
from __future__ import unicode_literals

from django.db import migrations
@@ -6,27 +8,27 @@ from django.db import migrations
class Migration(migrations.Migration):

    dependencies = [
        ("analytics", "0012_add_on_delete"),
        ('analytics', '0012_add_on_delete'),
    ]

    operations = [
        migrations.RemoveField(
            model_name="installationcount",
            name="anomaly",
            model_name='installationcount',
            name='anomaly',
        ),
        migrations.RemoveField(
            model_name="realmcount",
            name="anomaly",
            model_name='realmcount',
            name='anomaly',
        ),
        migrations.RemoveField(
            model_name="streamcount",
            name="anomaly",
            model_name='streamcount',
            name='anomaly',
        ),
        migrations.RemoveField(
            model_name="usercount",
            name="anomaly",
            model_name='usercount',
            name='anomaly',
        ),
        migrations.DeleteModel(
            name="Anomaly",
            name='Anomaly',
        ),
    ]
@@ -1,17 +0,0 @@
# Generated by Django 1.11.26 on 2020-01-27 04:32

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("analytics", "0013_remove_anomaly"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="fillstate",
            name="last_modified",
        ),
    ]
@@ -1,65 +0,0 @@
from django.db import migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models import Count, Sum


def clear_duplicate_counts(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """This is a preparatory migration for our Analytics tables.

    The backstory is that Django's unique_together indexes do not properly
    handle the subgroup=None corner case (allowing duplicate rows that have a
    subgroup of None), which meant that in race conditions, rather than updating
    an existing row for the property/(realm, stream, user)/time with subgroup=None, Django would
    create a duplicate row.

    In the next migration, we'll add a proper constraint to fix this bug, but
    we need to fix any existing problematic rows before we can add that constraint.

    We fix this in an appropriate fashion for each type of CountStat object; mainly
    this means deleting the extra rows, but for LoggingCountStat objects, we need to
    additionally combine the sums.
    """
    count_tables = dict(
        realm=apps.get_model("analytics", "RealmCount"),
        user=apps.get_model("analytics", "UserCount"),
        stream=apps.get_model("analytics", "StreamCount"),
        installation=apps.get_model("analytics", "InstallationCount"),
    )

    for name, count_table in count_tables.items():
        value = [name, "property", "end_time"]
        if name == "installation":
            value = ["property", "end_time"]
        counts = (
            count_table.objects.filter(subgroup=None)
            .values(*value)
            .annotate(Count("id"), Sum("value"))
            .filter(id__count__gt=1)
        )

        for count in counts:
            count.pop("id__count")
            total_value = count.pop("value__sum")
            duplicate_counts = list(count_table.objects.filter(**count))
            first_count = duplicate_counts[0]
            if count["property"] in ["invites_sent::day", "active_users_log:is_bot:day"]:
                # For LoggingCountStat objects, the right fix is to combine the totals;
                # for other CountStat objects, we expect the duplicates to have the same value.
                # And so all we need to do is delete them.
                first_count.value = total_value
                first_count.save()
            to_cleanup = duplicate_counts[1:]
            for duplicate_count in to_cleanup:
                duplicate_count.delete()


class Migration(migrations.Migration):

    dependencies = [
        ("analytics", "0014_remove_fillstate_last_modified"),
    ]

    operations = [
        migrations.RunPython(clear_duplicate_counts, reverse_code=migrations.RunPython.noop),
    ]
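The migration above leans on a Django ORM idiom worth calling out: chaining .values(...) before .annotate(...) turns the named fields into a SQL GROUP BY, so each returned dict summarizes one group of potentially duplicated rows. A minimal, self-contained sketch of that pattern, using an illustrative SomeCount model rather than anything from this repository:

    # Sketch of the dedup-detection pattern, assuming a generic Django model
    # with `subgroup`, `property`, `end_time`, and `value` fields (the model
    # name here is hypothetical, not part of the Zulip codebase).
    from django.db.models import Count, Sum

    def find_duplicate_groups(SomeCount):
        # values() + annotate() makes Django GROUP BY the values() fields,
        # so each result row describes one (property, end_time) group.
        return (
            SomeCount.objects.filter(subgroup=None)
            .values("property", "end_time")
            .annotate(Count("id"), Sum("value"))
            .filter(id__count__gt=1)  # keep only groups containing duplicates
        )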
@@ -1,93 +0,0 @@
# Generated by Django 2.2.10 on 2020-02-29 19:40

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("analytics", "0015_clear_duplicate_counts"),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name="installationcount",
            unique_together=set(),
        ),
        migrations.AlterUniqueTogether(
            name="realmcount",
            unique_together=set(),
        ),
        migrations.AlterUniqueTogether(
            name="streamcount",
            unique_together=set(),
        ),
        migrations.AlterUniqueTogether(
            name="usercount",
            unique_together=set(),
        ),
        migrations.AddConstraint(
            model_name="installationcount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=False),
                fields=("property", "subgroup", "end_time"),
                name="unique_installation_count",
            ),
        ),
        migrations.AddConstraint(
            model_name="installationcount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=True),
                fields=("property", "end_time"),
                name="unique_installation_count_null_subgroup",
            ),
        ),
        migrations.AddConstraint(
            model_name="realmcount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=False),
                fields=("realm", "property", "subgroup", "end_time"),
                name="unique_realm_count",
            ),
        ),
        migrations.AddConstraint(
            model_name="realmcount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=True),
                fields=("realm", "property", "end_time"),
                name="unique_realm_count_null_subgroup",
            ),
        ),
        migrations.AddConstraint(
            model_name="streamcount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=False),
                fields=("stream", "property", "subgroup", "end_time"),
                name="unique_stream_count",
            ),
        ),
        migrations.AddConstraint(
            model_name="streamcount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=True),
                fields=("stream", "property", "end_time"),
                name="unique_stream_count_null_subgroup",
            ),
        ),
        migrations.AddConstraint(
            model_name="usercount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=False),
                fields=("user", "property", "subgroup", "end_time"),
                name="unique_user_count",
            ),
        ),
        migrations.AddConstraint(
            model_name="usercount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=True),
                fields=("user", "property", "end_time"),
                name="unique_user_count_null_subgroup",
            ),
        ),
    ]
@@ -2,139 +2,91 @@ import datetime
from typing import Optional

from django.db import models
from django.db.models import Q, UniqueConstraint

from zerver.lib.timestamp import floor_to_day
from zerver.models import Realm, Stream, UserProfile


class FillState(models.Model):
    property: str = models.CharField(max_length=40, unique=True)
    end_time: datetime.datetime = models.DateTimeField()
    property = models.CharField(max_length=40, unique=True)  # type: str
    end_time = models.DateTimeField()  # type: datetime.datetime

    # Valid states are {DONE, STARTED}
    DONE = 1
    STARTED = 2
    state: int = models.PositiveSmallIntegerField()
    state = models.PositiveSmallIntegerField()  # type: int

    last_modified = models.DateTimeField(auto_now=True)  # type: datetime.datetime

    def __str__(self) -> str:
        return f"<FillState: {self.property} {self.end_time} {self.state}>"
        return "<FillState: %s %s %s>" % (self.property, self.end_time, self.state)


# The earliest/starting end_time in FillState
# We assume there is at least one realm
def installation_epoch() -> datetime.datetime:
    earliest_realm_creation = Realm.objects.aggregate(models.Min("date_created"))[
        "date_created__min"
    ]
    earliest_realm_creation = Realm.objects.aggregate(models.Min('date_created'))['date_created__min']
    return floor_to_day(earliest_realm_creation)

def last_successful_fill(property: str) -> Optional[datetime.datetime]:
    fillstate = FillState.objects.filter(property=property).first()
    if fillstate is None:
        return None
    if fillstate.state == FillState.DONE:
        return fillstate.end_time
    return fillstate.end_time - datetime.timedelta(hours=1)

class BaseCount(models.Model):
    # Note: When inheriting from BaseCount, you may want to rearrange
    # the order of the columns in the migration to make sure they
    # match how you'd like the table to be arranged.
    property: str = models.CharField(max_length=32)
    subgroup: Optional[str] = models.CharField(max_length=16, null=True)
    end_time: datetime.datetime = models.DateTimeField()
    value: int = models.BigIntegerField()
    property = models.CharField(max_length=32)  # type: str
    subgroup = models.CharField(max_length=16, null=True)  # type: Optional[str]
    end_time = models.DateTimeField()  # type: datetime.datetime
    value = models.BigIntegerField()  # type: int

    class Meta:
        abstract = True


class InstallationCount(BaseCount):

    class Meta:
        # Handles invalid duplicate InstallationCount data
        constraints = [
            UniqueConstraint(
                fields=["property", "subgroup", "end_time"],
                condition=Q(subgroup__isnull=False),
                name="unique_installation_count",
            ),
            UniqueConstraint(
                fields=["property", "end_time"],
                condition=Q(subgroup__isnull=True),
                name="unique_installation_count_null_subgroup",
            ),
        ]
        unique_together = ("property", "subgroup", "end_time")

    def __str__(self) -> str:
        return f"<InstallationCount: {self.property} {self.subgroup} {self.value}>"
        return "<InstallationCount: %s %s %s>" % (self.property, self.subgroup, self.value)

class RealmCount(BaseCount):
    realm = models.ForeignKey(Realm, on_delete=models.CASCADE)

    class Meta:
        # Handles invalid duplicate RealmCount data
        constraints = [
            UniqueConstraint(
                fields=["realm", "property", "subgroup", "end_time"],
                condition=Q(subgroup__isnull=False),
                name="unique_realm_count",
            ),
            UniqueConstraint(
                fields=["realm", "property", "end_time"],
                condition=Q(subgroup__isnull=True),
                name="unique_realm_count_null_subgroup",
            ),
        ]
        unique_together = ("realm", "property", "subgroup", "end_time")
        index_together = ["property", "end_time"]

    def __str__(self) -> str:
        return f"<RealmCount: {self.realm} {self.property} {self.subgroup} {self.value}>"
        return "<RealmCount: %s %s %s %s>" % (self.realm, self.property, self.subgroup, self.value)

class UserCount(BaseCount):
    user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
    realm = models.ForeignKey(Realm, on_delete=models.CASCADE)

    class Meta:
        # Handles invalid duplicate UserCount data
        constraints = [
            UniqueConstraint(
                fields=["user", "property", "subgroup", "end_time"],
                condition=Q(subgroup__isnull=False),
                name="unique_user_count",
            ),
            UniqueConstraint(
                fields=["user", "property", "end_time"],
                condition=Q(subgroup__isnull=True),
                name="unique_user_count_null_subgroup",
            ),
        ]
        unique_together = ("user", "property", "subgroup", "end_time")
        # This index dramatically improves the performance of
        # aggregating from users to realms
        index_together = ["property", "realm", "end_time"]

    def __str__(self) -> str:
        return f"<UserCount: {self.user} {self.property} {self.subgroup} {self.value}>"
        return "<UserCount: %s %s %s %s>" % (self.user, self.property, self.subgroup, self.value)

class StreamCount(BaseCount):
    stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
    realm = models.ForeignKey(Realm, on_delete=models.CASCADE)

    class Meta:
        # Handles invalid duplicate StreamCount data
        constraints = [
            UniqueConstraint(
                fields=["stream", "property", "subgroup", "end_time"],
                condition=Q(subgroup__isnull=False),
                name="unique_stream_count",
            ),
            UniqueConstraint(
                fields=["stream", "property", "end_time"],
                condition=Q(subgroup__isnull=True),
                name="unique_stream_count_null_subgroup",
            ),
        ]
        unique_together = ("stream", "property", "subgroup", "end_time")
        # This index dramatically improves the performance of
        # aggregating from streams to realms
        index_together = ["property", "realm", "end_time"]

    def __str__(self) -> str:
        return (
            f"<StreamCount: {self.stream} {self.property} {self.subgroup} {self.value} {self.id}>"
        )
        return "<StreamCount: %s %s %s %s %s>" % (
            self.stream, self.property, self.subgroup, self.value, self.id)
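The paired constraints in each Meta above exist because Postgres unique indexes treat NULL as distinct from NULL, which is exactly the unique_together gap described in the 0015 migration's docstring: a single unique index covering subgroup cannot prevent two rows that differ only in a NULL subgroup. A minimal sketch of the same conditional-uniqueness split, for a hypothetical model (nothing below is Zulip's code):

    # Two partial unique constraints: one keyed on subgroup for non-NULL rows,
    # and one that drops subgroup from the key for the subgroup-is-NULL rows,
    # so NULL duplicates collide too.
    from django.db import models
    from django.db.models import Q, UniqueConstraint

    class ExampleCount(models.Model):
        property = models.CharField(max_length=32)
        subgroup = models.CharField(max_length=16, null=True)
        end_time = models.DateTimeField()

        class Meta:
            constraints = [
                UniqueConstraint(
                    fields=["property", "subgroup", "end_time"],
                    condition=Q(subgroup__isnull=False),
                    name="example_unique_with_subgroup",
                ),
                UniqueConstraint(
                    fields=["property", "end_time"],
                    condition=Q(subgroup__isnull=True),
                    name="example_unique_null_subgroup",
                ),
            ]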
@@ -1,55 +0,0 @@
from unittest import mock

from django.utils.timezone import now as timezone_now

from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import queries_captured
from zerver.models import Client, UserActivity, UserProfile, flush_per_request_caches


class ActivityTest(ZulipTestCase):
    @mock.patch("stripe.Customer.list", return_value=[])
    def test_activity(self, unused_mock: mock.Mock) -> None:
        self.login("hamlet")
        client, _ = Client.objects.get_or_create(name="website")
        query = "/json/messages/flags"
        last_visit = timezone_now()
        count = 150
        for activity_user_profile in UserProfile.objects.all():
            UserActivity.objects.get_or_create(
                user_profile=activity_user_profile,
                client=client,
                query=query,
                count=count,
                last_visit=last_visit,
            )

        # Fails when not staff
        result = self.client_get("/activity")
        self.assertEqual(result.status_code, 302)

        user_profile = self.example_user("hamlet")
        user_profile.is_staff = True
        user_profile.save(update_fields=["is_staff"])

        flush_per_request_caches()
        with queries_captured() as queries:
            result = self.client_get("/activity")
        self.assertEqual(result.status_code, 200)

        self.assert_length(queries, 19)

        flush_per_request_caches()
        with queries_captured() as queries:
            result = self.client_get("/realm_activity/zulip/")
        self.assertEqual(result.status_code, 200)

        self.assert_length(queries, 8)

        iago = self.example_user("iago")
        flush_per_request_caches()
        with queries_captured() as queries:
            result = self.client_get(f"/user_activity/{iago.id}/")
        self.assertEqual(result.status_code, 200)

        self.assert_length(queries, 5)
(File diff suppressed because it is too large.)
@@ -2,39 +2,28 @@ from analytics.lib.counts import CountStat
from analytics.lib.fixtures import generate_time_series_data
from zerver.lib.test_classes import ZulipTestCase


# A very light test suite; the code being tested is not run in production.
class TestFixtures(ZulipTestCase):
    def test_deterministic_settings(self) -> None:
        # test basic business_hour / non_business_hour calculation
        # test we get an array of the right length with frequency=CountStat.DAY
        data = generate_time_series_data(
            days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0
        )
            days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0)
        self.assertEqual(data, [400, 400, 400, 400, 400, 360, 360])

        data = generate_time_series_data(
            days=1,
            business_hours_base=2000,
            non_business_hours_base=1500,
            growth=2,
            spikiness=0,
            frequency=CountStat.HOUR,
        )
            days=1, business_hours_base=2000, non_business_hours_base=1500,
            growth=2, spikiness=0, frequency=CountStat.HOUR)
        # test we get an array of the right length with frequency=CountStat.HOUR
        self.assert_length(data, 24)
        self.assertEqual(len(data), 24)
        # test that growth doesn't affect the first data point
        self.assertEqual(data[0], 2000)
        # test that the last data point is growth times what it otherwise would be
        self.assertEqual(data[-1], 1500 * 2)
        self.assertEqual(data[-1], 1500*2)

        # test autocorrelation == 1, since that's the easiest value to test
        data = generate_time_series_data(
            days=1,
            business_hours_base=2000,
            non_business_hours_base=2000,
            autocorrelation=1,
            frequency=CountStat.HOUR,
        )
            days=1, business_hours_base=2000, non_business_hours_base=2000,
            autocorrelation=1, frequency=CountStat.HOUR)
        self.assertEqual(data[0], data[1])
        self.assertEqual(data[0], data[-1])
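The expected daily totals in the first assertion are consistent with the fixture treating 8 hours of each weekday as business hours and everything else, including weekends, as non-business hours. This is inferred from the numbers, not read out of the fixture code:

    # Plausible arithmetic behind [400, 400, 400, 400, 400, 360, 360]
    # (assumption: 8 business hours per weekday, weekends fully non-business).
    business_hours_base, non_business_hours_base = 20, 15
    weekday_total = 8 * business_hours_base + 16 * non_business_hours_base  # 400
    weekend_total = 24 * non_business_hours_base  # 360
    assert [weekday_total] * 5 + [weekend_total] * 2 == [400, 400, 400, 400, 400, 360, 360]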
@@ -1,639 +0,0 @@
from datetime import datetime, timedelta, timezone
from typing import List, Optional

from django.utils.timezone import now as timezone_now

from analytics.lib.counts import COUNT_STATS, CountStat
from analytics.lib.time_utils import time_range
from analytics.models import FillState, RealmCount, UserCount
from analytics.views.stats import rewrite_client_arrays, sort_by_totals, sort_client_labels
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, datetime_to_timestamp
from zerver.models import Client, get_realm


class TestStatsEndpoint(ZulipTestCase):
    def test_stats(self) -> None:
        self.user = self.example_user("hamlet")
        self.login_user(self.user)
        result = self.client_get("/stats")
        self.assertEqual(result.status_code, 200)
        # Check that we get something back
        self.assert_in_response("Zulip analytics for", result)

    def test_guest_user_cant_access_stats(self) -> None:
        self.user = self.example_user("polonius")
        self.login_user(self.user)
        result = self.client_get("/stats")
        self.assert_json_error(result, "Not allowed for guest users", 400)

        result = self.client_get("/json/analytics/chart_data")
        self.assert_json_error(result, "Not allowed for guest users", 400)

    def test_stats_for_realm(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)

        result = self.client_get("/stats/realm/zulip/")
        self.assertEqual(result.status_code, 302)

        result = self.client_get("/stats/realm/not_existing_realm/")
        self.assertEqual(result.status_code, 302)

        user = self.example_user("hamlet")
        user.is_staff = True
        user.save(update_fields=["is_staff"])

        result = self.client_get("/stats/realm/not_existing_realm/")
        self.assertEqual(result.status_code, 404)

        result = self.client_get("/stats/realm/zulip/")
        self.assertEqual(result.status_code, 200)
        self.assert_in_response("Zulip analytics for", result)

    def test_stats_for_installation(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)

        result = self.client_get("/stats/installation")
        self.assertEqual(result.status_code, 302)

        user = self.example_user("hamlet")
        user.is_staff = True
        user.save(update_fields=["is_staff"])

        result = self.client_get("/stats/installation")
        self.assertEqual(result.status_code, 200)
        self.assert_in_response("Zulip analytics for", result)


class TestGetChartData(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.realm = get_realm("zulip")
        self.user = self.example_user("hamlet")
        self.login_user(self.user)
        self.end_times_hour = [
            ceiling_to_hour(self.realm.date_created) + timedelta(hours=i) for i in range(4)
        ]
        self.end_times_day = [
            ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(4)
        ]

    def data(self, i: int) -> List[int]:
        return [0, 0, i, 0]

    def insert_data(
        self, stat: CountStat, realm_subgroups: List[Optional[str]], user_subgroups: List[str]
    ) -> None:
        if stat.frequency == CountStat.HOUR:
            insert_time = self.end_times_hour[2]
            fill_time = self.end_times_hour[-1]
        if stat.frequency == CountStat.DAY:
            insert_time = self.end_times_day[2]
            fill_time = self.end_times_day[-1]

        RealmCount.objects.bulk_create(
            RealmCount(
                property=stat.property,
                subgroup=subgroup,
                end_time=insert_time,
                value=100 + i,
                realm=self.realm,
            )
            for i, subgroup in enumerate(realm_subgroups)
        )
        UserCount.objects.bulk_create(
            UserCount(
                property=stat.property,
                subgroup=subgroup,
                end_time=insert_time,
                value=200 + i,
                realm=self.realm,
                user=self.user,
            )
            for i, subgroup in enumerate(user_subgroups)
        )
        FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE)

    def test_number_of_humans(self) -> None:
        stat = COUNT_STATS["realm_active_humans::day"]
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS["1day_actives::day"]
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS["active_users_audit:is_bot:day"]
        self.insert_data(stat, ["false"], [])
        result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(
            data,
            {
                "msg": "",
                "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
                "frequency": CountStat.DAY,
                "everyone": {
                    "_1day": self.data(100),
                    "_15day": self.data(100),
                    "all_time": self.data(100),
                },
                "display_order": None,
                "result": "success",
            },
        )

    def test_messages_sent_over_time(self) -> None:
        stat = COUNT_STATS["messages_sent:is_bot:hour"]
        self.insert_data(stat, ["true", "false"], ["false"])
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
        )
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(
            data,
            {
                "msg": "",
                "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour],
                "frequency": CountStat.HOUR,
                "everyone": {"bot": self.data(100), "human": self.data(101)},
                "user": {"bot": self.data(0), "human": self.data(200)},
                "display_order": None,
                "result": "success",
            },
        )

    def test_messages_sent_by_message_type(self) -> None:
        stat = COUNT_STATS["messages_sent:message_type:day"]
        self.insert_data(
            stat, ["public_stream", "private_message"], ["public_stream", "private_stream"]
        )
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
        )
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(
            data,
            {
                "msg": "",
                "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
                "frequency": CountStat.DAY,
                "everyone": {
                    "Public streams": self.data(100),
                    "Private streams": self.data(0),
                    "Private messages": self.data(101),
                    "Group private messages": self.data(0),
                },
                "user": {
                    "Public streams": self.data(200),
                    "Private streams": self.data(201),
                    "Private messages": self.data(0),
                    "Group private messages": self.data(0),
                },
                "display_order": [
                    "Private messages",
                    "Public streams",
                    "Private streams",
                    "Group private messages",
                ],
                "result": "success",
            },
        )

    def test_messages_sent_by_client(self) -> None:
        stat = COUNT_STATS["messages_sent:client:day"]
        client1 = Client.objects.create(name="client 1")
        client2 = Client.objects.create(name="client 2")
        client3 = Client.objects.create(name="client 3")
        client4 = Client.objects.create(name="client 4")
        self.insert_data(
            stat,
            [str(client4.id), str(client3.id), str(client2.id)],
            [str(client3.id), str(client1.id)],
        )
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"}
        )
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(
            data,
            {
                "msg": "",
                "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
                "frequency": CountStat.DAY,
                "everyone": {
                    "client 4": self.data(100),
                    "client 3": self.data(101),
                    "client 2": self.data(102),
                },
                "user": {"client 3": self.data(200), "client 1": self.data(201)},
                "display_order": ["client 1", "client 2", "client 3", "client 4"],
                "result": "success",
            },
        )

    def test_messages_read_over_time(self) -> None:
        stat = COUNT_STATS["messages_read::hour"]
        self.insert_data(stat, [None], [])
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_read_over_time"}
        )
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(
            data,
            {
                "msg": "",
                "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour],
                "frequency": CountStat.HOUR,
                "everyone": {"read": self.data(100)},
                "user": {"read": self.data(0)},
                "display_order": None,
                "result": "success",
            },
        )

    def test_include_empty_subgroups(self) -> None:
        FillState.objects.create(
            property="realm_active_humans::day",
            end_time=self.end_times_day[0],
            state=FillState.DONE,
        )
        result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data["everyone"], {"_1day": [0], "_15day": [0], "all_time": [0]})
        self.assertFalse("user" in data)

        FillState.objects.create(
            property="messages_sent:is_bot:hour",
            end_time=self.end_times_hour[0],
            state=FillState.DONE,
        )
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
        )
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data["everyone"], {"human": [0], "bot": [0]})
        self.assertEqual(data["user"], {"human": [0], "bot": [0]})

        FillState.objects.create(
            property="messages_sent:message_type:day",
            end_time=self.end_times_day[0],
            state=FillState.DONE,
        )
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
        )
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(
            data["everyone"],
            {
                "Public streams": [0],
                "Private streams": [0],
                "Private messages": [0],
                "Group private messages": [0],
            },
        )
        self.assertEqual(
            data["user"],
            {
                "Public streams": [0],
                "Private streams": [0],
                "Private messages": [0],
                "Group private messages": [0],
            },
        )

        FillState.objects.create(
            property="messages_sent:client:day",
            end_time=self.end_times_day[0],
            state=FillState.DONE,
        )
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"}
        )
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data["everyone"], {})
        self.assertEqual(data["user"], {})

    def test_start_and_end(self) -> None:
        stat = COUNT_STATS["realm_active_humans::day"]
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS["1day_actives::day"]
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS["active_users_audit:is_bot:day"]
        self.insert_data(stat, ["false"], [])
        end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day]

        # valid start and end
        result = self.client_get(
            "/json/analytics/chart_data",
            {
                "chart_name": "number_of_humans",
                "start": end_time_timestamps[1],
                "end": end_time_timestamps[2],
            },
        )
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data["end_times"], end_time_timestamps[1:3])
        self.assertEqual(
            data["everyone"], {"_1day": [0, 100], "_15day": [0, 100], "all_time": [0, 100]}
        )

        # start later than end
        result = self.client_get(
            "/json/analytics/chart_data",
            {
                "chart_name": "number_of_humans",
                "start": end_time_timestamps[2],
                "end": end_time_timestamps[1],
            },
        )
        self.assert_json_error_contains(result, "Start time is later than")

    def test_min_length(self) -> None:
        stat = COUNT_STATS["realm_active_humans::day"]
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS["1day_actives::day"]
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS["active_users_audit:is_bot:day"]
        self.insert_data(stat, ["false"], [])
        # test min_length is too short to change anything
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 2}
        )
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(
            data["end_times"], [datetime_to_timestamp(dt) for dt in self.end_times_day]
        )
        self.assertEqual(
            data["everyone"],
            {"_1day": self.data(100), "_15day": self.data(100), "all_time": self.data(100)},
        )
        # test min_length larger than filled data
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 5}
        )
        self.assert_json_success(result)
        data = result.json()
        end_times = [
            ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)
        ]
        self.assertEqual(data["end_times"], [datetime_to_timestamp(dt) for dt in end_times])
        self.assertEqual(
            data["everyone"],
            {
                "_1day": [0, *self.data(100)],
                "_15day": [0, *self.data(100)],
                "all_time": [0, *self.data(100)],
            },
        )

    def test_non_existent_chart(self) -> None:
        result = self.client_get("/json/analytics/chart_data", {"chart_name": "does_not_exist"})
        self.assert_json_error_contains(result, "Unknown chart name")

    def test_analytics_not_running(self) -> None:
        realm = get_realm("zulip")

        self.assertEqual(FillState.objects.count(), 0)

        realm.date_created = timezone_now() - timedelta(days=3)
        realm.save(update_fields=["date_created"])
        with self.assertLogs(level="WARNING") as m:
            result = self.client_get(
                "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
            )
            self.assertEqual(
                m.output,
                [
                    f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?"
                ],
            )

        self.assert_json_error_contains(result, "No analytics data available")

        realm.date_created = timezone_now() - timedelta(days=1, hours=2)
        realm.save(update_fields=["date_created"])
        with self.assertLogs(level="WARNING") as m:
            result = self.client_get(
                "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
            )
            self.assertEqual(
                m.output,
                [
                    f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?"
                ],
            )

        self.assert_json_error_contains(result, "No analytics data available")

        realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
        realm.save(update_fields=["date_created"])
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
        )
        self.assert_json_success(result)

        realm.date_created = timezone_now() - timedelta(hours=10)
        realm.save(update_fields=["date_created"])
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
        )
        self.assert_json_success(result)

        end_time = timezone_now() - timedelta(days=5)
        fill_state = FillState.objects.create(
            property="messages_sent:is_bot:hour", end_time=end_time, state=FillState.DONE
        )

        realm.date_created = timezone_now() - timedelta(days=3)
        realm.save(update_fields=["date_created"])
        with self.assertLogs(level="WARNING") as m:
            result = self.client_get(
                "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
            )
            self.assertEqual(
                m.output,
                [
                    f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?"
                ],
            )

        self.assert_json_error_contains(result, "No analytics data available")

        realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
        realm.save(update_fields=["date_created"])
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
        )
        self.assert_json_success(result)

        end_time = timezone_now() - timedelta(days=2)
        fill_state.end_time = end_time
        fill_state.save(update_fields=["end_time"])

        realm.date_created = timezone_now() - timedelta(days=3)
        realm.save(update_fields=["date_created"])
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
        )
        self.assert_json_success(result)

        realm.date_created = timezone_now() - timedelta(days=1, hours=2)
        realm.save(update_fields=["date_created"])
        with self.assertLogs(level="WARNING") as m:
            result = self.client_get(
                "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
            )
            self.assertEqual(
                m.output,
                [
                    f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?"
                ],
            )

        self.assert_json_error_contains(result, "No analytics data available")

        realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
        realm.save(update_fields=["date_created"])
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
        )
        self.assert_json_success(result)

    def test_get_chart_data_for_realm(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)

        result = self.client_get(
            "/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"}
        )
        self.assert_json_error(result, "Must be an server administrator", 400)

        user = self.example_user("hamlet")
        user.is_staff = True
        user.save(update_fields=["is_staff"])
        stat = COUNT_STATS["realm_active_humans::day"]
        self.insert_data(stat, [None], [])

        result = self.client_get(
            "/json/analytics/chart_data/realm/not_existing_realm",
            {"chart_name": "number_of_humans"},
        )
        self.assert_json_error(result, "Invalid organization", 400)

        result = self.client_get(
            "/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"}
        )
        self.assert_json_success(result)

    def test_get_chart_data_for_installation(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)

        result = self.client_get(
            "/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"}
        )
        self.assert_json_error(result, "Must be an server administrator", 400)

        user = self.example_user("hamlet")
        user.is_staff = True
        user.save(update_fields=["is_staff"])
        stat = COUNT_STATS["realm_active_humans::day"]
        self.insert_data(stat, [None], [])

        result = self.client_get(
            "/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"}
        )
        self.assert_json_success(result)


class TestGetChartDataHelpers(ZulipTestCase):
    def test_sort_by_totals(self) -> None:
        empty: List[int] = []
        value_arrays = {"c": [0, 1], "a": [9], "b": [1, 1, 1], "d": empty}
        self.assertEqual(sort_by_totals(value_arrays), ["a", "b", "c", "d"])

    def test_sort_client_labels(self) -> None:
        data = {
            "everyone": {"a": [16], "c": [15], "b": [14], "e": [13], "d": [12], "h": [11]},
            "user": {"a": [6], "b": [5], "d": [4], "e": [3], "f": [2], "g": [1]},
        }
        self.assertEqual(sort_client_labels(data), ["a", "b", "c", "d", "e", "f", "g", "h"])


class TestTimeRange(ZulipTestCase):
    def test_time_range(self) -> None:
        HOUR = timedelta(hours=1)
        DAY = timedelta(days=1)

        a_time = datetime(2016, 3, 14, 22, 59, tzinfo=timezone.utc)
        floor_hour = datetime(2016, 3, 14, 22, tzinfo=timezone.utc)
        floor_day = datetime(2016, 3, 14, tzinfo=timezone.utc)

        # test start == end
        self.assertEqual(time_range(a_time, a_time, CountStat.HOUR, None), [])
        self.assertEqual(time_range(a_time, a_time, CountStat.DAY, None), [])
        # test start == end == boundary, and min_length == 0
        self.assertEqual(time_range(floor_hour, floor_hour, CountStat.HOUR, 0), [floor_hour])
        self.assertEqual(time_range(floor_day, floor_day, CountStat.DAY, 0), [floor_day])
        # test start and end on different boundaries
        self.assertEqual(
            time_range(floor_hour, floor_hour + HOUR, CountStat.HOUR, None),
            [floor_hour, floor_hour + HOUR],
        )
        self.assertEqual(
            time_range(floor_day, floor_day + DAY, CountStat.DAY, None),
            [floor_day, floor_day + DAY],
        )
        # test min_length
        self.assertEqual(
            time_range(floor_hour, floor_hour + HOUR, CountStat.HOUR, 4),
            [floor_hour - 2 * HOUR, floor_hour - HOUR, floor_hour, floor_hour + HOUR],
        )
        self.assertEqual(
            time_range(floor_day, floor_day + DAY, CountStat.DAY, 4),
            [floor_day - 2 * DAY, floor_day - DAY, floor_day, floor_day + DAY],
        )


class TestMapArrays(ZulipTestCase):
    def test_map_arrays(self) -> None:
        a = {
            "desktop app 1.0": [1, 2, 3],
            "desktop app 2.0": [10, 12, 13],
            "desktop app 3.0": [21, 22, 23],
            "website": [1, 2, 3],
            "ZulipiOS": [1, 2, 3],
            "ZulipElectron": [2, 5, 7],
            "ZulipMobile": [1, 5, 7],
            "ZulipPython": [1, 2, 3],
            "API: Python": [1, 2, 3],
            "SomethingRandom": [4, 5, 6],
            "ZulipGitHubWebhook": [7, 7, 9],
            "ZulipAndroid": [64, 63, 65],
        }
        result = rewrite_client_arrays(a)
        self.assertEqual(
            result,
            {
                "Old desktop app": [32, 36, 39],
                "Old iOS app": [1, 2, 3],
                "Desktop app": [2, 5, 7],
                "Mobile app": [1, 5, 7],
                "Website": [1, 2, 3],
                "Python API": [2, 4, 6],
                "SomethingRandom": [4, 5, 6],
                "GitHub webhook": [7, 7, 9],
                "Old Android app": [64, 63, 65],
            },
        )
@@ -1,629 +0,0 @@
from datetime import datetime, timedelta, timezone
from unittest import mock

import orjson
from django.http import HttpResponse
from django.utils.timezone import now as timezone_now

from corporate.lib.stripe import add_months, update_sponsorship_status
from corporate.models import Customer, CustomerPlan, LicenseLedger, get_customer_by_realm
from zerver.actions.invites import do_create_multiuse_invite_link
from zerver.actions.realm_settings import do_send_realm_reactivation_email, do_set_realm_property
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import reset_emails_in_zulip_realm
from zerver.models import (
    MultiuseInvite,
    PreregistrationUser,
    Realm,
    UserMessage,
    UserProfile,
    get_org_type_display_name,
    get_realm,
)


class TestSupportEndpoint(ZulipTestCase):
    def test_search(self) -> None:
        reset_emails_in_zulip_realm()

        def assert_user_details_in_html_response(
            html_response: HttpResponse, full_name: str, email: str, role: str
        ) -> None:
            self.assert_in_success_response(
                [
                    '<span class="label">user</span>\n',
                    f"<h3>{full_name}</h3>",
                    f"<b>Email</b>: {email}",
                    "<b>Is active</b>: True<br />",
                    f"<b>Role</b>: {role}<br />",
                ],
                html_response,
            )

        def check_hamlet_user_query_result(result: HttpResponse) -> None:
            assert_user_details_in_html_response(
                result, "King Hamlet", self.example_email("hamlet"), "Member"
            )
            self.assert_in_success_response(
                [
                    f"<b>Admins</b>: {self.example_email('iago')}\n",
                    f"<b>Owners</b>: {self.example_email('desdemona')}\n",
                    'class="copy-button" data-copytext="{}">'.format(self.example_email("iago")),
                    'class="copy-button" data-copytext="{}">'.format(
                        self.example_email("desdemona")
                    ),
                ],
                result,
            )

        def check_othello_user_query_result(result: HttpResponse) -> None:
            assert_user_details_in_html_response(
                result, "Othello, the Moor of Venice", self.example_email("othello"), "Member"
            )

        def check_polonius_user_query_result(result: HttpResponse) -> None:
            assert_user_details_in_html_response(
                result, "Polonius", self.example_email("polonius"), "Guest"
            )

        def check_zulip_realm_query_result(result: HttpResponse) -> None:
            zulip_realm = get_realm("zulip")
            first_human_user = zulip_realm.get_first_human_user()
            assert first_human_user is not None
            self.assert_in_success_response(
                [
                    f"<b>First human user</b>: {first_human_user.delivery_email}\n",
                    f'<input type="hidden" name="realm_id" value="{zulip_realm.id}"',
                    "Zulip Dev</h3>",
                    '<option value="1" selected>Self-hosted</option>',
                    '<option value="2" >Limited</option>',
                    'input type="number" name="discount" value="None"',
                    '<option value="active" selected>Active</option>',
                    '<option value="deactivated" >Deactivated</option>',
                    f'<option value="{zulip_realm.org_type}" selected>',
                    'scrub-realm-button">',
                    'data-string-id="zulip"',
                ],
                result,
            )

        def check_lear_realm_query_result(result: HttpResponse) -> None:
            lear_realm = get_realm("lear")
            self.assert_in_success_response(
                [
                    f'<input type="hidden" name="realm_id" value="{lear_realm.id}"',
                    "Lear &amp; Co.</h3>",
                    '<option value="1" selected>Self-hosted</option>',
                    '<option value="2" >Limited</option>',
                    'input type="number" name="discount" value="None"',
                    '<option value="active" selected>Active</option>',
                    '<option value="deactivated" >Deactivated</option>',
                    'scrub-realm-button">',
                    'data-string-id="lear"',
                    "<b>Name</b>: Zulip Cloud Standard",
                    "<b>Status</b>: Active",
                    "<b>Billing schedule</b>: Annual",
                    "<b>Licenses</b>: 2/10 (Manual)",
                    "<b>Price per license</b>: $80.0",
                    "<b>Next invoice date</b>: 02 January 2017",
                    '<option value="send_invoice" selected>',
                    '<option value="charge_automatically" >',
                ],
                result,
            )

        def check_preregistration_user_query_result(
            result: HttpResponse, email: str, invite: bool = False
        ) -> None:
            self.assert_in_success_response(
                [
                    '<span class="label">preregistration user</span>\n',
                    f"<b>Email</b>: {email}",
                ],
                result,
            )
            if invite:
                self.assert_in_success_response(['<span class="label">invite</span>'], result)
                self.assert_in_success_response(
                    [
                        "<b>Expires in</b>: 1\xa0week, 3\xa0days",
                        "<b>Status</b>: Link has never been clicked",
                    ],
                    result,
                )
                self.assert_in_success_response([], result)
            else:
                self.assert_not_in_success_response(['<span class="label">invite</span>'], result)
                self.assert_in_success_response(
                    [
                        "<b>Expires in</b>: 1\xa0day",
                        "<b>Status</b>: Link has never been clicked",
                    ],
                    result,
                )

        def check_realm_creation_query_result(result: HttpResponse, email: str) -> None:
            self.assert_in_success_response(
                [
                    '<span class="label">preregistration user</span>\n',
                    '<span class="label">realm creation</span>\n',
                    "<b>Link</b>: http://testserver/accounts/do_confirm/",
                    "<b>Expires in</b>: 1\xa0day",
                ],
                result,
            )

        def check_multiuse_invite_link_query_result(result: HttpResponse) -> None:
            self.assert_in_success_response(
                [
                    '<span class="label">multiuse invite</span>\n',
                    "<b>Link</b>: http://zulip.testserver/join/",
                    "<b>Expires in</b>: 1\xa0week, 3\xa0days",
                ],
                result,
            )

        def check_realm_reactivation_link_query_result(result: HttpResponse) -> None:
            self.assert_in_success_response(
                [
                    '<span class="label">realm reactivation</span>\n',
                    "<b>Link</b>: http://zulip.testserver/reactivate/",
                    "<b>Expires in</b>: 1\xa0day",
                ],
                result,
            )

        self.login("cordelia")

        result = self.client_get("/activity/support")
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        self.login("iago")

        do_set_realm_property(
            get_realm("zulip"),
            "email_address_visibility",
            Realm.EMAIL_ADDRESS_VISIBILITY_NOBODY,
            acting_user=None,
        )

        customer = Customer.objects.create(realm=get_realm("lear"), stripe_customer_id="cus_123")
        now = datetime(2016, 1, 2, tzinfo=timezone.utc)
        plan = CustomerPlan.objects.create(
            customer=customer,
            billing_cycle_anchor=now,
            billing_schedule=CustomerPlan.ANNUAL,
            tier=CustomerPlan.STANDARD,
            price_per_license=8000,
            next_invoice_date=add_months(now, 12),
        )
        LicenseLedger.objects.create(
            licenses=10,
            licenses_at_next_renewal=10,
            event_time=timezone_now(),
            is_renewal=True,
            plan=plan,
        )

        result = self.client_get("/activity/support")
        self.assert_in_success_response(
            ['<input type="text" name="q" class="input-xxlarge search-query"'], result
        )

        result = self.client_get("/activity/support", {"q": self.example_email("hamlet")})
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": self.example_email("polonius")})
        check_polonius_user_query_result(result)
        check_zulip_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "lear"})
        check_lear_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "http://lear.testserver"})
        check_lear_realm_query_result(result)

        with self.settings(REALM_HOSTS={"zulip": "localhost"}):
            result = self.client_get("/activity/support", {"q": "http://localhost"})
            check_zulip_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "hamlet@zulip.com, lear"})
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)
        check_lear_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "King hamlet,lear"})
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)
        check_lear_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "Othello, the Moor of Venice"})
        check_othello_user_query_result(result)
        check_zulip_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "lear, Hamlet <hamlet@zulip.com>"})
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)
        check_lear_realm_query_result(result)

        with mock.patch(
            "analytics.views.support.timezone_now",
            return_value=timezone_now() - timedelta(minutes=50),
        ):
            self.client_post("/accounts/home/", {"email": self.nonreg_email("test")})
            self.login("iago")
            result = self.client_get("/activity/support", {"q": self.nonreg_email("test")})
            check_preregistration_user_query_result(result, self.nonreg_email("test"))
            check_zulip_realm_query_result(result)

        invite_expires_in_days = 10
        stream_ids = [self.get_stream_id("Denmark")]
        invitee_emails = [self.nonreg_email("test1")]
        self.client_post(
            "/json/invites",
            {
                "invitee_emails": invitee_emails,
                "stream_ids": orjson.dumps(stream_ids).decode(),
                "invite_expires_in_days": invite_expires_in_days,
                "invite_as": PreregistrationUser.INVITE_AS["MEMBER"],
            },
        )
        result = self.client_get("/activity/support", {"q": self.nonreg_email("test1")})
        check_preregistration_user_query_result(result, self.nonreg_email("test1"), invite=True)
        check_zulip_realm_query_result(result)

        email = self.nonreg_email("alice")
        self.client_post("/new/", {"email": email})
        result = self.client_get("/activity/support", {"q": email})
        check_realm_creation_query_result(result, email)

        do_create_multiuse_invite_link(
            self.example_user("hamlet"),
            invited_as=1,
            invite_expires_in_days=invite_expires_in_days,
        )
        result = self.client_get("/activity/support", {"q": "zulip"})
        check_multiuse_invite_link_query_result(result)
        check_zulip_realm_query_result(result)
        MultiuseInvite.objects.all().delete()

        do_send_realm_reactivation_email(get_realm("zulip"), acting_user=None)
        result = self.client_get("/activity/support", {"q": "zulip"})
        check_realm_reactivation_link_query_result(result)
        check_zulip_realm_query_result(result)

    def test_get_org_type_display_name(self) -> None:
        self.assertEqual(get_org_type_display_name(Realm.ORG_TYPES["business"]["id"]), "Business")
        self.assertEqual(get_org_type_display_name(883), "")

    @mock.patch("analytics.views.support.update_billing_method_of_current_plan")
    def test_change_billing_method(self, m: mock.Mock) -> None:
        cordelia = self.example_user("cordelia")
        self.login_user(cordelia)

        result = self.client_post(
            "/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"}
        )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        iago = self.example_user("iago")
        self.login_user(iago)

        result = self.client_post(
            "/activity/support",
            {"realm_id": f"{iago.realm_id}", "billing_method": "charge_automatically"},
        )
        m.assert_called_once_with(get_realm("zulip"), charge_automatically=True, acting_user=iago)
        self.assert_in_success_response(
            ["Billing method of zulip updated to charge automatically"], result
        )

        m.reset_mock()

        result = self.client_post(
            "/activity/support", {"realm_id": f"{iago.realm_id}", "billing_method": "send_invoice"}
        )
        m.assert_called_once_with(get_realm("zulip"), charge_automatically=False, acting_user=iago)
        self.assert_in_success_response(
            ["Billing method of zulip updated to pay by invoice"], result
        )

    def test_change_realm_plan_type(self) -> None:
        cordelia = self.example_user("cordelia")
        self.login_user(cordelia)

        result = self.client_post(
            "/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"}
        )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        iago = self.example_user("iago")
        self.login_user(iago)

        with mock.patch("analytics.views.support.do_change_realm_plan_type") as m:
            result = self.client_post(
                "/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "2"}
            )
            m.assert_called_once_with(get_realm("zulip"), 2, acting_user=iago)
            self.assert_in_success_response(
                ["Plan type of zulip changed from self-hosted to limited"], result
            )

        with mock.patch("analytics.views.support.do_change_realm_plan_type") as m:
            result = self.client_post(
                "/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "10"}
            )
            m.assert_called_once_with(get_realm("zulip"), 10, acting_user=iago)
            self.assert_in_success_response(
                ["Plan type of zulip changed from self-hosted to plus"], result
            )

    def test_change_org_type(self) -> None:
        cordelia = self.example_user("cordelia")
        self.login_user(cordelia)

        result = self.client_post(
            "/activity/support", {"realm_id": f"{cordelia.realm_id}", "org_type": "70"}
        )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        iago = self.example_user("iago")
        self.login_user(iago)

        with mock.patch("analytics.views.support.do_change_realm_org_type") as m:
            result = self.client_post(
                "/activity/support", {"realm_id": f"{iago.realm_id}", "org_type": "70"}
            )
            m.assert_called_once_with(get_realm("zulip"), 70, acting_user=iago)
            self.assert_in_success_response(
                ["Org type of zulip changed from Business to Government"], result
            )

    def test_attach_discount(self) -> None:
        cordelia = self.example_user("cordelia")
        lear_realm = get_realm("lear")
        self.login_user(cordelia)

        result = self.client_post(
            "/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}
        )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        iago = self.example_user("iago")
        self.login("iago")

        with mock.patch("analytics.views.support.attach_discount_to_realm") as m:
            result = self.client_post(
                "/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}
            )
            m.assert_called_once_with(get_realm("lear"), 25, acting_user=iago)
            self.assert_in_success_response(["Discount of lear changed to 25% from 0%"], result)

    def test_change_sponsorship_status(self) -> None:
        lear_realm = get_realm("lear")
        self.assertIsNone(get_customer_by_realm(lear_realm))

        cordelia = self.example_user("cordelia")
        self.login_user(cordelia)

        result = self.client_post(
            "/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"}
        )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        iago = self.example_user("iago")
        self.login_user(iago)

        result = self.client_post(
            "/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"}
        )
        self.assert_in_success_response(["lear marked as pending sponsorship."], result)
        customer = get_customer_by_realm(lear_realm)
        assert customer is not None
        self.assertTrue(customer.sponsorship_pending)

        result = self.client_post(
            "/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "false"}
        )
        self.assert_in_success_response(["lear is no longer pending sponsorship."], result)
        customer = get_customer_by_realm(lear_realm)
        assert customer is not None
        self.assertFalse(customer.sponsorship_pending)

    def test_approve_sponsorship(self) -> None:
        lear_realm = get_realm("lear")
        update_sponsorship_status(lear_realm, True, acting_user=None)
        king_user = self.lear_user("king")
        king_user.role = UserProfile.ROLE_REALM_OWNER
        king_user.save()

        cordelia = self.example_user("cordelia")
        self.login_user(cordelia)

        result = self.client_post(
            "/activity/support",
||||
{"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"},
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
iago = self.example_user("iago")
|
||||
self.login_user(iago)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support",
|
||||
{"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"},
|
||||
)
|
||||
self.assert_in_success_response(["Sponsorship approved for lear"], result)
|
||||
lear_realm.refresh_from_db()
|
||||
self.assertEqual(lear_realm.plan_type, Realm.PLAN_TYPE_STANDARD_FREE)
|
||||
customer = get_customer_by_realm(lear_realm)
|
||||
assert customer is not None
|
||||
self.assertFalse(customer.sponsorship_pending)
|
||||
messages = UserMessage.objects.filter(user_profile=king_user)
|
||||
self.assertIn(
|
||||
"request for sponsored hosting has been approved", messages[0].message.content
|
||||
)
|
||||
self.assert_length(messages, 1)
|
||||
|
||||
def test_activate_or_deactivate_realm(self) -> None:
|
||||
cordelia = self.example_user("cordelia")
|
||||
lear_realm = get_realm("lear")
|
||||
self.login_user(cordelia)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
self.login("iago")
|
||||
|
||||
with mock.patch("analytics.views.support.do_deactivate_realm") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"}
|
||||
)
|
||||
m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago"))
|
||||
self.assert_in_success_response(["lear deactivated"], result)
|
||||
|
||||
with mock.patch("analytics.views.support.do_send_realm_reactivation_email") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "status": "active"}
|
||||
)
|
||||
m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago"))
|
||||
self.assert_in_success_response(
|
||||
["Realm reactivation email sent to admins of lear"], result
|
||||
)
|
||||
|
||||
def test_change_subdomain(self) -> None:
|
||||
cordelia = self.example_user("cordelia")
|
||||
lear_realm = get_realm("lear")
|
||||
self.login_user(cordelia)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new_name"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
self.login("iago")
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/activity/support?q=new-name")
|
||||
realm_id = lear_realm.id
|
||||
lear_realm = get_realm("new-name")
|
||||
self.assertEqual(lear_realm.id, realm_id)
|
||||
self.assertTrue(Realm.objects.filter(string_id="lear").exists())
|
||||
self.assertTrue(Realm.objects.filter(string_id="lear")[0].deactivated)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"}
|
||||
)
|
||||
self.assert_in_success_response(
|
||||
["Subdomain unavailable. Please choose a different one."], result
|
||||
)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "zulip"}
|
||||
)
|
||||
self.assert_in_success_response(
|
||||
["Subdomain unavailable. Please choose a different one."], result
|
||||
)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "lear"}
|
||||
)
|
||||
self.assert_in_success_response(
|
||||
["Subdomain unavailable. Please choose a different one."], result
|
||||
)
|
||||
|
||||
def test_downgrade_realm(self) -> None:
|
||||
cordelia = self.example_user("cordelia")
|
||||
self.login_user(cordelia)
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
iago = self.example_user("iago")
|
||||
self.login_user(iago)
|
||||
|
||||
with mock.patch("analytics.views.support.downgrade_at_the_end_of_billing_cycle") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support",
|
||||
{
|
||||
"realm_id": f"{iago.realm_id}",
|
||||
"downgrade_method": "downgrade_at_billing_cycle_end",
|
||||
},
|
||||
)
|
||||
m.assert_called_once_with(get_realm("zulip"))
|
||||
self.assert_in_success_response(
|
||||
["zulip marked for downgrade at the end of billing cycle"], result
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"analytics.views.support.downgrade_now_without_creating_additional_invoices"
|
||||
) as m:
|
||||
result = self.client_post(
|
||||
"/activity/support",
|
||||
{
|
||||
"realm_id": f"{iago.realm_id}",
|
||||
"downgrade_method": "downgrade_now_without_additional_licenses",
|
||||
},
|
||||
)
|
||||
m.assert_called_once_with(get_realm("zulip"))
|
||||
self.assert_in_success_response(
|
||||
["zulip downgraded without creating additional invoices"], result
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"analytics.views.support.downgrade_now_without_creating_additional_invoices"
|
||||
) as m1:
|
||||
with mock.patch("analytics.views.support.void_all_open_invoices", return_value=1) as m2:
|
||||
result = self.client_post(
|
||||
"/activity/support",
|
||||
{
|
||||
"realm_id": f"{iago.realm_id}",
|
||||
"downgrade_method": "downgrade_now_void_open_invoices",
|
||||
},
|
||||
)
|
||||
m1.assert_called_once_with(get_realm("zulip"))
|
||||
m2.assert_called_once_with(get_realm("zulip"))
|
||||
self.assert_in_success_response(
|
||||
["zulip downgraded and voided 1 open invoices"], result
|
||||
)
|
||||
|
||||
def test_scrub_realm(self) -> None:
|
||||
cordelia = self.example_user("cordelia")
|
||||
lear_realm = get_realm("lear")
|
||||
self.login_user(cordelia)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
self.login("iago")
|
||||
|
||||
with mock.patch("analytics.views.support.do_scrub_realm") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "scrub_realm": "true"}
|
||||
)
|
||||
m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago"))
|
||||
self.assert_in_success_response(["lear scrubbed"], result)
|
||||
|
||||
with mock.patch("analytics.views.support.do_scrub_realm") as m:
|
||||
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}"})
|
||||
self.assert_json_error(result, "Invalid parameters")
|
||||
m.assert_not_called()
|
analytics/tests/test_views.py
@@ -0,0 +1,404 @@
from datetime import datetime, timedelta
from typing import List, Optional

import mock
from django.utils.timezone import utc

from analytics.lib.counts import COUNT_STATS, CountStat
from analytics.lib.time_utils import time_range
from analytics.models import FillState, \
    RealmCount, UserCount, last_successful_fill
from analytics.views import rewrite_client_arrays, \
    sort_by_totals, sort_client_labels
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.timestamp import ceiling_to_day, \
    ceiling_to_hour, datetime_to_timestamp
from zerver.models import Client, get_realm

class TestStatsEndpoint(ZulipTestCase):
    def test_stats(self) -> None:
        self.user = self.example_user('hamlet')
        self.login(self.user.email)
        result = self.client_get('/stats')
        self.assertEqual(result.status_code, 200)
        # Check that we get something back
        self.assert_in_response("Zulip analytics for", result)

    def test_guest_user_cant_access_stats(self) -> None:
        self.user = self.example_user('polonius')
        self.login(self.user.email)
        result = self.client_get('/stats')
        self.assert_json_error(result, "Not allowed for guest users", 400)

        result = self.client_get('/json/analytics/chart_data')
        self.assert_json_error(result, "Not allowed for guest users", 400)

    def test_stats_for_realm(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login(user_profile.email)

        result = self.client_get('/stats/realm/zulip/')
        self.assertEqual(result.status_code, 302)

        user_profile = self.example_user('hamlet')
        user_profile.is_staff = True
        user_profile.save(update_fields=['is_staff'])

        result = self.client_get('/stats/realm/not_existing_realm/')
        self.assertEqual(result.status_code, 302)

        result = self.client_get('/stats/realm/zulip/')
        self.assertEqual(result.status_code, 200)
        self.assert_in_response("Zulip analytics for", result)

    def test_stats_for_installation(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login(user_profile.email)

        result = self.client_get('/stats/installation')
        self.assertEqual(result.status_code, 302)

        user_profile = self.example_user('hamlet')
        user_profile.is_staff = True
        user_profile.save(update_fields=['is_staff'])

        result = self.client_get('/stats/installation')
        self.assertEqual(result.status_code, 200)
        self.assert_in_response("Zulip analytics for", result)

class TestGetChartData(ZulipTestCase):
    def setUp(self) -> None:
        self.realm = get_realm('zulip')
        self.user = self.example_user('hamlet')
        self.login(self.user.email)
        self.end_times_hour = [ceiling_to_hour(self.realm.date_created) + timedelta(hours=i)
                               for i in range(4)]
        self.end_times_day = [ceiling_to_day(self.realm.date_created) + timedelta(days=i)
                              for i in range(4)]

    def data(self, i: int) -> List[int]:
        return [0, 0, i, 0]

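    # Seed one RealmCount row per realm subgroup and one UserCount row per user
    # subgroup at the third of the four end times, then mark the stat as filled.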
    def insert_data(self, stat: CountStat, realm_subgroups: List[Optional[str]],
                    user_subgroups: List[str]) -> None:
        if stat.frequency == CountStat.HOUR:
            insert_time = self.end_times_hour[2]
            fill_time = self.end_times_hour[-1]
        if stat.frequency == CountStat.DAY:
            insert_time = self.end_times_day[2]
            fill_time = self.end_times_day[-1]

        RealmCount.objects.bulk_create([
            RealmCount(property=stat.property, subgroup=subgroup, end_time=insert_time,
                       value=100+i, realm=self.realm)
            for i, subgroup in enumerate(realm_subgroups)])
        UserCount.objects.bulk_create([
            UserCount(property=stat.property, subgroup=subgroup, end_time=insert_time,
                      value=200+i, realm=self.realm, user=self.user)
            for i, subgroup in enumerate(user_subgroups)])
        FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE)

    def test_number_of_humans(self) -> None:
        stat = COUNT_STATS['realm_active_humans::day']
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS['1day_actives::day']
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS['active_users_audit:is_bot:day']
        self.insert_data(stat, ['false'], [])
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data, {
            'msg': '',
            'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
            'frequency': CountStat.DAY,
            'everyone': {'_1day': self.data(100), '_15day': self.data(100), 'all_time': self.data(100)},
            'display_order': None,
            'result': 'success',
        })

    def test_messages_sent_over_time(self) -> None:
        stat = COUNT_STATS['messages_sent:is_bot:hour']
        self.insert_data(stat, ['true', 'false'], ['false'])
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_over_time'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data, {
            'msg': '',
            'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_hour],
            'frequency': CountStat.HOUR,
            'everyone': {'bot': self.data(100), 'human': self.data(101)},
            'user': {'bot': self.data(0), 'human': self.data(200)},
            'display_order': None,
            'result': 'success',
        })

    def test_messages_sent_by_message_type(self) -> None:
        stat = COUNT_STATS['messages_sent:message_type:day']
        self.insert_data(stat, ['public_stream', 'private_message'],
                         ['public_stream', 'private_stream'])
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_by_message_type'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data, {
            'msg': '',
            'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
            'frequency': CountStat.DAY,
            'everyone': {'Public streams': self.data(100), 'Private streams': self.data(0),
                         'Private messages': self.data(101), 'Group private messages': self.data(0)},
            'user': {'Public streams': self.data(200), 'Private streams': self.data(201),
                     'Private messages': self.data(0), 'Group private messages': self.data(0)},
            'display_order': ['Private messages', 'Public streams', 'Private streams', 'Group private messages'],
            'result': 'success',
        })

    def test_messages_sent_by_client(self) -> None:
        stat = COUNT_STATS['messages_sent:client:day']
        client1 = Client.objects.create(name='client 1')
        client2 = Client.objects.create(name='client 2')
        client3 = Client.objects.create(name='client 3')
        client4 = Client.objects.create(name='client 4')
        self.insert_data(stat, [client4.id, client3.id, client2.id],
                         [client3.id, client1.id])
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_by_client'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data, {
            'msg': '',
            'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
            'frequency': CountStat.DAY,
            'everyone': {'client 4': self.data(100), 'client 3': self.data(101),
                         'client 2': self.data(102)},
            'user': {'client 3': self.data(200), 'client 1': self.data(201)},
            'display_order': ['client 1', 'client 2', 'client 3', 'client 4'],
            'result': 'success',
        })

    def test_include_empty_subgroups(self) -> None:
        FillState.objects.create(
            property='realm_active_humans::day', end_time=self.end_times_day[0],
            state=FillState.DONE)
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data['everyone'], {"_1day": [0], "_15day": [0], "all_time": [0]})
        self.assertFalse('user' in data)

        FillState.objects.create(
            property='messages_sent:is_bot:hour', end_time=self.end_times_hour[0],
            state=FillState.DONE)
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_over_time'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data['everyone'], {'human': [0], 'bot': [0]})
        self.assertEqual(data['user'], {'human': [0], 'bot': [0]})

        FillState.objects.create(
            property='messages_sent:message_type:day', end_time=self.end_times_day[0],
            state=FillState.DONE)
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_by_message_type'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data['everyone'], {
            'Public streams': [0], 'Private streams': [0],
            'Private messages': [0], 'Group private messages': [0]})
        self.assertEqual(data['user'], {
            'Public streams': [0], 'Private streams': [0],
            'Private messages': [0], 'Group private messages': [0]})

        FillState.objects.create(
            property='messages_sent:client:day', end_time=self.end_times_day[0],
            state=FillState.DONE)
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_by_client'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data['everyone'], {})
        self.assertEqual(data['user'], {})

    def test_start_and_end(self) -> None:
        stat = COUNT_STATS['realm_active_humans::day']
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS['1day_actives::day']
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS['active_users_audit:is_bot:day']
        self.insert_data(stat, ['false'], [])
        end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day]

        # valid start and end
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'number_of_humans',
                                  'start': end_time_timestamps[1],
                                  'end': end_time_timestamps[2]})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data['end_times'], end_time_timestamps[1:3])
        self.assertEqual(data['everyone'], {'_1day': [0, 100], '_15day': [0, 100], 'all_time': [0, 100]})

        # start later then end
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'number_of_humans',
                                  'start': end_time_timestamps[2],
                                  'end': end_time_timestamps[1]})
        self.assert_json_error_contains(result, 'Start time is later than')

    def test_min_length(self) -> None:
        stat = COUNT_STATS['realm_active_humans::day']
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS['1day_actives::day']
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS['active_users_audit:is_bot:day']
        self.insert_data(stat, ['false'], [])
        # test min_length is too short to change anything
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'number_of_humans',
                                  'min_length': 2})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in self.end_times_day])
        self.assertEqual(data['everyone'], {'_1day': self.data(100), '_15day': self.data(100), 'all_time': self.data(100)})
        # test min_length larger than filled data
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'number_of_humans',
                                  'min_length': 5})
        self.assert_json_success(result)
        data = result.json()
        end_times = [ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)]
        self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in end_times])
        self.assertEqual(data['everyone'], {'_1day': [0]+self.data(100), '_15day': [0]+self.data(100), 'all_time': [0]+self.data(100)})

    def test_non_existent_chart(self) -> None:
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'does_not_exist'})
        self.assert_json_error_contains(result, 'Unknown chart name')

    def test_analytics_not_running(self) -> None:
        # try to get data for a valid chart, but before we've put anything in the database
        # (e.g. before update_analytics_counts has been run)
        with mock.patch('logging.warning'):
            result = self.client_get('/json/analytics/chart_data',
                                     {'chart_name': 'number_of_humans'})
        self.assert_json_error_contains(result, 'No analytics data available')

    def test_get_chart_data_for_realm(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login(user_profile.email)

        result = self.client_get('/json/analytics/chart_data/realm/zulip/',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_error(result, "Must be an server administrator", 400)

        user_profile = self.example_user('hamlet')
        user_profile.is_staff = True
        user_profile.save(update_fields=['is_staff'])
        stat = COUNT_STATS['realm_active_humans::day']
        self.insert_data(stat, [None], [])

        result = self.client_get('/json/analytics/chart_data/realm/not_existing_realm',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_error(result, 'Invalid organization', 400)

        result = self.client_get('/json/analytics/chart_data/realm/zulip',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_success(result)

    def test_get_chart_data_for_installation(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login(user_profile.email)

        result = self.client_get('/json/analytics/chart_data/installation',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_error(result, "Must be an server administrator", 400)

        user_profile = self.example_user('hamlet')
        user_profile.is_staff = True
        user_profile.save(update_fields=['is_staff'])
        stat = COUNT_STATS['realm_active_humans::day']
        self.insert_data(stat, [None], [])

        result = self.client_get('/json/analytics/chart_data/installation',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_success(result)

class TestGetChartDataHelpers(ZulipTestCase):
    # last_successful_fill is in analytics/models.py, but get_chart_data is
    # the only function that uses it at the moment
    def test_last_successful_fill(self) -> None:
        self.assertIsNone(last_successful_fill('non-existant'))
        a_time = datetime(2016, 3, 14, 19).replace(tzinfo=utc)
        one_hour_before = datetime(2016, 3, 14, 18).replace(tzinfo=utc)
        fillstate = FillState.objects.create(property='property', end_time=a_time,
                                             state=FillState.DONE)
        self.assertEqual(last_successful_fill('property'), a_time)
        fillstate.state = FillState.STARTED
        fillstate.save()
        self.assertEqual(last_successful_fill('property'), one_hour_before)

    def test_sort_by_totals(self) -> None:
        empty = []  # type: List[int]
        value_arrays = {'c': [0, 1], 'a': [9], 'b': [1, 1, 1], 'd': empty}
        self.assertEqual(sort_by_totals(value_arrays), ['a', 'b', 'c', 'd'])

    def test_sort_client_labels(self) -> None:
        data = {'everyone': {'a': [16], 'c': [15], 'b': [14], 'e': [13], 'd': [12], 'h': [11]},
                'user': {'a': [6], 'b': [5], 'd': [4], 'e': [3], 'f': [2], 'g': [1]}}
        self.assertEqual(sort_client_labels(data), ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'])

class TestTimeRange(ZulipTestCase):
    def test_time_range(self) -> None:
        HOUR = timedelta(hours=1)
        DAY = timedelta(days=1)

        a_time = datetime(2016, 3, 14, 22, 59).replace(tzinfo=utc)
        floor_hour = datetime(2016, 3, 14, 22).replace(tzinfo=utc)
        floor_day = datetime(2016, 3, 14).replace(tzinfo=utc)

        # test start == end
        self.assertEqual(time_range(a_time, a_time, CountStat.HOUR, None), [])
        self.assertEqual(time_range(a_time, a_time, CountStat.DAY, None), [])
        # test start == end == boundary, and min_length == 0
        self.assertEqual(time_range(floor_hour, floor_hour, CountStat.HOUR, 0), [floor_hour])
        self.assertEqual(time_range(floor_day, floor_day, CountStat.DAY, 0), [floor_day])
        # test start and end on different boundaries
        self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, None),
                         [floor_hour, floor_hour+HOUR])
        self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, None),
                         [floor_day, floor_day+DAY])
        # test min_length
        self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, 4),
                         [floor_hour-2*HOUR, floor_hour-HOUR, floor_hour, floor_hour+HOUR])
        self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, 4),
                         [floor_day-2*DAY, floor_day-DAY, floor_day, floor_day+DAY])

class TestMapArrays(ZulipTestCase):
    def test_map_arrays(self) -> None:
        a = {'desktop app 1.0': [1, 2, 3],
             'desktop app 2.0': [10, 12, 13],
             'desktop app 3.0': [21, 22, 23],
             'website': [1, 2, 3],
             'ZulipiOS': [1, 2, 3],
             'ZulipElectron': [2, 5, 7],
             'ZulipMobile': [1, 5, 7],
             'ZulipPython': [1, 2, 3],
             'API: Python': [1, 2, 3],
             'SomethingRandom': [4, 5, 6],
             'ZulipGitHubWebhook': [7, 7, 9],
             'ZulipAndroid': [64, 63, 65]}
        result = rewrite_client_arrays(a)
        self.assertEqual(result,
                         {'Old desktop app': [32, 36, 39],
                          'Old iOS app': [1, 2, 3],
                          'Desktop app': [2, 5, 7],
                          'Mobile app': [1, 5, 7],
                          'Website': [1, 2, 3],
                          'Python API': [2, 4, 6],
                          'SomethingRandom': [4, 5, 6],
                          'GitHub webhook': [7, 7, 9],
                          'Old Android app': [64, 63, 65]})
@@ -1,41 +1,31 @@
from typing import List, Union
from django.conf.urls import include, url

from django.conf.urls import include
from django.urls import path
from django.urls.resolvers import URLPattern, URLResolver
import analytics.views
from zerver.lib.rest import rest_dispatch

from analytics.views.installation_activity import get_installation_activity
from analytics.views.realm_activity import get_realm_activity
from analytics.views.stats import (
    get_chart_data,
    get_chart_data_for_installation,
    get_chart_data_for_realm,
    get_chart_data_for_remote_installation,
    get_chart_data_for_remote_realm,
    stats,
    stats_for_installation,
    stats_for_realm,
    stats_for_remote_installation,
    stats_for_remote_realm,
)
from analytics.views.support import support
from analytics.views.user_activity import get_user_activity
from zerver.lib.rest import rest_path

i18n_urlpatterns: List[Union[URLPattern, URLResolver]] = [
i18n_urlpatterns = [
    # Server admin (user_profile.is_staff) visible stats pages
    path("activity", get_installation_activity),
    path("activity/support", support, name="support"),
    path("realm_activity/<realm_str>/", get_realm_activity),
    path("user_activity/<user_profile_id>/", get_user_activity),
    path("stats/realm/<realm_str>/", stats_for_realm),
    path("stats/installation", stats_for_installation),
    path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation),
    path(
        "stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/", stats_for_remote_realm
    ),
    url(r'^activity$', analytics.views.get_activity,
        name='analytics.views.get_activity'),
    url(r'^realm_activity/(?P<realm_str>[\S]+)/$', analytics.views.get_realm_activity,
        name='analytics.views.get_realm_activity'),
    url(r'^user_activity/(?P<email>[\S]+)/$', analytics.views.get_user_activity,
        name='analytics.views.get_user_activity'),

    url(r'^stats/realm/(?P<realm_str>[\S]+)/$', analytics.views.stats_for_realm,
        name='analytics.views.stats_for_realm'),
    url(r'^stats/installation$', analytics.views.stats_for_installation,
        name='analytics.views.stats_for_installation'),
    url(r'^stats/remote/(?P<remote_server_id>[\S]+)/installation$',
        analytics.views.stats_for_remote_installation,
        name='analytics.views.stats_for_remote_installation'),
    url(r'^stats/remote/(?P<remote_server_id>[\S]+)/realm/(?P<remote_realm_id>[\S]+)/$',
        analytics.views.stats_for_remote_realm,
        name='analytics.views.stats_for_remote_realm'),

    # User-visible stats page
    path("stats", stats, name="stats"),
    url(r'^stats$', analytics.views.stats,
        name='analytics.views.stats'),
]

# These endpoints are a part of the API (V1), which uses:
@@ -48,22 +38,22 @@ i18n_urlpatterns: List[Union[URLPattern, URLResolver]] = [
# All of these paths are accessed by either a /json or /api prefix
v1_api_and_json_patterns = [
    # get data for the graphs at /stats
    rest_path("analytics/chart_data", GET=get_chart_data),
    rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm),
    rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation),
    rest_path(
        "analytics/chart_data/remote/<int:remote_server_id>/installation",
        GET=get_chart_data_for_remote_installation,
    ),
    rest_path(
        "analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>",
        GET=get_chart_data_for_remote_realm,
    ),
    url(r'^analytics/chart_data$', rest_dispatch,
        {'GET': 'analytics.views.get_chart_data'}),
    url(r'^analytics/chart_data/realm/(?P<realm_str>[\S]+)$', rest_dispatch,
        {'GET': 'analytics.views.get_chart_data_for_realm'}),
    url(r'^analytics/chart_data/installation$', rest_dispatch,
        {'GET': 'analytics.views.get_chart_data_for_installation'}),
    url(r'^analytics/chart_data/remote/(?P<remote_server_id>[\S]+)/installation$', rest_dispatch,
        {'GET': 'analytics.views.get_chart_data_for_remote_installation'}),
    url(r'^analytics/chart_data/remote/(?P<remote_server_id>[\S]+)/realm/(?P<remote_realm_id>[\S]+)$',
        rest_dispatch,
        {'GET': 'analytics.views.get_chart_data_for_remote_realm'}),
]

i18n_urlpatterns += [
    path("api/v1/", include(v1_api_and_json_patterns)),
    path("json/", include(v1_api_and_json_patterns)),
    url(r'^api/v1/', include(v1_api_and_json_patterns)),
    url(r'^json/', include(v1_api_and_json_patterns)),
]

urlpatterns = i18n_urlpatterns
analytics/views.py
File diff suppressed because it is too large
@@ -1,137 +0,0 @@
import re
from datetime import datetime
from html import escape
from typing import Any, Dict, List, Optional, Sequence

import pytz
from django.conf import settings
from django.db.backends.utils import CursorWrapper
from django.db.models.query import QuerySet
from django.template import loader
from django.urls import reverse
from markupsafe import Markup as mark_safe

eastern_tz = pytz.timezone("US/Eastern")


if settings.BILLING_ENABLED:
    pass


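# Render rows as an HTML table via the ad_hoc_query template, normalizing
# plain rows into dicts with an optional row_class when none is provided.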
def make_table(
    title: str, cols: Sequence[str], rows: Sequence[Any], has_row_class: bool = False
) -> str:

    if not has_row_class:

        def fix_row(row: Any) -> Dict[str, Any]:
            return dict(cells=row, row_class=None)

        rows = list(map(fix_row, rows))

    data = dict(title=title, cols=cols, rows=rows)

    content = loader.render_to_string(
        "analytics/ad_hoc_query.html",
        dict(data=data),
    )

    return content


def dictfetchall(cursor: CursorWrapper) -> List[Dict[str, Any]]:
    "Returns all rows from a cursor as a dict"
    desc = cursor.description
    return [dict(zip((col[0] for col in desc), row)) for row in cursor.fetchall()]


def format_date_for_activity_reports(date: Optional[datetime]) -> str:
    if date:
        return date.astimezone(eastern_tz).strftime("%Y-%m-%d %H:%M")
    else:
        return ""


def user_activity_link(email: str, user_profile_id: int) -> mark_safe:
    from analytics.views.user_activity import get_user_activity

    url = reverse(get_user_activity, kwargs=dict(user_profile_id=user_profile_id))
    email_link = f'<a href="{escape(url)}">{escape(email)}</a>'
    return mark_safe(email_link)


def realm_activity_link(realm_str: str) -> mark_safe:
    from analytics.views.realm_activity import get_realm_activity

    url = reverse(get_realm_activity, kwargs=dict(realm_str=realm_str))
    realm_link = f'<a href="{escape(url)}">{escape(realm_str)}</a>'
    return mark_safe(realm_link)


def realm_stats_link(realm_str: str) -> mark_safe:
    from analytics.views.stats import stats_for_realm

    url = reverse(stats_for_realm, kwargs=dict(realm_str=realm_str))
    stats_link = f'<a href="{escape(url)}"><i class="fa fa-pie-chart"></i>{escape(realm_str)}</a>'
    return mark_safe(stats_link)


def remote_installation_stats_link(server_id: int, hostname: str) -> mark_safe:
    from analytics.views.stats import stats_for_remote_installation

    url = reverse(stats_for_remote_installation, kwargs=dict(remote_server_id=server_id))
    stats_link = f'<a href="{escape(url)}"><i class="fa fa-pie-chart"></i>{escape(hostname)}</a>'
    return mark_safe(stats_link)


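# Fold a user's UserActivity records into one summary dict: per-action hit
# counts and most-recent visit times, keyed by client or action name.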
def get_user_activity_summary(records: List[QuerySet]) -> Dict[str, Any]:
    #: The type annotation used above is clearly overly permissive.
    #: We should perhaps use TypedDict to clearly lay out the schema
    #: for the user activity summary.
    summary: Dict[str, Any] = {}

    def update(action: str, record: QuerySet) -> None:
        if action not in summary:
            summary[action] = dict(
                count=record.count,
                last_visit=record.last_visit,
            )
        else:
            summary[action]["count"] += record.count
            summary[action]["last_visit"] = max(
                summary[action]["last_visit"],
                record.last_visit,
            )

    if records:
        summary["name"] = records[0].user_profile.full_name
        summary["user_profile_id"] = records[0].user_profile.id

    for record in records:
        client = record.client.name
        query = str(record.query)

        update("use", record)

        if client == "API":
            m = re.match("/api/.*/external/(.*)", query)
            if m:
                client = m.group(1)
                update(client, record)

        if client.startswith("desktop"):
            update("desktop", record)
        if client == "website":
            update("website", record)
        if ("send_message" in query) or re.search("/api/.*/external/.*", query):
            update("send", record)
        if query in [
            "/json/update_pointer",
            "/json/users/me/pointer",
            "/api/v1/update_pointer",
            "update_pointer_backend",
        ]:
            update("pointer", record)
        update(client, record)

    return summary
@@ -1,622 +0,0 @@
import itertools
import time
from collections import defaultdict
from datetime import datetime, timedelta
from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union

from django.conf import settings
from django.db import connection
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from django.template import loader
from django.utils.timezone import now as timezone_now
from markupsafe import Markup as mark_safe
from psycopg2.sql import SQL, Composable, Literal

from analytics.lib.counts import COUNT_STATS
from analytics.views.activity_common import (
    dictfetchall,
    format_date_for_activity_reports,
    make_table,
    realm_activity_link,
    realm_stats_link,
    remote_installation_stats_link,
)
from analytics.views.support import get_plan_name
from zerver.decorator import require_server_admin
from zerver.lib.request import has_request_variables
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.models import Realm, UserActivityInterval, UserProfile, get_org_type_display_name

if settings.BILLING_ENABLED:
    from corporate.lib.stripe import (
        estimate_annual_recurring_revenue_by_realm,
        get_realms_to_default_discount_dict,
    )


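# Build, for each realm, a row of HTML table cells with daily message counts
# for the last eight days, marking each realm's minimum day "bad" and its
# maximum day "good".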
def get_realm_day_counts() -> Dict[str, Dict[str, str]]:
    query = SQL(
        """
        select
            r.string_id,
            (now()::date - date_sent::date) age,
            count(*) cnt
        from zerver_message m
        join zerver_userprofile up on up.id = m.sender_id
        join zerver_realm r on r.id = up.realm_id
        join zerver_client c on c.id = m.sending_client_id
        where
            (not up.is_bot)
        and
            date_sent > now()::date - interval '8 day'
        and
            c.name not in ('zephyr_mirror', 'ZulipMonitoring')
        group by
            r.string_id,
            age
        order by
            r.string_id,
            age
    """
    )
    cursor = connection.cursor()
    cursor.execute(query)
    rows = dictfetchall(cursor)
    cursor.close()

    counts: Dict[str, Dict[int, int]] = defaultdict(dict)
    for row in rows:
        counts[row["string_id"]][row["age"]] = row["cnt"]

    result = {}
    for string_id in counts:
        raw_cnts = [counts[string_id].get(age, 0) for age in range(8)]
        min_cnt = min(raw_cnts[1:])
        max_cnt = max(raw_cnts[1:])

        def format_count(cnt: int, style: Optional[str] = None) -> str:
            if style is not None:
                good_bad = style
            elif cnt == min_cnt:
                good_bad = "bad"
            elif cnt == max_cnt:
                good_bad = "good"
            else:
                good_bad = "neutral"

            return f'<td class="number {good_bad}">{cnt}</td>'

        cnts = format_count(raw_cnts[0], "neutral") + "".join(map(format_count, raw_cnts[1:]))
        result[string_id] = dict(cnts=cnts)

    return result


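# One summary row per realm: DAU/WAU and human/bot counts pulled from
# analytics_realmcount at each stat's last successful fill time, plus
# billing, owner, and age metadata.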
def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
    now = timezone_now()

    query = SQL(
        """
        SELECT
            realm.string_id,
            realm.date_created,
            realm.plan_type,
            realm.org_type,
            coalesce(wau_table.value, 0) wau_count,
            coalesce(dau_table.value, 0) dau_count,
            coalesce(user_count_table.value, 0) user_profile_count,
            coalesce(bot_count_table.value, 0) bot_count
        FROM
            zerver_realm as realm
            LEFT OUTER JOIN (
                SELECT
                    value _14day_active_humans,
                    realm_id
                from
                    analytics_realmcount
                WHERE
                    property = 'realm_active_humans::day'
                    AND end_time = %(realm_active_humans_end_time)s
            ) as _14day_active_humans_table ON realm.id = _14day_active_humans_table.realm_id
            LEFT OUTER JOIN (
                SELECT
                    value,
                    realm_id
                from
                    analytics_realmcount
                WHERE
                    property = '7day_actives::day'
                    AND end_time = %(seven_day_actives_end_time)s
            ) as wau_table ON realm.id = wau_table.realm_id
            LEFT OUTER JOIN (
                SELECT
                    value,
                    realm_id
                from
                    analytics_realmcount
                WHERE
                    property = '1day_actives::day'
                    AND end_time = %(one_day_actives_end_time)s
            ) as dau_table ON realm.id = dau_table.realm_id
            LEFT OUTER JOIN (
                SELECT
                    value,
                    realm_id
                from
                    analytics_realmcount
                WHERE
                    property = 'active_users_audit:is_bot:day'
                    AND subgroup = 'false'
                    AND end_time = %(active_users_audit_end_time)s
            ) as user_count_table ON realm.id = user_count_table.realm_id
            LEFT OUTER JOIN (
                SELECT
                    value,
                    realm_id
                from
                    analytics_realmcount
                WHERE
                    property = 'active_users_audit:is_bot:day'
                    AND subgroup = 'true'
                    AND end_time = %(active_users_audit_end_time)s
            ) as bot_count_table ON realm.id = bot_count_table.realm_id
        WHERE
            _14day_active_humans IS NOT NULL
            or realm.plan_type = 3
        ORDER BY
            dau_count DESC,
            string_id ASC
    """
    )

    cursor = connection.cursor()
    cursor.execute(
        query,
        {
            "realm_active_humans_end_time": COUNT_STATS[
                "realm_active_humans::day"
            ].last_successful_fill(),
            "seven_day_actives_end_time": COUNT_STATS["7day_actives::day"].last_successful_fill(),
            "one_day_actives_end_time": COUNT_STATS["1day_actives::day"].last_successful_fill(),
            "active_users_audit_end_time": COUNT_STATS[
                "active_users_audit:is_bot:day"
            ].last_successful_fill(),
        },
    )
    rows = dictfetchall(cursor)
    cursor.close()

    # Fetch all the realm administrator users
    realm_owners: Dict[str, List[str]] = defaultdict(list)
    for up in UserProfile.objects.select_related("realm").filter(
        role=UserProfile.ROLE_REALM_OWNER,
        is_active=True,
    ):
        realm_owners[up.realm.string_id].append(up.delivery_email)

    for row in rows:
        row["date_created_day"] = row["date_created"].strftime("%Y-%m-%d")
        row["age_days"] = int((now - row["date_created"]).total_seconds() / 86400)
        row["is_new"] = row["age_days"] < 12 * 7
        row["realm_owner_emails"] = ", ".join(realm_owners[row["string_id"]])

    # get messages sent per day
    counts = get_realm_day_counts()
    for row in rows:
        try:
            row["history"] = counts[row["string_id"]]["cnts"]
        except Exception:
            row["history"] = ""

    # estimate annual subscription revenue
    total_arr = 0
    if settings.BILLING_ENABLED:
        estimated_arrs = estimate_annual_recurring_revenue_by_realm()
        realms_to_default_discount = get_realms_to_default_discount_dict()

        for row in rows:
            row["plan_type_string"] = get_plan_name(row["plan_type"])

            string_id = row["string_id"]

            if string_id in estimated_arrs:
                row["arr"] = estimated_arrs[string_id]

            if row["plan_type"] in [Realm.PLAN_TYPE_STANDARD, Realm.PLAN_TYPE_PLUS]:
                row["effective_rate"] = 100 - int(realms_to_default_discount.get(string_id, 0))
            elif row["plan_type"] == Realm.PLAN_TYPE_STANDARD_FREE:
                row["effective_rate"] = 0
            elif (
                row["plan_type"] == Realm.PLAN_TYPE_LIMITED
                and string_id in realms_to_default_discount
            ):
                row["effective_rate"] = 100 - int(realms_to_default_discount[string_id])
            else:
                row["effective_rate"] = ""

        total_arr += sum(estimated_arrs.values())

    for row in rows:
        row["org_type_string"] = get_org_type_display_name(row["org_type"])

    # augment data with realm_minutes
    total_hours = 0.0
    for row in rows:
        string_id = row["string_id"]
        minutes = realm_minutes.get(string_id, 0.0)
        hours = minutes / 60.0
        total_hours += hours
        row["hours"] = str(int(hours))
        try:
            row["hours_per_user"] = "{:.1f}".format(hours / row["dau_count"])
        except Exception:
            pass

    # formatting
    for row in rows:
        row["stats_link"] = realm_stats_link(row["string_id"])
        row["string_id"] = realm_activity_link(row["string_id"])

    # Count active sites
    def meets_goal(row: Dict[str, int]) -> bool:
        return row["dau_count"] >= 5

    num_active_sites = len(list(filter(meets_goal, rows)))

    # create totals
    total_dau_count = 0
    total_user_profile_count = 0
    total_bot_count = 0
    total_wau_count = 0
    for row in rows:
        total_dau_count += int(row["dau_count"])
        total_user_profile_count += int(row["user_profile_count"])
        total_bot_count += int(row["bot_count"])
        total_wau_count += int(row["wau_count"])

    total_row = dict(
        string_id="Total",
        plan_type_string="",
        org_type_string="",
        effective_rate="",
        arr=total_arr,
        stats_link="",
        date_created_day="",
        realm_owner_emails="",
        dau_count=total_dau_count,
        user_profile_count=total_user_profile_count,
        bot_count=total_bot_count,
        hours=int(total_hours),
        wau_count=total_wau_count,
    )

    rows.insert(0, total_row)

    content = loader.render_to_string(
        "analytics/realm_summary_table.html",
        dict(
            rows=rows,
            num_active_sites=num_active_sites,
            utctime=now.strftime("%Y-%m-%d %H:%MZ"),
            billing_enabled=settings.BILLING_ENABLED,
        ),
    )
    return content


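# Sum each user's UserActivityInterval overlap with the last 24 hours,
# grouped by realm, and render the per-user durations as a <pre> report.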
def user_activity_intervals() -> Tuple[mark_safe, Dict[str, float]]:
    day_end = timestamp_to_datetime(time.time())
    day_start = day_end - timedelta(hours=24)

    output = "Per-user online duration for the last 24 hours:\n"
    total_duration = timedelta(0)

    all_intervals = (
        UserActivityInterval.objects.filter(
            end__gte=day_start,
            start__lte=day_end,
        )
        .select_related(
            "user_profile",
            "user_profile__realm",
        )
        .only(
            "start",
            "end",
            "user_profile__delivery_email",
            "user_profile__realm__string_id",
        )
        .order_by(
            "user_profile__realm__string_id",
            "user_profile__delivery_email",
        )
    )

    by_string_id = lambda row: row.user_profile.realm.string_id
    by_email = lambda row: row.user_profile.delivery_email

    realm_minutes = {}

    for string_id, realm_intervals in itertools.groupby(all_intervals, by_string_id):
        realm_duration = timedelta(0)
        output += f"<hr>{string_id}\n"
        for email, intervals in itertools.groupby(realm_intervals, by_email):
            duration = timedelta(0)
            for interval in intervals:
                start = max(day_start, interval.start)
                end = min(day_end, interval.end)
                duration += end - start

            total_duration += duration
            realm_duration += duration
            output += f"  {email:<37}{duration}\n"

        realm_minutes[string_id] = realm_duration.total_seconds() / 60

    output += f"\nTotal duration: {total_duration}\n"
    output += f"\nTotal duration in minutes: {total_duration.total_seconds() / 60.}\n"
    output += f"Total duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}"
    content = mark_safe("<pre>" + output + "</pre>")
    return content, realm_minutes


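# Each ad hoc query below becomes its own table on the /activity page;
# get_page runs the SQL, applies per-column fixups, and optionally prepends
# a totals row.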
def ad_hoc_queries() -> List[Dict[str, str]]:
|
||||
def get_page(
|
||||
query: Composable, cols: Sequence[str], title: str, totals_columns: Sequence[int] = []
|
||||
) -> Dict[str, str]:
|
||||
cursor = connection.cursor()
|
||||
cursor.execute(query)
|
||||
rows = cursor.fetchall()
|
||||
rows = list(map(list, rows))
|
||||
cursor.close()
|
||||
|
||||
def fix_rows(
|
||||
i: int, fixup_func: Union[Callable[[str], mark_safe], Callable[[datetime], str]]
|
||||
) -> None:
|
||||
for row in rows:
|
||||
row[i] = fixup_func(row[i])
|
||||
|
||||
total_row = []
|
||||
for i, col in enumerate(cols):
|
||||
if col == "Realm":
|
||||
fix_rows(i, realm_activity_link)
|
||||
elif col in ["Last time", "Last visit"]:
|
||||
fix_rows(i, format_date_for_activity_reports)
|
||||
elif col == "Hostname":
|
||||
for row in rows:
|
||||
row[i] = remote_installation_stats_link(row[0], row[i])
|
||||
if len(totals_columns) > 0:
|
||||
if i == 0:
|
||||
total_row.append("Total")
|
||||
elif i in totals_columns:
|
||||
total_row.append(str(sum(row[i] for row in rows if row[i] is not None)))
|
||||
else:
|
||||
total_row.append("")
|
||||
if len(totals_columns) > 0:
|
||||
rows.insert(0, total_row)
|
||||
|
||||
content = make_table(title, cols, rows)
|
||||
|
||||
return dict(
|
||||
content=content,
|
||||
title=title,
|
||||
)
|
||||
|
||||
pages = []
|
||||
|
||||
###
|
||||
|
||||
for mobile_type in ["Android", "ZulipiOS"]:
|
||||
title = f"{mobile_type} usage"
|
||||
|
||||
query = SQL(
|
||||
"""
|
||||
select
|
||||
realm.string_id,
|
||||
up.id user_id,
|
||||
client.name,
|
||||
sum(count) as hits,
|
||||
max(last_visit) as last_time
|
||||
from zerver_useractivity ua
|
||||
join zerver_client client on client.id = ua.client_id
|
||||
join zerver_userprofile up on up.id = ua.user_profile_id
|
||||
join zerver_realm realm on realm.id = up.realm_id
|
||||
where
|
||||
client.name like {mobile_type}
|
||||
group by string_id, up.id, client.name
|
||||
having max(last_visit) > now() - interval '2 week'
|
||||
order by string_id, up.id, client.name
|
||||
"""
|
||||
).format(
|
||||
mobile_type=Literal(mobile_type),
|
||||
)
|
||||
|
||||
cols = [
|
||||
"Realm",
|
||||
"User id",
|
||||
"Name",
|
||||
"Hits",
|
||||
"Last time",
|
||||
]
|
||||
|
||||
pages.append(get_page(query, cols, title))
|
||||
|
||||
###
|
||||
|
||||
title = "Desktop users"
|
||||
|
||||
query = SQL(
|
||||
"""
|
||||
select
|
||||
realm.string_id,
|
||||
client.name,
|
||||
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            client.name like 'desktop%%'
        group by string_id, client.name
        having max(last_visit) > now() - interval '2 week'
        order by string_id, client.name
        """
    )

    cols = [
        "Realm",
        "Client",
        "Hits",
        "Last time",
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = "Integrations by realm"

    query = SQL(
        """
        select
            realm.string_id,
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by string_id, client_name
        having max(last_visit) > now() - interval '2 week'
        order by string_id, client_name
        """
    )

    cols = [
        "Realm",
        "Client",
        "Hits",
        "Last time",
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = "Integrations by client"

    query = SQL(
        """
        select
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            realm.string_id,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by client_name, string_id
        having max(last_visit) > now() - interval '2 week'
        order by client_name, string_id
        """
    )

    cols = [
        "Client",
        "Realm",
        "Hits",
        "Last time",
    ]

    pages.append(get_page(query, cols, title))

    title = "Remote Zulip servers"

    query = SQL(
        """
        with icount as (
            select
                server_id,
                max(value) as max_value,
                max(end_time) as max_end_time
            from zilencer_remoteinstallationcount
            where
                property='active_users:is_bot:day'
                and subgroup='false'
            group by server_id
        ),
        remote_push_devices as (
            select server_id, count(distinct(user_id)) as push_user_count from zilencer_remotepushdevicetoken
            group by server_id
        )
        select
            rserver.id,
            rserver.hostname,
            rserver.contact_email,
            max_value,
            push_user_count,
            max_end_time
        from zilencer_remotezulipserver rserver
        left join icount on icount.server_id = rserver.id
        left join remote_push_devices on remote_push_devices.server_id = rserver.id
        order by max_value DESC NULLS LAST, push_user_count DESC NULLS LAST
        """
    )

    cols = [
        "ID",
        "Hostname",
        "Contact email",
        "Analytics users",
        "Mobile users",
        "Last update time",
    ]

    pages.append(get_page(query, cols, title, totals_columns=[3, 4]))

    return pages


@require_server_admin
@has_request_variables
def get_installation_activity(request: HttpRequest) -> HttpResponse:
    duration_content, realm_minutes = user_activity_intervals()
    counts_content: str = realm_summary_table(realm_minutes)
    data = [
        ("Counts", counts_content),
        ("Durations", duration_content),
    ]
    for page in ad_hoc_queries():
        data.append((page["title"], page["content"]))

    title = "Activity"

    return render(
        request,
        "analytics/activity.html",
        context=dict(data=data, title=title, is_home=True),
    )
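The doubled `%%` in the LIKE patterns above is deliberate: when a query string is executed through a DB-API cursor with a parameter list (as psycopg2 does under Django), `%` introduces a placeholder, so a literal percent sign has to be escaped as `%%`. A minimal sketch of the rule, with a hypothetical id filter:

# Sketch only: shows the standard psycopg2/DB-API escaping rule these queries rely on.
from django.db import connection

with connection.cursor() as cursor:
    # %s is filled in from the params list; the literal LIKE wildcard must be %%.
    cursor.execute(
        "select name from zerver_client where name like 'desktop%%' and id > %s",
        [0],
    )
    rows = cursor.fetchall()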
@@ -1,259 +0,0 @@
import itertools
from datetime import datetime
from typing import Any, Dict, List, Optional, Set, Tuple

from django.db import connection
from django.db.models.query import QuerySet
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
from django.shortcuts import render
from django.utils.timezone import now as timezone_now
from psycopg2.sql import SQL

from analytics.views.activity_common import (
    format_date_for_activity_reports,
    get_user_activity_summary,
    make_table,
    user_activity_link,
)
from zerver.decorator import require_server_admin
from zerver.models import Realm, UserActivity


def get_user_activity_records_for_realm(realm: str, is_bot: bool) -> QuerySet:
    fields = [
        "user_profile__full_name",
        "user_profile__delivery_email",
        "query",
        "client__name",
        "count",
        "last_visit",
    ]

    records = UserActivity.objects.filter(
        user_profile__realm__string_id=realm,
        user_profile__is_active=True,
        user_profile__is_bot=is_bot,
    )
    records = records.order_by("user_profile__delivery_email", "-last_visit")
    records = records.select_related("user_profile", "client").only(*fields)
    return records


def realm_user_summary_table(
    all_records: List[QuerySet], admin_emails: Set[str]
) -> Tuple[Dict[str, Any], str]:
    user_records = {}

    def by_email(record: QuerySet) -> str:
        return record.user_profile.delivery_email

    for email, records in itertools.groupby(all_records, by_email):
        user_records[email] = get_user_activity_summary(list(records))

    def get_last_visit(user_summary: Dict[str, Dict[str, datetime]], k: str) -> Optional[datetime]:
        if k in user_summary:
            return user_summary[k]["last_visit"]
        else:
            return None

    def get_count(user_summary: Dict[str, Dict[str, str]], k: str) -> str:
        if k in user_summary:
            return user_summary[k]["count"]
        else:
            return ""

    def is_recent(val: datetime) -> bool:
        age = timezone_now() - val
        return age.total_seconds() < 5 * 60

    rows = []
    for email, user_summary in user_records.items():
        email_link = user_activity_link(email, user_summary["user_profile_id"])
        sent_count = get_count(user_summary, "send")
        cells = [user_summary["name"], email_link, sent_count]
        row_class = ""
        for field in ["use", "send", "pointer", "desktop", "ZulipiOS", "Android"]:
            visit = get_last_visit(user_summary, field)
            if field == "use":
                if visit and is_recent(visit):
                    row_class += " recently_active"
                if email in admin_emails:
                    row_class += " admin"
            val = format_date_for_activity_reports(visit)
            cells.append(val)
        row = dict(cells=cells, row_class=row_class)
        rows.append(row)

    def by_used_time(row: Dict[str, Any]) -> str:
        return row["cells"][3]

    rows = sorted(rows, key=by_used_time, reverse=True)

    cols = [
        "Name",
        "Email",
        "Total sent",
        "Heard from",
        "Message sent",
        "Pointer motion",
        "Desktop",
        "ZulipiOS",
        "Android",
    ]

    title = "Summary"

    content = make_table(title, cols, rows, has_row_class=True)
    return user_records, content


def realm_client_table(user_summaries: Dict[str, Dict[str, Any]]) -> str:
    exclude_keys = [
        "internal",
        "name",
        "user_profile_id",
        "use",
        "send",
        "pointer",
        "website",
        "desktop",
    ]

    rows = []
    for email, user_summary in user_summaries.items():
        email_link = user_activity_link(email, user_summary["user_profile_id"])
        name = user_summary["name"]
        for k, v in user_summary.items():
            if k in exclude_keys:
                continue
            client = k
            count = v["count"]
            last_visit = v["last_visit"]
            row = [
                format_date_for_activity_reports(last_visit),
                client,
                name,
                email_link,
                count,
            ]
            rows.append(row)

    rows = sorted(rows, key=lambda r: r[0], reverse=True)

    cols = [
        "Last visit",
        "Client",
        "Name",
        "Email",
        "Count",
    ]

    title = "Clients"

    return make_table(title, cols, rows)


def sent_messages_report(realm: str) -> str:
    title = "Recently sent messages for " + realm

    cols = [
        "Date",
        "Humans",
        "Bots",
    ]

    query = SQL(
        """
        select
            series.day::date,
            humans.cnt,
            bots.cnt
        from (
            select generate_series(
                (now()::date - interval '2 week'),
                now()::date,
                interval '1 day'
            ) as day
        ) as series
        left join (
            select
                date_sent::date date_sent,
                count(*) cnt
            from zerver_message m
            join zerver_userprofile up on up.id = m.sender_id
            join zerver_realm r on r.id = up.realm_id
            where
                r.string_id = %s
            and
                (not up.is_bot)
            and
                date_sent > now() - interval '2 week'
            group by
                date_sent::date
            order by
                date_sent::date
        ) humans on
            series.day = humans.date_sent
        left join (
            select
                date_sent::date date_sent,
                count(*) cnt
            from zerver_message m
            join zerver_userprofile up on up.id = m.sender_id
            join zerver_realm r on r.id = up.realm_id
            where
                r.string_id = %s
            and
                up.is_bot
            and
                date_sent > now() - interval '2 week'
            group by
                date_sent::date
            order by
                date_sent::date
        ) bots on
            series.day = bots.date_sent
        """
    )
    cursor = connection.cursor()
    cursor.execute(query, [realm, realm])
    rows = cursor.fetchall()
    cursor.close()

    return make_table(title, cols, rows)


@require_server_admin
def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse:
    data: List[Tuple[str, str]] = []
    all_user_records: Dict[str, Any] = {}

    try:
        admins = Realm.objects.get(string_id=realm_str).get_human_admin_users()
    except Realm.DoesNotExist:
        return HttpResponseNotFound()

    admin_emails = {admin.delivery_email for admin in admins}

    for is_bot, page_title in [(False, "Humans"), (True, "Bots")]:
        all_records = list(get_user_activity_records_for_realm(realm_str, is_bot))

        user_records, content = realm_user_summary_table(all_records, admin_emails)
        all_user_records.update(user_records)

        data += [(page_title, content)]

    page_title = "Clients"
    content = realm_client_table(all_user_records)
    data += [(page_title, content)]

    page_title = "History"
    content = sent_messages_report(realm_str)
    data += [(page_title, content)]

    title = realm_str
    return render(
        request,
        "analytics/activity.html",
        context=dict(data=data, realm_link=None, title=title),
    )
@@ -1,514 +0,0 @@
import logging
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional, Tuple, Type, Union, cast

from django.conf import settings
from django.db.models.query import QuerySet
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
from django.shortcuts import render
from django.utils import translation
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _

from analytics.lib.counts import COUNT_STATS, CountStat
from analytics.lib.time_utils import time_range
from analytics.models import (
    BaseCount,
    InstallationCount,
    RealmCount,
    StreamCount,
    UserCount,
    installation_epoch,
)
from zerver.decorator import (
    require_non_guest_user,
    require_server_admin,
    require_server_admin_api,
    to_utc_datetime,
    zulip_login_required,
)
from zerver.lib.exceptions import JsonableError
from zerver.lib.i18n import get_and_set_request_language, get_language_translation_data
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.timestamp import convert_to_UTC
from zerver.lib.validator import to_non_negative_int
from zerver.models import Client, Realm, UserProfile, get_realm

if settings.ZILENCER_ENABLED:
    from zilencer.models import RemoteInstallationCount, RemoteRealmCount, RemoteZulipServer

MAX_TIME_FOR_FULL_ANALYTICS_GENERATION = timedelta(days=1, minutes=30)


def is_analytics_ready(realm: Realm) -> bool:
    return (timezone_now() - realm.date_created) > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION


def render_stats(
    request: HttpRequest,
    data_url_suffix: str,
    target_name: str,
    for_installation: bool = False,
    remote: bool = False,
    analytics_ready: bool = True,
) -> HttpResponse:
    assert request.user.is_authenticated
    page_params = dict(
        data_url_suffix=data_url_suffix,
        for_installation=for_installation,
        remote=remote,
    )

    request_language = get_and_set_request_language(
        request,
        request.user.default_language,
        translation.get_language_from_path(request.path_info),
    )

    page_params["translation_data"] = get_language_translation_data(request_language)

    return render(
        request,
        "analytics/stats.html",
        context=dict(
            target_name=target_name, page_params=page_params, analytics_ready=analytics_ready
        ),
    )


@zulip_login_required
def stats(request: HttpRequest) -> HttpResponse:
    assert request.user.is_authenticated
    realm = request.user.realm
    if request.user.is_guest:
        # TODO: Make @zulip_login_required pass the UserProfile so we
        # can use @require_member_or_admin
        raise JsonableError(_("Not allowed for guest users"))
    return render_stats(
        request, "", realm.name or realm.string_id, analytics_ready=is_analytics_ready(realm)
    )


@require_server_admin
@has_request_variables
def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse:
    try:
        realm = get_realm(realm_str)
    except Realm.DoesNotExist:
        return HttpResponseNotFound()

    return render_stats(
        request,
        f"/realm/{realm_str}",
        realm.name or realm.string_id,
        analytics_ready=is_analytics_ready(realm),
    )


@require_server_admin
@has_request_variables
def stats_for_remote_realm(
    request: HttpRequest, remote_server_id: int, remote_realm_id: int
) -> HttpResponse:
    assert settings.ZILENCER_ENABLED
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    return render_stats(
        request,
        f"/remote/{server.id}/realm/{remote_realm_id}",
        f"Realm {remote_realm_id} on server {server.hostname}",
    )


@require_server_admin_api
@has_request_variables
def get_chart_data_for_realm(
    request: HttpRequest, user_profile: UserProfile, realm_str: str, **kwargs: Any
) -> HttpResponse:
    try:
        realm = get_realm(realm_str)
    except Realm.DoesNotExist:
        raise JsonableError(_("Invalid organization"))

    return get_chart_data(request=request, user_profile=user_profile, realm=realm, **kwargs)


@require_server_admin_api
@has_request_variables
def get_chart_data_for_remote_realm(
    request: HttpRequest,
    user_profile: UserProfile,
    remote_server_id: int,
    remote_realm_id: int,
    **kwargs: Any,
) -> HttpResponse:
    assert settings.ZILENCER_ENABLED
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    return get_chart_data(
        request=request,
        user_profile=user_profile,
        server=server,
        remote=True,
        remote_realm_id=int(remote_realm_id),
        **kwargs,
    )


@require_server_admin
def stats_for_installation(request: HttpRequest) -> HttpResponse:
    return render_stats(request, "/installation", "installation", True)


@require_server_admin
def stats_for_remote_installation(request: HttpRequest, remote_server_id: int) -> HttpResponse:
    assert settings.ZILENCER_ENABLED
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    return render_stats(
        request,
        f"/remote/{server.id}/installation",
        f"remote installation {server.hostname}",
        True,
        True,
    )


@require_server_admin_api
@has_request_variables
def get_chart_data_for_installation(
    request: HttpRequest, user_profile: UserProfile, chart_name: str = REQ(), **kwargs: Any
) -> HttpResponse:
    return get_chart_data(
        request=request, user_profile=user_profile, for_installation=True, **kwargs
    )


@require_server_admin_api
@has_request_variables
def get_chart_data_for_remote_installation(
    request: HttpRequest,
    user_profile: UserProfile,
    remote_server_id: int,
    chart_name: str = REQ(),
    **kwargs: Any,
) -> HttpResponse:
    assert settings.ZILENCER_ENABLED
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    return get_chart_data(
        request=request,
        user_profile=user_profile,
        for_installation=True,
        remote=True,
        server=server,
        **kwargs,
    )


@require_non_guest_user
@has_request_variables
def get_chart_data(
    request: HttpRequest,
    user_profile: UserProfile,
    chart_name: str = REQ(),
    min_length: Optional[int] = REQ(converter=to_non_negative_int, default=None),
    start: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    end: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    realm: Optional[Realm] = None,
    for_installation: bool = False,
    remote: bool = False,
    remote_realm_id: Optional[int] = None,
    server: Optional["RemoteZulipServer"] = None,
) -> HttpResponse:
    TableType = Union[
        Type["RemoteInstallationCount"],
        Type[InstallationCount],
        Type["RemoteRealmCount"],
        Type[RealmCount],
    ]
    if for_installation:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table: TableType = RemoteInstallationCount
            assert server is not None
        else:
            aggregate_table = InstallationCount
    else:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table = RemoteRealmCount
            assert server is not None
            assert remote_realm_id is not None
        else:
            aggregate_table = RealmCount

    tables: Union[Tuple[TableType], Tuple[TableType, Type[UserCount]]]

    if chart_name == "number_of_humans":
        stats = [
            COUNT_STATS["1day_actives::day"],
            COUNT_STATS["realm_active_humans::day"],
            COUNT_STATS["active_users_audit:is_bot:day"],
        ]
        tables = (aggregate_table,)
        subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = {
            stats[0]: {None: "_1day"},
            stats[1]: {None: "_15day"},
            stats[2]: {"false": "all_time"},
        }
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_over_time":
        stats = [COUNT_STATS["messages_sent:is_bot:hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_message_type":
        stats = [COUNT_STATS["messages_sent:message_type:day"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {
            stats[0]: {
                "public_stream": _("Public streams"),
                "private_stream": _("Private streams"),
                "private_message": _("Private messages"),
                "huddle_message": _("Group private messages"),
            }
        }
        labels_sort_function = lambda data: sort_by_totals(data["everyone"])
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_client":
        stats = [COUNT_STATS["messages_sent:client:day"]]
        tables = (aggregate_table, UserCount)
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {
            stats[0]: {str(id): name for id, name in Client.objects.values_list("id", "name")}
        }
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    elif chart_name == "messages_read_over_time":
        stats = [COUNT_STATS["messages_read::hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {None: "read"}}
        labels_sort_function = None
        include_empty_subgroups = True
    else:
        raise JsonableError(_("Unknown chart name: {}").format(chart_name))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(
            _("Start time is later than end time. Start: {start}, End: {end}").format(
                start=start,
                end=end,
            )
        )

    if realm is None:
        # Note that this value is invalid for Remote tables; be
        # careful not to access it in those code paths.
        realm = user_profile.realm

    if remote:
        # For remote servers, we don't have fillstate data, and thus
        # should simply use the first and last data points for the
        # table.
        assert server is not None
        assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
        aggregate_table_remote = cast(
            Union[Type[RemoteInstallationCount], Type[RemoteRealmCount]], aggregate_table
        )  # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
        if not aggregate_table_remote.objects.filter(server=server).exists():
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator.")
            )
        if start is None:
            first = aggregate_table_remote.objects.filter(server=server).first()
            assert first is not None
            start = first.end_time
        if end is None:
            last = aggregate_table_remote.objects.filter(server=server).last()
            assert last is not None
            end = last.end_time
    else:
        # Otherwise, we can use tables on the current server to
        # determine a nice range, and some additional validation.
        if start is None:
            if for_installation:
                start = installation_epoch()
            else:
                start = realm.date_created
        if end is None:
            end = max(
                stat.last_successful_fill() or datetime.min.replace(tzinfo=timezone.utc)
                for stat in stats
            )

        if start > end and (timezone_now() - start > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION):
            logging.warning(
                "User from realm %s attempted to access /stats, but the computed "
                "start time: %s (creation of realm or installation) is later than the computed "
                "end time: %s (last successful analytics update). Is the "
                "analytics cron job running?",
                realm.string_id,
                start,
                end,
            )
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator.")
            )

    assert len({stat.frequency for stat in stats}) == 1
    end_times = time_range(start, end, stats[0].frequency, min_length)
    data: Dict[str, Any] = {
        "end_times": [int(end_time.timestamp()) for end_time in end_times],
        "frequency": stats[0].frequency,
    }

    aggregation_level = {
        InstallationCount: "everyone",
        RealmCount: "everyone",
        UserCount: "user",
    }
    if settings.ZILENCER_ENABLED:
        aggregation_level[RemoteInstallationCount] = "everyone"
        aggregation_level[RemoteRealmCount] = "everyone"

    # -1 is a placeholder value, since there is no relevant filtering on InstallationCount
    id_value = {
        InstallationCount: -1,
        RealmCount: realm.id,
        UserCount: user_profile.id,
    }
    if settings.ZILENCER_ENABLED:
        if server is not None:
            id_value[RemoteInstallationCount] = server.id
        # TODO: RemoteRealmCount logic doesn't correctly handle
        # filtering by server_id as well.
        if remote_realm_id is not None:
            id_value[RemoteRealmCount] = remote_realm_id

    for table in tables:
        data[aggregation_level[table]] = {}
        for stat in stats:
            data[aggregation_level[table]].update(
                get_time_series_by_subgroup(
                    stat,
                    table,
                    id_value[table],
                    end_times,
                    subgroup_to_label[stat],
                    include_empty_subgroups,
                )
            )

    if labels_sort_function is not None:
        data["display_order"] = labels_sort_function(data)
    else:
        data["display_order"] = None
    return json_success(request, data=data)


def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
    totals = [(sum(values), label) for label, values in value_arrays.items()]
    totals.sort(reverse=True)
    return [label for total, label in totals]


# For any given user, we want to show a fixed set of clients in the chart,
# regardless of the time aggregation or whether we're looking at realm or
# user data. This fixed set ideally includes the clients most important in
# understanding the realm's traffic and the user's traffic. This function
# tries to rank the clients so that taking the first N elements of the
# sorted list has a reasonable chance of doing so.
def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
    realm_order = sort_by_totals(data["everyone"])
    user_order = sort_by_totals(data["user"])
    label_sort_values: Dict[str, float] = {}
    for i, label in enumerate(realm_order):
        label_sort_values[label] = i
    for i, label in enumerate(user_order):
        label_sort_values[label] = min(i - 0.1, label_sort_values.get(label, i))
    return [label for label, sort_value in sorted(label_sort_values.items(), key=lambda x: x[1])]
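A small worked example may help with the ranking logic just above; the data is hypothetical, and the `i - 0.1` offset is what lets a client the user personally uses overtake an equally ranked realm-wide client (this assumes `sort_client_labels` from this file is in scope):

# Hypothetical illustration of sort_client_labels.
data = {
    "everyone": {"website": [90], "mobile": [50], "api": [10]},
    "user": {"api": [8], "website": [2]},
}
# realm_order = ["website", "mobile", "api"]; user_order = ["api", "website"]
# sort values end up as: api -> -0.1, website -> 0, mobile -> 1
assert sort_client_labels(data) == ["api", "website", "mobile"]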
def table_filtered_to_id(table: Type[BaseCount], key_id: int) -> QuerySet:
    if table == RealmCount:
        return RealmCount.objects.filter(realm_id=key_id)
    elif table == UserCount:
        return UserCount.objects.filter(user_id=key_id)
    elif table == StreamCount:
        return StreamCount.objects.filter(stream_id=key_id)
    elif table == InstallationCount:
        return InstallationCount.objects.all()
    elif settings.ZILENCER_ENABLED and table == RemoteInstallationCount:
        return RemoteInstallationCount.objects.filter(server_id=key_id)
    elif settings.ZILENCER_ENABLED and table == RemoteRealmCount:
        return RemoteRealmCount.objects.filter(realm_id=key_id)
    else:
        raise AssertionError(f"Unknown table: {table}")


def client_label_map(name: str) -> str:
    if name == "website":
        return "Website"
    if name.startswith("desktop app"):
        return "Old desktop app"
    if name == "ZulipElectron":
        return "Desktop app"
    if name == "ZulipAndroid":
        return "Old Android app"
    if name == "ZulipiOS":
        return "Old iOS app"
    if name == "ZulipMobile":
        return "Mobile app"
    if name in ["ZulipPython", "API: Python"]:
        return "Python API"
    if name.startswith("Zulip") and name.endswith("Webhook"):
        return name[len("Zulip") : -len("Webhook")] + " webhook"
    return name


def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[int]]:
    mapped_arrays: Dict[str, List[int]] = {}
    for label, array in value_arrays.items():
        mapped_label = client_label_map(label)
        if mapped_label in mapped_arrays:
            for i in range(0, len(array)):
                mapped_arrays[mapped_label][i] += value_arrays[label][i]
        else:
            mapped_arrays[mapped_label] = [value_arrays[label][i] for i in range(0, len(array))]
    return mapped_arrays


def get_time_series_by_subgroup(
    stat: CountStat,
    table: Type[BaseCount],
    key_id: int,
    end_times: List[datetime],
    subgroup_to_label: Dict[Optional[str], str],
    include_empty_subgroups: bool,
) -> Dict[str, List[int]]:
    queryset = (
        table_filtered_to_id(table, key_id)
        .filter(property=stat.property)
        .values_list("subgroup", "end_time", "value")
    )
    value_dicts: Dict[Optional[str], Dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
    for subgroup, end_time, value in queryset:
        value_dicts[subgroup][end_time] = value
    value_arrays = {}
    for subgroup, label in subgroup_to_label.items():
        if (subgroup in value_dicts) or include_empty_subgroups:
            value_arrays[label] = [value_dicts[subgroup][end_time] for end_time in end_times]

    if stat == COUNT_STATS["messages_sent:client:day"]:
        # HACK: We rewrite these arrays to collapse the Client objects
        # with similar names into a single sum, and generally give
        # them better names
        return rewrite_client_arrays(value_arrays)
    return value_arrays
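A hypothetical example of the collapsing behavior above, using the `client_label_map` rules from this file (assumes `rewrite_client_arrays` is in scope): two raw labels that map to the same display name are summed element-wise.

# Hypothetical illustration of rewrite_client_arrays.
value_arrays = {"ZulipPython": [1, 2], "API: Python": [3, 4], "website": [5, 6]}
# client_label_map sends both "ZulipPython" and "API: Python" to "Python API",
# and "website" to "Website".
assert rewrite_client_arrays(value_arrays) == {
    "Python API": [4, 6],
    "Website": [5, 6],
}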
@@ -1,343 +0,0 @@
|
||||
import urllib
|
||||
from datetime import timedelta
|
||||
from decimal import Decimal
|
||||
from typing import Any, Dict, List, Optional
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.validators import URLValidator
|
||||
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
|
||||
from django.shortcuts import render
|
||||
from django.urls import reverse
|
||||
from django.utils.timesince import timesince
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from confirmation.models import Confirmation, confirmation_url
|
||||
from confirmation.settings import STATUS_ACTIVE
|
||||
from zerver.actions.create_realm import do_change_realm_subdomain
|
||||
from zerver.actions.realm_settings import (
|
||||
do_change_realm_org_type,
|
||||
do_change_realm_plan_type,
|
||||
do_deactivate_realm,
|
||||
do_scrub_realm,
|
||||
do_send_realm_reactivation_email,
|
||||
)
|
||||
from zerver.decorator import require_server_admin
|
||||
from zerver.forms import check_subdomain_available
|
||||
from zerver.lib.exceptions import JsonableError
|
||||
from zerver.lib.realm_icon import realm_icon_url
|
||||
from zerver.lib.request import REQ, has_request_variables
|
||||
from zerver.lib.subdomains import get_subdomain_from_hostname
|
||||
from zerver.lib.validator import check_bool, check_string_in, to_decimal, to_non_negative_int
|
||||
from zerver.models import (
|
||||
MultiuseInvite,
|
||||
PreregistrationUser,
|
||||
Realm,
|
||||
UserProfile,
|
||||
get_org_type_display_name,
|
||||
get_realm,
|
||||
)
|
||||
from zerver.views.invite import get_invitee_emails_set
|
||||
|
||||
if settings.BILLING_ENABLED:
|
||||
from corporate.lib.stripe import approve_sponsorship as do_approve_sponsorship
|
||||
from corporate.lib.stripe import (
|
||||
attach_discount_to_realm,
|
||||
downgrade_at_the_end_of_billing_cycle,
|
||||
downgrade_now_without_creating_additional_invoices,
|
||||
get_discount_for_realm,
|
||||
get_latest_seat_count,
|
||||
make_end_of_cycle_updates_if_needed,
|
||||
update_billing_method_of_current_plan,
|
||||
update_sponsorship_status,
|
||||
void_all_open_invoices,
|
||||
)
|
||||
from corporate.models import get_current_plan_by_realm, get_customer_by_realm
|
||||
|
||||
|
||||
def get_plan_name(plan_type: int) -> str:
|
||||
return {
|
||||
Realm.PLAN_TYPE_SELF_HOSTED: "self-hosted",
|
||||
Realm.PLAN_TYPE_LIMITED: "limited",
|
||||
Realm.PLAN_TYPE_STANDARD: "standard",
|
||||
Realm.PLAN_TYPE_STANDARD_FREE: "open source",
|
||||
Realm.PLAN_TYPE_PLUS: "plus",
|
||||
}[plan_type]
|
||||
|
||||
|
||||
def get_confirmations(
|
||||
types: List[int], object_ids: List[int], hostname: Optional[str] = None
|
||||
) -> List[Dict[str, Any]]:
|
||||
lowest_datetime = timezone_now() - timedelta(days=30)
|
||||
confirmations = Confirmation.objects.filter(
|
||||
type__in=types, object_id__in=object_ids, date_sent__gte=lowest_datetime
|
||||
)
|
||||
confirmation_dicts = []
|
||||
for confirmation in confirmations:
|
||||
realm = confirmation.realm
|
||||
content_object = confirmation.content_object
|
||||
|
||||
type = confirmation.type
|
||||
expiry_date = confirmation.expiry_date
|
||||
|
||||
assert content_object is not None
|
||||
if hasattr(content_object, "status"):
|
||||
if content_object.status == STATUS_ACTIVE:
|
||||
link_status = "Link has been clicked"
|
||||
else:
|
||||
link_status = "Link has never been clicked"
|
||||
else:
|
||||
link_status = ""
|
||||
|
||||
now = timezone_now()
|
||||
if expiry_date is None:
|
||||
expires_in = "Never"
|
||||
elif now < expiry_date:
|
||||
expires_in = timesince(now, expiry_date)
|
||||
else:
|
||||
expires_in = "Expired"
|
||||
|
||||
url = confirmation_url(confirmation.confirmation_key, realm, type)
|
||||
confirmation_dicts.append(
|
||||
{
|
||||
"object": confirmation.content_object,
|
||||
"url": url,
|
||||
"type": type,
|
||||
"link_status": link_status,
|
||||
"expires_in": expires_in,
|
||||
}
|
||||
)
|
||||
return confirmation_dicts
|
||||
|
||||
|
||||
VALID_DOWNGRADE_METHODS = [
|
||||
"downgrade_at_billing_cycle_end",
|
||||
"downgrade_now_without_additional_licenses",
|
||||
"downgrade_now_void_open_invoices",
|
||||
]
|
||||
|
||||
VALID_STATUS_VALUES = [
|
||||
"active",
|
||||
"deactivated",
|
||||
]
|
||||
|
||||
VALID_BILLING_METHODS = [
|
||||
"send_invoice",
|
||||
"charge_automatically",
|
||||
]
|
||||
|
||||
|
||||
@require_server_admin
|
||||
@has_request_variables
|
||||
def support(
|
||||
request: HttpRequest,
|
||||
realm_id: Optional[int] = REQ(default=None, converter=to_non_negative_int),
|
||||
plan_type: Optional[int] = REQ(default=None, converter=to_non_negative_int),
|
||||
discount: Optional[Decimal] = REQ(default=None, converter=to_decimal),
|
||||
new_subdomain: Optional[str] = REQ(default=None),
|
||||
status: Optional[str] = REQ(default=None, str_validator=check_string_in(VALID_STATUS_VALUES)),
|
||||
billing_method: Optional[str] = REQ(
|
||||
default=None, str_validator=check_string_in(VALID_BILLING_METHODS)
|
||||
),
|
||||
sponsorship_pending: Optional[bool] = REQ(default=None, json_validator=check_bool),
|
||||
approve_sponsorship: Optional[bool] = REQ(default=None, json_validator=check_bool),
|
||||
downgrade_method: Optional[str] = REQ(
|
||||
default=None, str_validator=check_string_in(VALID_DOWNGRADE_METHODS)
|
||||
),
|
||||
scrub_realm: Optional[bool] = REQ(default=None, json_validator=check_bool),
|
||||
query: Optional[str] = REQ("q", default=None),
|
||||
org_type: Optional[int] = REQ(default=None, converter=to_non_negative_int),
|
||||
) -> HttpResponse:
|
||||
context: Dict[str, Any] = {}
|
||||
|
||||
if "success_message" in request.session:
|
||||
context["success_message"] = request.session["success_message"]
|
||||
del request.session["success_message"]
|
||||
|
||||
if settings.BILLING_ENABLED and request.method == "POST":
|
||||
# We check that request.POST only has two keys in it: The
|
||||
# realm_id and a field to change.
|
||||
keys = set(request.POST.keys())
|
||||
if "csrfmiddlewaretoken" in keys:
|
||||
keys.remove("csrfmiddlewaretoken")
|
||||
if len(keys) != 2:
|
||||
raise JsonableError(_("Invalid parameters"))
|
||||
|
||||
realm = Realm.objects.get(id=realm_id)
|
||||
|
||||
acting_user = request.user
|
||||
assert isinstance(acting_user, UserProfile)
|
||||
if plan_type is not None:
|
||||
current_plan_type = realm.plan_type
|
||||
do_change_realm_plan_type(realm, plan_type, acting_user=acting_user)
|
||||
msg = f"Plan type of {realm.string_id} changed from {get_plan_name(current_plan_type)} to {get_plan_name(plan_type)} "
|
||||
context["success_message"] = msg
|
||||
elif org_type is not None:
|
||||
current_realm_type = realm.org_type
|
||||
do_change_realm_org_type(realm, org_type, acting_user=acting_user)
|
||||
msg = f"Org type of {realm.string_id} changed from {get_org_type_display_name(current_realm_type)} to {get_org_type_display_name(org_type)} "
|
||||
context["success_message"] = msg
|
||||
elif discount is not None:
|
||||
current_discount = get_discount_for_realm(realm) or 0
|
||||
attach_discount_to_realm(realm, discount, acting_user=acting_user)
|
||||
context[
|
||||
"success_message"
|
||||
] = f"Discount of {realm.string_id} changed to {discount}% from {current_discount}%."
|
||||
elif new_subdomain is not None:
|
||||
old_subdomain = realm.string_id
|
||||
try:
|
||||
check_subdomain_available(new_subdomain)
|
||||
except ValidationError as error:
|
||||
context["error_message"] = error.message
|
||||
else:
|
||||
do_change_realm_subdomain(realm, new_subdomain, acting_user=acting_user)
|
||||
request.session[
|
||||
"success_message"
|
||||
] = f"Subdomain changed from {old_subdomain} to {new_subdomain}"
|
||||
return HttpResponseRedirect(
|
||||
reverse("support") + "?" + urlencode({"q": new_subdomain})
|
||||
)
|
||||
elif status is not None:
|
||||
if status == "active":
|
||||
do_send_realm_reactivation_email(realm, acting_user=acting_user)
|
||||
context[
|
||||
"success_message"
|
||||
] = f"Realm reactivation email sent to admins of {realm.string_id}."
|
||||
elif status == "deactivated":
|
||||
do_deactivate_realm(realm, acting_user=acting_user)
|
||||
context["success_message"] = f"{realm.string_id} deactivated."
|
||||
elif billing_method is not None:
|
||||
if billing_method == "send_invoice":
|
||||
update_billing_method_of_current_plan(
|
||||
realm, charge_automatically=False, acting_user=acting_user
|
||||
)
|
||||
context[
|
||||
"success_message"
|
||||
] = f"Billing method of {realm.string_id} updated to pay by invoice."
|
||||
elif billing_method == "charge_automatically":
|
||||
update_billing_method_of_current_plan(
|
||||
realm, charge_automatically=True, acting_user=acting_user
|
||||
)
|
||||
context[
|
||||
"success_message"
|
||||
] = f"Billing method of {realm.string_id} updated to charge automatically."
|
||||
elif sponsorship_pending is not None:
|
||||
if sponsorship_pending:
|
||||
update_sponsorship_status(realm, True, acting_user=acting_user)
|
||||
context["success_message"] = f"{realm.string_id} marked as pending sponsorship."
|
||||
else:
|
||||
update_sponsorship_status(realm, False, acting_user=acting_user)
|
||||
context["success_message"] = f"{realm.string_id} is no longer pending sponsorship."
|
||||
elif approve_sponsorship:
|
||||
do_approve_sponsorship(realm, acting_user=acting_user)
|
||||
context["success_message"] = f"Sponsorship approved for {realm.string_id}"
|
||||
elif downgrade_method is not None:
|
||||
if downgrade_method == "downgrade_at_billing_cycle_end":
|
||||
downgrade_at_the_end_of_billing_cycle(realm)
|
||||
context[
|
||||
"success_message"
|
||||
] = f"{realm.string_id} marked for downgrade at the end of billing cycle"
|
||||
elif downgrade_method == "downgrade_now_without_additional_licenses":
|
||||
downgrade_now_without_creating_additional_invoices(realm)
|
||||
context[
|
||||
"success_message"
|
||||
] = f"{realm.string_id} downgraded without creating additional invoices"
|
||||
elif downgrade_method == "downgrade_now_void_open_invoices":
|
||||
downgrade_now_without_creating_additional_invoices(realm)
|
||||
voided_invoices_count = void_all_open_invoices(realm)
|
||||
context[
|
||||
"success_message"
|
||||
] = f"{realm.string_id} downgraded and voided {voided_invoices_count} open invoices"
|
||||
elif scrub_realm:
|
||||
do_scrub_realm(realm, acting_user=acting_user)
|
||||
context["success_message"] = f"{realm.string_id} scrubbed."
|
||||
|
||||
if query:
|
||||
key_words = get_invitee_emails_set(query)
|
||||
|
||||
users = set(UserProfile.objects.filter(delivery_email__in=key_words))
|
||||
realms = set(Realm.objects.filter(string_id__in=key_words))
|
||||
|
||||
for key_word in key_words:
|
||||
try:
|
||||
URLValidator()(key_word)
|
||||
parse_result = urllib.parse.urlparse(key_word)
|
||||
hostname = parse_result.hostname
|
||||
assert hostname is not None
|
||||
if parse_result.port:
|
||||
hostname = f"{hostname}:{parse_result.port}"
|
||||
subdomain = get_subdomain_from_hostname(hostname)
|
||||
try:
|
||||
realms.add(get_realm(subdomain))
|
||||
except Realm.DoesNotExist:
|
||||
pass
|
||||
except ValidationError:
|
||||
users.update(UserProfile.objects.filter(full_name__iexact=key_word))
|
||||
|
||||
for realm in realms:
|
||||
realm.customer = get_customer_by_realm(realm)
|
||||
|
||||
current_plan = get_current_plan_by_realm(realm)
|
||||
if current_plan is not None:
|
||||
new_plan, last_ledger_entry = make_end_of_cycle_updates_if_needed(
|
||||
current_plan, timezone_now()
|
||||
)
|
||||
if last_ledger_entry is not None:
|
||||
if new_plan is not None:
|
||||
realm.current_plan = new_plan
|
||||
else:
|
||||
realm.current_plan = current_plan
|
||||
realm.current_plan.licenses = last_ledger_entry.licenses
|
||||
realm.current_plan.licenses_used = get_latest_seat_count(realm)
|
||||
|
||||
# full_names can have , in them
|
||||
users.update(UserProfile.objects.filter(full_name__iexact=query))
|
||||
|
||||
context["users"] = users
|
||||
context["realms"] = realms
|
||||
|
||||
confirmations: List[Dict[str, Any]] = []
|
||||
|
||||
preregistration_users = PreregistrationUser.objects.filter(email__in=key_words)
|
||||
confirmations += get_confirmations(
|
||||
[Confirmation.USER_REGISTRATION, Confirmation.INVITATION, Confirmation.REALM_CREATION],
|
||||
preregistration_users,
|
||||
hostname=request.get_host(),
|
||||
)
|
||||
|
||||
multiuse_invites = MultiuseInvite.objects.filter(realm__in=realms)
|
||||
confirmations += get_confirmations([Confirmation.MULTIUSE_INVITE], multiuse_invites)
|
||||
|
||||
confirmations += get_confirmations(
|
||||
[Confirmation.REALM_REACTIVATION], [realm.id for realm in realms]
|
||||
)
|
||||
|
||||
context["confirmations"] = confirmations
|
||||
|
||||
def get_realm_owner_emails_as_string(realm: Realm) -> str:
|
||||
return ", ".join(
|
||||
realm.get_human_owner_users()
|
||||
.order_by("delivery_email")
|
||||
.values_list("delivery_email", flat=True)
|
||||
)
|
||||
|
||||
def get_realm_admin_emails_as_string(realm: Realm) -> str:
|
||||
return ", ".join(
|
||||
realm.get_human_admin_users(include_realm_owners=False)
|
||||
.order_by("delivery_email")
|
||||
.values_list("delivery_email", flat=True)
|
||||
)
|
||||
|
||||
context["get_realm_owner_emails_as_string"] = get_realm_owner_emails_as_string
|
||||
context["get_realm_admin_emails_as_string"] = get_realm_admin_emails_as_string
|
||||
context["get_discount_for_realm"] = get_discount_for_realm
|
||||
context["get_org_type_display_name"] = get_org_type_display_name
|
||||
context["realm_icon_url"] = realm_icon_url
|
||||
context["Confirmation"] = Confirmation
|
||||
context["sorted_realm_types"] = sorted(
|
||||
Realm.ORG_TYPES.values(), key=lambda d: d["display_order"]
|
||||
)
|
||||
|
||||
return render(request, "analytics/support.html", context=context)
|
@@ -1,104 +0,0 @@
|
||||
from typing import Any, Dict, List, Tuple
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.models.query import QuerySet
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.shortcuts import render
|
||||
|
||||
from analytics.views.activity_common import (
|
||||
format_date_for_activity_reports,
|
||||
get_user_activity_summary,
|
||||
make_table,
|
||||
)
|
||||
from zerver.decorator import require_server_admin
|
||||
from zerver.models import UserActivity, UserProfile, get_user_profile_by_id
|
||||
|
||||
if settings.BILLING_ENABLED:
|
||||
pass
|
||||
|
||||
|
||||
def get_user_activity_records(user_profile: UserProfile) -> List[QuerySet]:
|
||||
fields = [
|
||||
"user_profile__full_name",
|
||||
"query",
|
||||
"client__name",
|
||||
"count",
|
||||
"last_visit",
|
||||
]
|
||||
|
||||
records = UserActivity.objects.filter(
|
||||
user_profile=user_profile,
|
||||
)
|
||||
records = records.order_by("-last_visit")
|
||||
records = records.select_related("user_profile", "client").only(*fields)
|
||||
return records
|
||||
|
||||
|
||||
def raw_user_activity_table(records: List[QuerySet]) -> str:
|
||||
cols = [
|
||||
"query",
|
||||
"client",
|
||||
"count",
|
||||
"last_visit",
|
||||
]
|
||||
|
||||
def row(record: QuerySet) -> List[Any]:
|
||||
return [
|
||||
record.query,
|
||||
record.client.name,
|
||||
record.count,
|
||||
format_date_for_activity_reports(record.last_visit),
|
||||
]
|
||||
|
||||
rows = list(map(row, records))
|
||||
title = "Raw data"
|
||||
return make_table(title, cols, rows)
|
||||
|
||||
|
||||
def user_activity_summary_table(user_summary: Dict[str, Dict[str, Any]]) -> str:
|
||||
rows = []
|
||||
for k, v in user_summary.items():
|
||||
if k == "name" or k == "user_profile_id":
|
||||
continue
|
||||
client = k
|
||||
count = v["count"]
|
||||
last_visit = v["last_visit"]
|
||||
row = [
|
||||
format_date_for_activity_reports(last_visit),
|
||||
client,
|
||||
count,
|
||||
]
|
||||
rows.append(row)
|
||||
|
||||
rows = sorted(rows, key=lambda r: r[0], reverse=True)
|
||||
|
||||
cols = [
|
||||
"last_visit",
|
||||
"client",
|
||||
"count",
|
||||
]
|
||||
|
||||
title = "User activity"
|
||||
return make_table(title, cols, rows)
|
||||
|
||||
|
||||
@require_server_admin
|
||||
def get_user_activity(request: HttpRequest, user_profile_id: int) -> HttpResponse:
|
||||
user_profile = get_user_profile_by_id(user_profile_id)
|
||||
records = get_user_activity_records(user_profile)
|
||||
|
||||
data: List[Tuple[str, str]] = []
|
||||
user_summary = get_user_activity_summary(records)
|
||||
content = user_activity_summary_table(user_summary)
|
||||
|
||||
data += [("Summary", content)]
|
||||
|
||||
content = raw_user_activity_table(records)
|
||||
data += [("Info", content)]
|
||||
|
||||
title = user_profile.delivery_email
|
||||
return render(
|
||||
request,
|
||||
"analytics/activity.html",
|
||||
context=dict(data=data, title=title),
|
||||
)
|
@@ -1,25 +0,0 @@
|
||||
"use strict";
|
||||
|
||||
module.exports = {
|
||||
plugins: [
|
||||
[
|
||||
"formatjs",
|
||||
{
|
||||
additionalFunctionNames: ["$t", "$t_html"],
|
||||
overrideIdFn: (id, defaultMessage) => defaultMessage,
|
||||
},
|
||||
],
|
||||
],
|
||||
presets: [
|
||||
[
|
||||
"@babel/preset-env",
|
||||
{
|
||||
corejs: "3.20",
|
||||
shippedProposals: true,
|
||||
useBuiltIns: "usage",
|
||||
},
|
||||
],
|
||||
"@babel/typescript",
|
||||
],
|
||||
sourceType: "unambiguous",
|
||||
};
|
@@ -1,3 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2008, Jarek Zgoda <jarek.zgoda@gmail.com>
|
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a
|
||||
@@ -19,4 +21,4 @@
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
||||
# IN THE SOFTWARE.
|
||||
|
||||
VERSION = (0, 9, "pre")
|
||||
VERSION = (0, 9, 'pre')
|
||||
|
@@ -1,39 +1,27 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import models, migrations
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("contenttypes", "0001_initial"),
|
||||
('contenttypes', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Confirmation",
|
||||
name='Confirmation',
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
("object_id", models.PositiveIntegerField()),
|
||||
("date_sent", models.DateTimeField(verbose_name="sent")),
|
||||
(
|
||||
"confirmation_key",
|
||||
models.CharField(max_length=40, verbose_name="activation key"),
|
||||
),
|
||||
(
|
||||
"content_type",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="contenttypes.ContentType"
|
||||
),
|
||||
),
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('object_id', models.PositiveIntegerField()),
|
||||
('date_sent', models.DateTimeField(verbose_name='sent')),
|
||||
('confirmation_key', models.CharField(max_length=40, verbose_name='activation key')),
|
||||
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "confirmation email",
|
||||
"verbose_name_plural": "confirmation emails",
|
||||
'verbose_name': 'confirmation email',
|
||||
'verbose_name_plural': 'confirmation emails',
|
||||
},
|
||||
bases=(models.Model,),
|
||||
),
|
||||
|
@@ -1,28 +1,21 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import models, migrations
|
||||
import django.utils.timezone
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("confirmation", "0001_initial"),
|
||||
('confirmation', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="RealmCreationKey",
|
||||
name='RealmCreationKey',
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
("creation_key", models.CharField(max_length=40, verbose_name="activation key")),
|
||||
(
|
||||
"date_created",
|
||||
models.DateTimeField(default=django.utils.timezone.now, verbose_name="created"),
|
||||
),
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('creation_key', models.CharField(max_length=40, verbose_name='activation key')),
|
||||
('date_created', models.DateTimeField(default=django.utils.timezone.now, verbose_name='created')),
|
||||
],
|
||||
),
|
||||
]
|
||||
|
@@ -1,3 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.10.4 on 2017-01-17 09:16
|
||||
from django.db import migrations
|
||||
|
||||
@@ -5,16 +6,17 @@ from django.db import migrations
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("confirmation", "0002_realmcreationkey"),
|
||||
('confirmation', '0002_realmcreationkey'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="EmailChangeConfirmation",
|
||||
fields=[],
|
||||
name='EmailChangeConfirmation',
|
||||
fields=[
|
||||
],
|
||||
options={
|
||||
"proxy": True,
|
||||
'proxy': True,
|
||||
},
|
||||
bases=("confirmation.confirmation",),
|
||||
bases=('confirmation.confirmation',),
|
||||
),
|
||||
]
|
||||
|
@@ -1,3 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11.2 on 2017-07-08 04:23
|
||||
from django.db import migrations, models
|
||||
|
||||
@@ -5,31 +6,31 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("confirmation", "0003_emailchangeconfirmation"),
|
||||
('confirmation', '0003_emailchangeconfirmation'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.DeleteModel(
|
||||
name="EmailChangeConfirmation",
|
||||
name='EmailChangeConfirmation',
|
||||
),
|
||||
migrations.AlterModelOptions(
|
||||
name="confirmation",
|
||||
name='confirmation',
|
||||
options={},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="confirmation",
|
||||
name="type",
|
||||
model_name='confirmation',
|
||||
name='type',
|
||||
field=models.PositiveSmallIntegerField(default=1),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="confirmation",
|
||||
name="confirmation_key",
|
||||
model_name='confirmation',
|
||||
name='confirmation_key',
|
||||
field=models.CharField(max_length=40),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="confirmation",
|
||||
name="date_sent",
|
||||
model_name='confirmation',
|
||||
name='date_sent',
|
||||
field=models.DateTimeField(),
|
||||
),
|
||||
]
|
||||
|
@@ -1,21 +1,22 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11.6 on 2017-11-30 00:13
|
||||
import django.db.models.deletion
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("zerver", "0124_stream_enable_notifications"),
|
||||
("confirmation", "0004_remove_confirmationmanager"),
|
||||
('zerver', '0124_stream_enable_notifications'),
|
||||
('confirmation', '0004_remove_confirmationmanager'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="confirmation",
|
||||
name="realm",
|
||||
field=models.ForeignKey(
|
||||
null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
|
||||
),
|
||||
model_name='confirmation',
|
||||
name='realm',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm'),
|
||||
),
|
||||
]
|
||||
|
@@ -1,4 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.11.6 on 2018-01-29 18:39
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
@@ -6,13 +8,13 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("confirmation", "0005_confirmation_realm"),
|
||||
('confirmation', '0005_confirmation_realm'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="realmcreationkey",
|
||||
name="presume_email_valid",
|
||||
model_name='realmcreationkey',
|
||||
name='presume_email_valid',
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
|
@@ -1,37 +0,0 @@
|
||||
# Generated by Django 2.2.10 on 2020-03-27 09:02
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("confirmation", "0006_realmcreationkey_presume_email_valid"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="confirmation",
|
||||
name="confirmation_key",
|
||||
field=models.CharField(db_index=True, max_length=40),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="confirmation",
|
||||
name="date_sent",
|
||||
field=models.DateTimeField(db_index=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="confirmation",
|
||||
name="object_id",
|
||||
field=models.PositiveIntegerField(db_index=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="realmcreationkey",
|
||||
name="creation_key",
|
||||
field=models.CharField(db_index=True, max_length=40, verbose_name="activation key"),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="confirmation",
|
||||
unique_together={("type", "confirmation_key")},
|
||||
),
|
||||
]
|
@@ -1,17 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("confirmation", "0007_add_indexes"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="confirmation",
|
||||
name="expiry_date",
|
||||
field=models.DateTimeField(db_index=True, null=True),
|
||||
preserve_default=False,
|
||||
),
|
||||
]
|
@@ -1,70 +0,0 @@
-# Generated by Django 3.1.7 on 2021-03-31 20:47
-
-import time
-from datetime import timedelta
-
-from django.conf import settings
-from django.db import migrations, transaction
-from django.db.backends.postgresql.schema import DatabaseSchemaEditor
-from django.db.migrations.state import StateApps
-
-
-def set_expiry_date_for_existing_confirmations(
-    apps: StateApps, schema_editor: DatabaseSchemaEditor
-) -> None:
-    Confirmation = apps.get_model("confirmation", "Confirmation")
-    if not Confirmation.objects.exists():
-        return
-
-    # The values at the time of this migration
-    INVITATION = 2
-    UNSUBSCRIBE = 4
-    MULTIUSE_INVITE = 6
-
-    @transaction.atomic
-    def backfill_confirmations_between(lower_bound: int, upper_bound: int) -> None:
-        confirmations = Confirmation.objects.filter(id__gte=lower_bound, id__lte=upper_bound)
-        for confirmation in confirmations:
-            if confirmation.type in (INVITATION, MULTIUSE_INVITE):
-                confirmation.expiry_date = confirmation.date_sent + timedelta(
-                    days=settings.INVITATION_LINK_VALIDITY_DAYS
-                )
-            elif confirmation.type == UNSUBSCRIBE:
-                # Unsubscribe links never expire, which we apparently implement as in 1M days.
-                confirmation.expiry_date = confirmation.date_sent + timedelta(days=1000000)
-            else:
-                confirmation.expiry_date = confirmation.date_sent + timedelta(
-                    days=settings.CONFIRMATION_LINK_DEFAULT_VALIDITY_DAYS
-                )
-        Confirmation.objects.bulk_update(confirmations, ["expiry_date"])
-
-    # Because the ranges in this code are inclusive, subtracting 1 offers round numbers.
-    BATCH_SIZE = 1000 - 1
-
-    first_id = Confirmation.objects.earliest("id").id
-    last_id = Confirmation.objects.latest("id").id
-
-    id_range_lower_bound = first_id
-    id_range_upper_bound = first_id + BATCH_SIZE
-    while id_range_lower_bound <= last_id:
-        print(f"Processed {id_range_lower_bound} / {last_id}")
-        backfill_confirmations_between(id_range_lower_bound, id_range_upper_bound)
-        id_range_lower_bound = id_range_upper_bound + 1
-        id_range_upper_bound = id_range_lower_bound + BATCH_SIZE
-        time.sleep(0.1)
-
-
-class Migration(migrations.Migration):
-    atomic = False
-
-    dependencies = [
-        ("confirmation", "0008_confirmation_expiry_date"),
-    ]
-
-    operations = [
-        migrations.RunPython(
-            set_expiry_date_for_existing_confirmations,
-            reverse_code=migrations.RunPython.noop,
-            elidable=True,
-        ),
-    ]
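A note on the batching in the removed migration above: because the id windows are inclusive, BATCH_SIZE = 1000 - 1 makes each [lower, upper] window span exactly 1,000 ids. A standalone sketch of the same bounds arithmetic (the id range here is hypothetical, not from the migration):

# Standalone sketch of the inclusive-range batching above (not part of the diff).
BATCH_SIZE = 1000 - 1

first_id, last_id = 1, 2500  # hypothetical id range
lower, upper = first_id, first_id + BATCH_SIZE
while lower <= last_id:
    print(f"would backfill ids {lower}..{upper}")  # 1..1000, 1001..2000, 2001..3000
    lower = upper + 1
    upper = lower + BATCH_SIZE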
@@ -1,18 +0,0 @@
-# Generated by Django 3.2.5 on 2021-08-02 19:03
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("confirmation", "0009_confirmation_expiry_date_backfill"),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name="confirmation",
-            name="expiry_date",
-            field=models.DateTimeField(db_index=True),
-        ),
-    ]
@@ -1,18 +0,0 @@
-# Generated by Django 3.2.9 on 2021-11-30 17:44
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("confirmation", "0010_alter_confirmation_expiry_date"),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name="confirmation",
-            name="expiry_date",
-            field=models.DateTimeField(db_index=True, null=True),
-        ),
-    ]
@@ -1,34 +1,26 @@
 # -*- coding: utf-8 -*-
 
 # Copyright: (c) 2008, Jarek Zgoda <jarek.zgoda@gmail.com>
 
-__revision__ = "$Id: models.py 28 2009-10-22 15:03:02Z jarek.zgoda $"
-import datetime
-import secrets
-from base64 import b32encode
-from typing import List, Mapping, Optional, Union
-from urllib.parse import urljoin
+__revision__ = '$Id: models.py 28 2009-10-22 15:03:02Z jarek.zgoda $'
+
+import datetime
 
-from django.conf import settings
-from django.contrib.contenttypes.fields import GenericForeignKey
-from django.contrib.contenttypes.models import ContentType
 from django.db import models
 from django.db.models import CASCADE
-from django.urls import reverse
+from django.conf import settings
+from django.contrib.contenttypes.models import ContentType
+from django.contrib.contenttypes.fields import GenericForeignKey
 from django.http import HttpRequest, HttpResponse
 from django.shortcuts import render
+from django.urls import reverse
 from django.utils.timezone import now as timezone_now
-from typing_extensions import Protocol
-
-from zerver.lib.types import UnspecifiedValue
-from zerver.models import EmailChangeStatus, MultiuseInvite, PreregistrationUser, Realm, UserProfile
-
-
-class HasRealmObject(Protocol):
-    realm: Realm
-
-
-class OptionalHasRealmObject(Protocol):
-    realm: Optional[Realm]
 
+from zerver.models import PreregistrationUser, EmailChangeStatus, MultiuseInvite, \
+    UserProfile, Realm
+from random import SystemRandom
+import string
+from typing import Dict, Optional, Union
 
 class ConfirmationKeyException(Exception):
     WRONG_LENGTH = 1
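The HasRealmObject / OptionalHasRealmObject Protocol classes on the removed (newer) side exist so that create_confirmation_link can accept any object that exposes a realm attribute, with no shared base class. A minimal sketch of that structural-typing idea; the Realm and Invite stand-ins below are hypothetical, and the real code imports Protocol from typing_extensions for older Pythons:

from typing import Protocol


class Realm:  # hypothetical stand-in for zerver.models.Realm
    pass


class HasRealmObject(Protocol):
    realm: Realm


class Invite:  # shares no base class, but matches HasRealmObject structurally
    def __init__(self, realm: Realm) -> None:
        self.realm = realm


def realm_of(obj: HasRealmObject) -> Realm:
    return obj.realm  # type-checks because Invite has a `realm` attribute


print(realm_of(Invite(Realm())))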
@@ -39,112 +31,67 @@ class ConfirmationKeyException(Exception):
         super().__init__()
         self.error_type = error_type
 
 
-def render_confirmation_key_error(
-    request: HttpRequest, exception: ConfirmationKeyException
-) -> HttpResponse:
+def render_confirmation_key_error(request: HttpRequest, exception: ConfirmationKeyException) -> HttpResponse:
     if exception.error_type == ConfirmationKeyException.WRONG_LENGTH:
-        return render(request, "confirmation/link_malformed.html", status=404)
+        return render(request, 'confirmation/link_malformed.html')
     if exception.error_type == ConfirmationKeyException.EXPIRED:
-        return render(request, "confirmation/link_expired.html", status=404)
-    return render(request, "confirmation/link_does_not_exist.html", status=404)
+        return render(request, 'confirmation/link_expired.html')
+    return render(request, 'confirmation/link_does_not_exist.html')
 
 
 def generate_key() -> str:
+    generator = SystemRandom()
     # 24 characters * 5 bits of entropy/character = 120 bits of entropy
-    return b32encode(secrets.token_bytes(15)).decode().lower()
+    return ''.join(generator.choice(string.ascii_lowercase + string.digits) for _ in range(24))
 
 
 ConfirmationObjT = Union[MultiuseInvite, PreregistrationUser, EmailChangeStatus]
 
-
-def get_object_from_key(
-    confirmation_key: str, confirmation_types: List[int], activate_object: bool = True
-) -> ConfirmationObjT:
+def get_object_from_key(confirmation_key: str,
+                        confirmation_type: int) -> ConfirmationObjT:
     # Confirmation keys used to be 40 characters
     if len(confirmation_key) not in (24, 40):
         raise ConfirmationKeyException(ConfirmationKeyException.WRONG_LENGTH)
     try:
-        confirmation = Confirmation.objects.get(
-            confirmation_key=confirmation_key, type__in=confirmation_types
-        )
+        confirmation = Confirmation.objects.get(confirmation_key=confirmation_key,
+                                                type=confirmation_type)
     except Confirmation.DoesNotExist:
         raise ConfirmationKeyException(ConfirmationKeyException.DOES_NOT_EXIST)
 
-    if confirmation.expiry_date is not None and timezone_now() > confirmation.expiry_date:
+    time_elapsed = timezone_now() - confirmation.date_sent
+    if time_elapsed.total_seconds() > _properties[confirmation.type].validity_in_days * 24 * 3600:
         raise ConfirmationKeyException(ConfirmationKeyException.EXPIRED)
 
     obj = confirmation.content_object
-    assert obj is not None
-    if activate_object and hasattr(obj, "status"):
-        obj.status = getattr(settings, "STATUS_ACTIVE", 1)
-        obj.save(update_fields=["status"])
+    if hasattr(obj, "status"):
+        obj.status = getattr(settings, 'STATUS_ACTIVE', 1)
+        obj.save(update_fields=['status'])
     return obj
 
 
-def create_confirmation_link(
-    obj: Union[Realm, HasRealmObject, OptionalHasRealmObject],
-    confirmation_type: int,
-    *,
-    validity_in_days: Union[Optional[int], UnspecifiedValue] = UnspecifiedValue(),
-    url_args: Mapping[str, str] = {},
-) -> str:
-    # validity_in_days is an override for the default values which are
-    # determined by the confirmation_type - its main purpose is for use
-    # in tests which may want to have control over the exact expiration time.
+def create_confirmation_link(obj: ContentType, host: str,
+                             confirmation_type: int,
+                             url_args: Optional[Dict[str, str]]=None) -> str:
     key = generate_key()
     realm = None
-    if isinstance(obj, Realm):
-        realm = obj
-    elif hasattr(obj, "realm"):
+    if hasattr(obj, 'realm'):
         realm = obj.realm
+    Confirmation.objects.create(content_object=obj, date_sent=timezone_now(), confirmation_key=key,
+                                realm=realm, type=confirmation_type)
+    return confirmation_url(key, host, confirmation_type, url_args)
 
-    current_time = timezone_now()
-    expiry_date = None
-    if not isinstance(validity_in_days, UnspecifiedValue):
-        if validity_in_days is None:
-            expiry_date = None
-        else:
-            assert validity_in_days is not None
-            expiry_date = current_time + datetime.timedelta(days=validity_in_days)
-    else:
-        expiry_date = current_time + datetime.timedelta(
-            days=_properties[confirmation_type].validity_in_days
-        )
-
-    Confirmation.objects.create(
-        content_object=obj,
-        date_sent=current_time,
-        confirmation_key=key,
-        realm=realm,
-        expiry_date=expiry_date,
-        type=confirmation_type,
-    )
-    return confirmation_url(key, realm, confirmation_type, url_args)
-
-
-def confirmation_url(
-    confirmation_key: str,
-    realm: Optional[Realm],
-    confirmation_type: int,
-    url_args: Mapping[str, str] = {},
-) -> str:
-    url_args = dict(url_args)
-    url_args["confirmation_key"] = confirmation_key
-    return urljoin(
-        settings.ROOT_DOMAIN_URI if realm is None else realm.uri,
-        reverse(_properties[confirmation_type].url_name, kwargs=url_args),
-    )
-
+def confirmation_url(confirmation_key: str, host: str,
+                     confirmation_type: int,
+                     url_args: Optional[Dict[str, str]]=None) -> str:
+    if url_args is None:
+        url_args = {}
+    url_args['confirmation_key'] = confirmation_key
+    return '%s%s%s' % (settings.EXTERNAL_URI_SCHEME, host,
+                       reverse(_properties[confirmation_type].url_name, kwargs=url_args))
 
 class Confirmation(models.Model):
     content_type = models.ForeignKey(ContentType, on_delete=CASCADE)
-    object_id: int = models.PositiveIntegerField(db_index=True)
-    content_object = GenericForeignKey("content_type", "object_id")
-    date_sent: datetime.datetime = models.DateTimeField(db_index=True)
-    confirmation_key: str = models.CharField(max_length=40, db_index=True)
-    expiry_date: Optional[datetime.datetime] = models.DateTimeField(db_index=True, null=True)
-    realm: Optional[Realm] = models.ForeignKey(Realm, null=True, on_delete=CASCADE)
+    object_id = models.PositiveIntegerField()  # type: int
+    content_object = GenericForeignKey('content_type', 'object_id')
+    date_sent = models.DateTimeField()  # type: datetime.datetime
+    confirmation_key = models.CharField(max_length=40)  # type: str
+    realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE)  # type: Optional[Realm]
 
     # The following list is the set of valid types
     USER_REGISTRATION = 1
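Both generate_key variants above return a 24-character lowercase key. The newer one earns its "24 characters * 5 bits" comment because base32 encodes 5 bits per character: 15 random bytes is 120 bits, i.e. exactly 24 characters with no '=' padding. A quick self-contained check of that arithmetic (not part of the diff):

# 15 bytes = 120 bits; base32 emits 5 bits per character, so 24 chars, no padding.
import secrets
from base64 import b32encode

key = b32encode(secrets.token_bytes(15)).decode().lower()
assert len(key) == 24 and "=" not in key
print(key)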
@@ -155,52 +102,39 @@ class Confirmation(models.Model):
     MULTIUSE_INVITE = 6
     REALM_CREATION = 7
     REALM_REACTIVATION = 8
-    type: int = models.PositiveSmallIntegerField()
+    type = models.PositiveSmallIntegerField()  # type: int
 
     def __str__(self) -> str:
-        return f"<Confirmation: {self.content_object}>"
-
-    class Meta:
-        unique_together = ("type", "confirmation_key")
-
+        return '<Confirmation: %s>' % (self.content_object,)
 
 class ConfirmationType:
-    def __init__(
-        self,
-        url_name: str,
-        validity_in_days: int = settings.CONFIRMATION_LINK_DEFAULT_VALIDITY_DAYS,
-    ) -> None:
+    def __init__(self, url_name: str,
+                 validity_in_days: int=settings.CONFIRMATION_LINK_DEFAULT_VALIDITY_DAYS) -> None:
         self.url_name = url_name
         self.validity_in_days = validity_in_days
 
-
 _properties = {
-    Confirmation.USER_REGISTRATION: ConfirmationType("get_prereg_key_and_redirect"),
-    Confirmation.INVITATION: ConfirmationType(
-        "get_prereg_key_and_redirect", validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS
-    ),
-    Confirmation.EMAIL_CHANGE: ConfirmationType("confirm_email_change"),
-    Confirmation.UNSUBSCRIBE: ConfirmationType(
-        "unsubscribe",
-        validity_in_days=1000000,  # should never expire
-    ),
+    Confirmation.USER_REGISTRATION: ConfirmationType('check_prereg_key_and_redirect'),
+    Confirmation.INVITATION: ConfirmationType('check_prereg_key_and_redirect',
+                                              validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS),
+    Confirmation.EMAIL_CHANGE: ConfirmationType('zerver.views.user_settings.confirm_email_change'),
+    Confirmation.UNSUBSCRIBE: ConfirmationType('zerver.views.unsubscribe.email_unsubscribe',
+                                               validity_in_days=1000000),  # should never expire
     Confirmation.MULTIUSE_INVITE: ConfirmationType(
-        "join", validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS
-    ),
-    Confirmation.REALM_CREATION: ConfirmationType("get_prereg_key_and_redirect"),
-    Confirmation.REALM_REACTIVATION: ConfirmationType("realm_reactivation"),
+        'zerver.views.registration.accounts_home_from_multiuse_invite',
+        validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS),
+    Confirmation.REALM_CREATION: ConfirmationType('check_prereg_key_and_redirect'),
+    Confirmation.REALM_REACTIVATION: ConfirmationType('zerver.views.realm.realm_reactivation'),
 }
 
-
 def one_click_unsubscribe_link(user_profile: UserProfile, email_type: str) -> str:
     """
     Generate a unique link that a logged-out user can visit to unsubscribe from
     Zulip e-mails without having to first log in.
     """
-    return create_confirmation_link(
-        user_profile, Confirmation.UNSUBSCRIBE, url_args={"email_type": email_type}
-    )
+    return create_confirmation_link(user_profile, user_profile.realm.host,
+                                    Confirmation.UNSUBSCRIBE,
+                                    url_args = {'email_type': email_type})
 
 # Functions related to links generated by the generate_realm_creation_link.py
 # management command.
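On both sides of this diff, link lifetime is driven by the _properties table: a confirmation's type selects a ConfirmationType, whose validity_in_days feeds the expiry arithmetic. A reduced sketch of that lookup; the constants and day counts below are stand-ins, not values taken from Zulip's settings:

from dataclasses import dataclass
from datetime import datetime, timedelta


@dataclass
class ConfirmationType:
    url_name: str
    validity_in_days: int = 10  # stand-in for CONFIRMATION_LINK_DEFAULT_VALIDITY_DAYS


INVITATION, UNSUBSCRIBE = 2, 4  # values as of the backfill migration above
_properties = {
    INVITATION: ConfirmationType("get_prereg_key_and_redirect"),
    UNSUBSCRIBE: ConfirmationType("unsubscribe", validity_in_days=1000000),
}

date_sent = datetime(2021, 3, 31)
expiry = date_sent + timedelta(days=_properties[INVITATION].validity_in_days)
print(expiry)  # 2021-04-10 00:00:00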
@@ -210,8 +144,7 @@ def one_click_unsubscribe_link(user_profile: UserProfile, email_type: str) -> st
 # Arguably RealmCreationKey should just be another ConfirmationObjT and we should
 # add another Confirmation.type for this; it's this way for historical reasons.
 
-def validate_key(creation_key: Optional[str]) -> Optional["RealmCreationKey"]:
+def validate_key(creation_key: Optional[str]) -> Optional['RealmCreationKey']:
     """Get the record for this key, raising InvalidCreationKey if non-None but invalid."""
     if creation_key is None:
         return None
@@ -224,25 +157,23 @@ def validate_key(creation_key: Optional[str]) -> Optional["RealmCreationKey"]:
         raise RealmCreationKey.Invalid()
     return key_record
 
-
-def generate_realm_creation_url(by_admin: bool = False) -> str:
+def generate_realm_creation_url(by_admin: bool=False) -> str:
     key = generate_key()
-    RealmCreationKey.objects.create(
-        creation_key=key, date_created=timezone_now(), presume_email_valid=by_admin
-    )
-    return urljoin(
-        settings.ROOT_DOMAIN_URI,
-        reverse("create_realm", kwargs={"creation_key": key}),
-    )
-
+    RealmCreationKey.objects.create(creation_key=key,
+                                    date_created=timezone_now(),
+                                    presume_email_valid=by_admin)
+    return '%s%s%s' % (settings.EXTERNAL_URI_SCHEME,
+                       settings.EXTERNAL_HOST,
+                       reverse('zerver.views.create_realm',
+                               kwargs={'creation_key': key}))
 
 class RealmCreationKey(models.Model):
-    creation_key = models.CharField("activation key", db_index=True, max_length=40)
-    date_created = models.DateTimeField("created", default=timezone_now)
+    creation_key = models.CharField('activation key', max_length=40)
+    date_created = models.DateTimeField('created', default=timezone_now)
 
     # True just if we should presume the email address the user enters
     # is theirs, and skip sending mail to it to confirm that.
-    presume_email_valid: bool = models.BooleanField(default=False)
+    presume_email_valid = models.BooleanField(default=False)  # type: bool
 
     class Invalid(Exception):
         pass
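The removed (newer) generate_realm_creation_url composes its link with urljoin over ROOT_DOMAIN_URI and a reversed route, where the older code concatenates scheme, host, and path with %-formatting. A small sketch contrasting the two styles, with made-up values:

from urllib.parse import urljoin

ROOT_DOMAIN_URI = "https://zulip.example.com"  # made-up deployment URI
path = "/new/6zrb57k3vejwxhfcdhe3rizt"  # shape of a reversed create-realm route

print(urljoin(ROOT_DOMAIN_URI, path))                       # urljoin style
print("%s%s%s" % ("https://", "zulip.example.com", path))   # old concatenation style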
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 
 # Copyright: (c) 2008, Jarek Zgoda <jarek.zgoda@gmail.com>
 
-__revision__ = "$Id: settings.py 12 2008-11-23 19:38:52Z jarek.zgoda $"
+__revision__ = '$Id: settings.py 12 2008-11-23 19:38:52Z jarek.zgoda $'
+
 STATUS_ACTIVE = 1
 STATUS_REVOKED = 2
@@ -1,106 +0,0 @@
-from typing import Optional
-
-from django.conf import settings
-from django.utils.translation import gettext as _
-
-from corporate.lib.stripe import LicenseLimitError, get_latest_seat_count
-from corporate.models import get_current_plan_by_realm
-from zerver.actions.create_user import send_message_to_signup_notification_stream
-from zerver.lib.exceptions import InvitationError
-from zerver.models import Realm, get_system_bot
-
-
-def generate_licenses_low_warning_message_if_required(realm: Realm) -> Optional[str]:
-    plan = get_current_plan_by_realm(realm)
-    if plan is None or plan.automanage_licenses:
-        return None
-
-    licenses_remaining = plan.licenses() - get_latest_seat_count(realm)
-    if licenses_remaining > 3:
-        return None
-
-    format_kwargs = {
-        "billing_page_link": "/billing/#settings",
-        "deactivate_user_help_page_link": "/help/deactivate-or-reactivate-a-user",
-    }
-
-    if licenses_remaining <= 0:
-        return _(
-            "Your organization has no Zulip licenses remaining and can no longer accept new users. "
-            "Please [increase the number of licenses]({billing_page_link}) or "
-            "[deactivate inactive users]({deactivate_user_help_page_link}) to allow new users to join."
-        ).format(**format_kwargs)
-
-    return {
-        1: _(
-            "Your organization has only one Zulip license remaining. You can "
-            "[increase the number of licenses]({billing_page_link}) or [deactivate inactive users]({deactivate_user_help_page_link}) "
-            "to allow more than one user to join."
-        ),
-        2: _(
-            "Your organization has only two Zulip licenses remaining. You can "
-            "[increase the number of licenses]({billing_page_link}) or [deactivate inactive users]({deactivate_user_help_page_link}) "
-            "to allow more than two users to join."
-        ),
-        3: _(
-            "Your organization has only three Zulip licenses remaining. You can "
-            "[increase the number of licenses]({billing_page_link}) or [deactivate inactive users]({deactivate_user_help_page_link}) "
-            "to allow more than three users to join."
-        ),
-    }[licenses_remaining].format(**format_kwargs)
-
-
-def send_user_unable_to_signup_message_to_signup_notification_stream(
-    realm: Realm, user_email: str
-) -> None:
-    message = _(
-        "A new member ({email}) was unable to join your organization because all Zulip licenses "
-        "are in use. Please [increase the number of licenses]({billing_page_link}) or "
-        "[deactivate inactive users]({deactivate_user_help_page_link}) to allow new members to join."
-    ).format(
-        email=user_email,
-        billing_page_link="/billing/#settings",
-        deactivate_user_help_page_link="/help/deactivate-or-reactivate-a-user",
-    )
-
-    send_message_to_signup_notification_stream(
-        get_system_bot(settings.NOTIFICATION_BOT, realm.id), realm, message
-    )
-
-
-def check_spare_licenses_available_for_adding_new_users(
-    realm: Realm, number_of_users_to_add: int
-) -> None:
-    plan = get_current_plan_by_realm(realm)
-    if (
-        plan is None
-        or plan.automanage_licenses
-        or plan.customer.exempt_from_from_license_number_check
-    ):
-        return
-
-    if plan.licenses() < get_latest_seat_count(realm) + number_of_users_to_add:
-        raise LicenseLimitError()
-
-
-def check_spare_licenses_available_for_registering_new_user(
-    realm: Realm, user_email_to_add: str
-) -> None:
-    try:
-        check_spare_licenses_available_for_adding_new_users(realm, 1)
-    except LicenseLimitError:
-        send_user_unable_to_signup_message_to_signup_notification_stream(realm, user_email_to_add)
-        raise
-
-
-def check_spare_licenses_available_for_inviting_new_users(realm: Realm, num_invites: int) -> None:
-    try:
-        check_spare_licenses_available_for_adding_new_users(realm, num_invites)
-    except LicenseLimitError:
-        if num_invites == 1:
-            message = _("All Zulip licenses for this organization are currently in use.")
-        else:
-            message = _(
-                "Your organization does not have enough unused Zulip licenses to invite {num_invites} users."
-            ).format(num_invites=num_invites)
-        raise InvitationError(message, [], sent_invitations=False, license_limit_reached=True)
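In generate_licenses_low_warning_message_if_required above, only the 0-3 band produces a warning: more than three remaining licenses returns None, zero or fewer gets the "no licenses remaining" message, and one through three index into the message dict. A compact sketch of that branching, with hypothetical counts:

# Compact sketch of the warning thresholds above (counts are hypothetical).
def warning_band(licenses: int, seats: int) -> str:
    remaining = licenses - seats
    if remaining > 3:
        return "no warning"
    if remaining <= 0:
        return "no licenses remaining"
    return f"only {remaining} license(s) remaining"


for seats in (20, 22, 25, 26):
    print(warning_band(25, seats))
# no warning / only 3 license(s) remaining / no licenses remaining / no licenses remaining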
File diff suppressed because it is too large
@@ -1,181 +0,0 @@
-import logging
-from typing import Any, Callable, Dict, Union
-
-import stripe
-from django.conf import settings
-
-from corporate.lib.stripe import (
-    BillingError,
-    UpgradeWithExistingPlanError,
-    ensure_realm_does_not_have_active_plan,
-    process_initial_upgrade,
-    update_or_create_stripe_customer,
-)
-from corporate.models import Event, PaymentIntent, Session
-from zerver.models import get_active_user_profile_by_id_in_realm
-
-billing_logger = logging.getLogger("corporate.stripe")
-
-
-def error_handler(
-    func: Callable[[Any, Any], None],
-) -> Callable[[Union[stripe.checkout.Session, stripe.PaymentIntent], Event], None]:
-    def wrapper(
-        stripe_object: Union[stripe.checkout.Session, stripe.PaymentIntent], event: Event
-    ) -> None:
-        event.status = Event.EVENT_HANDLER_STARTED
-        event.save(update_fields=["status"])
-
-        try:
-            func(stripe_object, event.content_object)
-        except BillingError as e:
-            billing_logger.warning(
-                "BillingError in %s event handler: %s. stripe_object_id=%s, customer_id=%s metadata=%s",
-                event.type,
-                e.error_description,
-                stripe_object.id,
-                stripe_object.customer,
-                stripe_object.metadata,
-            )
-            event.status = Event.EVENT_HANDLER_FAILED
-            event.handler_error = {
-                "message": e.msg,
-                "description": e.error_description,
-            }
-            event.save(update_fields=["status", "handler_error"])
-        except Exception:
-            billing_logger.exception(
-                "Uncaught exception in %s event handler:",
-                event.type,
-                stack_info=True,
-            )
-            event.status = Event.EVENT_HANDLER_FAILED
-            event.handler_error = {
-                "description": f"uncaught exception in {event.type} event handler",
-                "message": BillingError.CONTACT_SUPPORT.format(email=settings.ZULIP_ADMINISTRATOR),
-            }
-            event.save(update_fields=["status", "handler_error"])
-        else:
-            event.status = Event.EVENT_HANDLER_SUCCEEDED
-            event.save()
-
-    return wrapper
-
-
-@error_handler
-def handle_checkout_session_completed_event(
-    stripe_session: stripe.checkout.Session, session: Session
-) -> None:
-    session.status = Session.COMPLETED
-    session.save()
-
-    stripe_setup_intent = stripe.SetupIntent.retrieve(stripe_session.setup_intent)
-    assert session.customer.realm is not None
-    user_id = stripe_session.metadata.get("user_id")
-    assert user_id is not None
-    user = get_active_user_profile_by_id_in_realm(user_id, session.customer.realm)
-    payment_method = stripe_setup_intent.payment_method
-
-    if session.type in [
-        Session.UPGRADE_FROM_BILLING_PAGE,
-        Session.RETRY_UPGRADE_WITH_ANOTHER_PAYMENT_METHOD,
-    ]:
-        ensure_realm_does_not_have_active_plan(user.realm)
-        update_or_create_stripe_customer(user, payment_method)
-        session.payment_intent.status = PaymentIntent.PROCESSING
-        session.payment_intent.last_payment_error = ()
-        session.payment_intent.save(update_fields=["status", "last_payment_error"])
-        try:
-            stripe.PaymentIntent.confirm(
-                session.payment_intent.stripe_payment_intent_id,
-                payment_method=payment_method,
-                off_session=True,
-            )
-        except stripe.error.CardError:
-            pass
-    elif session.type in [
-        Session.FREE_TRIAL_UPGRADE_FROM_BILLING_PAGE,
-        Session.FREE_TRIAL_UPGRADE_FROM_ONBOARDING_PAGE,
-    ]:
-        ensure_realm_does_not_have_active_plan(user.realm)
-        update_or_create_stripe_customer(user, payment_method)
-        process_initial_upgrade(
-            user,
-            int(stripe_setup_intent.metadata["licenses"]),
-            stripe_setup_intent.metadata["license_management"] == "automatic",
-            int(stripe_setup_intent.metadata["billing_schedule"]),
-            charge_automatically=True,
-            free_trial=True,
-        )
-    elif session.type in [Session.CARD_UPDATE_FROM_BILLING_PAGE]:
-        update_or_create_stripe_customer(user, payment_method)
-
-
-@error_handler
-def handle_payment_intent_succeeded_event(
-    stripe_payment_intent: stripe.PaymentIntent, payment_intent: PaymentIntent
-) -> None:
-    payment_intent.status = PaymentIntent.SUCCEEDED
-    payment_intent.save()
-    metadata: Dict[str, Any] = stripe_payment_intent.metadata
-    assert payment_intent.customer.realm is not None
-    user_id = metadata.get("user_id")
-    assert user_id is not None
-    user = get_active_user_profile_by_id_in_realm(user_id, payment_intent.customer.realm)
-
-    description = ""
-    for charge in stripe_payment_intent.charges:
-        description = f"Payment (Card ending in {charge.payment_method_details.card.last4})"
-        break
-
-    stripe.InvoiceItem.create(
-        amount=stripe_payment_intent.amount * -1,
-        currency="usd",
-        customer=stripe_payment_intent.customer,
-        description=description,
-        discountable=False,
-    )
-    try:
-        ensure_realm_does_not_have_active_plan(user.realm)
-    except UpgradeWithExistingPlanError as e:
-        stripe_invoice = stripe.Invoice.create(
-            auto_advance=True,
-            collection_method="charge_automatically",
-            customer=stripe_payment_intent.customer,
-            days_until_due=None,
-            statement_descriptor="Zulip Cloud Standard Credit",
-        )
-        stripe.Invoice.finalize_invoice(stripe_invoice)
-        raise e
-
-    process_initial_upgrade(
-        user,
-        int(metadata["licenses"]),
-        metadata["license_management"] == "automatic",
-        int(metadata["billing_schedule"]),
-        True,
-        False,
-    )
-
-
-@error_handler
-def handle_payment_intent_payment_failed_event(
-    stripe_payment_intent: stripe.PaymentIntent, payment_intent: Event
-) -> None:
-    payment_intent.status = PaymentIntent.get_status_integer_from_status_text(
-        stripe_payment_intent.status
-    )
-    billing_logger.info(
-        "Stripe payment intent failed: %s %s %s %s",
-        payment_intent.customer.realm.string_id,
-        stripe_payment_intent.last_payment_error.get("type"),
-        stripe_payment_intent.last_payment_error.get("code"),
-        stripe_payment_intent.last_payment_error.get("param"),
-    )
-    payment_intent.last_payment_error = {
-        "description": stripe_payment_intent.last_payment_error.get("type"),
-    }
-    payment_intent.last_payment_error["message"] = stripe_payment_intent.last_payment_error.get(
-        "message"
-    )
-    payment_intent.save(update_fields=["status", "last_payment_error"])
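The error_handler wrapper above is a small state machine around each Stripe handler: mark the Event row STARTED, run the handler, then record FAILED with a structured error or SUCCEEDED. A stripped-down sketch of the same decorator shape, with a plain dict standing in for the Event model and simplified states:

from typing import Any, Callable, Dict


def error_handler(func: Callable[[Any], None]) -> Callable[[Any, Dict[str, Any]], None]:
    def wrapper(stripe_object: Any, event: Dict[str, Any]) -> None:
        event["status"] = "started"  # EVENT_HANDLER_STARTED in the real code
        try:
            func(stripe_object)
        except Exception as e:  # the real code handles BillingError separately
            event["status"] = "failed"
            event["handler_error"] = {"message": str(e)}
        else:
            event["status"] = "succeeded"

    return wrapper


@error_handler
def handle(stripe_object: Any) -> None:
    if stripe_object is None:
        raise ValueError("missing stripe object")


event: Dict[str, Any] = {}
handle("payment_intent", event)
print(event)  # {'status': 'succeeded'}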
@@ -1,15 +0,0 @@
-from urllib.parse import urlencode, urljoin, urlunsplit
-
-from django.conf import settings
-from django.urls import reverse
-
-from zerver.models import Realm, get_realm
-
-
-def get_support_url(realm: Realm) -> str:
-    support_realm_uri = get_realm(settings.STAFF_SUBDOMAIN).uri
-    support_url = urljoin(
-        support_realm_uri,
-        urlunsplit(("", "", reverse("support"), urlencode({"q": realm.string_id}), "")),
-    )
-    return support_url
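get_support_url above builds the relative /support?q=... piece with urlunsplit and urlencode, then joins it onto the staff realm's URI. A small sketch of the same composition (the URI and realm id below are placeholders):

from urllib.parse import urlencode, urljoin, urlunsplit

support_realm_uri = "https://staff.zulip.example.com"  # placeholder staff realm
relative = urlunsplit(("", "", "/support", urlencode({"q": "lear"}), ""))
print(urljoin(support_realm_uri, relative))
# https://staff.zulip.example.com/support?q=lear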
Some files were not shown because too many files have changed in this diff.