Compare commits

..

9 Commits

Author SHA1 Message Date
Anders Kaseorg
d111128def memcached: Switch from pylibmc to python-binary-memcached.
Signed-off-by: Anders Kaseorg <anders@zulip.com>
2020-07-31 17:34:48 -07:00
Tim Abbott
31f7006309 Release Zulip Server 3.1. 2020-07-30 15:44:18 -07:00
arpit551
d8b966e528 migrations: Upgrade migrations to remove duplicates in all Count tables.
This commit upgrades 0015_clear_duplicate_counts migration to remove
duplicate count in StreamCount, UserCount, InstallationCount as well.

Fixes https://github.com/zulip/docker-zulip/issues/266
2020-07-30 15:18:07 -07:00
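For illustration, deduplicating rows in a count table from a Django data migration usually means grouping rows by their logical key and deleting all but one row per group. The sketch below is a hypothetical example of that pattern for UserCount (the key fields are assumptions; this is not the actual 0015_clear_duplicate_counts code):

from collections import defaultdict


def clear_duplicate_counts(apps, schema_editor):
    # Data migrations must use the historical model from the app registry.
    UserCount = apps.get_model("analytics", "UserCount")
    rows_by_key = defaultdict(list)
    for row_id, *key in UserCount.objects.values_list(
        "id", "user_id", "property", "subgroup", "end_time"
    ):
        rows_by_key[tuple(key)].append(row_id)
    for ids in rows_by_key.values():
        # Keep the first row for each logical key; delete the duplicates.
        UserCount.objects.filter(id__in=ids[1:]).delete()

# The function would be wired into the migration via
# migrations.RunPython(clear_duplicate_counts, elidable=True).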
Mateusz Mandera
444359ebd3 saml: Use self.logger in get_issuing_idp.
get_issuing_idp is no longer a class method, so that awkward logger
fetching can be skipped and self.logger can be accessed.
2020-07-26 15:49:44 -07:00
Mateusz Mandera
c78bdd6330 saml: Fix incorrect settings object being passed in get_issuing_idp.
Fixes #15904.

settings is supposed to be a proper OneLogin_Saml2_Settings object,
rather than an empty dictionary. This bug wasn't easy to spot because
the code path that triggers it runs only if the SAMLResponse contains
encrypted assertions.
2020-07-26 15:49:43 -07:00
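For context, the sketch below shows the relevant python3-saml calls and why an empty dict fails: decrypting encrypted assertions needs SP key material from a real settings object. Names like saml_config_dict are placeholders, and this is only an illustration of the API, not Zulip's actual get_issuing_idp:

from onelogin.saml2.response import OneLogin_Saml2_Response
from onelogin.saml2.settings import OneLogin_Saml2_Settings


def get_issuers_from_response(saml_config_dict, saml_response_b64):
    # A proper settings object (not {}) is required so that python3-saml
    # can decrypt encrypted assertions before reading the issuers.
    settings = OneLogin_Saml2_Settings(saml_config_dict, sp_validation_only=True)
    response = OneLogin_Saml2_Response(settings, saml_response_b64)
    return response.get_issuers()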
Gittenburg
f4e02f0e80 upload: Do not open compose box when editing.
Previously, editing a message and uploading a file in
the edit textarea opened the message compose box.

Fixes #15890.
2020-07-23 11:29:51 -07:00
Gittenburg
77234ef40b message_edit: Fix invisible delete spinner.
Introduced in 953d475274.
2020-07-23 10:25:02 -07:00
Tim Abbott
00f9cd672b docs: Fix versions in stretch=>buster documentation. 2020-07-22 16:36:00 -07:00
Emilio López
c33a7dfff4 email_mirror: Fix exception handling unstructured headers.
This commit rewrites the way addresses are collected. If
the header with the address is not an AddressHeader (for instance,
Delivered-To and Envelope-To), we take its string representation.

Fixes: #15864 ("Error in email_mirror - _UnstructuredHeader has no attribute addresses").
2020-07-22 12:11:38 -07:00
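A minimal sketch of that fallback using the standard library email package (collect_recipient_addresses and the header list are illustrative assumptions, not the actual email_mirror code):

from email import message_from_string
from email.headerregistry import AddressHeader
from email.policy import default


def collect_recipient_addresses(raw_message):
    # With the modern email policy, structured headers such as To/Cc parse
    # as AddressHeader objects; Delivered-To and Envelope-To do not.
    message = message_from_string(raw_message, policy=default)
    addresses = []
    for header_name in ("To", "Cc", "Delivered-To", "Envelope-To"):
        for header in message.get_all(header_name, []):
            if isinstance(header, AddressHeader):
                addresses.extend(addr.addr_spec for addr in header.addresses)
            else:
                # Unstructured headers have no .addresses attribute (the
                # crash fixed by this commit); use their string form instead.
                addresses.append(str(header))
    return addresses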
6115 changed files with 429615 additions and 732848 deletions

6
.browserslistrc Normal file

@@ -0,0 +1,6 @@
> 0.2%
> 0.2% in US
last 2 versions
Firefox ESR
not dead
Chrome 26 # similar to PhantomJS

383
.circleci/config.yml Normal file

@@ -0,0 +1,383 @@
# See https://zulip.readthedocs.io/en/latest/testing/continuous-integration.html for
# high-level documentation on our CircleCI setup.
# See CircleCI upstream's docs on this config format:
# https://circleci.com/docs/2.0/language-python/
#
version: 2.0
aliases:
- &create_cache_directories
run:
name: create cache directories
command: |
dirs=(/srv/zulip-{npm,venv,emoji}-cache)
sudo mkdir -p "${dirs[@]}"
sudo chown -R circleci "${dirs[@]}"
- &restore_cache_package_json
restore_cache:
keys:
- v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}
- &restore_cache_requirements
restore_cache:
keys:
- v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor-dev.txt" }}-{{ checksum "requirements/dev.txt" }}
- &restore_emoji_cache
restore_cache:
keys:
- v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "tools/setup/emoji/emoji_map.json" }}-{{ checksum "tools/setup/emoji/build_emoji" }}-{{checksum "tools/setup/emoji/emoji_setup_utils.py" }}-{{ checksum "tools/setup/emoji/emoji_names.py" }}-{{ checksum "package.json" }}
- &install_dependencies
run:
name: install dependencies
command: |
sudo apt-get update
# Install moreutils so we can use `ts` and `mispipe` in the following.
sudo apt-get install -y moreutils
# CircleCI sets the following in Git config at clone time:
# url.ssh://git@github.com.insteadOf https://github.com
# This breaks the Git clones in the NVM `install.sh` we run
# in `install-node`.
# TODO: figure out why that breaks, and whether we want it.
# (Is it an optimization?)
rm -f /home/circleci/.gitconfig
# This is the main setup job for the test suite
mispipe "tools/ci/setup-backend --skip-dev-db-build" ts
# Cleaning caches is mostly unnecessary in Circle, because
# most builds don't get to write to the cache.
# mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0 2>&1" ts
- &save_cache_package_json
save_cache:
paths:
- /srv/zulip-npm-cache
key: v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}
- &save_cache_requirements
save_cache:
paths:
- /srv/zulip-venv-cache
key: v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor-dev.txt" }}-{{ checksum "requirements/dev.txt" }}
- &save_emoji_cache
save_cache:
paths:
- /srv/zulip-emoji-cache
key: v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "tools/setup/emoji/emoji_map.json" }}-{{ checksum "tools/setup/emoji/build_emoji" }}-{{checksum "tools/setup/emoji/emoji_setup_utils.py" }}-{{ checksum "tools/setup/emoji/emoji_names.py" }}-{{ checksum "package.json" }}
- &do_bionic_hack
run:
name: do Bionic hack
command: |
# Temporary hack till `sudo service redis-server start` gets fixed in Bionic. See
# https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf
- &run_backend_tests
run:
name: run backend tests
command: |
. /srv/zulip-py3-venv/bin/activate
mispipe "./tools/ci/backend 2>&1" ts
- &run_frontend_tests
run:
name: run frontend tests
command: |
. /srv/zulip-py3-venv/bin/activate
mispipe "./tools/ci/frontend 2>&1" ts
- &upload_coverage_report
run:
name: upload coverage report
command: |
# codecov requires `.coverage` file to be stored in pwd for
# uploading coverage results.
mv /home/circleci/zulip/var/.coverage /home/circleci/zulip/.coverage
. /srv/zulip-py3-venv/bin/activate
# TODO: Check that the next release of codecov doesn't
# throw find error.
# codecov==2.0.16 introduced a bug which uses "find"
# for locating files which is buggy on some platforms.
# It was fixed via https://github.com/codecov/codecov-python/pull/217
# and should get automatically fixed here once it's released.
# We cannot pin the version here because we need the latest version for uploading files.
# see https://community.codecov.io/t/http-400-while-uploading-to-s3-with-python-codecov-from-travis/1428/7
pip install codecov && codecov \
|| echo "Error in uploading coverage reports to codecov.io."
- &build_production
run:
name: build production
command: |
sudo apt-get update
# Install moreutils so we can use `ts` and `mispipe` in the following.
sudo apt-get install -y moreutils
mispipe "./tools/ci/production-build 2>&1" ts
- &production_extract_tarball
run:
name: production extract tarball
command: |
sudo apt-get update
# Install moreutils so we can use `ts` and `mispipe` in the following.
sudo apt-get install -y moreutils
mispipe "/tmp/production-extract-tarball 2>&1" ts
- &install_production
run:
name: install production
command: |
sudo service rabbitmq-server restart
sudo mispipe "/tmp/production-install 2>&1" ts
- &verify_production
run:
name: verify install
command: |
sudo mispipe "/tmp/production-verify 2>&1" ts
- &upgrade_postgresql
run:
name: upgrade postgresql
command: |
sudo mispipe "/tmp/production-upgrade-pg 2>&1" ts
- &check_xenial_provision_error
run:
name: check tools/provision error message on xenial
command: |
! tools/provision > >(tee provision.out)
grep -Fqx 'CRITICAL:root:Unsupported platform: ubuntu 16.04' provision.out
- &check_xenial_upgrade_error
run:
name: check scripts/lib/upgrade-zulip-stage-2 error message on xenial
command: |
! sudo scripts/lib/upgrade-zulip-stage-2 2> >(tee upgrade.err >&2)
grep -Fq 'upgrade-zulip-stage-2: Unsupported platform: ubuntu 16.04' upgrade.err
- &notify_failure_status
run:
name: On fail
when: on_fail
branches:
only: master
command: |
if [[ "$CIRCLE_REPOSITORY_URL" == "git@github.com:zulip/zulip.git" && "$ZULIP_BOT_KEY" != "" ]]; then
curl -H "Content-Type: application/json" \
-X POST -i 'https://chat.zulip.org/api/v1/external/circleci?api_key='"$ZULIP_BOT_KEY"'&stream=automated%20testing&topic=master%20failing' \
-d '{"payload": { "branch": "'"$CIRCLE_BRANCH"'", "reponame": "'"$CIRCLE_PROJECT_REPONAME"'", "status": "failed", "build_url": "'"$CIRCLE_BUILD_URL"'", "username": "'"$CIRCLE_USERNAME"'"}}'
fi
jobs:
"bionic-backend-frontend":
docker:
# This is built from tools/ci/images/bionic/Dockerfile .
# Bionic ships with Python 3.6.
- image: arpit551/circleci:bionic-python-test
working_directory: ~/zulip
steps:
- checkout
- *create_cache_directories
- *do_bionic_hack
- *restore_cache_package_json
- *restore_cache_requirements
- *restore_emoji_cache
- *install_dependencies
- *save_cache_package_json
- *save_cache_requirements
- *save_emoji_cache
- *run_backend_tests
- run:
name: test locked requirements
command: |
. /srv/zulip-py3-venv/bin/activate
mispipe "./tools/test-locked-requirements 2>&1" ts
- *run_frontend_tests
# We only need to upload coverage reports on whichever platform
# runs the frontend tests.
- *upload_coverage_report
- store_artifacts:
path: ./var/casper/
destination: casper
- store_artifacts:
path: ./var/puppeteer/
destination: puppeteer
- store_artifacts:
path: ../../../tmp/zulip-test-event-log/
destination: test-reports
- store_test_results:
path: ./var/xunit-test-results/casper/
- *notify_failure_status
"focal-backend":
docker:
# This is built from tools/ci/images/focal/Dockerfile.
# Focal ships with Python 3.8.2.
- image: arpit551/circleci:focal-python-test
working_directory: ~/zulip
steps:
- checkout
- *create_cache_directories
- *restore_cache_package_json
- *restore_cache_requirements
- *restore_emoji_cache
- *install_dependencies
- *save_cache_package_json
- *save_cache_requirements
- *save_emoji_cache
- *run_backend_tests
- run:
name: Check development database build
command: mispipe "tools/ci/setup-backend" ts
- *notify_failure_status
"xenial-legacy":
docker:
- image: arpit551/circleci:xenial-python-test
working_directory: ~/zulip
steps:
- checkout
- *check_xenial_provision_error
- *check_xenial_upgrade_error
- *notify_failure_status
"bionic-production-build":
docker:
# This is built from tools/ci/images/bionic/Dockerfile .
# Bionic ships with Python 3.6.
- image: arpit551/circleci:bionic-python-test
working_directory: ~/zulip
steps:
- checkout
- *create_cache_directories
- *do_bionic_hack
- *restore_cache_package_json
- *restore_cache_requirements
- *restore_emoji_cache
- *build_production
- *save_cache_package_json
- *save_cache_requirements
- *save_emoji_cache
# Persist the built tarball to be used in downstream job
# for installation of production server.
# See https://circleci.com/docs/2.0/workflows/#using-workspaces-to-share-data-among-jobs
- persist_to_workspace:
# Must be an absolute path,
# or relative path from working_directory.
# This is a directory on the container which is
# taken to be the root directory of the workspace.
root: /tmp
# Must be relative path from root
paths:
- zulip-server-test.tar.gz
- success-http-headers.template.txt
- production-install
- production-verify
- production-upgrade-pg
- production
- production-extract-tarball
- *notify_failure_status
"bionic-production-install":
docker:
# This is built from tools/ci/images/bionic/Dockerfile .
# Bionic ships with Python 3.6.
- image: arpit551/circleci:bionic-python-test
working_directory: ~/zulip
steps:
# Contains the built tarball from bionic-production-build job
- attach_workspace:
# Must be absolute path or relative path from working_directory
at: /tmp
- *create_cache_directories
- *do_bionic_hack
- *production_extract_tarball
- *restore_cache_package_json
- *install_production
- *verify_production
- *upgrade_postgresql
- *verify_production
- *save_cache_package_json
- *notify_failure_status
"focal-production-install":
docker:
# This is built from tools/ci/images/focal/Dockerfile.
# Focal ships with Python 3.8.2.
- image: arpit551/circleci:focal-python-test
working_directory: ~/zulip
steps:
# Contains the built tarball from bionic-production-build job
- attach_workspace:
# Must be absolute path or relative path from working_directory
at: /tmp
- *create_cache_directories
- run:
name: do memcached hack
command: |
# Temporary hack till memcached upstream is updated in Focal.
# https://bugs.launchpad.net/ubuntu/+source/memcached/+bug/1878721
echo "export SASL_CONF_PATH=/etc/sasl2" | sudo tee - a /etc/default/memcached
- *production_extract_tarball
- *restore_cache_package_json
- *install_production
- *verify_production
- *save_cache_package_json
- *notify_failure_status
workflows:
version: 2
"Ubuntu 16.04 Xenial (Python 3.5, legacy)":
jobs:
- "xenial-legacy"
"Ubuntu 18.04 Bionic (Python 3.6, backend+frontend)":
jobs:
- "bionic-backend-frontend"
"Ubuntu 20.04 Focal (Python 3.8, backend)":
jobs:
- "focal-backend"
"Production":
jobs:
- "bionic-production-build"
- "bionic-production-install":
requires:
- "bionic-production-build"
- "focal-production-install":
requires:
- "bionic-production-build"


@@ -1,27 +0,0 @@
te
ans
pullrequest
ist
cros
wit
nwe
circularly
ned
ba
ressemble
ser
sur
hel
fpr
alls
nd
ot
womens
vise
falsy
ro
derails
forin
uper
slac
couldn


@@ -3,22 +3,19 @@ root = true
[*]
end_of_line = lf
charset = utf-8
indent_size = 4
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true
insert_final_newline = true
binary_next_line = true # for shfmt
switch_case_indent = true # for shfmt
[*.{sh,py,pyi,js,ts,json,xml,css,scss,hbs,html}]
indent_style = space
indent_size = 4
[{*.{js,json,ts},check-openapi}]
max_line_length = 100
[*.{py,pyi}]
[*.py]
max_line_length = 110
[*.{md,svg,rb,pp,yaml,yml}]
indent_size = 2
[*.{js,ts}]
max_line_length = 100
[package.json]
[*.{svg,rb,pp}]
indent_style = space
indent_size = 2


@@ -4,12 +4,7 @@
/docs/_build
/static/generated
/static/third
/static/webpack-bundles
/var/*
!/var/puppeteer
/var/puppeteer/*
!/var/puppeteer/test_credentials.d.ts
/web/generated
/web/third
/zulip-current-venv
/var
/zulip-py3-venv


@@ -1,62 +1,34 @@
{
"root": true,
"env": {
"es2020": true,
"node": true
"node": true,
"es6": true
},
"extends": [
"eslint:recommended",
"plugin:import/errors",
"plugin:import/warnings",
"plugin:no-jquery/recommended",
"plugin:no-jquery/deprecated",
"plugin:unicorn/recommended",
"prettier"
],
"parser": "@babel/eslint-parser",
"parserOptions": {
"requireConfigFile": false,
"ecmaVersion": 2019,
"warnOnUnsupportedTypeScriptVersion": false,
"sourceType": "unambiguous"
"sourceType": "module"
},
"plugins": ["formatjs", "no-jquery"],
"settings": {
"formatjs": {
"additionalFunctionNames": ["$t", "$t_html"]
},
"no-jquery": {
"collectionReturningPlugins": {
"expectOne": "always"
},
"variablePattern": "^\\$(?!t$|t_html$)."
}
},
"reportUnusedDisableDirectives": true,
"plugins": [
"eslint-plugin-empty-returns"
],
"rules": {
"array-callback-return": "error",
"arrow-body-style": "error",
"block-scoped-var": "error",
"consistent-return": "error",
"curly": "error",
"dot-notation": "error",
"empty-returns/main": "error",
"eqeqeq": "error",
"formatjs/enforce-default-message": ["error", "literal"],
"formatjs/enforce-placeholders": [
"error",
{"ignoreList": ["b", "code", "em", "i", "kbd", "p", "strong"]}
],
"formatjs/no-id": "error",
"guard-for-in": "error",
"import/extensions": "error",
"import/first": "error",
"import/newline-after-import": "error",
"import/no-self-import": "error",
"import/no-unresolved": ["error", {"ignore": ["^date-fns-tz$|^js-cookie$|^katex$"]}],
"import/no-useless-path-segments": "error",
"import/order": ["error", {"alphabetize": {"order": "asc"}, "newlines-between": "always"}],
"import/unambiguous": "error",
"lines-around-directive": "error",
"new-cap": "error",
"new-cap": [ "error",
{
"capIsNew": false
}
],
"no-alert": "error",
"no-array-constructor": "error",
"no-bitwise": "error",
@@ -68,12 +40,9 @@
"no-else-return": "error",
"no-eq-null": "error",
"no-eval": "error",
"no-implicit-coercion": "error",
"no-implied-eval": "error",
"no-inner-declarations": "off",
"no-iterator": "error",
"no-jquery/no-constructor-attributes": "error",
"no-jquery/no-parse-html-literal": "error",
"no-label-var": "error",
"no-labels": "error",
"no-loop-func": "error",
@@ -89,184 +58,292 @@
"no-script-url": "error",
"no-self-compare": "error",
"no-sync": "error",
"no-throw-literal": "error",
"no-undef-init": "error",
"no-unneeded-ternary": ["error", {"defaultAssignment": false}],
"no-unneeded-ternary": [ "error", { "defaultAssignment": false } ],
"no-unused-expressions": "error",
"no-unused-vars": ["error", {"ignoreRestSiblings": true}],
"no-use-before-define": ["error", {"functions": false}],
"no-useless-concat": "error",
"no-unused-vars": [ "error",
{
"vars": "local",
"varsIgnorePattern": "print_elapsed_time|check_duplicate_ids"
}
],
"no-use-before-define": "error",
"no-useless-constructor": "error",
"no-var": "error",
"object-shorthand": ["error", "always", {"avoidExplicitReturnArrows": true}],
"one-var": ["error", "never"],
"one-var": [ "error", "never" ],
"prefer-arrow-callback": "error",
"prefer-const": ["error", {"ignoreReadBeforeAssign": true}],
"prefer-const": [ "error",
{
"ignoreReadBeforeAssign": true
}
],
"radix": "error",
"sort-imports": ["error", {"ignoreDeclarationSort": true}],
"spaced-comment": ["error", "always", {"markers": ["/"]}],
"strict": "error",
"unicorn/consistent-function-scoping": "off",
"unicorn/explicit-length-check": "off",
"unicorn/filename-case": "off",
"unicorn/no-await-expression-member": "off",
"unicorn/no-negated-condition": "off",
"unicorn/no-null": "off",
"unicorn/no-process-exit": "off",
"unicorn/no-useless-undefined": "off",
"unicorn/numeric-separators-style": "off",
"unicorn/prefer-module": "off",
"unicorn/prefer-node-protocol": "off",
"unicorn/prefer-ternary": "off",
"unicorn/prefer-top-level-await": "off",
"unicorn/prevent-abbreviations": "off",
"unicorn/switch-case-braces": "off",
"valid-typeof": ["error", {"requireStringLiterals": true}],
"sort-imports": "error",
"spaced-comment": "off",
"strict": "off",
"valid-typeof": [ "error", { "requireStringLiterals": true } ],
"yoda": "error"
},
"overrides": [
{
"files": ["web/tests/**"],
"files": [
"frontend_tests/**/*.{js,ts}",
"static/js/**/*.{js,ts}"
],
"globals": {
"$": false,
"ClipboardJS": false,
"FetchStatus": false,
"Filter": false,
"Handlebars": false,
"LightboxCanvas": false,
"MessageListData": false,
"MessageListView": false,
"Plotly": false,
"Sortable": false,
"WinChan": false,
"XDate": false,
"_": false,
"activity": false,
"admin": false,
"alert_words": false,
"alert_words_ui": false,
"attachments_ui": false,
"avatar": false,
"billing": false,
"blueslip": false,
"bot_data": false,
"bridge": false,
"buddy_data": false,
"buddy_list": false,
"channel": false,
"click_handlers": false,
"color_data": false,
"colorspace": false,
"common": false,
"components": false,
"compose": false,
"compose_actions": false,
"compose_fade": false,
"compose_pm_pill": false,
"compose_state": false,
"compose_ui": false,
"composebox_typeahead": false,
"condense": false,
"confirm_dialog": false,
"copy_and_paste": false,
"csrf_token": false,
"current_msg_list": true,
"drafts": false,
"dropdown_list_widget": false,
"echo": false,
"emoji": false,
"emoji_picker": false,
"favicon": false,
"feature_flags": false,
"feedback_widget": false,
"fenced_code": false,
"flatpickr": false,
"floating_recipient_bar": false,
"gear_menu": false,
"hash_util": false,
"hashchange": false,
"helpers": false,
"history": false,
"home_msg_list": false,
"hotspots": false,
"i18n": false,
"info_overlay": false,
"input_pill": false,
"invite": false,
"jQuery": false,
"katex": false,
"keydown_util": false,
"lightbox": false,
"list_cursor": false,
"list_render": false,
"list_util": false,
"loading": false,
"localStorage": false,
"local_message": false,
"localstorage": false,
"location": false,
"markdown": false,
"marked": false,
"md5": false,
"message_edit": false,
"message_edit_history": false,
"message_events": false,
"message_fetch": false,
"message_flags": false,
"message_list": false,
"message_live_update": false,
"message_scroll": false,
"message_store": false,
"message_util": false,
"message_viewport": false,
"moment": false,
"muting": false,
"muting_ui": false,
"narrow": false,
"narrow_state": false,
"navigate": false,
"night_mode": false,
"notifications": false,
"overlays": false,
"padded_widget": false,
"page_params": false,
"panels": false,
"people": false,
"pm_conversations": false,
"pm_list": false,
"pm_list_dom": false,
"pointer": false,
"popovers": false,
"presence": false,
"reactions": false,
"realm_icon": false,
"realm_logo": false,
"realm_night_logo": false,
"recent_senders": false,
"recent_topics": false,
"reload": false,
"reload_state": false,
"reminder": false,
"resize": false,
"rows": false,
"rtl": false,
"run_test": false,
"schema": false,
"scroll_bar": false,
"scroll_util": false,
"search": false,
"search_pill": false,
"search_pill_widget": false,
"search_suggestion": false,
"search_util": false,
"sent_messages": false,
"server_events": false,
"server_events_dispatch": false,
"settings": false,
"settings_account": false,
"settings_bots": false,
"settings_display": false,
"settings_emoji": false,
"settings_exports": false,
"settings_linkifiers": false,
"settings_invites": false,
"settings_muting": false,
"settings_notifications": false,
"settings_org": false,
"settings_panel_menu": false,
"settings_profile_fields": false,
"settings_sections": false,
"settings_streams": false,
"settings_toggle": false,
"settings_ui": false,
"settings_user_groups": false,
"settings_users": false,
"spoilers": false,
"starred_messages": false,
"stream_color": false,
"stream_create": false,
"stream_data": false,
"stream_edit": false,
"stream_events": false,
"stream_topic_history": false,
"stream_list": false,
"stream_muting": false,
"stream_popover": false,
"stream_sort": false,
"stream_ui_updates": false,
"StripeCheckout": false,
"submessage": false,
"subs": false,
"tab_bar": false,
"templates": false,
"tictactoe_widget": false,
"timerender": false,
"todo_widget": false,
"top_left_corner": false,
"topic_generator": false,
"topic_list": false,
"topic_zoom": false,
"transmit": false,
"tutorial": false,
"typeahead_helper": false,
"typing": false,
"typing_data": false,
"typing_events": false,
"ui": false,
"ui_init": false,
"ui_report": false,
"ui_util": false,
"unread": false,
"unread_ops": false,
"unread_ui": false,
"upgrade": false,
"upload": false,
"upload_widget": false,
"user_events": false,
"user_groups": false,
"user_pill": false,
"user_search": false,
"user_status": false,
"user_status_ui": false,
"poll_widget": false,
"vdom": false,
"widgetize": false,
"zcommand": false,
"zform": false,
"zxcvbn": false
}
},
{
"files": [
"frontend_tests/casper_tests/*.js",
"frontend_tests/casper_lib/*.js"
],
"rules": {
"no-jquery/no-selector-prop": "off"
}
},
{
"files": ["web/e2e-tests/**"],
"globals": {
"zulip_test": false
}
},
{
"files": ["web/src/**"],
"globals": {
"StripeCheckout": false
// Don't require ES features that PhantomJS doesn't support
"no-var": "off",
"prefer-arrow-callback": "off"
}
},
{
"files": ["**/*.ts"],
"extends": [
"plugin:@typescript-eslint/recommended",
"plugin:@typescript-eslint/recommended-requiring-type-checking",
"plugin:@typescript-eslint/strict",
"plugin:import/typescript"
"prettier/@typescript-eslint"
],
"parserOptions": {
"project": "tsconfig.json"
},
"settings": {
"import/resolver": {
"node": {
"extensions": [".ts", ".d.ts", ".js"] // https://github.com/import-js/eslint-plugin-import/issues/2267
}
}
},
"globals": {
"JQuery": false
},
"rules": {
// Disable base rule to avoid conflict
"no-duplicate-imports": "off",
"no-use-before-define": "off",
"empty-returns/main": "off",
"no-unused-vars": "off",
"no-useless-constructor": "off",
"@typescript-eslint/consistent-type-definitions": ["error", "type"],
"@typescript-eslint/consistent-type-imports": "error",
"@typescript-eslint/explicit-function-return-type": [
"error",
{"allowExpressions": true}
],
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/consistent-type-assertions": "error",
"@typescript-eslint/explicit-function-return-type": ["error", { "allowExpressions": true }],
"@typescript-eslint/member-ordering": "error",
"@typescript-eslint/no-duplicate-imports": "error",
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/no-extraneous-class": "error",
"@typescript-eslint/no-non-null-assertion": "off",
"@typescript-eslint/no-parameter-properties": "error",
"@typescript-eslint/no-unnecessary-condition": "off",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unsafe-argument": "off",
"@typescript-eslint/no-unsafe-assignment": "off",
"@typescript-eslint/no-unsafe-call": "off",
"@typescript-eslint/no-unsafe-member-access": "off",
"@typescript-eslint/no-unsafe-return": "off",
"@typescript-eslint/no-unused-vars": ["error", {"ignoreRestSiblings": true}],
"@typescript-eslint/no-use-before-define": ["error", {"functions": false}],
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-unused-vars": ["error", { "varsIgnorePattern": "^_" } ],
"@typescript-eslint/no-use-before-define": "error",
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-regexp-exec": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"import/no-cycle": "error",
"no-undef": "error"
}
},
{
"files": ["**/*.d.ts"],
"rules": {
"import/unambiguous": "off"
}
},
{
"files": ["web/e2e-tests/**", "web/tests/**"],
"globals": {
"CSS": false,
"document": false,
"navigator": false,
"window": false
},
"rules": {
"formatjs/no-id": "off",
"new-cap": "off",
"no-sync": "off",
"unicorn/prefer-prototype-methods": "off"
}
},
{
"files": ["web/debug-require.js"],
"env": {
"browser": true,
"es2020": false
},
"rules": {
// Don't require ES features that PhantomJS doesn't support
// TODO: Toggle these settings now that we don't use PhantomJS
"no-var": "off",
"object-shorthand": "off",
"prefer-arrow-callback": "off"
}
},
{
"files": ["web/shared/**", "web/src/**", "web/third/**"],
"env": {
"browser": true,
"node": false
},
"globals": {
"ZULIP_VERSION": false
},
"rules": {
"no-console": "error"
},
"settings": {
"import/resolver": {
"webpack": {
"config": "./web/webpack.config.ts"
}
}
}
},
{
"files": ["web/shared/**"],
"env": {
"browser": false,
"shared-node-browser": true
},
"rules": {
"import/no-restricted-paths": [
"error",
{
"zones": [
{
"target": "./web/shared",
"from": ".",
"except": ["./node_modules", "./web/shared"]
}
]
}
]
"@typescript-eslint/unified-signatures": "error"
}
}
]

19
.gitattributes vendored

@@ -1,19 +1,4 @@
# DIFFS: Noise suppression.
#
# Suppress noisy generated files in diffs.
# (When you actually want to see these diffs, use `git diff -a`.)
# Large test fixtures:
corporate/tests/stripe_fixtures/*.json -diff
# FORMATTING
# Maintain LF (Unix-style) newlines in text files.
* text=auto eol=lf
# Make sure various media files never get somehow auto-detected as text
# and then newline-converted.
*.gif binary
*.jpg binary
*.jpeg binary
@@ -26,7 +11,3 @@ corporate/tests/stripe_fixtures/*.json -diff
*.otf binary
*.tif binary
*.ogg binary
*.bson binary
*.bmp binary
*.mp3 binary
*.pdf binary

3
.github/FUNDING.yml vendored

@@ -1,3 +0,0 @@
github: zulip
patreon: zulip
open_collective: zulip


@@ -1,43 +1,14 @@
<!-- Describe your pull request here.-->
<!-- What's this PR for? (Just a link to an issue is fine.) -->
Fixes: <!-- Issue link, or clear description.-->
<!-- If the PR makes UI changes, always include one or more still screenshots to demonstrate your changes. If it seems helpful, add a screen capture of the new functionality as well.
**Testing Plan:** <!-- How have you tested? -->
Tooling tips: https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html
-->
**Screenshots and screen captures:**
**GIFs or Screenshots:** <!-- If a UI change. See:
https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html
-->
<details>
<summary>Self-review checklist</summary>
<!-- Prior to submitting a PR, follow our step-by-step guide to review your own code:
https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code -->
<!-- Once you create the PR, check off all the steps below that you have completed.
If any of these steps are not relevant or you have not completed them, leave them unchecked.-->
- [ ] [Self-reviewed](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code) the changes for clarity and maintainability
(variable names, code reuse, readability, etc.).
Communicate decisions, questions, and potential concerns.
- [ ] Explains differences from previous plans (e.g., issue description).
- [ ] Highlights technical choices and bugs encountered.
- [ ] Calls out remaining decisions and concerns.
- [ ] Automated tests verify logic where appropriate.
Individual commits are ready for review (see [commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html)).
- [ ] Each commit is a coherent idea.
- [ ] Commit message(s) explain reasoning and motivation for changes.
Completed manual review and testing of the following:
- [ ] Visual appearance of the changes.
- [ ] Responsiveness and internationalization.
- [ ] Strings and tooltips.
- [ ] End-to-end functionality of buttons, interactions and flows.
- [ ] Corner cases, error conditions, and easily imagined bugs.
</details>
<!-- Also be sure to make clear, coherent commits:
https://zulip.readthedocs.io/en/latest/contributing/version-control.html
-->


@@ -1,40 +1,30 @@
name: "Code scanning"
name: "Code Scanning"
on:
push:
branches: ["*.x", chat.zulip.org, main]
tags: ["*"]
pull_request:
branches: ["*.x", chat.zulip.org, main]
workflow_dispatch:
concurrency:
group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}"
cancel-in-progress: true
permissions:
contents: read
on: [push, pull_request]
jobs:
CodeQL:
permissions:
actions: read # for github/codeql-action/init to get workflow details
contents: read # for actions/checkout to fetch code
security-events: write # for github/codeql-action/analyze to upload SARIF results
if: ${{!github.event.repository.private}}
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v3
- name: Checkout repository
uses: actions/checkout@v2
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.
fetch-depth: 2
# If this run was triggered by a pull request event, then checkout
# the head of the pull request instead of the merge commit.
- run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }}
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v1


@@ -1,310 +0,0 @@
name: Zulip production suite
on:
push:
branches: ["*.x", chat.zulip.org, main]
tags: ["*"]
pull_request:
paths:
- .github/workflows/production-suite.yml
- "**/migrations/**"
- manage.py
- pnpm-lock.yaml
- puppet/**
- requirements/**
- scripts/**
- tools/**
- web/babel.config.js
- web/postcss.config.js
- web/third/**
- web/webpack.config.ts
- zerver/worker/queue_processors.py
- zerver/lib/push_notifications.py
- zerver/decorator.py
- zproject/**
workflow_dispatch:
concurrency:
group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}"
cancel-in-progress: true
defaults:
run:
shell: bash
permissions:
contents: read
jobs:
production_build:
# This job builds a release tarball from the current commit, which
# will be used for all of the following install/upgrade tests.
name: Ubuntu 20.04 production build
runs-on: ubuntu-latest
# Docker images are built from 'tools/ci/Dockerfile'; the comments at
# the top explain how to build and upload these images.
# Ubuntu 20.04 ships with Python 3.8.10.
container: zulip/ci:focal
steps:
- name: Add required permissions
run: |
# The checkout action doesn't clone to ~/zulip or allow
# us to use the path option to clone outside the current
# /__w/zulip/zulip directory. Since this directory is owned
# by root, we need to change its ownership to allow the
# github user to clone the code here.
# Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE
# which is /home/runner/work/.
sudo chown -R github .
# This is the GitHub Actions-specific cache directory that
# the current github user must be able to access for the
# cache action to work. It is owned by root currently.
sudo chmod -R 0777 /__w/_temp/
- uses: actions/checkout@v3
- name: Create cache directories
run: |
dirs=(/srv/zulip-{venv,emoji}-cache)
sudo mkdir -p "${dirs[@]}"
sudo chown -R github "${dirs[@]}"
- name: Restore pnpm store
uses: actions/cache@v3
with:
path: ~/.local/share/pnpm/store
key: v1-pnpm-store-focal-${{ hashFiles('pnpm-lock.yaml') }}
- name: Restore python cache
uses: actions/cache@v3
with:
path: /srv/zulip-venv-cache
key: v1-venv-focal-${{ hashFiles('requirements/dev.txt') }}
restore-keys: v1-venv-focal
- name: Restore emoji cache
uses: actions/cache@v3
with:
path: /srv/zulip-emoji-cache
key: v1-emoji-focal-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
restore-keys: v1-emoji-focal
- name: Build production tarball
run: ./tools/ci/production-build
- name: Upload production build artifacts for install jobs
uses: actions/upload-artifact@v3
with:
name: production-tarball
path: /tmp/production-build
retention-days: 1
- name: Generate failure report string
id: failure_report_string
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT
- name: Report status to CZO
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
uses: zulip/github-actions-zulip/send-message@v1
with:
api-key: ${{ secrets.ZULIP_BOT_KEY }}
email: "github-actions-bot@chat.zulip.org"
organization-url: "https://chat.zulip.org"
to: "automated testing"
topic: ${{ steps.failure_report_string.outputs.topic }}
type: "stream"
content: ${{ steps.failure_report_string.outputs.content }}
production_install:
# This job installs the server release tarball built above on a
# range of platforms, and does some basic health checks on the
# resulting installer Zulip server.
strategy:
fail-fast: false
matrix:
include:
# Docker images are built from 'tools/ci/Dockerfile'; the comments at
# the top explain how to build and upload these images.
- docker_image: zulip/ci:focal
name: Ubuntu 20.04 production install and PostgreSQL upgrade with pgroonga
os: focal
extra-args: ""
- docker_image: zulip/ci:jammy
name: Ubuntu 22.04 production install
os: jammy
extra-args: ""
- docker_image: zulip/ci:bullseye
name: Debian 11 production install with custom db name and user
os: bullseye
extra-args: --test-custom-db
name: ${{ matrix.name }}
container:
image: ${{ matrix.docker_image }}
options: --init
runs-on: ubuntu-latest
needs: production_build
steps:
- name: Download built production tarball
uses: actions/download-artifact@v3
with:
name: production-tarball
path: /tmp
- name: Add required permissions and setup
run: |
# This is the GitHub Actions-specific cache directory that
# the current github user must be able to access for the
# cache action to work. It is owned by root currently.
sudo chmod -R 0777 /__w/_temp/
# Since actions/download-artifact@v2 loses all the permissions
# of the tarball uploaded by the upload-artifact action, fix those.
chmod +x /tmp/production-upgrade-pg
chmod +x /tmp/production-pgroonga
chmod +x /tmp/production-install
chmod +x /tmp/production-verify
chmod +x /tmp/generate-failure-message
- name: Create cache directories
run: |
dirs=(/srv/zulip-{venv,emoji}-cache)
sudo mkdir -p "${dirs[@]}"
sudo chown -R github "${dirs[@]}"
- name: Restore pnpm store
uses: actions/cache@v3
with:
path: ~/.local/share/pnpm/store
key: v1-pnpm-store-${{ matrix.os }}-${{ hashFiles('/tmp/pnpm-lock.yaml') }}
- name: Install production
run: sudo /tmp/production-install ${{ matrix.extra-args }}
- name: Verify install
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
- name: Install pgroonga
if: ${{ matrix.os == 'focal' }}
run: sudo /tmp/production-pgroonga
- name: Verify install after installing pgroonga
if: ${{ matrix.os == 'focal' }}
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
- name: Upgrade postgresql
if: ${{ matrix.os == 'focal' }}
run: sudo /tmp/production-upgrade-pg
- name: Verify install after upgrading postgresql
if: ${{ matrix.os == 'focal' }}
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
- name: Generate failure report string
id: failure_report_string
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
run: /tmp/generate-failure-message >> $GITHUB_OUTPUT
- name: Report status to CZO
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
uses: zulip/github-actions-zulip/send-message@v1
with:
api-key: ${{ secrets.ZULIP_BOT_KEY }}
email: "github-actions-bot@chat.zulip.org"
organization-url: "https://chat.zulip.org"
to: "automated testing"
topic: ${{ steps.failure_report_string.outputs.topic }}
type: "stream"
content: ${{ steps.failure_report_string.outputs.content }}
production_upgrade:
# The production upgrade job starts with a container with a
# previous Zulip release installed, and attempts to upgrade it to
# the release tarball built for the current commit being tested.
#
# This is intended to catch bugs that result in the upgrade
# process failing.
strategy:
fail-fast: false
matrix:
include:
# Docker images are built from 'tools/ci/Dockerfile.prod'; the comments at
# the top explain how to build and upload these images.
- docker_image: zulip/ci:focal-3.2
name: 3.2 Version Upgrade
os: focal
- docker_image: zulip/ci:bullseye-4.2
name: 4.2 Version Upgrade
os: bullseye
- docker_image: zulip/ci:bullseye-5.0
name: 5.0 Version Upgrade
os: bullseye
- docker_image: zulip/ci:bullseye-6.0
name: 6.0 Version Upgrade
os: bullseye
name: ${{ matrix.name }}
container:
image: ${{ matrix.docker_image }}
options: --init
runs-on: ubuntu-latest
needs: production_build
steps:
- name: Download built production tarball
uses: actions/download-artifact@v3
with:
name: production-tarball
path: /tmp
- name: Add required permissions and setup
run: |
# This is the GitHub Actions-specific cache directory that
# the current github user must be able to access for the
# cache action to work. It is owned by root currently.
sudo chmod -R 0777 /__w/_temp/
# Since actions/download-artifact@v2 loses all the permissions
# of the tarball uploaded by the upload-artifact action, fix those.
chmod +x /tmp/production-upgrade
chmod +x /tmp/production-verify
chmod +x /tmp/generate-failure-message
- name: Create cache directories
run: |
dirs=(/srv/zulip-{venv,emoji}-cache)
sudo mkdir -p "${dirs[@]}"
sudo chown -R github "${dirs[@]}"
- name: Upgrade production
run: sudo /tmp/production-upgrade
# TODO: We should be running production-verify here, but it
# doesn't pass yet.
#
# - name: Verify install
# run: sudo /tmp/production-verify
- name: Generate failure report string
id: failure_report_string
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
run: /tmp/generate-failure-message >> $GITHUB_OUTPUT
- name: Report status to CZO
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
uses: zulip/github-actions-zulip/send-message@v1
with:
api-key: ${{ secrets.ZULIP_BOT_KEY }}
email: "github-actions-bot@chat.zulip.org"
organization-url: "https://chat.zulip.org"
to: "automated testing"
topic: ${{ steps.failure_report_string.outputs.topic }}
type: "stream"
content: ${{ steps.failure_report_string.outputs.content }}


@@ -1,27 +0,0 @@
name: Update one click apps
on:
release:
types: [published]
permissions:
contents: read
jobs:
update-digitalocean-oneclick-app:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Update DigitalOcean one click app
env:
DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }}
ZULIP_API_KEY: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_API_KEY }}
ZULIP_EMAIL: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_EMAIL }}
ZULIP_SITE: https://chat.zulip.org
ONE_CLICK_ACTION_STREAM: kandra ops
PYTHON_DIGITALOCEAN_REQUEST_TIMEOUT_SEC: 30
RELEASE_VERSION: ${{ github.event.release.tag_name }}
run: |
export PATH="$HOME/.local/bin:$PATH"
git clone https://github.com/zulip/marketplace-partners
pip3 install python-digitalocean zulip fab-classic PyNaCl
echo $PATH
python3 tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py


@@ -1,255 +1,171 @@
# NOTE: Every test in this file should be in `tools/test-all`. If there's a
# reason not to run it there, it should be there as a comment
# explaining why.
name: Zulip CI
on:
push:
branches: ["*.x", chat.zulip.org, main]
tags: ["*"]
pull_request:
workflow_dispatch:
concurrency:
group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}"
cancel-in-progress: true
on: [push, pull_request]
defaults:
run:
shell: bash
permissions:
contents: read
jobs:
tests:
focal_bionic:
strategy:
fail-fast: false
matrix:
include_documentation_tests: [false]
include_frontend_tests: [false]
include:
# Base images are built using `tools/ci/Dockerfile.prod.template`.
# The comments at the top explain how to build and upload these images.
# Ubuntu 20.04 ships with Python 3.8.10.
- docker_image: zulip/ci:focal
name: Ubuntu 20.04 (Python 3.8, backend + frontend)
os: focal
# This docker image was created by a generated Dockerfile at:
# tools/ci/images/bionic/Dockerfile
# Bionic ships with Python 3.6.
- docker_image: mepriyank/actions:bionic
name: Ubuntu 18.04 Bionic (Python 3.6, backend + frontend)
os: bionic
is_bionic: true
include_frontend_tests: true
# Debian 11 ships with Python 3.9.2.
- docker_image: zulip/ci:bullseye
name: Debian 11 (Python 3.9, backend + documentation)
os: bullseye
include_documentation_tests: true
# Ubuntu 22.04 ships with Python 3.10.4.
- docker_image: zulip/ci:jammy
name: Ubuntu 22.04 (Python 3.10, backend)
os: jammy
# This docker image was created by a generated Dockerfile at:
# tools/ci/images/focal/Dockerfile
# Focal ships with Python 3.8.2.
- docker_image: mepriyank/actions:focal
name: Ubuntu 20.04 Focal (Python 3.8, backend)
os: focal
is_focal: true
include_frontend_tests: false
runs-on: ubuntu-latest
name: ${{ matrix.name }}
container: ${{ matrix.docker_image }}
env:
# GitHub Actions sets HOME to /github/home which causes
# problem later in provision and frontend test that runs
# tools/setup/postgresql-init-dev-db because of the .pgpass
# location. PostgreSQL (psql) expects .pgpass to be at
# problem later in provison and frontend test that runs
# tools/setup/postgres-init-dev-db because of the .pgpass
# location. Postgresql (psql) expects .pgpass to be at
# /home/github/.pgpass and setting home to `/home/github/`
# ensures it is written there because we write it to ~/.pgpass.
HOME: /home/github/
steps:
- uses: actions/checkout@v3
- name: Add required permissions
run: |
# The checkout action doesn't clone to ~/zulip or allow
# us to use the path option to clone outside the current
# /__w/zulip/zulip directory. Since this directory is owned
# by root, we need to change its ownership to allow the
# github user to clone the code here.
# Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE
# which is /home/runner/work/.
sudo chown -R github .
# This is the GitHub Actions-specific cache directory that
# the current github user must be able to access for the
# cache action to work. It is owned by root currently.
sudo chmod -R 0777 /__w/_temp/
- uses: actions/checkout@v2
- name: Create cache directories
run: |
dirs=(/srv/zulip-{venv,emoji}-cache)
dirs=(/srv/zulip-{npm,venv,emoji}-cache)
sudo mkdir -p "${dirs[@]}"
sudo chown -R github "${dirs[@]}"
- name: Restore pnpm store
uses: actions/cache@v3
- name: Restore node_modules cache
uses: actions/cache@v2
with:
path: ~/.local/share/pnpm/store
key: v1-pnpm-store-${{ matrix.os }}-${{ hashFiles('pnpm-lock.yaml') }}
path: /srv/zulip-npm-cache
key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }}
restore-keys: v1-yarn-deps-${{ matrix.os }}
- name: Restore python cache
uses: actions/cache@v3
uses: actions/cache@v2
with:
path: /srv/zulip-venv-cache
key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/dev.txt') }}
key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/thumbor-dev.txt') }}-${{ hashFiles('requirements/dev.txt') }}
restore-keys: v1-venv-${{ matrix.os }}
- name: Restore emoji cache
uses: actions/cache@v3
uses: actions/cache@v2
with:
path: /srv/zulip-emoji-cache
key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json', 'tools/setup/emoji/build_emoji', 'tools/setup/emoji/emoji_setup_utils.py', 'tools/setup/emoji/emoji_names.py', 'package.json') }}
key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
restore-keys: v1-emoji-${{ matrix.os }}
- name: Do Bionic hack
if: ${{ matrix.is_bionic }}
run: |
# Temporary hack till `sudo service redis-server start` gets fixes in Bionic. See
# https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf
- name: Install dependencies
run: |
# This is the main setup job for the test suite
./tools/ci/setup-backend --skip-dev-db-build
mispipe "tools/ci/setup-backend --skip-dev-db-build" ts
# Cleaning caches is mostly unnecessary in GitHub Actions, because
# most builds don't get to write to the cache.
# scripts/lib/clean_unused_caches.py --verbose --threshold 0
- name: Run tools test
run: |
source tools/ci/activate-venv
./tools/test-tools
- name: Run Codespell lint
run: |
source tools/ci/activate-venv
./tools/run-codespell
- name: Run backend lint
run: |
source tools/ci/activate-venv
echo "Test suite is running under $(python --version)."
./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky
- name: Run frontend lint
if: ${{ matrix.include_frontend_tests }}
run: |
source tools/ci/activate-venv
./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky
# mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0 2>&1" ts
- name: Run backend tests
run: |
source tools/ci/activate-venv
./tools/test-backend --coverage --xml-report --no-html-report --include-webhooks --no-cov-cleanup --ban-console-output
. /srv/zulip-py3-venv/bin/activate && \
mispipe "./tools/ci/backend 2>&1" ts
- name: Run mypy
run: |
source tools/ci/activate-venv
# We run mypy after the backend tests so we get output from the
# backend tests, which tend to uncover more serious problems, first.
./tools/run-mypy --version
./tools/run-mypy
- name: Run miscellaneous tests
run: |
source tools/ci/activate-venv
# Currently our compiled requirements files will differ for different
# Python versions, so we will run test-locked-requirements only on the
# platform with the oldest one.
# ./tools/test-locked-requirements
# ./tools/test-run-dev # https://github.com/zulip/zulip/pull/14233
#
# This test has been persistently flaky at like 1% frequency, is slow,
# and is for a very specific single feature, so we don't run it by default:
# ./tools/test-queue-worker-reload
./tools/test-migrations
./tools/setup/optimize-svg --check
./tools/setup/generate_integration_bots_avatars.py --check-missing
./tools/ci/check-executables
# Ban check-database-compatibility from transitively
# relying on static/generated, because it might not be
# up-to-date at that point in upgrade-zulip-stage-2.
chmod 000 static/generated web/generated
./scripts/lib/check-database-compatibility
chmod 755 static/generated web/generated
- name: Run documentation and api tests
if: ${{ matrix.include_documentation_tests }}
run: |
source tools/ci/activate-venv
# In CI, we only test links we control in test-documentation to avoid flakes
./tools/test-documentation --skip-external-links
./tools/test-help-documentation --skip-external-links
./tools/test-api
- name: Run node tests
- name: Run frontend tests
if: ${{ matrix.include_frontend_tests }}
run: |
source tools/ci/activate-venv
# Run the node tests first, since they're fast and deterministic
./tools/test-js-with-node --coverage --parallel=1
- name: Check schemas
if: ${{ matrix.include_frontend_tests }}
run: |
source tools/ci/activate-venv
# Check that various schemas are consistent. (is fast)
./tools/check-schemas
- name: Check capitalization of strings
if: ${{ matrix.include_frontend_tests }}
run: |
source tools/ci/activate-venv
./manage.py makemessages --locale en
PYTHONWARNINGS=ignore ./tools/check-capitalization --no-generate
PYTHONWARNINGS=ignore ./tools/check-frontend-i18n --no-generate
- name: Run puppeteer tests
if: ${{ matrix.include_frontend_tests }}
run: |
source tools/ci/activate-venv
./tools/test-js-with-puppeteer
- name: Check pnpm dedupe
if: ${{ matrix.include_frontend_tests }}
run: pnpm dedupe --check
- name: Check for untracked files
run: |
source tools/ci/activate-venv
# This final check looks for untracked files that may have been
# created by test-backend or provision.
untracked="$(git ls-files --exclude-standard --others)"
if [ -n "$untracked" ]; then
printf >&2 "Error: untracked files:\n%s\n" "$untracked"
exit 1
fi
. /srv/zulip-py3-venv/bin/activate
mispipe "./tools/ci/frontend 2>&1" ts
- name: Test locked requirements
if: ${{ matrix.os == 'focal' }}
if: ${{ matrix.is_bionic }}
run: |
. /srv/zulip-py3-venv/bin/activate && \
./tools/test-locked-requirements
mispipe "./tools/test-locked-requirements 2>&1" ts
- name: Upload coverage reports
# Only upload coverage when both frontend and backend
# tests are run.
# tests are ran.
if: ${{ matrix.include_frontend_tests }}
uses: codecov/codecov-action@v3
with:
files: var/coverage.xml,var/node-coverage/lcov.info
run: |
# Codecov requires `.coverage` file to be stored in the
# current working directory.
mv ./var/.coverage ./.coverage
. /srv/zulip-py3-venv/bin/activate || true
- name: Store Puppeteer artifacts
# Upload these on failure, as well
if: ${{ always() && matrix.include_frontend_tests }}
uses: actions/upload-artifact@v3
# TODO: Check that the next release of codecov doesn't
# throw find error.
# codecov==2.0.16 introduced a bug which uses "find"
# for locating files which is buggy on some platforms.
# It was fixed via https://github.com/codecov/codecov-python/pull/217
# and should get automatically fixed here once it's released.
# We cannot pin the version here because we need the latest version for uploading files.
# see https://community.codecov.io/t/http-400-while-uploading-to-s3-with-python-codecov-from-travis/1428/7
pip install codecov && codecov || echo "Error in uploading coverage reports to codecov.io."
- name: Store puppeteer artifacts
if: ${{ matrix.include_frontend_tests }}
uses: actions/upload-artifact@v2
with:
name: puppeteer
path: ./var/puppeteer
retention-days: 60
# We cannot use the upload-artifact action to upload the test
# reports from /tmp; that directory exists only inside the docker
# image. Move them to ./var so we can access them outside docker, since
# the current directory is volume-mounted outside the docker image.
- name: Move test reports to var
run: mv /tmp/zulip-test-event-log/ ./var/
- name: Store test reports
if: ${{ matrix.is_bionic }}
uses: actions/upload-artifact@v2
with:
name: test-reports
path: ./var/zulip-test-event-log/
- name: Check development database build
run: ./tools/ci/setup-backend
- name: Generate failure report string
id: failure_report_string
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT
- name: Report status to CZO
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
uses: zulip/github-actions-zulip/send-message@v1
with:
api-key: ${{ secrets.ZULIP_BOT_KEY }}
email: "github-actions-bot@chat.zulip.org"
organization-url: "https://chat.zulip.org"
to: "automated testing"
topic: ${{ steps.failure_report_string.outputs.topic }}
type: "stream"
content: ${{ steps.failure_report_string.outputs.content }}
if: ${{ matrix.is_focal }}
run: mispipe "tools/ci/setup-backend" ts
# TODO: We need to port the notify_failure step from CircleCI
# config, however, it might be the case that GitHub Notifications
# make this unnecessary. More details on settings to configure it:
# https://help.github.com/en/github/managing-subscriptions-and-notifications-on-github/configuring-notifications#github-actions-notification-options

19
.gitignore vendored

@@ -27,13 +27,12 @@
package-lock.json
/.vagrant
/var/*
!/var/puppeteer
/var/puppeteer/*
!/var/puppeteer/test_credentials.d.ts
/var
/.dmypy.json
/.ruff_cache
# Dockerfiles generated for CircleCI
/tools/ci/images
# Generated i18n data
/locale/en
@@ -44,11 +43,11 @@ package-lock.json
# Static build
*.mo
npm-debug.log
/.pnpm-store
/node_modules
/prod-static
/staticfiles.json
/webpack-stats-production.json
/yarn-error.log
zulip-git-version
# Test / analysis tools
@@ -71,12 +70,9 @@ zulip.kdev4
*.kate-swp
*.sublime-project
*.sublime-workspace
.vscode/
*.DS_Store
# VS Code. Avoid checking in .vscode in general, while still specifying
# recommended extensions for working with this repository.
/.vscode/**/*
!/.vscode/extensions.json
# .cache/ is generated by VS Code test runner
# .cache/ is generated by VSCode's test runner
.cache/
.eslintcache
@@ -85,4 +81,5 @@ core
## Miscellaneous
# (Ideally this section is empty.)
zthumbor/thumbor_local_settings.py
.transifexrc


@@ -1,13 +1,13 @@
[general]
ignore=title-trailing-punctuation, body-min-length, body-is-missing
ignore=title-trailing-punctuation, body-min-length, body-is-missing, title-imperative-mood
extra-path=tools/lib/gitlint_rules.py
extra-path=tools/lib/gitlint-rules.py
[title-match-regex]
[title-match-regex-allow-exception]
regex=^(.+:\ )?[A-Z].+\.$
[title-max-length]
line-length=72
line-length=76
[body-max-line-length]
line-length=76

7
.isort.cfg Normal file

@@ -0,0 +1,7 @@
[settings]
src_paths = ., tools, tools/setup/emoji
multi_line_output = 3
known_third_party = zulip
include_trailing_comma = True
use_parentheses = True
line_length = 100

107
.mailmap

@@ -1,124 +1,29 @@
# This file teaches `git log` and friends the canonical names
# and email addresses to use for our contributors.
#
# For details on the format, see:
# https://git.github.io/htmldocs/gitmailmap.html
#
# Handy commands for examining or adding to this file:
#
# # shows all names/emails after mapping, sorted:
# $ git shortlog -es | sort -k2
#
# # shows raw names/emails, filtered by mapped name:
# $ git log --format='%an %ae' --author=$NAME | uniq -c
acrefoot <acrefoot@zulip.com> <acrefoot@humbughq.com>
acrefoot <acrefoot@zulip.com> <acrefoot@dropbox.com>
acrefoot <acrefoot@zulip.com> <acrefoot@alum.mit.edu>
Adam Benesh <Adam.Benesh@gmail.com> <Adam-Daniel.Benesh@t-systems.com>
Adam Benesh <Adam.Benesh@gmail.com>
Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net>
Alex Vandiver <alexmv@zulip.com> <github@chmrr.net>
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com>
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@zulip.com>
Alya Abbott <alya@zulip.com> <2090066+alya@users.noreply.github.com>
Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in>
Aman Agrawal <amanagr@zulip.com>
Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com>
Anders Kaseorg <anders@zulip.com> <andersk@mit.edu>
Aryan Shridhar <aryanshridhar7@gmail.com> <53977614+aryanshridhar@users.noreply.github.com>
Aryan Shridhar <aryanshridhar7@gmail.com>
aparna-bhatt <aparnabhatt2001@gmail.com> <86338542+aparna-bhatt@users.noreply.github.com>
Ashwat Kumar Singh <ashwat.kumarsingh.met20@itbhu.ac.in>
Austin Riba <austin@zulip.com> <austin@m51.io>
BIKI DAS <bikid475@gmail.com>
Brock Whittaker <brock@zulipchat.com> <bjwhitta@asu.edu>
Brock Whittaker <brock@zulipchat.com> <brockwhittaker@Brocks-MacBook.local>
Brock Whittaker <brock@zulipchat.com> <brock@zulipchat.org>
Chris Bobbe <cbobbe@zulip.com> <cbobbe@zulipchat.com>
Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com>
Danny Su <contact@dannysu.com> <opensource@emailengine.org>
Dinesh <chdinesh1089@gmail.com>
Dinesh <chdinesh1089@gmail.com> <chdinesh1089>
Eeshan Garg <eeshan@zulip.com> <jerryguitarist@gmail.com>
Eric Smith <erwsmith@gmail.com> <99841919+erwsmith@users.noreply.github.com>
Ganesh Pawar <pawarg256@gmail.com> <58626718+ganpa3@users.noreply.github.com>
Greg Price <greg@zulip.com> <gnprice@gmail.com>
Greg Price <greg@zulip.com> <greg@zulipchat.com>
Greg Price <greg@zulip.com> <price@mit.edu>
Hardik Dharmani <Ddharmani99@gmail.com> <ddharmani99@gmail.com>
Hemant Umre <hemantumre12@gmail.com> <87542880+HemantUmre12@users.noreply.github.com>
Jai soni <jai_s@me.iitr.ac.in>
Jai soni <jai_s@me.iitr.ac.in> <76561593+jai2201@users.noreply.github.com>
Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com>
Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com>
Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com>
Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com>
Julia Bichler <julia.bichler@tum.de> <74348920+juliaBichler01@users.noreply.github.com>
Karl Stolley <karl@zulip.com> <karl@stolley.dev>
Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com>
Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com>
Kevin Scott <kevin.scott.98@gmail.com>
Lauryn Menard <lauryn@zulip.com> <lauryn.menard@gmail.com>
Lauryn Menard <lauryn@zulip.com> <63245456+laurynmm@users.noreply.github.com>
Mateusz Mandera <mateusz.mandera@zulip.com> <mateusz.mandera@protonmail.com>
Matt Keller <matt@zulip.com>
Matt Keller <matt@zulip.com> <m@cognusion.com>
m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in>
m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in> <pururshottam.tiwari.cd.cse19@itbhu.ac.in>
Noble Mittal <noblemittal@outlook.com> <62551163+beingnoble03@users.noreply.github.com>
Palash Baderia <palash.baderia@outlook.com>
Palash Baderia <palash.baderia@outlook.com> <66828942+palashb01@users.noreply.github.com>
Palash Raghuwanshi <singhpalash0@gmail.com>
Parth <mittalparth22@gmail.com>
Priyam Seth <sethpriyam1@gmail.com> <b19188@students.iitmandi.ac.in>
Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com>
Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com>
Rein Zustand (rht) <rhtbot@protonmail.com>
Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu>
Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com>
Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com>
Rishabh Maheshwari <b20063@students.iitmandi.ac.in>
Rixant Rokaha <rixantrokaha@gmail.com>
Rixant Rokaha <rixantrokaha@gmail.com> <rishantrokaha@gmail.com>
Rixant Rokaha <rixantrokaha@gmail.com> <rrokaha@caldwell.edu>
Sahil Batra <sahil@zulip.com> <35494118+sahil839@users.noreply.github.com>
Sahil Batra <sahil@zulip.com> <sahilbatra839@gmail.com>
Satyam Bansal <sbansal1999@gmail.com>
Sayam Samal <samal.sayam@gmail.com>
Scott Feeney <scott@oceanbase.org> <scott@humbughq.com>
Scott Feeney <scott@oceanbase.org> <scott@zulip.com>
Shlok Patel <shlokcpatel2001@gmail.com>
Somesh Ranjan <somesh.ranjan.met20@itbhu.ac.in> <77766761+somesh202@users.noreply.github.com>
Rishi Gupta <rishig@zulip.com> <rishig+git@mit.edu>
Rishi Gupta <rishig@zulip.com> <rishig@kandralabs.com>
Rishi Gupta <rishig@zulip.com> <rishig@users.noreply.github.com>
Rishi Gupta <rishig@zulip.com> <rishig@zulipchat.com>
Steve Howell <showell@zulip.com> <showell30@yahoo.com>
Steve Howell <showell@zulip.com> <showell@yahoo.com>
Steve Howell <showell@zulip.com> <showell@zulipchat.com>
Steve Howell <showell@zulip.com> <steve@humbughq.com>
Steve Howell <showell@zulip.com> <steve@zulip.com>
strifel <info@strifel.de>
Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com>
Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com>
Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu>
Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com>
Ujjawal Modi <umodi2003@gmail.com> <99073049+Ujjawal3@users.noreply.github.com>
Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com>
Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com>
Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com>
umkay <ukhan@zulipchat.com> <umaimah.k@gmail.com>
umkay <ukhan@zulipchat.com> <umkay@users.noreply.github.com>
Waseem Daher <wdaher@zulip.com> <wdaher@humbughq.com>
Waseem Daher <wdaher@zulip.com> <wdaher@dropbox.com>
Yash RE <33805964+YashRE42@users.noreply.github.com> <YashRE42@github.com>
Yash RE <33805964+YashRE42@users.noreply.github.com>
Yogesh Sirsat <yogeshsirsat56@gmail.com>
Yogesh Sirsat <yogeshsirsat56@gmail.com> <41695888+yogesh-sirsat@users.noreply.github.com>
Zeeshan Equbal <equbalzeeshan@gmail.com> <54993043+zee-bit@users.noreply.github.com>
Zeeshan Equbal <equbalzeeshan@gmail.com>
Zev Benjamin <zev@zulip.com> <zev@dropbox.com>
Zev Benjamin <zev@zulip.com> <zev@humbughq.com>
Zev Benjamin <zev@zulip.com> <zev@mit.edu>
Zixuan James Li <p359101898@gmail.com>
Zixuan James Li <p359101898@gmail.com> <39874143+PIG208@users.noreply.github.com>
Zixuan James Li <p359101898@gmail.com> <359101898@qq.com>
Joseph Ho <josephho678@gmail.com>
Joseph Ho <josephho678@gmail.com> <62449508+Joelute@users.noreply.github.com>
Vishnu KS <yo@vishnuks.com> <hackerkid@vishnuks.com>
Vishnu KS <yo@vishnuks.com> <yo@vishnuks.com>

.npmrc

@@ -1 +0,0 @@
ignore-dep-scripts=true

.prettierignore

@@ -1,11 +1 @@
pnpm-lock.yaml
/api_docs/**/*.md
/corporate/tests/stripe_fixtures
/help/**/*.md
/locale
/templates/**/*.md
/tools/setup/emoji/emoji_map.json
/web/third
/zerver/tests/fixtures
/zerver/webhooks/*/doc.md
/zerver/webhooks/*/fixtures
/static/third

.pyre_configuration

@@ -8,7 +8,6 @@
"stubs/",
"zulip-py3-venv/lib/pyre_check/stubs/"
],
"typeshed": "zulip-py3-venv/lib/pyre_check/typeshed/",
"exclude": [
"/srv/zulip/zulip-py3-venv/.*"
]

.readthedocs.yaml

@@ -1,15 +0,0 @@
# https://docs.readthedocs.io/en/stable/config-file/v2.html
version: 2
build:
os: ubuntu-22.04
tools:
python: "3.10"
sphinx:
configuration: docs/conf.py
fail_on_warning: true
python:
install:
- requirements: requirements/docs.txt

.stylelintrc

@@ -0,0 +1,67 @@
{
"rules": {
# Stylistic rules for CSS.
"function-comma-space-after": "always",
"function-comma-space-before": "never",
"function-max-empty-lines": 0,
"function-whitespace-after": "always",
"value-keyword-case": "lower",
"value-list-comma-newline-after": "always-multi-line",
"value-list-comma-space-after": "always-single-line",
"value-list-comma-space-before": "never",
"value-list-max-empty-lines": 0,
"unit-case": "lower",
"property-case": "lower",
"color-hex-case": "lower",
"declaration-bang-space-before": "always",
"declaration-colon-newline-after": "always-multi-line",
"declaration-colon-space-after": "always-single-line",
"declaration-colon-space-before": "never",
"declaration-block-semicolon-newline-after": "always",
"declaration-block-semicolon-space-before": "never",
"declaration-block-trailing-semicolon": "always",
"block-closing-brace-empty-line-before": "never",
"block-closing-brace-newline-after": "always",
"block-closing-brace-newline-before": "always",
"block-opening-brace-newline-after": "always",
"block-opening-brace-space-before": "always",
"selector-attribute-brackets-space-inside": "never",
"selector-attribute-operator-space-after": "never",
"selector-attribute-operator-space-before": "never",
"selector-combinator-space-after": "always",
"selector-combinator-space-before": "always",
"selector-descendant-combinator-no-non-space": true,
"selector-pseudo-class-parentheses-space-inside": "never",
"selector-pseudo-element-case": "lower",
"selector-pseudo-element-colon-notation": "double",
"selector-type-case": "lower",
"selector-list-comma-newline-after": "always",
"selector-list-comma-space-before": "never",
"media-feature-colon-space-after": "always",
"media-feature-colon-space-before": "never",
"media-feature-name-case": "lower",
"media-feature-parentheses-space-inside": "never",
"media-feature-range-operator-space-after": "always",
"media-feature-range-operator-space-before": "always",
"media-query-list-comma-newline-after": "always",
"media-query-list-comma-space-before": "never",
"at-rule-name-case": "lower",
"at-rule-name-space-after": "always",
"at-rule-semicolon-newline-after": "always",
"at-rule-semicolon-space-before": "never",
"comment-whitespace-inside": "always",
"indentation": 4,
# Limit language features
"color-no-hex": true,
"color-named": "never",
}
}

.tx/config

@@ -1,39 +1,32 @@
# Migrated from transifex-client format with `tx migrate`
#
# See https://developers.transifex.com/docs/using-the-client which hints at
# this format, but in general, the headings are in the format of:
#
# [o:<org>:p:<project>:r:<resource>]
[main]
host = https://www.transifex.com
lang_map = zh-Hans: zh_Hans, zh-Hant: zh_Hant
[o:zulip:p:zulip:r:djangopo]
[zulip.djangopo]
file_filter = locale/<lang>/LC_MESSAGES/django.po
source_file = locale/en/LC_MESSAGES/django.po
source_lang = en
type = PO
[o:zulip:p:zulip:r:mobile]
[zulip.translationsjson]
file_filter = locale/<lang>/translations.json
source_file = locale/en/translations.json
source_lang = en
type = KEYVALUEJSON
[zulip.mobile]
file_filter = locale/<lang>/mobile.json
source_file = locale/en/mobile.json
source_lang = en
type = KEYVALUEJSON
[o:zulip:p:zulip:r:translationsjson]
file_filter = locale/<lang>/translations.json
source_file = locale/en/translations.json
source_lang = en
type = KEYVALUEJSON
[o:zulip:p:zulip-test:r:djangopo]
[zulip-test.djangopo]
file_filter = locale/<lang>/LC_MESSAGES/django.po
source_file = locale/en/LC_MESSAGES/django.po
source_lang = en
type = PO
[o:zulip:p:zulip-test:r:translationsjson]
[zulip-test.translationsjson]
file_filter = locale/<lang>/translations.json
source_file = locale/en/translations.json
source_lang = en
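For each resource, `file_filter` tells the Transifex client where to write pulled translations, with `<lang>` replaced by the language code after applying `lang_map` (so `zh-Hans` lands in `zh_Hans`). A rough sketch of that substitution, using only the values shown above; the helper function is illustrative, not part of the Transifex client:

```python
# Illustrative sketch of how file_filter + lang_map resolve to local paths
# for the djangopo resource above. translation_path() is a made-up helper.
LANG_MAP = {"zh-Hans": "zh_Hans", "zh-Hant": "zh_Hant"}
FILE_FILTER = "locale/<lang>/LC_MESSAGES/django.po"


def translation_path(tx_lang: str) -> str:
    local_lang = LANG_MAP.get(tx_lang, tx_lang)
    return FILE_FILTER.replace("<lang>", local_lang)


print(translation_path("zh-Hans"))  # locale/zh_Hans/LC_MESSAGES/django.po
print(translation_path("de"))       # locale/de/LC_MESSAGES/django.po
```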

.vscode/extensions.json

@@ -1,23 +0,0 @@
{
// Recommended VS Code extensions for zulip/zulip.
//
// VS Code prompts a user to install the recommended extensions
// when a workspace is opened for the first time. The user can
// also review the list with the 'Extensions: Show Recommended
// Extensions' command. See
// https://code.visualstudio.com/docs/editor/extension-marketplace#_workspace-recommended-extensions
// for more information.
//
// Extension identifier format: ${publisher}.${name}.
// Example: vscode.csharp
"recommendations": [
"42crunch.vscode-openapi",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode",
"ms-vscode-remote.vscode-remote-extensionpack"
],
// Extensions recommended by VS Code which are not recommended for users of zulip/zulip.
"unwantedRecommendations": []
}

.yarnrc

@@ -0,0 +1 @@
ignore-scripts true

CODE_OF_CONDUCT.md

@@ -14,46 +14,46 @@ This isn't an exhaustive list of things that you can't do. Rather, take it
in the spirit in which it's intended --- a guide to make it easier to enrich
all of us and the technical communities in which we participate.
## Expected behavior
## Expected Behavior
The following behaviors are expected and requested of all community members:
- Participate. In doing so, you contribute to the health and longevity of
* Participate. In doing so, you contribute to the health and longevity of
the community.
- Exercise consideration and respect in your speech and actions.
- Attempt collaboration before conflict. Assume good faith.
- Refrain from demeaning, discriminatory, or harassing behavior and speech.
- Take action or alert community leaders if you notice a dangerous
* Exercise consideration and respect in your speech and actions.
* Attempt collaboration before conflict. Assume good faith.
* Refrain from demeaning, discriminatory, or harassing behavior and speech.
* Take action or alert community leaders if you notice a dangerous
situation, someone in distress, or violations of this code, even if they
seem inconsequential.
- Community event venues may be shared with members of the public; be
* Community event venues may be shared with members of the public; be
respectful to all patrons of these locations.
## Unacceptable behavior
## Unacceptable Behavior
The following behaviors are considered harassment and are unacceptable
within the Zulip community:
- Jokes or derogatory language that singles out members of any race,
* Jokes or derogatory language that singles out members of any race,
ethnicity, culture, national origin, color, immigration status, social and
economic class, educational level, language proficiency, sex, sexual
orientation, gender identity and expression, age, size, family status,
political belief, religion, and mental and physical ability.
- Violence, threats of violence, or violent language directed against
* Violence, threats of violence, or violent language directed against
another person.
- Disseminating or threatening to disseminate another person's personal
* Disseminating or threatening to disseminate another person's personal
information.
- Personal insults of any sort.
- Posting or displaying sexually explicit or violent material.
- Inappropriate photography or recording.
- Deliberate intimidation, stalking, or following (online or in person).
- Unwelcome sexual attention. This includes sexualized comments or jokes,
* Personal insults of any sort.
* Posting or displaying sexually explicit or violent material.
* Inappropriate photography or recording.
* Deliberate intimidation, stalking, or following (online or in person).
* Unwelcome sexual attention. This includes sexualized comments or jokes,
inappropriate touching or groping, and unwelcomed sexual advances.
- Sustained disruption of community events, including talks and
* Sustained disruption of community events, including talks and
presentations.
- Advocating for, or encouraging, any of the behaviors above.
* Advocating for, or encouraging, any of the behaviors above.
## Reporting and enforcement
## Reporting and Enforcement
Harassment and other code of conduct violations reduce the value of the
community for everyone. If someone makes you or anyone else feel unsafe or
@@ -95,78 +95,11 @@ behavior occurring outside the scope of community activities when such
behavior has the potential to adversely affect the safety and well-being of
community members.
## License and attribution
## License and Attribution
This Code of Conduct is adapted from the
[Citizen Code of Conduct](http://citizencodeofconduct.org/) and the
[Django Code of Conduct](https://www.djangoproject.com/conduct/), and is
under a
[Creative Commons BY-SA](https://creativecommons.org/licenses/by-sa/4.0/)
license.
## Moderating the Zulip community
Anyone can help moderate the Zulip community by helping make sure that folks are
aware of the [community guidelines](https://zulip.com/development-community/)
and this Code of Conduct, and that we maintain a positive and respectful
atmosphere.
Here are some guidelines for how you can help:
- Be friendly! Welcoming folks, thanking them for their feedback, ideas and effort,
and just trying to keep the atmosphere warm make the whole community function
more smoothly. New participants who feel accepted, listened to and respected
are likely to treat others the same way.
- Be familiar with the [community
guidelines](https://zulip.com/development-community/), and cite them liberally
when a user violates them. Be polite but firm. Some examples:
- @user please note that there is no need to @-mention @\_**Tim Abbott** when
you ask a question. As noted in the [guidelines for this
community](https://zulip.com/development-community/):
> Use @-mentions sparingly… there is generally no need to @-mention a
> core contributor unless you need their timely attention.
- @user, please keep in mind the following [community
guideline](https://zulip.com/development-community/):
> Don't ask the same question in multiple places. Moderators read every
> public stream, and make sure every question gets a reply.
I've gone ahead and moved the other copy of this message to this thread.
- If asked a question in a PM that is better discussed in a public stream:
> Hi @user! Please start by reviewing
> https://zulip.com/development-community/#community-norms to learn how to
> get help in this community.
- Users sometimes think chat.zulip.org is a testing instance. When this happens,
kindly direct them to use the **#test here** stream.
- If you see a message that's posted in the wrong place, go ahead and move it if
you have permissions to do so, even if you don't plan to respond to it.
Leaving the “Send automated notice to new topic” option enabled helps make it
clear what happened to the person who sent the message.
If you are responding to a message that's been moved, mention the user in your
reply, so that the mention serves as a notification of the new location for
their conversation.
- If a user is posting spam, please report it to an administrator. They will:
- Change the user's name to `<name> (spammer)` and deactivate them.
- Delete any spam messages they posted in public streams.
- We care very much about maintaining a respectful tone in our community. If you
see someone being mean or rude, point out that their tone is inappropriate,
and ask them to communicate their perspective in a respectful way in the
future. If you don't feel comfortable doing so yourself, feel free to ask a
member of Zulip's core team to take care of the situation.
- Try to assume the best intentions from others (given the range of
possibilities presented by their visible behavior), and stick with a friendly
and positive tone even when someone's behavior is poor or disrespectful.
Everyone has bad days and stressful situations that can result in them
behaving not their best, and while we should be firm about our community
rules, we should also enforce them with kindness.

CONTRIBUTING.md

@@ -1,36 +1,23 @@
# Contributing guide
# Contributing to Zulip
Welcome to the Zulip community!
## Zulip development community
## Community
The primary communication forum for the Zulip community is the Zulip
server hosted at [chat.zulip.org](https://chat.zulip.org/):
The
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html)
is the primary communication forum for the Zulip community. It is a good
place to start whether you have a question, are a new contributor, are a new
user, or anything else. Make sure to read the
[community norms](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html#community-norms)
before posting. The Zulip community is also governed by a
[code of conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html).
- **Users** and **administrators** of Zulip organizations stop by to
ask questions, offer feedback, and participate in product design
discussions.
- **Contributors to the project**, including the **core Zulip
development team**, discuss ongoing and future projects, brainstorm
ideas, and generally help each other out.
Everyone is welcome to [sign up](https://chat.zulip.org/) and
participate — we love hearing from our users! Public streams in the
community receive thousands of messages a week. We recommend signing
up using the special invite links for
[users](https://chat.zulip.org/join/t5crtoe62bpcxyisiyglmtvb/),
[self-hosters](https://chat.zulip.org/join/wnhv3jzm6afa4raenedanfno/)
and
[contributors](https://chat.zulip.org/join/npzwak7vpmaknrhxthna3c7p/)
to get a curated list of initial stream subscriptions.
To learn how to get started participating in the community, including [community
norms](https://zulip.com/development-community/#community-norms) and [where to
post](https://zulip.com/development-community/#where-do-i-send-my-message),
check out our [Zulip development community
guide](https://zulip.com/development-community/). The Zulip community is
governed by a [code of
conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html).
You can subscribe to zulip-devel-announce@googlegroups.com or our
[Twitter](https://twitter.com/zulip) account for a lower traffic (~1
email/month) way to hear about things like mentorship opportunities with Google
Code-in, in-person sprints at conferences, and other opportunities to
contribute.
## Ways to contribute
@@ -38,366 +25,174 @@ To make a code or documentation contribution, read our
[step-by-step guide](#your-first-codebase-contribution) to getting
started with the Zulip codebase. A small sample of the type of work that
needs doing:
- Bug squashing and feature development on our Python/Django
* Bug squashing and feature development on our Python/Django
[backend](https://github.com/zulip/zulip), web
[frontend](https://github.com/zulip/zulip), React Native
[mobile app](https://github.com/zulip/zulip-mobile), or Electron
[desktop app](https://github.com/zulip/zulip-desktop).
- Building out our
* Building out our
[Python API and bots](https://github.com/zulip/python-zulip-api) framework.
- [Writing an integration](https://zulip.com/api/integrations-overview).
- Improving our [user](https://zulip.com/help/) or
* [Writing an integration](https://zulip.com/api/integrations-overview).
* Improving our [user](https://zulip.com/help/) or
[developer](https://zulip.readthedocs.io/en/latest/) documentation.
- [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html)
* [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html)
and manually testing pull requests.
**Non-code contributions**: Some of the most valuable ways to contribute
don't require touching the codebase at all. For example, you can:
don't require touching the codebase at all. We list a few of them below:
- [Report issues](#reporting-issues), including both feature requests and
* [Reporting issues](#reporting-issues), including both feature requests and
bug reports.
- [Give feedback](#user-feedback) if you are evaluating or using Zulip.
- [Participate
thoughtfully](https://zulip.readthedocs.io/en/latest/contributing/design-discussions.html)
in design discussions.
- [Sponsor Zulip](https://github.com/sponsors/zulip) through the GitHub sponsors program.
- [Translate](https://zulip.readthedocs.io/en/latest/translating/translating.html)
Zulip into your language.
- [Stay connected](#stay-connected) with Zulip, and [help others
find us](#help-others-find-zulip).
* [Giving feedback](#user-feedback) if you are evaluating or using Zulip.
* [Translating](https://zulip.readthedocs.io/en/latest/translating/translating.html)
Zulip.
* [Outreach](#zulip-outreach): Star us on GitHub, upvote us
on product comparison sites, or write for [the Zulip blog](https://blog.zulip.org/).
## Your first codebase contribution
## Your first (codebase) contribution
This section has a step by step guide to starting as a Zulip codebase
contributor. It's long, but don't worry about doing all the steps perfectly;
no one gets it right the first time, and there are a lot of people available
to help.
- First, make an account on the
[Zulip community server](https://zulip.com/development-community/),
paying special attention to the
[community norms](https://zulip.com/development-community/#community-norms).
If you'd like, introduce yourself in
* First, make an account on the
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html),
paying special attention to the community norms. If you'd like, introduce
yourself in
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members), using
your name as the topic. Bonus: tell us about your first impressions of
Zulip, and anything that felt confusing/broken or interesting/helpful as you
started using the product.
- Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor).
- [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html),
Zulip, and anything that felt confusing/broken as you started using the
product.
* Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor).
* [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html),
getting help in
[#provision help](https://chat.zulip.org/#narrow/stream/21-provision-help)
[#development help](https://chat.zulip.org/#narrow/stream/49-development-help)
if you run into any troubles.
- Familiarize yourself with [using the development environment](https://zulip.readthedocs.io/en/latest/development/using.html).
- Go through the [new application feature
tutorial](https://zulip.readthedocs.io/en/latest/tutorials/new-feature-tutorial.html) to get familiar with
how the Zulip codebase is organized and how to find code in it.
- Read the [Zulip guide to
Git](https://zulip.readthedocs.io/en/latest/git/index.html) if you
are unfamiliar with Git or Zulip's rebase-based Git workflow,
getting help in [#git
help](https://chat.zulip.org/#narrow/stream/44-git-help) if you run
into any troubles. Even Git experts should read the [Zulip-specific
Git tools
page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html).
* Read the
[Zulip guide to Git](https://zulip.readthedocs.io/en/latest/git/index.html)
and do the Git tutorial (coming soon) if you are unfamiliar with
Git, getting help in
[#git help](https://chat.zulip.org/#narrow/stream/44-git-help) if
you run into any troubles. Be sure to check out the
[extremely useful Zulip-specific tools page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html).
### Where to look for an issue
### Picking an issue
Now you're ready to pick your first issue! Zulip has several repositories you
can check out, depending on your interests. There are hundreds of open issues in
the [main Zulip server and web app
repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
alone.
Now, you're ready to pick your first issue! There are hundreds of open issues
in the main codebase alone. This section will help you find an issue to work
on.
You can look through issues tagged with the "help wanted" label, which is used
to indicate the issues that are ready for contributions. Some repositories also
use the "good first issue" label to tag issues that are especially approachable
for new contributors.
- [Server and web app](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
- [Mobile apps](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
- [Desktop app](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
- [Terminal app](https://github.com/zulip/zulip-terminal/issues?q=is%3Aopen+is%3Aissue+label%3A"help+wanted")
- [Python API bindings and bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
### Picking an issue to work on
There's a lot to learn while making your first pull request, so start small!
Many first contributions have fewer than 10 lines of changes (not counting
changes to tests).
We recommend the following process for finding an issue to work on:
1. Read the description of an issue tagged with the "help wanted" label and make
sure you understand it.
2. If it seems promising, poke around the product
(on [chat.zulip.org](https://chat.zulip.org) or in the development
environment) until you know how the piece being
described fits into the bigger picture. If after some exploration the
description seems confusing or ambiguous, post a question on the GitHub
issue, as others may benefit from the clarification as well.
3. When you find an issue you like, try to get started working on it. See if you
can find the part of the code you'll need to modify (`git grep` is your
friend!) and get some idea of how you'll approach the problem.
4. If you feel lost, that's OK! Go through these steps again with another issue.
There's plenty to work on, and the exploration you do will help you learn
more about the project.
Note that you are _not_ claiming an issue while you are iterating through steps
1-4. _Before you claim an issue_, you should be confident that you will be able to
tackle it effectively.
Additional tips for the [main server and web app
repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22):
- We especially recommend browsing recently opened issues, as there are more
likely to be easy ones for you to find.
- All issues are partitioned into areas like
* If you're interested in
[mobile](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue),
[desktop](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue),
or
[bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue)
development, check the respective links for open issues, or post in
[#mobile](https://chat.zulip.org/#narrow/stream/48-mobile),
[#desktop](https://chat.zulip.org/#narrow/stream/16-desktop), or
[#integration](https://chat.zulip.org/#narrow/stream/127-integrations).
* For the main server and web repository, we recommend browsing
recently opened issues to look for issues you are confident you can
fix correctly in a way that clearly communicates why your changes
are the correct fix. Our GitHub workflow bot, zulipbot, limits
users who have 0 commits merged to claiming a single issue labeled
with "good first issue" or "help wanted".
* We also partition all of our issues in the main repo into areas like
admin, compose, emoji, hotkeys, i18n, onboarding, search, etc. Look
through our [list of labels](https://github.com/zulip/zulip/labels), and
click on some of the `area:` labels to see all the issues related to your
areas of interest.
- Avoid issues with the "difficult" label unless you
understand why it is difficult and are highly confident you can resolve the
issue correctly and completely.
* If the lists of issues are overwhelming, post in
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with a
bit about your background and interests, and we'll help you out. The most
important thing to say is whether you're looking for a backend (Python),
frontend (JavaScript and TypeScript), mobile (React Native), desktop (Electron),
documentation (English) or visual design (JavaScript/TypeScript + CSS) issue, and a
bit about your programming experience and available time.
### Claiming an issue
We also welcome suggestions of features that you feel would be valuable or
changes that you feel would make Zulip a better open source project. If you
have a new feature you'd like to add, we recommend you start by posting in
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with the
feature idea and the problem that you're hoping to solve.
#### In the main server/web app repository and Zulip Terminal repository
The Zulip server/web app repository
([`zulip/zulip`](https://github.com/zulip/zulip/)) and the Zulip Terminal
repository ([`zulip/zulip-terminal`](https://github.com/zulip/zulip-terminal/))
are set up with a GitHub workflow bot called
[Zulipbot](https://github.com/zulip/zulipbot), which manages issues and pull
requests in order to create a better workflow for Zulip contributors.
To claim an issue in these repositories, simply post a comment that says
`@zulipbot claim` to the issue thread. If the issue is tagged with a [help
wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
label, Zulipbot will immediately assign the issue to you.
Note that new contributors can only claim one issue until their first pull request is
merged. This is to encourage folks to finish ongoing work before starting
something new. If you would like to pick up a new issue while waiting for review
on an almost-ready pull request, you can post a comment to this effect on the
issue you're interested in.
#### In other Zulip repositories
There is no bot for other Zulip repositories
([`zulip/zulip-mobile`](https://github.com/zulip/zulip-mobile/), etc.). If
you are interested in claiming an issue in one of these repositories, simply
post a comment on the issue thread saying that you'd like to work on it. There
is no need to @-mention the issue creator in your comment.
Please follow the same guidelines as described above: find an issue labeled
"help wanted", and only pick up one issue at a time to start with.
Other notes:
* For a first pull request, it's better to aim for a smaller contribution
than a bigger one. Many first contributions have fewer than 10 lines of
changes (not counting changes to tests).
* The full list of issues explicitly looking for a contributor can be
found with the
[good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
and
[help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
labels. Avoid issues with the "difficult" label unless you
understand why it is difficult and are confident you can resolve the
issue correctly and completely. Issues without one of these labels
are fair game if Tim has written a clear technical design proposal
in the issue, or it is a bug that you can reproduce and you are
confident you can fix the issue correctly.
* For most new contributors, there's a lot to learn while making your first
pull request. It's OK if it takes you a while; that's normal! You'll be
able to work a lot faster as you build experience.
### Working on an issue
You're encouraged to ask questions on how to best implement or debug your
changes -- the Zulip maintainers are excited to answer questions to help you
stay unblocked and working efficiently. You can ask questions in the [Zulip
development community](https://zulip.com/development-community/), or on the
GitHub issue or pull request.
To work on an issue, claim it by adding a comment with `@zulipbot claim` to
the issue thread. [Zulipbot](https://github.com/zulip/zulipbot) is a GitHub
workflow bot; it will assign you to the issue and label the issue as "in
progress". Some additional notes:
To get early feedback on any UI changes, we encourage you to post screenshots of
your work in the [#design
stream](https://chat.zulip.org/#narrow/stream/101-design) in the [Zulip
development community](https://zulip.com/development-community/)
* You can only claim issues with the
[good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
or
[help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
labels. Zulipbot will give you an error if you try to claim an issue
without one of those labels.
* You're encouraged to ask questions on how to best implement or debug your
changes -- the Zulip maintainers are excited to answer questions to help
you stay unblocked and working efficiently. You can ask questions on
chat.zulip.org, or on the GitHub issue or pull request.
* We encourage early pull requests for work in progress. Prefix the title of
work in progress pull requests with `[WIP]`, and remove the prefix when
you think it might be mergeable and want it to be reviewed.
* After updating a PR, add a comment to the GitHub thread mentioning that it
is ready for another review. GitHub only notifies maintainers of the
changes when you post a comment, so if you don't, your PR will likely be
neglected by accident!
For more advice, see [What makes a great Zulip
contributor?](#what-makes-a-great-zulip-contributor)
below.
### And beyond
### Submitting a pull request
When you believe your code is ready, follow the [guide on how to review
code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code)
to review your own work. You can often find things you missed by taking a step
back to look over your work before asking others to do so. Catching mistakes
yourself will help your PRs be merged faster, and folks will appreciate the
quality and professionalism of your work.
Then, submit your changes. Carefully reading our [Git guide][git-guide], and in
particular the section on [making a pull request][git-guide-make-pr], will help
avoid many common mistakes. If any part of your contribution is from someone
else (code snippets, images, sounds, or any other copyrightable work, modified
or unmodified), be sure to review the instructions on how to [properly
attribute][licensing] the work.
[licensing]: https://zulip.readthedocs.io/en/latest/contributing/licensing.html#contributing-someone-else-s-work
Once you are satisfied with the quality of your PR, follow the
[guidelines on asking for a code
review](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#asking-for-a-code-review)
to request a review. If you are not sure what's best, simply post a
comment on the main GitHub thread for your PR clearly indicating that
it is ready for review, and the project maintainers will take a look
and follow up with next steps.
It's OK if your first issue takes you a while; that's normal! You'll be
able to work a lot faster as you build experience.
If it helps your workflow, you can submit your pull request marked as
a [draft][github-help-draft-pr] while you're still working on it, and
then mark it ready when you think it's time for someone else to review
your work.
[git-guide]: https://zulip.readthedocs.io/en/latest/git/
[git-guide-make-pr]: https://zulip.readthedocs.io/en/latest/git/pull-requests.html
[github-help-draft-pr]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests#draft-pull-requests
### Stages of a pull request
Your pull request will likely go through several stages of review.
1. If your PR makes user-facing changes, the UI and user experience may be
reviewed early on, without reference to the code. You will get feedback on
any user-facing bugs in the implementation. To minimize the number of review
round-trips, make sure to [thoroughly
test](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#manual-testing)
your own PR prior to asking for review.
2. There may be choices made in the implementation that the reviewer
will ask you to revisit. This process will go more smoothly if you
specifically call attention to the decisions you made while
drafting the PR and any points about which you are uncertain. The
PR description and comments on your own PR are good ways to do this.
3. Oftentimes, seeing an initial implementation will make it clear that the
product design for a feature needs to be revised, or that additional changes
are needed. The reviewer may therefore ask you to amend or change the
implementation. Some changes may be blockers for getting the PR merged, while
others may be improvements that can happen afterwards. Feel free to ask if
it's unclear which type of feedback you're getting. (Follow-ups can be a
great next issue to work on!)
4. In addition to any UI/user experience review, all PRs will go through one or
more rounds of code review. Your code may initially be [reviewed by other
contributors](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html).
This helps us make good use of project maintainers' time, and helps you make
progress on the PR by getting more frequent feedback. A project maintainer
may leave a comment asking someone with expertise in the area you're working
on to review your work.
5. Final code review and integration for server and web app PRs is generally done
by `@timabbott`.
#### How to help move the review process forward
The key to keeping your review moving through the review process is to:
- Address _all_ the feedback to the best of your ability.
- Make it clear when the requested changes have been made
and you believe it's time for another look.
- Make it as easy as possible to review the changes you made.
In order to do this, when you believe you have addressed the previous round of
feedback on your PR as best you can, post a comment asking reviewers to take
another look. Your comment should make it easy to understand what has been done
and what remains by:
- Summarizing the changes made since the last review you received.
- Highlighting remaining questions or decisions, with links to any relevant
chat.zulip.org threads.
- Providing updated screenshots and information on manual testing if
appropriate.
The easier it is to review your work, the more likely you are to receive quick
feedback.
### Beyond the first issue
To find a second issue to work on, we recommend looking through issues with the same
A great place to look for a second issue is to look for issues with the same
`area:` label as the last issue you resolved. You'll be able to reuse the
work you did learning how that part of the codebase works. Also, the path to
becoming a core developer often involves taking ownership of one of these area
labels.
### Common questions
- **What if somebody is already working on the issue I want to claim?** There
are lots of issues to work on! If somebody else is actively working on the
issue, you can find a different one, or help with
reviewing their work.
- **What if somebody else claims an issue while I'm figuring out whether or not to
work on it?** No worries! You can contribute by providing feedback on
their pull request. If you've made good progress in understanding part of the
codebase, you can also find another "help wanted" issue in the same area to
work on.
- **What if there is already a pull request for the issue I want to work on?**
Start by reviewing the existing work. If you agree with the approach, you can
use the existing pull request (PR) as a starting point for your contribution. If
you think a different approach is needed, you can post a new PR, with a comment that clearly
explains _why_ you decided to start from scratch.
- **Can I come up with my own feature idea and work on it?** We welcome
suggestions of features or other improvements that you feel would be valuable. If you
have a new feature you'd like to add, you can start a conversation [in our
development community](https://zulip.com/development-community/#where-do-i-send-my-message)
explaining the feature idea and the problem that you're hoping to solve.
- **I'm waiting for the next round of review on my PR. Can I pick up
another issue in the meantime?** Someone's first Zulip PR often
requires quite a bit of iteration, so please [make sure your pull
request is reviewable][reviewable-pull-requests] and go through at
least one round of feedback from others before picking up a second
issue. After that, sure! If
[Zulipbot](https://github.com/zulip/zulipbot) does not allow you to
claim an issue, you can post a comment describing the status of your
other work on the issue you're interested in, and asking for the
issue to be assigned to you. Note that addressing feedback on
in-progress PRs should always take priority over starting a new PR.
- **I think my PR is done, but it hasn't been merged yet. What's going on?**
1. **Double-check that you have addressed all the feedback**, including any comments
on [Git commit
discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html).
2. If all the feedback has been addressed, did you [leave a
comment](#how-to-help-move-the-review-process-forward)
explaining that you have done so and **requesting another review**? If not,
it may not be clear to project maintainers or reviewers that your PR is
ready for another look.
3. There may be a pause between initial rounds of review for your PR and final
review by project maintainers. This is normal, and we encourage you to **work
on other issues** while you wait.
4. If you think the PR is ready and haven't seen any updates for a couple
of weeks, it can be helpful to **leave another comment**. Summarize the
overall state of the review process and your work, and indicate that you
are waiting for a review.
5. Finally, **Zulip project maintainers are people too**! They may be busy
with other work, and sometimes they might even take a vacation. ;) It can
occasionally take a few weeks for a PR in the final stages of the review
process to be merged.
[reviewable-pull-requests]: https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html
## What makes a great Zulip contributor?
Zulip has a lot of experience working with new contributors. In our
Zulip has a lot of experience working with new contributors. In our
experience, these are the best predictors of success:
- [Asking great questions][great-questions]. It's very hard to answer a general
question like, "How do I do this issue?" When asking for help, explain your
current understanding, including what you've done or tried so far and where
you got stuck. Post tracebacks or other error messages if appropriate. For
more advice, check out [our guide][great-questions]!
- Learning and practicing
[Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html).
- Submitting carefully tested code. See our [detailed guide on how to review
code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code)
(yours or someone else's).
- Posting
* Posting good questions. This generally means explaining your current
understanding, saying what you've done or tried so far, and including
tracebacks or other error messages if appropriate.
* Learning and practicing
[Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/version-control.html#commit-discipline).
* Submitting carefully tested code. This generally means checking your work
through a combination of automated tests and manually clicking around the
UI trying to find bugs in your work. See
[things to look for](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#things-to-look-for)
for additional ideas.
* Posting
[screenshots or GIFs](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html)
for frontend changes.
- Working to [make your pull requests easy to
review](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html).
- Clearly describing what you have implemented and why. For example, if your
implementation differs from the issue description in some way or is a partial
step towards the requirements described in the issue, be sure to call
out those differences.
- Being responsive to feedback on pull requests. This means incorporating or
* Being responsive to feedback on pull requests. This means incorporating or
responding to all suggested changes, and leaving a note if you won't be
able to address things within a few days.
- Being helpful and friendly on the [Zulip community
server](https://zulip.com/development-community/).
* Being helpful and friendly on chat.zulip.org.
[great-questions]: https://zulip.readthedocs.io/en/latest/contributing/asking-great-questions.html
These are also the main criteria we use to select candidates for all
of our outreach programs.
## Reporting issues
@@ -409,7 +204,7 @@ is, the best place to post issues is
[#issues](https://chat.zulip.org/#narrow/stream/9-issues) (or
[#mobile](https://chat.zulip.org/#narrow/stream/48-mobile) or
[#desktop](https://chat.zulip.org/#narrow/stream/16-desktop)) on the
[Zulip community server](https://zulip.com/development-community/).
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html).
This allows us to interactively figure out what is going on, let you know if
a similar issue has already been opened, and collect any other information
we need. Choose a 2-4 word topic that describes the issue, explain the issue
@@ -418,9 +213,9 @@ and how to reproduce it if known, your browser/OS if relevant, and a
if appropriate.
**Reporting security issues**. Please do not report security issues
publicly, including on public streams on chat.zulip.org. You can
email [security@zulip.com](mailto:security@zulip.com). We create a CVE for every
security issue in our released software.
publicly, including on public streams on chat.zulip.org. You can
email security@zulip.com. We create a CVE for every security
issue in our released software.
## User feedback
@@ -430,67 +225,116 @@ hear about your experience with the product. If you're not sure what to
write, here are some questions we're always very curious to know the answer
to:
- Evaluation: What is the process by which your organization chose or will
* Evaluation: What is the process by which your organization chose or will
choose a group chat product?
- Pros and cons: What are the pros and cons of Zulip for your organization,
* Pros and cons: What are the pros and cons of Zulip for your organization,
and the pros and cons of other products you are evaluating?
- Features: What are the features that are most important for your
* Features: What are the features that are most important for your
organization? In the best-case scenario, what would your chat solution do
for you?
- Onboarding: If you remember it, what was your impression during your first
* Onboarding: If you remember it, what was your impression during your first
few minutes of using Zulip? What did you notice, and how did you feel? Was
there anything that stood out to you as confusing, or broken, or great?
- Organization: What does your organization do? How big is the organization?
* Organization: What does your organization do? How big is the organization?
A link to your organization's website?
You can contact us in the [#feedback stream of the Zulip development
community](https://chat.zulip.org/#narrow/stream/137-feedback) or
by emailing [support@zulip.com](mailto:support@zulip.com).
## Outreach programs
Zulip regularly participates in [Google Summer of Code
(GSoC)](https://developers.google.com/open-source/gsoc/) and
[Outreachy](https://www.outreachy.org/). We have been a GSoC mentoring
organization since 2016, and we accept 15-20 GSoC participants each summer. In
the past, weve also participated in [Google
Code-In](https://developers.google.com/open-source/gci/), and hosted summer
interns from Harvard, MIT, and Stanford.
Zulip participates in [Google Summer of Code
(GSoC)](https://developers.google.com/open-source/gsoc/) every year.
In the past, we've also participated in
[Outreachy](https://www.outreachy.org/), [Google
Code-In](https://developers.google.com/open-source/gci/), and hosted
summer interns from Harvard, MIT, and Stanford.
Check out our [outreach programs
overview](https://zulip.readthedocs.io/en/latest/outreach/overview.html) to learn
more about participating in an outreach program with Zulip. Most of our program
participants end up sticking around the project long-term, and many have become
core team members, maintaining important parts of the project. We hope you
apply!
While each third-party program has its own rules and requirements, the
Zulip community's approaches all of these programs with these ideas in
mind:
* We try to make the application process as valuable for the applicant as
possible. Expect high-quality code reviews, a supportive community, and
publicly viewable patches you can link to from your resume, regardless of
whether you are selected.
* To apply, you'll have to submit at least one pull request to a Zulip
repository. Most students accepted to one of our programs have
several merged pull requests (including at least one larger PR) by
the time of the application deadline.
* The main criteria we use is quality of your best contributions, and
the bullets listed at
[What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor).
Because we focus on evaluating your best work, it doesn't hurt your
application to make mistakes in your first few PRs as long as your
work improves.
## Stay connected
Most of our outreach program participants end up sticking around the
project long-term, and many have become core team members, maintaining
important parts of the project. We hope you apply!
Even if you are not logging into the development community on a regular basis,
you can still stay connected with the project.
### Google Summer of Code
- Follow us [on Twitter](https://twitter.com/zulip).
- Subscribe to [our blog](https://blog.zulip.org/).
- Join or follow the project [on LinkedIn](https://www.linkedin.com/company/zulip-project/).
The largest outreach program Zulip participates in is GSoC (14
students in 2017; 11 in 2018; 17 in 2019). While we don't control how
many slots Google allocates to Zulip, we hope to mentor a similar
number of students in future summers.
## Help others find Zulip
If you're reading this well before the application deadline and want
to make your application strong, we recommend getting involved in the
community and fixing issues in Zulip now. Having good contributions
and building a reputation for doing good work is the best way to have
a strong application. About half of Zulip's GSoC students for Summer
2017 had made significant contributions to the project by February
2017, and about half had not. Our
[GSoC project ideas page][gsoc-guide] has lots more details on how
Zulip does GSoC, as well as project ideas (though the project idea
list is maintained only during the GSoC application period, so if
you're looking at some other time of year, the project list is likely
out-of-date).
Here are some ways you can help others find Zulip:
We also have in some past years run a Zulip Summer of Code (ZSoC)
program for students who we didn't have enough slots to accept for
GSoC but were able to find funding for. Student expectations are the
same as with GSoC, and it has no separate application process; your
GSoC application is your ZSoC application. If we'd like to select you
for ZSoC, we'll contact you when the GSoC results are announced.
- Star us on GitHub. There are four main repositories:
[gsoc-guide]: https://zulip.readthedocs.io/en/latest/overview/gsoc-ideas.html
[gsoc-faq]: https://developers.google.com/open-source/gsoc/faq
## Zulip Outreach
**Upvoting Zulip**. Upvotes and reviews make a big difference in the public
perception of projects like Zulip. We've collected a few sites below
where we know Zulip has been discussed. Doing everything in the following
list typically takes about 15 minutes.
* Star us on GitHub. There are four main repositories:
[server/web](https://github.com/zulip/zulip),
[mobile](https://github.com/zulip/zulip-mobile),
[desktop](https://github.com/zulip/zulip-desktop), and
[Python API](https://github.com/zulip/python-zulip-api).
* [Follow us](https://twitter.com/zulip) on Twitter.
- "Like" and retweet [our tweets](https://twitter.com/zulip).
For both of the following, you'll need to make an account on the site if you
don't already have one.
- Upvote and post feedback on Zulip on comparison websites. A couple specific
ones to highlight:
* [Like Zulip](https://alternativeto.net/software/zulip-chat-server/) on
AlternativeTo. We recommend upvoting a couple of other products you like
as well, both to give back to their community, and since single-upvote
accounts are generally given less weight. You can also
[upvote Zulip](https://alternativeto.net/software/slack/) on their page
for Slack.
* [Add Zulip to your stack](https://stackshare.io/zulip) on StackShare, star
it, and upvote the reasons why people like Zulip that you find most
compelling. Again, we recommend adding a few other products that you like
as well.
- [AlternativeTo](https://alternativeto.net/software/zulip-chat-server/). You can also
[upvote Zulip](https://alternativeto.net/software/slack/) on their page
for Slack.
- [Add Zulip to your stack](https://stackshare.io/zulip) on StackShare, star
it, and upvote the reasons why people like Zulip that you find most
compelling.
We have a doc with more detailed instructions and a few other sites, if you
have been using Zulip for a while and want to contribute more.
**Blog posts**. Writing a blog post about your experiences with Zulip, or
about a technical aspect of Zulip can be a great way to spread the word
about Zulip.
We also occasionally [publish](https://blog.zulip.org/) long-form
articles related to Zulip. Our posts typically get tens of thousands
of views, and we always have good ideas for blog posts that we can
outline but don't have time to write. If you are an experienced writer
or copyeditor, send us a portfolio; we'd love to talk!

Dockerfile-postgresql

@@ -1,25 +1,15 @@
# This is a multiarch Dockerfile. See https://docs.docker.com/desktop/multi-arch/
#
# To set up the first time:
# docker buildx create --name multiarch --use
#
# To build:
# docker buildx build --platform linux/amd64,linux/arm64 \
# -f ./Dockerfile-postgresql -t zulip/zulip-postgresql:14 --push .
# To build run `docker build -f Dockerfile-postgresql .` from the root of the
# zulip repo.
# Currently the PostgreSQL images do not support automatic upgrading of
# Currently the postgres images do not support automatic upgrading of
# the on-disk data in volumes. So the base image can not currently be upgraded
# without users needing a manual pgdump and restore.
# https://hub.docker.com/r/groonga/pgroonga/tags
ARG PGROONGA_VERSION=latest
ARG POSTGRESQL_VERSION=14
FROM groonga/pgroonga:$PGROONGA_VERSION-alpine-$POSTGRESQL_VERSION-slim
# Install hunspell, Zulip stop words, and run Zulip database
# Install hunspell, zulip stop words, and run zulip database
# init.
FROM groonga/pgroonga:latest-alpine-10-slim
RUN apk add -U --no-cache hunspell-en
RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix
RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix
COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/local/share/postgresql/tsearch_data/zulip_english.stop
COPY scripts/setup/create-db.sql /docker-entrypoint-initdb.d/zulip-create-db.sql
COPY scripts/setup/create-pgroonga.sql /docker-entrypoint-initdb.d/zulip-create-pgroonga.sql
View File
@@ -1,3 +1,4 @@
Copyright 2011-2020 Dropbox, Inc., Kandra Labs, Inc., and contributors
Apache License
Version 2.0, January 2004
2
NOTICE
View File
@@ -1,5 +1,3 @@
Copyright 20122015 Dropbox, Inc., 20152021 Kandra Labs, Inc., and contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this project except in compliance with the License.
You may obtain a copy of the License at
116
README.md
View File
@@ -1,83 +1,77 @@
# Zulip overview
[Zulip](https://zulip.com) is an open-source team collaboration tool with unique
[topic-based threading][why-zulip] that combines the best of email and chat to
make remote work productive and delightful. Fortune 500 companies, [leading open
source projects][rust-case-study], and thousands of other organizations use
Zulip every day. Zulip is the only [modern team chat app][features] that is
designed for both live and asynchronous conversations.
Zulip is a powerful, open source group chat application that combines the
immediacy of real-time chat with the productivity benefits of threaded
conversations. Zulip is used by open source projects, Fortune 500 companies,
large standards bodies, and others who need a real-time chat system that
allows users to easily process hundreds or thousands of messages a day. With
over 500 contributors merging over 500 commits a month, Zulip is also the
largest and fastest growing open source group chat project.
Zulip is built by a distributed community of developers from all around the
world, with 74+ people who have each contributed 100+ commits. With
over 1000 contributors merging over 500 commits a month, Zulip is the
largest and fastest growing open source team chat project.
Come find us on the [development community chat](https://zulip.com/development-community/)!
[![GitHub Actions build status](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml/badge.svg)](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amain)
[![coverage status](https://img.shields.io/codecov/c/github/zulip/zulip/main.svg)](https://codecov.io/gh/zulip/zulip)
[![CircleCI branch](https://img.shields.io/circleci/project/github/zulip/zulip/master.svg)](https://circleci.com/gh/zulip/zulip/tree/master)
[![Coverage Status](https://img.shields.io/codecov/c/github/zulip/zulip/master.svg)](https://codecov.io/gh/zulip/zulip/branch/master)
[![Mypy coverage](https://img.shields.io/badge/mypy-100%25-green.svg)][mypy-coverage]
[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v1.json)](https://github.com/charliermarsh/ruff)
[![code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg)](https://github.com/prettier/prettier)
[![GitHub release](https://img.shields.io/github/release/zulip/zulip.svg)](https://github.com/zulip/zulip/releases/latest)
[![docs](https://readthedocs.org/projects/zulip/badge/?version=latest)](https://zulip.readthedocs.io/en/latest/)
[![Zulip chat](https://img.shields.io/badge/zulip-join_chat-brightgreen.svg)](https://chat.zulip.org)
[![Twitter](https://img.shields.io/badge/twitter-@zulip-blue.svg?style=flat)](https://twitter.com/zulip)
[![GitHub Sponsors](https://img.shields.io/github/sponsors/zulip)](https://github.com/sponsors/zulip)
[mypy-coverage]: https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/
[why-zulip]: https://zulip.com/why-zulip/
[rust-case-study]: https://zulip.com/case-studies/rust/
[features]: https://zulip.com/features/
## Getting started
- **Contributing code**. Check out our [guide for new
contributors](https://zulip.readthedocs.io/en/latest/contributing/contributing.html)
to get started. We have invested in making Zulips code highly
readable, thoughtfully tested, and easy to modify. Beyond that, we
have written an extraordinary 150K words of documentation for Zulip
contributors.
Click on the appropriate link below. If nothing seems to apply,
join us on the
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html)
and tell us what's up!
- **Contributing non-code**. [Report an
issue](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#reporting-issues),
[translate](https://zulip.readthedocs.io/en/latest/translating/translating.html)
Zulip into your language, or [give us
feedback](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#user-feedback).
We'd love to hear from you, whether you've been using Zulip for years, or are just
trying it out for the first time.
You might be interested in:
- **Checking Zulip out**. The best way to see Zulip in action is to drop by the
[Zulip community server](https://zulip.com/development-community/). We also
recommend reading about Zulip's [unique
approach](https://zulip.com/why-zulip/) to organizing conversations.
* **Contributing code**. Check out our
[guide for new contributors](https://zulip.readthedocs.io/en/latest/overview/contributing.html)
to get started. Zulip prides itself on maintaining a clean and
well-tested codebase, and a stock of hundreds of
[beginner-friendly issues][beginner-friendly].
- **Running a Zulip server**. Self-host Zulip directly on Ubuntu or Debian
Linux, in [Docker](https://github.com/zulip/docker-zulip), or with prebuilt
images for [Digital Ocean](https://marketplace.digitalocean.com/apps/zulip) and
[Render](https://render.com/docs/deploy-zulip).
Learn more about [self-hosting Zulip](https://zulip.com/self-hosting/).
* **Contributing non-code**.
[Report an issue](https://zulip.readthedocs.io/en/latest/overview/contributing.html#reporting-issues),
[translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) Zulip
into your language,
[write](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach)
for the Zulip blog, or
[give us feedback](https://zulip.readthedocs.io/en/latest/overview/contributing.html#user-feedback). We
would love to hear from you, even if you're just trying the product out.
- **Using Zulip without setting up a server**. Learn about [Zulip
Cloud](https://zulip.com/plans/) hosting options. Zulip sponsors free [Zulip
Cloud Standard](https://zulip.com/plans/) for hundreds of worthy
organizations, including [fellow open-source
projects](https://zulip.com/for/open-source/).
* **Supporting Zulip**. Advocate for your organization to use Zulip, write a
review in the mobile app stores, or
[upvote Zulip](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach) on
product comparison sites.
- **Participating in [outreach
programs](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#outreach-programs)**
like [Google Summer of Code](https://developers.google.com/open-source/gsoc/)
and [Outreachy](https://www.outreachy.org/).
* **Checking Zulip out**. The best way to see Zulip in action is to drop by
the
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html). We
also recommend reading Zulip for
[open source](https://zulip.com/for/open-source/), Zulip for
[companies](https://zulip.com/for/companies/), or Zulip for
[working groups and part time communities](https://zulip.com/for/working-groups-and-communities/).
- **Supporting Zulip**. Advocate for your organization to use Zulip, become a
[sponsor](https://github.com/sponsors/zulip), write a review in the mobile app
stores, or [help others find
Zulip](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#help-others-find-zulip).
* **Running a Zulip server**. Use a preconfigured [Digital Ocean droplet](https://marketplace.digitalocean.com/apps/zulip),
[install Zulip](https://zulip.readthedocs.io/en/stable/production/install.html)
directly, or use Zulip's
experimental [Docker image](https://zulip.readthedocs.io/en/latest/production/deployment.html#zulip-in-docker).
Commercial support is available; see <https://zulip.com/plans> for details.
You may also be interested in reading our [blog](https://blog.zulip.org/), and
following us on [Twitter](https://twitter.com/zulip) and
[LinkedIn](https://www.linkedin.com/company/zulip-project/).
* **Using Zulip without setting up a server**. <https://zulip.com>
offers free and commercial hosting, including providing our paid
plan for free to fellow open source projects.
* **Participating in [outreach
programs](https://zulip.readthedocs.io/en/latest/overview/contributing.html#outreach-programs)**
like Google Summer of Code.
You may also be interested in reading our [blog](https://blog.zulip.org/) or
following us on [twitter](https://twitter.com/zulip).
Zulip is distributed under the
[Apache 2.0](https://github.com/zulip/zulip/blob/main/LICENSE) license.
[Apache 2.0](https://github.com/zulip/zulip/blob/master/LICENSE) license.
[beginner-friendly]: https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22
View File
@@ -1,37 +1,28 @@
# Security policy
# Security Policy
## Reporting a vulnerability
Security announcements are sent to zulip-announce@googlegroups.com,
so you should subscribe if you are running Zulip in production.
## Reporting a Vulnerability
We love responsible reports of (potential) security issues in Zulip,
whether in the latest release or our development branch.
Our security contact is security@zulip.com. Reporters should expect a
Our security contact is security@zulip.com. Reporters should expect a
response within 24 hours.
Please include details on the issue and how you'd like to be credited
in our release notes when we publish the fix.
Our [security model][security-model] document may be a helpful
resource.
Our [security
model](https://zulip.readthedocs.io/en/latest/production/security-model.html)
document may be a helpful resource.
## Security announcements
We send security announcements to our [announcement mailing
list](https://groups.google.com/g/zulip-announce). If you are running
Zulip in production, you should subscribe, by clicking "Join group" at
the top of that page.
## Supported versions
## Supported Versions
Zulip provides security support for the latest major release, in the
form of minor security/maintenance releases.
We work hard to make [upgrades][upgrades] reliable, so that there's no
reason to run older major releases.
See also our documentation on the [Zulip release
lifecycle][release-lifecycle].
[security-model]: https://zulip.readthedocs.io/en/latest/production/security-model.html
[upgrades]: https://zulip.readthedocs.io/en/latest/production/upgrade-or-modify.html#upgrading-to-a-release
[release-lifecycle]: https://zulip.readthedocs.io/en/latest/overview/release-lifecycle.html
We work hard to make
[upgrades](https://zulip.readthedocs.io/en/latest/production/upgrade-or-modify.html#upgrading-to-a-release)
reliable, so that there's no reason to run older major releases.
149
Vagrantfile vendored
View File
@@ -1,8 +1,53 @@
# -*- mode: ruby -*-
Vagrant.require_version ">= 2.2.6"
VAGRANTFILE_API_VERSION = "2"
def command?(name)
`which #{name} > /dev/null 2>&1`
$?.success?
end
if Vagrant::VERSION == "1.8.7" then
path = `which curl`
if path.include?('/opt/vagrant/embedded/bin/curl') then
puts "In Vagrant 1.8.7, curl is broken. Please use Vagrant 2.0.2 "\
"or run 'sudo rm -f /opt/vagrant/embedded/bin/curl' to fix the "\
"issue before provisioning. See "\
"https://github.com/mitchellh/vagrant/issues/7997 "\
"for reference."
exit
end
end
# Workaround: Vagrant removed the atlas.hashicorp.com to
# vagrantcloud.com redirect in February 2018. The value of
# DEFAULT_SERVER_URL in Vagrant versions less than 1.9.3 is
# atlas.hashicorp.com, which means that removal broke the fetching and
# updating of boxes (since the old URL doesn't work). See
# https://github.com/hashicorp/vagrant/issues/9442
if Vagrant::DEFAULT_SERVER_URL == "atlas.hashicorp.com"
Vagrant::DEFAULT_SERVER_URL.replace('https://vagrantcloud.com')
end
# Monkey patch https://github.com/hashicorp/vagrant/pull/10879 so we
# can fall back to another provider if docker is not installed.
begin
require Vagrant.source_root.join("plugins", "providers", "docker", "provider")
rescue LoadError
else
VagrantPlugins::DockerProvider::Provider.class_eval do
method(:usable?).owner == singleton_class or def self.usable?(raise_error=false)
VagrantPlugins::DockerProvider::Driver.new.execute("docker", "version")
true
rescue Vagrant::Errors::CommandUnavailable, VagrantPlugins::DockerProvider::Errors::ExecuteError
raise if raise_error
return false
end
end
end
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
Vagrant.configure("2") do |config|
# The Zulip development environment runs on 9991 on the guest.
host_port = 9991
http_proxy = https_proxy = no_proxy = nil
@@ -13,20 +58,17 @@ Vagrant.configure("2") do |config|
vm_memory = "2048"
ubuntu_mirror = ""
vboxadd_version = nil
config.vm.box = "bento/ubuntu-20.04"
config.vm.synced_folder ".", "/vagrant", disabled: true
config.vm.synced_folder ".", "/srv/zulip", docker_consistency: "z"
config.vm.synced_folder ".", "/srv/zulip"
vagrant_config_file = ENV["HOME"] + "/.zulip-vagrant-config"
vagrant_config_file = ENV['HOME'] + "/.zulip-vagrant-config"
if File.file?(vagrant_config_file)
IO.foreach(vagrant_config_file) do |line|
line.chomp!
key, value = line.split(nil, 2)
case key
when /^([#;]|$)/ # ignore comments
when /^([#;]|$)/; # ignore comments
when "HTTP_PROXY"; http_proxy = value
when "HTTPS_PROXY"; https_proxy = value
when "NO_PROXY"; no_proxy = value
@@ -35,7 +77,6 @@ Vagrant.configure("2") do |config|
when "GUEST_CPUS"; vm_num_cpus = value
when "GUEST_MEMORY_MB"; vm_memory = value
when "UBUNTU_MIRROR"; ubuntu_mirror = value
when "VBOXADD_VERSION"; vboxadd_version = value
end
end
end
@@ -53,9 +94,9 @@ Vagrant.configure("2") do |config|
elsif !http_proxy.nil? or !https_proxy.nil?
# This prints twice due to https://github.com/hashicorp/vagrant/issues/7504
# We haven't figured out a workaround.
puts "You have specified value for proxy in ~/.zulip-vagrant-config file but did not " \
"install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install " \
"vagrant-proxyconf` in a terminal. This error will appear twice."
puts 'You have specified value for proxy in ~/.zulip-vagrant-config file but did not ' \
'install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install ' \
'vagrant-proxyconf` in a terminal. This error will appear twice.'
exit
end
@@ -63,7 +104,6 @@ Vagrant.configure("2") do |config|
config.vm.network "forwarded_port", guest: 9994, host: host_port + 3, host_ip: host_ip_addr
# Specify Docker provider before VirtualBox provider so it's preferred.
config.vm.provider "docker" do |d, override|
override.vm.box = nil
d.build_dir = File.join(__dir__, "tools", "setup", "dev-vagrant-docker")
d.build_args = ["--build-arg", "VAGRANT_UID=#{Process.uid}"]
if !ubuntu_mirror.empty?
@@ -74,35 +114,76 @@ Vagrant.configure("2") do |config|
end
config.vm.provider "virtualbox" do |vb, override|
override.vm.box = "hashicorp/bionic64"
# It's possible we can get away with just 1.5GB; more testing needed
vb.memory = vm_memory
vb.cpus = vm_num_cpus
if !vboxadd_version.nil?
override.vbguest.installer = Class.new(VagrantVbguest::Installers::Ubuntu) do
define_method(:host_version) do |reload = false|
VagrantVbguest::Version(vboxadd_version)
end
end
override.vbguest.allow_downgrade = true
override.vbguest.iso_path = "https://download.virtualbox.org/virtualbox/#{vboxadd_version}/VBoxGuestAdditions_#{vboxadd_version}.iso"
end
end
config.vm.provider "hyperv" do |h, override|
h.memory = vm_memory
h.maxmemory = vm_memory
h.cpus = vm_num_cpus
end
$provision_script = <<SCRIPT
set -x
set -e
set -o pipefail
config.vm.provider "parallels" do |prl, override|
prl.memory = vm_memory
prl.cpus = vm_num_cpus
end
# Code should go here, rather than tools/provision, only if it is
# something that we don't want to happen when running provision in a
# development environment not using Vagrant.
# Set the Ubuntu mirror
[ ! '#{ubuntu_mirror}' ] || sudo sed -i 's|http://\\(\\w*\\.\\)*archive\\.ubuntu\\.com/ubuntu/\\? |#{ubuntu_mirror} |' /etc/apt/sources.list
# Set the MOTD on the system to have Zulip instructions
sudo ln -nsf /srv/zulip/tools/setup/dev-motd /etc/update-motd.d/99-zulip-dev
sudo rm -f /etc/update-motd.d/10-help-text
sudo dpkg --purge landscape-client landscape-common ubuntu-release-upgrader-core update-manager-core update-notifier-common ubuntu-server
sudo dpkg-divert --add --rename /etc/default/motd-news
sudo sh -c 'echo ENABLED=0 > /etc/default/motd-news'
# If the host is running SELinux remount the /sys/fs/selinux directory as read only,
# needed for apt-get to work.
if [ -d "/sys/fs/selinux" ]; then
sudo mount -o remount,ro /sys/fs/selinux
fi
# Set default locale, this prevents errors if the user has another locale set.
if ! grep -q 'LC_ALL=en_US.UTF-8' /etc/default/locale; then
echo "LC_ALL=en_US.UTF-8" | sudo tee -a /etc/default/locale
fi
# Set an environment variable, so that we won't print the virtualenv
# shell warning (it'll be wrong, since the shell is dying anyway)
export SKIP_VENV_SHELL_WARNING=1
# End `set -x`, so that the end of provision doesn't look like an error
# message after a successful run.
set +x
# Check if the zulip directory is writable
if [ ! -w /srv/zulip ]; then
echo "The vagrant user is unable to write to the zulip directory."
echo "To fix this, run the following commands on the host machine:"
# sudo is required since our uid is not 1000
echo ' vagrant halt -f'
echo ' rm -rf /PATH/TO/ZULIP/CLONE/.vagrant'
echo ' sudo chown -R 1000:$(id -g) /PATH/TO/ZULIP/CLONE'
echo "Replace /PATH/TO/ZULIP/CLONE with the path to where zulip code is cloned."
echo "You can resume setting up your vagrant environment by running:"
echo " vagrant up"
exit 1
fi
# Provision the development environment
ln -nsf /srv/zulip ~/zulip
/srv/zulip/tools/provision
# Run any custom provision hooks the user has configured
if [ -f /srv/zulip/tools/custom_provision ]; then
chmod +x /srv/zulip/tools/custom_provision
/srv/zulip/tools/custom_provision
fi
SCRIPT
config.vm.provision "shell",
# We want provision to be run with the permissions of the vagrant user.
privileged: false,
path: "tools/setup/vagrant-provision",
env: { "UBUNTU_MIRROR" => ubuntu_mirror }
inline: $provision_script
end
View File
@@ -5,7 +5,7 @@ from datetime import datetime, timedelta
from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
from django.conf import settings
from django.db import connection, models
from django.db import connection
from django.db.models import F
from psycopg2.sql import SQL, Composable, Identifier, Literal
@@ -17,40 +17,37 @@ from analytics.models import (
StreamCount,
UserCount,
installation_epoch,
last_successful_fill,
)
from zerver.lib.logging_util import log_to_file
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, floor_to_hour, verify_UTC
from zerver.models import Message, Realm, RealmAuditLog, Stream, UserActivityInterval, UserProfile
from zerver.models import (
Message,
Realm,
RealmAuditLog,
Stream,
UserActivityInterval,
UserProfile,
models,
)
## Logging setup ##
logger = logging.getLogger("zulip.management")
logger = logging.getLogger('zulip.management')
log_to_file(logger, settings.ANALYTICS_LOG_PATH)
# You can't subtract timedelta.max from a datetime, so use this instead
TIMEDELTA_MAX = timedelta(days=365 * 1000)
TIMEDELTA_MAX = timedelta(days=365*1000)
## Class definitions ##
class CountStat:
HOUR = "hour"
DAY = "day"
HOUR = 'hour'
DAY = 'day'
FREQUENCIES = frozenset([HOUR, DAY])
@property
def time_increment(self) -> timedelta:
if self.frequency == CountStat.HOUR:
return timedelta(hours=1)
return timedelta(days=1)
def __init__(
self,
property: str,
data_collector: "DataCollector",
frequency: str,
interval: Optional[timedelta] = None,
) -> None:
def __init__(self, property: str, data_collector: 'DataCollector', frequency: str,
interval: Optional[timedelta]=None) -> None:
self.property = property
self.data_collector = data_collector
# might have to do something different for bitfields
@@ -59,55 +56,34 @@ class CountStat:
self.frequency = frequency
if interval is not None:
self.interval = interval
else:
self.interval = self.time_increment
elif frequency == CountStat.HOUR:
self.interval = timedelta(hours=1)
else: # frequency == CountStat.DAY
self.interval = timedelta(days=1)
def __repr__(self) -> str:
def __str__(self) -> str:
return f"<CountStat: {self.property}>"
def last_successful_fill(self) -> Optional[datetime]:
fillstate = FillState.objects.filter(property=self.property).first()
if fillstate is None:
return None
if fillstate.state == FillState.DONE:
return fillstate.end_time
return fillstate.end_time - self.time_increment
class LoggingCountStat(CountStat):
def __init__(self, property: str, output_table: Type[BaseCount], frequency: str) -> None:
CountStat.__init__(self, property, DataCollector(output_table, None), frequency)
class DependentCountStat(CountStat):
def __init__(
self,
property: str,
data_collector: "DataCollector",
frequency: str,
interval: Optional[timedelta] = None,
dependencies: Sequence[str] = [],
) -> None:
def __init__(self, property: str, data_collector: 'DataCollector', frequency: str,
interval: Optional[timedelta] = None, dependencies: Sequence[str] = []) -> None:
CountStat.__init__(self, property, data_collector, frequency, interval=interval)
self.dependencies = dependencies
class DataCollector:
def __init__(
self,
output_table: Type[BaseCount],
pull_function: Optional[Callable[[str, datetime, datetime, Optional[Realm]], int]],
) -> None:
def __init__(self, output_table: Type[BaseCount],
pull_function: Optional[Callable[[str, datetime, datetime, Optional[Realm]], int]]) -> None:
self.output_table = output_table
self.pull_function = pull_function
## CountStat-level operations ##
def process_count_stat(
stat: CountStat, fill_to_time: datetime, realm: Optional[Realm] = None
) -> None:
def process_count_stat(stat: CountStat, fill_to_time: datetime,
realm: Optional[Realm]=None) -> None:
# TODO: The realm argument is not yet supported, in that we don't
# have a solution for how to update FillState if it is passed. It
# exists solely as partial plumbing for when we do fully implement
@@ -117,6 +93,13 @@ def process_count_stat(
# the CountStat object passed in needs to have come from
# E.g. get_count_stats(realm), i.e. have the realm_id already
# entered into the SQL query defined by the CountState object.
if stat.frequency == CountStat.HOUR:
time_increment = timedelta(hours=1)
elif stat.frequency == CountStat.DAY:
time_increment = timedelta(days=1)
else:
raise AssertionError(f"Unknown frequency: {stat.frequency}")
verify_UTC(fill_to_time)
if floor_to_hour(fill_to_time) != fill_to_time:
raise ValueError(f"fill_to_time must be on an hour boundary: {fill_to_time}")
@@ -124,14 +107,14 @@ def process_count_stat(
fill_state = FillState.objects.filter(property=stat.property).first()
if fill_state is None:
currently_filled = installation_epoch()
fill_state = FillState.objects.create(
property=stat.property, end_time=currently_filled, state=FillState.DONE
)
fill_state = FillState.objects.create(property=stat.property,
end_time=currently_filled,
state=FillState.DONE)
logger.info("INITIALIZED %s %s", stat.property, currently_filled)
elif fill_state.state == FillState.STARTED:
logger.info("UNDO START %s %s", stat.property, fill_state.end_time)
do_delete_counts_at_hour(stat, fill_state.end_time)
currently_filled = fill_state.end_time - stat.time_increment
currently_filled = fill_state.end_time - time_increment
do_update_fill_state(fill_state, currently_filled, FillState.DONE)
logger.info("UNDO DONE %s", stat.property)
elif fill_state.state == FillState.DONE:
@@ -141,15 +124,14 @@ def process_count_stat(
if isinstance(stat, DependentCountStat):
for dependency in stat.dependencies:
dependency_fill_time = COUNT_STATS[dependency].last_successful_fill()
dependency_fill_time = last_successful_fill(dependency)
if dependency_fill_time is None:
logger.warning(
"DependentCountStat %s run before dependency %s.", stat.property, dependency
)
logger.warning("DependentCountStat %s run before dependency %s.",
stat.property, dependency)
return
fill_to_time = min(fill_to_time, dependency_fill_time)
currently_filled = currently_filled + stat.time_increment
currently_filled = currently_filled + time_increment
while currently_filled <= fill_to_time:
logger.info("START %s %s", stat.property, currently_filled)
start = time.time()
@@ -157,35 +139,26 @@ def process_count_stat(
do_fill_count_stat_at_hour(stat, currently_filled, realm)
do_update_fill_state(fill_state, currently_filled, FillState.DONE)
end = time.time()
currently_filled = currently_filled + stat.time_increment
logger.info("DONE %s (%dms)", stat.property, (end - start) * 1000)
currently_filled = currently_filled + time_increment
logger.info("DONE %s (%dms)", stat.property, (end-start)*1000)
def do_update_fill_state(fill_state: FillState, end_time: datetime, state: int) -> None:
fill_state.end_time = end_time
fill_state.state = state
fill_state.save()
# We assume end_time is valid (e.g. is on a day or hour boundary as appropriate)
# and is time-zone-aware. It is the caller's responsibility to enforce this!
def do_fill_count_stat_at_hour(
stat: CountStat, end_time: datetime, realm: Optional[Realm] = None
) -> None:
# and is timezone aware. It is the caller's responsibility to enforce this!
def do_fill_count_stat_at_hour(stat: CountStat, end_time: datetime, realm: Optional[Realm]=None) -> None:
start_time = end_time - stat.interval
if not isinstance(stat, LoggingCountStat):
timer = time.time()
assert stat.data_collector.pull_function is not None
assert(stat.data_collector.pull_function is not None)
rows_added = stat.data_collector.pull_function(stat.property, start_time, end_time, realm)
logger.info(
"%s run pull_function (%dms/%sr)",
stat.property,
(time.time() - timer) * 1000,
rows_added,
)
logger.info("%s run pull_function (%dms/%sr)",
stat.property, (time.time()-timer)*1000, rows_added)
do_aggregate_to_summary_table(stat, end_time, realm)
def do_delete_counts_at_hour(stat: CountStat, end_time: datetime) -> None:
if isinstance(stat, LoggingCountStat):
InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()
@@ -197,22 +170,19 @@ def do_delete_counts_at_hour(stat: CountStat, end_time: datetime) -> None:
RealmCount.objects.filter(property=stat.property, end_time=end_time).delete()
InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()
def do_aggregate_to_summary_table(
stat: CountStat, end_time: datetime, realm: Optional[Realm] = None
) -> None:
def do_aggregate_to_summary_table(stat: CountStat, end_time: datetime,
realm: Optional[Realm]=None) -> None:
cursor = connection.cursor()
# Aggregate into RealmCount
output_table = stat.data_collector.output_table
if realm is not None:
realm_clause: Composable = SQL("AND zerver_realm.id = {}").format(Literal(realm.id))
realm_clause = SQL("AND zerver_realm.id = {}").format(Literal(realm.id))
else:
realm_clause = SQL("")
if output_table in (UserCount, StreamCount):
realmcount_query = SQL(
"""
realmcount_query = SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
@@ -227,25 +197,19 @@ def do_aggregate_to_summary_table(
{output_table}.end_time = %(end_time)s
{realm_clause}
GROUP BY zerver_realm.id, {output_table}.subgroup
"""
).format(
""").format(
output_table=Identifier(output_table._meta.db_table),
realm_clause=realm_clause,
)
start = time.time()
cursor.execute(
realmcount_query,
{
"property": stat.property,
"end_time": end_time,
},
)
cursor.execute(realmcount_query, {
'property': stat.property,
'end_time': end_time,
})
end = time.time()
logger.info(
"%s RealmCount aggregation (%dms/%sr)",
stat.property,
(end - start) * 1000,
cursor.rowcount,
stat.property, (end - start) * 1000, cursor.rowcount,
)
if realm is None:
@@ -254,8 +218,7 @@ def do_aggregate_to_summary_table(
#
# TODO: Add support for updating installation data after
# changing an individual realm's values.
installationcount_query = SQL(
"""
installationcount_query = SQL("""
INSERT INTO analytics_installationcount
(value, property, subgroup, end_time)
SELECT
@@ -265,51 +228,36 @@ def do_aggregate_to_summary_table(
property = %(property)s AND
end_time = %(end_time)s
GROUP BY analytics_realmcount.subgroup
"""
)
""")
start = time.time()
cursor.execute(
installationcount_query,
{
"property": stat.property,
"end_time": end_time,
},
)
cursor.execute(installationcount_query, {
'property': stat.property,
'end_time': end_time,
})
end = time.time()
logger.info(
"%s InstallationCount aggregation (%dms/%sr)",
stat.property,
(end - start) * 1000,
cursor.rowcount,
stat.property, (end - start) * 1000, cursor.rowcount,
)
cursor.close()
## Utility functions called from outside counts.py ##
# called from zerver.actions; should not throw any errors
def do_increment_logging_stat(
zerver_object: Union[Realm, UserProfile, Stream],
stat: CountStat,
subgroup: Optional[Union[str, int, bool]],
event_time: datetime,
increment: int = 1,
) -> None:
# called from zerver/lib/actions.py; should not throw any errors
def do_increment_logging_stat(zerver_object: Union[Realm, UserProfile, Stream], stat: CountStat,
subgroup: Optional[Union[str, int, bool]], event_time: datetime,
increment: int=1) -> None:
if not increment:
return
table = stat.data_collector.output_table
if table == RealmCount:
assert isinstance(zerver_object, Realm)
id_args: Dict[str, Union[Realm, UserProfile, Stream]] = {"realm": zerver_object}
id_args = {'realm': zerver_object}
elif table == UserCount:
assert isinstance(zerver_object, UserProfile)
id_args = {"realm": zerver_object.realm, "user": zerver_object}
id_args = {'realm': zerver_object.realm, 'user': zerver_object}
else: # StreamCount
assert isinstance(zerver_object, Stream)
id_args = {"realm": zerver_object.realm, "stream": zerver_object}
id_args = {'realm': zerver_object.realm, 'stream': zerver_object}
if stat.frequency == CountStat.DAY:
end_time = ceiling_to_day(event_time)
@@ -317,16 +265,11 @@ def do_increment_logging_stat(
end_time = ceiling_to_hour(event_time)
row, created = table.objects.get_or_create(
property=stat.property,
subgroup=subgroup,
end_time=end_time,
defaults={"value": increment},
**id_args,
)
property=stat.property, subgroup=subgroup, end_time=end_time,
defaults={'value': increment}, **id_args)
if not created:
row.value = F("value") + increment
row.save(update_fields=["value"])
row.value = F('value') + increment
row.save(update_fields=['value'])
def do_drop_all_analytics_tables() -> None:
UserCount.objects.all().delete()
@@ -335,7 +278,6 @@ def do_drop_all_analytics_tables() -> None:
InstallationCount.objects.all().delete()
FillState.objects.all().delete()
def do_drop_single_stat(property: str) -> None:
UserCount.objects.filter(property=property).delete()
StreamCount.objects.filter(property=property).delete()
@@ -343,58 +285,49 @@ def do_drop_single_stat(property: str) -> None:
InstallationCount.objects.filter(property=property).delete()
FillState.objects.filter(property=property).delete()
## DataCollector-level operations ##
QueryFn = Callable[[Dict[str, Composable]], Composable]
def do_pull_by_sql_query(
property: str,
start_time: datetime,
end_time: datetime,
query: QueryFn,
group_by: Optional[Tuple[Type[models.Model], str]],
group_by: Optional[Tuple[models.Model, str]],
) -> int:
if group_by is None:
subgroup: Composable = SQL("NULL")
group_by_clause: Composable = SQL("")
subgroup = SQL('NULL')
group_by_clause = SQL('')
else:
subgroup = Identifier(group_by[0]._meta.db_table, group_by[1])
group_by_clause = SQL(", {}").format(subgroup)
group_by_clause = SQL(', {}').format(subgroup)
# We do string replacement here because cursor.execute will reject a
# group_by_clause given as a param.
# We pass in the datetimes as params to cursor.execute so that we don't have to
# think about how to convert python datetimes to SQL datetimes.
query_ = query(
{
"subgroup": subgroup,
"group_by_clause": group_by_clause,
}
)
query_ = query({
'subgroup': subgroup,
'group_by_clause': group_by_clause,
})
cursor = connection.cursor()
cursor.execute(
query_,
{
"property": property,
"time_start": start_time,
"time_end": end_time,
},
)
cursor.execute(query_, {
'property': property,
'time_start': start_time,
'time_end': end_time,
})
rowcount = cursor.rowcount
cursor.close()
return rowcount
def sql_data_collector(
output_table: Type[BaseCount],
query: QueryFn,
group_by: Optional[Tuple[Type[models.Model], str]],
group_by: Optional[Tuple[models.Model, str]],
) -> DataCollector:
def pull_function(
property: str, start_time: datetime, end_time: datetime, realm: Optional[Realm] = None
) -> int:
def pull_function(property: str, start_time: datetime, end_time: datetime,
realm: Optional[Realm] = None) -> int:
# The pull function type needs to accept a Realm argument
# because the 'minutes_active::day' CountStat uses
# DataCollector directly for do_pull_minutes_active, which
@@ -402,23 +335,16 @@ def sql_data_collector(
# realm should have been already encoded in the `query` we're
# passed.
return do_pull_by_sql_query(property, start_time, end_time, query, group_by)
return DataCollector(output_table, pull_function)
def do_pull_minutes_active(
property: str, start_time: datetime, end_time: datetime, realm: Optional[Realm] = None
) -> int:
user_activity_intervals = (
UserActivityInterval.objects.filter(
end__gt=start_time,
start__lt=end_time,
)
.select_related(
"user_profile",
)
.values_list("user_profile_id", "user_profile__realm_id", "start", "end")
)
def do_pull_minutes_active(property: str, start_time: datetime, end_time: datetime,
realm: Optional[Realm] = None) -> int:
user_activity_intervals = UserActivityInterval.objects.filter(
end__gt=start_time, start__lt=end_time,
).select_related(
'user_profile',
).values_list(
'user_profile_id', 'user_profile__realm_id', 'start', 'end')
seconds_active: Dict[Tuple[int, int], float] = defaultdict(float)
for user_id, realm_id, interval_start, interval_end in user_activity_intervals:
@@ -427,28 +353,18 @@ def do_pull_minutes_active(
end = min(end_time, interval_end)
seconds_active[(user_id, realm_id)] += (end - start).total_seconds()
rows = [
UserCount(
user_id=ids[0],
realm_id=ids[1],
property=property,
end_time=end_time,
value=int(seconds // 60),
)
for ids, seconds in seconds_active.items()
if seconds >= 60
]
rows = [UserCount(user_id=ids[0], realm_id=ids[1], property=property,
end_time=end_time, value=int(seconds // 60))
for ids, seconds in seconds_active.items() if seconds >= 60]
UserCount.objects.bulk_create(rows)
return len(rows)
def count_message_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause: Composable = SQL("")
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(user_id, realm_id, value, property, subgroup, end_time)
SELECT
@@ -464,18 +380,15 @@ def count_message_by_user_query(realm: Optional[Realm]) -> QueryFn:
{realm_clause}
zerver_message.date_sent < %(time_end)s
GROUP BY zerver_userprofile.id {group_by_clause}
"""
).format(**kwargs, realm_clause=realm_clause)
""").format(**kwargs, realm_clause=realm_clause)
# Note: ignores the group_by / group_by_clause.
def count_message_type_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause: Composable = SQL("")
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(realm_id, user_id, value, property, subgroup, end_time)
SELECT realm_id, id, SUM(count) AS value, %(property)s, message_type, %(time_end)s
@@ -510,9 +423,7 @@ def count_message_type_by_user_query(realm: Optional[Realm]) -> QueryFn:
zerver_recipient.type, zerver_stream.invite_only
) AS subquery
GROUP BY realm_id, id, message_type
"""
).format(**kwargs, realm_clause=realm_clause)
""").format(**kwargs, realm_clause=realm_clause)
# This query joins to the UserProfile table since all current queries that
# use this also subgroup on UserProfile.is_bot. If in the future there is a
@@ -520,11 +431,10 @@ def count_message_type_by_user_query(realm: Optional[Realm]) -> QueryFn:
# table, consider writing a new query for efficiency.
def count_message_by_stream_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause: Composable = SQL("")
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_stream.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_streamcount
(stream_id, realm_id, value, property, subgroup, end_time)
SELECT
@@ -546,19 +456,16 @@ def count_message_by_stream_query(realm: Optional[Realm]) -> QueryFn:
{realm_clause}
zerver_message.date_sent < %(time_end)s
GROUP BY zerver_stream.id {group_by_clause}
"""
).format(**kwargs, realm_clause=realm_clause)
""").format(**kwargs, realm_clause=realm_clause)
# Hardcodes the query needed by active_users:is_bot:day, since that is
# currently the only stat that uses this.
def count_user_by_realm_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause: Composable = SQL("")
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
@@ -574,9 +481,7 @@ def count_user_by_realm_query(realm: Optional[Realm]) -> QueryFn:
{realm_clause}
zerver_userprofile.is_active = TRUE
GROUP BY zerver_realm.id {group_by_clause}
"""
).format(**kwargs, realm_clause=realm_clause)
""").format(**kwargs, realm_clause=realm_clause)
# Currently hardcodes the query needed for active_users_audit:is_bot:day.
# Assumes that a user cannot have two RealmAuditLog entries with the same event_time and
@@ -584,11 +489,10 @@ def count_user_by_realm_query(realm: Optional[Realm]) -> QueryFn:
# In particular, it's important to ensure that migrations don't cause that to happen.
def check_realmauditlog_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause: Composable = SQL("")
realm_clause = SQL("")
else:
realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(user_id, realm_id, value, property, subgroup, end_time)
SELECT
@@ -611,8 +515,7 @@ def check_realmauditlog_by_user_query(realm: Optional[Realm]) -> QueryFn:
ral1.modified_user_id = zerver_userprofile.id
WHERE
ral1.event_type in ({user_created}, {user_activated}, {user_reactivated})
"""
).format(
""").format(
**kwargs,
user_created=Literal(RealmAuditLog.USER_CREATED),
user_activated=Literal(RealmAuditLog.USER_ACTIVATED),
@@ -621,14 +524,12 @@ def check_realmauditlog_by_user_query(realm: Optional[Realm]) -> QueryFn:
realm_clause=realm_clause,
)
def check_useractivityinterval_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause: Composable = SQL("")
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(user_id, realm_id, value, property, subgroup, end_time)
SELECT
@@ -642,17 +543,14 @@ def check_useractivityinterval_by_user_query(realm: Optional[Realm]) -> QueryFn:
{realm_clause}
zerver_useractivityinterval.start < %(time_end)s
GROUP BY zerver_userprofile.id {group_by_clause}
"""
).format(**kwargs, realm_clause=realm_clause)
""").format(**kwargs, realm_clause=realm_clause)
def count_realm_active_humans_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause: Composable = SQL("")
realm_clause = SQL("")
else:
realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
@@ -677,13 +575,10 @@ def count_realm_active_humans_query(realm: Optional[Realm]) -> QueryFn:
ON
usercount1.user_id = usercount2.user_id
GROUP BY usercount1.realm_id
"""
).format(**kwargs, realm_clause=realm_clause)
""").format(**kwargs, realm_clause=realm_clause)
# Currently unused and untested
count_stream_by_realm_query = lambda kwargs: SQL(
"""
count_stream_by_realm_query = lambda kwargs: SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
@@ -697,77 +592,62 @@ count_stream_by_realm_query = lambda kwargs: SQL(
zerver_stream.date_created >= %(time_start)s AND
zerver_stream.date_created < %(time_end)s
GROUP BY zerver_realm.id {group_by_clause}
"""
).format(**kwargs)
""").format(**kwargs)
def get_count_stats(realm: Optional[Realm] = None) -> Dict[str, CountStat]:
def get_count_stats(realm: Optional[Realm]=None) -> Dict[str, CountStat]:
## CountStat declarations ##
count_stats_ = [
# Messages sent stats
# Messages Sent stats
# Stats that count the number of messages sent in various ways.
# These are also the set of stats that read from the Message table.
CountStat(
"messages_sent:is_bot:hour",
sql_data_collector(
UserCount, count_message_by_user_query(realm), (UserProfile, "is_bot")
),
CountStat.HOUR,
),
CountStat(
"messages_sent:message_type:day",
sql_data_collector(UserCount, count_message_type_by_user_query(realm), None),
CountStat.DAY,
),
CountStat(
"messages_sent:client:day",
sql_data_collector(
UserCount, count_message_by_user_query(realm), (Message, "sending_client_id")
),
CountStat.DAY,
),
CountStat(
"messages_in_stream:is_bot:day",
sql_data_collector(
StreamCount, count_message_by_stream_query(realm), (UserProfile, "is_bot")
),
CountStat.DAY,
),
# Number of users stats
CountStat('messages_sent:is_bot:hour',
sql_data_collector(UserCount, count_message_by_user_query(
realm), (UserProfile, 'is_bot')),
CountStat.HOUR),
CountStat('messages_sent:message_type:day',
sql_data_collector(
UserCount, count_message_type_by_user_query(realm), None),
CountStat.DAY),
CountStat('messages_sent:client:day',
sql_data_collector(UserCount, count_message_by_user_query(realm),
(Message, 'sending_client_id')), CountStat.DAY),
CountStat('messages_in_stream:is_bot:day',
sql_data_collector(StreamCount, count_message_by_stream_query(realm),
(UserProfile, 'is_bot')), CountStat.DAY),
# Number of Users stats
# Stats that count the number of active users in the UserProfile.is_active sense.
# 'active_users_audit:is_bot:day' is the canonical record of which users were
# active on which days (in the UserProfile.is_active sense).
# Important that this stay a daily stat, so that 'realm_active_humans::day' works as expected.
CountStat(
"active_users_audit:is_bot:day",
sql_data_collector(
UserCount, check_realmauditlog_by_user_query(realm), (UserProfile, "is_bot")
),
CountStat.DAY,
),
CountStat('active_users_audit:is_bot:day',
sql_data_collector(UserCount, check_realmauditlog_by_user_query(
realm), (UserProfile, 'is_bot')),
CountStat.DAY),
# Important note: LoggingCountStat objects aren't passed the
# Realm argument, because by nature they have a logging
# structure, not a pull-from-database structure, so there's no
# way to compute them for a single realm after the fact (the
# use case for passing a Realm argument).
# Sanity check on 'active_users_audit:is_bot:day', and an archetype for future LoggingCountStats.
# In RealmCount, 'active_users_audit:is_bot:day' should be the partial
# sum sequence of 'active_users_log:is_bot:day', for any realm that
# started after the latter stat was introduced.
LoggingCountStat("active_users_log:is_bot:day", RealmCount, CountStat.DAY),
LoggingCountStat('active_users_log:is_bot:day',
RealmCount, CountStat.DAY),
# Another sanity check on 'active_users_audit:is_bot:day'. Is only an
# approximation, e.g. if a user is deactivated between the end of the
# day and when this stat is run, they won't be counted. However, is the
# simplest of the three to inspect by hand.
CountStat(
"active_users:is_bot:day",
sql_data_collector(
RealmCount, count_user_by_realm_query(realm), (UserProfile, "is_bot")
),
CountStat.DAY,
interval=TIMEDELTA_MAX,
),
CountStat('active_users:is_bot:day',
sql_data_collector(RealmCount, count_user_by_realm_query(realm), (UserProfile, 'is_bot')),
CountStat.DAY, interval=TIMEDELTA_MAX),
# Messages read stats. messages_read::hour is the total
# number of messages read, whereas
# messages_read_interactions::hour tries to count the total
@@ -775,47 +655,40 @@ def get_count_stats(realm: Optional[Realm] = None) -> Dict[str, CountStat]:
# as read (imperfect because of batching of some request
# types, but less likely to be overwhelmed by a single bulk
# operation).
LoggingCountStat("messages_read::hour", UserCount, CountStat.HOUR),
LoggingCountStat("messages_read_interactions::hour", UserCount, CountStat.HOUR),
# User activity stats
LoggingCountStat('messages_read::hour', UserCount, CountStat.HOUR),
LoggingCountStat('messages_read_interactions::hour', UserCount, CountStat.HOUR),
# User Activity stats
# Stats that measure user activity in the UserActivityInterval sense.
CountStat(
"1day_actives::day",
sql_data_collector(UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY,
interval=timedelta(days=1) - UserActivityInterval.MIN_INTERVAL_LENGTH,
),
CountStat(
"7day_actives::day",
sql_data_collector(UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY,
interval=timedelta(days=7) - UserActivityInterval.MIN_INTERVAL_LENGTH,
),
CountStat(
"15day_actives::day",
sql_data_collector(UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY,
interval=timedelta(days=15) - UserActivityInterval.MIN_INTERVAL_LENGTH,
),
CountStat(
"minutes_active::day", DataCollector(UserCount, do_pull_minutes_active), CountStat.DAY
),
CountStat('1day_actives::day',
sql_data_collector(
UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY, interval=timedelta(days=1)-UserActivityInterval.MIN_INTERVAL_LENGTH),
CountStat('15day_actives::day',
sql_data_collector(
UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY, interval=timedelta(days=15)-UserActivityInterval.MIN_INTERVAL_LENGTH),
CountStat('minutes_active::day', DataCollector(
UserCount, do_pull_minutes_active), CountStat.DAY),
# Rate limiting stats
# Used to limit the number of invitation emails sent by a realm
LoggingCountStat("invites_sent::day", RealmCount, CountStat.DAY),
LoggingCountStat('invites_sent::day', RealmCount, CountStat.DAY),
# Dependent stats
# Must come after their dependencies.
# Canonical account of the number of active humans in a realm on each day.
DependentCountStat(
"realm_active_humans::day",
sql_data_collector(RealmCount, count_realm_active_humans_query(realm), None),
CountStat.DAY,
dependencies=["active_users_audit:is_bot:day", "15day_actives::day"],
),
DependentCountStat('realm_active_humans::day',
sql_data_collector(
RealmCount, count_realm_active_humans_query(realm), None),
CountStat.DAY,
dependencies=['active_users_audit:is_bot:day', '15day_actives::day']),
]
return OrderedDict((stat.property, stat) for stat in count_stats_)
return OrderedDict([(stat.property, stat) for stat in count_stats_])
# To avoid refactoring for now COUNT_STATS can be used as before
COUNT_STATS = get_count_stats()
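Most of this file's churn is mechanical reformatting, but two behavioral refactors are easy to miss in the hunks above: the frequency-to-timedelta branch moves out of process_count_stat into a CountStat.time_increment property, and last_successful_fill becomes a method on CountStat instead of a module-level helper. Below is a minimal, self-contained sketch of that shape (hypothetical class name, standard library only; not the real class from analytics/lib/counts.py, which also carries a data collector):

from datetime import timedelta
from typing import Optional

class CountStatSketch:
    HOUR = "hour"
    DAY = "day"

    def __init__(self, property: str, frequency: str,
                 interval: Optional[timedelta] = None) -> None:
        self.property = property
        self.frequency = frequency
        # The interval default reuses time_increment, so callers no longer
        # branch on frequency themselves.
        self.interval = interval if interval is not None else self.time_increment

    @property
    def time_increment(self) -> timedelta:
        # hour -> 1 hour, anything else (day) -> 1 day, mirroring the diff above.
        if self.frequency == CountStatSketch.HOUR:
            return timedelta(hours=1)
        return timedelta(days=1)

stat = CountStatSketch("messages_sent:is_bot:hour", CountStatSketch.HOUR)
assert stat.time_increment == timedelta(hours=1)
assert stat.interval == timedelta(hours=1)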
View File
@@ -5,18 +5,11 @@ from typing import List
from analytics.lib.counts import CountStat
def generate_time_series_data(
days: int = 100,
business_hours_base: float = 10,
non_business_hours_base: float = 10,
growth: float = 1,
autocorrelation: float = 0,
spikiness: float = 1,
holiday_rate: float = 0,
frequency: str = CountStat.DAY,
partial_sum: bool = False,
random_seed: int = 26,
) -> List[int]:
def generate_time_series_data(days: int=100, business_hours_base: float=10,
non_business_hours_base: float=10, growth: float=1,
autocorrelation: float=0, spikiness: float=1,
holiday_rate: float=0, frequency: str=CountStat.DAY,
partial_sum: bool=False, random_seed: int=26) -> List[int]:
"""
Generate semi-realistic looking time series data for testing analytics graphs.
@@ -37,43 +30,35 @@ def generate_time_series_data(
random_seed -- Seed for random number generator.
"""
if frequency == CountStat.HOUR:
length = days * 24
length = days*24
seasonality = [non_business_hours_base] * 24 * 7
for day in range(5):
for hour in range(8):
seasonality[24 * day + hour] = business_hours_base
holidays = []
seasonality[24*day + hour] = business_hours_base
holidays = []
for i in range(days):
holidays.extend([random() < holiday_rate] * 24)
elif frequency == CountStat.DAY:
length = days
seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [
24 * non_business_hours_base
] * 2
seasonality = [8*business_hours_base + 16*non_business_hours_base] * 5 + \
[24*non_business_hours_base] * 2
holidays = [random() < holiday_rate for i in range(days)]
else:
raise AssertionError(f"Unknown frequency: {frequency}")
if length < 2:
raise AssertionError(
f"Must be generating at least 2 data points. Currently generating {length}"
)
growth_base = growth ** (1.0 / (length - 1))
values_no_noise = [
seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)
]
raise AssertionError("Must be generating at least 2 data points. "
f"Currently generating {length}")
growth_base = growth ** (1. / (length-1))
values_no_noise = [seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)]
seed(random_seed)
noise_scalars = [gauss(0, 1)]
for i in range(1, length):
noise_scalars.append(
noise_scalars[-1] * autocorrelation + gauss(0, 1) * (1 - autocorrelation)
)
noise_scalars.append(noise_scalars[-1]*autocorrelation + gauss(0, 1)*(1-autocorrelation))
values = [
0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)
]
values = [0 if holiday else int(v + sqrt(v)*noise_scalar*spikiness)
for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)]
if partial_sum:
for i in range(1, length):
values[i] = values[i - 1] + values[i]
values[i] = values[i-1] + values[i]
return [max(v, 0) for v in values]
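A short usage sketch for generate_time_series_data as defined above; the import paths assume a standard Zulip development checkout, and the specific argument values are only illustrative:

from analytics.lib.counts import CountStat
from analytics.lib.fixtures import generate_time_series_data

# 30 days of hourly data with business-hours seasonality and mild noise.
values = generate_time_series_data(
    days=30,
    business_hours_base=20,
    non_business_hours_base=5,
    autocorrelation=0.5,
    frequency=CountStat.HOUR,
)
assert len(values) == 30 * 24       # hourly frequency yields days * 24 points
assert all(v >= 0 for v in values)  # the final max(v, 0) clamps negatives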
View File
@@ -9,9 +9,8 @@ from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC
# If min_length is greater than 0, pads the list to the left.
# So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22],
# and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
def time_range(
start: datetime, end: datetime, frequency: str, min_length: Optional[int]
) -> List[datetime]:
def time_range(start: datetime, end: datetime, frequency: str,
min_length: Optional[int]) -> List[datetime]:
verify_UTC(start)
verify_UTC(end)
if frequency == CountStat.HOUR:
@@ -25,7 +24,7 @@ def time_range(
times = []
if min_length is not None:
start = min(start, end - (min_length - 1) * step)
start = min(start, end - (min_length-1)*step)
current = end
while current >= start:
times.append(current)
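A worked example of the padding behaviour the comment above describes, using hypothetical dates; it assumes a Zulip development checkout for the imports and that, as the comment states, the result is returned in ascending order:

from datetime import datetime, timezone

from analytics.lib.counts import CountStat
from analytics.lib.time_utils import time_range

start = datetime(2020, 9, 20, tzinfo=timezone.utc)
end = datetime(2020, 9, 22, tzinfo=timezone.utc)

# Without padding: one entry per day, Sep 20 through Sep 22.
assert len(time_range(start, end, CountStat.DAY, None)) == 3

# min_length=5 pads on the left, moving the effective start back to Sep 18.
padded = time_range(start, end, CountStat.DAY, 5)
assert len(padded) == 5
assert padded[0] == datetime(2020, 9, 18, tzinfo=timezone.utc)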
View File
@@ -0,0 +1,82 @@
import datetime
import logging
import time
from typing import Any, Dict
from django.core.management.base import BaseCommand, CommandParser
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.models import Message, Recipient
def compute_stats(log_level: int) -> None:
logger = logging.getLogger()
logger.setLevel(log_level)
one_week_ago = timestamp_to_datetime(time.time()) - datetime.timedelta(weeks=1)
mit_query = Message.objects.filter(sender__realm__string_id="zephyr",
recipient__type=Recipient.STREAM,
date_sent__gt=one_week_ago)
for bot_sender_start in ["imap.", "rcmd.", "sys."]:
mit_query = mit_query.exclude(sender__email__startswith=(bot_sender_start))
# Filtering for "/" covers tabbott/extra@ and all the daemon/foo bots.
mit_query = mit_query.exclude(sender__email__contains=("/"))
mit_query = mit_query.exclude(sender__email__contains=("aim.com"))
mit_query = mit_query.exclude(
sender__email__in=["rss@mit.edu", "bash@mit.edu", "apache@mit.edu",
"bitcoin@mit.edu", "lp@mit.edu", "clocks@mit.edu",
"root@mit.edu", "nagios@mit.edu",
"www-data|local-realm@mit.edu"])
user_counts: Dict[str, Dict[str, int]] = {}
for m in mit_query.select_related("sending_client", "sender"):
email = m.sender.email
user_counts.setdefault(email, {})
user_counts[email].setdefault(m.sending_client.name, 0)
user_counts[email][m.sending_client.name] += 1
total_counts: Dict[str, int] = {}
total_user_counts: Dict[str, int] = {}
for email, counts in user_counts.items():
total_user_counts.setdefault(email, 0)
for client_name, count in counts.items():
total_counts.setdefault(client_name, 0)
total_counts[client_name] += count
total_user_counts[email] += count
logging.debug("%40s | %10s | %s", "User", "Messages", "Percentage Zulip")
top_percents: Dict[int, float] = {}
for size in [10, 25, 50, 100, 200, len(total_user_counts.keys())]:
top_percents[size] = 0.0
for i, email in enumerate(sorted(total_user_counts.keys(),
key=lambda x: -total_user_counts[x])):
percent_zulip = round(100 - (user_counts[email].get("zephyr_mirror", 0)) * 100. /
total_user_counts[email], 1)
for size in top_percents.keys():
top_percents.setdefault(size, 0)
if i < size:
top_percents[size] += (percent_zulip * 1.0 / size)
logging.debug("%40s | %10s | %s%%", email, total_user_counts[email],
percent_zulip)
logging.info("")
for size in sorted(top_percents.keys()):
logging.info("Top %6s | %s%%", size, round(top_percents[size], 1))
grand_total = sum(total_counts.values())
print(grand_total)
logging.info("%15s | %s", "Client", "Percentage")
for client in total_counts.keys():
logging.info("%15s | %s%%", client, round(100. * total_counts[client] / grand_total, 1))
class Command(BaseCommand):
help = "Compute statistics on MIT Zephyr usage."
def add_arguments(self, parser: CommandParser) -> None:
parser.add_argument('--verbose', default=False, action='store_true')
def handle(self, *args: Any, **options: Any) -> None:
level = logging.INFO
if options["verbose"]:
level = logging.DEBUG
compute_stats(level)
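The key arithmetic in compute_stats above is the percent_zulip expression: the share of a user's traffic that did not come through the zephyr_mirror client. A tiny worked example with made-up numbers:

# Hypothetical user: 40 messages total, 30 of them sent via the zephyr_mirror client.
user_counts = {"user@mit.edu": {"zephyr_mirror": 30, "website": 10}}
total_user_counts = {"user@mit.edu": 40}

email = "user@mit.edu"
percent_zulip = round(
    100 - user_counts[email].get("zephyr_mirror", 0) * 100.0 / total_user_counts[email], 1
)
assert percent_zulip == 25.0  # only a quarter of this user's traffic is native Zulip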
View File
@@ -0,0 +1,56 @@
import datetime
from typing import Any, Dict
from django.core.management.base import BaseCommand, CommandParser
from zerver.lib.statistics import seconds_usage_between
from zerver.models import UserProfile
def analyze_activity(options: Dict[str, Any]) -> None:
day_start = datetime.datetime.strptime(options["date"], "%Y-%m-%d").replace(tzinfo=datetime.timezone.utc)
day_end = day_start + datetime.timedelta(days=options["duration"])
user_profile_query = UserProfile.objects.all()
if options["realm"]:
user_profile_query = user_profile_query.filter(realm__string_id=options["realm"])
print("Per-user online duration:\n")
total_duration = datetime.timedelta(0)
for user_profile in user_profile_query:
duration = seconds_usage_between(user_profile, day_start, day_end)
if duration == datetime.timedelta(0):
continue
total_duration += duration
print(f"{user_profile.email:<37}{duration}")
print(f"\nTotal Duration: {total_duration}")
print(f"\nTotal Duration in minutes: {total_duration.total_seconds() / 60.}")
print(f"Total Duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}")
class Command(BaseCommand):
help = """Report analytics of user activity on a per-user and realm basis.
This command aggregates user activity data that is collected by each user using Zulip. It attempts
to approximate how much each user has been using Zulip per day, measured by recording each 15 minute
period where some activity has occurred (mouse move or keyboard activity).
It will correctly not count server-initiated reloads in the activity statistics.
The duration flag can be used to control how many days to show usage duration for
Usage: ./manage.py analyze_user_activity [--realm=zulip] [--date=2013-09-10] [--duration=1]
By default, if no date is selected 2013-09-10 is used. If no realm is provided, information
is shown for all realms"""
def add_arguments(self, parser: CommandParser) -> None:
parser.add_argument('--realm', action='store')
parser.add_argument('--date', action='store', default="2013-09-06")
parser.add_argument('--duration', action='store', default=1, type=int,
help="How many days to show usage information for")
def handle(self, *args: Any, **options: Any) -> None:
analyze_activity(options)
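
seconds_usage_between comes from zerver.lib.statistics and is not part of this diff. As a rough, hypothetical sketch of the 15-minute-bucket bookkeeping the help text describes (the function name and interval representation here are assumptions, not the real implementation):

import datetime
from typing import List, Tuple

Interval = Tuple[datetime.datetime, datetime.datetime]

def approximate_usage(active_intervals: List[Interval],
                      day_start: datetime.datetime,
                      day_end: datetime.datetime) -> datetime.timedelta:
    # Sum the overlap of each recorded activity interval (nominally 15 minutes
    # long) with the requested window; the command prints this total per user.
    total = datetime.timedelta(0)
    for start, end in active_intervals:
        overlap = min(end, day_end) - max(start, day_start)
        if overlap > datetime.timedelta(0):
            total += overlap
    return total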


@@ -7,8 +7,8 @@ from django.core.management.base import BaseCommand
from django.utils.timezone import now as timezone_now
from analytics.lib.counts import COUNT_STATS, CountStat
from analytics.models import installation_epoch
from zerver.lib.timestamp import TimeZoneNotUTCError, floor_to_day, floor_to_hour, verify_UTC
from analytics.models import installation_epoch, last_successful_fill
from zerver.lib.timestamp import TimezoneNotUTCException, floor_to_day, floor_to_hour, verify_UTC
from zerver.models import Realm
states = {
@@ -18,7 +18,6 @@ states = {
3: "UNKNOWN",
}
class Command(BaseCommand):
help = """Checks FillState table.
@@ -26,8 +25,8 @@ class Command(BaseCommand):
def handle(self, *args: Any, **options: Any) -> None:
fill_state = self.get_fill_state()
status = fill_state["status"]
message = fill_state["message"]
status = fill_state['status']
message = fill_state['message']
state_file_path = "/var/lib/nagios_state/check-analytics-state"
state_file_tmp = state_file_path + "-tmp"
@@ -38,18 +37,18 @@ class Command(BaseCommand):
def get_fill_state(self) -> Dict[str, Any]:
if not Realm.objects.exists():
return {"status": 0, "message": "No realms exist, so not checking FillState."}
return {'status': 0, 'message': 'No realms exist, so not checking FillState.'}
warning_unfilled_properties = []
critical_unfilled_properties = []
for property, stat in COUNT_STATS.items():
last_fill = stat.last_successful_fill()
last_fill = last_successful_fill(property)
if last_fill is None:
last_fill = installation_epoch()
try:
verify_UTC(last_fill)
except TimeZoneNotUTCError:
return {"status": 2, "message": f"FillState not in UTC for {property}"}
except TimezoneNotUTCException:
return {'status': 2, 'message': f'FillState not in UTC for {property}'}
if stat.frequency == CountStat.DAY:
floor_function = floor_to_day
@@ -61,10 +60,7 @@ class Command(BaseCommand):
critical_threshold = timedelta(minutes=150)
if floor_function(last_fill) != last_fill:
return {
"status": 2,
"message": f"FillState not on {stat.frequency} boundary for {property}",
}
return {'status': 2, 'message': f'FillState not on {stat.frequency} boundary for {property}'}
time_to_last_fill = timezone_now() - last_fill
if time_to_last_fill > critical_threshold:
@@ -73,18 +69,18 @@ class Command(BaseCommand):
warning_unfilled_properties.append(property)
if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0:
return {"status": 0, "message": "FillState looks fine."}
return {'status': 0, 'message': 'FillState looks fine.'}
if len(critical_unfilled_properties) == 0:
return {
"status": 1,
"message": "Missed filling {} once.".format(
", ".join(warning_unfilled_properties),
'status': 1,
'message': 'Missed filling {} once.'.format(
', '.join(warning_unfilled_properties),
),
}
return {
"status": 2,
"message": "Missed filling {} once. Missed filling {} at least twice.".format(
", ".join(warning_unfilled_properties),
", ".join(critical_unfilled_properties),
'status': 2,
'message': 'Missed filling {} once. Missed filling {} at least twice.'.format(
', '.join(warning_unfilled_properties),
', '.join(critical_unfilled_properties),
),
}
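
Only the last entry of the states dict survives in the hunk near the top of this file's diff. The numeric codes are the standard Nagios plugin exit statuses, so the full mapping presumably reads:

states = {
    0: "OK",
    1: "WARNING",
    2: "CRITICAL",
    3: "UNKNOWN",
}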


@@ -10,12 +10,12 @@ class Command(BaseCommand):
help = """Clear analytics tables."""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument("--force", action="store_true", help="Clear analytics tables.")
parser.add_argument('--force',
action='store_true',
help="Clear analytics tables.")
def handle(self, *args: Any, **options: Any) -> None:
if options["force"]:
if options['force']:
do_drop_all_analytics_tables()
else:
raise CommandError(
"Would delete all data from analytics tables (!); use --force to do so."
)
raise CommandError("Would delete all data from analytics tables (!); use --force to do so.")


@@ -10,14 +10,18 @@ class Command(BaseCommand):
help = """Clear analytics tables."""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument("--force", action="store_true", help="Actually do it.")
parser.add_argument("--property", help="The property of the stat to be cleared.")
parser.add_argument('--force',
action='store_true',
help="Actually do it.")
parser.add_argument('--property',
type=str,
help="The property of the stat to be cleared.")
def handle(self, *args: Any, **options: Any) -> None:
property = options["property"]
property = options['property']
if property not in COUNT_STATS:
raise CommandError(f"Invalid property: {property}")
if not options["force"]:
if not options['force']:
raise CommandError("No action taken. Use --force.")
do_drop_single_stat(property)


@@ -0,0 +1,74 @@
import datetime
from argparse import ArgumentParser
from typing import Any, Optional
from django.db.models import Count, QuerySet
from django.utils.timezone import now as timezone_now
from zerver.lib.management import ZulipBaseCommand
from zerver.models import UserActivity
class Command(ZulipBaseCommand):
help = """Report rough client activity globally, for a realm, or for a user
Usage examples:
./manage.py client_activity --target server
./manage.py client_activity --target realm --realm zulip
./manage.py client_activity --target user --user hamlet@zulip.com --realm zulip"""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('--target', dest='target', required=True, type=str,
help="'server' will calculate client activity of the entire server. "
"'realm' will calculate client activity of realm. "
"'user' will calculate client activity of the user.")
parser.add_argument('--user', dest='user', type=str,
help="The email address of the user you want to calculate activity.")
self.add_realm_args(parser)
def compute_activity(self, user_activity_objects: QuerySet) -> None:
# Report data from the past week.
#
# This is a rough report of client activity because we inconsistently
# register activity from various clients; think of it as telling you
# approximately how many people from a group have used a particular
# client recently. For example, this might be useful to get a sense of
# how popular different versions of a desktop client are.
#
# Importantly, this does NOT tell you anything about the relative
# volumes of requests from clients.
threshold = timezone_now() - datetime.timedelta(days=7)
client_counts = user_activity_objects.filter(
last_visit__gt=threshold).values("client__name").annotate(
count=Count('client__name'))
total = 0
counts = []
for client_type in client_counts:
count = client_type["count"]
client = client_type["client__name"]
total += count
counts.append((count, client))
counts.sort()
for count in counts:
print(f"{count[1]:>25} {count[0]:15}")
print("Total:", total)
def handle(self, *args: Any, **options: Optional[str]) -> None:
realm = self.get_realm(options)
if options["user"] is None:
if options["target"] == "server" and realm is None:
# Report global activity.
self.compute_activity(UserActivity.objects.all())
elif options["target"] == "realm" and realm is not None:
self.compute_activity(UserActivity.objects.filter(user_profile__realm=realm))
else:
self.print_help("./manage.py", "client_activity")
elif options["target"] == "user":
user_profile = self.get_user(options["user"], realm)
self.compute_activity(UserActivity.objects.filter(user_profile=user_profile))
else:
self.print_help("./manage.py", "client_activity")


@@ -1,8 +1,7 @@
import os
from datetime import timedelta
from typing import Any, Dict, List, Mapping, Type, Union
from typing import Any, Dict, List, Mapping, Optional, Type
from unittest import mock
from django.core.files.uploadedfile import UploadedFile
from django.core.management.base import BaseCommand
from django.utils.timezone import now as timezone_now
@@ -17,14 +16,10 @@ from analytics.models import (
StreamCount,
UserCount,
)
from zerver.actions.create_realm import do_create_realm
from zerver.actions.users import do_change_user_role
from zerver.lib.actions import STREAM_ASSIGNMENT_COLORS, do_change_user_role
from zerver.lib.create_user import create_user
from zerver.lib.storage import static_path
from zerver.lib.stream_color import STREAM_ASSIGNMENT_COLORS
from zerver.lib.timestamp import floor_to_day
from zerver.lib.upload import upload_message_attachment_from_request
from zerver.models import Client, Realm, Recipient, Stream, Subscription, UserGroup, UserProfile
from zerver.models import Client, Realm, Recipient, Stream, Subscription, UserProfile
class Command(BaseCommand):
@@ -33,30 +28,16 @@ class Command(BaseCommand):
DAYS_OF_DATA = 100
random_seed = 26
def generate_fixture_data(
self,
stat: CountStat,
business_hours_base: float,
non_business_hours_base: float,
growth: float,
autocorrelation: float,
spikiness: float,
holiday_rate: float = 0,
partial_sum: bool = False,
) -> List[int]:
def generate_fixture_data(self, stat: CountStat, business_hours_base: float,
non_business_hours_base: float, growth: float,
autocorrelation: float, spikiness: float,
holiday_rate: float=0, partial_sum: bool=False) -> List[int]:
self.random_seed += 1
return generate_time_series_data(
days=self.DAYS_OF_DATA,
business_hours_base=business_hours_base,
non_business_hours_base=non_business_hours_base,
growth=growth,
autocorrelation=autocorrelation,
spikiness=spikiness,
holiday_rate=holiday_rate,
frequency=stat.frequency,
partial_sum=partial_sum,
random_seed=self.random_seed,
)
days=self.DAYS_OF_DATA, business_hours_base=business_hours_base,
non_business_hours_base=non_business_hours_base, growth=growth,
autocorrelation=autocorrelation, spikiness=spikiness, holiday_rate=holiday_rate,
frequency=stat.frequency, partial_sum=partial_sum, random_seed=self.random_seed)
def handle(self, *args: Any, **options: Any) -> None:
# TODO: This should arguably only delete the objects
@@ -64,7 +45,7 @@ class Command(BaseCommand):
do_drop_all_analytics_tables()
# This also deletes any objects with this realm as a foreign key
Realm.objects.filter(string_id="analytics").delete()
Realm.objects.filter(string_id='analytics').delete()
# Because we just deleted a bunch of objects in the database
# directly (rather than deleting individual objects in Django,
@@ -73,44 +54,23 @@ class Command(BaseCommand):
# memcached in order to ensure deleted objects aren't still
# present in the memcached cache.
from zerver.apps import flush_cache
flush_cache(None)
installation_time = timezone_now() - timedelta(days=self.DAYS_OF_DATA)
last_end_time = floor_to_day(timezone_now())
realm = do_create_realm(
string_id="analytics", name="Analytics", date_created=installation_time
)
shylock = create_user(
"shylock@analytics.ds",
"Shylock",
realm,
full_name="Shylock",
role=UserProfile.ROLE_REALM_OWNER,
force_date_joined=installation_time,
)
do_change_user_role(shylock, UserProfile.ROLE_REALM_OWNER, acting_user=None)
# Create guest user for set_guest_users_statistic.
create_user(
"bassanio@analytics.ds",
"Bassanio",
realm,
full_name="Bassanio",
role=UserProfile.ROLE_GUEST,
force_date_joined=installation_time,
)
administrators_user_group = UserGroup.objects.get(
name=UserGroup.ADMINISTRATORS_GROUP_NAME, realm=realm, is_system_group=True
)
realm = Realm.objects.create(
string_id='analytics', name='Analytics', date_created=installation_time)
with mock.patch("zerver.lib.create_user.timezone_now", return_value=installation_time):
shylock = create_user(
'shylock@analytics.ds',
'Shylock',
realm,
full_name='Shylock',
role=UserProfile.ROLE_REALM_ADMINISTRATOR
)
do_change_user_role(shylock, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
stream = Stream.objects.create(
name="all",
realm=realm,
date_created=installation_time,
can_remove_subscribers_group=administrators_user_group,
)
name='all', realm=realm, date_created=installation_time)
recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
stream.recipient = recipient
stream.save(update_fields=["recipient"])
@@ -118,218 +78,163 @@ class Command(BaseCommand):
# Subscribe shylock to the stream to avoid invariant failures.
# TODO: This should use subscribe_users_to_streams from populate_db.
subs = [
Subscription(
recipient=recipient,
user_profile=shylock,
is_user_active=shylock.is_active,
color=STREAM_ASSIGNMENT_COLORS[0],
),
Subscription(recipient=recipient,
user_profile=shylock,
color=STREAM_ASSIGNMENT_COLORS[0]),
]
Subscription.objects.bulk_create(subs)
# Create an attachment in the database for set_storage_space_used_statistic.
IMAGE_FILE_PATH = static_path("images/test-images/checkbox.png")
file_info = os.stat(IMAGE_FILE_PATH)
file_size = file_info.st_size
with open(IMAGE_FILE_PATH, "rb") as fp:
upload_message_attachment_from_request(UploadedFile(fp), shylock, file_size)
FixtureData = Mapping[Union[str, int, None], List[int]]
def insert_fixture_data(
stat: CountStat,
fixture_data: FixtureData,
table: Type[BaseCount],
) -> None:
end_times = time_range(
last_end_time, last_end_time, stat.frequency, len(list(fixture_data.values())[0])
)
def insert_fixture_data(stat: CountStat,
fixture_data: Mapping[Optional[str], List[int]],
table: Type[BaseCount]) -> None:
end_times = time_range(last_end_time, last_end_time, stat.frequency,
len(list(fixture_data.values())[0]))
if table == InstallationCount:
id_args: Dict[str, Any] = {}
if table == RealmCount:
id_args = {"realm": realm}
id_args = {'realm': realm}
if table == UserCount:
id_args = {"realm": realm, "user": shylock}
id_args = {'realm': realm, 'user': shylock}
if table == StreamCount:
id_args = {"stream": stream, "realm": realm}
id_args = {'stream': stream, 'realm': realm}
for subgroup, values in fixture_data.items():
table.objects.bulk_create(
table(
property=stat.property,
subgroup=subgroup,
end_time=end_time,
value=value,
**id_args,
)
for end_time, value in zip(end_times, values)
if value != 0
)
table.objects.bulk_create([
table(property=stat.property, subgroup=subgroup, end_time=end_time,
value=value, **id_args)
for end_time, value in zip(end_times, values) if value != 0])
stat = COUNT_STATS["1day_actives::day"]
realm_data: FixtureData = {
None: self.generate_fixture_data(stat, 0.08, 0.02, 3, 0.3, 6, partial_sum=True),
stat = COUNT_STATS['1day_actives::day']
realm_data: Mapping[Optional[str], List[int]] = {
None: self.generate_fixture_data(stat, .08, .02, 3, .3, 6, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data: FixtureData = {
None: self.generate_fixture_data(stat, 0.8, 0.2, 4, 0.3, 6, partial_sum=True),
installation_data: Mapping[Optional[str], List[int]] = {
None: self.generate_fixture_data(stat, .8, .2, 4, .3, 6, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
stat = COUNT_STATS["7day_actives::day"]
stat = COUNT_STATS['realm_active_humans::day']
realm_data = {
None: self.generate_fixture_data(stat, 0.2, 0.07, 3, 0.3, 6, partial_sum=True),
None: self.generate_fixture_data(stat, .1, .03, 3, .5, 3, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
None: self.generate_fixture_data(stat, 2, 0.7, 4, 0.3, 6, partial_sum=True),
None: self.generate_fixture_data(stat, 1, .3, 4, .5, 3, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
stat = COUNT_STATS["realm_active_humans::day"]
stat = COUNT_STATS['active_users_audit:is_bot:day']
realm_data = {
None: self.generate_fixture_data(stat, 0.8, 0.08, 3, 0.5, 3, partial_sum=True),
'false': self.generate_fixture_data(stat, .1, .03, 3.5, .8, 2, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
None: self.generate_fixture_data(stat, 1, 0.3, 4, 0.5, 3, partial_sum=True),
'false': self.generate_fixture_data(stat, 1, .3, 6, .8, 2, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
stat = COUNT_STATS["active_users_audit:is_bot:day"]
realm_data = {
"false": self.generate_fixture_data(stat, 1, 0.2, 3.5, 0.8, 2, partial_sum=True),
"true": self.generate_fixture_data(stat, 0.3, 0.05, 3, 0.3, 2, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
"false": self.generate_fixture_data(stat, 3, 1, 4, 0.8, 2, partial_sum=True),
"true": self.generate_fixture_data(stat, 1, 0.4, 4, 0.8, 2, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
stat = COUNT_STATS["messages_sent:is_bot:hour"]
user_data: FixtureData = {
"false": self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8, holiday_rate=0.1),
stat = COUNT_STATS['messages_sent:is_bot:hour']
user_data: Mapping[Optional[str], List[int]] = {
'false': self.generate_fixture_data(stat, 2, 1, 1.5, .6, 8, holiday_rate=.1),
}
insert_fixture_data(stat, user_data, UserCount)
realm_data = {
"false": self.generate_fixture_data(stat, 35, 15, 6, 0.6, 4),
"true": self.generate_fixture_data(stat, 15, 15, 3, 0.4, 2),
}
realm_data = {'false': self.generate_fixture_data(stat, 35, 15, 6, .6, 4),
'true': self.generate_fixture_data(stat, 15, 15, 3, .4, 2)}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
"false": self.generate_fixture_data(stat, 350, 150, 6, 0.6, 4),
"true": self.generate_fixture_data(stat, 150, 150, 3, 0.4, 2),
}
installation_data = {'false': self.generate_fixture_data(stat, 350, 150, 6, .6, 4),
'true': self.generate_fixture_data(stat, 150, 150, 3, .4, 2)}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
stat = COUNT_STATS["messages_sent:message_type:day"]
stat = COUNT_STATS['messages_sent:message_type:day']
user_data = {
"public_stream": self.generate_fixture_data(stat, 1.5, 1, 3, 0.6, 8),
"private_message": self.generate_fixture_data(stat, 0.5, 0.3, 1, 0.6, 8),
"huddle_message": self.generate_fixture_data(stat, 0.2, 0.2, 2, 0.6, 8),
}
'public_stream': self.generate_fixture_data(stat, 1.5, 1, 3, .6, 8),
'private_message': self.generate_fixture_data(stat, .5, .3, 1, .6, 8),
'huddle_message': self.generate_fixture_data(stat, .2, .2, 2, .6, 8)}
insert_fixture_data(stat, user_data, UserCount)
realm_data = {
"public_stream": self.generate_fixture_data(stat, 30, 8, 5, 0.6, 4),
"private_stream": self.generate_fixture_data(stat, 7, 7, 5, 0.6, 4),
"private_message": self.generate_fixture_data(stat, 13, 5, 5, 0.6, 4),
"huddle_message": self.generate_fixture_data(stat, 6, 3, 3, 0.6, 4),
}
'public_stream': self.generate_fixture_data(stat, 30, 8, 5, .6, 4),
'private_stream': self.generate_fixture_data(stat, 7, 7, 5, .6, 4),
'private_message': self.generate_fixture_data(stat, 13, 5, 5, .6, 4),
'huddle_message': self.generate_fixture_data(stat, 6, 3, 3, .6, 4)}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
"public_stream": self.generate_fixture_data(stat, 300, 80, 5, 0.6, 4),
"private_stream": self.generate_fixture_data(stat, 70, 70, 5, 0.6, 4),
"private_message": self.generate_fixture_data(stat, 130, 50, 5, 0.6, 4),
"huddle_message": self.generate_fixture_data(stat, 60, 30, 3, 0.6, 4),
}
'public_stream': self.generate_fixture_data(stat, 300, 80, 5, .6, 4),
'private_stream': self.generate_fixture_data(stat, 70, 70, 5, .6, 4),
'private_message': self.generate_fixture_data(stat, 130, 50, 5, .6, 4),
'huddle_message': self.generate_fixture_data(stat, 60, 30, 3, .6, 4)}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
website, created = Client.objects.get_or_create(name="website")
old_desktop, created = Client.objects.get_or_create(name="desktop app Linux 0.3.7")
android, created = Client.objects.get_or_create(name="ZulipAndroid")
iOS, created = Client.objects.get_or_create(name="ZulipiOS")
react_native, created = Client.objects.get_or_create(name="ZulipMobile")
API, created = Client.objects.get_or_create(name="API: Python")
zephyr_mirror, created = Client.objects.get_or_create(name="zephyr_mirror")
unused, created = Client.objects.get_or_create(name="unused")
long_webhook, created = Client.objects.get_or_create(name="ZulipLooooooooooongNameWebhook")
website, created = Client.objects.get_or_create(name='website')
old_desktop, created = Client.objects.get_or_create(name='desktop app Linux 0.3.7')
android, created = Client.objects.get_or_create(name='ZulipAndroid')
iOS, created = Client.objects.get_or_create(name='ZulipiOS')
react_native, created = Client.objects.get_or_create(name='ZulipMobile')
API, created = Client.objects.get_or_create(name='API: Python')
zephyr_mirror, created = Client.objects.get_or_create(name='zephyr_mirror')
unused, created = Client.objects.get_or_create(name='unused')
long_webhook, created = Client.objects.get_or_create(name='ZulipLooooooooooongNameWebhook')
stat = COUNT_STATS["messages_sent:client:day"]
stat = COUNT_STATS['messages_sent:client:day']
user_data = {
website.id: self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8),
zephyr_mirror.id: self.generate_fixture_data(stat, 0, 0.3, 1.5, 0.6, 8),
}
website.id: self.generate_fixture_data(stat, 2, 1, 1.5, .6, 8),
zephyr_mirror.id: self.generate_fixture_data(stat, 0, .3, 1.5, .6, 8)}
insert_fixture_data(stat, user_data, UserCount)
realm_data = {
website.id: self.generate_fixture_data(stat, 30, 20, 5, 0.6, 3),
old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, 0.6, 3),
android.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
iOS.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
API.id: self.generate_fixture_data(stat, 5, 5, 5, 0.6, 3),
zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, 0.6, 3),
website.id: self.generate_fixture_data(stat, 30, 20, 5, .6, 3),
old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, .6, 3),
android.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3),
iOS.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3),
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, .6, 3),
API.id: self.generate_fixture_data(stat, 5, 5, 5, .6, 3),
zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, .6, 3),
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
}
long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3)}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
website.id: self.generate_fixture_data(stat, 300, 200, 5, 0.6, 3),
old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, 0.6, 3),
android.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
iOS.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
API.id: self.generate_fixture_data(stat, 50, 50, 5, 0.6, 3),
zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, 0.6, 3),
website.id: self.generate_fixture_data(stat, 300, 200, 5, .6, 3),
old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, .6, 3),
android.id: self.generate_fixture_data(stat, 50, 50, 2, .6, 3),
iOS.id: self.generate_fixture_data(stat, 50, 50, 2, .6, 3),
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, .6, 3),
API.id: self.generate_fixture_data(stat, 50, 50, 5, .6, 3),
zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, .6, 3),
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
}
long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, .6, 3)}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
stat = COUNT_STATS["messages_in_stream:is_bot:day"]
realm_data = {
"false": self.generate_fixture_data(stat, 30, 5, 6, 0.6, 4),
"true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3),
}
stat = COUNT_STATS['messages_in_stream:is_bot:day']
realm_data = {'false': self.generate_fixture_data(stat, 30, 5, 6, .6, 4),
'true': self.generate_fixture_data(stat, 20, 2, 3, .2, 3)}
insert_fixture_data(stat, realm_data, RealmCount)
stream_data: Mapping[Union[int, str, None], List[int]] = {
"false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4),
"true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2),
stream_data: Mapping[Optional[str], List[int]] = {
'false': self.generate_fixture_data(stat, 10, 7, 5, .6, 4),
'true': self.generate_fixture_data(stat, 5, 3, 2, .4, 2),
}
insert_fixture_data(stat, stream_data, StreamCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
stat = COUNT_STATS["messages_read::hour"]
stat = COUNT_STATS['messages_read::hour']
user_data = {
None: self.generate_fixture_data(stat, 7, 3, 2, 0.6, 8, holiday_rate=0.1),
None: self.generate_fixture_data(stat, 7, 3, 2, .6, 8, holiday_rate=.1),
}
insert_fixture_data(stat, user_data, UserCount)
realm_data = {None: self.generate_fixture_data(stat, 50, 35, 6, 0.6, 4)}
realm_data = {
None: self.generate_fixture_data(stat, 50, 35, 6, .6, 4)
}
insert_fixture_data(stat, realm_data, RealmCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)


@@ -0,0 +1,160 @@
import datetime
from argparse import ArgumentParser
from typing import Any, List
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from django.utils.timezone import now as timezone_now
from zerver.models import (
Message,
Realm,
Recipient,
Stream,
Subscription,
UserActivity,
UserMessage,
UserProfile,
get_realm,
)
MOBILE_CLIENT_LIST = ["Android", "ios"]
HUMAN_CLIENT_LIST = MOBILE_CLIENT_LIST + ["website"]
human_messages = Message.objects.filter(sending_client__name__in=HUMAN_CLIENT_LIST)
class Command(BaseCommand):
help = "Generate statistics on realm activity."
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
help="realm to generate statistics for")
def active_users(self, realm: Realm) -> List[UserProfile]:
# Has been active (on the website, for now) in the last 7 days.
activity_cutoff = timezone_now() - datetime.timedelta(days=7)
return [activity.user_profile for activity in (
UserActivity.objects.filter(user_profile__realm=realm,
user_profile__is_active=True,
last_visit__gt=activity_cutoff,
query="/json/users/me/pointer",
client__name="website"))]
def messages_sent_by(self, user: UserProfile, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender=user, date_sent__gt=sent_time_cutoff).count()
def total_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return Message.objects.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).count()
def human_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).count()
def api_messages(self, realm: Realm, days_ago: int) -> int:
return (self.total_messages(realm, days_ago) - self.human_messages(realm, days_ago))
def stream_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff,
recipient__type=Recipient.STREAM).count()
def private_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).exclude(
recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.HUDDLE).count()
def group_private_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).exclude(
recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.PERSONAL).count()
def report_percentage(self, numerator: float, denominator: float, text: str) -> None:
if not denominator:
fraction = 0.0
else:
fraction = numerator / float(denominator)
print(f"{fraction * 100:.2f}% of", text)
def handle(self, *args: Any, **options: Any) -> None:
if options['realms']:
try:
realms = [get_realm(string_id) for string_id in options['realms']]
except Realm.DoesNotExist as e:
raise CommandError(e)
else:
realms = Realm.objects.all()
for realm in realms:
print(realm.string_id)
user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
active_users = self.active_users(realm)
num_active = len(active_users)
print(f"{num_active} active users ({len(user_profiles)} total)")
streams = Stream.objects.filter(realm=realm).extra(
tables=['zerver_subscription', 'zerver_recipient'],
where=['zerver_subscription.recipient_id = zerver_recipient.id',
'zerver_recipient.type = 2',
'zerver_recipient.type_id = zerver_stream.id',
'zerver_subscription.active = true']).annotate(count=Count("name"))
print(f"{streams.count()} streams")
for days_ago in (1, 7, 30):
print(f"In last {days_ago} days, users sent:")
sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles]
for quantity in sorted(sender_quantities, reverse=True):
print(quantity, end=' ')
print("")
print(f"{self.stream_messages(realm, days_ago)} stream messages")
print(f"{self.private_messages(realm, days_ago)} one-on-one private messages")
print(f"{self.api_messages(realm, days_ago)} messages sent via the API")
print(f"{self.group_private_messages(realm, days_ago)} group private messages")
num_notifications_enabled = len([x for x in active_users if x.enable_desktop_notifications])
self.report_percentage(num_notifications_enabled, num_active,
"active users have desktop notifications enabled")
num_enter_sends = len([x for x in active_users if x.enter_sends])
self.report_percentage(num_enter_sends, num_active,
"active users have enter-sends")
all_message_count = human_messages.filter(sender__realm=realm).count()
multi_paragraph_message_count = human_messages.filter(
sender__realm=realm, content__contains="\n\n").count()
self.report_percentage(multi_paragraph_message_count, all_message_count,
"all messages are multi-paragraph")
# Starred messages
starrers = UserMessage.objects.filter(user_profile__in=user_profiles,
flags=UserMessage.flags.starred).values(
"user_profile").annotate(count=Count("user_profile"))
print("{} users have starred {} messages".format(
len(starrers), sum([elt["count"] for elt in starrers])))
active_user_subs = Subscription.objects.filter(
user_profile__in=user_profiles, active=True)
# Streams not in home view
non_home_view = active_user_subs.filter(is_muted=True).values(
"user_profile").annotate(count=Count("user_profile"))
print("{} users have {} streams not in home view".format(
len(non_home_view), sum([elt["count"] for elt in non_home_view])))
# Code block markup
markup_messages = human_messages.filter(
sender__realm=realm, content__contains="~~~").values(
"sender").annotate(count=Count("sender"))
print("{} users have used code block markup on {} messages".format(
len(markup_messages), sum([elt["count"] for elt in markup_messages])))
# Notifications for stream messages
notifications = active_user_subs.filter(desktop_notifications=True).values(
"user_profile").annotate(count=Count("user_profile"))
print("{} users receive desktop notifications for {} streams".format(
len(notifications), sum([elt["count"] for elt in notifications])))
print("")


@@ -0,0 +1,56 @@
from argparse import ArgumentParser
from typing import Any
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
from zerver.models import Message, Realm, Recipient, Stream, Subscription, get_realm
class Command(BaseCommand):
help = "Generate statistics on the streams for a realm."
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
help="realm to generate statistics for")
def handle(self, *args: Any, **options: str) -> None:
if options['realms']:
try:
realms = [get_realm(string_id) for string_id in options['realms']]
except Realm.DoesNotExist as e:
raise CommandError(e)
else:
realms = Realm.objects.all()
for realm in realms:
streams = Stream.objects.filter(realm=realm).exclude(Q(name__istartswith="tutorial-"))
# private stream count
private_count = 0
# public stream count
public_count = 0
for stream in streams:
if stream.invite_only:
private_count += 1
else:
public_count += 1
print("------------")
print(realm.string_id, end=' ')
print("{:>10} {} public streams and".format("(", public_count), end=' ')
print(f"{private_count} private streams )")
print("------------")
print("{:>25} {:>15} {:>10} {:>12}".format("stream", "subscribers", "messages", "type"))
for stream in streams:
if stream.invite_only:
stream_type = 'private'
else:
stream_type = 'public'
print(f"{stream.name:>25}", end=' ')
recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id)
print("{:10}".format(len(Subscription.objects.filter(recipient=recipient,
active=True))), end=' ')
num_messages = len(Message.objects.filter(recipient=recipient))
print(f"{num_messages:12}", end=' ')
print(f"{stream_type:>15}")
print("")


@@ -22,29 +22,28 @@ class Command(BaseCommand):
Run as a cron job that runs every hour."""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument(
"--time",
"-t",
help="Update stat tables from current state to "
"--time. Defaults to the current time.",
default=timezone_now().isoformat(),
)
parser.add_argument("--utc", action="store_true", help="Interpret --time in UTC.")
parser.add_argument(
"--stat", "-s", help="CountStat to process. If omitted, all stats are processed."
)
parser.add_argument(
"--verbose", action="store_true", help="Print timing information to stdout."
)
parser.add_argument('--time', '-t',
type=str,
help='Update stat tables from current state to'
'--time. Defaults to the current time.',
default=timezone_now().isoformat())
parser.add_argument('--utc',
action='store_true',
help="Interpret --time in UTC.",
default=False)
parser.add_argument('--stat', '-s',
type=str,
help="CountStat to process. If omitted, all stats are processed.")
parser.add_argument('--verbose',
action='store_true',
help="Print timing information to stdout.",
default=False)
def handle(self, *args: Any, **options: Any) -> None:
try:
os.mkdir(settings.ANALYTICS_LOCK_DIR)
except OSError:
print(
f"{WARNING}Analytics lock {settings.ANALYTICS_LOCK_DIR} is unavailable;"
f" exiting.{ENDC}"
)
print(WARNING + "Analytics lock %s is unavailable; exiting... " + ENDC)
return
try:
@@ -59,37 +58,32 @@ class Command(BaseCommand):
logger.info("No realms, stopping update_analytics_counts")
return
fill_to_time = parse_datetime(options["time"])
assert fill_to_time is not None
if options["utc"]:
fill_to_time = parse_datetime(options['time'])
if options['utc']:
fill_to_time = fill_to_time.replace(tzinfo=timezone.utc)
if fill_to_time.tzinfo is None:
raise ValueError(
"--time must be time-zone-aware. Maybe you meant to use the --utc option?"
)
raise ValueError("--time must be timezone aware. Maybe you meant to use the --utc option?")
fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc))
if options["stat"] is not None:
stats = [COUNT_STATS[options["stat"]]]
if options['stat'] is not None:
stats = [COUNT_STATS[options['stat']]]
else:
stats = list(COUNT_STATS.values())
logger.info("Starting updating analytics counts through %s", fill_to_time)
if options["verbose"]:
if options['verbose']:
start = time.time()
last = start
for stat in stats:
process_count_stat(stat, fill_to_time)
if options["verbose"]:
if options['verbose']:
print(f"Updated {stat.property} in {time.time() - last:.3f}s")
last = time.time()
if options["verbose"]:
print(
f"Finished updating analytics counts through {fill_to_time} in {time.time() - start:.3f}s"
)
if options['verbose']:
print(f"Finished updating analytics counts through {fill_to_time} in {time.time() - start:.3f}s")
logger.info("Finished updating analytics counts through %s", fill_to_time)
if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS:
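
A side note on the locking above: os.mkdir on ANALYTICS_LOCK_DIR works as a simple lock because mkdir either atomically creates the directory or fails, so a second concurrent cron run prints the warning and exits; the matching rmdir presumably sits in a finally clause inside the try block that this excerpt truncates. A minimal sketch of the idiom, with a hypothetical path:

import os

LOCK_DIR = "/tmp/hourly-job.lock"  # hypothetical path, not Zulip's real setting

def run_once() -> None:
    try:
        os.mkdir(LOCK_DIR)  # atomic: raises FileExistsError if another run holds the lock
    except OSError:
        print("another run is in progress; exiting")
        return
    try:
        pass  # ... do the actual hourly work here ...
    finally:
        os.rmdir(LOCK_DIR)  # release the lock even if the work raises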


@@ -0,0 +1,42 @@
import datetime
from argparse import ArgumentParser
from typing import Any
from django.core.management.base import BaseCommand, CommandError
from django.utils.timezone import now as timezone_now
from zerver.models import Message, Realm, Stream, UserProfile, get_realm
class Command(BaseCommand):
help = "Generate statistics on user activity."
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
help="realm to generate statistics for")
def messages_sent_by(self, user: UserProfile, week: int) -> int:
start = timezone_now() - datetime.timedelta(days=(week + 1)*7)
end = timezone_now() - datetime.timedelta(days=week*7)
return Message.objects.filter(sender=user, date_sent__gt=start, date_sent__lte=end).count()
def handle(self, *args: Any, **options: Any) -> None:
if options['realms']:
try:
realms = [get_realm(string_id) for string_id in options['realms']]
except Realm.DoesNotExist as e:
raise CommandError(e)
else:
realms = Realm.objects.all()
for realm in realms:
print(realm.string_id)
user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
print(f"{len(user_profiles)} users")
print(f"{len(Stream.objects.filter(realm=realm))} streams")
for user_profile in user_profiles:
print(f"{user_profile.email:>35}", end=' ')
for week in range(10):
print(f"{self.messages_sent_by(user_profile, week):5}", end=' ')
print("")


@@ -4,205 +4,107 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("zerver", "0030_realm_org_type"),
('zerver', '0030_realm_org_type'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="Anomaly",
name='Anomaly',
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
("info", models.CharField(max_length=1000)),
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('info', models.CharField(max_length=1000)),
],
bases=(models.Model,),
),
migrations.CreateModel(
name="HuddleCount",
name='HuddleCount',
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
(
"huddle",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient"
),
),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
("property", models.CharField(max_length=40)),
("end_time", models.DateTimeField()),
("interval", models.CharField(max_length=20)),
("value", models.BigIntegerField()),
(
"anomaly",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="analytics.Anomaly",
null=True,
),
),
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('huddle', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Recipient')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('property', models.CharField(max_length=40)),
('end_time', models.DateTimeField()),
('interval', models.CharField(max_length=20)),
('value', models.BigIntegerField()),
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
],
bases=(models.Model,),
),
migrations.CreateModel(
name="InstallationCount",
name='InstallationCount',
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
("property", models.CharField(max_length=40)),
("end_time", models.DateTimeField()),
("interval", models.CharField(max_length=20)),
("value", models.BigIntegerField()),
(
"anomaly",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="analytics.Anomaly",
null=True,
),
),
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('property', models.CharField(max_length=40)),
('end_time', models.DateTimeField()),
('interval', models.CharField(max_length=20)),
('value', models.BigIntegerField()),
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
],
bases=(models.Model,),
),
migrations.CreateModel(
name="RealmCount",
name='RealmCount',
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
("property", models.CharField(max_length=40)),
("end_time", models.DateTimeField()),
("interval", models.CharField(max_length=20)),
("value", models.BigIntegerField()),
(
"anomaly",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="analytics.Anomaly",
null=True,
),
),
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
('property', models.CharField(max_length=40)),
('end_time', models.DateTimeField()),
('interval', models.CharField(max_length=20)),
('value', models.BigIntegerField()),
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
],
bases=(models.Model,),
),
migrations.CreateModel(
name="StreamCount",
name='StreamCount',
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
(
"stream",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Stream"
),
),
("property", models.CharField(max_length=40)),
("end_time", models.DateTimeField()),
("interval", models.CharField(max_length=20)),
("value", models.BigIntegerField()),
(
"anomaly",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="analytics.Anomaly",
null=True,
),
),
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
('stream', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Stream')),
('property', models.CharField(max_length=40)),
('end_time', models.DateTimeField()),
('interval', models.CharField(max_length=20)),
('value', models.BigIntegerField()),
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
],
bases=(models.Model,),
),
migrations.CreateModel(
name="UserCount",
name='UserCount',
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
("property", models.CharField(max_length=40)),
("end_time", models.DateTimeField()),
("interval", models.CharField(max_length=20)),
("value", models.BigIntegerField()),
(
"anomaly",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="analytics.Anomaly",
null=True,
),
),
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('property', models.CharField(max_length=40)),
('end_time', models.DateTimeField()),
('interval', models.CharField(max_length=20)),
('value', models.BigIntegerField()),
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
],
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name="usercount",
unique_together={("user", "property", "end_time", "interval")},
name='usercount',
unique_together={('user', 'property', 'end_time', 'interval')},
),
migrations.AlterUniqueTogether(
name="streamcount",
unique_together={("stream", "property", "end_time", "interval")},
name='streamcount',
unique_together={('stream', 'property', 'end_time', 'interval')},
),
migrations.AlterUniqueTogether(
name="realmcount",
unique_together={("realm", "property", "end_time", "interval")},
name='realmcount',
unique_together={('realm', 'property', 'end_time', 'interval')},
),
migrations.AlterUniqueTogether(
name="installationcount",
unique_together={("property", "end_time", "interval")},
name='installationcount',
unique_together={('property', 'end_time', 'interval')},
),
migrations.AlterUniqueTogether(
name="huddlecount",
unique_together={("huddle", "property", "end_time", "interval")},
name='huddlecount',
unique_together={('huddle', 'property', 'end_time', 'interval')},
),
]


@@ -2,28 +2,29 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("analytics", "0001_initial"),
('analytics', '0001_initial'),
]
operations = [
migrations.AlterUniqueTogether(
name="huddlecount",
name='huddlecount',
unique_together=set(),
),
migrations.RemoveField(
model_name="huddlecount",
name="anomaly",
model_name='huddlecount',
name='anomaly',
),
migrations.RemoveField(
model_name="huddlecount",
name="huddle",
model_name='huddlecount',
name='huddle',
),
migrations.RemoveField(
model_name="huddlecount",
name="user",
model_name='huddlecount',
name='user',
),
migrations.DeleteModel(
name="HuddleCount",
name='HuddleCount',
),
]


@@ -2,24 +2,20 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("analytics", "0002_remove_huddlecount"),
('analytics', '0002_remove_huddlecount'),
]
operations = [
migrations.CreateModel(
name="FillState",
name='FillState',
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
("property", models.CharField(unique=True, max_length=40)),
("end_time", models.DateTimeField()),
("state", models.PositiveSmallIntegerField()),
("last_modified", models.DateTimeField(auto_now=True)),
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('property', models.CharField(unique=True, max_length=40)),
('end_time', models.DateTimeField()),
('state', models.PositiveSmallIntegerField()),
('last_modified', models.DateTimeField(auto_now=True)),
],
bases=(models.Model,),
),


@@ -2,29 +2,30 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("analytics", "0003_fillstate"),
('analytics', '0003_fillstate'),
]
operations = [
migrations.AddField(
model_name="installationcount",
name="subgroup",
model_name='installationcount',
name='subgroup',
field=models.CharField(max_length=16, null=True),
),
migrations.AddField(
model_name="realmcount",
name="subgroup",
model_name='realmcount',
name='subgroup',
field=models.CharField(max_length=16, null=True),
),
migrations.AddField(
model_name="streamcount",
name="subgroup",
model_name='streamcount',
name='subgroup',
field=models.CharField(max_length=16, null=True),
),
migrations.AddField(
model_name="usercount",
name="subgroup",
model_name='usercount',
name='subgroup',
field=models.CharField(max_length=16, null=True),
),
]


@@ -2,49 +2,50 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("analytics", "0004_add_subgroup"),
('analytics', '0004_add_subgroup'),
]
operations = [
migrations.AlterField(
model_name="installationcount",
name="interval",
model_name='installationcount',
name='interval',
field=models.CharField(max_length=8),
),
migrations.AlterField(
model_name="installationcount",
name="property",
model_name='installationcount',
name='property',
field=models.CharField(max_length=32),
),
migrations.AlterField(
model_name="realmcount",
name="interval",
model_name='realmcount',
name='interval',
field=models.CharField(max_length=8),
),
migrations.AlterField(
model_name="realmcount",
name="property",
model_name='realmcount',
name='property',
field=models.CharField(max_length=32),
),
migrations.AlterField(
model_name="streamcount",
name="interval",
model_name='streamcount',
name='interval',
field=models.CharField(max_length=8),
),
migrations.AlterField(
model_name="streamcount",
name="property",
model_name='streamcount',
name='property',
field=models.CharField(max_length=32),
),
migrations.AlterField(
model_name="usercount",
name="interval",
model_name='usercount',
name='interval',
field=models.CharField(max_length=8),
),
migrations.AlterField(
model_name="usercount",
name="property",
model_name='usercount',
name='property',
field=models.CharField(max_length=32),
),
]


@@ -2,25 +2,26 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("analytics", "0005_alter_field_size"),
('analytics', '0005_alter_field_size'),
]
operations = [
migrations.AlterUniqueTogether(
name="installationcount",
unique_together={("property", "subgroup", "end_time", "interval")},
name='installationcount',
unique_together={('property', 'subgroup', 'end_time', 'interval')},
),
migrations.AlterUniqueTogether(
name="realmcount",
unique_together={("realm", "property", "subgroup", "end_time", "interval")},
name='realmcount',
unique_together={('realm', 'property', 'subgroup', 'end_time', 'interval')},
),
migrations.AlterUniqueTogether(
name="streamcount",
unique_together={("stream", "property", "subgroup", "end_time", "interval")},
name='streamcount',
unique_together={('stream', 'property', 'subgroup', 'end_time', 'interval')},
),
migrations.AlterUniqueTogether(
name="usercount",
unique_together={("user", "property", "subgroup", "end_time", "interval")},
name='usercount',
unique_together={('user', 'property', 'subgroup', 'end_time', 'interval')},
),
]


@@ -3,41 +3,42 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("analytics", "0006_add_subgroup_to_unique_constraints"),
('analytics', '0006_add_subgroup_to_unique_constraints'),
]
operations = [
migrations.AlterUniqueTogether(
name="installationcount",
unique_together={("property", "subgroup", "end_time")},
name='installationcount',
unique_together={('property', 'subgroup', 'end_time')},
),
migrations.RemoveField(
model_name="installationcount",
name="interval",
model_name='installationcount',
name='interval',
),
migrations.AlterUniqueTogether(
name="realmcount",
unique_together={("realm", "property", "subgroup", "end_time")},
name='realmcount',
unique_together={('realm', 'property', 'subgroup', 'end_time')},
),
migrations.RemoveField(
model_name="realmcount",
name="interval",
model_name='realmcount',
name='interval',
),
migrations.AlterUniqueTogether(
name="streamcount",
unique_together={("stream", "property", "subgroup", "end_time")},
name='streamcount',
unique_together={('stream', 'property', 'subgroup', 'end_time')},
),
migrations.RemoveField(
model_name="streamcount",
name="interval",
model_name='streamcount',
name='interval',
),
migrations.AlterUniqueTogether(
name="usercount",
unique_together={("user", "property", "subgroup", "end_time")},
name='usercount',
unique_together={('user', 'property', 'subgroup', 'end_time')},
),
migrations.RemoveField(
model_name="usercount",
name="interval",
model_name='usercount',
name='interval',
),
]


@@ -3,22 +3,23 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("zerver", "0050_userprofile_avatar_version"),
("analytics", "0007_remove_interval"),
('zerver', '0050_userprofile_avatar_version'),
('analytics', '0007_remove_interval'),
]
operations = [
migrations.AlterIndexTogether(
name="realmcount",
index_together={("property", "end_time")},
name='realmcount',
index_together={('property', 'end_time')},
),
migrations.AlterIndexTogether(
name="streamcount",
index_together={("property", "realm", "end_time")},
name='streamcount',
index_together={('property', 'realm', 'end_time')},
),
migrations.AlterIndexTogether(
name="usercount",
index_together={("property", "realm", "end_time")},
name='usercount',
index_together={('property', 'realm', 'end_time')},
),
]


@@ -1,28 +1,26 @@
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def delete_messages_sent_to_stream_stat(
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
UserCount = apps.get_model("analytics", "UserCount")
StreamCount = apps.get_model("analytics", "StreamCount")
RealmCount = apps.get_model("analytics", "RealmCount")
InstallationCount = apps.get_model("analytics", "InstallationCount")
FillState = apps.get_model("analytics", "FillState")
def delete_messages_sent_to_stream_stat(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserCount = apps.get_model('analytics', 'UserCount')
StreamCount = apps.get_model('analytics', 'StreamCount')
RealmCount = apps.get_model('analytics', 'RealmCount')
InstallationCount = apps.get_model('analytics', 'InstallationCount')
FillState = apps.get_model('analytics', 'FillState')
property = "messages_sent_to_stream:is_bot"
property = 'messages_sent_to_stream:is_bot'
UserCount.objects.filter(property=property).delete()
StreamCount.objects.filter(property=property).delete()
RealmCount.objects.filter(property=property).delete()
InstallationCount.objects.filter(property=property).delete()
FillState.objects.filter(property=property).delete()
class Migration(migrations.Migration):
dependencies = [
("analytics", "0008_add_count_indexes"),
('analytics', '0008_add_count_indexes'),
]
operations = [


@@ -1,27 +1,25 @@
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def clear_message_sent_by_message_type_values(
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
UserCount = apps.get_model("analytics", "UserCount")
StreamCount = apps.get_model("analytics", "StreamCount")
RealmCount = apps.get_model("analytics", "RealmCount")
InstallationCount = apps.get_model("analytics", "InstallationCount")
FillState = apps.get_model("analytics", "FillState")
def clear_message_sent_by_message_type_values(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserCount = apps.get_model('analytics', 'UserCount')
StreamCount = apps.get_model('analytics', 'StreamCount')
RealmCount = apps.get_model('analytics', 'RealmCount')
InstallationCount = apps.get_model('analytics', 'InstallationCount')
FillState = apps.get_model('analytics', 'FillState')
property = "messages_sent:message_type:day"
property = 'messages_sent:message_type:day'
UserCount.objects.filter(property=property).delete()
StreamCount.objects.filter(property=property).delete()
RealmCount.objects.filter(property=property).delete()
InstallationCount.objects.filter(property=property).delete()
FillState.objects.filter(property=property).delete()
class Migration(migrations.Migration):
dependencies = [("analytics", "0009_remove_messages_to_stream_stat")]
dependencies = [('analytics', '0009_remove_messages_to_stream_stat')]
operations = [
migrations.RunPython(clear_message_sent_by_message_type_values),


@@ -1,14 +1,14 @@
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def clear_analytics_tables(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
UserCount = apps.get_model("analytics", "UserCount")
StreamCount = apps.get_model("analytics", "StreamCount")
RealmCount = apps.get_model("analytics", "RealmCount")
InstallationCount = apps.get_model("analytics", "InstallationCount")
FillState = apps.get_model("analytics", "FillState")
def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserCount = apps.get_model('analytics', 'UserCount')
StreamCount = apps.get_model('analytics', 'StreamCount')
RealmCount = apps.get_model('analytics', 'RealmCount')
InstallationCount = apps.get_model('analytics', 'InstallationCount')
FillState = apps.get_model('analytics', 'FillState')
UserCount.objects.all().delete()
StreamCount.objects.all().delete()
@@ -16,10 +16,10 @@ def clear_analytics_tables(apps: StateApps, schema_editor: BaseDatabaseSchemaEdi
InstallationCount.objects.all().delete()
FillState.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
("analytics", "0010_clear_messages_sent_values"),
('analytics', '0010_clear_messages_sent_values'),
]
operations = [


@@ -5,37 +5,30 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("analytics", "0011_clear_analytics_tables"),
('analytics', '0011_clear_analytics_tables'),
]
operations = [
migrations.AlterField(
model_name="installationcount",
name="anomaly",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
),
model_name='installationcount',
name='anomaly',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
),
migrations.AlterField(
model_name="realmcount",
name="anomaly",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
),
model_name='realmcount',
name='anomaly',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
),
migrations.AlterField(
model_name="streamcount",
name="anomaly",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
),
model_name='streamcount',
name='anomaly',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
),
migrations.AlterField(
model_name="usercount",
name="anomaly",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
),
model_name='usercount',
name='anomaly',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
),
]


@@ -4,28 +4,29 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("analytics", "0012_add_on_delete"),
('analytics', '0012_add_on_delete'),
]
operations = [
migrations.RemoveField(
model_name="installationcount",
name="anomaly",
model_name='installationcount',
name='anomaly',
),
migrations.RemoveField(
model_name="realmcount",
name="anomaly",
model_name='realmcount',
name='anomaly',
),
migrations.RemoveField(
model_name="streamcount",
name="anomaly",
model_name='streamcount',
name='anomaly',
),
migrations.RemoveField(
model_name="usercount",
name="anomaly",
model_name='usercount',
name='anomaly',
),
migrations.DeleteModel(
name="Anomaly",
name='Anomaly',
),
]


@@ -4,13 +4,14 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("analytics", "0013_remove_anomaly"),
('analytics', '0013_remove_anomaly'),
]
operations = [
migrations.RemoveField(
model_name="fillstate",
name="last_modified",
model_name='fillstate',
name='last_modified',
),
]


@@ -1,10 +1,10 @@
from django.db import migrations
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models import Count, Sum
def clear_duplicate_counts(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
def clear_duplicate_counts(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
"""This is a preparatory migration for our Analytics tables.
The backstory is that Django's unique_together indexes do not properly
@@ -20,30 +20,24 @@ def clear_duplicate_counts(apps: StateApps, schema_editor: BaseDatabaseSchemaEdi
this means deleting the extra rows, but for LoggingCountStat objects, we need to
additionally combine the sums.
"""
count_tables = dict(
realm=apps.get_model("analytics", "RealmCount"),
user=apps.get_model("analytics", "UserCount"),
stream=apps.get_model("analytics", "StreamCount"),
installation=apps.get_model("analytics", "InstallationCount"),
)
count_tables = dict(realm=apps.get_model('analytics', 'RealmCount'),
user=apps.get_model('analytics', 'UserCount'),
stream=apps.get_model('analytics', 'StreamCount'),
installation=apps.get_model('analytics', 'InstallationCount'))
for name, count_table in count_tables.items():
value = [name, "property", "end_time"]
if name == "installation":
value = ["property", "end_time"]
counts = (
count_table.objects.filter(subgroup=None)
.values(*value)
.annotate(Count("id"), Sum("value"))
.filter(id__count__gt=1)
)
value = [name, 'property', 'end_time']
if name == 'installation':
value = ['property', 'end_time']
counts = count_table.objects.filter(subgroup=None).values(*value).annotate(
Count('id'), Sum('value')).filter(id__count__gt=1)
for count in counts:
count.pop("id__count")
total_value = count.pop("value__sum")
count.pop('id__count')
total_value = count.pop('value__sum')
duplicate_counts = list(count_table.objects.filter(**count))
first_count = duplicate_counts[0]
if count["property"] in ["invites_sent::day", "active_users_log:is_bot:day"]:
if count['property'] in ["invites_sent::day", "active_users_log:is_bot:day"]:
# For LoggingCountStat objects, the right fix is to combine the totals;
# for other CountStat objects, we expect the duplicates to have the same value.
# And so all we need to do is delete them.
@@ -53,12 +47,13 @@ def clear_duplicate_counts(apps: StateApps, schema_editor: BaseDatabaseSchemaEdi
for duplicate_count in to_cleanup:
duplicate_count.delete()
class Migration(migrations.Migration):
dependencies = [
("analytics", "0014_remove_fillstate_last_modified"),
('analytics', '0014_remove_fillstate_last_modified'),
]
operations = [
migrations.RunPython(clear_duplicate_counts, reverse_code=migrations.RunPython.noop),
migrations.RunPython(clear_duplicate_counts,
reverse_code=migrations.RunPython.noop),
]
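
The docstring above alludes to why this cleanup is needed: PostgreSQL unique indexes treat NULLs as distinct, so a plain unique_together index never rejects a second identical row whose subgroup is None. A minimal sketch of that failure mode (a hypothetical helper for illustration only, reusing the RealmCount model and the "invites_sent::day" property referenced in this migration):

from django.utils.timezone import now

from analytics.models import RealmCount


def demonstrate_null_subgroup_duplicate(realm) -> None:
    # Under the old unique_together index, the second insert below does not
    # raise IntegrityError, because NULL subgroup values never compare equal;
    # the leftover duplicate row is exactly what clear_duplicate_counts merges.
    end_time = now()
    RealmCount.objects.create(
        realm=realm, property="invites_sent::day", subgroup=None, end_time=end_time, value=1
    )
    RealmCount.objects.create(
        realm=realm, property="invites_sent::day", subgroup=None, end_time=end_time, value=2
    )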


@@ -4,89 +4,58 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("analytics", "0015_clear_duplicate_counts"),
('analytics', '0015_clear_duplicate_counts'),
]
operations = [
migrations.AlterUniqueTogether(
name="installationcount",
name='installationcount',
unique_together=set(),
),
migrations.AlterUniqueTogether(
name="realmcount",
name='realmcount',
unique_together=set(),
),
migrations.AlterUniqueTogether(
name="streamcount",
name='streamcount',
unique_together=set(),
),
migrations.AlterUniqueTogether(
name="usercount",
name='usercount',
unique_together=set(),
),
migrations.AddConstraint(
model_name="installationcount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=False),
fields=("property", "subgroup", "end_time"),
name="unique_installation_count",
),
model_name='installationcount',
constraint=models.UniqueConstraint(condition=models.Q(subgroup__isnull=False), fields=('property', 'subgroup', 'end_time'), name='unique_installation_count'),
),
migrations.AddConstraint(
model_name="installationcount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=True),
fields=("property", "end_time"),
name="unique_installation_count_null_subgroup",
),
model_name='installationcount',
constraint=models.UniqueConstraint(condition=models.Q(subgroup__isnull=True), fields=('property', 'end_time'), name='unique_installation_count_null_subgroup'),
),
migrations.AddConstraint(
model_name="realmcount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=False),
fields=("realm", "property", "subgroup", "end_time"),
name="unique_realm_count",
),
model_name='realmcount',
constraint=models.UniqueConstraint(condition=models.Q(subgroup__isnull=False), fields=('realm', 'property', 'subgroup', 'end_time'), name='unique_realm_count'),
),
migrations.AddConstraint(
model_name="realmcount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=True),
fields=("realm", "property", "end_time"),
name="unique_realm_count_null_subgroup",
),
model_name='realmcount',
constraint=models.UniqueConstraint(condition=models.Q(subgroup__isnull=True), fields=('realm', 'property', 'end_time'), name='unique_realm_count_null_subgroup'),
),
migrations.AddConstraint(
model_name="streamcount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=False),
fields=("stream", "property", "subgroup", "end_time"),
name="unique_stream_count",
),
model_name='streamcount',
constraint=models.UniqueConstraint(condition=models.Q(subgroup__isnull=False), fields=('stream', 'property', 'subgroup', 'end_time'), name='unique_stream_count'),
),
migrations.AddConstraint(
model_name="streamcount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=True),
fields=("stream", "property", "end_time"),
name="unique_stream_count_null_subgroup",
),
model_name='streamcount',
constraint=models.UniqueConstraint(condition=models.Q(subgroup__isnull=True), fields=('stream', 'property', 'end_time'), name='unique_stream_count_null_subgroup'),
),
migrations.AddConstraint(
model_name="usercount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=False),
fields=("user", "property", "subgroup", "end_time"),
name="unique_user_count",
),
model_name='usercount',
constraint=models.UniqueConstraint(condition=models.Q(subgroup__isnull=False), fields=('user', 'property', 'subgroup', 'end_time'), name='unique_user_count'),
),
migrations.AddConstraint(
model_name="usercount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=True),
fields=("user", "property", "end_time"),
name="unique_user_count_null_subgroup",
),
model_name='usercount',
constraint=models.UniqueConstraint(condition=models.Q(subgroup__isnull=True), fields=('user', 'property', 'end_time'), name='unique_user_count_null_subgroup'),
),
]


@@ -1,4 +1,5 @@
import datetime
from typing import Optional
from django.db import models
from django.db.models import Q, UniqueConstraint
@@ -8,59 +9,60 @@ from zerver.models import Realm, Stream, UserProfile
class FillState(models.Model):
property = models.CharField(max_length=40, unique=True)
end_time = models.DateTimeField()
property: str = models.CharField(max_length=40, unique=True)
end_time: datetime.datetime = models.DateTimeField()
# Valid states are {DONE, STARTED}
DONE = 1
STARTED = 2
state = models.PositiveSmallIntegerField()
state: int = models.PositiveSmallIntegerField()
def __str__(self) -> str:
return f"{self.property} {self.end_time} {self.state}"
return f"<FillState: {self.property} {self.end_time} {self.state}>"
# The earliest/starting end_time in FillState
# We assume there is at least one realm
def installation_epoch() -> datetime.datetime:
earliest_realm_creation = Realm.objects.aggregate(models.Min("date_created"))[
"date_created__min"
]
earliest_realm_creation = Realm.objects.aggregate(models.Min('date_created'))['date_created__min']
return floor_to_day(earliest_realm_creation)
def last_successful_fill(property: str) -> Optional[datetime.datetime]:
fillstate = FillState.objects.filter(property=property).first()
if fillstate is None:
return None
if fillstate.state == FillState.DONE:
return fillstate.end_time
return fillstate.end_time - datetime.timedelta(hours=1)
class BaseCount(models.Model):
# Note: When inheriting from BaseCount, you may want to rearrange
# the order of the columns in the migration to make sure they
# match how you'd like the table to be arranged.
property = models.CharField(max_length=32)
subgroup = models.CharField(max_length=16, null=True)
end_time = models.DateTimeField()
value = models.BigIntegerField()
property: str = models.CharField(max_length=32)
subgroup: Optional[str] = models.CharField(max_length=16, null=True)
end_time: datetime.datetime = models.DateTimeField()
value: int = models.BigIntegerField()
class Meta:
abstract = True
class InstallationCount(BaseCount):
class Meta:
# Handles invalid duplicate InstallationCount data
constraints = [
UniqueConstraint(
fields=["property", "subgroup", "end_time"],
condition=Q(subgroup__isnull=False),
name="unique_installation_count",
),
name='unique_installation_count'),
UniqueConstraint(
fields=["property", "end_time"],
condition=Q(subgroup__isnull=True),
name="unique_installation_count_null_subgroup",
),
name='unique_installation_count_null_subgroup'),
]
def __str__(self) -> str:
return f"{self.property} {self.subgroup} {self.value}"
return f"<InstallationCount: {self.property} {self.subgroup} {self.value}>"
class RealmCount(BaseCount):
realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
@@ -71,19 +73,16 @@ class RealmCount(BaseCount):
UniqueConstraint(
fields=["realm", "property", "subgroup", "end_time"],
condition=Q(subgroup__isnull=False),
name="unique_realm_count",
),
name='unique_realm_count'),
UniqueConstraint(
fields=["realm", "property", "end_time"],
condition=Q(subgroup__isnull=True),
name="unique_realm_count_null_subgroup",
),
name='unique_realm_count_null_subgroup'),
]
index_together = ["property", "end_time"]
def __str__(self) -> str:
return f"{self.realm!r} {self.property} {self.subgroup} {self.value}"
return f"<RealmCount: {self.realm} {self.property} {self.subgroup} {self.value}>"
class UserCount(BaseCount):
user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
@@ -95,21 +94,18 @@ class UserCount(BaseCount):
UniqueConstraint(
fields=["user", "property", "subgroup", "end_time"],
condition=Q(subgroup__isnull=False),
name="unique_user_count",
),
name='unique_user_count'),
UniqueConstraint(
fields=["user", "property", "end_time"],
condition=Q(subgroup__isnull=True),
name="unique_user_count_null_subgroup",
),
name='unique_user_count_null_subgroup'),
]
# This index dramatically improves the performance of
# aggregating from users to realms
index_together = ["property", "realm", "end_time"]
def __str__(self) -> str:
return f"{self.user!r} {self.property} {self.subgroup} {self.value}"
return f"<UserCount: {self.user} {self.property} {self.subgroup} {self.value}>"
class StreamCount(BaseCount):
stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
@@ -121,17 +117,15 @@ class StreamCount(BaseCount):
UniqueConstraint(
fields=["stream", "property", "subgroup", "end_time"],
condition=Q(subgroup__isnull=False),
name="unique_stream_count",
),
name='unique_stream_count'),
UniqueConstraint(
fields=["stream", "property", "end_time"],
condition=Q(subgroup__isnull=True),
name="unique_stream_count_null_subgroup",
),
name='unique_stream_count_null_subgroup'),
]
# This index dramatically improves the performance of
# aggregating from streams to realms
index_together = ["property", "realm", "end_time"]
def __str__(self) -> str:
return f"{self.stream!r} {self.property} {self.subgroup} {self.value} {self.id}"
return f"<StreamCount: {self.stream} {self.property} {self.subgroup} {self.value} {self.id}>"


@@ -1,48 +0,0 @@
from unittest import mock
from django.utils.timezone import now as timezone_now
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import Client, UserActivity, UserProfile, flush_per_request_caches
class ActivityTest(ZulipTestCase):
@mock.patch("stripe.Customer.list", return_value=[])
def test_activity(self, unused_mock: mock.Mock) -> None:
self.login("hamlet")
client, _ = Client.objects.get_or_create(name="website")
query = "/json/messages/flags"
last_visit = timezone_now()
count = 150
for activity_user_profile in UserProfile.objects.all():
UserActivity.objects.get_or_create(
user_profile=activity_user_profile,
client=client,
query=query,
count=count,
last_visit=last_visit,
)
# Fails when not staff
result = self.client_get("/activity")
self.assertEqual(result.status_code, 302)
user_profile = self.example_user("hamlet")
user_profile.is_staff = True
user_profile.save(update_fields=["is_staff"])
flush_per_request_caches()
with self.assert_database_query_count(18):
result = self.client_get("/activity")
self.assertEqual(result.status_code, 200)
flush_per_request_caches()
with self.assert_database_query_count(8):
result = self.client_get("/realm_activity/zulip/")
self.assertEqual(result.status_code, 200)
iago = self.example_user("iago")
flush_per_request_caches()
with self.assert_database_query_count(5):
result = self.client_get(f"/user_activity/{iago.id}/")
self.assertEqual(result.status_code, 200)

File diff suppressed because it is too large


@@ -9,32 +9,22 @@ class TestFixtures(ZulipTestCase):
# test basic business_hour / non_business_hour calculation
# test we get an array of the right length with frequency=CountStat.DAY
data = generate_time_series_data(
days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0
)
days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0)
self.assertEqual(data, [400, 400, 400, 400, 400, 360, 360])
data = generate_time_series_data(
days=1,
business_hours_base=2000,
non_business_hours_base=1500,
growth=2,
spikiness=0,
frequency=CountStat.HOUR,
)
days=1, business_hours_base=2000, non_business_hours_base=1500,
growth=2, spikiness=0, frequency=CountStat.HOUR)
# test we get an array of the right length with frequency=CountStat.HOUR
self.assert_length(data, 24)
self.assertEqual(len(data), 24)
# test that growth doesn't affect the first data point
self.assertEqual(data[0], 2000)
# test that the last data point is growth times what it otherwise would be
self.assertEqual(data[-1], 1500 * 2)
self.assertEqual(data[-1], 1500*2)
# test autocorrelation == 1, since that's the easiest value to test
data = generate_time_series_data(
days=1,
business_hours_base=2000,
non_business_hours_base=2000,
autocorrelation=1,
frequency=CountStat.HOUR,
)
days=1, business_hours_base=2000, non_business_hours_base=2000,
autocorrelation=1, frequency=CountStat.HOUR)
self.assertEqual(data[0], data[1])
self.assertEqual(data[0], data[-1])
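
For reference, the day-level expectations at the top of this test work out if generate_time_series_data treats 8 hours of each weekday as business hours (an assumption, not stated in this diff):

# Weekday total: 8 business hours at 20 plus 16 non-business hours at 15.
assert 20 * 8 + 15 * 16 == 400
# Weekend total: all 24 hours at the non-business rate.
assert 15 * 24 == 360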


@@ -1,629 +0,0 @@
from datetime import datetime, timedelta, timezone
from typing import List, Optional
from django.utils.timezone import now as timezone_now
from analytics.lib.counts import COUNT_STATS, CountStat
from analytics.lib.time_utils import time_range
from analytics.models import FillState, RealmCount, UserCount
from analytics.views.stats import rewrite_client_arrays, sort_by_totals, sort_client_labels
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, datetime_to_timestamp
from zerver.models import Client, get_realm
class TestStatsEndpoint(ZulipTestCase):
def test_stats(self) -> None:
self.user = self.example_user("hamlet")
self.login_user(self.user)
result = self.client_get("/stats")
self.assertEqual(result.status_code, 200)
# Check that we get something back
self.assert_in_response("Zulip analytics for", result)
def test_guest_user_cant_access_stats(self) -> None:
self.user = self.example_user("polonius")
self.login_user(self.user)
result = self.client_get("/stats")
self.assert_json_error(result, "Not allowed for guest users", 400)
result = self.client_get("/json/analytics/chart_data")
self.assert_json_error(result, "Not allowed for guest users", 400)
def test_stats_for_realm(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
result = self.client_get("/stats/realm/zulip/")
self.assertEqual(result.status_code, 302)
result = self.client_get("/stats/realm/not_existing_realm/")
self.assertEqual(result.status_code, 302)
user = self.example_user("hamlet")
user.is_staff = True
user.save(update_fields=["is_staff"])
result = self.client_get("/stats/realm/not_existing_realm/")
self.assertEqual(result.status_code, 404)
result = self.client_get("/stats/realm/zulip/")
self.assertEqual(result.status_code, 200)
self.assert_in_response("Zulip analytics for", result)
def test_stats_for_installation(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
result = self.client_get("/stats/installation")
self.assertEqual(result.status_code, 302)
user = self.example_user("hamlet")
user.is_staff = True
user.save(update_fields=["is_staff"])
result = self.client_get("/stats/installation")
self.assertEqual(result.status_code, 200)
self.assert_in_response("Zulip analytics for", result)
class TestGetChartData(ZulipTestCase):
def setUp(self) -> None:
super().setUp()
self.realm = get_realm("zulip")
self.user = self.example_user("hamlet")
self.login_user(self.user)
self.end_times_hour = [
ceiling_to_hour(self.realm.date_created) + timedelta(hours=i) for i in range(4)
]
self.end_times_day = [
ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(4)
]
def data(self, i: int) -> List[int]:
return [0, 0, i, 0]
def insert_data(
self, stat: CountStat, realm_subgroups: List[Optional[str]], user_subgroups: List[str]
) -> None:
if stat.frequency == CountStat.HOUR:
insert_time = self.end_times_hour[2]
fill_time = self.end_times_hour[-1]
if stat.frequency == CountStat.DAY:
insert_time = self.end_times_day[2]
fill_time = self.end_times_day[-1]
RealmCount.objects.bulk_create(
RealmCount(
property=stat.property,
subgroup=subgroup,
end_time=insert_time,
value=100 + i,
realm=self.realm,
)
for i, subgroup in enumerate(realm_subgroups)
)
UserCount.objects.bulk_create(
UserCount(
property=stat.property,
subgroup=subgroup,
end_time=insert_time,
value=200 + i,
realm=self.realm,
user=self.user,
)
for i, subgroup in enumerate(user_subgroups)
)
FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE)
def test_number_of_humans(self) -> None:
stat = COUNT_STATS["realm_active_humans::day"]
self.insert_data(stat, [None], [])
stat = COUNT_STATS["1day_actives::day"]
self.insert_data(stat, [None], [])
stat = COUNT_STATS["active_users_audit:is_bot:day"]
self.insert_data(stat, ["false"], [])
result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
data = self.assert_json_success(result)
self.assertEqual(
data,
{
"msg": "",
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
"frequency": CountStat.DAY,
"everyone": {
"_1day": self.data(100),
"_15day": self.data(100),
"all_time": self.data(100),
},
"display_order": None,
"result": "success",
},
)
def test_messages_sent_over_time(self) -> None:
stat = COUNT_STATS["messages_sent:is_bot:hour"]
self.insert_data(stat, ["true", "false"], ["false"])
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
)
data = self.assert_json_success(result)
self.assertEqual(
data,
{
"msg": "",
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour],
"frequency": CountStat.HOUR,
"everyone": {"bot": self.data(100), "human": self.data(101)},
"user": {"bot": self.data(0), "human": self.data(200)},
"display_order": None,
"result": "success",
},
)
def test_messages_sent_by_message_type(self) -> None:
stat = COUNT_STATS["messages_sent:message_type:day"]
self.insert_data(
stat, ["public_stream", "private_message"], ["public_stream", "private_stream"]
)
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
)
data = self.assert_json_success(result)
self.assertEqual(
data,
{
"msg": "",
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
"frequency": CountStat.DAY,
"everyone": {
"Public streams": self.data(100),
"Private streams": self.data(0),
"Direct messages": self.data(101),
"Group direct messages": self.data(0),
},
"user": {
"Public streams": self.data(200),
"Private streams": self.data(201),
"Direct messages": self.data(0),
"Group direct messages": self.data(0),
},
"display_order": [
"Direct messages",
"Public streams",
"Private streams",
"Group direct messages",
],
"result": "success",
},
)
def test_messages_sent_by_client(self) -> None:
stat = COUNT_STATS["messages_sent:client:day"]
client1 = Client.objects.create(name="client 1")
client2 = Client.objects.create(name="client 2")
client3 = Client.objects.create(name="client 3")
client4 = Client.objects.create(name="client 4")
self.insert_data(
stat,
[str(client4.id), str(client3.id), str(client2.id)],
[str(client3.id), str(client1.id)],
)
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"}
)
data = self.assert_json_success(result)
self.assertEqual(
data,
{
"msg": "",
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
"frequency": CountStat.DAY,
"everyone": {
"client 4": self.data(100),
"client 3": self.data(101),
"client 2": self.data(102),
},
"user": {"client 3": self.data(200), "client 1": self.data(201)},
"display_order": ["client 1", "client 2", "client 3", "client 4"],
"result": "success",
},
)
def test_messages_read_over_time(self) -> None:
stat = COUNT_STATS["messages_read::hour"]
self.insert_data(stat, [None], [])
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_read_over_time"}
)
data = self.assert_json_success(result)
self.assertEqual(
data,
{
"msg": "",
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour],
"frequency": CountStat.HOUR,
"everyone": {"read": self.data(100)},
"user": {"read": self.data(0)},
"display_order": None,
"result": "success",
},
)
def test_include_empty_subgroups(self) -> None:
FillState.objects.create(
property="realm_active_humans::day",
end_time=self.end_times_day[0],
state=FillState.DONE,
)
result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
data = self.assert_json_success(result)
self.assertEqual(data["everyone"], {"_1day": [0], "_15day": [0], "all_time": [0]})
self.assertFalse("user" in data)
FillState.objects.create(
property="messages_sent:is_bot:hour",
end_time=self.end_times_hour[0],
state=FillState.DONE,
)
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
)
data = self.assert_json_success(result)
self.assertEqual(data["everyone"], {"human": [0], "bot": [0]})
self.assertEqual(data["user"], {"human": [0], "bot": [0]})
FillState.objects.create(
property="messages_sent:message_type:day",
end_time=self.end_times_day[0],
state=FillState.DONE,
)
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
)
data = self.assert_json_success(result)
self.assertEqual(
data["everyone"],
{
"Public streams": [0],
"Private streams": [0],
"Direct messages": [0],
"Group direct messages": [0],
},
)
self.assertEqual(
data["user"],
{
"Public streams": [0],
"Private streams": [0],
"Direct messages": [0],
"Group direct messages": [0],
},
)
FillState.objects.create(
property="messages_sent:client:day",
end_time=self.end_times_day[0],
state=FillState.DONE,
)
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"}
)
data = self.assert_json_success(result)
self.assertEqual(data["everyone"], {})
self.assertEqual(data["user"], {})
def test_start_and_end(self) -> None:
stat = COUNT_STATS["realm_active_humans::day"]
self.insert_data(stat, [None], [])
stat = COUNT_STATS["1day_actives::day"]
self.insert_data(stat, [None], [])
stat = COUNT_STATS["active_users_audit:is_bot:day"]
self.insert_data(stat, ["false"], [])
end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day]
# valid start and end
result = self.client_get(
"/json/analytics/chart_data",
{
"chart_name": "number_of_humans",
"start": end_time_timestamps[1],
"end": end_time_timestamps[2],
},
)
data = self.assert_json_success(result)
self.assertEqual(data["end_times"], end_time_timestamps[1:3])
self.assertEqual(
data["everyone"], {"_1day": [0, 100], "_15day": [0, 100], "all_time": [0, 100]}
)
# start later than end
result = self.client_get(
"/json/analytics/chart_data",
{
"chart_name": "number_of_humans",
"start": end_time_timestamps[2],
"end": end_time_timestamps[1],
},
)
self.assert_json_error_contains(result, "Start time is later than")
def test_min_length(self) -> None:
stat = COUNT_STATS["realm_active_humans::day"]
self.insert_data(stat, [None], [])
stat = COUNT_STATS["1day_actives::day"]
self.insert_data(stat, [None], [])
stat = COUNT_STATS["active_users_audit:is_bot:day"]
self.insert_data(stat, ["false"], [])
# test min_length is too short to change anything
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 2}
)
data = self.assert_json_success(result)
self.assertEqual(
data["end_times"], [datetime_to_timestamp(dt) for dt in self.end_times_day]
)
self.assertEqual(
data["everyone"],
{"_1day": self.data(100), "_15day": self.data(100), "all_time": self.data(100)},
)
# test min_length larger than filled data
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 5}
)
data = self.assert_json_success(result)
end_times = [
ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)
]
self.assertEqual(data["end_times"], [datetime_to_timestamp(dt) for dt in end_times])
self.assertEqual(
data["everyone"],
{
"_1day": [0, *self.data(100)],
"_15day": [0, *self.data(100)],
"all_time": [0, *self.data(100)],
},
)
def test_non_existent_chart(self) -> None:
result = self.client_get("/json/analytics/chart_data", {"chart_name": "does_not_exist"})
self.assert_json_error_contains(result, "Unknown chart name")
def test_analytics_not_running(self) -> None:
realm = get_realm("zulip")
self.assertEqual(FillState.objects.count(), 0)
realm.date_created = timezone_now() - timedelta(days=3)
realm.save(update_fields=["date_created"])
with self.assertLogs(level="WARNING") as m:
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
)
self.assertEqual(
m.output,
[
f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?"
],
)
self.assert_json_error_contains(result, "No analytics data available")
realm.date_created = timezone_now() - timedelta(days=1, hours=2)
realm.save(update_fields=["date_created"])
with self.assertLogs(level="WARNING") as m:
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
)
self.assertEqual(
m.output,
[
f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?"
],
)
self.assert_json_error_contains(result, "No analytics data available")
realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
realm.save(update_fields=["date_created"])
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
)
self.assert_json_success(result)
realm.date_created = timezone_now() - timedelta(hours=10)
realm.save(update_fields=["date_created"])
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
)
self.assert_json_success(result)
end_time = timezone_now() - timedelta(days=5)
fill_state = FillState.objects.create(
property="messages_sent:is_bot:hour", end_time=end_time, state=FillState.DONE
)
realm.date_created = timezone_now() - timedelta(days=3)
realm.save(update_fields=["date_created"])
with self.assertLogs(level="WARNING") as m:
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
)
self.assertEqual(
m.output,
[
f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?"
],
)
self.assert_json_error_contains(result, "No analytics data available")
realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
realm.save(update_fields=["date_created"])
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
)
self.assert_json_success(result)
end_time = timezone_now() - timedelta(days=2)
fill_state.end_time = end_time
fill_state.save(update_fields=["end_time"])
realm.date_created = timezone_now() - timedelta(days=3)
realm.save(update_fields=["date_created"])
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
)
self.assert_json_success(result)
realm.date_created = timezone_now() - timedelta(days=1, hours=2)
realm.save(update_fields=["date_created"])
with self.assertLogs(level="WARNING") as m:
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
)
self.assertEqual(
m.output,
[
f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?"
],
)
self.assert_json_error_contains(result, "No analytics data available")
realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
realm.save(update_fields=["date_created"])
result = self.client_get(
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
)
self.assert_json_success(result)
def test_get_chart_data_for_realm(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
result = self.client_get(
"/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"}
)
self.assert_json_error(result, "Must be an server administrator", 400)
user = self.example_user("hamlet")
user.is_staff = True
user.save(update_fields=["is_staff"])
stat = COUNT_STATS["realm_active_humans::day"]
self.insert_data(stat, [None], [])
result = self.client_get(
"/json/analytics/chart_data/realm/not_existing_realm",
{"chart_name": "number_of_humans"},
)
self.assert_json_error(result, "Invalid organization", 400)
result = self.client_get(
"/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"}
)
self.assert_json_success(result)
def test_get_chart_data_for_installation(self) -> None:
user = self.example_user("hamlet")
self.login_user(user)
result = self.client_get(
"/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"}
)
self.assert_json_error(result, "Must be an server administrator", 400)
user = self.example_user("hamlet")
user.is_staff = True
user.save(update_fields=["is_staff"])
stat = COUNT_STATS["realm_active_humans::day"]
self.insert_data(stat, [None], [])
result = self.client_get(
"/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"}
)
self.assert_json_success(result)
class TestGetChartDataHelpers(ZulipTestCase):
def test_sort_by_totals(self) -> None:
empty: List[int] = []
value_arrays = {"c": [0, 1], "a": [9], "b": [1, 1, 1], "d": empty}
self.assertEqual(sort_by_totals(value_arrays), ["a", "b", "c", "d"])
def test_sort_client_labels(self) -> None:
data = {
"everyone": {"a": [16], "c": [15], "b": [14], "e": [13], "d": [12], "h": [11]},
"user": {"a": [6], "b": [5], "d": [4], "e": [3], "f": [2], "g": [1]},
}
self.assertEqual(sort_client_labels(data), ["a", "b", "c", "d", "e", "f", "g", "h"])
class TestTimeRange(ZulipTestCase):
def test_time_range(self) -> None:
HOUR = timedelta(hours=1)
DAY = timedelta(days=1)
a_time = datetime(2016, 3, 14, 22, 59, tzinfo=timezone.utc)
floor_hour = datetime(2016, 3, 14, 22, tzinfo=timezone.utc)
floor_day = datetime(2016, 3, 14, tzinfo=timezone.utc)
# test start == end
self.assertEqual(time_range(a_time, a_time, CountStat.HOUR, None), [])
self.assertEqual(time_range(a_time, a_time, CountStat.DAY, None), [])
# test start == end == boundary, and min_length == 0
self.assertEqual(time_range(floor_hour, floor_hour, CountStat.HOUR, 0), [floor_hour])
self.assertEqual(time_range(floor_day, floor_day, CountStat.DAY, 0), [floor_day])
# test start and end on different boundaries
self.assertEqual(
time_range(floor_hour, floor_hour + HOUR, CountStat.HOUR, None),
[floor_hour, floor_hour + HOUR],
)
self.assertEqual(
time_range(floor_day, floor_day + DAY, CountStat.DAY, None),
[floor_day, floor_day + DAY],
)
# test min_length
self.assertEqual(
time_range(floor_hour, floor_hour + HOUR, CountStat.HOUR, 4),
[floor_hour - 2 * HOUR, floor_hour - HOUR, floor_hour, floor_hour + HOUR],
)
self.assertEqual(
time_range(floor_day, floor_day + DAY, CountStat.DAY, 4),
[floor_day - 2 * DAY, floor_day - DAY, floor_day, floor_day + DAY],
)
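
The assertions above pin down time_range's contract: walk backwards from the floored end time in hour or day steps until reaching start, then pad further back when min_length asks for more points. A minimal sketch consistent with those assertions (an illustration, not the implementation under test; it assumes floor_to_hour/floor_to_day helpers like those in zerver.lib.timestamp):

from datetime import datetime, timedelta
from typing import List, Optional

from analytics.lib.counts import CountStat
from zerver.lib.timestamp import floor_to_day, floor_to_hour


def time_range_sketch(
    start: datetime, end: datetime, frequency: str, min_length: Optional[int]
) -> List[datetime]:
    if frequency == CountStat.HOUR:
        end = floor_to_hour(end)
        step = timedelta(hours=1)
    else:
        end = floor_to_day(end)
        step = timedelta(days=1)
    # min_length pads the range backwards so at least that many points exist.
    if min_length is not None:
        start = min(start, end - (min_length - 1) * step)
    times = []
    current = end
    # Collect floored end_times from `end` back down to `start`, inclusive.
    while current >= start:
        times.append(current)
        current -= step
    return list(reversed(times))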
class TestMapArrays(ZulipTestCase):
def test_map_arrays(self) -> None:
a = {
"desktop app 1.0": [1, 2, 3],
"desktop app 2.0": [10, 12, 13],
"desktop app 3.0": [21, 22, 23],
"website": [1, 2, 3],
"ZulipiOS": [1, 2, 3],
"ZulipElectron": [2, 5, 7],
"ZulipMobile": [1, 5, 7],
"ZulipPython": [1, 2, 3],
"API: Python": [1, 2, 3],
"SomethingRandom": [4, 5, 6],
"ZulipGitHubWebhook": [7, 7, 9],
"ZulipAndroid": [64, 63, 65],
"ZulipTerminal": [9, 10, 11],
}
result = rewrite_client_arrays(a)
self.assertEqual(
result,
{
"Old desktop app": [32, 36, 39],
"Old iOS app": [1, 2, 3],
"Desktop app": [2, 5, 7],
"Mobile app": [1, 5, 7],
"Web app": [1, 2, 3],
"Python API": [2, 4, 6],
"SomethingRandom": [4, 5, 6],
"GitHub webhook": [7, 7, 9],
"Old Android app": [64, 63, 65],
"Terminal app": [9, 10, 11],
},
)
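
As a quick sanity check of the expected mapping above: "Old desktop app" is the elementwise sum of the three legacy desktop series, and "Python API" combines the "ZulipPython" and "API: Python" series.

desktop = [[1, 2, 3], [10, 12, 13], [21, 22, 23]]
print([sum(col) for col in zip(*desktop)])  # [32, 36, 39] -> "Old desktop app"
print([a + b for a, b in zip([1, 2, 3], [1, 2, 3])])  # [2, 4, 6] -> "Python API"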


@@ -1,704 +0,0 @@
from datetime import datetime, timedelta, timezone
from typing import TYPE_CHECKING, Optional
from unittest import mock
import orjson
from django.utils.timezone import now as timezone_now
from corporate.lib.stripe import add_months, update_sponsorship_status
from corporate.models import Customer, CustomerPlan, LicenseLedger, get_customer_by_realm
from zerver.actions.invites import do_create_multiuse_invite_link
from zerver.actions.realm_settings import do_change_realm_org_type, do_send_realm_reactivation_email
from zerver.actions.user_settings import do_change_user_setting
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import reset_email_visibility_to_everyone_in_zulip_realm
from zerver.models import (
MultiuseInvite,
PreregistrationUser,
Realm,
UserMessage,
UserProfile,
get_org_type_display_name,
get_realm,
)
if TYPE_CHECKING:
from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse
class TestSupportEndpoint(ZulipTestCase):
def test_search(self) -> None:
reset_email_visibility_to_everyone_in_zulip_realm()
lear_user = self.lear_user("king")
lear_user.is_staff = True
lear_user.save(update_fields=["is_staff"])
lear_realm = get_realm("lear")
def assert_user_details_in_html_response(
html_response: "TestHttpResponse", full_name: str, email: str, role: str
) -> None:
self.assert_in_success_response(
[
'<span class="label">user</span>\n',
f"<h3>{full_name}</h3>",
f"<b>Email</b>: {email}",
"<b>Is active</b>: True<br />",
f"<b>Role</b>: {role}<br />",
],
html_response,
)
def create_invitation(
stream: str, invitee_email: str, realm: Optional[Realm] = None
) -> None:
invite_expires_in_minutes = 10 * 24 * 60
self.client_post(
"/json/invites",
{
"invitee_emails": [invitee_email],
"stream_ids": orjson.dumps([self.get_stream_id(stream, realm)]).decode(),
"invite_expires_in_minutes": invite_expires_in_minutes,
"invite_as": PreregistrationUser.INVITE_AS["MEMBER"],
},
subdomain=realm.string_id if realm is not None else "zulip",
)
def check_hamlet_user_query_result(result: "TestHttpResponse") -> None:
assert_user_details_in_html_response(
result, "King Hamlet", self.example_email("hamlet"), "Member"
)
self.assert_in_success_response(
[
f"<b>Admins</b>: {self.example_email('iago')}\n",
f"<b>Owners</b>: {self.example_email('desdemona')}\n",
'class="copy-button" data-copytext="{}">'.format(self.example_email("iago")),
'class="copy-button" data-copytext="{}">'.format(
self.example_email("desdemona")
),
],
result,
)
def check_lear_user_query_result(result: "TestHttpResponse") -> None:
assert_user_details_in_html_response(
result, lear_user.full_name, lear_user.email, "Member"
)
def check_othello_user_query_result(result: "TestHttpResponse") -> None:
assert_user_details_in_html_response(
result, "Othello, the Moor of Venice", self.example_email("othello"), "Member"
)
def check_polonius_user_query_result(result: "TestHttpResponse") -> None:
assert_user_details_in_html_response(
result, "Polonius", self.example_email("polonius"), "Guest"
)
def check_zulip_realm_query_result(result: "TestHttpResponse") -> None:
zulip_realm = get_realm("zulip")
first_human_user = zulip_realm.get_first_human_user()
assert first_human_user is not None
self.assert_in_success_response(
[
f"<b>First human user</b>: {first_human_user.delivery_email}\n",
f'<input type="hidden" name="realm_id" value="{zulip_realm.id}"',
"Zulip Dev</h3>",
'<option value="1" selected>Self-hosted</option>',
'<option value="2" >Limited</option>',
'input type="number" name="discount" value="None"',
'<option value="active" selected>Active</option>',
'<option value="deactivated" >Deactivated</option>',
f'<option value="{zulip_realm.org_type}" selected>',
'scrub-realm-button">',
'data-string-id="zulip"',
],
result,
)
def check_lear_realm_query_result(result: "TestHttpResponse") -> None:
self.assert_in_success_response(
[
f'<input type="hidden" name="realm_id" value="{lear_realm.id}"',
"Lear &amp; Co.</h3>",
'<option value="1" selected>Self-hosted</option>',
'<option value="2" >Limited</option>',
'input type="number" name="discount" value="None"',
'<option value="active" selected>Active</option>',
'<option value="deactivated" >Deactivated</option>',
'scrub-realm-button">',
'data-string-id="lear"',
"<b>Name</b>: Zulip Cloud Standard",
"<b>Status</b>: Active",
"<b>Billing schedule</b>: Annual",
"<b>Licenses</b>: 2/10 (Manual)",
"<b>Price per license</b>: $80.0",
"<b>Next invoice date</b>: 02 January 2017",
'<option value="send_invoice" selected>',
'<option value="charge_automatically" >',
],
result,
)
def check_preregistration_user_query_result(
result: "TestHttpResponse", email: str, invite: bool = False
) -> None:
self.assert_in_success_response(
[
'<span class="label">preregistration user</span>\n',
f"<b>Email</b>: {email}",
],
result,
)
if invite:
self.assert_in_success_response(['<span class="label">invite</span>'], result)
self.assert_in_success_response(
[
"<b>Expires in</b>: 1\xa0week, 3\xa0days",
"<b>Status</b>: Link has not been used",
],
result,
)
self.assert_in_success_response([], result)
else:
self.assert_not_in_success_response(['<span class="label">invite</span>'], result)
self.assert_in_success_response(
[
"<b>Expires in</b>: 1\xa0day",
"<b>Status</b>: Link has not been used",
],
result,
)
def check_realm_creation_query_result(result: "TestHttpResponse", email: str) -> None:
self.assert_in_success_response(
[
'<span class="label">preregistration user</span>\n',
'<span class="label">realm creation</span>\n',
"<b>Link</b>: http://testserver/accounts/do_confirm/",
"<b>Expires in</b>: 1\xa0day",
],
result,
)
def check_multiuse_invite_link_query_result(result: "TestHttpResponse") -> None:
self.assert_in_success_response(
[
'<span class="label">multiuse invite</span>\n',
"<b>Link</b>: http://zulip.testserver/join/",
"<b>Expires in</b>: 1\xa0week, 3\xa0days",
],
result,
)
def check_realm_reactivation_link_query_result(result: "TestHttpResponse") -> None:
self.assert_in_success_response(
[
'<span class="label">realm reactivation</span>\n',
"<b>Link</b>: http://zulip.testserver/reactivate/",
"<b>Expires in</b>: 1\xa0day",
],
result,
)
def get_check_query_result(
query: str, count: int, subdomain: str = "zulip"
) -> "TestHttpResponse":
result = self.client_get("/activity/support", {"q": query}, subdomain=subdomain)
self.assertEqual(result.content.decode().count("support-query-result"), count)
return result
self.login("cordelia")
result = self.client_get("/activity/support")
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
self.login("iago")
do_change_user_setting(
self.example_user("hamlet"),
"email_address_visibility",
UserProfile.EMAIL_ADDRESS_VISIBILITY_NOBODY,
acting_user=None,
)
customer = Customer.objects.create(realm=lear_realm, stripe_customer_id="cus_123")
now = datetime(2016, 1, 2, tzinfo=timezone.utc)
plan = CustomerPlan.objects.create(
customer=customer,
billing_cycle_anchor=now,
billing_schedule=CustomerPlan.ANNUAL,
tier=CustomerPlan.STANDARD,
price_per_license=8000,
next_invoice_date=add_months(now, 12),
)
LicenseLedger.objects.create(
licenses=10,
licenses_at_next_renewal=10,
event_time=timezone_now(),
is_renewal=True,
plan=plan,
)
result = self.client_get("/activity/support")
self.assert_in_success_response(
['<input type="text" name="q" class="input-xxlarge search-query"'], result
)
result = get_check_query_result(self.example_email("hamlet"), 1)
check_hamlet_user_query_result(result)
check_zulip_realm_query_result(result)
result = get_check_query_result(lear_user.email, 1)
check_lear_user_query_result(result)
check_lear_realm_query_result(result)
result = get_check_query_result(self.example_email("polonius"), 1)
check_polonius_user_query_result(result)
check_zulip_realm_query_result(result)
result = get_check_query_result("lear", 1)
check_lear_realm_query_result(result)
result = get_check_query_result("http://lear.testserver", 1)
check_lear_realm_query_result(result)
with self.settings(REALM_HOSTS={"zulip": "localhost"}):
result = get_check_query_result("http://localhost", 1)
check_zulip_realm_query_result(result)
result = get_check_query_result("hamlet@zulip.com, lear", 2)
check_hamlet_user_query_result(result)
check_zulip_realm_query_result(result)
check_lear_realm_query_result(result)
result = get_check_query_result("King hamlet,lear", 2)
check_hamlet_user_query_result(result)
check_zulip_realm_query_result(result)
check_lear_realm_query_result(result)
result = get_check_query_result("Othello, the Moor of Venice", 1)
check_othello_user_query_result(result)
check_zulip_realm_query_result(result)
result = get_check_query_result("lear, Hamlet <hamlet@zulip.com>", 2)
check_hamlet_user_query_result(result)
check_zulip_realm_query_result(result)
check_lear_realm_query_result(result)
with mock.patch(
"analytics.views.support.timezone_now",
return_value=timezone_now() - timedelta(minutes=50),
):
self.client_post("/accounts/home/", {"email": self.nonreg_email("test")})
self.login("iago")
result = get_check_query_result(self.nonreg_email("test"), 1)
check_preregistration_user_query_result(result, self.nonreg_email("test"))
check_zulip_realm_query_result(result)
create_invitation("Denmark", self.nonreg_email("test1"))
result = get_check_query_result(self.nonreg_email("test1"), 1)
check_preregistration_user_query_result(result, self.nonreg_email("test1"), invite=True)
check_zulip_realm_query_result(result)
email = self.nonreg_email("alice")
self.submit_realm_creation_form(
email, realm_subdomain="zuliptest", realm_name="Zulip test"
)
result = get_check_query_result(email, 1)
check_realm_creation_query_result(result, email)
invite_expires_in_minutes = 10 * 24 * 60
do_create_multiuse_invite_link(
self.example_user("hamlet"),
invited_as=1,
invite_expires_in_minutes=invite_expires_in_minutes,
)
result = get_check_query_result("zulip", 2)
check_multiuse_invite_link_query_result(result)
check_zulip_realm_query_result(result)
MultiuseInvite.objects.all().delete()
do_send_realm_reactivation_email(get_realm("zulip"), acting_user=None)
result = get_check_query_result("zulip", 2)
check_realm_reactivation_link_query_result(result)
check_zulip_realm_query_result(result)
lear_nonreg_email = "newguy@lear.org"
self.client_post("/accounts/home/", {"email": lear_nonreg_email}, subdomain="lear")
result = get_check_query_result(lear_nonreg_email, 1)
check_preregistration_user_query_result(result, lear_nonreg_email)
check_lear_realm_query_result(result)
self.login_user(lear_user)
create_invitation("general", "newguy2@lear.org", lear_realm)
result = get_check_query_result("newguy2@lear.org", 1, lear_realm.string_id)
check_preregistration_user_query_result(result, "newguy2@lear.org", invite=True)
check_lear_realm_query_result(result)
def test_get_org_type_display_name(self) -> None:
self.assertEqual(get_org_type_display_name(Realm.ORG_TYPES["business"]["id"]), "Business")
self.assertEqual(get_org_type_display_name(883), "")
def test_unspecified_org_type_correctly_displayed(self) -> None:
"""
Unspecified org type is special in that it is marked to not be shown
on the registration page (because organizations are not meant to be able to choose it),
but should be correctly shown at the /support/ endpoint.
"""
realm = get_realm("zulip")
do_change_realm_org_type(realm, 0, acting_user=None)
self.assertEqual(realm.org_type, 0)
self.login("iago")
result = self.client_get("/activity/support", {"q": "zulip"}, subdomain="zulip")
self.assert_in_success_response(
[
f'<input type="hidden" name="realm_id" value="{realm.id}"',
'<option value="0" selected>',
],
result,
)
@mock.patch("analytics.views.support.update_billing_method_of_current_plan")
def test_change_billing_method(self, m: mock.Mock) -> None:
cordelia = self.example_user("cordelia")
self.login_user(cordelia)
result = self.client_post(
"/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"}
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
iago = self.example_user("iago")
self.login_user(iago)
result = self.client_post(
"/activity/support",
{"realm_id": f"{iago.realm_id}", "billing_method": "charge_automatically"},
)
m.assert_called_once_with(get_realm("zulip"), charge_automatically=True, acting_user=iago)
self.assert_in_success_response(
["Billing method of zulip updated to charge automatically"], result
)
m.reset_mock()
result = self.client_post(
"/activity/support", {"realm_id": f"{iago.realm_id}", "billing_method": "send_invoice"}
)
m.assert_called_once_with(get_realm("zulip"), charge_automatically=False, acting_user=iago)
self.assert_in_success_response(
["Billing method of zulip updated to pay by invoice"], result
)
def test_change_realm_plan_type(self) -> None:
cordelia = self.example_user("cordelia")
self.login_user(cordelia)
result = self.client_post(
"/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"}
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
iago = self.example_user("iago")
self.login_user(iago)
with mock.patch("analytics.views.support.do_change_realm_plan_type") as m:
result = self.client_post(
"/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "2"}
)
m.assert_called_once_with(get_realm("zulip"), 2, acting_user=iago)
self.assert_in_success_response(
["Plan type of zulip changed from self-hosted to limited"], result
)
with mock.patch("analytics.views.support.do_change_realm_plan_type") as m:
result = self.client_post(
"/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "10"}
)
m.assert_called_once_with(get_realm("zulip"), 10, acting_user=iago)
self.assert_in_success_response(
["Plan type of zulip changed from self-hosted to plus"], result
)
def test_change_org_type(self) -> None:
cordelia = self.example_user("cordelia")
self.login_user(cordelia)
result = self.client_post(
"/activity/support", {"realm_id": f"{cordelia.realm_id}", "org_type": "70"}
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
iago = self.example_user("iago")
self.login_user(iago)
with mock.patch("analytics.views.support.do_change_realm_org_type") as m:
result = self.client_post(
"/activity/support", {"realm_id": f"{iago.realm_id}", "org_type": "70"}
)
m.assert_called_once_with(get_realm("zulip"), 70, acting_user=iago)
self.assert_in_success_response(
["Org type of zulip changed from Business to Government"], result
)
def test_attach_discount(self) -> None:
cordelia = self.example_user("cordelia")
lear_realm = get_realm("lear")
self.login_user(cordelia)
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
iago = self.example_user("iago")
self.login("iago")
with mock.patch("analytics.views.support.attach_discount_to_realm") as m:
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}
)
m.assert_called_once_with(get_realm("lear"), 25, acting_user=iago)
self.assert_in_success_response(["Discount of lear changed to 25% from 0%"], result)
def test_change_sponsorship_status(self) -> None:
lear_realm = get_realm("lear")
self.assertIsNone(get_customer_by_realm(lear_realm))
cordelia = self.example_user("cordelia")
self.login_user(cordelia)
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"}
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
iago = self.example_user("iago")
self.login_user(iago)
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"}
)
self.assert_in_success_response(["lear marked as pending sponsorship."], result)
customer = get_customer_by_realm(lear_realm)
assert customer is not None
self.assertTrue(customer.sponsorship_pending)
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "false"}
)
self.assert_in_success_response(["lear is no longer pending sponsorship."], result)
customer = get_customer_by_realm(lear_realm)
assert customer is not None
self.assertFalse(customer.sponsorship_pending)
def test_approve_sponsorship(self) -> None:
lear_realm = get_realm("lear")
update_sponsorship_status(lear_realm, True, acting_user=None)
king_user = self.lear_user("king")
king_user.role = UserProfile.ROLE_REALM_OWNER
king_user.save()
cordelia = self.example_user("cordelia")
self.login_user(cordelia)
result = self.client_post(
"/activity/support",
{"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"},
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
iago = self.example_user("iago")
self.login_user(iago)
result = self.client_post(
"/activity/support",
{"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"},
)
self.assert_in_success_response(["Sponsorship approved for lear"], result)
lear_realm.refresh_from_db()
self.assertEqual(lear_realm.plan_type, Realm.PLAN_TYPE_STANDARD_FREE)
customer = get_customer_by_realm(lear_realm)
assert customer is not None
self.assertFalse(customer.sponsorship_pending)
messages = UserMessage.objects.filter(user_profile=king_user)
self.assertIn(
"request for sponsored hosting has been approved", messages[0].message.content
)
self.assert_length(messages, 1)
def test_activate_or_deactivate_realm(self) -> None:
cordelia = self.example_user("cordelia")
lear_realm = get_realm("lear")
self.login_user(cordelia)
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"}
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
self.login("iago")
with mock.patch("analytics.views.support.do_deactivate_realm") as m:
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"}
)
m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago"))
self.assert_in_success_response(["lear deactivated"], result)
with mock.patch("analytics.views.support.do_send_realm_reactivation_email") as m:
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "status": "active"}
)
m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago"))
self.assert_in_success_response(
["Realm reactivation email sent to admins of lear"], result
)
def test_change_subdomain(self) -> None:
cordelia = self.example_user("cordelia")
lear_realm = get_realm("lear")
self.login_user(cordelia)
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new_name"}
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
self.login("iago")
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"}
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/activity/support?q=new-name")
realm_id = lear_realm.id
lear_realm = get_realm("new-name")
self.assertEqual(lear_realm.id, realm_id)
self.assertTrue(Realm.objects.filter(string_id="lear").exists())
self.assertTrue(Realm.objects.filter(string_id="lear")[0].deactivated)
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"}
)
self.assert_in_success_response(
["Subdomain unavailable. Please choose a different one."], result
)
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "zulip"}
)
self.assert_in_success_response(
["Subdomain unavailable. Please choose a different one."], result
)
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "lear"}
)
self.assert_in_success_response(
["Subdomain unavailable. Please choose a different one."], result
)
def test_downgrade_realm(self) -> None:
cordelia = self.example_user("cordelia")
self.login_user(cordelia)
result = self.client_post(
"/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"}
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
iago = self.example_user("iago")
self.login_user(iago)
with mock.patch("analytics.views.support.downgrade_at_the_end_of_billing_cycle") as m:
result = self.client_post(
"/activity/support",
{
"realm_id": f"{iago.realm_id}",
"modify_plan": "downgrade_at_billing_cycle_end",
},
)
m.assert_called_once_with(get_realm("zulip"))
self.assert_in_success_response(
["zulip marked for downgrade at the end of billing cycle"], result
)
with mock.patch(
"analytics.views.support.downgrade_now_without_creating_additional_invoices"
) as m:
result = self.client_post(
"/activity/support",
{
"realm_id": f"{iago.realm_id}",
"modify_plan": "downgrade_now_without_additional_licenses",
},
)
m.assert_called_once_with(get_realm("zulip"))
self.assert_in_success_response(
["zulip downgraded without creating additional invoices"], result
)
with mock.patch(
"analytics.views.support.downgrade_now_without_creating_additional_invoices"
) as m1:
with mock.patch("analytics.views.support.void_all_open_invoices", return_value=1) as m2:
result = self.client_post(
"/activity/support",
{
"realm_id": f"{iago.realm_id}",
"modify_plan": "downgrade_now_void_open_invoices",
},
)
m1.assert_called_once_with(get_realm("zulip"))
m2.assert_called_once_with(get_realm("zulip"))
self.assert_in_success_response(
["zulip downgraded and voided 1 open invoices"], result
)
with mock.patch("analytics.views.support.switch_realm_from_standard_to_plus_plan") as m:
result = self.client_post(
"/activity/support",
{
"realm_id": f"{iago.realm_id}",
"modify_plan": "upgrade_to_plus",
},
)
m.assert_called_once_with(get_realm("zulip"))
self.assert_in_success_response(["zulip upgraded to Plus"], result)
def test_scrub_realm(self) -> None:
cordelia = self.example_user("cordelia")
lear_realm = get_realm("lear")
self.login_user(cordelia)
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}
)
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
self.login("iago")
with mock.patch("analytics.views.support.do_scrub_realm") as m:
result = self.client_post(
"/activity/support", {"realm_id": f"{lear_realm.id}", "scrub_realm": "true"}
)
m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago"))
self.assert_in_success_response(["lear scrubbed"], result)
with mock.patch("analytics.views.support.do_scrub_realm") as m:
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}"})
self.assert_json_error(result, "Invalid parameters")
m.assert_not_called()


@@ -0,0 +1,724 @@
from datetime import datetime, timedelta, timezone
from typing import List, Optional
from unittest import mock
import ujson
from django.http import HttpResponse
from django.utils.timezone import now as timezone_now
from analytics.lib.counts import COUNT_STATS, CountStat
from analytics.lib.time_utils import time_range
from analytics.models import FillState, RealmCount, UserCount, last_successful_fill
from analytics.views import rewrite_client_arrays, sort_by_totals, sort_client_labels
from corporate.models import get_customer_by_realm
from zerver.lib.actions import do_create_multiuse_invite_link, do_send_realm_reactivation_email
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import reset_emails_in_zulip_realm
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, datetime_to_timestamp
from zerver.models import Client, MultiuseInvite, PreregistrationUser, get_realm
class TestStatsEndpoint(ZulipTestCase):
def test_stats(self) -> None:
self.user = self.example_user('hamlet')
self.login_user(self.user)
result = self.client_get('/stats')
self.assertEqual(result.status_code, 200)
# Check that we get something back
self.assert_in_response("Zulip analytics for", result)
def test_guest_user_cant_access_stats(self) -> None:
self.user = self.example_user('polonius')
self.login_user(self.user)
result = self.client_get('/stats')
self.assert_json_error(result, "Not allowed for guest users", 400)
result = self.client_get('/json/analytics/chart_data')
self.assert_json_error(result, "Not allowed for guest users", 400)
def test_stats_for_realm(self) -> None:
user = self.example_user('hamlet')
self.login_user(user)
result = self.client_get('/stats/realm/zulip/')
self.assertEqual(result.status_code, 302)
user = self.example_user('hamlet')
user.is_staff = True
user.save(update_fields=['is_staff'])
result = self.client_get('/stats/realm/not_existing_realm/')
self.assertEqual(result.status_code, 302)
result = self.client_get('/stats/realm/zulip/')
self.assertEqual(result.status_code, 200)
self.assert_in_response("Zulip analytics for", result)
def test_stats_for_installation(self) -> None:
user = self.example_user('hamlet')
self.login_user(user)
result = self.client_get('/stats/installation')
self.assertEqual(result.status_code, 302)
user = self.example_user('hamlet')
user.is_staff = True
user.save(update_fields=['is_staff'])
result = self.client_get('/stats/installation')
self.assertEqual(result.status_code, 200)
self.assert_in_response("Zulip analytics for", result)
class TestGetChartData(ZulipTestCase):
def setUp(self) -> None:
super().setUp()
self.realm = get_realm('zulip')
self.user = self.example_user('hamlet')
self.login_user(self.user)
self.end_times_hour = [ceiling_to_hour(self.realm.date_created) + timedelta(hours=i)
for i in range(4)]
self.end_times_day = [ceiling_to_day(self.realm.date_created) + timedelta(days=i)
for i in range(4)]
def data(self, i: int) -> List[int]:
return [0, 0, i, 0]
def insert_data(self, stat: CountStat, realm_subgroups: List[Optional[str]],
user_subgroups: List[str]) -> None:
if stat.frequency == CountStat.HOUR:
insert_time = self.end_times_hour[2]
fill_time = self.end_times_hour[-1]
if stat.frequency == CountStat.DAY:
insert_time = self.end_times_day[2]
fill_time = self.end_times_day[-1]
RealmCount.objects.bulk_create([
RealmCount(property=stat.property, subgroup=subgroup, end_time=insert_time,
value=100+i, realm=self.realm)
for i, subgroup in enumerate(realm_subgroups)])
UserCount.objects.bulk_create([
UserCount(property=stat.property, subgroup=subgroup, end_time=insert_time,
value=200+i, realm=self.realm, user=self.user)
for i, subgroup in enumerate(user_subgroups)])
FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE)
def test_number_of_humans(self) -> None:
stat = COUNT_STATS['realm_active_humans::day']
self.insert_data(stat, [None], [])
stat = COUNT_STATS['1day_actives::day']
self.insert_data(stat, [None], [])
stat = COUNT_STATS['active_users_audit:is_bot:day']
self.insert_data(stat, ['false'], [])
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'number_of_humans'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data, {
'msg': '',
'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
'frequency': CountStat.DAY,
'everyone': {'_1day': self.data(100), '_15day': self.data(100), 'all_time': self.data(100)},
'display_order': None,
'result': 'success',
})
def test_messages_sent_over_time(self) -> None:
stat = COUNT_STATS['messages_sent:is_bot:hour']
self.insert_data(stat, ['true', 'false'], ['false'])
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_over_time'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data, {
'msg': '',
'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_hour],
'frequency': CountStat.HOUR,
'everyone': {'bot': self.data(100), 'human': self.data(101)},
'user': {'bot': self.data(0), 'human': self.data(200)},
'display_order': None,
'result': 'success',
})
def test_messages_sent_by_message_type(self) -> None:
stat = COUNT_STATS['messages_sent:message_type:day']
self.insert_data(stat, ['public_stream', 'private_message'],
['public_stream', 'private_stream'])
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_by_message_type'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data, {
'msg': '',
'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
'frequency': CountStat.DAY,
'everyone': {'Public streams': self.data(100), 'Private streams': self.data(0),
'Private messages': self.data(101), 'Group private messages': self.data(0)},
'user': {'Public streams': self.data(200), 'Private streams': self.data(201),
'Private messages': self.data(0), 'Group private messages': self.data(0)},
'display_order': ['Private messages', 'Public streams', 'Private streams', 'Group private messages'],
'result': 'success',
})
def test_messages_sent_by_client(self) -> None:
stat = COUNT_STATS['messages_sent:client:day']
client1 = Client.objects.create(name='client 1')
client2 = Client.objects.create(name='client 2')
client3 = Client.objects.create(name='client 3')
client4 = Client.objects.create(name='client 4')
self.insert_data(stat, [client4.id, client3.id, client2.id],
[client3.id, client1.id])
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_by_client'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data, {
'msg': '',
'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
'frequency': CountStat.DAY,
'everyone': {'client 4': self.data(100), 'client 3': self.data(101),
'client 2': self.data(102)},
'user': {'client 3': self.data(200), 'client 1': self.data(201)},
'display_order': ['client 1', 'client 2', 'client 3', 'client 4'],
'result': 'success',
})
def test_messages_read_over_time(self) -> None:
stat = COUNT_STATS['messages_read::hour']
self.insert_data(stat, [None], [])
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_read_over_time'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data, {
'msg': '',
'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_hour],
'frequency': CountStat.HOUR,
'everyone': {'read': self.data(100)},
'user': {'read': self.data(0)},
'display_order': None,
'result': 'success',
})
def test_include_empty_subgroups(self) -> None:
FillState.objects.create(
property='realm_active_humans::day', end_time=self.end_times_day[0],
state=FillState.DONE)
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'number_of_humans'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data['everyone'], {"_1day": [0], "_15day": [0], "all_time": [0]})
self.assertFalse('user' in data)
FillState.objects.create(
property='messages_sent:is_bot:hour', end_time=self.end_times_hour[0],
state=FillState.DONE)
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_over_time'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data['everyone'], {'human': [0], 'bot': [0]})
self.assertEqual(data['user'], {'human': [0], 'bot': [0]})
FillState.objects.create(
property='messages_sent:message_type:day', end_time=self.end_times_day[0],
state=FillState.DONE)
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_by_message_type'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data['everyone'], {
'Public streams': [0], 'Private streams': [0],
'Private messages': [0], 'Group private messages': [0]})
self.assertEqual(data['user'], {
'Public streams': [0], 'Private streams': [0],
'Private messages': [0], 'Group private messages': [0]})
FillState.objects.create(
property='messages_sent:client:day', end_time=self.end_times_day[0],
state=FillState.DONE)
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_by_client'})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data['everyone'], {})
self.assertEqual(data['user'], {})
def test_start_and_end(self) -> None:
stat = COUNT_STATS['realm_active_humans::day']
self.insert_data(stat, [None], [])
stat = COUNT_STATS['1day_actives::day']
self.insert_data(stat, [None], [])
stat = COUNT_STATS['active_users_audit:is_bot:day']
self.insert_data(stat, ['false'], [])
end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day]
# valid start and end
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'number_of_humans',
'start': end_time_timestamps[1],
'end': end_time_timestamps[2]})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data['end_times'], end_time_timestamps[1:3])
self.assertEqual(data['everyone'], {'_1day': [0, 100], '_15day': [0, 100], 'all_time': [0, 100]})
# start later than end
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'number_of_humans',
'start': end_time_timestamps[2],
'end': end_time_timestamps[1]})
self.assert_json_error_contains(result, 'Start time is later than')
def test_min_length(self) -> None:
stat = COUNT_STATS['realm_active_humans::day']
self.insert_data(stat, [None], [])
stat = COUNT_STATS['1day_actives::day']
self.insert_data(stat, [None], [])
stat = COUNT_STATS['active_users_audit:is_bot:day']
self.insert_data(stat, ['false'], [])
# test min_length is too short to change anything
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'number_of_humans',
'min_length': 2})
self.assert_json_success(result)
data = result.json()
self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in self.end_times_day])
self.assertEqual(data['everyone'], {'_1day': self.data(100), '_15day': self.data(100), 'all_time': self.data(100)})
# test min_length larger than filled data
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'number_of_humans',
'min_length': 5})
self.assert_json_success(result)
data = result.json()
end_times = [ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)]
self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in end_times])
self.assertEqual(data['everyone'], {'_1day': [0]+self.data(100), '_15day': [0]+self.data(100), 'all_time': [0]+self.data(100)})
def test_non_existent_chart(self) -> None:
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'does_not_exist'})
self.assert_json_error_contains(result, 'Unknown chart name')
def test_analytics_not_running(self) -> None:
realm = get_realm("zulip")
self.assertEqual(FillState.objects.count(), 0)
realm.date_created = timezone_now() - timedelta(days=3)
realm.save(update_fields=["date_created"])
with mock.patch('logging.warning'):
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_over_time'})
self.assert_json_error_contains(result, 'No analytics data available')
realm.date_created = timezone_now() - timedelta(days=1, hours=2)
realm.save(update_fields=["date_created"])
with mock.patch('logging.warning'):
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_over_time'})
self.assert_json_error_contains(result, 'No analytics data available')
realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
realm.save(update_fields=["date_created"])
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_over_time'})
self.assert_json_success(result)
realm.date_created = timezone_now() - timedelta(hours=10)
realm.save(update_fields=["date_created"])
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_over_time'})
self.assert_json_success(result)
end_time = timezone_now() - timedelta(days=5)
fill_state = FillState.objects.create(property='messages_sent:is_bot:hour', end_time=end_time,
state=FillState.DONE)
realm.date_created = timezone_now() - timedelta(days=3)
realm.save(update_fields=["date_created"])
with mock.patch('logging.warning'):
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_over_time'})
self.assert_json_error_contains(result, 'No analytics data available')
realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
realm.save(update_fields=["date_created"])
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_over_time'})
self.assert_json_success(result)
end_time = timezone_now() - timedelta(days=2)
fill_state.end_time = end_time
fill_state.save(update_fields=["end_time"])
realm.date_created = timezone_now() - timedelta(days=3)
realm.save(update_fields=["date_created"])
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_over_time'})
self.assert_json_success(result)
realm.date_created = timezone_now() - timedelta(days=1, hours=2)
realm.save(update_fields=["date_created"])
with mock.patch('logging.warning'):
result = self.client_get('/json/analytics/chart_data',
{'chart_name': 'messages_sent_over_time'})
self.assert_json_error_contains(result, 'No analytics data available')
realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
realm.save(update_fields=["date_created"])
result = self.client_get('/json/analytics/chart_data', {'chart_name': 'messages_sent_over_time'})
self.assert_json_success(result)
def test_get_chart_data_for_realm(self) -> None:
user = self.example_user('hamlet')
self.login_user(user)
result = self.client_get('/json/analytics/chart_data/realm/zulip',
{'chart_name': 'number_of_humans'})
self.assert_json_error(result, "Must be an server administrator", 400)
user = self.example_user('hamlet')
user.is_staff = True
user.save(update_fields=['is_staff'])
stat = COUNT_STATS['realm_active_humans::day']
self.insert_data(stat, [None], [])
result = self.client_get('/json/analytics/chart_data/realm/not_existing_realm',
{'chart_name': 'number_of_humans'})
self.assert_json_error(result, 'Invalid organization', 400)
result = self.client_get('/json/analytics/chart_data/realm/zulip',
{'chart_name': 'number_of_humans'})
self.assert_json_success(result)
def test_get_chart_data_for_installation(self) -> None:
user = self.example_user('hamlet')
self.login_user(user)
result = self.client_get('/json/analytics/chart_data/installation',
{'chart_name': 'number_of_humans'})
self.assert_json_error(result, "Must be an server administrator", 400)
user = self.example_user('hamlet')
user.is_staff = True
user.save(update_fields=['is_staff'])
stat = COUNT_STATS['realm_active_humans::day']
self.insert_data(stat, [None], [])
result = self.client_get('/json/analytics/chart_data/installation',
{'chart_name': 'number_of_humans'})
self.assert_json_success(result)
class TestSupportEndpoint(ZulipTestCase):
def test_search(self) -> None:
reset_emails_in_zulip_realm()
def check_hamlet_user_query_result(result: HttpResponse) -> None:
self.assert_in_success_response(['<span class="label">user</span>\n', '<h3>King Hamlet</h3>',
'<b>Email</b>: hamlet@zulip.com', '<b>Is active</b>: True<br>',
'<b>Admins</b>: desdemona@zulip.com, iago@zulip.com\n',
'class="copy-button" data-copytext="desdemona@zulip.com, iago@zulip.com"',
], result)
def check_zulip_realm_query_result(result: HttpResponse) -> None:
zulip_realm = get_realm("zulip")
self.assert_in_success_response([f'<input type="hidden" name="realm_id" value="{zulip_realm.id}"',
'Zulip Dev</h3>',
'<option value="1" selected>Self Hosted</option>',
'<option value="2" >Limited</option>',
'input type="number" name="discount" value="None"',
'<option value="active" selected>Active</option>',
'<option value="deactivated" >Deactivated</option>',
'scrub-realm-button">',
'data-string-id="zulip"'], result)
def check_lear_realm_query_result(result: HttpResponse) -> None:
lear_realm = get_realm("lear")
self.assert_in_success_response([f'<input type="hidden" name="realm_id" value="{lear_realm.id}"',
'Lear &amp; Co.</h3>',
'<option value="1" selected>Self Hosted</option>',
'<option value="2" >Limited</option>',
'input type="number" name="discount" value="None"',
'<option value="active" selected>Active</option>',
'<option value="deactivated" >Deactivated</option>',
'scrub-realm-button">',
'data-string-id="lear"'], result)
def check_preregistration_user_query_result(result: HttpResponse, email: str, invite: bool=False) -> None:
self.assert_in_success_response(['<span class="label">preregistration user</span>\n',
f'<b>Email</b>: {email}',
], result)
if invite:
self.assert_in_success_response(['<span class="label">invite</span>'], result)
self.assert_in_success_response(['<b>Expires in</b>: 1\xa0week, 3',
'<b>Status</b>: Link has never been clicked'], result)
self.assert_in_success_response([], result)
else:
self.assert_not_in_success_response(['<span class="label">invite</span>'], result)
self.assert_in_success_response(['<b>Expires in</b>: 1\xa0day',
'<b>Status</b>: Link has never been clicked'], result)
def check_realm_creation_query_result(result: HttpResponse, email: str) -> None:
self.assert_in_success_response(['<span class="label">preregistration user</span>\n',
'<span class="label">realm creation</span>\n',
'<b>Link</b>: http://testserver/accounts/do_confirm/',
'<b>Expires in</b>: 1\xa0day<br>\n',
], result)
def check_multiuse_invite_link_query_result(result: HttpResponse) -> None:
self.assert_in_success_response(['<span class="label">multiuse invite</span>\n',
'<b>Link</b>: http://zulip.testserver/join/',
'<b>Expires in</b>: 1\xa0week, 3',
], result)
def check_realm_reactivation_link_query_result(result: HttpResponse) -> None:
self.assert_in_success_response(['<span class="label">realm reactivation</span>\n',
'<b>Link</b>: http://zulip.testserver/reactivate/',
'<b>Expires in</b>: 1\xa0day',
], result)
self.login('cordelia')
result = self.client_get("/activity/support")
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
self.login('iago')
result = self.client_get("/activity/support")
self.assert_in_success_response(['<input type="text" name="q" class="input-xxlarge search-query"'], result)
result = self.client_get("/activity/support", {"q": "hamlet@zulip.com"})
check_hamlet_user_query_result(result)
check_zulip_realm_query_result(result)
result = self.client_get("/activity/support", {"q": "lear"})
check_lear_realm_query_result(result)
result = self.client_get("/activity/support", {"q": "http://lear.testserver"})
check_lear_realm_query_result(result)
with self.settings(REALM_HOSTS={'zulip': 'localhost'}):
result = self.client_get("/activity/support", {"q": "http://localhost"})
check_zulip_realm_query_result(result)
result = self.client_get("/activity/support", {"q": "hamlet@zulip.com, lear"})
check_hamlet_user_query_result(result)
check_zulip_realm_query_result(result)
check_lear_realm_query_result(result)
result = self.client_get("/activity/support", {"q": "lear, Hamlet <hamlet@zulip.com>"})
check_hamlet_user_query_result(result)
check_zulip_realm_query_result(result)
check_lear_realm_query_result(result)
self.client_post('/accounts/home/', {'email': self.nonreg_email("test")})
self.login('iago')
result = self.client_get("/activity/support", {"q": self.nonreg_email("test")})
check_preregistration_user_query_result(result, self.nonreg_email("test"))
check_zulip_realm_query_result(result)
stream_ids = [self.get_stream_id("Denmark")]
invitee_emails = [self.nonreg_email("test1")]
self.client_post("/json/invites", {"invitee_emails": invitee_emails,
"stream_ids": ujson.dumps(stream_ids),
"invite_as": PreregistrationUser.INVITE_AS['MEMBER']})
result = self.client_get("/activity/support", {"q": self.nonreg_email("test1")})
check_preregistration_user_query_result(result, self.nonreg_email("test1"), invite=True)
check_zulip_realm_query_result(result)
email = self.nonreg_email('alice')
self.client_post('/new/', {'email': email})
result = self.client_get("/activity/support", {"q": email})
check_realm_creation_query_result(result, email)
do_create_multiuse_invite_link(self.example_user("hamlet"), invited_as=1)
result = self.client_get("/activity/support", {"q": "zulip"})
check_multiuse_invite_link_query_result(result)
check_zulip_realm_query_result(result)
MultiuseInvite.objects.all().delete()
do_send_realm_reactivation_email(get_realm("zulip"))
result = self.client_get("/activity/support", {"q": "zulip"})
check_realm_reactivation_link_query_result(result)
check_zulip_realm_query_result(result)
def test_change_plan_type(self) -> None:
cordelia = self.example_user('cordelia')
self.login_user(cordelia)
result = self.client_post("/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
iago = self.example_user("iago")
self.login_user(iago)
with mock.patch("analytics.views.do_change_plan_type") as m:
result = self.client_post("/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "2"})
m.assert_called_once_with(get_realm("zulip"), 2)
self.assert_in_success_response(["Plan type of Zulip Dev changed from self hosted to limited"], result)
def test_attach_discount(self) -> None:
cordelia = self.example_user('cordelia')
lear_realm = get_realm('lear')
self.login_user(cordelia)
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
self.login('iago')
with mock.patch("analytics.views.attach_discount_to_realm") as m:
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"})
m.assert_called_once_with(get_realm("lear"), 25)
self.assert_in_success_response(["Discount of Lear &amp; Co. changed to 25 from None"], result)
def test_change_sponsorship_status(self) -> None:
lear_realm = get_realm("lear")
self.assertIsNone(get_customer_by_realm(lear_realm))
cordelia = self.example_user('cordelia')
self.login_user(cordelia)
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}",
"sponsorship_pending": "true"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
iago = self.example_user("iago")
self.login_user(iago)
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}",
"sponsorship_pending": "true"})
self.assert_in_success_response(["Lear &amp; Co. marked as pending sponsorship."], result)
customer = get_customer_by_realm(lear_realm)
assert(customer is not None)
self.assertTrue(customer.sponsorship_pending)
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}",
"sponsorship_pending": "false"})
self.assert_in_success_response(["Lear &amp; Co. is no longer pending sponsorship."], result)
customer = get_customer_by_realm(lear_realm)
assert(customer is not None)
self.assertFalse(customer.sponsorship_pending)
def test_activate_or_deactivate_realm(self) -> None:
cordelia = self.example_user('cordelia')
lear_realm = get_realm('lear')
self.login_user(cordelia)
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
self.login('iago')
with mock.patch("analytics.views.do_deactivate_realm") as m:
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"})
m.assert_called_once_with(lear_realm, self.example_user("iago"))
self.assert_in_success_response(["Lear &amp; Co. deactivated"], result)
with mock.patch("analytics.views.do_send_realm_reactivation_email") as m:
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "status": "active"})
m.assert_called_once_with(lear_realm)
self.assert_in_success_response(["Realm reactivation email sent to admins of Lear"], result)
def test_scrub_realm(self) -> None:
cordelia = self.example_user('cordelia')
lear_realm = get_realm('lear')
self.login_user(cordelia)
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"})
self.assertEqual(result.status_code, 302)
self.assertEqual(result["Location"], "/login/")
self.login('iago')
with mock.patch("analytics.views.do_scrub_realm") as m:
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "scrub_realm": "scrub_realm"})
m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago"))
self.assert_in_success_response(["Lear &amp; Co. scrubbed"], result)
with mock.patch("analytics.views.do_scrub_realm") as m:
with self.assertRaises(AssertionError):
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}"})
m.assert_not_called()
class TestGetChartDataHelpers(ZulipTestCase):
# last_successful_fill is in analytics/models.py, but get_chart_data is
# the only function that uses it at the moment
def test_last_successful_fill(self) -> None:
self.assertIsNone(last_successful_fill('non-existent'))
a_time = datetime(2016, 3, 14, 19, tzinfo=timezone.utc)
one_hour_before = datetime(2016, 3, 14, 18, tzinfo=timezone.utc)
fillstate = FillState.objects.create(property='property', end_time=a_time,
state=FillState.DONE)
self.assertEqual(last_successful_fill('property'), a_time)
fillstate.state = FillState.STARTED
fillstate.save()
self.assertEqual(last_successful_fill('property'), one_hour_before)
def test_sort_by_totals(self) -> None:
empty: List[int] = []
value_arrays = {'c': [0, 1], 'a': [9], 'b': [1, 1, 1], 'd': empty}
self.assertEqual(sort_by_totals(value_arrays), ['a', 'b', 'c', 'd'])
def test_sort_client_labels(self) -> None:
data = {'everyone': {'a': [16], 'c': [15], 'b': [14], 'e': [13], 'd': [12], 'h': [11]},
'user': {'a': [6], 'b': [5], 'd': [4], 'e': [3], 'f': [2], 'g': [1]}}
self.assertEqual(sort_client_labels(data), ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'])
class TestTimeRange(ZulipTestCase):
def test_time_range(self) -> None:
HOUR = timedelta(hours=1)
DAY = timedelta(days=1)
a_time = datetime(2016, 3, 14, 22, 59, tzinfo=timezone.utc)
floor_hour = datetime(2016, 3, 14, 22, tzinfo=timezone.utc)
floor_day = datetime(2016, 3, 14, tzinfo=timezone.utc)
# test start == end
self.assertEqual(time_range(a_time, a_time, CountStat.HOUR, None), [])
self.assertEqual(time_range(a_time, a_time, CountStat.DAY, None), [])
# test start == end == boundary, and min_length == 0
self.assertEqual(time_range(floor_hour, floor_hour, CountStat.HOUR, 0), [floor_hour])
self.assertEqual(time_range(floor_day, floor_day, CountStat.DAY, 0), [floor_day])
# test start and end on different boundaries
self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, None),
[floor_hour, floor_hour+HOUR])
self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, None),
[floor_day, floor_day+DAY])
# test min_length
self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, 4),
[floor_hour-2*HOUR, floor_hour-HOUR, floor_hour, floor_hour+HOUR])
self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, 4),
[floor_day-2*DAY, floor_day-DAY, floor_day, floor_day+DAY])
class TestMapArrays(ZulipTestCase):
def test_map_arrays(self) -> None:
a = {'desktop app 1.0': [1, 2, 3],
'desktop app 2.0': [10, 12, 13],
'desktop app 3.0': [21, 22, 23],
'website': [1, 2, 3],
'ZulipiOS': [1, 2, 3],
'ZulipElectron': [2, 5, 7],
'ZulipMobile': [1, 5, 7],
'ZulipPython': [1, 2, 3],
'API: Python': [1, 2, 3],
'SomethingRandom': [4, 5, 6],
'ZulipGitHubWebhook': [7, 7, 9],
'ZulipAndroid': [64, 63, 65]}
result = rewrite_client_arrays(a)
self.assertEqual(result,
{'Old desktop app': [32, 36, 39],
'Old iOS app': [1, 2, 3],
'Desktop app': [2, 5, 7],
'Mobile app': [1, 5, 7],
'Website': [1, 2, 3],
'Python API': [2, 4, 6],
'SomethingRandom': [4, 5, 6],
'GitHub webhook': [7, 7, 9],
'Old Android app': [64, 63, 65]})


@@ -1,41 +1,34 @@
from typing import List, Union
from django.conf.urls import include
from django.urls import path
from django.urls.resolvers import URLPattern, URLResolver
from analytics.views.installation_activity import get_installation_activity
from analytics.views.realm_activity import get_realm_activity
from analytics.views.stats import (
get_chart_data,
get_chart_data_for_installation,
get_chart_data_for_realm,
get_chart_data_for_remote_installation,
get_chart_data_for_remote_realm,
stats,
stats_for_installation,
stats_for_realm,
stats_for_remote_installation,
stats_for_remote_realm,
)
from analytics.views.support import support
from analytics.views.user_activity import get_user_activity
from zerver.lib.rest import rest_path
import analytics.views
from zerver.lib.rest import rest_dispatch
i18n_urlpatterns: List[Union[URLPattern, URLResolver]] = [
i18n_urlpatterns = [
# Server admin (user_profile.is_staff) visible stats pages
path("activity", get_installation_activity),
path("activity/support", support, name="support"),
path("realm_activity/<realm_str>/", get_realm_activity),
path("user_activity/<user_profile_id>/", get_user_activity),
path("stats/realm/<realm_str>/", stats_for_realm),
path("stats/installation", stats_for_installation),
path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation),
path(
"stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/", stats_for_remote_realm
),
path('activity', analytics.views.get_activity,
name='analytics.views.get_activity'),
path('activity/support', analytics.views.support,
name='analytics.views.support'),
path('realm_activity/<str:realm_str>/', analytics.views.get_realm_activity,
name='analytics.views.get_realm_activity'),
path('user_activity/<str:email>/', analytics.views.get_user_activity,
name='analytics.views.get_user_activity'),
path('stats/realm/<str:realm_str>/', analytics.views.stats_for_realm,
name='analytics.views.stats_for_realm'),
path('stats/installation', analytics.views.stats_for_installation,
name='analytics.views.stats_for_installation'),
path('stats/remote/<int:remote_server_id>/installation',
analytics.views.stats_for_remote_installation,
name='analytics.views.stats_for_remote_installation'),
path('stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/',
analytics.views.stats_for_remote_realm,
name='analytics.views.stats_for_remote_realm'),
# User-visible stats page
path("stats", stats, name="stats"),
path('stats', analytics.views.stats,
name='analytics.views.stats'),
]
# These endpoints are a part of the API (V1), which uses:
@@ -48,22 +41,22 @@ i18n_urlpatterns: List[Union[URLPattern, URLResolver]] = [
# All of these paths are accessed by either a /json or /api prefix
v1_api_and_json_patterns = [
# get data for the graphs at /stats
rest_path("analytics/chart_data", GET=get_chart_data),
rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm),
rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation),
rest_path(
"analytics/chart_data/remote/<int:remote_server_id>/installation",
GET=get_chart_data_for_remote_installation,
),
rest_path(
"analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>",
GET=get_chart_data_for_remote_realm,
),
path('analytics/chart_data', rest_dispatch,
{'GET': 'analytics.views.get_chart_data'}),
path('analytics/chart_data/realm/<str:realm_str>', rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_realm'}),
path('analytics/chart_data/installation', rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_installation'}),
path('analytics/chart_data/remote/<int:remote_server_id>/installation', rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_remote_installation'}),
path('analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>',
rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_remote_realm'}),
]
i18n_urlpatterns += [
path("api/v1/", include(v1_api_and_json_patterns)),
path("json/", include(v1_api_and_json_patterns)),
path('api/v1/', include(v1_api_and_json_patterns)),
path('json/', include(v1_api_and_json_patterns)),
]
urlpatterns = i18n_urlpatterns

analytics/views.py (1534 lines) — file diff suppressed because it is too large

@@ -1,154 +0,0 @@
import re
import sys
from datetime import datetime
from typing import Any, Collection, Dict, List, Optional, Sequence
from urllib.parse import urlencode
from django.conf import settings
from django.db.backends.utils import CursorWrapper
from django.template import loader
from django.urls import reverse
from markupsafe import Markup
from zerver.lib.url_encoding import append_url_query_string
from zerver.models import UserActivity, get_realm
if sys.version_info < (3, 9): # nocoverage
from backports import zoneinfo
else: # nocoverage
import zoneinfo
eastern_tz = zoneinfo.ZoneInfo("America/New_York")
if settings.BILLING_ENABLED:
pass
def make_table(
title: str, cols: Sequence[str], rows: Sequence[Any], has_row_class: bool = False
) -> str:
if not has_row_class:
def fix_row(row: Any) -> Dict[str, Any]:
return dict(cells=row, row_class=None)
rows = list(map(fix_row, rows))
data = dict(title=title, cols=cols, rows=rows)
content = loader.render_to_string(
"analytics/ad_hoc_query.html",
dict(data=data),
)
return content
def dictfetchall(cursor: CursorWrapper) -> List[Dict[str, Any]]:
"""Returns all rows from a cursor as a dict"""
desc = cursor.description
return [dict(zip((col[0] for col in desc), row)) for row in cursor.fetchall()]
def format_date_for_activity_reports(date: Optional[datetime]) -> str:
if date:
return date.astimezone(eastern_tz).strftime("%Y-%m-%d %H:%M")
else:
return ""
def user_activity_link(email: str, user_profile_id: int) -> Markup:
from analytics.views.user_activity import get_user_activity
url = reverse(get_user_activity, kwargs=dict(user_profile_id=user_profile_id))
return Markup('<a href="{url}">{email}</a>').format(url=url, email=email)
def realm_activity_link(realm_str: str) -> Markup:
from analytics.views.realm_activity import get_realm_activity
url = reverse(get_realm_activity, kwargs=dict(realm_str=realm_str))
return Markup('<a href="{url}">{realm_str}</a>').format(url=url, realm_str=realm_str)
def realm_stats_link(realm_str: str) -> Markup:
from analytics.views.stats import stats_for_realm
url = reverse(stats_for_realm, kwargs=dict(realm_str=realm_str))
return Markup('<a href="{url}"><i class="fa fa-pie-chart"></i></a>').format(url=url)
def realm_support_link(realm_str: str) -> Markup:
support_url = reverse("support")
query = urlencode({"q": realm_str})
url = append_url_query_string(support_url, query)
return Markup('<a href="{url}">{realm_str}</a>').format(url=url, realm_str=realm_str)
def realm_url_link(realm_str: str) -> Markup:
url = get_realm(realm_str).uri
return Markup('<a href="{url}"><i class="fa fa-home"></i></a>').format(url=url)
def remote_installation_stats_link(server_id: int, hostname: str) -> Markup:
from analytics.views.stats import stats_for_remote_installation
url = reverse(stats_for_remote_installation, kwargs=dict(remote_server_id=server_id))
return Markup('<a href="{url}"><i class="fa fa-pie-chart"></i>{hostname}</a>').format(
url=url, hostname=hostname
)
def get_user_activity_summary(records: Collection[UserActivity]) -> Dict[str, Any]:
#: The type annotation used above is clearly overly permissive.
#: We should perhaps use TypedDict to clearly lay out the schema
#: for the user activity summary.
summary: Dict[str, Any] = {}
def update(action: str, record: UserActivity) -> None:
if action not in summary:
summary[action] = dict(
count=record.count,
last_visit=record.last_visit,
)
else:
summary[action]["count"] += record.count
summary[action]["last_visit"] = max(
summary[action]["last_visit"],
record.last_visit,
)
if records:
first_record = next(iter(records))
summary["name"] = first_record.user_profile.full_name
summary["user_profile_id"] = first_record.user_profile.id
for record in records:
client = record.client.name
query = str(record.query)
update("use", record)
if client == "API":
m = re.match("/api/.*/external/(.*)", query)
if m:
client = m.group(1)
update(client, record)
if client.startswith("desktop"):
update("desktop", record)
if client == "website":
update("website", record)
if ("send_message" in query) or re.search("/api/.*/external/.*", query):
update("send", record)
if query in [
"/json/update_pointer",
"/json/users/me/pointer",
"/api/v1/update_pointer",
"update_pointer_backend",
]:
update("pointer", record)
update(client, record)
return summary
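
The comment above notes that the Dict[str, Any] annotation for the summary is overly permissive and suggests a TypedDict. A minimal sketch of what such a schema might look like, assuming the per-action entries keep the count/last_visit shape built by update(); the names ActionSummary and UserActivitySummary are hypothetical and not part of this diff:

from datetime import datetime
from typing import Dict, TypedDict

class ActionSummary(TypedDict):
    # Aggregate for one action or client name, as built by update().
    count: int
    last_visit: datetime

class UserActivitySummary(TypedDict, total=False):
    # Top-level metadata filled in when any records exist.
    name: str
    user_profile_id: int
    # Hypothetical restructuring: TypedDict cannot express arbitrary extra
    # string keys, so the per-action entries would move under one key.
    actions: Dict[str, ActionSummary]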


@@ -1,620 +0,0 @@
import itertools
import time
from collections import defaultdict
from contextlib import suppress
from datetime import datetime, timedelta
from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union
from django.conf import settings
from django.db import connection
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from django.template import loader
from django.utils.timezone import now as timezone_now
from markupsafe import Markup
from psycopg2.sql import SQL, Composable, Literal
from analytics.lib.counts import COUNT_STATS
from analytics.views.activity_common import (
dictfetchall,
format_date_for_activity_reports,
make_table,
realm_activity_link,
realm_stats_link,
realm_support_link,
realm_url_link,
remote_installation_stats_link,
)
from analytics.views.support import get_plan_name
from zerver.decorator import require_server_admin
from zerver.lib.request import has_request_variables
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.models import Realm, UserActivityInterval, get_org_type_display_name
if settings.BILLING_ENABLED:
from corporate.lib.stripe import (
estimate_annual_recurring_revenue_by_realm,
get_realms_to_default_discount_dict,
)
def get_realm_day_counts() -> Dict[str, Dict[str, Markup]]:
query = SQL(
"""
select
r.string_id,
(now()::date - date_sent::date) age,
count(*) cnt
from zerver_message m
join zerver_userprofile up on up.id = m.sender_id
join zerver_realm r on r.id = up.realm_id
join zerver_client c on c.id = m.sending_client_id
where
(not up.is_bot)
and
date_sent > now()::date - interval '8 day'
and
c.name not in ('zephyr_mirror', 'ZulipMonitoring')
group by
r.string_id,
age
order by
r.string_id,
age
"""
)
cursor = connection.cursor()
cursor.execute(query)
rows = dictfetchall(cursor)
cursor.close()
counts: Dict[str, Dict[int, int]] = defaultdict(dict)
for row in rows:
counts[row["string_id"]][row["age"]] = row["cnt"]
result = {}
for string_id in counts:
raw_cnts = [counts[string_id].get(age, 0) for age in range(8)]
min_cnt = min(raw_cnts[1:])
max_cnt = max(raw_cnts[1:])
def format_count(cnt: int, style: Optional[str] = None) -> Markup:
if style is not None:
good_bad = style
elif cnt == min_cnt:
good_bad = "bad"
elif cnt == max_cnt:
good_bad = "good"
else:
good_bad = "neutral"
return Markup('<td class="number {good_bad}">{cnt}</td>').format(
good_bad=good_bad, cnt=cnt
)
cnts = format_count(raw_cnts[0], "neutral") + Markup().join(map(format_count, raw_cnts[1:]))
result[string_id] = dict(cnts=cnts)
return result
def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
now = timezone_now()
query = SQL(
"""
SELECT
realm.string_id,
realm.date_created,
realm.plan_type,
realm.org_type,
coalesce(wau_table.value, 0) wau_count,
coalesce(dau_table.value, 0) dau_count,
coalesce(user_count_table.value, 0) user_profile_count,
coalesce(bot_count_table.value, 0) bot_count
FROM
zerver_realm as realm
LEFT OUTER JOIN (
SELECT
value _14day_active_humans,
realm_id
from
analytics_realmcount
WHERE
property = 'realm_active_humans::day'
AND end_time = %(realm_active_humans_end_time)s
) as _14day_active_humans_table ON realm.id = _14day_active_humans_table.realm_id
LEFT OUTER JOIN (
SELECT
value,
realm_id
from
analytics_realmcount
WHERE
property = '7day_actives::day'
AND end_time = %(seven_day_actives_end_time)s
) as wau_table ON realm.id = wau_table.realm_id
LEFT OUTER JOIN (
SELECT
value,
realm_id
from
analytics_realmcount
WHERE
property = '1day_actives::day'
AND end_time = %(one_day_actives_end_time)s
) as dau_table ON realm.id = dau_table.realm_id
LEFT OUTER JOIN (
SELECT
value,
realm_id
from
analytics_realmcount
WHERE
property = 'active_users_audit:is_bot:day'
AND subgroup = 'false'
AND end_time = %(active_users_audit_end_time)s
) as user_count_table ON realm.id = user_count_table.realm_id
LEFT OUTER JOIN (
SELECT
value,
realm_id
from
analytics_realmcount
WHERE
property = 'active_users_audit:is_bot:day'
AND subgroup = 'true'
AND end_time = %(active_users_audit_end_time)s
) as bot_count_table ON realm.id = bot_count_table.realm_id
WHERE
_14day_active_humans IS NOT NULL
or realm.plan_type = 3
ORDER BY
dau_count DESC,
string_id ASC
"""
)
cursor = connection.cursor()
cursor.execute(
query,
{
"realm_active_humans_end_time": COUNT_STATS[
"realm_active_humans::day"
].last_successful_fill(),
"seven_day_actives_end_time": COUNT_STATS["7day_actives::day"].last_successful_fill(),
"one_day_actives_end_time": COUNT_STATS["1day_actives::day"].last_successful_fill(),
"active_users_audit_end_time": COUNT_STATS[
"active_users_audit:is_bot:day"
].last_successful_fill(),
},
)
rows = dictfetchall(cursor)
cursor.close()
for row in rows:
row["date_created_day"] = row["date_created"].strftime("%Y-%m-%d")
row["age_days"] = int((now - row["date_created"]).total_seconds() / 86400)
row["is_new"] = row["age_days"] < 12 * 7
# get messages sent per day
counts = get_realm_day_counts()
for row in rows:
try:
row["history"] = counts[row["string_id"]]["cnts"]
except Exception:
row["history"] = ""
# estimate annual subscription revenue
total_arr = 0
if settings.BILLING_ENABLED:
estimated_arrs = estimate_annual_recurring_revenue_by_realm()
realms_to_default_discount = get_realms_to_default_discount_dict()
for row in rows:
row["plan_type_string"] = get_plan_name(row["plan_type"])
string_id = row["string_id"]
if string_id in estimated_arrs:
row["arr"] = estimated_arrs[string_id]
if row["plan_type"] in [Realm.PLAN_TYPE_STANDARD, Realm.PLAN_TYPE_PLUS]:
row["effective_rate"] = 100 - int(realms_to_default_discount.get(string_id, 0))
elif row["plan_type"] == Realm.PLAN_TYPE_STANDARD_FREE:
row["effective_rate"] = 0
elif (
row["plan_type"] == Realm.PLAN_TYPE_LIMITED
and string_id in realms_to_default_discount
):
row["effective_rate"] = 100 - int(realms_to_default_discount[string_id])
else:
row["effective_rate"] = ""
total_arr += sum(estimated_arrs.values())
for row in rows:
row["org_type_string"] = get_org_type_display_name(row["org_type"])
# augment data with realm_minutes
total_hours = 0.0
for row in rows:
string_id = row["string_id"]
minutes = realm_minutes.get(string_id, 0.0)
hours = minutes / 60.0
total_hours += hours
row["hours"] = str(int(hours))
with suppress(Exception):
row["hours_per_user"] = "{:.1f}".format(hours / row["dau_count"])
# formatting
for row in rows:
row["realm_url"] = realm_url_link(row["string_id"])
row["stats_link"] = realm_stats_link(row["string_id"])
row["support_link"] = realm_support_link(row["string_id"])
row["string_id"] = realm_activity_link(row["string_id"])
# Count active sites
def meets_goal(row: Dict[str, int]) -> bool:
return row["dau_count"] >= 5
num_active_sites = len(list(filter(meets_goal, rows)))
# create totals
total_dau_count = 0
total_user_profile_count = 0
total_bot_count = 0
total_wau_count = 0
for row in rows:
total_dau_count += int(row["dau_count"])
total_user_profile_count += int(row["user_profile_count"])
total_bot_count += int(row["bot_count"])
total_wau_count += int(row["wau_count"])
total_row = dict(
string_id="Total",
plan_type_string="",
org_type_string="",
effective_rate="",
arr=total_arr,
realm_url="",
stats_link="",
support_link="",
date_created_day="",
dau_count=total_dau_count,
user_profile_count=total_user_profile_count,
bot_count=total_bot_count,
hours=int(total_hours),
wau_count=total_wau_count,
)
rows.insert(0, total_row)
content = loader.render_to_string(
"analytics/realm_summary_table.html",
dict(
rows=rows,
num_active_sites=num_active_sites,
utctime=now.strftime("%Y-%m-%d %H:%M %Z"),
billing_enabled=settings.BILLING_ENABLED,
),
)
return content
def user_activity_intervals() -> Tuple[Markup, Dict[str, float]]:
day_end = timestamp_to_datetime(time.time())
day_start = day_end - timedelta(hours=24)
output = Markup()
output += "Per-user online duration for the last 24 hours:\n"
total_duration = timedelta(0)
all_intervals = (
UserActivityInterval.objects.filter(
end__gte=day_start,
start__lte=day_end,
)
.select_related(
"user_profile",
"user_profile__realm",
)
.only(
"start",
"end",
"user_profile__delivery_email",
"user_profile__realm__string_id",
)
.order_by(
"user_profile__realm__string_id",
"user_profile__delivery_email",
)
)
by_string_id = lambda row: row.user_profile.realm.string_id
by_email = lambda row: row.user_profile.delivery_email
realm_minutes = {}
for string_id, realm_intervals in itertools.groupby(all_intervals, by_string_id):
realm_duration = timedelta(0)
output += Markup("<hr>") + f"{string_id}\n"
for email, intervals in itertools.groupby(realm_intervals, by_email):
duration = timedelta(0)
for interval in intervals:
start = max(day_start, interval.start)
end = min(day_end, interval.end)
duration += end - start
total_duration += duration
realm_duration += duration
output += f" {email:<37}{duration}\n"
realm_minutes[string_id] = realm_duration.total_seconds() / 60
output += f"\nTotal duration: {total_duration}\n"
output += f"\nTotal duration in minutes: {total_duration.total_seconds() / 60.}\n"
output += f"Total duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}"
content = Markup("<pre>{}</pre>").format(output)
return content, realm_minutes
def ad_hoc_queries() -> List[Dict[str, str]]:
def get_page(
query: Composable, cols: Sequence[str], title: str, totals_columns: Sequence[int] = []
) -> Dict[str, str]:
cursor = connection.cursor()
cursor.execute(query)
rows = cursor.fetchall()
rows = list(map(list, rows))
cursor.close()
def fix_rows(
i: int, fixup_func: Union[Callable[[str], Markup], Callable[[datetime], str]]
) -> None:
for row in rows:
row[i] = fixup_func(row[i])
total_row = []
for i, col in enumerate(cols):
if col == "Realm":
fix_rows(i, realm_activity_link)
elif col in ["Last time", "Last visit"]:
fix_rows(i, format_date_for_activity_reports)
elif col == "Hostname":
for row in rows:
row[i] = remote_installation_stats_link(row[0], row[i])
if len(totals_columns) > 0:
if i == 0:
total_row.append("Total")
elif i in totals_columns:
total_row.append(str(sum(row[i] for row in rows if row[i] is not None)))
else:
total_row.append("")
if len(totals_columns) > 0:
rows.insert(0, total_row)
content = make_table(title, cols, rows)
return dict(
content=content,
title=title,
)
pages = []
###
for mobile_type in ["Android", "ZulipiOS"]:
title = f"{mobile_type} usage"
query: Composable = SQL(
"""
select
realm.string_id,
up.id user_id,
client.name,
sum(count) as hits,
max(last_visit) as last_time
from zerver_useractivity ua
join zerver_client client on client.id = ua.client_id
join zerver_userprofile up on up.id = ua.user_profile_id
join zerver_realm realm on realm.id = up.realm_id
where
client.name like {mobile_type}
group by string_id, up.id, client.name
having max(last_visit) > now() - interval '2 week'
order by string_id, up.id, client.name
"""
).format(
mobile_type=Literal(mobile_type),
)
cols = [
"Realm",
"User id",
"Name",
"Hits",
"Last time",
]
pages.append(get_page(query, cols, title))
###
title = "Desktop users"
query = SQL(
"""
select
realm.string_id,
client.name,
sum(count) as hits,
max(last_visit) as last_time
from zerver_useractivity ua
join zerver_client client on client.id = ua.client_id
join zerver_userprofile up on up.id = ua.user_profile_id
join zerver_realm realm on realm.id = up.realm_id
where
client.name like 'desktop%%'
group by string_id, client.name
having max(last_visit) > now() - interval '2 week'
order by string_id, client.name
"""
)
cols = [
"Realm",
"Client",
"Hits",
"Last time",
]
pages.append(get_page(query, cols, title))
###
title = "Integrations by realm"
query = SQL(
"""
select
realm.string_id,
case
when query like '%%external%%' then split_part(query, '/', 5)
else client.name
end client_name,
sum(count) as hits,
max(last_visit) as last_time
from zerver_useractivity ua
join zerver_client client on client.id = ua.client_id
join zerver_userprofile up on up.id = ua.user_profile_id
join zerver_realm realm on realm.id = up.realm_id
where
(query in ('send_message_backend', '/api/v1/send_message')
and client.name not in ('Android', 'ZulipiOS')
and client.name not like 'test: Zulip%%'
)
or
query like '%%external%%'
group by string_id, client_name
having max(last_visit) > now() - interval '2 week'
order by string_id, client_name
"""
)
cols = [
"Realm",
"Client",
"Hits",
"Last time",
]
pages.append(get_page(query, cols, title))
###
title = "Integrations by client"
query = SQL(
"""
select
case
when query like '%%external%%' then split_part(query, '/', 5)
else client.name
end client_name,
realm.string_id,
sum(count) as hits,
max(last_visit) as last_time
from zerver_useractivity ua
join zerver_client client on client.id = ua.client_id
join zerver_userprofile up on up.id = ua.user_profile_id
join zerver_realm realm on realm.id = up.realm_id
where
(query in ('send_message_backend', '/api/v1/send_message')
and client.name not in ('Android', 'ZulipiOS')
and client.name not like 'test: Zulip%%'
)
or
query like '%%external%%'
group by client_name, string_id
having max(last_visit) > now() - interval '2 week'
order by client_name, string_id
"""
)
cols = [
"Client",
"Realm",
"Hits",
"Last time",
]
pages.append(get_page(query, cols, title))
title = "Remote Zulip servers"
query = SQL(
"""
with icount as (
select
server_id,
max(value) as max_value,
max(end_time) as max_end_time
from zilencer_remoteinstallationcount
where
property='active_users:is_bot:day'
and subgroup='false'
group by server_id
),
remote_push_devices as (
select server_id, count(distinct(user_id)) as push_user_count from zilencer_remotepushdevicetoken
group by server_id
)
select
rserver.id,
rserver.hostname,
rserver.contact_email,
max_value,
push_user_count,
max_end_time
from zilencer_remotezulipserver rserver
left join icount on icount.server_id = rserver.id
left join remote_push_devices on remote_push_devices.server_id = rserver.id
order by max_value DESC NULLS LAST, push_user_count DESC NULLS LAST
"""
)
cols = [
"ID",
"Hostname",
"Contact email",
"Analytics users",
"Mobile users",
"Last update time",
]
pages.append(get_page(query, cols, title, totals_columns=[3, 4]))
return pages
@require_server_admin
@has_request_variables
def get_installation_activity(request: HttpRequest) -> HttpResponse:
duration_content, realm_minutes = user_activity_intervals()
counts_content: str = realm_summary_table(realm_minutes)
data = [
("Counts", counts_content),
("Durations", duration_content),
]
for page in ad_hoc_queries():
data.append((page["title"], page["content"]))
title = "Activity"
return render(
request,
"analytics/activity.html",
context=dict(data=data, title=title, is_home=True),
)


@@ -1,262 +0,0 @@
import itertools
from datetime import datetime
from typing import Any, Dict, List, Optional, Set, Tuple
from django.db import connection
from django.db.models import QuerySet
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
from django.shortcuts import render
from django.utils.timezone import now as timezone_now
from psycopg2.sql import SQL
from analytics.views.activity_common import (
format_date_for_activity_reports,
get_user_activity_summary,
make_table,
realm_stats_link,
user_activity_link,
)
from zerver.decorator import require_server_admin
from zerver.models import Realm, UserActivity
def get_user_activity_records_for_realm(realm: str, is_bot: bool) -> QuerySet[UserActivity]:
fields = [
"user_profile__full_name",
"user_profile__delivery_email",
"query",
"client__name",
"count",
"last_visit",
]
records = UserActivity.objects.filter(
user_profile__realm__string_id=realm,
user_profile__is_active=True,
user_profile__is_bot=is_bot,
)
records = records.order_by("user_profile__delivery_email", "-last_visit")
records = records.select_related("user_profile", "client").only(*fields)
return records
def realm_user_summary_table(
all_records: QuerySet[UserActivity], admin_emails: Set[str]
) -> Tuple[Dict[str, Any], str]:
user_records = {}
def by_email(record: UserActivity) -> str:
return record.user_profile.delivery_email
for email, records in itertools.groupby(all_records, by_email):
user_records[email] = get_user_activity_summary(list(records))
def get_last_visit(user_summary: Dict[str, Dict[str, datetime]], k: str) -> Optional[datetime]:
if k in user_summary:
return user_summary[k]["last_visit"]
else:
return None
def get_count(user_summary: Dict[str, Dict[str, str]], k: str) -> str:
if k in user_summary:
return user_summary[k]["count"]
else:
return ""
def is_recent(val: datetime) -> bool:
age = timezone_now() - val
return age.total_seconds() < 5 * 60
rows = []
for email, user_summary in user_records.items():
email_link = user_activity_link(email, user_summary["user_profile_id"])
sent_count = get_count(user_summary, "send")
cells = [user_summary["name"], email_link, sent_count]
row_class = ""
for field in ["use", "send", "pointer", "desktop", "ZulipiOS", "Android"]:
visit = get_last_visit(user_summary, field)
if field == "use":
if visit and is_recent(visit):
row_class += " recently_active"
if email in admin_emails:
row_class += " admin"
val = format_date_for_activity_reports(visit)
cells.append(val)
row = dict(cells=cells, row_class=row_class)
rows.append(row)
def by_used_time(row: Dict[str, Any]) -> str:
return row["cells"][3]
rows = sorted(rows, key=by_used_time, reverse=True)
cols = [
"Name",
"Email",
"Total sent",
"Heard from",
"Message sent",
"Pointer motion",
"Desktop",
"ZulipiOS",
"Android",
]
title = "Summary"
content = make_table(title, cols, rows, has_row_class=True)
return user_records, content
def realm_client_table(user_summaries: Dict[str, Dict[str, Any]]) -> str:
exclude_keys = [
"internal",
"name",
"user_profile_id",
"use",
"send",
"pointer",
"website",
"desktop",
]
rows = []
for email, user_summary in user_summaries.items():
email_link = user_activity_link(email, user_summary["user_profile_id"])
name = user_summary["name"]
for k, v in user_summary.items():
if k in exclude_keys:
continue
client = k
count = v["count"]
last_visit = v["last_visit"]
row = [
format_date_for_activity_reports(last_visit),
client,
name,
email_link,
count,
]
rows.append(row)
rows = sorted(rows, key=lambda r: r[0], reverse=True)
cols = [
"Last visit",
"Client",
"Name",
"Email",
"Count",
]
title = "Clients"
return make_table(title, cols, rows)
def sent_messages_report(realm: str) -> str:
title = "Recently sent messages for " + realm
cols = [
"Date",
"Humans",
"Bots",
]
query = SQL(
"""
select
series.day::date,
humans.cnt,
bots.cnt
from (
select generate_series(
(now()::date - interval '2 week'),
now()::date,
interval '1 day'
) as day
) as series
left join (
select
date_sent::date date_sent,
count(*) cnt
from zerver_message m
join zerver_userprofile up on up.id = m.sender_id
join zerver_realm r on r.id = up.realm_id
where
r.string_id = %s
and
(not up.is_bot)
and
date_sent > now() - interval '2 week'
group by
date_sent::date
order by
date_sent::date
) humans on
series.day = humans.date_sent
left join (
select
date_sent::date date_sent,
count(*) cnt
from zerver_message m
join zerver_userprofile up on up.id = m.sender_id
join zerver_realm r on r.id = up.realm_id
where
r.string_id = %s
and
up.is_bot
and
date_sent > now() - interval '2 week'
group by
date_sent::date
order by
date_sent::date
) bots on
series.day = bots.date_sent
"""
)
cursor = connection.cursor()
cursor.execute(query, [realm, realm])
rows = cursor.fetchall()
cursor.close()
return make_table(title, cols, rows)
@require_server_admin
def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse:
data: List[Tuple[str, str]] = []
all_user_records: Dict[str, Any] = {}
try:
admins = Realm.objects.get(string_id=realm_str).get_human_admin_users()
except Realm.DoesNotExist:
return HttpResponseNotFound()
admin_emails = {admin.delivery_email for admin in admins}
for is_bot, page_title in [(False, "Humans"), (True, "Bots")]:
all_records = get_user_activity_records_for_realm(realm_str, is_bot)
user_records, content = realm_user_summary_table(all_records, admin_emails)
all_user_records.update(user_records)
data += [(page_title, content)]
page_title = "Clients"
content = realm_client_table(all_user_records)
data += [(page_title, content)]
page_title = "History"
content = sent_messages_report(realm_str)
data += [(page_title, content)]
title = realm_str
realm_stats = realm_stats_link(realm_str)
return render(
request,
"analytics/activity.html",
context=dict(data=data, realm_stats_link=realm_stats, title=title),
)


@@ -1,542 +0,0 @@
import logging
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar, Union, cast
from django.conf import settings
from django.db.models import QuerySet
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
from django.shortcuts import render
from django.utils import translation
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from analytics.lib.counts import COUNT_STATS, CountStat
from analytics.lib.time_utils import time_range
from analytics.models import (
BaseCount,
InstallationCount,
RealmCount,
StreamCount,
UserCount,
installation_epoch,
)
from zerver.decorator import (
require_non_guest_user,
require_server_admin,
require_server_admin_api,
to_utc_datetime,
zulip_login_required,
)
from zerver.lib.exceptions import JsonableError
from zerver.lib.i18n import get_and_set_request_language, get_language_translation_data
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.timestamp import convert_to_UTC
from zerver.lib.validator import to_non_negative_int
from zerver.models import Client, Realm, UserProfile, get_realm
if settings.ZILENCER_ENABLED:
from zilencer.models import RemoteInstallationCount, RemoteRealmCount, RemoteZulipServer
MAX_TIME_FOR_FULL_ANALYTICS_GENERATION = timedelta(days=1, minutes=30)
def is_analytics_ready(realm: Realm) -> bool:
return (timezone_now() - realm.date_created) > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION
def render_stats(
request: HttpRequest,
data_url_suffix: str,
realm: Optional[Realm],
*,
title: Optional[str] = None,
for_installation: bool = False,
remote: bool = False,
analytics_ready: bool = True,
) -> HttpResponse:
assert request.user.is_authenticated
if realm is not None:
# Same query to get guest user count as in get_seat_count in corporate/lib/stripe.py.
guest_users = UserProfile.objects.filter(
realm=realm, is_active=True, is_bot=False, role=UserProfile.ROLE_GUEST
).count()
space_used = realm.currently_used_upload_space_bytes()
if title:
pass
else:
title = realm.name or realm.string_id
else:
assert title
guest_users = None
space_used = None
page_params = dict(
data_url_suffix=data_url_suffix,
for_installation=for_installation,
remote=remote,
upload_space_used=space_used,
guest_users=guest_users,
)
request_language = get_and_set_request_language(
request,
request.user.default_language,
translation.get_language_from_path(request.path_info),
)
page_params["translation_data"] = get_language_translation_data(request_language)
return render(
request,
"analytics/stats.html",
context=dict(
target_name=title,
page_params=page_params,
analytics_ready=analytics_ready,
),
)
@zulip_login_required
def stats(request: HttpRequest) -> HttpResponse:
assert request.user.is_authenticated
realm = request.user.realm
if request.user.is_guest:
# TODO: Make @zulip_login_required pass the UserProfile so we
# can use @require_member_or_admin
raise JsonableError(_("Not allowed for guest users"))
return render_stats(request, "", realm, analytics_ready=is_analytics_ready(realm))
@require_server_admin
@has_request_variables
def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse:
try:
realm = get_realm(realm_str)
except Realm.DoesNotExist:
return HttpResponseNotFound()
return render_stats(
request,
f"/realm/{realm_str}",
realm,
analytics_ready=is_analytics_ready(realm),
)
@require_server_admin
@has_request_variables
def stats_for_remote_realm(
request: HttpRequest, remote_server_id: int, remote_realm_id: int
) -> HttpResponse:
assert settings.ZILENCER_ENABLED
server = RemoteZulipServer.objects.get(id=remote_server_id)
return render_stats(
request,
f"/remote/{server.id}/realm/{remote_realm_id}",
None,
title=f"Realm {remote_realm_id} on server {server.hostname}",
)
@require_server_admin_api
@has_request_variables
def get_chart_data_for_realm(
request: HttpRequest, /, user_profile: UserProfile, realm_str: str, **kwargs: Any
) -> HttpResponse:
try:
realm = get_realm(realm_str)
except Realm.DoesNotExist:
raise JsonableError(_("Invalid organization"))
return get_chart_data(request, user_profile, realm=realm, **kwargs)
@require_server_admin_api
@has_request_variables
def get_chart_data_for_remote_realm(
request: HttpRequest,
/,
user_profile: UserProfile,
remote_server_id: int,
remote_realm_id: int,
**kwargs: Any,
) -> HttpResponse:
assert settings.ZILENCER_ENABLED
server = RemoteZulipServer.objects.get(id=remote_server_id)
return get_chart_data(
request,
user_profile,
server=server,
remote=True,
remote_realm_id=int(remote_realm_id),
**kwargs,
)
@require_server_admin
def stats_for_installation(request: HttpRequest) -> HttpResponse:
assert request.user.is_authenticated
return render_stats(request, "/installation", None, title="installation", for_installation=True)
@require_server_admin
def stats_for_remote_installation(request: HttpRequest, remote_server_id: int) -> HttpResponse:
assert settings.ZILENCER_ENABLED
server = RemoteZulipServer.objects.get(id=remote_server_id)
return render_stats(
request,
f"/remote/{server.id}/installation",
None,
title=f"remote installation {server.hostname}",
for_installation=True,
remote=True,
)
@require_server_admin_api
@has_request_variables
def get_chart_data_for_installation(
request: HttpRequest, /, user_profile: UserProfile, chart_name: str = REQ(), **kwargs: Any
) -> HttpResponse:
return get_chart_data(request, user_profile, for_installation=True, **kwargs)
@require_server_admin_api
@has_request_variables
def get_chart_data_for_remote_installation(
request: HttpRequest,
/,
user_profile: UserProfile,
remote_server_id: int,
chart_name: str = REQ(),
**kwargs: Any,
) -> HttpResponse:
assert settings.ZILENCER_ENABLED
server = RemoteZulipServer.objects.get(id=remote_server_id)
return get_chart_data(
request,
user_profile,
for_installation=True,
remote=True,
server=server,
**kwargs,
)
@require_non_guest_user
@has_request_variables
def get_chart_data(
request: HttpRequest,
user_profile: UserProfile,
chart_name: str = REQ(),
min_length: Optional[int] = REQ(converter=to_non_negative_int, default=None),
start: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
end: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
realm: Optional[Realm] = None,
for_installation: bool = False,
remote: bool = False,
remote_realm_id: Optional[int] = None,
server: Optional["RemoteZulipServer"] = None,
) -> HttpResponse:
TableType = Union[
Type["RemoteInstallationCount"],
Type[InstallationCount],
Type["RemoteRealmCount"],
Type[RealmCount],
]
if for_installation:
if remote:
assert settings.ZILENCER_ENABLED
aggregate_table: TableType = RemoteInstallationCount
assert server is not None
else:
aggregate_table = InstallationCount
else:
if remote:
assert settings.ZILENCER_ENABLED
aggregate_table = RemoteRealmCount
assert server is not None
assert remote_realm_id is not None
else:
aggregate_table = RealmCount
tables: Union[Tuple[TableType], Tuple[TableType, Type[UserCount]]]
if chart_name == "number_of_humans":
stats = [
COUNT_STATS["1day_actives::day"],
COUNT_STATS["realm_active_humans::day"],
COUNT_STATS["active_users_audit:is_bot:day"],
]
tables = (aggregate_table,)
subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = {
stats[0]: {None: "_1day"},
stats[1]: {None: "_15day"},
stats[2]: {"false": "all_time"},
}
labels_sort_function = None
include_empty_subgroups = True
elif chart_name == "messages_sent_over_time":
stats = [COUNT_STATS["messages_sent:is_bot:hour"]]
tables = (aggregate_table, UserCount)
subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
labels_sort_function = None
include_empty_subgroups = True
elif chart_name == "messages_sent_by_message_type":
stats = [COUNT_STATS["messages_sent:message_type:day"]]
tables = (aggregate_table, UserCount)
subgroup_to_label = {
stats[0]: {
"public_stream": _("Public streams"),
"private_stream": _("Private streams"),
"private_message": _("Direct messages"),
"huddle_message": _("Group direct messages"),
}
}
labels_sort_function = lambda data: sort_by_totals(data["everyone"])
include_empty_subgroups = True
elif chart_name == "messages_sent_by_client":
stats = [COUNT_STATS["messages_sent:client:day"]]
tables = (aggregate_table, UserCount)
# Note that the labels are further re-written by client_label_map
subgroup_to_label = {
stats[0]: {str(id): name for id, name in Client.objects.values_list("id", "name")}
}
labels_sort_function = sort_client_labels
include_empty_subgroups = False
elif chart_name == "messages_read_over_time":
stats = [COUNT_STATS["messages_read::hour"]]
tables = (aggregate_table, UserCount)
subgroup_to_label = {stats[0]: {None: "read"}}
labels_sort_function = None
include_empty_subgroups = True
else:
raise JsonableError(_("Unknown chart name: {}").format(chart_name))
# Most likely someone using our API endpoint. The /stats page does not
# pass a start or end in its requests.
if start is not None:
start = convert_to_UTC(start)
if end is not None:
end = convert_to_UTC(end)
if start is not None and end is not None and start > end:
raise JsonableError(
_("Start time is later than end time. Start: {start}, End: {end}").format(
start=start,
end=end,
)
)
if realm is None:
# Note that this value is invalid for Remote tables; be
# careful not to access it in those code paths.
realm = user_profile.realm
if remote:
# For remote servers, we don't have fillstate data, and thus
# should simply use the first and last data points for the
# table.
assert server is not None
assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
aggregate_table_remote = cast(
Union[Type[RemoteInstallationCount], Type[RemoteRealmCount]], aggregate_table
) # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
if not aggregate_table_remote.objects.filter(server=server).exists():
raise JsonableError(
_("No analytics data available. Please contact your server administrator.")
)
if start is None:
first = aggregate_table_remote.objects.filter(server=server).first()
assert first is not None
start = first.end_time
if end is None:
last = aggregate_table_remote.objects.filter(server=server).last()
assert last is not None
end = last.end_time
else:
# Otherwise, we can use tables on the current server to
# determine a nice range, and some additional validation.
if start is None:
if for_installation:
start = installation_epoch()
else:
start = realm.date_created
if end is None:
end = max(
stat.last_successful_fill() or datetime.min.replace(tzinfo=timezone.utc)
for stat in stats
)
if start > end and (timezone_now() - start > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION):
logging.warning(
"User from realm %s attempted to access /stats, but the computed "
"start time: %s (creation of realm or installation) is later than the computed "
"end time: %s (last successful analytics update). Is the "
"analytics cron job running?",
realm.string_id,
start,
end,
)
raise JsonableError(
_("No analytics data available. Please contact your server administrator.")
)
assert len({stat.frequency for stat in stats}) == 1
end_times = time_range(start, end, stats[0].frequency, min_length)
data: Dict[str, Any] = {
"end_times": [int(end_time.timestamp()) for end_time in end_times],
"frequency": stats[0].frequency,
}
aggregation_level = {
InstallationCount: "everyone",
RealmCount: "everyone",
UserCount: "user",
}
if settings.ZILENCER_ENABLED:
aggregation_level[RemoteInstallationCount] = "everyone"
aggregation_level[RemoteRealmCount] = "everyone"
# -1 is a placeholder value, since there is no relevant filtering on InstallationCount
id_value = {
InstallationCount: -1,
RealmCount: realm.id,
UserCount: user_profile.id,
}
if settings.ZILENCER_ENABLED:
if server is not None:
id_value[RemoteInstallationCount] = server.id
# TODO: RemoteRealmCount logic doesn't correctly handle
# filtering by server_id as well.
if remote_realm_id is not None:
id_value[RemoteRealmCount] = remote_realm_id
for table in tables:
data[aggregation_level[table]] = {}
for stat in stats:
data[aggregation_level[table]].update(
get_time_series_by_subgroup(
stat,
table,
id_value[table],
end_times,
subgroup_to_label[stat],
include_empty_subgroups,
)
)
if labels_sort_function is not None:
data["display_order"] = labels_sort_function(data)
else:
data["display_order"] = None
return json_success(request, data=data)
def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
totals = [(sum(values), label) for label, values in value_arrays.items()]
totals.sort(reverse=True)
return [label for total, label in totals]
# For any given user, we want to show a fixed set of clients in the chart,
# regardless of the time aggregation or whether we're looking at realm or
# user data. This fixed set ideally includes the clients most important in
# understanding the realm's traffic and the user's traffic. This function
# tries to rank the clients so that taking the first N elements of the
# sorted list has a reasonable chance of doing so.
def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
realm_order = sort_by_totals(data["everyone"])
user_order = sort_by_totals(data["user"])
label_sort_values: Dict[str, float] = {}
for i, label in enumerate(realm_order):
label_sort_values[label] = i
for i, label in enumerate(user_order):
label_sort_values[label] = min(i - 0.1, label_sort_values.get(label, i))
return [label for label, sort_value in sorted(label_sort_values.items(), key=lambda x: x[1])]
CountT = TypeVar("CountT", bound=BaseCount)
def table_filtered_to_id(table: Type[CountT], key_id: int) -> QuerySet[CountT]:
if table == RealmCount:
return table.objects.filter(realm_id=key_id)
elif table == UserCount:
return table.objects.filter(user_id=key_id)
elif table == StreamCount:
return table.objects.filter(stream_id=key_id)
elif table == InstallationCount:
return table.objects.all()
elif settings.ZILENCER_ENABLED and table == RemoteInstallationCount:
return table.objects.filter(server_id=key_id)
elif settings.ZILENCER_ENABLED and table == RemoteRealmCount:
return table.objects.filter(realm_id=key_id)
else:
raise AssertionError(f"Unknown table: {table}")
def client_label_map(name: str) -> str:
if name == "website":
return "Web app"
if name.startswith("desktop app"):
return "Old desktop app"
if name == "ZulipElectron":
return "Desktop app"
if name == "ZulipTerminal":
return "Terminal app"
if name == "ZulipAndroid":
return "Old Android app"
if name == "ZulipiOS":
return "Old iOS app"
if name == "ZulipMobile":
return "Mobile app"
if name in ["ZulipPython", "API: Python"]:
return "Python API"
if name.startswith("Zulip") and name.endswith("Webhook"):
return name[len("Zulip") : -len("Webhook")] + " webhook"
return name
def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[int]]:
mapped_arrays: Dict[str, List[int]] = {}
for label, array in value_arrays.items():
mapped_label = client_label_map(label)
if mapped_label in mapped_arrays:
for i in range(0, len(array)):
mapped_arrays[mapped_label][i] += value_arrays[label][i]
else:
mapped_arrays[mapped_label] = [value_arrays[label][i] for i in range(0, len(array))]
return mapped_arrays
def get_time_series_by_subgroup(
stat: CountStat,
table: Type[BaseCount],
key_id: int,
end_times: List[datetime],
subgroup_to_label: Dict[Optional[str], str],
include_empty_subgroups: bool,
) -> Dict[str, List[int]]:
queryset = (
table_filtered_to_id(table, key_id)
.filter(property=stat.property)
.values_list("subgroup", "end_time", "value")
)
value_dicts: Dict[Optional[str], Dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
for subgroup, end_time, value in queryset:
value_dicts[subgroup][end_time] = value
value_arrays = {}
for subgroup, label in subgroup_to_label.items():
if (subgroup in value_dicts) or include_empty_subgroups:
value_arrays[label] = [value_dicts[subgroup][end_time] for end_time in end_times]
if stat == COUNT_STATS["messages_sent:client:day"]:
# HACK: We rewrite these arrays to collapse the Client objects
# with similar names into a single sum, and generally give
# them better names
return rewrite_client_arrays(value_arrays)
return value_arrays


@@ -1,395 +0,0 @@
import urllib
from contextlib import suppress
from dataclasses import dataclass
from datetime import timedelta
from decimal import Decimal
from typing import Any, Dict, Iterable, List, Optional
from urllib.parse import urlencode
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.urls import reverse
from django.utils.timesince import timesince
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from confirmation.models import Confirmation, confirmation_url
from confirmation.settings import STATUS_USED
from zerver.actions.create_realm import do_change_realm_subdomain
from zerver.actions.realm_settings import (
do_change_realm_org_type,
do_change_realm_plan_type,
do_deactivate_realm,
do_scrub_realm,
do_send_realm_reactivation_email,
)
from zerver.decorator import require_server_admin
from zerver.forms import check_subdomain_available
from zerver.lib.exceptions import JsonableError
from zerver.lib.realm_icon import realm_icon_url
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.subdomains import get_subdomain_from_hostname
from zerver.lib.validator import check_bool, check_string_in, to_decimal, to_non_negative_int
from zerver.models import (
MultiuseInvite,
PreregistrationRealm,
PreregistrationUser,
Realm,
RealmReactivationStatus,
UserProfile,
get_org_type_display_name,
get_realm,
)
from zerver.views.invite import get_invitee_emails_set
if settings.BILLING_ENABLED:
from corporate.lib.stripe import approve_sponsorship as do_approve_sponsorship
from corporate.lib.stripe import (
attach_discount_to_realm,
downgrade_at_the_end_of_billing_cycle,
downgrade_now_without_creating_additional_invoices,
get_discount_for_realm,
get_latest_seat_count,
make_end_of_cycle_updates_if_needed,
switch_realm_from_standard_to_plus_plan,
update_billing_method_of_current_plan,
update_sponsorship_status,
void_all_open_invoices,
)
from corporate.models import (
Customer,
CustomerPlan,
get_current_plan_by_realm,
get_customer_by_realm,
)
def get_plan_name(plan_type: int) -> str:
return {
Realm.PLAN_TYPE_SELF_HOSTED: "self-hosted",
Realm.PLAN_TYPE_LIMITED: "limited",
Realm.PLAN_TYPE_STANDARD: "standard",
Realm.PLAN_TYPE_STANDARD_FREE: "open source",
Realm.PLAN_TYPE_PLUS: "plus",
}[plan_type]
def get_confirmations(
types: List[int], object_ids: Iterable[int], hostname: Optional[str] = None
) -> List[Dict[str, Any]]:
lowest_datetime = timezone_now() - timedelta(days=30)
confirmations = Confirmation.objects.filter(
type__in=types, object_id__in=object_ids, date_sent__gte=lowest_datetime
)
confirmation_dicts = []
for confirmation in confirmations:
realm = confirmation.realm
content_object = confirmation.content_object
type = confirmation.type
expiry_date = confirmation.expiry_date
assert content_object is not None
if hasattr(content_object, "status"):
if content_object.status == STATUS_USED:
link_status = "Link has been used"
else:
link_status = "Link has not been used"
else:
link_status = ""
now = timezone_now()
if expiry_date is None:
expires_in = "Never"
elif now < expiry_date:
expires_in = timesince(now, expiry_date)
else:
expires_in = "Expired"
url = confirmation_url(confirmation.confirmation_key, realm, type)
confirmation_dicts.append(
{
"object": confirmation.content_object,
"url": url,
"type": type,
"link_status": link_status,
"expires_in": expires_in,
}
)
return confirmation_dicts
VALID_MODIFY_PLAN_METHODS = [
"downgrade_at_billing_cycle_end",
"downgrade_now_without_additional_licenses",
"downgrade_now_void_open_invoices",
"upgrade_to_plus",
]
VALID_STATUS_VALUES = [
"active",
"deactivated",
]
VALID_BILLING_METHODS = [
"send_invoice",
"charge_automatically",
]
@dataclass
class PlanData:
customer: Optional["Customer"] = None
current_plan: Optional["CustomerPlan"] = None
licenses: Optional[int] = None
licenses_used: Optional[int] = None
@require_server_admin
@has_request_variables
def support(
request: HttpRequest,
realm_id: Optional[int] = REQ(default=None, converter=to_non_negative_int),
plan_type: Optional[int] = REQ(default=None, converter=to_non_negative_int),
discount: Optional[Decimal] = REQ(default=None, converter=to_decimal),
new_subdomain: Optional[str] = REQ(default=None),
status: Optional[str] = REQ(default=None, str_validator=check_string_in(VALID_STATUS_VALUES)),
billing_method: Optional[str] = REQ(
default=None, str_validator=check_string_in(VALID_BILLING_METHODS)
),
sponsorship_pending: Optional[bool] = REQ(default=None, json_validator=check_bool),
approve_sponsorship: bool = REQ(default=False, json_validator=check_bool),
modify_plan: Optional[str] = REQ(
default=None, str_validator=check_string_in(VALID_MODIFY_PLAN_METHODS)
),
scrub_realm: bool = REQ(default=False, json_validator=check_bool),
query: Optional[str] = REQ("q", default=None),
org_type: Optional[int] = REQ(default=None, converter=to_non_negative_int),
) -> HttpResponse:
context: Dict[str, Any] = {}
if "success_message" in request.session:
context["success_message"] = request.session["success_message"]
del request.session["success_message"]
if settings.BILLING_ENABLED and request.method == "POST":
# We check that request.POST only has two keys in it: The
# realm_id and a field to change.
keys = set(request.POST.keys())
if "csrfmiddlewaretoken" in keys:
keys.remove("csrfmiddlewaretoken")
if len(keys) != 2:
raise JsonableError(_("Invalid parameters"))
assert realm_id is not None
realm = Realm.objects.get(id=realm_id)
acting_user = request.user
assert isinstance(acting_user, UserProfile)
if plan_type is not None:
current_plan_type = realm.plan_type
do_change_realm_plan_type(realm, plan_type, acting_user=acting_user)
msg = f"Plan type of {realm.string_id} changed from {get_plan_name(current_plan_type)} to {get_plan_name(plan_type)} "
context["success_message"] = msg
elif org_type is not None:
current_realm_type = realm.org_type
do_change_realm_org_type(realm, org_type, acting_user=acting_user)
msg = f"Org type of {realm.string_id} changed from {get_org_type_display_name(current_realm_type)} to {get_org_type_display_name(org_type)} "
context["success_message"] = msg
elif discount is not None:
current_discount = get_discount_for_realm(realm) or 0
attach_discount_to_realm(realm, discount, acting_user=acting_user)
context[
"success_message"
] = f"Discount of {realm.string_id} changed to {discount}% from {current_discount}%."
elif new_subdomain is not None:
old_subdomain = realm.string_id
try:
check_subdomain_available(new_subdomain)
except ValidationError as error:
context["error_message"] = error.message
else:
do_change_realm_subdomain(realm, new_subdomain, acting_user=acting_user)
request.session[
"success_message"
] = f"Subdomain changed from {old_subdomain} to {new_subdomain}"
return HttpResponseRedirect(
reverse("support") + "?" + urlencode({"q": new_subdomain})
)
elif status is not None:
if status == "active":
do_send_realm_reactivation_email(realm, acting_user=acting_user)
context[
"success_message"
] = f"Realm reactivation email sent to admins of {realm.string_id}."
elif status == "deactivated":
do_deactivate_realm(realm, acting_user=acting_user)
context["success_message"] = f"{realm.string_id} deactivated."
elif billing_method is not None:
if billing_method == "send_invoice":
update_billing_method_of_current_plan(
realm, charge_automatically=False, acting_user=acting_user
)
context[
"success_message"
] = f"Billing method of {realm.string_id} updated to pay by invoice."
elif billing_method == "charge_automatically":
update_billing_method_of_current_plan(
realm, charge_automatically=True, acting_user=acting_user
)
context[
"success_message"
] = f"Billing method of {realm.string_id} updated to charge automatically."
elif sponsorship_pending is not None:
if sponsorship_pending:
update_sponsorship_status(realm, True, acting_user=acting_user)
context["success_message"] = f"{realm.string_id} marked as pending sponsorship."
else:
update_sponsorship_status(realm, False, acting_user=acting_user)
context["success_message"] = f"{realm.string_id} is no longer pending sponsorship."
elif approve_sponsorship:
do_approve_sponsorship(realm, acting_user=acting_user)
context["success_message"] = f"Sponsorship approved for {realm.string_id}"
elif modify_plan is not None:
if modify_plan == "downgrade_at_billing_cycle_end":
downgrade_at_the_end_of_billing_cycle(realm)
context[
"success_message"
] = f"{realm.string_id} marked for downgrade at the end of billing cycle"
elif modify_plan == "downgrade_now_without_additional_licenses":
downgrade_now_without_creating_additional_invoices(realm)
context[
"success_message"
] = f"{realm.string_id} downgraded without creating additional invoices"
elif modify_plan == "downgrade_now_void_open_invoices":
downgrade_now_without_creating_additional_invoices(realm)
voided_invoices_count = void_all_open_invoices(realm)
context[
"success_message"
] = f"{realm.string_id} downgraded and voided {voided_invoices_count} open invoices"
elif modify_plan == "upgrade_to_plus":
switch_realm_from_standard_to_plus_plan(realm)
context["success_message"] = f"{realm.string_id} upgraded to Plus"
elif scrub_realm:
do_scrub_realm(realm, acting_user=acting_user)
context["success_message"] = f"{realm.string_id} scrubbed."
if query:
key_words = get_invitee_emails_set(query)
users = set(UserProfile.objects.filter(delivery_email__in=key_words))
realms = set(Realm.objects.filter(string_id__in=key_words))
for key_word in key_words:
try:
URLValidator()(key_word)
parse_result = urllib.parse.urlparse(key_word)
hostname = parse_result.hostname
assert hostname is not None
if parse_result.port:
hostname = f"{hostname}:{parse_result.port}"
subdomain = get_subdomain_from_hostname(hostname)
with suppress(Realm.DoesNotExist):
realms.add(get_realm(subdomain))
except ValidationError:
users.update(UserProfile.objects.filter(full_name__iexact=key_word))
# full_names can have , in them
users.update(UserProfile.objects.filter(full_name__iexact=query))
context["users"] = users
context["realms"] = realms
confirmations: List[Dict[str, Any]] = []
preregistration_user_ids = [
user.id for user in PreregistrationUser.objects.filter(email__in=key_words)
]
confirmations += get_confirmations(
[Confirmation.USER_REGISTRATION, Confirmation.INVITATION],
preregistration_user_ids,
hostname=request.get_host(),
)
preregistration_realm_ids = [
user.id for user in PreregistrationRealm.objects.filter(email__in=key_words)
]
confirmations += get_confirmations(
[Confirmation.REALM_CREATION],
preregistration_realm_ids,
hostname=request.get_host(),
)
multiuse_invite_ids = [
invite.id for invite in MultiuseInvite.objects.filter(realm__in=realms)
]
confirmations += get_confirmations([Confirmation.MULTIUSE_INVITE], multiuse_invite_ids)
realm_reactivation_status_objects = RealmReactivationStatus.objects.filter(realm__in=realms)
confirmations += get_confirmations(
[Confirmation.REALM_REACTIVATION], [obj.id for obj in realm_reactivation_status_objects]
)
context["confirmations"] = confirmations
# We want a union of all realms that might appear in the search result,
# but not necessary as a separate result item.
# Therefore, we do not modify the realms object in the context.
all_realms = realms.union(
[
confirmation["object"].realm
for confirmation in confirmations
# For confirmations, we only display realm details when the type is USER_REGISTRATION
# or INVITATION.
if confirmation["type"] in (Confirmation.USER_REGISTRATION, Confirmation.INVITATION)
]
+ [user.realm for user in users]
)
plan_data: Dict[int, PlanData] = {}
for realm in all_realms:
current_plan = get_current_plan_by_realm(realm)
plan_data[realm.id] = PlanData(
customer=get_customer_by_realm(realm),
current_plan=current_plan,
)
if current_plan is not None:
new_plan, last_ledger_entry = make_end_of_cycle_updates_if_needed(
current_plan, timezone_now()
)
if last_ledger_entry is not None:
if new_plan is not None:
plan_data[realm.id].current_plan = new_plan
else:
plan_data[realm.id].current_plan = current_plan
plan_data[realm.id].licenses = last_ledger_entry.licenses
plan_data[realm.id].licenses_used = get_latest_seat_count(realm)
context["plan_data"] = plan_data
def get_realm_owner_emails_as_string(realm: Realm) -> str:
return ", ".join(
realm.get_human_owner_users()
.order_by("delivery_email")
.values_list("delivery_email", flat=True)
)
def get_realm_admin_emails_as_string(realm: Realm) -> str:
return ", ".join(
realm.get_human_admin_users(include_realm_owners=False)
.order_by("delivery_email")
.values_list("delivery_email", flat=True)
)
context["get_realm_owner_emails_as_string"] = get_realm_owner_emails_as_string
context["get_realm_admin_emails_as_string"] = get_realm_admin_emails_as_string
context["get_discount_for_realm"] = get_discount_for_realm
context["get_org_type_display_name"] = get_org_type_display_name
context["realm_icon_url"] = realm_icon_url
context["Confirmation"] = Confirmation
context["sorted_realm_types"] = sorted(
Realm.ORG_TYPES.values(), key=lambda d: d["display_order"]
)
return render(request, "analytics/support.html", context=context)


@@ -1,106 +0,0 @@
from typing import Any, Dict, List, Tuple
from django.conf import settings
from django.db.models import QuerySet
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from analytics.views.activity_common import (
format_date_for_activity_reports,
get_user_activity_summary,
make_table,
)
from zerver.decorator import require_server_admin
from zerver.models import UserActivity, UserProfile, get_user_profile_by_id
if settings.BILLING_ENABLED:
pass
def get_user_activity_records(
user_profile: UserProfile,
) -> QuerySet[UserActivity]:
fields = [
"user_profile__full_name",
"query",
"client__name",
"count",
"last_visit",
]
records = UserActivity.objects.filter(
user_profile=user_profile,
)
records = records.order_by("-last_visit")
records = records.select_related("user_profile", "client").only(*fields)
return records
def raw_user_activity_table(records: QuerySet[UserActivity]) -> str:
cols = [
"query",
"client",
"count",
"last_visit",
]
def row(record: UserActivity) -> List[Any]:
return [
record.query,
record.client.name,
record.count,
format_date_for_activity_reports(record.last_visit),
]
rows = list(map(row, records))
title = "Raw data"
return make_table(title, cols, rows)
def user_activity_summary_table(user_summary: Dict[str, Dict[str, Any]]) -> str:
rows = []
for k, v in user_summary.items():
if k == "name" or k == "user_profile_id":
continue
client = k
count = v["count"]
last_visit = v["last_visit"]
row = [
format_date_for_activity_reports(last_visit),
client,
count,
]
rows.append(row)
rows = sorted(rows, key=lambda r: r[0], reverse=True)
cols = [
"last_visit",
"client",
"count",
]
title = "User activity"
return make_table(title, cols, rows)
@require_server_admin
def get_user_activity(request: HttpRequest, user_profile_id: int) -> HttpResponse:
user_profile = get_user_profile_by_id(user_profile_id)
records = get_user_activity_records(user_profile)
data: List[Tuple[str, str]] = []
user_summary = get_user_activity_summary(records)
content = user_activity_summary_table(user_summary)
data += [("Summary", content)]
content = raw_user_activity_table(records)
data += [("Info", content)]
title = user_profile.delivery_email
return render(
request,
"analytics/activity.html",
context=dict(data=data, title=title),
)


@@ -1,31 +0,0 @@
{generate_api_header(API_ENDPOINT_NAME)}
## Usage examples
{start_tabs}
{generate_code_example(python)|API_ENDPOINT_NAME|example}
{generate_code_example(javascript)|API_ENDPOINT_NAME|example}
{tab|curl}
{generate_code_example(curl)|API_ENDPOINT_NAME|example}
{end_tabs}
## Parameters
{generate_api_arguments_table|zulip.yaml|API_ENDPOINT_NAME}
{generate_parameter_description(API_ENDPOINT_NAME)}
## Response
{generate_return_values_table|zulip.yaml|API_ENDPOINT_NAME}
{generate_response_description(API_ENDPOINT_NAME)}
#### Example response(s)
{generate_code_example|API_ENDPOINT_NAME|fixture}

File diff suppressed because it is too large


@@ -1,103 +0,0 @@
# Construct a narrow
A **narrow** is a set of filters for Zulip messages that can be based
on many different factors (like sender, stream, topic, search
keywords, etc.). Narrows are used in various places in the Zulip
API (most importantly, in the API for fetching messages).
It is simplest to explain the algorithm for encoding a search as a
narrow using a single example. Consider the following search query
(written as it would be entered in the Zulip web app's search box).
It filters for messages sent to stream `announce`, not sent by
`iago@zulip.com`, and containing the words `cool` and `sunglasses`:
```
stream:announce -sender:iago@zulip.com cool sunglasses
```
This query would be encoded for use in the Zulip API as a JSON list
of simple objects, as follows:
```json
[
{
"operator": "stream",
"operand": "announce"
},
{
"operator": "sender",
"operand": "iago@zulip.com",
"negated": true
},
{
"operator": "search",
"operand": "cool sunglasses"
}
]
```
The Zulip help center article on [searching for messages](/help/search-for-messages)
documents the majority of the search/narrow options supported by the
Zulip API.
Note that many narrows, including all that lack a `stream` or `streams`
operator, search the current user's personal message history. See
[searching shared history](/help/search-for-messages#searching-shared-history)
for details.
**Changes**: In Zulip 7.0 (feature level 177), support was added
for three filters related to direct messages: `is:dm`, `dm` and
`dm-including`. The `dm` operator replaced and deprecated the
`pm-with` operator. The `is:dm` filter replaced and deprecated
the `is:private` filter. The `dm-including` operator replaced and
deprecated the `group-pm-with` operator.
The `dm-including` and `group-pm-with` operators return slightly
different results. For example, `dm-including:1234` returns all
direct messages (1-on-1 and group) that include the current user
and the user with the unique user ID of `1234`. On the other hand,
`group-pm-with:1234` returned only group direct messages that included
the current user and the user with the unique user ID of `1234`.
Both `dm` and `is:dm` are aliases of `pm-with` and `is:private`
respectively, and return the same exact results that the deprecated
filters did.
## Narrows that use IDs
The `near` and `id` operators, documented in the help center, use message
IDs for their operands.
* `near:12345`: Search messages around the message with ID `12345`.
* `id:12345`: Search for only the message with ID `12345`.
There are a few additional narrow/search options (new in Zulip 2.1)
that use either stream IDs or user IDs that are not documented in the
help center because they are primarily useful to API clients:
* `stream:1234`: Search messages sent to the stream with ID `1234`.
* `sender:1234`: Search messages sent by user ID `1234`.
* `dm:1234`: Search the direct message conversation between
you and user ID `1234`.
* `dm:1234,5678`: Search the direct message conversation between
you, user ID `1234`, and user ID `5678`.
* `dm-including:1234`: Search all direct messages (1-on-1 and group)
that include you and user ID `1234`.
The operands for these search options must be encoded either as an
integer ID or a JSON list of integer IDs. For example, to query
messages sent by user 1234 to the direct message thread with yourself,
user 1234, and user 5678, the correct JSON-encoded query is:
```json
[
{
"operator": "dm",
"operand": [1234, 5678]
},
{
"operator": "sender",
"operand": 1234
}
]
```
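
To see how a narrow is actually used, here is a hedged sketch (not part of the documented examples above) that passes the narrow from the first example to the message-fetching endpoint via the official Python bindings; the anchor and count values are illustrative.

```python
import zulip

# Assumes a ~/.zuliprc containing your email, API key, and site.
client = zulip.Client(config_file="~/.zuliprc")

# Fetch the most recent messages matching the example narrow above.
request = {
    "anchor": "newest",
    "num_before": 100,
    "num_after": 0,
    "narrow": [
        {"operator": "stream", "operand": "announce"},
        {"operator": "sender", "operand": "iago@zulip.com", "negated": True},
        {"operator": "search", "operand": "cool sunglasses"},
    ],
}
result = client.get_messages(request)
print(result["result"], len(result.get("messages", [])))
```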


@@ -1,67 +0,0 @@
{generate_api_header(/scheduled_messages:post)}
## Usage examples
{start_tabs}
{generate_code_example(python)|/scheduled_messages:post|example}
{generate_code_example(javascript)|/scheduled_messages:post|example}
{tab|curl}
``` curl
# Create a scheduled stream message
curl -X POST {{ api_url }}/v1/scheduled_messages \
-u BOT_EMAIL_ADDRESS:BOT_API_KEY \
--data-urlencode type=stream \
--data-urlencode to=9 \
--data-urlencode topic=Hello \
--data-urlencode 'content=Thank you for' \
--data-urlencode scheduled_delivery_timestamp=3165826990
# Update a scheduled stream message
curl -X POST {{ api_url }}/v1/scheduled_messages \
-u BOT_EMAIL_ADDRESS:BOT_API_KEY \
--data-urlencode type=stream \
--data-urlencode to=9 \
--data-urlencode 'topic=Welcome aboard' \
--data-urlencode 'content=Thank you for the help!' \
--data-urlencode scheduled_delivery_timestamp=3165856990 \
--data-urlencode scheduled_message_id=1
# Create a scheduled direct message
curl -X POST {{ api_url }}/v1/messages \
-u BOT_EMAIL_ADDRESS:BOT_API_KEY \
--data-urlencode type=direct \
--data-urlencode 'to=[9, 10]' \
--data-urlencode 'content=Can we meet tomorrow?' \
--data-urlencode scheduled_delivery_timestamp=3165826990
# Update a scheduled direct message
curl -X POST {{ api_url }}/v1/messages \
-u BOT_EMAIL_ADDRESS:BOT_API_KEY \
--data-urlencode type=direct \
--data-urlencode 'to=[9, 10, 11]' \
--data-urlencode 'content=Can we meet tomorrow?' \
--data-urlencode scheduled_delivery_timestamp=3165856990 \
--data-urlencode scheduled_message_id=2
```
{end_tabs}
## Parameters
{generate_api_arguments_table|zulip.yaml|/scheduled_messages:post}
{generate_parameter_description(/scheduled_messages:post)}
## Response
{generate_return_values_table|zulip.yaml|/scheduled_messages:post}
{generate_response_description(/scheduled_messages:post)}
#### Example response(s)
{generate_code_example|/scheduled_messages:post|fixture}


@@ -1,80 +0,0 @@
# HTTP headers
This page documents the HTTP headers used by the Zulip API.
Most importantly, API clients authenticate to the server using
HTTP Basic authentication. If you're using the official [Python or
JavaScript bindings](/api/installation-instructions), this is taken
care of when you configure the bindings.
Otherwise, see the `curl` example on each endpoint's documentation
page, which details the request format.
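
As a rough illustration (the hostname, bot email, and API key below are placeholders, and the `requests` library is used only for the sketch), Basic authentication uses the bot's email as the username and its API key as the password:

```python
import requests

# Placeholder credentials; substitute your own server, bot email, and API key.
ZULIP_SITE = "https://example.zulipchat.com"
BOT_EMAIL = "my-bot@example.com"
API_KEY = "abcd1234"

# HTTP Basic authentication: email as username, API key as password.
response = requests.get(
    f"{ZULIP_SITE}/api/v1/users/me",
    auth=(BOT_EMAIL, API_KEY),
)
print(response.status_code, response.json()["result"])
```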
Documented below are additional HTTP headers and header conventions
generally used by Zulip:
## The `User-Agent` header
Clients are not required to pass a `User-Agent` HTTP header, but we
highly recommend doing so when writing an integration. It's easy to do
and it can help save time when debugging issues related to an API
client.
If provided, the Zulip server will parse the `User-Agent` HTTP header
in order to identify specific clients and integrations. This
information is used by the server for logging, [usage
statistics](/help/analytics), and on rare occasions, for
backwards-compatibility logic to preserve support for older versions
of official clients.
Official Zulip clients and integrations use a `User-Agent` that starts
with something like `ZulipMobile/20.0.103 `, encoding the name of the
application and its version.
Zulip's official API bindings have reasonable defaults for
`User-Agent`. For example, the official Zulip Python bindings have a
default `User-Agent` starting with `ZulipPython/{version}`, where
`version` is the version of the library.
You can give your bot/integration its own name by passing the `client`
parameter when initializing the Python bindings. For example, the
official Zulip Nagios integration is initialized like this:
``` python
client = zulip.Client(
config_file=opts.config, client=f"ZulipNagios/{VERSION}"
)
```
If you are working on an integration that you plan to share outside
your organization, you can get help picking a good name in
`#integrations` in the [Zulip development
community](https://zulip.com/development-community).
## Rate-limiting response headers
To help clients avoid exceeding rate limits, Zulip sets the following
HTTP headers in all API responses:
* `X-RateLimit-Remaining`: The number of additional requests of this
type that the client can send before exceeding its limit.
* `X-RateLimit-Limit`: The limit that would be applicable to a client
that had not made any recent requests of this type. This is useful
for designing a client's burst behavior so as to avoid ever reaching
a rate limit.
* `X-RateLimit-Reset`: The time at which the client will no longer
have any rate limits applied to it (and thus could do a burst of
`X-RateLimit-Limit` requests).
[Zulip's rate limiting rules are configurable][rate-limiting-rules],
and can vary by server and over time. The default configuration
currently includes:
* A limit of 200 total API requests per minute for each user.
* Separate, much lower limits for authentication/login attempts.
When the Zulip server has configured multiple rate limits that apply
to a given request, the values returned will be for the strictest
limit.
[rate-limiting-rules]: https://zulip.readthedocs.io/en/latest/production/security-model.html#rate-limiting
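
To make the header semantics concrete, here is a hedged sketch of a client that backs off when it exhausts its budget; it treats `X-RateLimit-Reset` as a Unix timestamp, per the description above, and uses the `requests` library purely for illustration.

```python
import time

import requests

def get_with_backoff(url: str, auth: tuple) -> requests.Response:
    """Issue a GET request, sleeping until the rate limit window resets if needed."""
    response = requests.get(url, auth=auth)
    remaining = int(response.headers.get("X-RateLimit-Remaining", "1"))
    reset = float(response.headers.get("X-RateLimit-Reset", "0"))
    if remaining == 0:
        # No budget left: wait until the server says the limit resets.
        time.sleep(max(reset - time.time(), 0))
    return response
```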


@@ -1,113 +0,0 @@
#### Messages
* [Send a message](/api/send-message)
* [Upload a file](/api/upload-file)
* [Edit a message](/api/update-message)
* [Delete a message](/api/delete-message)
* [Get messages](/api/get-messages)
* [Construct a narrow](/api/construct-narrow)
* [Add an emoji reaction](/api/add-reaction)
* [Remove an emoji reaction](/api/remove-reaction)
* [Render a message](/api/render-message)
* [Fetch a single message](/api/get-message)
* [Check if messages match narrow](/api/check-messages-match-narrow)
* [Get a message's edit history](/api/get-message-history)
* [Update personal message flags](/api/update-message-flags)
* [Update personal message flags for narrow](/api/update-message-flags-for-narrow)
* [Mark messages as read in bulk](/api/mark-all-as-read)
* [Get a message's read receipts](/api/get-read-receipts)
#### Scheduled messages
* [Get scheduled messages](/api/get-scheduled-messages)
* [Create or edit a scheduled message](/api/create-or-update-scheduled-message)
* [Delete a scheduled message](/api/delete-scheduled-message)
#### Drafts
* [Get drafts](/api/get-drafts)
* [Create drafts](/api/create-drafts)
* [Edit a draft](/api/edit-draft)
* [Delete a draft](/api/delete-draft)
#### Streams
* [Get subscribed streams](/api/get-subscriptions)
* [Subscribe to a stream](/api/subscribe)
* [Unsubscribe from a stream](/api/unsubscribe)
* [Get subscription status](/api/get-subscription-status)
* [Get all subscribers](/api/get-subscribers)
* [Update subscription settings](/api/update-subscription-settings)
* [Get all streams](/api/get-streams)
* [Get a stream by ID](/api/get-stream-by-id)
* [Get stream ID](/api/get-stream-id)
* [Create a stream](/api/create-stream)
* [Update a stream](/api/update-stream)
* [Archive a stream](/api/archive-stream)
* [Get topics in a stream](/api/get-stream-topics)
* [Topic muting](/api/mute-topic)
* [Update personal preferences for a topic](/api/update-user-topic)
* [Delete a topic](/api/delete-topic)
* [Add a default stream](/api/add-default-stream)
* [Remove a default stream](/api/remove-default-stream)
#### Users
* [Get all users](/api/get-users)
* [Get own user](/api/get-own-user)
* [Get a user](/api/get-user)
* [Get a user by email](/api/get-user-by-email)
* [Update a user](/api/update-user)
* [Update your status](/api/update-status)
* [Create a user](/api/create-user)
* [Deactivate a user](/api/deactivate-user)
* [Reactivate a user](/api/reactivate-user)
* [Deactivate own user](/api/deactivate-own-user)
* [Set "typing" status](/api/set-typing-status)
* [Get user presence](/api/get-user-presence)
* [Get presence of all users](/api/get-presence)
* [Get attachments](/api/get-attachments)
* [Delete an attachment](/api/remove-attachment)
* [Update settings](/api/update-settings)
* [Get user groups](/api/get-user-groups)
* [Create a user group](/api/create-user-group)
* [Update a user group](/api/update-user-group)
* [Delete a user group](/api/remove-user-group)
* [Update user group members](/api/update-user-group-members)
* [Update user group subgroups](/api/update-user-group-subgroups)
* [Get user group membership status](/api/get-is-user-group-member)
* [Get user group members](/api/get-user-group-members)
* [Get subgroups of user group](/api/get-user-group-subgroups)
* [Mute a user](/api/mute-user)
* [Unmute a user](/api/unmute-user)
* [Get all alert words](/api/get-alert-words)
* [Add alert words](/api/add-alert-words)
* [Remove alert words](/api/remove-alert-words)
#### Server & organizations
* [Get server settings](/api/get-server-settings)
* [Get linkifiers](/api/get-linkifiers)
* [Add a linkifier](/api/add-linkifier)
* [Update a linkifier](/api/update-linkifier)
* [Remove a linkifier](/api/remove-linkifier)
* [Add a code playground](/api/add-code-playground)
* [Remove a code playground](/api/remove-code-playground)
* [Get all custom emoji](/api/get-custom-emoji)
* [Upload custom emoji](/api/upload-custom-emoji)
* [Get all custom profile fields](/api/get-custom-profile-fields)
* [Reorder custom profile fields](/api/reorder-custom-profile-fields)
* [Create a custom profile field](/api/create-custom-profile-field)
* [Update realm-level defaults of user settings](/api/update-realm-user-settings-defaults)
#### Real-time events
* [Real time events API](/api/real-time-events)
* [Register an event queue](/api/register-queue)
* [Get events from an event queue](/api/get-events)
* [Delete an event queue](/api/delete-queue)
#### Specialty endpoints
* [Fetch an API key (production)](/api/fetch-api-key)
* [Fetch an API key (development only)](/api/dev-fetch-api-key)


@@ -1,26 +0,0 @@
# The Zulip API
Zulip's APIs allow you to integrate other services with Zulip. This
guide should help you find the API you need:
* First, check if the tool you'd like to integrate with Zulip
[already has a native integration](/integrations/).
* Next, check if [Zapier](https://zapier.com/apps) or
[IFTTT](https://ifttt.com/search) has an integration.
[Zulip's Zapier integration](/integrations/doc/zapier) and
[Zulip's IFTTT integration](/integrations/doc/ifttt) often allow
integrating a new service with Zulip without writing any code.
* If you'd like to send content into Zulip, you can
[write a native incoming webhook integration](/api/incoming-webhooks-overview)
or use [Zulip's API for sending messages](/api/send-message).
* If you're building an interactive bot that reacts to activity inside
Zulip, you'll want to look at Zulip's
[Python framework for interactive bots](/api/running-bots) or
[Zulip's real-time events API](/api/get-events).
And if you still need to build your own integration with Zulip, check out
the full [REST API](/api/rest), generally starting with
[installing the API client bindings](/api/installation-instructions).
In case you already know how you want to build your integration and you're
just looking for an API key, we've got you covered [here](/api/api-keys).


@@ -1,89 +0,0 @@
{generate_api_header(/mark_all_as_read:post)}
## Usage examples
{start_tabs}
{generate_code_example(python)|/mark_all_as_read:post|example}
{generate_code_example(javascript)|/mark_all_as_read:post|example}
{tab|curl}
{generate_code_example(curl)|/mark_all_as_read:post|example}
{end_tabs}
## Parameters
{generate_api_arguments_table|zulip.yaml|/mark_all_as_read:post}
{generate_parameter_description(/mark_all_as_read:post)}
## Response
{generate_response_description(/mark_all_as_read:post)}
#### Example response(s)
{generate_code_example|/mark_all_as_read:post|fixture}
{generate_api_header(/mark_stream_as_read:post)}
## Usage examples
{start_tabs}
{generate_code_example(python)|/mark_stream_as_read:post|example}
{generate_code_example(javascript)|/mark_all_as_read:post|example}
{tab|curl}
{generate_code_example(curl)|/mark_stream_as_read:post|example}
{end_tabs}
## Parameters
{generate_api_arguments_table|zulip.yaml|/mark_stream_as_read:post}
{generate_parameter_description(/mark_all_as_read:post)}
## Response
{generate_response_description(/mark_all_as_read:post)}
#### Example response(s)
{generate_code_example|/mark_stream_as_read:post|fixture}
{generate_api_header(/mark_topic_as_read:post)}
## Usage examples
{start_tabs}
{generate_code_example(python)|/mark_topic_as_read:post|example}
{generate_code_example(javascript)|/mark_all_as_read:post|example}
{tab|curl}
{generate_code_example(curl)|/mark_topic_as_read:post|example}
{end_tabs}
## Parameters
{generate_api_arguments_table|zulip.yaml|/mark_topic_as_read:post}
{generate_parameter_description(/mark_all_as_read:post)}
## Response
{generate_response_description(/mark_all_as_read:post)}
#### Example response(s)
{generate_code_example|/mark_topic_as_read:post|fixture}


@@ -1,184 +0,0 @@
# Outgoing webhooks
Outgoing webhooks allow you to build or set up Zulip integrations
which are notified when certain types of messages are sent in
Zulip. When one of those events is triggered, we'll send an HTTP POST
payload to the webhook's configured URL. Webhooks can be used to
power a wide range of Zulip integrations. For example, the
[Zulip Botserver][zulip-botserver] is built on top of this API.
Zulip supports outgoing webhooks both in a clean native Zulip format,
as well as a format that's compatible with
[Slack's outgoing webhook API][slack-outgoing-webhook], which can help
with porting an existing Slack integration to work with Zulip.
[zulip-botserver]: /api/deploying-bots#zulip-botserver
[slack-outgoing-webhook]: https://api.slack.com/custom-integrations/outgoing-webhooks
To register an outgoing webhook:
* Log in to the Zulip server.
* Navigate to *Personal settings (<i class="fa fa-cog"></i>)* -> *Bots* ->
*Add a new bot*. Select *Outgoing webhook* for bot type, the URL
you'd like Zulip to post to as the **Endpoint URL**, the format you
want, and click on *Create bot* to submit the form.
* Your new bot user will appear in the *Active bots* panel, which you
can use to edit the bot's settings.
## Triggering
There are currently two ways to trigger an outgoing webhook:
1. **@-mention** the bot user in a stream. If the bot replies, its
reply will be sent to that stream and topic.
2. **Send a private message** with the bot as one of the recipients.
If the bot replies, its reply will be sent to that thread.
## Timeouts
The remote server must respond to a `POST` request in a timely manner.
The default timeout for outgoing webhooks is 10 seconds, though this
can be configured by the administrator of the Zulip server by setting
`OUTGOING_WEBHOOKS_TIMEOUT_SECONDS` in the [server's
settings][settings].
[settings]: https://zulip.readthedocs.io/en/latest/subsystems/settings.html#server-settings
## Outgoing webhook format
{generate_code_example|/zulip-outgoing-webhook:post|fixture}
### Fields documentation
{generate_return_values_table|zulip.yaml|/zulip-outgoing-webhook:post}
## Replying with a message
Many bots implemented using this outgoing webhook API will want to
send a reply message into Zulip. Zulip's outgoing webhook API
provides a convenient way to do that by simply returning an
appropriate HTTP response to the Zulip server.
A correctly implemented bot will return a JSON object containing one
of two possible formats, described below.
### Example response payloads
If the bot code wants to opt out of responding, it can explicitly
encode a JSON dictionary that contains `response_not_required` set
to `true`, so that no response message is sent to the user. (This
is helpful to distinguish deliberate non-responses from bugs.)
Here's an example of the JSON your server should respond with if
you would not like to send a response message:
```json
{
"response_not_required": true
}
```
Here's an example of the JSON your server should respond with if
you would like to send a response message:
```json
{
"content": "Hey, we just received **something** from Zulip!"
}
```
The `content` field should contain Zulip-format Markdown.
Note that an outgoing webhook bot can use the [Zulip REST
API](/api/rest) with its API key in case your bot needs to do
something else, like add an emoji reaction or upload a file.
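For illustration, here's a minimal sketch of a bot service that replies using
the response formats above, written with only Python's standard library. The
port and the payload field it reads (`data`) are assumptions for this example;
see the payload fixture above for the authoritative field names.
```python
# A minimal sketch of an outgoing webhook endpoint, using only the Python
# standard library. The port (8080) and the payload field read ("data") are
# assumptions for this example, not part of the documented API.
import json
from http.server import BaseHTTPRequestHandler, HTTPServer


class ZulipOutgoingWebhookHandler(BaseHTTPRequestHandler):
    def do_POST(self) -> None:
        length = int(self.headers.get("Content-Length", "0"))
        payload = json.loads(self.rfile.read(length))

        content = payload.get("data", "").strip()
        if not content:
            # Explicitly opt out of responding, rather than sending nothing.
            reply = {"response_not_required": True}
        else:
            reply = {"content": f"Hey, we just received **{content}** from Zulip!"}

        body = json.dumps(reply).encode()
        self.send_response(200)
        self.send_header("Content-Type", "application/json")
        self.send_header("Content-Length", str(len(body)))
        self.end_headers()
        self.wfile.write(body)


if __name__ == "__main__":
    HTTPServer(("", 8080), ZulipOutgoingWebhookHandler).serve_forever()
```
Pointing a bot's **Endpoint URL** at a server like this and @-mentioning the
bot should exercise the full round trip.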
## Slack-format webhook format
This interface translates Zulip's outgoing webhook request into the
format that Slack's outgoing webhook interface sends. As a result,
one should be able to use this to interact with third-party
integrations designed to work with Slack's outgoing webhook interface.
Here's how we fill in the fields that a Slack-format webhook expects:
<table class="table">
<thead>
<tr>
<th>Name</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td><code>token</code></td>
<td>A string of alphanumeric characters you can use to
authenticate the webhook request (each bot user uses a fixed token)</td>
</tr>
<tr>
<td><code>team_id</code></td>
<td>ID of the Zulip organization prefixed by "T".</td>
</tr>
<tr>
<td><code>team_domain</code></td>
<td>Hostname of the Zulip organization</td>
</tr>
<tr>
<td><code>channel_id</code></td>
<td>Stream ID prefixed by "C"</td>
</tr>
<tr>
<td><code>channel_name</code></td>
<td>Stream name</td>
</tr>
<tr>
<td><code>thread_ts</code></td>
<td>Timestamp for when message was sent</td>
</tr>
<tr>
<td><code>timestamp</code></td>
<td>Timestamp for when message was sent</td>
</tr>
<tr>
<td><code>user_id</code></td>
<td>ID of the user who sent the message prefixed by "U"</td>
</tr>
<tr>
<td><code>user_name</code></td>
<td>Full name of sender</td>
</tr>
<tr>
<td><code>text</code></td>
<td>The content of the message (in Markdown)</td>
</tr>
<tr>
<td><code>trigger_word</code></td>
<td>Trigger method</td>
</tr>
<tr>
<td><code>service_id</code></td>
<td>ID of the bot user</td>
</tr>
</tbody>
</table>
The above data is posted as a list of tuples (not JSON); here's an example:
```
[('token', 'v9fpCdldZIej2bco3uoUvGp06PowKFOf'),
('team_id', 'T1512'),
('team_domain', 'zulip.example.com'),
('channel_id', 'C123'),
('channel_name', 'integrations'),
('thread_ts', 1532078950),
('timestamp', 1532078950),
('user_id', 'U21'),
('user_name', 'Full Name'),
('text', '@**test**'),
('trigger_word', 'mention'),
('service_id', 27)]
```
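As a rough sketch, this form-encoded body can be decoded with standard
URL-decoding tools; the raw string below is hand-built from the example above,
so treat it as illustrative rather than authoritative:
```python
# Sketch: decoding a Slack-format outgoing webhook request body. The body is
# form-encoded (the list of tuples above), not JSON.
from urllib.parse import parse_qsl

raw_body = (
    "token=v9fpCdldZIej2bco3uoUvGp06PowKFOf"
    "&team_domain=zulip.example.com"
    "&channel_name=integrations"
    "&user_name=Full+Name"
    "&text=%40%2A%2Atest%2A%2A"  # "@**test**", URL-encoded
    "&trigger_word=mention"
)

fields = dict(parse_qsl(raw_body))
print(f"{fields['user_name']} triggered the bot via {fields['trigger_word']}")
print("Message text:", fields["text"])
```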
* For a successful request, if data is returned, it returns that data;
otherwise it returns a blank response.
* For a failed request, it returns the reason for the failure, as returned
by the server, or the exception message.


@@ -1,34 +0,0 @@
# Error handling
Zulip's API will always return a JSON-format response.
The HTTP status code indicates whether the request was successful
(200 = success, 40x = user error, 50x = server error). Every response
will contain at least two keys: `msg` (a human-readable error message)
and `result`, which will be either `error` or `success` (this is
redundant with the HTTP status code, but is convenient when printing
responses while debugging).
For some common errors, Zulip provides a `code` attribute. Where
present, clients should check `code`, rather than `msg`, when looking
for specific error conditions, since the `msg` strings are
internationalized (e.g. the server will send the error message
translated into French if the user has a French locale).
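For example, a client might branch on `result` and `code` along these lines
(a sketch assuming the third-party `requests` package; the server URL,
credentials, and the specific error code checked are placeholders):
```python
# Sketch: branching on `result` and `code` in an API response. The URL,
# credentials, and the error code checked here are placeholders.
import requests

response = requests.get(
    "https://example.zulipchat.com/api/v1/users/me",
    auth=("bot@example.com", "BOT_API_KEY"),
)
data = response.json()

if data["result"] == "success":
    print("Signed in as", data.get("full_name"))
elif data.get("code") == "RATE_LIMIT_HIT":
    # Check `code` rather than the internationalized `msg` string.
    print("Rate limited; retry after", response.headers.get("Retry-After"), "seconds")
else:
    print("Request failed:", data["msg"])
```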
Each endpoint documents its own unique errors; documented below are
errors common to many endpoints:
{generate_code_example|/rest-error-handling:post|fixture}
## Ignored Parameters
In JSON success responses, all Zulip REST API endpoints may return
an array of parameters sent in the request that are not supported
by that specific endpoint.
While this can be expected, e.g. when sending both current and legacy
names for a parameter to a Zulip server of unknown version, this often
indicates either a bug in the client implementation or an attempt to
configure a new feature while connected to an older Zulip server that
does not support said feature.
{generate_code_example|/settings:patch|fixture}
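A client that wants to surface these could log the returned array, along the
lines of this sketch (the key name follows the example fixture above; the
sample parameter name is made up):
```python
# Sketch: logging parameters the server ignored. Assumes `data` is the parsed
# JSON of a successful response; the sample parameter name is hypothetical.
import logging

data = {
    "result": "success",
    "msg": "",
    "ignored_parameters_unsupported": ["some_future_setting"],
}

ignored = data.get("ignored_parameters_unsupported", [])
if ignored:
    logging.warning("Server ignored unsupported parameters: %s", ", ".join(ignored))
```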


@@ -1,120 +0,0 @@
# Roles and permissions
Zulip offers several levels of permissions based on a
[user's role](/help/roles-and-permissions) in a Zulip organization.
Here are some important details to note when working with these
roles and permissions in Zulip's API:
## A user's role
A user's account data include a `role` property, which contains the
user's role in the Zulip organization. These roles are encoded as:
* Organization owner: 100
* Organization administrator: 200
* Organization moderator: 300
* Member: 400
* Guest: 600
User account data also include these boolean properties that duplicate
the related roles above:
* `is_owner` specifying whether the user is an organization owner.
* `is_admin` specifying whether the user is an organization administrator.
* `is_guest` specifying whether the user is a guest user.
These are intended as conveniences for simple clients, and clients
should prefer using the `role` field, since only that one is updated
by the [events API](/api/get-events).
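As a sketch, a simple client might translate the `role` encoding into a label
like this (the user dictionary passed in at the end is a made-up example):
```python
# Sketch: turning the `role` encoding into a label. The mapping mirrors the
# role values listed above.
ROLE_NAMES = {
    100: "Organization owner",
    200: "Organization administrator",
    300: "Organization moderator",
    400: "Member",
    600: "Guest",
}


def describe_role(user: dict) -> str:
    # Prefer `role`; is_owner/is_admin/is_guest merely duplicate it.
    return ROLE_NAMES.get(user["role"], "Unknown role")


print(describe_role({"full_name": "Iago", "role": 200}))  # Organization administrator
```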
Note that [`POST /register`](/api/register-queue) also returns an
`is_moderator` boolean property specifying whether the current user is
an organization moderator.
Additionally, user account data include an `is_billing_admin` property
specifying whether the user is a billing administrator for the Zulip
organization, which is not related to one of the roles listed above,
but rather allows for specific permissions related to billing
administration in [paid Zulip Cloud plans](https://zulip.com/plans/).
### User account data in the API
Endpoints that return the user account data / properties mentioned
above are:
* [`GET /users`](/api/get-users)
* [`GET /users/{user_id}`](/api/get-user)
* [`GET /users/{email}`](/api/get-user-by-email)
* [`GET /users/me`](/api/get-own-user)
* [`GET /events`](/api/get-events)
* [`POST /register`](/api/register-queue)
Note that the [`POST /register` endpoint](/api/register-queue) returns
the above boolean properties to describe the role of the current user,
when `realm_user` is present in `fetch_event_types`.
Additionally, the specific events returned by the
[`GET /events` endpoint](/api/get-events) containing data related
to user accounts and roles are the [`realm_user` add
event](/api/get-events#realm_user-add), and the
[`realm_user` update event](/api/get-events#realm_user-update).
## Permission levels
Many areas of Zulip are customizable by the roles
above, such as (but not limited to) [restricting message editing and
deletion](/help/restrict-message-editing-and-deletion) and
[streams permissions](/help/stream-permissions). The potential
permission levels are:
* Everyone / Any user including Guests (least restrictive)
* Members
* Full members
* Moderators
* Administrators
* Owners
* Nobody (most restrictive)
These permission levels and policies in the API are designed to be
cutoffs in that users with the specified role and above have the
specified ability or access. For example, a permission level documented
as 'moderators only' includes organization moderators, administrators,
and owners.
Note that specific settings and policies in the Zulip API that use these
permission levels will likely support a subset of those listed above.
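Given the encodings above, where more privileged roles have smaller values, a
"role and above" cutoff can be expressed as a simple comparison. This is only
a sketch of the idea, not an API-defined check:
```python
# Sketch: a "moderators only" cutoff. Smaller `role` values are more
# privileged, so "the specified role and above" becomes a <= comparison.
MODERATOR = 300


def meets_moderator_cutoff(user: dict) -> bool:
    return user["role"] <= MODERATOR


print(meets_moderator_cutoff({"role": 200}))  # True: administrators are above moderators
print(meets_moderator_cutoff({"role": 400}))  # False: members are below the cutoff
```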
## Determining if a user is a full member
When a Zulip organization has set up a [waiting period before new members
turn into full members](/help/restrict-permissions-of-new-members),
clients will need to determine if a user's account has aged past the
organization's waiting period threshold.
The `realm_waiting_period_threshold`, which is the number of days until
a user's account is treated as a full member, is returned by the
[`POST /register` endpoint](/api/register-queue) when `realm` is present
in `fetch_event_types`.
Clients can compare the `realm_waiting_period_threshold` to a user
account's `date_joined` property, which is the time the user account
was created, to determine if a user has the permissions of a full
member or a new member.
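Here is a sketch of that comparison, assuming `date_joined` is an ISO 8601
timestamp string and the threshold is a number of days:
```python
# Sketch: checking whether an account has aged past the waiting period.
# Assumes `date_joined` is an ISO 8601 timestamp string and the threshold
# is a number of days, per the description above.
from datetime import datetime, timezone


def is_full_member(date_joined: str, realm_waiting_period_threshold: int) -> bool:
    joined = datetime.fromisoformat(date_joined)
    days_since_joining = (datetime.now(timezone.utc) - joined).days
    return days_since_joining >= realm_waiting_period_threshold


print(is_full_member("2020-01-15T12:00:00+00:00", 30))
```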


@@ -1,77 +0,0 @@
{generate_api_header(/messages:post)}
## Usage examples
{start_tabs}
{generate_code_example(python)|/messages:post|example}
{generate_code_example(javascript)|/messages:post|example}
{tab|curl}
``` curl
# For stream messages
curl -X POST {{ api_url }}/v1/messages \
-u BOT_EMAIL_ADDRESS:BOT_API_KEY \
--data-urlencode type=stream \
--data-urlencode 'to="Denmark"' \
--data-urlencode topic=Castle \
--data-urlencode 'content=I come not, friends, to steal away your hearts.'
# For direct messages
curl -X POST {{ api_url }}/v1/messages \
-u BOT_EMAIL_ADDRESS:BOT_API_KEY \
--data-urlencode type=direct \
--data-urlencode 'to=[9]' \
--data-urlencode 'content=With mirth and laughter let old wrinkles come.'
```
{tab|zulip-send}
You can use `zulip-send`
(available after you `pip install zulip`) to easily send Zulips from
the command-line, providing the message content via STDIN.
```bash
# For stream messages
zulip-send --stream Denmark --subject Castle \
--user othello-bot@example.com --api-key a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5
# For direct messages
zulip-send hamlet@example.com \
--user othello-bot@example.com --api-key a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5
```
#### Passing in the message on the command-line
If you'd like, you can also provide the message on the command-line with the
`-m` or `--message` flag, as follows:
```bash
zulip-send --stream Denmark --subject Castle \
--message 'I come not, friends, to steal away your hearts.' \
--user othello-bot@example.com --api-key a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5
```
You can omit the `user` and `api-key` parameters if you have a `~/.zuliprc`
file.
{end_tabs}
## Parameters
{generate_api_arguments_table|zulip.yaml|/messages:post}
{generate_parameter_description(/messages:post)}
## Response
{generate_return_values_table|zulip.yaml|/messages:post}
{generate_response_description(/messages:post)}
#### Example response(s)
{generate_code_example|/messages:post|fixture}


@@ -1,27 +0,0 @@
## Integrations
* [Overview](/api/integrations-overview)
* [Incoming webhook integrations](/api/incoming-webhooks-overview)
* [Hello world walkthrough](/api/incoming-webhooks-walkthrough)
* [Non-webhook integrations](/api/non-webhook-integrations)
## Interactive bots (beta)
* [Running bots](/api/running-bots)
* [Deploying bots](/api/deploying-bots)
* [Writing bots](/api/writing-bots)
* [Outgoing webhooks](/api/outgoing-webhooks)
## REST API
* [Overview](/api/rest)
* [Installation instructions](/api/installation-instructions)
* [API keys](/api/api-keys)
* [Configuring the Python bindings](/api/configuring-python-bindings)
* [HTTP headers](/api/http-headers)
* [Error handling](/api/rest-error-handling)
* [Roles and permissions](/api/roles-and-permissions)
* [Client libraries](/api/client-libraries)
* [API changelog](/api/changelog)
{!rest-endpoints.md!}

18
babel.config.js Normal file

@@ -0,0 +1,18 @@
module.exports = {
presets: [
[
"@babel/preset-env",
{
corejs: 3,
loose: true, // Loose mode for…of loops are 5× faster in Firefox
useBuiltIns: "usage",
},
],
"@babel/typescript",
],
plugins: [
"@babel/proposal-class-properties",
["@babel/plugin-proposal-unicode-property-regex", {useUnicodeFlag: false}],
],
sourceType: "unambiguous",
};


@@ -19,4 +19,4 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
VERSION = (0, 9, "pre")
VERSION = (0, 9, 'pre')


@@ -3,36 +3,24 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("contenttypes", "0001_initial"),
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name="Confirmation",
name='Confirmation',
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
("object_id", models.PositiveIntegerField()),
("date_sent", models.DateTimeField(verbose_name="sent")),
(
"confirmation_key",
models.CharField(max_length=40, verbose_name="activation key"),
),
(
"content_type",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="contenttypes.ContentType"
),
),
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('date_sent', models.DateTimeField(verbose_name='sent')),
('confirmation_key', models.CharField(max_length=40, verbose_name='activation key')),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
options={
"verbose_name": "confirmation email",
"verbose_name_plural": "confirmation emails",
'verbose_name': 'confirmation email',
'verbose_name_plural': 'confirmation emails',
},
bases=(models.Model,),
),


@@ -3,25 +3,18 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("confirmation", "0001_initial"),
('confirmation', '0001_initial'),
]
operations = [
migrations.CreateModel(
name="RealmCreationKey",
name='RealmCreationKey',
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
("creation_key", models.CharField(max_length=40, verbose_name="activation key")),
(
"date_created",
models.DateTimeField(default=django.utils.timezone.now, verbose_name="created"),
),
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('creation_key', models.CharField(max_length=40, verbose_name='activation key')),
('date_created', models.DateTimeField(default=django.utils.timezone.now, verbose_name='created')),
],
),
]


@@ -3,17 +3,19 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("confirmation", "0002_realmcreationkey"),
('confirmation', '0002_realmcreationkey'),
]
operations = [
migrations.CreateModel(
name="EmailChangeConfirmation",
fields=[],
name='EmailChangeConfirmation',
fields=[
],
options={
"proxy": True,
'proxy': True,
},
bases=("confirmation.confirmation",),
bases=('confirmation.confirmation',),
),
]


@@ -3,32 +3,33 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("confirmation", "0003_emailchangeconfirmation"),
('confirmation', '0003_emailchangeconfirmation'),
]
operations = [
migrations.DeleteModel(
name="EmailChangeConfirmation",
name='EmailChangeConfirmation',
),
migrations.AlterModelOptions(
name="confirmation",
name='confirmation',
options={},
),
migrations.AddField(
model_name="confirmation",
name="type",
model_name='confirmation',
name='type',
field=models.PositiveSmallIntegerField(default=1),
preserve_default=False,
),
migrations.AlterField(
model_name="confirmation",
name="confirmation_key",
model_name='confirmation',
name='confirmation_key',
field=models.CharField(max_length=40),
),
migrations.AlterField(
model_name="confirmation",
name="date_sent",
model_name='confirmation',
name='date_sent',
field=models.DateTimeField(),
),
]


@@ -4,17 +4,16 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("zerver", "0124_stream_enable_notifications"),
("confirmation", "0004_remove_confirmationmanager"),
('zerver', '0124_stream_enable_notifications'),
('confirmation', '0004_remove_confirmationmanager'),
]
operations = [
migrations.AddField(
model_name="confirmation",
name="realm",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
model_name='confirmation',
name='realm',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm'),
),
]


@@ -4,14 +4,15 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("confirmation", "0005_confirmation_realm"),
('confirmation', '0005_confirmation_realm'),
]
operations = [
migrations.AddField(
model_name="realmcreationkey",
name="presume_email_valid",
model_name='realmcreationkey',
name='presume_email_valid',
field=models.BooleanField(default=False),
),
]


@@ -4,33 +4,34 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("confirmation", "0006_realmcreationkey_presume_email_valid"),
('confirmation', '0006_realmcreationkey_presume_email_valid'),
]
operations = [
migrations.AlterField(
model_name="confirmation",
name="confirmation_key",
model_name='confirmation',
name='confirmation_key',
field=models.CharField(db_index=True, max_length=40),
),
migrations.AlterField(
model_name="confirmation",
name="date_sent",
model_name='confirmation',
name='date_sent',
field=models.DateTimeField(db_index=True),
),
migrations.AlterField(
model_name="confirmation",
name="object_id",
model_name='confirmation',
name='object_id',
field=models.PositiveIntegerField(db_index=True),
),
migrations.AlterField(
model_name="realmcreationkey",
name="creation_key",
field=models.CharField(db_index=True, max_length=40, verbose_name="activation key"),
model_name='realmcreationkey',
name='creation_key',
field=models.CharField(db_index=True, max_length=40, verbose_name='activation key'),
),
migrations.AlterUniqueTogether(
name="confirmation",
unique_together={("type", "confirmation_key")},
name='confirmation',
unique_together={('type', 'confirmation_key')},
),
]

Some files were not shown because too many files have changed in this diff.