Compare commits


9 Commits

Author SHA1 Message Date
Anders Kaseorg
d111128def memcached: Switch from pylibmc to python-binary-memcached.
Signed-off-by: Anders Kaseorg <anders@zulip.com>
2020-07-31 17:34:48 -07:00
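
For context, a minimal sketch of what talking to memcached through python-binary-memcached looks like; the server address and credentials here are hypothetical, not Zulip's actual configuration:

    import bmemcached

    # bmemcached is a pure-Python client that speaks memcached's binary
    # protocol and supports SASL authentication.
    client = bmemcached.Client(("127.0.0.1:11211",), username="zulip", password="secret")
    client.set("greeting", "hello", time=3600)  # expires after an hour
    print(client.get("greeting"))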
Tim Abbott
31f7006309 Release Zulip Server 3.1. 2020-07-30 15:44:18 -07:00
arpit551
d8b966e528 migrations: Upgrade migrations to remove duplicates in all Count tables.
This commit upgrades the 0015_clear_duplicate_counts migration to remove
duplicate counts in StreamCount, UserCount, and InstallationCount as well.

Fixes https://github.com/zulip/docker-zulip/issues/266
2020-07-30 15:18:07 -07:00
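
A rough sketch of the deduplication idea this commit describes; the grouping fields below are illustrative, since each *Count table has its own natural key:

    from django.db.models import Count

    def remove_duplicates(model):
        # Find groups of rows sharing the same natural key.
        duplicated = (
            model.objects.values("property", "subgroup", "end_time")
            .annotate(n=Count("id"))
            .filter(n__gt=1)
        )
        for group in duplicated:
            ids = list(
                model.objects.filter(
                    property=group["property"],
                    subgroup=group["subgroup"],
                    end_time=group["end_time"],
                )
                .order_by("id")
                .values_list("id", flat=True)
            )
            # Keep the oldest row and delete the rest.
            model.objects.filter(id__in=ids[1:]).delete()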
Mateusz Mandera
444359ebd3 saml: Use self.logger in get_issuing_idp.
get_issuing_idp is no longer a class method, so the awkward logger
fetching can be skipped and self.logger can be accessed directly.
2020-07-26 15:49:44 -07:00
Mateusz Mandera
c78bdd6330 saml: Fix incorrect settings object being passed in get_issuing_idp.
Fixes #15904.

settings is supposed to be a proper OneLogin_Saml2_Settings object,
rather than an empty dictionary. This bug wasn't easy to spot because
the codepath that demonstrates it runs only if the
SAMLResponse contains encrypted assertions.
2020-07-26 15:49:43 -07:00
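
A hedged sketch of the shape of the fix, using the python3-saml API; settings_dict stands in for however the caller assembles its SAML configuration:

    from onelogin.saml2.settings import OneLogin_Saml2_Settings

    # A real settings object (rather than {}) carries the SP key
    # material needed to decrypt encrypted assertions.
    saml_settings = OneLogin_Saml2_Settings(
        settings=settings_dict, sp_validation_only=True
    )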
Gittenburg
f4e02f0e80 upload: Do not open compose box when editing.
Previously, editing a message and uploading a file in
the edit textarea opened the message compose box.

Fixes #15890.
2020-07-23 11:29:51 -07:00
Gittenburg
77234ef40b message_edit: Fix invisible delete spinner.
Introduced in 953d475274.
2020-07-23 10:25:02 -07:00
Tim Abbott
00f9cd672b docs: Fix versions in stretch=>buster documentation. 2020-07-22 16:36:00 -07:00
Emilio López
c33a7dfff4 email_mirror: Fix exception handling unstructured headers.
This commit rewrites the way addresses are collected. If
the header with the address is not an AddressHeader (for instance,
Delivered-To and Envelope-To), we take its string representation.

Fixes: #15864 ("Error in email_mirror - _UnstructuredHeader has no attribute addresses").
2020-07-22 12:11:38 -07:00
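
A sketch of the collection strategy this commit describes, assuming the message was parsed with email.policy.default so headers come back as structured objects (the helper name is illustrative):

    from email.headerregistry import AddressHeader

    def collect_addresses(message, names=("To", "Cc", "Delivered-To", "Envelope-To")):
        addresses = []
        for name in names:
            for header in message.get_all(name, []):
                if isinstance(header, AddressHeader):
                    # Structured header: use the parsed addresses.
                    addresses.extend(str(a) for a in header.addresses)
                else:
                    # Unstructured header (e.g. _UnstructuredHeader):
                    # fall back to its string representation.
                    addresses.append(str(header))
        return addresses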
3022 changed files with 168252 additions and 253124 deletions

View File

@@ -1,5 +1,6 @@
> 0.15%
> 0.15% in US
> 0.2%
> 0.2% in US
last 2 versions
Firefox ESR
not dead
Chrome 26 # similar to PhantomJS

383
.circleci/config.yml Normal file
View File

@@ -0,0 +1,383 @@
# See https://zulip.readthedocs.io/en/latest/testing/continuous-integration.html for
# high-level documentation on our CircleCI setup.
# See CircleCI upstream's docs on this config format:
# https://circleci.com/docs/2.0/language-python/
#
version: 2.0
aliases:
- &create_cache_directories
run:
name: create cache directories
command: |
dirs=(/srv/zulip-{npm,venv,emoji}-cache)
sudo mkdir -p "${dirs[@]}"
sudo chown -R circleci "${dirs[@]}"
- &restore_cache_package_json
restore_cache:
keys:
- v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}
- &restore_cache_requirements
restore_cache:
keys:
- v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor-dev.txt" }}-{{ checksum "requirements/dev.txt" }}
- &restore_emoji_cache
restore_cache:
keys:
- v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "tools/setup/emoji/emoji_map.json" }}-{{ checksum "tools/setup/emoji/build_emoji" }}-{{checksum "tools/setup/emoji/emoji_setup_utils.py" }}-{{ checksum "tools/setup/emoji/emoji_names.py" }}-{{ checksum "package.json" }}
- &install_dependencies
run:
name: install dependencies
command: |
sudo apt-get update
# Install moreutils so we can use `ts` and `mispipe` in the following.
sudo apt-get install -y moreutils
# CircleCI sets the following in Git config at clone time:
# url.ssh://git@github.com.insteadOf https://github.com
# This breaks the Git clones in the NVM `install.sh` we run
# in `install-node`.
# TODO: figure out why that breaks, and whether we want it.
# (Is it an optimization?)
rm -f /home/circleci/.gitconfig
# This is the main setup job for the test suite
mispipe "tools/ci/setup-backend --skip-dev-db-build" ts
# Cleaning caches is mostly unnecessary in Circle, because
# most builds don't get to write to the cache.
# mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0 2>&1" ts
- &save_cache_package_json
save_cache:
paths:
- /srv/zulip-npm-cache
key: v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}
- &save_cache_requirements
save_cache:
paths:
- /srv/zulip-venv-cache
key: v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor-dev.txt" }}-{{ checksum "requirements/dev.txt" }}
- &save_emoji_cache
save_cache:
paths:
- /srv/zulip-emoji-cache
key: v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "tools/setup/emoji/emoji_map.json" }}-{{ checksum "tools/setup/emoji/build_emoji" }}-{{checksum "tools/setup/emoji/emoji_setup_utils.py" }}-{{ checksum "tools/setup/emoji/emoji_names.py" }}-{{ checksum "package.json" }}
- &do_bionic_hack
run:
name: do Bionic hack
command: |
# Temporary hack till `sudo service redis-server start` gets fixed in Bionic. See
# https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf
- &run_backend_tests
run:
name: run backend tests
command: |
. /srv/zulip-py3-venv/bin/activate
mispipe "./tools/ci/backend 2>&1" ts
- &run_frontend_tests
run:
name: run frontend tests
command: |
. /srv/zulip-py3-venv/bin/activate
mispipe "./tools/ci/frontend 2>&1" ts
- &upload_coverage_report
run:
name: upload coverage report
command: |
# codecov requires the `.coverage` file to be stored in pwd for
# uploading coverage results.
mv /home/circleci/zulip/var/.coverage /home/circleci/zulip/.coverage
. /srv/zulip-py3-venv/bin/activate
# TODO: Check that the next release of codecov doesn't
# throw a find error.
# codecov==2.0.16 introduced a bug which uses "find"
# for locating files which is buggy on some platforms.
# It was fixed via https://github.com/codecov/codecov-python/pull/217
# and should get automatically fixed here once it's released.
# We cannot pin the version here because we need the latest version for uploading files.
# see https://community.codecov.io/t/http-400-while-uploading-to-s3-with-python-codecov-from-travis/1428/7
pip install codecov && codecov \
|| echo "Error in uploading coverage reports to codecov.io."
- &build_production
run:
name: build production
command: |
sudo apt-get update
# Install moreutils so we can use `ts` and `mispipe` in the following.
sudo apt-get install -y moreutils
mispipe "./tools/ci/production-build 2>&1" ts
- &production_extract_tarball
run:
name: production extract tarball
command: |
sudo apt-get update
# Install moreutils so we can use `ts` and `mispipe` in the following.
sudo apt-get install -y moreutils
mispipe "/tmp/production-extract-tarball 2>&1" ts
- &install_production
run:
name: install production
command: |
sudo service rabbitmq-server restart
sudo mispipe "/tmp/production-install 2>&1" ts
- &verify_production
run:
name: verify install
command: |
sudo mispipe "/tmp/production-verify 2>&1" ts
- &upgrade_postgresql
run:
name: upgrade postgresql
command: |
sudo mispipe "/tmp/production-upgrade-pg 2>&1" ts
- &check_xenial_provision_error
run:
name: check tools/provision error message on xenial
command: |
! tools/provision > >(tee provision.out)
grep -Fqx 'CRITICAL:root:Unsupported platform: ubuntu 16.04' provision.out
- &check_xenial_upgrade_error
run:
name: check scripts/lib/upgrade-zulip-stage-2 error message on xenial
command: |
! sudo scripts/lib/upgrade-zulip-stage-2 2> >(tee upgrade.err >&2)
grep -Fq 'upgrade-zulip-stage-2: Unsupported platform: ubuntu 16.04' upgrade.err
- &notify_failure_status
run:
name: On fail
when: on_fail
branches:
only: master
command: |
if [[ "$CIRCLE_REPOSITORY_URL" == "git@github.com:zulip/zulip.git" && "$ZULIP_BOT_KEY" != "" ]]; then
curl -H "Content-Type: application/json" \
-X POST -i 'https://chat.zulip.org/api/v1/external/circleci?api_key='"$ZULIP_BOT_KEY"'&stream=automated%20testing&topic=master%20failing' \
-d '{"payload": { "branch": "'"$CIRCLE_BRANCH"'", "reponame": "'"$CIRCLE_PROJECT_REPONAME"'", "status": "failed", "build_url": "'"$CIRCLE_BUILD_URL"'", "username": "'"$CIRCLE_USERNAME"'"}}'
fi
jobs:
"bionic-backend-frontend":
docker:
# This is built from tools/ci/images/bionic/Dockerfile .
# Bionic ships with Python 3.6.
- image: arpit551/circleci:bionic-python-test
working_directory: ~/zulip
steps:
- checkout
- *create_cache_directories
- *do_bionic_hack
- *restore_cache_package_json
- *restore_cache_requirements
- *restore_emoji_cache
- *install_dependencies
- *save_cache_package_json
- *save_cache_requirements
- *save_emoji_cache
- *run_backend_tests
- run:
name: test locked requirements
command: |
. /srv/zulip-py3-venv/bin/activate
mispipe "./tools/test-locked-requirements 2>&1" ts
- *run_frontend_tests
# We only need to upload coverage reports on whichever platform
# runs the frontend tests.
- *upload_coverage_report
- store_artifacts:
path: ./var/casper/
destination: casper
- store_artifacts:
path: ./var/puppeteer/
destination: puppeteer
- store_artifacts:
path: ../../../tmp/zulip-test-event-log/
destination: test-reports
- store_test_results:
path: ./var/xunit-test-results/casper/
- *notify_failure_status
"focal-backend":
docker:
# This is built from tools/ci/images/focal/Dockerfile.
# Focal ships with Python 3.8.2.
- image: arpit551/circleci:focal-python-test
working_directory: ~/zulip
steps:
- checkout
- *create_cache_directories
- *restore_cache_package_json
- *restore_cache_requirements
- *restore_emoji_cache
- *install_dependencies
- *save_cache_package_json
- *save_cache_requirements
- *save_emoji_cache
- *run_backend_tests
- run:
name: Check development database build
command: mispipe "tools/ci/setup-backend" ts
- *notify_failure_status
"xenial-legacy":
docker:
- image: arpit551/circleci:xenial-python-test
working_directory: ~/zulip
steps:
- checkout
- *check_xenial_provision_error
- *check_xenial_upgrade_error
- *notify_failure_status
"bionic-production-build":
docker:
# This is built from tools/ci/images/bionic/Dockerfile .
# Bionic ships with Python 3.6.
- image: arpit551/circleci:bionic-python-test
working_directory: ~/zulip
steps:
- checkout
- *create_cache_directories
- *do_bionic_hack
- *restore_cache_package_json
- *restore_cache_requirements
- *restore_emoji_cache
- *build_production
- *save_cache_package_json
- *save_cache_requirements
- *save_emoji_cache
# Persist the built tarball to be used in the downstream job
# for installing the production server.
# See https://circleci.com/docs/2.0/workflows/#using-workspaces-to-share-data-among-jobs
- persist_to_workspace:
# Must be an absolute path,
# or relative path from working_directory.
# This is a directory on the container which is
# taken to be the root directory of the workspace.
root: /tmp
# Must be relative path from root
paths:
- zulip-server-test.tar.gz
- success-http-headers.template.txt
- production-install
- production-verify
- production-upgrade-pg
- production
- production-extract-tarball
- *notify_failure_status
"bionic-production-install":
docker:
# This is built from tools/ci/images/bionic/Dockerfile .
# Bionic ships with Python 3.6.
- image: arpit551/circleci:bionic-python-test
working_directory: ~/zulip
steps:
# Contains the built tarball from the bionic-production-build job
- attach_workspace:
# Must be absolute path or relative path from working_directory
at: /tmp
- *create_cache_directories
- *do_bionic_hack
- *production_extract_tarball
- *restore_cache_package_json
- *install_production
- *verify_production
- *upgrade_postgresql
- *verify_production
- *save_cache_package_json
- *notify_failure_status
"focal-production-install":
docker:
# This is built from tools/ci/images/focal/Dockerfile.
# Focal ships with Python 3.8.2.
- image: arpit551/circleci:focal-python-test
working_directory: ~/zulip
steps:
# Contains the built tarball from the bionic-production-build job
- attach_workspace:
# Must be absolute path or relative path from working_directory
at: /tmp
- *create_cache_directories
- run:
name: do memcached hack
command: |
# Temporary hack till memcached upstream is updated in Focal.
# https://bugs.launchpad.net/ubuntu/+source/memcached/+bug/1878721
echo "export SASL_CONF_PATH=/etc/sasl2" | sudo tee - a /etc/default/memcached
- *production_extract_tarball
- *restore_cache_package_json
- *install_production
- *verify_production
- *save_cache_package_json
- *notify_failure_status
workflows:
version: 2
"Ubuntu 16.04 Xenial (Python 3.5, legacy)":
jobs:
- "xenial-legacy"
"Ubuntu 18.04 Bionic (Python 3.6, backend+frontend)":
jobs:
- "bionic-backend-frontend"
"Ubuntu 20.04 Focal (Python 3.8, backend)":
jobs:
- "focal-backend"
"Production":
jobs:
- "bionic-production-build"
- "bionic-production-install":
requires:
- "bionic-production-build"
- "focal-production-install":
requires:
- "bionic-production-build"

View File

@@ -3,22 +3,19 @@ root = true
[*]
end_of_line = lf
charset = utf-8
indent_size = 4
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true
insert_final_newline = true
binary_next_line = true # for shfmt
switch_case_indent = true # for shfmt
[*.{sh,py,pyi,js,ts,json,xml,css,scss,hbs,html}]
indent_style = space
indent_size = 4
[{*.{js,json,ts},check-openapi}]
max_line_length = 100
[*.{py,pyi}]
[*.py]
max_line_length = 110
[*.{svg,rb,pp,yaml,yml}]
indent_size = 2
[*.{js,ts}]
max_line_length = 100
[package.json]
[*.{svg,rb,pp}]
indent_style = space
indent_size = 2

View File

@@ -6,9 +6,5 @@
/static/generated
/static/third
/static/webpack-bundles
/var/*
!/var/puppeteer
/var/puppeteer/*
!/var/puppeteer/test_credentials.d.ts
/zulip-current-venv
/var
/zulip-py3-venv

View File

@@ -1,44 +1,34 @@
{
"env": {
"es2020": true,
"node": true
"node": true,
"es6": true
},
"extends": [
"eslint:recommended",
"plugin:import/errors",
"plugin:import/warnings",
"plugin:unicorn/recommended",
"prettier"
],
"parser": "@babel/eslint-parser",
"parserOptions": {
"ecmaVersion": 2019,
"warnOnUnsupportedTypeScriptVersion": false,
"sourceType": "unambiguous"
"sourceType": "module"
},
"reportUnusedDisableDirectives": true,
"plugins": [
"eslint-plugin-empty-returns"
],
"rules": {
"array-callback-return": "error",
"arrow-body-style": "error",
"block-scoped-var": "error",
"consistent-return": "error",
"curly": "error",
"dot-notation": "error",
"empty-returns/main": "error",
"eqeqeq": "error",
"guard-for-in": "error",
"import/extensions": "error",
"import/first": "error",
"import/newline-after-import": "error",
"import/no-useless-path-segments": "error",
"import/order": [
"error",
"new-cap": [ "error",
{
"alphabetize": {"order": "asc"},
"newlines-between": "always"
"capIsNew": false
}
],
"import/unambiguous": "error",
"lines-around-directive": "error",
"new-cap": "error",
"no-alert": "error",
"no-array-constructor": "error",
"no-bitwise": "error",
@@ -50,7 +40,6 @@
"no-else-return": "error",
"no-eq-null": "error",
"no-eval": "error",
"no-implicit-coercion": "error",
"no-implied-eval": "error",
"no-inner-declarations": "off",
"no-iterator": "error",
@@ -69,161 +58,292 @@
"no-script-url": "error",
"no-self-compare": "error",
"no-sync": "error",
"no-throw-literal": "error",
"no-undef-init": "error",
"no-unneeded-ternary": ["error", {"defaultAssignment": false}],
"no-unneeded-ternary": [ "error", { "defaultAssignment": false } ],
"no-unused-expressions": "error",
"no-use-before-define": ["error", {"functions": false}],
"no-useless-concat": "error",
"no-unused-vars": [ "error",
{
"vars": "local",
"varsIgnorePattern": "print_elapsed_time|check_duplicate_ids"
}
],
"no-use-before-define": "error",
"no-useless-constructor": "error",
"no-var": "error",
"object-shorthand": "error",
"one-var": ["error", "never"],
"one-var": [ "error", "never" ],
"prefer-arrow-callback": "error",
"prefer-const": [
"error",
"prefer-const": [ "error",
{
"ignoreReadBeforeAssign": true
}
],
"radix": "error",
"sort-imports": ["error", {"ignoreDeclarationSort": true}],
"spaced-comment": ["error", "always", {"markers": ["/"]}],
"strict": "error",
"unicorn/consistent-function-scoping": "off",
"unicorn/explicit-length-check": "off",
"unicorn/filename-case": "off",
"unicorn/no-nested-ternary": "off",
"unicorn/no-null": "off",
"unicorn/no-process-exit": "off",
"unicorn/no-useless-undefined": "off",
"unicorn/number-literal-case": "off",
"unicorn/numeric-separators-style": "off",
"unicorn/prefer-module": "off",
"unicorn/prefer-node-protocol": "off",
"unicorn/prefer-spread": "off",
"unicorn/prefer-ternary": "off",
"unicorn/prevent-abbreviations": "off",
"valid-typeof": ["error", {"requireStringLiterals": true}],
"sort-imports": "error",
"spaced-comment": "off",
"strict": "off",
"valid-typeof": [ "error", { "requireStringLiterals": true } ],
"yoda": "error"
},
"overrides": [
{
"files": ["frontend_tests/puppeteer_lib/**", "frontend_tests/puppeteer_tests/**"],
"files": [
"frontend_tests/**/*.{js,ts}",
"static/js/**/*.{js,ts}"
],
"globals": {
"$": false,
"zulip_test": false
"ClipboardJS": false,
"FetchStatus": false,
"Filter": false,
"Handlebars": false,
"LightboxCanvas": false,
"MessageListData": false,
"MessageListView": false,
"Plotly": false,
"Sortable": false,
"WinChan": false,
"XDate": false,
"_": false,
"activity": false,
"admin": false,
"alert_words": false,
"alert_words_ui": false,
"attachments_ui": false,
"avatar": false,
"billing": false,
"blueslip": false,
"bot_data": false,
"bridge": false,
"buddy_data": false,
"buddy_list": false,
"channel": false,
"click_handlers": false,
"color_data": false,
"colorspace": false,
"common": false,
"components": false,
"compose": false,
"compose_actions": false,
"compose_fade": false,
"compose_pm_pill": false,
"compose_state": false,
"compose_ui": false,
"composebox_typeahead": false,
"condense": false,
"confirm_dialog": false,
"copy_and_paste": false,
"csrf_token": false,
"current_msg_list": true,
"drafts": false,
"dropdown_list_widget": false,
"echo": false,
"emoji": false,
"emoji_picker": false,
"favicon": false,
"feature_flags": false,
"feedback_widget": false,
"fenced_code": false,
"flatpickr": false,
"floating_recipient_bar": false,
"gear_menu": false,
"hash_util": false,
"hashchange": false,
"helpers": false,
"history": false,
"home_msg_list": false,
"hotspots": false,
"i18n": false,
"info_overlay": false,
"input_pill": false,
"invite": false,
"jQuery": false,
"katex": false,
"keydown_util": false,
"lightbox": false,
"list_cursor": false,
"list_render": false,
"list_util": false,
"loading": false,
"localStorage": false,
"local_message": false,
"localstorage": false,
"location": false,
"markdown": false,
"marked": false,
"md5": false,
"message_edit": false,
"message_edit_history": false,
"message_events": false,
"message_fetch": false,
"message_flags": false,
"message_list": false,
"message_live_update": false,
"message_scroll": false,
"message_store": false,
"message_util": false,
"message_viewport": false,
"moment": false,
"muting": false,
"muting_ui": false,
"narrow": false,
"narrow_state": false,
"navigate": false,
"night_mode": false,
"notifications": false,
"overlays": false,
"padded_widget": false,
"page_params": false,
"panels": false,
"people": false,
"pm_conversations": false,
"pm_list": false,
"pm_list_dom": false,
"pointer": false,
"popovers": false,
"presence": false,
"reactions": false,
"realm_icon": false,
"realm_logo": false,
"realm_night_logo": false,
"recent_senders": false,
"recent_topics": false,
"reload": false,
"reload_state": false,
"reminder": false,
"resize": false,
"rows": false,
"rtl": false,
"run_test": false,
"schema": false,
"scroll_bar": false,
"scroll_util": false,
"search": false,
"search_pill": false,
"search_pill_widget": false,
"search_suggestion": false,
"search_util": false,
"sent_messages": false,
"server_events": false,
"server_events_dispatch": false,
"settings": false,
"settings_account": false,
"settings_bots": false,
"settings_display": false,
"settings_emoji": false,
"settings_exports": false,
"settings_linkifiers": false,
"settings_invites": false,
"settings_muting": false,
"settings_notifications": false,
"settings_org": false,
"settings_panel_menu": false,
"settings_profile_fields": false,
"settings_sections": false,
"settings_streams": false,
"settings_toggle": false,
"settings_ui": false,
"settings_user_groups": false,
"settings_users": false,
"spoilers": false,
"starred_messages": false,
"stream_color": false,
"stream_create": false,
"stream_data": false,
"stream_edit": false,
"stream_events": false,
"stream_topic_history": false,
"stream_list": false,
"stream_muting": false,
"stream_popover": false,
"stream_sort": false,
"stream_ui_updates": false,
"StripeCheckout": false,
"submessage": false,
"subs": false,
"tab_bar": false,
"templates": false,
"tictactoe_widget": false,
"timerender": false,
"todo_widget": false,
"top_left_corner": false,
"topic_generator": false,
"topic_list": false,
"topic_zoom": false,
"transmit": false,
"tutorial": false,
"typeahead_helper": false,
"typing": false,
"typing_data": false,
"typing_events": false,
"ui": false,
"ui_init": false,
"ui_report": false,
"ui_util": false,
"unread": false,
"unread_ops": false,
"unread_ui": false,
"upgrade": false,
"upload": false,
"upload_widget": false,
"user_events": false,
"user_groups": false,
"user_pill": false,
"user_search": false,
"user_status": false,
"user_status_ui": false,
"poll_widget": false,
"vdom": false,
"widgetize": false,
"zcommand": false,
"zform": false,
"zxcvbn": false
}
},
{
"files": ["static/js/**"],
"globals": {
"StripeCheckout": false
"files": [
"frontend_tests/casper_tests/*.js",
"frontend_tests/casper_lib/*.js"
],
"rules": {
// Don't require ES features that PhantomJS doesn't support
"no-var": "off",
"prefer-arrow-callback": "off"
}
},
{
"files": ["**/*.ts"],
"extends": ["plugin:@typescript-eslint/recommended", "plugin:import/typescript"],
"extends": [
"plugin:@typescript-eslint/recommended",
"prettier/@typescript-eslint"
],
"parserOptions": {
"project": "tsconfig.json"
},
"rules": {
// Disable base rule to avoid conflict
"no-duplicate-imports": "off",
"empty-returns/main": "off",
"no-unused-vars": "off",
"no-useless-constructor": "off",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/consistent-type-assertions": "error",
"@typescript-eslint/consistent-type-imports": "error",
"@typescript-eslint/explicit-function-return-type": [
"error",
{"allowExpressions": true}
],
"@typescript-eslint/explicit-function-return-type": ["error", { "allowExpressions": true }],
"@typescript-eslint/member-ordering": "error",
"@typescript-eslint/no-duplicate-imports": "off",
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/no-extraneous-class": "error",
"@typescript-eslint/no-non-null-assertion": "off",
"@typescript-eslint/no-parameter-properties": "error",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-unused-vars": ["error", {"varsIgnorePattern": "^_"}],
"@typescript-eslint/no-unused-vars": ["error", { "varsIgnorePattern": "^_" } ],
"@typescript-eslint/no-use-before-define": "error",
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-regexp-exec": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/unified-signatures": "error",
"no-undef": "error"
}
},
{
"files": ["**/*.d.ts"],
"rules": {
"import/unambiguous": "off"
}
},
{
"files": ["frontend_tests/**"],
"globals": {
"CSS": false,
"document": false,
"navigator": false,
"window": false
},
"rules": {
"no-sync": "off"
}
},
{
"files": ["tools/debug-require.js"],
"env": {
"browser": true,
"es2020": false
},
"rules": {
// Don't require ES features that PhantomJS doesn't support
// TODO: Toggle these settings now that we don't use PhantomJS
"no-var": "off",
"object-shorthand": "off",
"prefer-arrow-callback": "off"
}
},
{
"files": ["static/**"],
"env": {
"browser": true,
"node": false
},
"rules": {
"no-console": "error"
},
"settings": {
"import/resolver": "webpack"
}
},
{
"files": ["static/shared/**"],
"env": {
"browser": false,
"shared-node-browser": true
},
"rules": {
"import/no-restricted-paths": [
"error",
{
"zones": [
{
"target": "./static/shared",
"from": ".",
"except": ["./node_modules", "./static/shared"]
}
]
}
]
"@typescript-eslint/unified-signatures": "error"
}
}
]

3
.github/FUNDING.yml vendored
View File

@@ -1,3 +0,0 @@
github: zulip
patreon: zulip
open_collective: zulip

View File

@@ -1,10 +1,10 @@
<!-- What's this PR for? (Just a link to an issue is fine.) -->
**Testing plan:** <!-- How have you tested? -->
**Testing Plan:** <!-- How have you tested? -->
**GIFs or screenshots:** <!-- If a UI change. See:
**GIFs or Screenshots:** <!-- If a UI change. See:
https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html
-->

View File

@@ -1,41 +0,0 @@
name: Cancel previous runs
on: [push, pull_request]
defaults:
run:
shell: bash
jobs:
cancel:
name: Cancel previous runs
runs-on: ubuntu-latest
timeout-minutes: 3
# Don't run this job for zulip/zulip pushes since we
# want to run those jobs.
if: ${{ github.event_name != 'push' || github.event.repository.full_name != 'zulip/zulip' }}
steps:
# We get workflow IDs from the GitHub API so we don't have to maintain
# a hard-coded list of IDs that needs updating when a workflow
# is added or removed. Also, workflow IDs differ across forks,
# so this is required.
- name: Get workflow IDs.
id: workflow_ids
env:
# This is in <owner>/<repo> format e.g. zulip/zulip
REPOSITORY: ${{ github.repository }}
run: |
workflow_api_url=https://api.github.com/repos/$REPOSITORY/actions/workflows
curl $workflow_api_url -o workflows.json
script="const {workflows} = require('./workflows'); \
const ids = workflows.map(workflow => workflow.id); \
console.log(ids.join(','));"
ids=$(node -e "$script")
echo "::set-output name=ids::$ids"
- uses: styfle/cancel-workflow-action@0.4.1
with:
workflow_id: ${{ steps.workflow_ids.outputs.ids }}
access_token: ${{ github.token }}
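
A rough Python equivalent of the curl-plus-node snippet in the step above, to make the API shape concrete:

    import json
    from urllib.request import urlopen

    def workflow_ids(repository):
        # repository is in <owner>/<repo> format, e.g. "zulip/zulip".
        url = f"https://api.github.com/repos/{repository}/actions/workflows"
        with urlopen(url) as response:
            workflows = json.load(response)["workflows"]
        return ",".join(str(w["id"]) for w in workflows)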

View File

@@ -1,4 +1,4 @@
name: "Code scanning"
name: "Code Scanning"
on: [push, pull_request]
@@ -7,14 +7,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out repository
- name: Checkout repository
uses: actions/checkout@v2
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can check out the head.
# a pull request then we can checkout the head.
fetch-depth: 2
# If this run was triggered by a pull request event, then check out
# If this run was triggered by a pull request event, then checkout
# the head of the pull request instead of the merge commit.
- run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }}

View File

@@ -1,24 +0,0 @@
name: Legacy OS
on: [push, pull_request]
jobs:
xenial:
name: Ubuntu 16.04 Xenial (Python 3.5, legacy)
runs-on: ubuntu-16.04
steps:
- uses: actions/checkout@v2
- name: Check tools/provision error message on xenial
run: |
{ { ! tools/provision 2>&1 >&3; } | tee provision.err; } 3>&1 >&2
grep -Fqx 'Error: ubuntu 16.04 is no longer a supported platform for Zulip.' provision.err
- name: Check scripts/lib/upgrade-zulip-stage-2 error message on xenial
run: |
{ { ! sudo scripts/lib/upgrade-zulip-stage-2 2>&1 >&3; } | tee upgrade.err; } 3>&1 >&2
grep -Fq 'upgrade-zulip-stage-2: Unsupported platform: ubuntu 16.04' upgrade.err
- name: Report status
if: failure()
env:
ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
run: tools/ci/send-failure-message
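
The bash redirection in these steps tees stderr to a file while requiring the command to fail; a loose Python analogue of the same check, assuming it runs from the repository root:

    import subprocess

    proc = subprocess.run(["tools/provision"], capture_output=True, text=True)
    assert proc.returncode != 0, "provision is expected to fail on xenial"
    assert "no longer a supported platform for Zulip." in proc.stderr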

View File

@@ -1,208 +0,0 @@
name: Zulip production suite
on:
push:
paths:
- "**/migrations/**"
- puppet/**
- requirements/**
- scripts/**
- static/**
- tools/**
- zproject/**
- yarn.lock
- .github/workflows/production-suite.yml
pull_request:
paths:
- "**/migrations/**"
- puppet/**
- requirements/**
- scripts/**
- static/**
- tools/**
- zproject/**
- yarn.lock
- .github/workflows/production-suite.yml
defaults:
run:
shell: bash
jobs:
production_build:
name: Bionic production build
runs-on: ubuntu-latest
# This docker image was created by a generated Dockerfile at:
# tools/ci/images/bionic/Dockerfile
# Bionic ships with Python 3.6.
container: zulip/ci:bionic
steps:
- name: Add required permissions
run: |
# The checkout action doesn't clone to ~/zulip or allow
# us to use the path option to clone outside the current
# /__w/zulip/zulip directory. Since this directory is owned
# by root, we need to change its ownership to allow the
# github user to clone the code here.
# Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE
# which is /home/runner/work/.
sudo chown -R github .
# This is the GitHub Actions-specific cache directory that
# the current github user must be able to access for the
# cache action to work. It is currently owned by root.
sudo chmod -R 0777 /__w/_temp/
- uses: actions/checkout@v2
- name: Create cache directories
run: |
dirs=(/srv/zulip-{npm,venv,emoji}-cache)
sudo mkdir -p "${dirs[@]}"
sudo chown -R github "${dirs[@]}"
- name: Restore node_modules cache
uses: actions/cache@v2
with:
path: /srv/zulip-npm-cache
key: v1-yarn-deps-${{ github.job }}-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }}
restore-keys: v1-yarn-deps-${{ github.job }}
- name: Restore python cache
uses: actions/cache@v2
with:
path: /srv/zulip-venv-cache
key: v1-venv-${{ github.job }}-${{ hashFiles('requirements/dev.txt') }}
restore-keys: v1-venv-${{ github.job }}
- name: Restore emoji cache
uses: actions/cache@v2
with:
path: /srv/zulip-emoji-cache
key: v1-emoji-${{ github.job }}-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
restore-keys: v1-emoji-${{ github.job }}
- name: Do Bionic hack
run: |
# Temporary hack till `sudo service redis-server start` gets fixed in Bionic. See
# https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf
- name: Build production tarball
run: ./tools/ci/production-build
- name: Upload production build artifacts for install jobs
uses: actions/upload-artifact@v2
with:
name: production-tarball
path: /tmp/production-build
retention-days: 14
- name: Report status
if: failure()
env:
ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
run: tools/ci/send-failure-message
production_install:
strategy:
fail-fast: false
matrix:
include:
# Base images are built using `tools/ci/Dockerfile.template`.
# The comments at the top explain how to build and upload these images.
- docker_image: zulip/ci:bionic
name: Bionic production install
is_bionic: true
os: bionic
- docker_image: zulip/ci:focal
name: Focal production install
is_focal: true
os: focal
- docker_image: zulip/ci:buster
name: Buster production install
is_buster: true
os: buster
- docker_image: zulip/ci:bullseye
name: Bullseye production install
is_bullseye: true
os: bullseye
name: ${{ matrix.name }}
container: ${{ matrix.docker_image }}
runs-on: ubuntu-latest
needs: production_build
steps:
- name: Download built production tarball
uses: actions/download-artifact@v2
with:
name: production-tarball
path: /tmp
- name: Add required permissions and setup
run: |
# This is the GitHub Actions-specific cache directory that
# the current github user must be able to access for the
# cache action to work. It is currently owned by root.
sudo chmod -R 0777 /__w/_temp/
# Create the zulip directory that the tools/ci/ scripts need
mkdir -p /home/github/zulip
# Since actions/download-artifact@v2 loses all the permissions
# of the files uploaded by upload-artifact, fix those.
chmod +x /tmp/production-extract-tarball
chmod +x /tmp/production-upgrade-pg
chmod +x /tmp/production-install
chmod +x /tmp/production-verify
chmod +x /tmp/send-failure-message
- name: Create cache directories
run: |
dirs=(/srv/zulip-{npm,venv,emoji}-cache)
sudo mkdir -p "${dirs[@]}"
sudo chown -R github "${dirs[@]}"
- name: Restore node_modules cache
uses: actions/cache@v2
with:
path: /srv/zulip-npm-cache
key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('/tmp/package.json') }}-${{ hashFiles('/tmp/yarn.lock') }}
restore-keys: v1-yarn-deps-${{ matrix.os }}
- name: Do Bionic hack
if: ${{ matrix.is_bionic }}
run: |
# Temporary hack till `sudo service redis-server start` gets fixed in Bionic. See
# https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf
- name: Production extract tarball
run: /tmp/production-extract-tarball
- name: Install production
run: |
sudo service rabbitmq-server restart
sudo /tmp/production-install
- name: Verify install
run: sudo /tmp/production-verify
- name: Upgrade postgresql
if: ${{ matrix.is_bionic }}
run: sudo /tmp/production-upgrade-pg
- name: Verify install after upgrading postgresql
if: ${{ matrix.is_bionic }}
run: sudo /tmp/production-verify
- name: Report status
if: failure()
env:
ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
run: /tmp/send-failure-message
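
Since actions/download-artifact@v2 drops file modes, the chmod +x lines above re-mark the helper scripts as executable; the equivalent operation in Python, for illustration:

    import os
    import stat

    SCRIPTS = [
        "production-extract-tarball",
        "production-install",
        "production-verify",
        "production-upgrade-pg",
        "send-failure-message",
    ]
    for name in SCRIPTS:
        path = os.path.join("/tmp", name)
        mode = os.stat(path).st_mode
        os.chmod(path, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)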

View File

@@ -1,24 +0,0 @@
name: Update one click apps
on:
release:
types: [published]
jobs:
update-digitalocean-oneclick-app:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Update DigitalOcean one click app
env:
DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }}
ZULIP_API_KEY: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_API_KEY }}
ZULIP_EMAIL: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_EMAIL }}
ZULIP_SITE: https://chat.zulip.org
ONE_CLICK_ACTION_STREAM: kandra ops
PYTHON_DIGITALOCEAN_REQUEST_TIMEOUT_SEC: 30
RELEASE_VERSION: ${{ github.event.release.tag_name }}
run: |
export PATH="$HOME/.local/bin:$PATH"
git clone https://github.com/zulip/marketplace-partners
pip3 install python-digitalocean zulip fab-classic
echo $PATH
python3 tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py

View File

@@ -1,7 +1,3 @@
# NOTE: Every test in this file should be in `tools/test-all`. If there's a
# reason not to run it there, it should be there as a comment
# explaining why.
name: Zulip CI
on: [push, pull_request]
@@ -11,15 +7,14 @@ defaults:
shell: bash
jobs:
tests:
focal_bionic:
strategy:
fail-fast: false
matrix:
include:
# This docker image was created by a generated Dockerfile at:
# tools/ci/images/bionic/Dockerfile
# Bionic ships with Python 3.6.
- docker_image: zulip/ci:bionic
- docker_image: mepriyank/actions:bionic
name: Ubuntu 18.04 Bionic (Python 3.6, backend + frontend)
os: bionic
is_bionic: true
@@ -28,29 +23,20 @@ jobs:
# This docker image was created by a generated Dockerfile at:
# tools/ci/images/focal/Dockerfile
# Focal ships with Python 3.8.2.
- docker_image: zulip/ci:focal
- docker_image: mepriyank/actions:focal
name: Ubuntu 20.04 Focal (Python 3.8, backend)
os: focal
is_focal: true
include_frontend_tests: false
# This docker image was created by a generated Dockerfile at:
# tools/ci/images/bullseye/Dockerfile
# Bullseye ships with Python 3.9.2.
- docker_image: zulip/ci:bullseye
name: Debian 11 Bullseye (Python 3.9, backend)
os: bullseye
is_bullseye: true
include_frontend_tests: false
runs-on: ubuntu-latest
name: ${{ matrix.name }}
container: ${{ matrix.docker_image }}
env:
# GitHub Actions sets HOME to /github/home which causes
# problems later in provision and the frontend test that runs
# tools/setup/postgresql-init-dev-db because of the .pgpass
# location. PostgreSQL (psql) expects .pgpass to be at
# tools/setup/postgres-init-dev-db because of the .pgpass
# location. Postgresql (psql) expects .pgpass to be at
# /home/github/.pgpass and setting home to `/home/github/`
# ensures it is written there, because we write it to ~/.pgpass.
HOME: /home/github/
@@ -91,7 +77,7 @@ jobs:
uses: actions/cache@v2
with:
path: /srv/zulip-venv-cache
key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/dev.txt') }}
key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/thumbor-dev.txt') }}-${{ hashFiles('requirements/dev.txt') }}
restore-keys: v1-venv-${{ matrix.os }}
- name: Restore emoji cache
@@ -111,111 +97,28 @@ jobs:
- name: Install dependencies
run: |
# This is the main setup job for the test suite
./tools/ci/setup-backend --skip-dev-db-build
mispipe "tools/ci/setup-backend --skip-dev-db-build" ts
# Cleaning caches is mostly unnecessary in GitHub Actions, because
# most builds don't get to write to the cache.
# scripts/lib/clean-unused-caches --verbose --threshold 0
- name: Run tools test
run: |
source tools/ci/activate-venv
./tools/test-tools
- name: Run backend lint
run: |
source tools/ci/activate-venv
echo "Test suite is running under $(python --version)."
./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky
- name: Run frontend lint
if: ${{ matrix.include_frontend_tests }}
run: |
source tools/ci/activate-venv
./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky
# mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0 2>&1" ts
- name: Run backend tests
run: |
source tools/ci/activate-venv
./tools/test-backend --coverage --include-webhooks --no-cov-cleanup --ban-console-output
. /srv/zulip-py3-venv/bin/activate && \
mispipe "./tools/ci/backend 2>&1" ts
- name: Run mypy
run: |
source tools/ci/activate-venv
# We run mypy after the backend tests so we get output from the
# backend tests, which tend to uncover more serious problems, first.
./tools/run-mypy --version
./tools/run-mypy
- name: Run miscellaneous tests
run: |
source tools/ci/activate-venv
# Currently our compiled requirements files will differ for different python versions
# so we will run test-locked-requirements only for Bionic.
# ./tools/test-locked-requirements
# ./tools/test-run-dev # https://github.com/zulip/zulip/pull/14233
#
# This test has been persistently flaky (failing roughly 1% of the time), is slow,
# and covers a single very specific feature, so we don't run it by default:
# ./tools/test-queue-worker-reload
./tools/test-migrations
./tools/setup/optimize-svg --check
./tools/setup/generate_integration_bots_avatars.py --check-missing
- name: Run documentation and api tests
run: |
source tools/ci/activate-venv
# In CI, we only test links we control in test-documentation to avoid flakes
./tools/test-documentation --skip-external-links
./tools/test-help-documentation --skip-external-links
./tools/test-api
- name: Run node tests
- name: Run frontend tests
if: ${{ matrix.include_frontend_tests }}
run: |
source tools/ci/activate-venv
# Run the node tests first, since they're fast and deterministic
./tools/test-js-with-node --coverage
- name: Check schemas
if: ${{ matrix.include_frontend_tests }}
run: |
source tools/ci/activate-venv
# Check that various schemas are consistent. (This is fast.)
./tools/check-schemas
- name: Check capitalization of strings
if: ${{ matrix.include_frontend_tests }}
run: |
source tools/ci/activate-venv
./manage.py makemessages --locale en
PYTHONWARNINGS=ignore ./tools/check-capitalization --no-generate
PYTHONWARNINGS=ignore ./tools/check-frontend-i18n --no-generate
- name: Run puppeteer tests
if: ${{ matrix.include_frontend_tests }}
run: |
source tools/ci/activate-venv
./tools/test-js-with-puppeteer
- name: Check for untracked files
run: |
source tools/ci/activate-venv
# This final check looks for untracked files that may have been
# created by test-backend or provision.
untracked="$(git ls-files --exclude-standard --others)"
if [ -n "$untracked" ]; then
printf >&2 "Error: untracked files:\n%s\n" "$untracked"
exit 1
fi
. /srv/zulip-py3-venv/bin/activate
mispipe "./tools/ci/frontend 2>&1" ts
- name: Test locked requirements
if: ${{ matrix.is_bionic }}
run: |
. /srv/zulip-py3-venv/bin/activate && \
./tools/test-locked-requirements
mispipe "./tools/test-locked-requirements 2>&1" ts
- name: Upload coverage reports
@@ -228,23 +131,41 @@ jobs:
mv ./var/.coverage ./.coverage
. /srv/zulip-py3-venv/bin/activate || true
# TODO: Check that the next release of codecov doesn't
# throw a find error.
# codecov==2.0.16 introduced a bug which uses "find"
# for locating files which is buggy on some platforms.
# It was fixed via https://github.com/codecov/codecov-python/pull/217
# and should get automatically fixed here once it's released.
# We cannot pin the version here because we need the latest version for uploading files.
# see https://community.codecov.io/t/http-400-while-uploading-to-s3-with-python-codecov-from-travis/1428/7
pip install codecov && codecov || echo "Error in uploading coverage reports to codecov.io."
- name: Store Puppeteer artifacts
# Upload these on failure, as well
if: ${{ always() && matrix.include_frontend_tests }}
- name: Store puppeteer artifacts
if: ${{ matrix.include_frontend_tests }}
uses: actions/upload-artifact@v2
with:
name: puppeteer
path: ./var/puppeteer
retention-days: 60
# We cannot use the upload-artifact action to upload the test
# reports from /tmp, because that directory exists only inside the
# docker image. Move them to ./var so we can access them outside
# docker, since the current directory is volume-mounted from outside
# the docker image.
- name: Move test reports to var
run: mv /tmp/zulip-test-event-log/ ./var/
- name: Store test reports
if: ${{ matrix.is_bionic }}
uses: actions/upload-artifact@v2
with:
name: test-reports
path: ./var/zulip-test-event-log/
- name: Check development database build
if: ${{ matrix.is_focal || matrix.is_bullseye }}
run: ./tools/ci/setup-backend
- name: Report status
if: failure()
env:
ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
run: tools/ci/send-failure-message
if: ${{ matrix.is_focal }}
run: mispipe "tools/ci/setup-backend" ts
# TODO: We need to port the notify_failure step from CircleCI
# config; however, GitHub notifications may make this
# unnecessary. More details on settings to configure it:
# https://help.github.com/en/github/managing-subscriptions-and-notifications-on-github/configuring-notifications#github-actions-notification-options

10
.gitignore vendored
View File

@@ -27,14 +27,11 @@
package-lock.json
/.vagrant
/var/*
!/var/puppeteer
/var/puppeteer/*
!/var/puppeteer/test_credentials.d.ts
/var
/.dmypy.json
# Dockerfiles generated for continuous integration
# Dockerfiles generated for CircleCI
/tools/ci/images
# Generated i18n data
@@ -75,7 +72,7 @@ zulip.kdev4
*.sublime-workspace
.vscode/
*.DS_Store
# .cache/ is generated by Visual Studio Code's test runner
# .cache/ is generated by VSCode's test runner
.cache/
.eslintcache
@@ -84,4 +81,5 @@ core
## Miscellaneous
# (Ideally this section is empty.)
zthumbor/thumbor_local_settings.py
.transifexrc

View File

@@ -1,9 +1,9 @@
[general]
ignore=title-trailing-punctuation, body-min-length, body-is-missing
ignore=title-trailing-punctuation, body-min-length, body-is-missing, title-imperative-mood
extra-path=tools/lib/gitlint-rules.py
[title-match-regex]
[title-match-regex-allow-exception]
regex=^(.+:\ )?[A-Z].+\.$
[title-max-length]
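
To make the title regex concrete, a quick check of which commit titles it accepts (a sketch; gitlint also applies the custom rules from tools/lib/gitlint-rules.py):

    import re

    TITLE_RE = re.compile(r"^(.+:\ )?[A-Z].+\.$")
    assert TITLE_RE.match("saml: Use self.logger in get_issuing_idp.")
    assert not TITLE_RE.match("fix saml logging")  # lowercase, no period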

7
.isort.cfg Normal file
View File

@@ -0,0 +1,7 @@
[settings]
src_paths = ., tools, tools/setup/emoji
multi_line_output = 3
known_third_party = zulip
include_trailing_comma = True
use_parentheses = True
line_length = 100
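
As an example of what these settings produce, multi_line_output = 3 ("vertical hanging indent") together with include_trailing_comma and use_parentheses formats a long import like this (using a stdlib module so the snippet stands alone):

    from urllib.parse import (
        parse_qs,
        urlencode,
        urlsplit,
    )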

View File

@@ -1,7 +1,5 @@
Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net>
Alex Vandiver <alexmv@zulip.com> <github@chmrr.net>
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com>
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@zulip.com>
Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in>
Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com>
Anders Kaseorg <anders@zulip.com> <andersk@mit.edu>
@@ -13,19 +11,11 @@ Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com>
Greg Price <greg@zulip.com> <gnprice@gmail.com>
Greg Price <greg@zulip.com> <greg@zulipchat.com>
Greg Price <greg@zulip.com> <price@mit.edu>
Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com>
Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com>
Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com>
Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com>
Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com>
Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com>
Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com>
Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu>
Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com>
Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com>
Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com>
Scott Feeney <scott@oceanbase.org> <scott@humbughq.com>
Scott Feeney <scott@oceanbase.org> <scott@zulip.com>
Rishi Gupta <rishig@zulip.com> <rishig+git@mit.edu>
Rishi Gupta <rishig@zulip.com> <rishig@kandralabs.com>
Rishi Gupta <rishig@zulip.com> <rishig@users.noreply.github.com>
Rishi Gupta <rishig@zulip.com> <rishig@zulipchat.com>
Steve Howell <showell@zulip.com> <showell30@yahoo.com>
Steve Howell <showell@zulip.com> <showell@yahoo.com>
Steve Howell <showell@zulip.com> <showell@zulipchat.com>
@@ -35,6 +25,5 @@ Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com>
Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com>
Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu>
Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com>
Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com>
Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com>
Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com>
Vishnu KS <yo@vishnuks.com> <hackerkid@vishnuks.com>
Vishnu KS <yo@vishnuks.com> <yo@vishnuks.com>

View File

@@ -1,6 +1 @@
/corporate/tests/stripe_fixtures
/locale
/static/third
/tools/setup/emoji/emoji_map.json
/zerver/tests/fixtures
/zerver/webhooks/*/fixtures

View File

@@ -8,7 +8,6 @@
"stubs/",
"zulip-py3-venv/lib/pyre_check/stubs/"
],
"typeshed": "zulip-py3-venv/lib/pyre_check/typeshed/",
"exclude": [
"/srv/zulip/zulip-py3-venv/.*"
]

67
.stylelintrc Normal file
View File

@@ -0,0 +1,67 @@
{
"rules": {
# Stylistic rules for CSS.
"function-comma-space-after": "always",
"function-comma-space-before": "never",
"function-max-empty-lines": 0,
"function-whitespace-after": "always",
"value-keyword-case": "lower",
"value-list-comma-newline-after": "always-multi-line",
"value-list-comma-space-after": "always-single-line",
"value-list-comma-space-before": "never",
"value-list-max-empty-lines": 0,
"unit-case": "lower",
"property-case": "lower",
"color-hex-case": "lower",
"declaration-bang-space-before": "always",
"declaration-colon-newline-after": "always-multi-line",
"declaration-colon-space-after": "always-single-line",
"declaration-colon-space-before": "never",
"declaration-block-semicolon-newline-after": "always",
"declaration-block-semicolon-space-before": "never",
"declaration-block-trailing-semicolon": "always",
"block-closing-brace-empty-line-before": "never",
"block-closing-brace-newline-after": "always",
"block-closing-brace-newline-before": "always",
"block-opening-brace-newline-after": "always",
"block-opening-brace-space-before": "always",
"selector-attribute-brackets-space-inside": "never",
"selector-attribute-operator-space-after": "never",
"selector-attribute-operator-space-before": "never",
"selector-combinator-space-after": "always",
"selector-combinator-space-before": "always",
"selector-descendant-combinator-no-non-space": true,
"selector-pseudo-class-parentheses-space-inside": "never",
"selector-pseudo-element-case": "lower",
"selector-pseudo-element-colon-notation": "double",
"selector-type-case": "lower",
"selector-list-comma-newline-after": "always",
"selector-list-comma-space-before": "never",
"media-feature-colon-space-after": "always",
"media-feature-colon-space-before": "never",
"media-feature-name-case": "lower",
"media-feature-parentheses-space-inside": "never",
"media-feature-range-operator-space-after": "always",
"media-feature-range-operator-space-before": "always",
"media-query-list-comma-newline-after": "always",
"media-query-list-comma-space-before": "never",
"at-rule-name-case": "lower",
"at-rule-name-space-after": "always",
"at-rule-semicolon-newline-after": "always",
"at-rule-semicolon-space-before": "never",
"comment-whitespace-inside": "always",
"indentation": 4,
# Limit language features
"color-no-hex": true,
"color-named": "never",
}
}

View File

@@ -14,7 +14,7 @@ This isn't an exhaustive list of things that you can't do. Rather, take it
in the spirit in which it's intended --- a guide to make it easier to enrich
all of us and the technical communities in which we participate.
## Expected behavior
## Expected Behavior
The following behaviors are expected and requested of all community members:
@@ -29,7 +29,7 @@ The following behaviors are expected and requested of all community members:
* Community event venues may be shared with members of the public; be
respectful to all patrons of these locations.
## Unacceptable behavior
## Unacceptable Behavior
The following behaviors are considered harassment and are unacceptable
within the Zulip community:
@@ -53,7 +53,7 @@ within the Zulip community:
presentations.
* Advocating for, or encouraging, any of the behaviors above.
## Reporting and enforcement
## Reporting and Enforcement
Harassment and other code of conduct violations reduce the value of the
community for everyone. If someone makes you or anyone else feel unsafe or
@@ -95,9 +95,10 @@ behavior occurring outside the scope of community activities when such
behavior has the potential to adversely affect the safety and well-being of
community members.
## License and attribution
## License and Attribution
This Code of Conduct is adapted from the
[Citizen Code of Conduct](http://citizencodeofconduct.org/) and the
[Django Code of Conduct](https://www.djangoproject.com/conduct/), and is
under a
[Creative Commons BY-SA](https://creativecommons.org/licenses/by-sa/4.0/)

View File

@@ -13,12 +13,11 @@ user, or anything else. Make sure to read the
before posting. The Zulip community is also governed by a
[code of conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html).
You can subscribe to
[zulip-devel-announce@googlegroups.com](https://groups.google.com/g/zulip-devel-announce)
or our [Twitter](https://twitter.com/zulip) account for a very low
traffic (<1 email/month) way to hear about things like mentorship
opportunities with Google Summer of Code, in-person sprints at
conferences, and other opportunities to contribute.
You can subscribe to zulip-devel-announce@googlegroups.com or our
[Twitter](https://twitter.com/zulip) account for a lower traffic (~1
email/month) way to hear about things like mentorship opportunities with Google
Code-in, in-person sprints at conferences, and other opportunities to
contribute.
## Ways to contribute
@@ -45,7 +44,6 @@ don't require touching the codebase at all. We list a few of them below:
* [Reporting issues](#reporting-issues), including both feature requests and
bug reports.
* [Giving feedback](#user-feedback) if you are evaluating or using Zulip.
* [Sponsor Zulip](https://github.com/sponsors/zulip) through the GitHub sponsors program.
* [Translating](https://zulip.readthedocs.io/en/latest/translating/translating.html)
Zulip.
* [Outreach](#zulip-outreach): Star us on GitHub, upvote us
@@ -274,7 +272,7 @@ important parts of the project. We hope you apply!
### Google Summer of Code
The largest outreach program Zulip participates in is GSoC (14
students in 2017; 11 in 2018; 17 in 2019; 18 in 2020). While we don't control how
students in 2017; 11 in 2018; 17 in 2019). While we don't control how
many slots Google allocates to Zulip, we hope to mentor a similar
number of students in future summers.
@@ -298,10 +296,10 @@ same as with GSoC, and it has no separate application process; your
GSoC application is your ZSoC application. If we'd like to select you
for ZSoC, we'll contact you when the GSoC results are announced.
[gsoc-guide]: https://zulip.readthedocs.io/en/latest/contributing/gsoc-ideas.html
[gsoc-guide]: https://zulip.readthedocs.io/en/latest/overview/gsoc-ideas.html
[gsoc-faq]: https://developers.google.com/open-source/gsoc/faq
## Zulip outreach
## Zulip Outreach
**Upvoting Zulip**. Upvotes and reviews make a big difference in the public
perception of projects like Zulip. We've collected a few sites below

View File

@@ -1,11 +1,11 @@
# To build run `docker build -f Dockerfile-postgresql .` from the root of the
# zulip repo.
# Currently the PostgreSQL images do not support automatic upgrading of
# Currently the postgres images do not support automatic upgrading of
# the on-disk data in volumes. So the base image can not currently be upgraded
# without users needing a manual pgdump and restore.
# Install hunspell, Zulip stop words, and run Zulip database
# Install hunspell, zulip stop words, and run zulip database
# init.
FROM groonga/pgroonga:latest-alpine-10-slim
RUN apk add -U --no-cache hunspell-en

View File

@@ -1,3 +1,4 @@
Copyright 2011-2020 Dropbox, Inc., Kandra Labs, Inc., and contributors
Apache License
Version 2.0, January 2004

2
NOTICE
View File

@@ -1,5 +1,3 @@
Copyright 2012–2015 Dropbox, Inc., 2015–2021 Kandra Labs, Inc., and contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this project except in compliance with the License.
You may obtain a copy of the License at

View File

@@ -5,19 +5,16 @@ immediacy of real-time chat with the productivity benefits of threaded
conversations. Zulip is used by open source projects, Fortune 500 companies,
large standards bodies, and others who need a real-time chat system that
allows users to easily process hundreds or thousands of messages a day. With
over 700 contributors merging over 500 commits a month, Zulip is also the
over 500 contributors merging over 500 commits a month, Zulip is also the
largest and fastest growing open source group chat project.
[![GitHub Actions build status](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml/badge.svg?branch=master)](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amaster)
[![coverage status](https://img.shields.io/codecov/c/github/zulip/zulip/master.svg)](https://codecov.io/gh/zulip/zulip/branch/master)
[![CircleCI branch](https://img.shields.io/circleci/project/github/zulip/zulip/master.svg)](https://circleci.com/gh/zulip/zulip/tree/master)
[![Coverage Status](https://img.shields.io/codecov/c/github/zulip/zulip/master.svg)](https://codecov.io/gh/zulip/zulip/branch/master)
[![Mypy coverage](https://img.shields.io/badge/mypy-100%25-green.svg)][mypy-coverage]
[![code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg)](https://github.com/prettier/prettier)
[![GitHub release](https://img.shields.io/github/release/zulip/zulip.svg)](https://github.com/zulip/zulip/releases/latest)
[![docs](https://readthedocs.org/projects/zulip/badge/?version=latest)](https://zulip.readthedocs.io/en/latest/)
[![Zulip chat](https://img.shields.io/badge/zulip-join_chat-brightgreen.svg)](https://chat.zulip.org)
[![Twitter](https://img.shields.io/badge/twitter-@zulip-blue.svg?style=flat)](https://twitter.com/zulip)
[![GitHub Sponsors](https://img.shields.io/github/sponsors/zulip)](https://github.com/sponsors/zulip)
[mypy-coverage]: https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/
@@ -45,7 +42,7 @@ You might be interested in:
[give us feedback](https://zulip.readthedocs.io/en/latest/overview/contributing.html#user-feedback). We
would love to hear from you, even if you're just trying the product out.
* **Supporting Zulip**. Advocate for your organization to use Zulip, become a [sponsor](https://github.com/sponsors/zulip), write a
* **Supporting Zulip**. Advocate for your organization to use Zulip, write a
review in the mobile app stores, or
[upvote Zulip](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach) on
product comparison sites.
@@ -58,7 +55,7 @@ You might be interested in:
[companies](https://zulip.com/for/companies/), or Zulip for
[working groups and part time communities](https://zulip.com/for/working-groups-and-communities/).
* **Running a Zulip server**. Use a preconfigured [DigitalOcean droplet](https://marketplace.digitalocean.com/apps/zulip),
* **Running a Zulip server**. Use a preconfigured [Digital Ocean droplet](https://marketplace.digitalocean.com/apps/zulip),
[install Zulip](https://zulip.readthedocs.io/en/stable/production/install.html)
directly, or use Zulip's
experimental [Docker image](https://zulip.readthedocs.io/en/latest/production/deployment.html#zulip-in-docker).
@@ -73,7 +70,7 @@ You might be interested in:
like Google Summer of Code.
You may also be interested in reading our [blog](https://blog.zulip.org/) or
following us on [Twitter](https://twitter.com/zulip).
following us on [twitter](https://twitter.com/zulip).
Zulip is distributed under the
[Apache 2.0](https://github.com/zulip/zulip/blob/master/LICENSE) license.
@@ -1,9 +1,9 @@
# Security policy
# Security Policy
Security announcements are sent to zulip-announce@googlegroups.com,
so you should subscribe if you are running Zulip in production.
## Reporting a vulnerability
## Reporting a Vulnerability
We love responsible reports of (potential) security issues in Zulip,
whether in the latest release or our development branch.
@@ -14,19 +14,15 @@ response within 24 hours.
Please include details on the issue and how you'd like to be credited
in our release notes when we publish the fix.
Our [security model][security-model] document may be a helpful
resource.
Our [security
model](https://zulip.readthedocs.io/en/latest/production/security-model.html)
document may be a helpful resource.
## Supported versions
## Supported Versions
Zulip provides security support for the latest major release, in the
form of minor security/maintenance releases.
We work hard to make [upgrades][upgrades] reliable, so that there's no
reason to run older major releases.
See also our documentation on the [Zulip release lifecycle][release-lifecycle]
[security-model]: https://zulip.readthedocs.io/en/latest/production/security-model.html
[upgrades]: https://zulip.readthedocs.io/en/latest/production/upgrade-or-modify.html#upgrading-to-a-release
[release-lifecycle]: https://zulip.readthedocs.io/en/latest/overview/release-lifecycle.html
We work hard to make
[upgrades](https://zulip.readthedocs.io/en/latest/production/upgrade-or-modify.html#upgrading-to-a-release)
reliable, so that there's no reason to run older major releases.
Vagrantfile
@@ -2,16 +2,21 @@
VAGRANTFILE_API_VERSION = "2"
if Vagrant::VERSION == "1.8.7"
path = `command -v curl`
if path.include?("/opt/vagrant/embedded/bin/curl")
puts "In Vagrant 1.8.7, curl is broken. Please use Vagrant 2.0.2 " \
"or run 'sudo rm -f /opt/vagrant/embedded/bin/curl' to fix the " \
"issue before provisioning. See " \
"https://github.com/mitchellh/vagrant/issues/7997 " \
"for reference."
exit
end
def command?(name)
`which #{name} > /dev/null 2>&1`
$?.success?
end
if Vagrant::VERSION == "1.8.7" then
path = `which curl`
if path.include?('/opt/vagrant/embedded/bin/curl') then
puts "In Vagrant 1.8.7, curl is broken. Please use Vagrant 2.0.2 "\
"or run 'sudo rm -f /opt/vagrant/embedded/bin/curl' to fix the "\
"issue before provisioning. See "\
"https://github.com/mitchellh/vagrant/issues/7997 "\
"for reference."
exit
end
end
# Workaround: Vagrant removed the atlas.hashicorp.com to
@@ -21,7 +26,7 @@ end
# updating of boxes (since the old URL doesn't work). See
# https://github.com/hashicorp/vagrant/issues/9442
if Vagrant::DEFAULT_SERVER_URL == "atlas.hashicorp.com"
Vagrant::DEFAULT_SERVER_URL.replace("https://vagrantcloud.com")
Vagrant::DEFAULT_SERVER_URL.replace('https://vagrantcloud.com')
end
# Monkey patch https://github.com/hashicorp/vagrant/pull/10879 so we
@@ -31,7 +36,7 @@ begin
rescue LoadError
else
VagrantPlugins::DockerProvider::Provider.class_eval do
method(:usable?).owner == singleton_class or def self.usable?(raise_error = false)
method(:usable?).owner == singleton_class or def self.usable?(raise_error=false)
VagrantPlugins::DockerProvider::Driver.new.execute("docker", "version")
true
rescue Vagrant::Errors::CommandUnavailable, VagrantPlugins::DockerProvider::Errors::ExecuteError
@@ -53,18 +58,17 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
vm_memory = "2048"
ubuntu_mirror = ""
vboxadd_version = nil
config.vm.synced_folder ".", "/vagrant", disabled: true
config.vm.synced_folder ".", "/srv/zulip"
vagrant_config_file = ENV["HOME"] + "/.zulip-vagrant-config"
vagrant_config_file = ENV['HOME'] + "/.zulip-vagrant-config"
if File.file?(vagrant_config_file)
IO.foreach(vagrant_config_file) do |line|
line.chomp!
key, value = line.split(nil, 2)
case key
when /^([#;]|$)/ # ignore comments
when /^([#;]|$)/; # ignore comments
when "HTTP_PROXY"; http_proxy = value
when "HTTPS_PROXY"; https_proxy = value
when "NO_PROXY"; no_proxy = value
@@ -73,7 +77,6 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
when "GUEST_CPUS"; vm_num_cpus = value
when "GUEST_MEMORY_MB"; vm_memory = value
when "UBUNTU_MIRROR"; ubuntu_mirror = value
when "VBOXADD_VERSION"; vboxadd_version = value
end
end
end
@@ -91,9 +94,9 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
elsif !http_proxy.nil? or !https_proxy.nil?
# This prints twice due to https://github.com/hashicorp/vagrant/issues/7504
# We haven't figured out a workaround.
puts "You have specified value for proxy in ~/.zulip-vagrant-config file but did not " \
"install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install " \
"vagrant-proxyconf` in a terminal. This error will appear twice."
puts 'You have specified value for proxy in ~/.zulip-vagrant-config file but did not ' \
'install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install ' \
'vagrant-proxyconf` in a terminal. This error will appear twice.'
exit
end
@@ -115,33 +118,9 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
# It's possible we can get away with just 1.5GB; more testing needed
vb.memory = vm_memory
vb.cpus = vm_num_cpus
if !vboxadd_version.nil?
override.vbguest.installer = Class.new(VagrantVbguest::Installers::Ubuntu) do
define_method(:host_version) do |reload = false|
VagrantVbguest::Version(vboxadd_version)
end
end
override.vbguest.allow_downgrade = true
override.vbguest.iso_path = "https://download.virtualbox.org/virtualbox/#{vboxadd_version}/VBoxGuestAdditions_#{vboxadd_version}.iso"
end
end
config.vm.provider "hyperv" do |h, override|
override.vm.box = "bento/ubuntu-18.04"
h.memory = vm_memory
h.maxmemory = vm_memory
h.cpus = vm_num_cpus
end
config.vm.provider "parallels" do |prl, override|
override.vm.box = "bento/ubuntu-18.04"
override.vm.box_version = "202005.21.0"
prl.memory = vm_memory
prl.cpus = vm_num_cpus
end
$provision_script = <<SCRIPT
$provision_script = <<SCRIPT
set -x
set -e
set -o pipefail
@@ -160,9 +139,15 @@ sudo dpkg --purge landscape-client landscape-common ubuntu-release-upgrader-core
sudo dpkg-divert --add --rename /etc/default/motd-news
sudo sh -c 'echo ENABLED=0 > /etc/default/motd-news'
# If the host is running SELinux remount the /sys/fs/selinux directory as read only,
# needed for apt-get to work.
if [ -d "/sys/fs/selinux" ]; then
sudo mount -o remount,ro /sys/fs/selinux
fi
# Set default locale, this prevents errors if the user has another locale set.
if ! grep -q 'LC_ALL=C.UTF-8' /etc/default/locale; then
echo "LC_ALL=C.UTF-8" | sudo tee -a /etc/default/locale
if ! grep -q 'LC_ALL=en_US.UTF-8' /etc/default/locale; then
echo "LC_ALL=en_US.UTF-8" | sudo tee -a /etc/default/locale
fi
# Set an environment variable, so that we won't print the virtualenv
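For reference, the option loop above reads ~/.zulip-vagrant-config as plain KEY VALUE pairs, one per line, ignoring lines that start with # or ;. A minimal sketch of such a file, using only keys handled on both sides of this diff (all values illustrative):

    HTTP_PROXY http://proxy.example.com:3128
    HTTPS_PROXY http://proxy.example.com:3128
    GUEST_CPUS 4
    GUEST_MEMORY_MB 4096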
@@ -17,6 +17,7 @@ from analytics.models import (
StreamCount,
UserCount,
installation_epoch,
last_successful_fill,
)
from zerver.lib.logging_util import log_to_file
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, floor_to_hour, verify_UTC
@@ -32,33 +33,21 @@ from zerver.models import (
## Logging setup ##
logger = logging.getLogger("zulip.management")
logger = logging.getLogger('zulip.management')
log_to_file(logger, settings.ANALYTICS_LOG_PATH)
# You can't subtract timedelta.max from a datetime, so use this instead
TIMEDELTA_MAX = timedelta(days=365 * 1000)
TIMEDELTA_MAX = timedelta(days=365*1000)
## Class definitions ##
class CountStat:
HOUR = "hour"
DAY = "day"
HOUR = 'hour'
DAY = 'day'
FREQUENCIES = frozenset([HOUR, DAY])
@property
def time_increment(self) -> timedelta:
if self.frequency == CountStat.HOUR:
return timedelta(hours=1)
return timedelta(days=1)
def __init__(
self,
property: str,
data_collector: "DataCollector",
frequency: str,
interval: Optional[timedelta] = None,
) -> None:
def __init__(self, property: str, data_collector: 'DataCollector', frequency: str,
interval: Optional[timedelta]=None) -> None:
self.property = property
self.data_collector = data_collector
# might have to do something different for bitfields
@@ -67,55 +56,34 @@ class CountStat:
self.frequency = frequency
if interval is not None:
self.interval = interval
else:
self.interval = self.time_increment
elif frequency == CountStat.HOUR:
self.interval = timedelta(hours=1)
else: # frequency == CountStat.DAY
self.interval = timedelta(days=1)
def __str__(self) -> str:
return f"<CountStat: {self.property}>"
def last_successful_fill(self) -> Optional[datetime]:
fillstate = FillState.objects.filter(property=self.property).first()
if fillstate is None:
return None
if fillstate.state == FillState.DONE:
return fillstate.end_time
return fillstate.end_time - self.time_increment
class LoggingCountStat(CountStat):
def __init__(self, property: str, output_table: Type[BaseCount], frequency: str) -> None:
CountStat.__init__(self, property, DataCollector(output_table, None), frequency)
class DependentCountStat(CountStat):
def __init__(
self,
property: str,
data_collector: "DataCollector",
frequency: str,
interval: Optional[timedelta] = None,
dependencies: Sequence[str] = [],
) -> None:
def __init__(self, property: str, data_collector: 'DataCollector', frequency: str,
interval: Optional[timedelta] = None, dependencies: Sequence[str] = []) -> None:
CountStat.__init__(self, property, data_collector, frequency, interval=interval)
self.dependencies = dependencies
class DataCollector:
def __init__(
self,
output_table: Type[BaseCount],
pull_function: Optional[Callable[[str, datetime, datetime, Optional[Realm]], int]],
) -> None:
def __init__(self, output_table: Type[BaseCount],
pull_function: Optional[Callable[[str, datetime, datetime, Optional[Realm]], int]]) -> None:
self.output_table = output_table
self.pull_function = pull_function
## CountStat-level operations ##
def process_count_stat(
stat: CountStat, fill_to_time: datetime, realm: Optional[Realm] = None
) -> None:
def process_count_stat(stat: CountStat, fill_to_time: datetime,
realm: Optional[Realm]=None) -> None:
# TODO: The realm argument is not yet supported, in that we don't
# have a solution for how to update FillState if it is passed. It
# exists solely as partial plumbing for when we do fully implement
@@ -125,6 +93,13 @@ def process_count_stat(
# the CountStat object passed in needs to have come from
# E.g. get_count_stats(realm), i.e. have the realm_id already
# entered into the SQL query defined by the CountState object.
if stat.frequency == CountStat.HOUR:
time_increment = timedelta(hours=1)
elif stat.frequency == CountStat.DAY:
time_increment = timedelta(days=1)
else:
raise AssertionError(f"Unknown frequency: {stat.frequency}")
verify_UTC(fill_to_time)
if floor_to_hour(fill_to_time) != fill_to_time:
raise ValueError(f"fill_to_time must be on an hour boundary: {fill_to_time}")
@@ -132,14 +107,14 @@ def process_count_stat(
fill_state = FillState.objects.filter(property=stat.property).first()
if fill_state is None:
currently_filled = installation_epoch()
fill_state = FillState.objects.create(
property=stat.property, end_time=currently_filled, state=FillState.DONE
)
fill_state = FillState.objects.create(property=stat.property,
end_time=currently_filled,
state=FillState.DONE)
logger.info("INITIALIZED %s %s", stat.property, currently_filled)
elif fill_state.state == FillState.STARTED:
logger.info("UNDO START %s %s", stat.property, fill_state.end_time)
do_delete_counts_at_hour(stat, fill_state.end_time)
currently_filled = fill_state.end_time - stat.time_increment
currently_filled = fill_state.end_time - time_increment
do_update_fill_state(fill_state, currently_filled, FillState.DONE)
logger.info("UNDO DONE %s", stat.property)
elif fill_state.state == FillState.DONE:
@@ -149,15 +124,14 @@ def process_count_stat(
if isinstance(stat, DependentCountStat):
for dependency in stat.dependencies:
dependency_fill_time = COUNT_STATS[dependency].last_successful_fill()
dependency_fill_time = last_successful_fill(dependency)
if dependency_fill_time is None:
logger.warning(
"DependentCountStat %s run before dependency %s.", stat.property, dependency
)
logger.warning("DependentCountStat %s run before dependency %s.",
stat.property, dependency)
return
fill_to_time = min(fill_to_time, dependency_fill_time)
currently_filled = currently_filled + stat.time_increment
currently_filled = currently_filled + time_increment
while currently_filled <= fill_to_time:
logger.info("START %s %s", stat.property, currently_filled)
start = time.time()
@@ -165,35 +139,26 @@ def process_count_stat(
do_fill_count_stat_at_hour(stat, currently_filled, realm)
do_update_fill_state(fill_state, currently_filled, FillState.DONE)
end = time.time()
currently_filled = currently_filled + stat.time_increment
logger.info("DONE %s (%dms)", stat.property, (end - start) * 1000)
currently_filled = currently_filled + time_increment
logger.info("DONE %s (%dms)", stat.property, (end-start)*1000)
def do_update_fill_state(fill_state: FillState, end_time: datetime, state: int) -> None:
fill_state.end_time = end_time
fill_state.state = state
fill_state.save()
# We assume end_time is valid (e.g. is on a day or hour boundary as appropriate)
# and is timezone aware. It is the caller's responsibility to enforce this!
def do_fill_count_stat_at_hour(
stat: CountStat, end_time: datetime, realm: Optional[Realm] = None
) -> None:
def do_fill_count_stat_at_hour(stat: CountStat, end_time: datetime, realm: Optional[Realm]=None) -> None:
start_time = end_time - stat.interval
if not isinstance(stat, LoggingCountStat):
timer = time.time()
assert stat.data_collector.pull_function is not None
assert(stat.data_collector.pull_function is not None)
rows_added = stat.data_collector.pull_function(stat.property, start_time, end_time, realm)
logger.info(
"%s run pull_function (%dms/%sr)",
stat.property,
(time.time() - timer) * 1000,
rows_added,
)
logger.info("%s run pull_function (%dms/%sr)",
stat.property, (time.time()-timer)*1000, rows_added)
do_aggregate_to_summary_table(stat, end_time, realm)
def do_delete_counts_at_hour(stat: CountStat, end_time: datetime) -> None:
if isinstance(stat, LoggingCountStat):
InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()
@@ -205,10 +170,8 @@ def do_delete_counts_at_hour(stat: CountStat, end_time: datetime) -> None:
RealmCount.objects.filter(property=stat.property, end_time=end_time).delete()
InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()
def do_aggregate_to_summary_table(
stat: CountStat, end_time: datetime, realm: Optional[Realm] = None
) -> None:
def do_aggregate_to_summary_table(stat: CountStat, end_time: datetime,
realm: Optional[Realm]=None) -> None:
cursor = connection.cursor()
# Aggregate into RealmCount
@@ -219,8 +182,7 @@ def do_aggregate_to_summary_table(
realm_clause = SQL("")
if output_table in (UserCount, StreamCount):
realmcount_query = SQL(
"""
realmcount_query = SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
@@ -235,25 +197,19 @@ def do_aggregate_to_summary_table(
{output_table}.end_time = %(end_time)s
{realm_clause}
GROUP BY zerver_realm.id, {output_table}.subgroup
"""
).format(
""").format(
output_table=Identifier(output_table._meta.db_table),
realm_clause=realm_clause,
)
start = time.time()
cursor.execute(
realmcount_query,
{
"property": stat.property,
"end_time": end_time,
},
)
cursor.execute(realmcount_query, {
'property': stat.property,
'end_time': end_time,
})
end = time.time()
logger.info(
"%s RealmCount aggregation (%dms/%sr)",
stat.property,
(end - start) * 1000,
cursor.rowcount,
stat.property, (end - start) * 1000, cursor.rowcount,
)
if realm is None:
@@ -262,8 +218,7 @@ def do_aggregate_to_summary_table(
#
# TODO: Add support for updating installation data after
# changing an individual realm's values.
installationcount_query = SQL(
"""
installationcount_query = SQL("""
INSERT INTO analytics_installationcount
(value, property, subgroup, end_time)
SELECT
@@ -273,47 +228,36 @@ def do_aggregate_to_summary_table(
property = %(property)s AND
end_time = %(end_time)s
GROUP BY analytics_realmcount.subgroup
"""
)
""")
start = time.time()
cursor.execute(
installationcount_query,
{
"property": stat.property,
"end_time": end_time,
},
)
cursor.execute(installationcount_query, {
'property': stat.property,
'end_time': end_time,
})
end = time.time()
logger.info(
"%s InstallationCount aggregation (%dms/%sr)",
stat.property,
(end - start) * 1000,
cursor.rowcount,
stat.property, (end - start) * 1000, cursor.rowcount,
)
cursor.close()
## Utility functions called from outside counts.py ##
# called from zerver/lib/actions.py; should not throw any errors
def do_increment_logging_stat(
zerver_object: Union[Realm, UserProfile, Stream],
stat: CountStat,
subgroup: Optional[Union[str, int, bool]],
event_time: datetime,
increment: int = 1,
) -> None:
def do_increment_logging_stat(zerver_object: Union[Realm, UserProfile, Stream], stat: CountStat,
subgroup: Optional[Union[str, int, bool]], event_time: datetime,
increment: int=1) -> None:
if not increment:
return
table = stat.data_collector.output_table
if table == RealmCount:
id_args = {"realm": zerver_object}
id_args = {'realm': zerver_object}
elif table == UserCount:
id_args = {"realm": zerver_object.realm, "user": zerver_object}
id_args = {'realm': zerver_object.realm, 'user': zerver_object}
else: # StreamCount
id_args = {"realm": zerver_object.realm, "stream": zerver_object}
id_args = {'realm': zerver_object.realm, 'stream': zerver_object}
if stat.frequency == CountStat.DAY:
end_time = ceiling_to_day(event_time)
@@ -321,16 +265,11 @@ def do_increment_logging_stat(
end_time = ceiling_to_hour(event_time)
row, created = table.objects.get_or_create(
property=stat.property,
subgroup=subgroup,
end_time=end_time,
defaults={"value": increment},
**id_args,
)
property=stat.property, subgroup=subgroup, end_time=end_time,
defaults={'value': increment}, **id_args)
if not created:
row.value = F("value") + increment
row.save(update_fields=["value"])
row.value = F('value') + increment
row.save(update_fields=['value'])
def do_drop_all_analytics_tables() -> None:
UserCount.objects.all().delete()
@@ -339,7 +278,6 @@ def do_drop_all_analytics_tables() -> None:
InstallationCount.objects.all().delete()
FillState.objects.all().delete()
def do_drop_single_stat(property: str) -> None:
UserCount.objects.filter(property=property).delete()
StreamCount.objects.filter(property=property).delete()
@@ -347,12 +285,10 @@ def do_drop_single_stat(property: str) -> None:
InstallationCount.objects.filter(property=property).delete()
FillState.objects.filter(property=property).delete()
## DataCollector-level operations ##
QueryFn = Callable[[Dict[str, Composable]], Composable]
def do_pull_by_sql_query(
property: str,
start_time: datetime,
@@ -361,44 +297,37 @@ def do_pull_by_sql_query(
group_by: Optional[Tuple[models.Model, str]],
) -> int:
if group_by is None:
subgroup = SQL("NULL")
group_by_clause = SQL("")
subgroup = SQL('NULL')
group_by_clause = SQL('')
else:
subgroup = Identifier(group_by[0]._meta.db_table, group_by[1])
group_by_clause = SQL(", {}").format(subgroup)
group_by_clause = SQL(', {}').format(subgroup)
# We do string replacement here because cursor.execute will reject a
# group_by_clause given as a param.
# We pass in the datetimes as params to cursor.execute so that we don't have to
# think about how to convert python datetimes to SQL datetimes.
query_ = query(
{
"subgroup": subgroup,
"group_by_clause": group_by_clause,
}
)
query_ = query({
'subgroup': subgroup,
'group_by_clause': group_by_clause,
})
cursor = connection.cursor()
cursor.execute(
query_,
{
"property": property,
"time_start": start_time,
"time_end": end_time,
},
)
cursor.execute(query_, {
'property': property,
'time_start': start_time,
'time_end': end_time,
})
rowcount = cursor.rowcount
cursor.close()
return rowcount
def sql_data_collector(
output_table: Type[BaseCount],
query: QueryFn,
group_by: Optional[Tuple[models.Model, str]],
) -> DataCollector:
def pull_function(
property: str, start_time: datetime, end_time: datetime, realm: Optional[Realm] = None
) -> int:
def pull_function(property: str, start_time: datetime, end_time: datetime,
realm: Optional[Realm] = None) -> int:
# The pull function type needs to accept a Realm argument
# because the 'minutes_active::day' CountStat uses
# DataCollector directly for do_pull_minutes_active, which
@@ -406,23 +335,16 @@ def sql_data_collector(
# realm should have been already encoded in the `query` we're
# passed.
return do_pull_by_sql_query(property, start_time, end_time, query, group_by)
return DataCollector(output_table, pull_function)
def do_pull_minutes_active(
property: str, start_time: datetime, end_time: datetime, realm: Optional[Realm] = None
) -> int:
user_activity_intervals = (
UserActivityInterval.objects.filter(
end__gt=start_time,
start__lt=end_time,
)
.select_related(
"user_profile",
)
.values_list("user_profile_id", "user_profile__realm_id", "start", "end")
)
def do_pull_minutes_active(property: str, start_time: datetime, end_time: datetime,
realm: Optional[Realm] = None) -> int:
user_activity_intervals = UserActivityInterval.objects.filter(
end__gt=start_time, start__lt=end_time,
).select_related(
'user_profile',
).values_list(
'user_profile_id', 'user_profile__realm_id', 'start', 'end')
seconds_active: Dict[Tuple[int, int], float] = defaultdict(float)
for user_id, realm_id, interval_start, interval_end in user_activity_intervals:
@@ -431,28 +353,18 @@ def do_pull_minutes_active(
end = min(end_time, interval_end)
seconds_active[(user_id, realm_id)] += (end - start).total_seconds()
rows = [
UserCount(
user_id=ids[0],
realm_id=ids[1],
property=property,
end_time=end_time,
value=int(seconds // 60),
)
for ids, seconds in seconds_active.items()
if seconds >= 60
]
rows = [UserCount(user_id=ids[0], realm_id=ids[1], property=property,
end_time=end_time, value=int(seconds // 60))
for ids, seconds in seconds_active.items() if seconds >= 60]
UserCount.objects.bulk_create(rows)
return len(rows)
def count_message_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(user_id, realm_id, value, property, subgroup, end_time)
SELECT
@@ -468,9 +380,7 @@ def count_message_by_user_query(realm: Optional[Realm]) -> QueryFn:
{realm_clause}
zerver_message.date_sent < %(time_end)s
GROUP BY zerver_userprofile.id {group_by_clause}
"""
).format(**kwargs, realm_clause=realm_clause)
""").format(**kwargs, realm_clause=realm_clause)
# Note: ignores the group_by / group_by_clause.
def count_message_type_by_user_query(realm: Optional[Realm]) -> QueryFn:
@@ -478,8 +388,7 @@ def count_message_type_by_user_query(realm: Optional[Realm]) -> QueryFn:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(realm_id, user_id, value, property, subgroup, end_time)
SELECT realm_id, id, SUM(count) AS value, %(property)s, message_type, %(time_end)s
@@ -514,9 +423,7 @@ def count_message_type_by_user_query(realm: Optional[Realm]) -> QueryFn:
zerver_recipient.type, zerver_stream.invite_only
) AS subquery
GROUP BY realm_id, id, message_type
"""
).format(**kwargs, realm_clause=realm_clause)
""").format(**kwargs, realm_clause=realm_clause)
# This query joins to the UserProfile table since all current queries that
# use this also subgroup on UserProfile.is_bot. If in the future there is a
@@ -527,8 +434,7 @@ def count_message_by_stream_query(realm: Optional[Realm]) -> QueryFn:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_stream.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_streamcount
(stream_id, realm_id, value, property, subgroup, end_time)
SELECT
@@ -550,9 +456,7 @@ def count_message_by_stream_query(realm: Optional[Realm]) -> QueryFn:
{realm_clause}
zerver_message.date_sent < %(time_end)s
GROUP BY zerver_stream.id {group_by_clause}
"""
).format(**kwargs, realm_clause=realm_clause)
""").format(**kwargs, realm_clause=realm_clause)
# Hardcodes the query needed by active_users:is_bot:day, since that is
# currently the only stat that uses this.
@@ -561,8 +465,7 @@ def count_user_by_realm_query(realm: Optional[Realm]) -> QueryFn:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
@@ -578,9 +481,7 @@ def count_user_by_realm_query(realm: Optional[Realm]) -> QueryFn:
{realm_clause}
zerver_userprofile.is_active = TRUE
GROUP BY zerver_realm.id {group_by_clause}
"""
).format(**kwargs, realm_clause=realm_clause)
""").format(**kwargs, realm_clause=realm_clause)
# Currently hardcodes the query needed for active_users_audit:is_bot:day.
# Assumes that a user cannot have two RealmAuditLog entries with the same event_time and
@@ -591,8 +492,7 @@ def check_realmauditlog_by_user_query(realm: Optional[Realm]) -> QueryFn:
realm_clause = SQL("")
else:
realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(user_id, realm_id, value, property, subgroup, end_time)
SELECT
@@ -615,8 +515,7 @@ def check_realmauditlog_by_user_query(realm: Optional[Realm]) -> QueryFn:
ral1.modified_user_id = zerver_userprofile.id
WHERE
ral1.event_type in ({user_created}, {user_activated}, {user_reactivated})
"""
).format(
""").format(
**kwargs,
user_created=Literal(RealmAuditLog.USER_CREATED),
user_activated=Literal(RealmAuditLog.USER_ACTIVATED),
@@ -625,14 +524,12 @@ def check_realmauditlog_by_user_query(realm: Optional[Realm]) -> QueryFn:
realm_clause=realm_clause,
)
def check_useractivityinterval_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(user_id, realm_id, value, property, subgroup, end_time)
SELECT
@@ -646,17 +543,14 @@ def check_useractivityinterval_by_user_query(realm: Optional[Realm]) -> QueryFn:
{realm_clause}
zerver_useractivityinterval.start < %(time_end)s
GROUP BY zerver_userprofile.id {group_by_clause}
"""
).format(**kwargs, realm_clause=realm_clause)
""").format(**kwargs, realm_clause=realm_clause)
def count_realm_active_humans_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL(
"""
return lambda kwargs: SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
@@ -681,13 +575,10 @@ def count_realm_active_humans_query(realm: Optional[Realm]) -> QueryFn:
ON
usercount1.user_id = usercount2.user_id
GROUP BY usercount1.realm_id
"""
).format(**kwargs, realm_clause=realm_clause)
""").format(**kwargs, realm_clause=realm_clause)
# Currently unused and untested
count_stream_by_realm_query = lambda kwargs: SQL(
"""
count_stream_by_realm_query = lambda kwargs: SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
@@ -701,77 +592,62 @@ count_stream_by_realm_query = lambda kwargs: SQL(
zerver_stream.date_created >= %(time_start)s AND
zerver_stream.date_created < %(time_end)s
GROUP BY zerver_realm.id {group_by_clause}
"""
).format(**kwargs)
""").format(**kwargs)
def get_count_stats(realm: Optional[Realm] = None) -> Dict[str, CountStat]:
def get_count_stats(realm: Optional[Realm]=None) -> Dict[str, CountStat]:
## CountStat declarations ##
count_stats_ = [
# Messages sent stats
# Messages Sent stats
# Stats that count the number of messages sent in various ways.
# These are also the set of stats that read from the Message table.
CountStat(
"messages_sent:is_bot:hour",
sql_data_collector(
UserCount, count_message_by_user_query(realm), (UserProfile, "is_bot")
),
CountStat.HOUR,
),
CountStat(
"messages_sent:message_type:day",
sql_data_collector(UserCount, count_message_type_by_user_query(realm), None),
CountStat.DAY,
),
CountStat(
"messages_sent:client:day",
sql_data_collector(
UserCount, count_message_by_user_query(realm), (Message, "sending_client_id")
),
CountStat.DAY,
),
CountStat(
"messages_in_stream:is_bot:day",
sql_data_collector(
StreamCount, count_message_by_stream_query(realm), (UserProfile, "is_bot")
),
CountStat.DAY,
),
# Number of users stats
CountStat('messages_sent:is_bot:hour',
sql_data_collector(UserCount, count_message_by_user_query(
realm), (UserProfile, 'is_bot')),
CountStat.HOUR),
CountStat('messages_sent:message_type:day',
sql_data_collector(
UserCount, count_message_type_by_user_query(realm), None),
CountStat.DAY),
CountStat('messages_sent:client:day',
sql_data_collector(UserCount, count_message_by_user_query(realm),
(Message, 'sending_client_id')), CountStat.DAY),
CountStat('messages_in_stream:is_bot:day',
sql_data_collector(StreamCount, count_message_by_stream_query(realm),
(UserProfile, 'is_bot')), CountStat.DAY),
# Number of Users stats
# Stats that count the number of active users in the UserProfile.is_active sense.
# 'active_users_audit:is_bot:day' is the canonical record of which users were
# active on which days (in the UserProfile.is_active sense).
# Important that this stay a daily stat, so that 'realm_active_humans::day' works as expected.
CountStat(
"active_users_audit:is_bot:day",
sql_data_collector(
UserCount, check_realmauditlog_by_user_query(realm), (UserProfile, "is_bot")
),
CountStat.DAY,
),
CountStat('active_users_audit:is_bot:day',
sql_data_collector(UserCount, check_realmauditlog_by_user_query(
realm), (UserProfile, 'is_bot')),
CountStat.DAY),
# Important note: LoggingCountStat objects aren't passed the
# Realm argument, because by nature they have a logging
# structure, not a pull-from-database structure, so there's no
# way to compute them for a single realm after the fact (the
# use case for passing a Realm argument).
# Sanity check on 'active_users_audit:is_bot:day', and an archetype for future LoggingCountStats.
# In RealmCount, 'active_users_audit:is_bot:day' should be the partial
# sum sequence of 'active_users_log:is_bot:day', for any realm that
# started after the latter stat was introduced.
LoggingCountStat("active_users_log:is_bot:day", RealmCount, CountStat.DAY),
LoggingCountStat('active_users_log:is_bot:day',
RealmCount, CountStat.DAY),
# Another sanity check on 'active_users_audit:is_bot:day'. Is only an
# approximation, e.g. if a user is deactivated between the end of the
# day and when this stat is run, they won't be counted. However, is the
# simplest of the three to inspect by hand.
CountStat(
"active_users:is_bot:day",
sql_data_collector(
RealmCount, count_user_by_realm_query(realm), (UserProfile, "is_bot")
),
CountStat.DAY,
interval=TIMEDELTA_MAX,
),
CountStat('active_users:is_bot:day',
sql_data_collector(RealmCount, count_user_by_realm_query(realm), (UserProfile, 'is_bot')),
CountStat.DAY, interval=TIMEDELTA_MAX),
# Messages read stats. messages_read::hour is the total
# number of messages read, whereas
# messages_read_interactions::hour tries to count the total
@@ -779,47 +655,40 @@ def get_count_stats(realm: Optional[Realm] = None) -> Dict[str, CountStat]:
# as read (imperfect because of batching of some request
# types, but less likely to be overwhelmed by a single bulk
# operation).
LoggingCountStat("messages_read::hour", UserCount, CountStat.HOUR),
LoggingCountStat("messages_read_interactions::hour", UserCount, CountStat.HOUR),
# User activity stats
LoggingCountStat('messages_read::hour', UserCount, CountStat.HOUR),
LoggingCountStat('messages_read_interactions::hour', UserCount, CountStat.HOUR),
# User Activity stats
# Stats that measure user activity in the UserActivityInterval sense.
CountStat(
"1day_actives::day",
sql_data_collector(UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY,
interval=timedelta(days=1) - UserActivityInterval.MIN_INTERVAL_LENGTH,
),
CountStat(
"7day_actives::day",
sql_data_collector(UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY,
interval=timedelta(days=7) - UserActivityInterval.MIN_INTERVAL_LENGTH,
),
CountStat(
"15day_actives::day",
sql_data_collector(UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY,
interval=timedelta(days=15) - UserActivityInterval.MIN_INTERVAL_LENGTH,
),
CountStat(
"minutes_active::day", DataCollector(UserCount, do_pull_minutes_active), CountStat.DAY
),
CountStat('1day_actives::day',
sql_data_collector(
UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY, interval=timedelta(days=1)-UserActivityInterval.MIN_INTERVAL_LENGTH),
CountStat('15day_actives::day',
sql_data_collector(
UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY, interval=timedelta(days=15)-UserActivityInterval.MIN_INTERVAL_LENGTH),
CountStat('minutes_active::day', DataCollector(
UserCount, do_pull_minutes_active), CountStat.DAY),
# Rate limiting stats
# Used to limit the number of invitation emails sent by a realm
LoggingCountStat("invites_sent::day", RealmCount, CountStat.DAY),
LoggingCountStat('invites_sent::day', RealmCount, CountStat.DAY),
# Dependent stats
# Must come after their dependencies.
# Canonical account of the number of active humans in a realm on each day.
DependentCountStat(
"realm_active_humans::day",
sql_data_collector(RealmCount, count_realm_active_humans_query(realm), None),
CountStat.DAY,
dependencies=["active_users_audit:is_bot:day", "15day_actives::day"],
),
DependentCountStat('realm_active_humans::day',
sql_data_collector(
RealmCount, count_realm_active_humans_query(realm), None),
CountStat.DAY,
dependencies=['active_users_audit:is_bot:day', '15day_actives::day']),
]
return OrderedDict((stat.property, stat) for stat in count_stats_)
return OrderedDict([(stat.property, stat) for stat in count_stats_])
# To avoid refactoring for now COUNT_STATS can be used as before
COUNT_STATS = get_count_stats()
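A usage sketch for the API above, not part of this diff: process_count_stat requires an aware UTC fill_to_time on an hour boundary, so a typical caller (the stat name is one of the COUNT_STATS keys declared above; imports mirror the ones used in these files) might look like:

    from django.utils.timezone import now as timezone_now

    from analytics.lib.counts import COUNT_STATS, process_count_stat
    from zerver.lib.timestamp import floor_to_hour

    # Fill a single stat up to the current hour; floor_to_hour keeps the
    # datetime aware and on an hour boundary, as process_count_stat checks.
    stat = COUNT_STATS["messages_sent:is_bot:hour"]
    process_count_stat(stat, fill_to_time=floor_to_hour(timezone_now()))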
@@ -5,18 +5,11 @@ from typing import List
from analytics.lib.counts import CountStat
def generate_time_series_data(
days: int = 100,
business_hours_base: float = 10,
non_business_hours_base: float = 10,
growth: float = 1,
autocorrelation: float = 0,
spikiness: float = 1,
holiday_rate: float = 0,
frequency: str = CountStat.DAY,
partial_sum: bool = False,
random_seed: int = 26,
) -> List[int]:
def generate_time_series_data(days: int=100, business_hours_base: float=10,
non_business_hours_base: float=10, growth: float=1,
autocorrelation: float=0, spikiness: float=1,
holiday_rate: float=0, frequency: str=CountStat.DAY,
partial_sum: bool=False, random_seed: int=26) -> List[int]:
"""
Generate semi-realistic looking time series data for testing analytics graphs.
@@ -37,43 +30,35 @@ def generate_time_series_data(
random_seed -- Seed for random number generator.
"""
if frequency == CountStat.HOUR:
length = days * 24
length = days*24
seasonality = [non_business_hours_base] * 24 * 7
for day in range(5):
for hour in range(8):
seasonality[24 * day + hour] = business_hours_base
holidays = []
seasonality[24*day + hour] = business_hours_base
holidays = []
for i in range(days):
holidays.extend([random() < holiday_rate] * 24)
elif frequency == CountStat.DAY:
length = days
seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [
24 * non_business_hours_base
] * 2
seasonality = [8*business_hours_base + 16*non_business_hours_base] * 5 + \
[24*non_business_hours_base] * 2
holidays = [random() < holiday_rate for i in range(days)]
else:
raise AssertionError(f"Unknown frequency: {frequency}")
if length < 2:
raise AssertionError(
f"Must be generating at least 2 data points. Currently generating {length}"
)
growth_base = growth ** (1.0 / (length - 1))
values_no_noise = [
seasonality[i % len(seasonality)] * (growth_base ** i) for i in range(length)
]
raise AssertionError("Must be generating at least 2 data points. "
f"Currently generating {length}")
growth_base = growth ** (1. / (length-1))
values_no_noise = [seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)]
seed(random_seed)
noise_scalars = [gauss(0, 1)]
for i in range(1, length):
noise_scalars.append(
noise_scalars[-1] * autocorrelation + gauss(0, 1) * (1 - autocorrelation)
)
noise_scalars.append(noise_scalars[-1]*autocorrelation + gauss(0, 1)*(1-autocorrelation))
values = [
0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)
]
values = [0 if holiday else int(v + sqrt(v)*noise_scalar*spikiness)
for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)]
if partial_sum:
for i in range(1, length):
values[i] = values[i - 1] + values[i]
values[i] = values[i-1] + values[i]
return [max(v, 0) for v in values]
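A quick sketch of calling the generator above, with the function and CountStat in scope as defined here (all parameter values illustrative; the defaults cover autocorrelation, holidays, and the random seed). At CountStat.DAY frequency it returns one value per day:

    values = generate_time_series_data(
        days=30,
        business_hours_base=20,
        non_business_hours_base=5,
        growth=2,  # roughly 2x over the window
        spikiness=2,
        frequency=CountStat.DAY,
    )
    assert len(values) == 30  # one data point per day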
@@ -9,9 +9,8 @@ from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC
# If min_length is greater than 0, pads the list to the left.
# So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22],
# and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
def time_range(
start: datetime, end: datetime, frequency: str, min_length: Optional[int]
) -> List[datetime]:
def time_range(start: datetime, end: datetime, frequency: str,
min_length: Optional[int]) -> List[datetime]:
verify_UTC(start)
verify_UTC(end)
if frequency == CountStat.HOUR:
@@ -25,7 +24,7 @@ def time_range(
times = []
if min_length is not None:
start = min(start, end - (min_length - 1) * step)
start = min(start, end - (min_length-1)*step)
current = end
while current >= start:
times.append(current)
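The comment at the top of this hunk pins down the behavior; a small illustrative check (datetimes must be UTC-aware, and time_range and CountStat are assumed to be in scope as above):

    from datetime import datetime, timezone

    start = datetime(2020, 9, 20, tzinfo=timezone.utc)
    end = datetime(2020, 9, 22, tzinfo=timezone.utc)
    # Sep 20 .. Sep 22, one entry per day:
    assert len(time_range(start, end, CountStat.DAY, None)) == 3
    # min_length=5 pads on the left, back to Sep 18:
    assert len(time_range(start, end, CountStat.DAY, 5)) == 5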
@@ -0,0 +1,82 @@
import datetime
import logging
import time
from typing import Any, Dict
from django.core.management.base import BaseCommand, CommandParser
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.models import Message, Recipient
def compute_stats(log_level: int) -> None:
logger = logging.getLogger()
logger.setLevel(log_level)
one_week_ago = timestamp_to_datetime(time.time()) - datetime.timedelta(weeks=1)
mit_query = Message.objects.filter(sender__realm__string_id="zephyr",
recipient__type=Recipient.STREAM,
date_sent__gt=one_week_ago)
for bot_sender_start in ["imap.", "rcmd.", "sys."]:
mit_query = mit_query.exclude(sender__email__startswith=(bot_sender_start))
# Filtering for "/" covers tabbott/extra@ and all the daemon/foo bots.
mit_query = mit_query.exclude(sender__email__contains=("/"))
mit_query = mit_query.exclude(sender__email__contains=("aim.com"))
mit_query = mit_query.exclude(
sender__email__in=["rss@mit.edu", "bash@mit.edu", "apache@mit.edu",
"bitcoin@mit.edu", "lp@mit.edu", "clocks@mit.edu",
"root@mit.edu", "nagios@mit.edu",
"www-data|local-realm@mit.edu"])
user_counts: Dict[str, Dict[str, int]] = {}
for m in mit_query.select_related("sending_client", "sender"):
email = m.sender.email
user_counts.setdefault(email, {})
user_counts[email].setdefault(m.sending_client.name, 0)
user_counts[email][m.sending_client.name] += 1
total_counts: Dict[str, int] = {}
total_user_counts: Dict[str, int] = {}
for email, counts in user_counts.items():
total_user_counts.setdefault(email, 0)
for client_name, count in counts.items():
total_counts.setdefault(client_name, 0)
total_counts[client_name] += count
total_user_counts[email] += count
logging.debug("%40s | %10s | %s", "User", "Messages", "Percentage Zulip")
top_percents: Dict[int, float] = {}
for size in [10, 25, 50, 100, 200, len(total_user_counts.keys())]:
top_percents[size] = 0.0
for i, email in enumerate(sorted(total_user_counts.keys(),
key=lambda x: -total_user_counts[x])):
percent_zulip = round(100 - (user_counts[email].get("zephyr_mirror", 0)) * 100. /
total_user_counts[email], 1)
for size in top_percents.keys():
top_percents.setdefault(size, 0)
if i < size:
top_percents[size] += (percent_zulip * 1.0 / size)
logging.debug("%40s | %10s | %s%%", email, total_user_counts[email],
percent_zulip)
logging.info("")
for size in sorted(top_percents.keys()):
logging.info("Top %6s | %s%%", size, round(top_percents[size], 1))
grand_total = sum(total_counts.values())
print(grand_total)
logging.info("%15s | %s", "Client", "Percentage")
for client in total_counts.keys():
logging.info("%15s | %s%%", client, round(100. * total_counts[client] / grand_total, 1))
class Command(BaseCommand):
help = "Compute statistics on MIT Zephyr usage."
def add_arguments(self, parser: CommandParser) -> None:
parser.add_argument('--verbose', default=False, action='store_true')
def handle(self, *args: Any, **options: Any) -> None:
level = logging.INFO
if options["verbose"]:
level = logging.DEBUG
compute_stats(level)
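As a worked check of the percent_zulip computation above, with made-up numbers: a sender whose 100 stream messages include 80 sent via the zephyr_mirror client is 20% Zulip:

    round(100 - 80 * 100.0 / 100, 1)  # == 20.0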
@@ -0,0 +1,56 @@
import datetime
from typing import Any, Dict
from django.core.management.base import BaseCommand, CommandParser
from zerver.lib.statistics import seconds_usage_between
from zerver.models import UserProfile
def analyze_activity(options: Dict[str, Any]) -> None:
day_start = datetime.datetime.strptime(options["date"], "%Y-%m-%d").replace(tzinfo=datetime.timezone.utc)
day_end = day_start + datetime.timedelta(days=options["duration"])
user_profile_query = UserProfile.objects.all()
if options["realm"]:
user_profile_query = user_profile_query.filter(realm__string_id=options["realm"])
print("Per-user online duration:\n")
total_duration = datetime.timedelta(0)
for user_profile in user_profile_query:
duration = seconds_usage_between(user_profile, day_start, day_end)
if duration == datetime.timedelta(0):
continue
total_duration += duration
print(f"{user_profile.email:<37}{duration}")
print(f"\nTotal Duration: {total_duration}")
print(f"\nTotal Duration in minutes: {total_duration.total_seconds() / 60.}")
print(f"Total Duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}")
class Command(BaseCommand):
help = """Report analytics of user activity on a per-user and realm basis.
This command aggregates user activity data that is collected by each user using Zulip. It attempts
to approximate how much each user has been using Zulip per day, measured by recording each 15 minute
period where some activity has occurred (mouse move or keyboard activity).
It will correctly not count server-initiated reloads in the activity statistics.
The duration flag can be used to control how many days to show usage duration for
Usage: ./manage.py analyze_user_activity [--realm=zulip] [--date=2013-09-10] [--duration=1]
By default, if no date is selected 2013-09-10 is used. If no realm is provided, information
is shown for all realms"""
def add_arguments(self, parser: CommandParser) -> None:
parser.add_argument('--realm', action='store')
parser.add_argument('--date', action='store', default="2013-09-06")
parser.add_argument('--duration', action='store', default=1, type=int,
help="How many days to show usage information for")
def handle(self, *args: Any, **options: Any) -> None:
analyze_activity(options)
@@ -7,7 +7,7 @@ from django.core.management.base import BaseCommand
from django.utils.timezone import now as timezone_now
from analytics.lib.counts import COUNT_STATS, CountStat
from analytics.models import installation_epoch
from analytics.models import installation_epoch, last_successful_fill
from zerver.lib.timestamp import TimezoneNotUTCException, floor_to_day, floor_to_hour, verify_UTC
from zerver.models import Realm
@@ -18,7 +18,6 @@ states = {
3: "UNKNOWN",
}
class Command(BaseCommand):
help = """Checks FillState table.
@@ -26,8 +25,8 @@ class Command(BaseCommand):
def handle(self, *args: Any, **options: Any) -> None:
fill_state = self.get_fill_state()
status = fill_state["status"]
message = fill_state["message"]
status = fill_state['status']
message = fill_state['message']
state_file_path = "/var/lib/nagios_state/check-analytics-state"
state_file_tmp = state_file_path + "-tmp"
@@ -38,18 +37,18 @@ class Command(BaseCommand):
def get_fill_state(self) -> Dict[str, Any]:
if not Realm.objects.exists():
return {"status": 0, "message": "No realms exist, so not checking FillState."}
return {'status': 0, 'message': 'No realms exist, so not checking FillState.'}
warning_unfilled_properties = []
critical_unfilled_properties = []
for property, stat in COUNT_STATS.items():
last_fill = stat.last_successful_fill()
last_fill = last_successful_fill(property)
if last_fill is None:
last_fill = installation_epoch()
try:
verify_UTC(last_fill)
except TimezoneNotUTCException:
return {"status": 2, "message": f"FillState not in UTC for {property}"}
return {'status': 2, 'message': f'FillState not in UTC for {property}'}
if stat.frequency == CountStat.DAY:
floor_function = floor_to_day
@@ -61,10 +60,7 @@ class Command(BaseCommand):
critical_threshold = timedelta(minutes=150)
if floor_function(last_fill) != last_fill:
return {
"status": 2,
"message": f"FillState not on {stat.frequency} boundary for {property}",
}
return {'status': 2, 'message': f'FillState not on {stat.frequency} boundary for {property}'}
time_to_last_fill = timezone_now() - last_fill
if time_to_last_fill > critical_threshold:
@@ -73,18 +69,18 @@ class Command(BaseCommand):
warning_unfilled_properties.append(property)
if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0:
return {"status": 0, "message": "FillState looks fine."}
return {'status': 0, 'message': 'FillState looks fine.'}
if len(critical_unfilled_properties) == 0:
return {
"status": 1,
"message": "Missed filling {} once.".format(
", ".join(warning_unfilled_properties),
'status': 1,
'message': 'Missed filling {} once.'.format(
', '.join(warning_unfilled_properties),
),
}
return {
"status": 2,
"message": "Missed filling {} once. Missed filling {} at least twice.".format(
", ".join(warning_unfilled_properties),
", ".join(critical_unfilled_properties),
'status': 2,
'message': 'Missed filling {} once. Missed filling {} at least twice.'.format(
', '.join(warning_unfilled_properties),
', '.join(critical_unfilled_properties),
),
}
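The integer statuses returned by get_fill_state follow the standard Nagios convention suggested by the states table at the top of this file (0 OK, 1 WARNING, 2 CRITICAL, 3 UNKNOWN). A minimal sketch of rendering a result, with an illustrative message:

    states = {0: "OK", 1: "WARNING", 2: "CRITICAL", 3: "UNKNOWN"}
    result = {"status": 1, "message": "Missed filling 1day_actives::day once."}
    print(states[result["status"]], result["message"], sep=": ")
    # -> WARNING: Missed filling 1day_actives::day once.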
@@ -10,12 +10,12 @@ class Command(BaseCommand):
help = """Clear analytics tables."""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument("--force", action="store_true", help="Clear analytics tables.")
parser.add_argument('--force',
action='store_true',
help="Clear analytics tables.")
def handle(self, *args: Any, **options: Any) -> None:
if options["force"]:
if options['force']:
do_drop_all_analytics_tables()
else:
raise CommandError(
"Would delete all data from analytics tables (!); use --force to do so."
)
raise CommandError("Would delete all data from analytics tables (!); use --force to do so.")
@@ -10,14 +10,18 @@ class Command(BaseCommand):
help = """Clear analytics tables."""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument("--force", action="store_true", help="Actually do it.")
parser.add_argument("--property", help="The property of the stat to be cleared.")
parser.add_argument('--force',
action='store_true',
help="Actually do it.")
parser.add_argument('--property',
type=str,
help="The property of the stat to be cleared.")
def handle(self, *args: Any, **options: Any) -> None:
property = options["property"]
property = options['property']
if property not in COUNT_STATS:
raise CommandError(f"Invalid property: {property}")
if not options["force"]:
if not options['force']:
raise CommandError("No action taken. Use --force.")
do_drop_single_stat(property)
@@ -0,0 +1,74 @@
import datetime
from argparse import ArgumentParser
from typing import Any, Optional
from django.db.models import Count, QuerySet
from django.utils.timezone import now as timezone_now
from zerver.lib.management import ZulipBaseCommand
from zerver.models import UserActivity
class Command(ZulipBaseCommand):
help = """Report rough client activity globally, for a realm, or for a user
Usage examples:
./manage.py client_activity --target server
./manage.py client_activity --target realm --realm zulip
./manage.py client_activity --target user --user hamlet@zulip.com --realm zulip"""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('--target', dest='target', required=True, type=str,
help="'server' will calculate client activity of the entire server. "
"'realm' will calculate client activity of realm. "
"'user' will calculate client activity of the user.")
parser.add_argument('--user', dest='user', type=str,
help="The email address of the user you want to calculate activity.")
self.add_realm_args(parser)
def compute_activity(self, user_activity_objects: QuerySet) -> None:
# Report data from the past week.
#
# This is a rough report of client activity because we inconsistently
# register activity from various clients; think of it as telling you
# approximately how many people from a group have used a particular
# client recently. For example, this might be useful to get a sense of
# how popular different versions of a desktop client are.
#
# Importantly, this does NOT tell you anything about the relative
# volumes of requests from clients.
threshold = timezone_now() - datetime.timedelta(days=7)
client_counts = user_activity_objects.filter(
last_visit__gt=threshold).values("client__name").annotate(
count=Count('client__name'))
total = 0
counts = []
for client_type in client_counts:
count = client_type["count"]
client = client_type["client__name"]
total += count
counts.append((count, client))
counts.sort()
for count in counts:
print(f"{count[1]:>25} {count[0]:15}")
print("Total:", total)
def handle(self, *args: Any, **options: Optional[str]) -> None:
realm = self.get_realm(options)
if options["user"] is None:
if options["target"] == "server" and realm is None:
# Report global activity.
self.compute_activity(UserActivity.objects.all())
elif options["target"] == "realm" and realm is not None:
self.compute_activity(UserActivity.objects.filter(user_profile__realm=realm))
else:
self.print_help("./manage.py", "client_activity")
elif options["target"] == "user":
user_profile = self.get_user(options["user"], realm)
self.compute_activity(UserActivity.objects.filter(user_profile=user_profile))
else:
self.print_help("./manage.py", "client_activity")
@@ -16,7 +16,7 @@ from analytics.models import (
StreamCount,
UserCount,
)
from zerver.lib.actions import STREAM_ASSIGNMENT_COLORS, do_change_user_role, do_create_realm
from zerver.lib.actions import STREAM_ASSIGNMENT_COLORS, do_change_user_role
from zerver.lib.create_user import create_user
from zerver.lib.timestamp import floor_to_day
from zerver.models import Client, Realm, Recipient, Stream, Subscription, UserProfile
@@ -28,30 +28,16 @@ class Command(BaseCommand):
DAYS_OF_DATA = 100
random_seed = 26
def generate_fixture_data(
self,
stat: CountStat,
business_hours_base: float,
non_business_hours_base: float,
growth: float,
autocorrelation: float,
spikiness: float,
holiday_rate: float = 0,
partial_sum: bool = False,
) -> List[int]:
def generate_fixture_data(self, stat: CountStat, business_hours_base: float,
non_business_hours_base: float, growth: float,
autocorrelation: float, spikiness: float,
holiday_rate: float=0, partial_sum: bool=False) -> List[int]:
self.random_seed += 1
return generate_time_series_data(
days=self.DAYS_OF_DATA,
business_hours_base=business_hours_base,
non_business_hours_base=non_business_hours_base,
growth=growth,
autocorrelation=autocorrelation,
spikiness=spikiness,
holiday_rate=holiday_rate,
frequency=stat.frequency,
partial_sum=partial_sum,
random_seed=self.random_seed,
)
days=self.DAYS_OF_DATA, business_hours_base=business_hours_base,
non_business_hours_base=non_business_hours_base, growth=growth,
autocorrelation=autocorrelation, spikiness=spikiness, holiday_rate=holiday_rate,
frequency=stat.frequency, partial_sum=partial_sum, random_seed=self.random_seed)
def handle(self, *args: Any, **options: Any) -> None:
# TODO: This should arguably only delete the objects
@@ -59,7 +45,7 @@ class Command(BaseCommand):
do_drop_all_analytics_tables()
# This also deletes any objects with this realm as a foreign key
Realm.objects.filter(string_id="analytics").delete()
Realm.objects.filter(string_id='analytics').delete()
# Because we just deleted a bunch of objects in the database
# directly (rather than deleting individual objects in Django,
@@ -68,25 +54,23 @@ class Command(BaseCommand):
# memcached in order to ensure deleted objects aren't still
# present in the memcached cache.
from zerver.apps import flush_cache
flush_cache(None)
installation_time = timezone_now() - timedelta(days=self.DAYS_OF_DATA)
last_end_time = floor_to_day(timezone_now())
realm = do_create_realm(
string_id="analytics", name="Analytics", date_created=installation_time
)
realm = Realm.objects.create(
string_id='analytics', name='Analytics', date_created=installation_time)
with mock.patch("zerver.lib.create_user.timezone_now", return_value=installation_time):
shylock = create_user(
"shylock@analytics.ds",
"Shylock",
'shylock@analytics.ds',
'Shylock',
realm,
full_name="Shylock",
role=UserProfile.ROLE_REALM_OWNER,
full_name='Shylock',
role=UserProfile.ROLE_REALM_ADMINISTRATOR
)
do_change_user_role(shylock, UserProfile.ROLE_REALM_OWNER, acting_user=None)
stream = Stream.objects.create(name="all", realm=realm, date_created=installation_time)
do_change_user_role(shylock, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
stream = Stream.objects.create(
name='all', realm=realm, date_created=installation_time)
recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
stream.recipient = recipient
stream.save(update_fields=["recipient"])
@@ -94,207 +78,163 @@ class Command(BaseCommand):
# Subscribe shylock to the stream to avoid invariant failures.
# TODO: This should use subscribe_users_to_streams from populate_db.
subs = [
Subscription(
recipient=recipient,
user_profile=shylock,
is_user_active=shylock.is_active,
color=STREAM_ASSIGNMENT_COLORS[0],
),
Subscription(recipient=recipient,
user_profile=shylock,
color=STREAM_ASSIGNMENT_COLORS[0]),
]
Subscription.objects.bulk_create(subs)
def insert_fixture_data(
stat: CountStat, fixture_data: Mapping[Optional[str], List[int]], table: Type[BaseCount]
) -> None:
end_times = time_range(
last_end_time, last_end_time, stat.frequency, len(list(fixture_data.values())[0])
)
def insert_fixture_data(stat: CountStat,
fixture_data: Mapping[Optional[str], List[int]],
table: Type[BaseCount]) -> None:
end_times = time_range(last_end_time, last_end_time, stat.frequency,
len(list(fixture_data.values())[0]))
if table == InstallationCount:
id_args: Dict[str, Any] = {}
if table == RealmCount:
id_args = {"realm": realm}
id_args = {'realm': realm}
if table == UserCount:
id_args = {"realm": realm, "user": shylock}
id_args = {'realm': realm, 'user': shylock}
if table == StreamCount:
id_args = {"stream": stream, "realm": realm}
id_args = {'stream': stream, 'realm': realm}
for subgroup, values in fixture_data.items():
table.objects.bulk_create(
table(
property=stat.property,
subgroup=subgroup,
end_time=end_time,
value=value,
**id_args,
)
for end_time, value in zip(end_times, values)
if value != 0
)
table.objects.bulk_create([
table(property=stat.property, subgroup=subgroup, end_time=end_time,
value=value, **id_args)
for end_time, value in zip(end_times, values) if value != 0])
stat = COUNT_STATS["1day_actives::day"]
stat = COUNT_STATS['1day_actives::day']
realm_data: Mapping[Optional[str], List[int]] = {
None: self.generate_fixture_data(stat, 0.08, 0.02, 3, 0.3, 6, partial_sum=True),
None: self.generate_fixture_data(stat, .08, .02, 3, .3, 6, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data: Mapping[Optional[str], List[int]] = {
None: self.generate_fixture_data(stat, 0.8, 0.2, 4, 0.3, 6, partial_sum=True),
None: self.generate_fixture_data(stat, .8, .2, 4, .3, 6, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
FillState.objects.create(property=stat.property, end_time=last_end_time,
state=FillState.DONE)
stat = COUNT_STATS["7day_actives::day"]
stat = COUNT_STATS['realm_active_humans::day']
realm_data = {
None: self.generate_fixture_data(stat, 0.2, 0.07, 3, 0.3, 6, partial_sum=True),
None: self.generate_fixture_data(stat, .1, .03, 3, .5, 3, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
None: self.generate_fixture_data(stat, 2, 0.7, 4, 0.3, 6, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
stat = COUNT_STATS["realm_active_humans::day"]
realm_data = {
None: self.generate_fixture_data(stat, 0.8, 0.08, 3, 0.5, 3, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
None: self.generate_fixture_data(stat, 1, 0.3, 4, 0.5, 3, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
stat = COUNT_STATS["active_users_audit:is_bot:day"]
realm_data = {
"false": self.generate_fixture_data(stat, 1, 0.2, 3.5, 0.8, 2, partial_sum=True),
"true": self.generate_fixture_data(stat, 0.3, 0.05, 3, 0.3, 2, partial_sum=True),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
"false": self.generate_fixture_data(stat, 3, 1, 4, 0.8, 2, partial_sum=True),
"true": self.generate_fixture_data(stat, 1, 0.4, 4, 0.8, 2, partial_sum=True),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
stat = COUNT_STATS["messages_sent:is_bot:hour"]
user_data: Mapping[Optional[str], List[int]] = {
"false": self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8, holiday_rate=0.1),
}
insert_fixture_data(stat, user_data, UserCount)
realm_data = {
"false": self.generate_fixture_data(stat, 35, 15, 6, 0.6, 4),
"true": self.generate_fixture_data(stat, 15, 15, 3, 0.4, 2),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
"false": self.generate_fixture_data(stat, 350, 150, 6, 0.6, 4),
"true": self.generate_fixture_data(stat, 150, 150, 3, 0.4, 2),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
stat = COUNT_STATS["messages_sent:message_type:day"]
user_data = {
"public_stream": self.generate_fixture_data(stat, 1.5, 1, 3, 0.6, 8),
"private_message": self.generate_fixture_data(stat, 0.5, 0.3, 1, 0.6, 8),
"huddle_message": self.generate_fixture_data(stat, 0.2, 0.2, 2, 0.6, 8),
}
insert_fixture_data(stat, user_data, UserCount)
realm_data = {
"public_stream": self.generate_fixture_data(stat, 30, 8, 5, 0.6, 4),
"private_stream": self.generate_fixture_data(stat, 7, 7, 5, 0.6, 4),
"private_message": self.generate_fixture_data(stat, 13, 5, 5, 0.6, 4),
"huddle_message": self.generate_fixture_data(stat, 6, 3, 3, 0.6, 4),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
"public_stream": self.generate_fixture_data(stat, 300, 80, 5, 0.6, 4),
"private_stream": self.generate_fixture_data(stat, 70, 70, 5, 0.6, 4),
"private_message": self.generate_fixture_data(stat, 130, 50, 5, 0.6, 4),
"huddle_message": self.generate_fixture_data(stat, 60, 30, 3, 0.6, 4),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
website, created = Client.objects.get_or_create(name="website")
old_desktop, created = Client.objects.get_or_create(name="desktop app Linux 0.3.7")
android, created = Client.objects.get_or_create(name="ZulipAndroid")
iOS, created = Client.objects.get_or_create(name="ZulipiOS")
react_native, created = Client.objects.get_or_create(name="ZulipMobile")
API, created = Client.objects.get_or_create(name="API: Python")
zephyr_mirror, created = Client.objects.get_or_create(name="zephyr_mirror")
unused, created = Client.objects.get_or_create(name="unused")
long_webhook, created = Client.objects.get_or_create(name="ZulipLooooooooooongNameWebhook")
stat = COUNT_STATS["messages_sent:client:day"]
user_data = {
website.id: self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8),
zephyr_mirror.id: self.generate_fixture_data(stat, 0, 0.3, 1.5, 0.6, 8),
}
insert_fixture_data(stat, user_data, UserCount)
realm_data = {
website.id: self.generate_fixture_data(stat, 30, 20, 5, 0.6, 3),
old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, 0.6, 3),
android.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
iOS.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
API.id: self.generate_fixture_data(stat, 5, 5, 5, 0.6, 3),
zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, 0.6, 3),
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
}
insert_fixture_data(stat, realm_data, RealmCount)
installation_data = {
website.id: self.generate_fixture_data(stat, 300, 200, 5, 0.6, 3),
old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, 0.6, 3),
android.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
iOS.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
API.id: self.generate_fixture_data(stat, 50, 50, 5, 0.6, 3),
zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, 0.6, 3),
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
}
insert_fixture_data(stat, installation_data, InstallationCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
stat = COUNT_STATS["messages_in_stream:is_bot:day"]
realm_data = {
"false": self.generate_fixture_data(stat, 30, 5, 6, 0.6, 4),
"true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3),
}
insert_fixture_data(stat, realm_data, RealmCount)
stream_data: Mapping[Optional[str], List[int]] = {
"false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4),
"true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2),
}
insert_fixture_data(stat, stream_data, StreamCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
stat = COUNT_STATS["messages_read::hour"]
user_data = {
None: self.generate_fixture_data(stat, 7, 3, 2, 0.6, 8, holiday_rate=0.1),
}
insert_fixture_data(stat, user_data, UserCount)
realm_data = {None: self.generate_fixture_data(stat, 50, 35, 6, 0.6, 4)}
insert_fixture_data(stat, realm_data, RealmCount)
FillState.objects.create(
property=stat.property, end_time=last_end_time, state=FillState.DONE
)
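For reference, a minimal sketch of what the insert_fixture_data helper above does with one entry (illustrative only; it assumes the realm/stream fixtures and the COUNT_STATS lookup set up earlier in this hunk):

    stat = COUNT_STATS["messages_in_stream:is_bot:day"]
    insert_fixture_data(stat, {"false": [3, 0, 5]}, RealmCount)
    # Creates one RealmCount row per nonzero value: end_times[0] with value 3
    # and end_times[2] with value 5; the zero is skipped by `if value != 0`.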


@@ -0,0 +1,160 @@
import datetime
from argparse import ArgumentParser
from typing import Any, List
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Count
from django.utils.timezone import now as timezone_now
from zerver.models import (
Message,
Realm,
Recipient,
Stream,
Subscription,
UserActivity,
UserMessage,
UserProfile,
get_realm,
)
MOBILE_CLIENT_LIST = ["Android", "ios"]
HUMAN_CLIENT_LIST = MOBILE_CLIENT_LIST + ["website"]
human_messages = Message.objects.filter(sending_client__name__in=HUMAN_CLIENT_LIST)
class Command(BaseCommand):
help = "Generate statistics on realm activity."
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
help="realm to generate statistics for")
def active_users(self, realm: Realm) -> List[UserProfile]:
# Has been active (on the website, for now) in the last 7 days.
activity_cutoff = timezone_now() - datetime.timedelta(days=7)
return [activity.user_profile for activity in (
UserActivity.objects.filter(user_profile__realm=realm,
user_profile__is_active=True,
last_visit__gt=activity_cutoff,
query="/json/users/me/pointer",
client__name="website"))]
def messages_sent_by(self, user: UserProfile, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender=user, date_sent__gt=sent_time_cutoff).count()
def total_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return Message.objects.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).count()
def human_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).count()
def api_messages(self, realm: Realm, days_ago: int) -> int:
return (self.total_messages(realm, days_ago) - self.human_messages(realm, days_ago))
def stream_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff,
recipient__type=Recipient.STREAM).count()
def private_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).exclude(
recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.HUDDLE).count()
def group_private_messages(self, realm: Realm, days_ago: int) -> int:
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).exclude(
recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.PERSONAL).count()
def report_percentage(self, numerator: float, denominator: float, text: str) -> None:
if not denominator:
fraction = 0.0
else:
fraction = numerator / float(denominator)
print(f"{fraction * 100:.2f}% of", text)
def handle(self, *args: Any, **options: Any) -> None:
if options['realms']:
try:
realms = [get_realm(string_id) for string_id in options['realms']]
except Realm.DoesNotExist as e:
raise CommandError(e)
else:
realms = Realm.objects.all()
for realm in realms:
print(realm.string_id)
user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
active_users = self.active_users(realm)
num_active = len(active_users)
print(f"{num_active} active users ({len(user_profiles)} total)")
streams = Stream.objects.filter(realm=realm).extra(
tables=['zerver_subscription', 'zerver_recipient'],
where=['zerver_subscription.recipient_id = zerver_recipient.id',
'zerver_recipient.type = 2',
'zerver_recipient.type_id = zerver_stream.id',
'zerver_subscription.active = true']).annotate(count=Count("name"))
print(f"{streams.count()} streams")
for days_ago in (1, 7, 30):
print(f"In last {days_ago} days, users sent:")
sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles]
for quantity in sorted(sender_quantities, reverse=True):
print(quantity, end=' ')
print("")
print(f"{self.stream_messages(realm, days_ago)} stream messages")
print(f"{self.private_messages(realm, days_ago)} one-on-one private messages")
print(f"{self.api_messages(realm, days_ago)} messages sent via the API")
print(f"{self.group_private_messages(realm, days_ago)} group private messages")
num_notifications_enabled = len([x for x in active_users if x.enable_desktop_notifications])
self.report_percentage(num_notifications_enabled, num_active,
"active users have desktop notifications enabled")
num_enter_sends = len([x for x in active_users if x.enter_sends])
self.report_percentage(num_enter_sends, num_active,
"active users have enter-sends")
all_message_count = human_messages.filter(sender__realm=realm).count()
multi_paragraph_message_count = human_messages.filter(
sender__realm=realm, content__contains="\n\n").count()
self.report_percentage(multi_paragraph_message_count, all_message_count,
"all messages are multi-paragraph")
# Starred messages
starrers = UserMessage.objects.filter(user_profile__in=user_profiles,
flags=UserMessage.flags.starred).values(
"user_profile").annotate(count=Count("user_profile"))
print("{} users have starred {} messages".format(
len(starrers), sum([elt["count"] for elt in starrers])))
active_user_subs = Subscription.objects.filter(
user_profile__in=user_profiles, active=True)
# Streams not in home view
non_home_view = active_user_subs.filter(is_muted=True).values(
"user_profile").annotate(count=Count("user_profile"))
print("{} users have {} streams not in home view".format(
len(non_home_view), sum([elt["count"] for elt in non_home_view])))
# Code block markup
markup_messages = human_messages.filter(
sender__realm=realm, content__contains="~~~").values(
"sender").annotate(count=Count("sender"))
print("{} users have used code block markup on {} messages".format(
len(markup_messages), sum([elt["count"] for elt in markup_messages])))
# Notifications for stream messages
notifications = active_user_subs.filter(desktop_notifications=True).values(
"user_profile").annotate(count=Count("user_profile"))
print("{} users receive desktop notifications for {} streams".format(
len(notifications), sum([elt["count"] for elt in notifications])))
print("")
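A usage sketch for this management command (the command name is an assumption, since file paths are not shown in this diff view; in the Zulip tree a command with this help text conventionally lives at analytics/management/commands/realm_stats.py):

    from django.core.management import call_command

    call_command("realm_stats")           # statistics for every realm
    call_command("realm_stats", "zulip")  # statistics for the "zulip" realm only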


@@ -11,14 +11,13 @@ class Command(BaseCommand):
help = "Generate statistics on the streams for a realm."
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument(
"realms", metavar="<realm>", nargs="*", help="realm to generate statistics for"
)
def handle(self, *args: Any, **options: str) -> None:
if options["realms"]:
try:
realms = [get_realm(string_id) for string_id in options["realms"]]
except Realm.DoesNotExist as e:
raise CommandError(e)
else:
@@ -36,26 +35,22 @@ class Command(BaseCommand):
else:
public_count += 1
print("------------")
print(realm.string_id, end=" ")
print("{:>10} {} public streams and".format("(", public_count), end=" ")
print(f"{private_count} private streams )")
print("------------")
print("{:>25} {:>15} {:>10} {:>12}".format("stream", "subscribers", "messages", "type"))
for stream in streams:
if stream.invite_only:
stream_type = "private"
else:
stream_type = "public"
print(f"{stream.name:>25}", end=" ")
recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id)
print(
"{:10}".format(
len(Subscription.objects.filter(recipient=recipient, active=True))
),
end=" ",
)
num_messages = len(Message.objects.filter(recipient=recipient))
print(f"{num_messages:12}", end=" ")
print(f"{stream_type:>15}")
print("")


@@ -22,29 +22,28 @@ class Command(BaseCommand):
Run as a cron job that runs every hour."""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument(
"--time",
"-t",
help="Update stat tables from current state to "
"--time. Defaults to the current time.",
default=timezone_now().isoformat(),
)
parser.add_argument("--utc", action="store_true", help="Interpret --time in UTC.")
parser.add_argument(
"--stat", "-s", help="CountStat to process. If omitted, all stats are processed."
)
parser.add_argument(
"--verbose", action="store_true", help="Print timing information to stdout."
)
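Hypothetical invocations of the options defined above (the command name update_analytics_counts is taken from the log message later in this hunk; keyword arguments map to the argparse dests):

    from django.core.management import call_command

    call_command("update_analytics_counts", verbose=True)
    call_command("update_analytics_counts", time="2020-07-31T00:00:00", utc=True)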
def handle(self, *args: Any, **options: Any) -> None:
try:
os.mkdir(settings.ANALYTICS_LOCK_DIR)
except OSError:
print(
f"{WARNING}Analytics lock {settings.ANALYTICS_LOCK_DIR} is unavailable;"
f" exiting.{ENDC}"
)
return
try:
@@ -59,36 +58,32 @@ class Command(BaseCommand):
logger.info("No realms, stopping update_analytics_counts")
return
fill_to_time = parse_datetime(options["time"])
if options["utc"]:
fill_to_time = fill_to_time.replace(tzinfo=timezone.utc)
if fill_to_time.tzinfo is None:
raise ValueError(
"--time must be timezone aware. Maybe you meant to use the --utc option?"
)
fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc))
if options["stat"] is not None:
stats = [COUNT_STATS[options["stat"]]]
else:
stats = list(COUNT_STATS.values())
logger.info("Starting updating analytics counts through %s", fill_to_time)
if options["verbose"]:
start = time.time()
last = start
for stat in stats:
process_count_stat(stat, fill_to_time)
if options["verbose"]:
print(f"Updated {stat.property} in {time.time() - last:.3f}s")
last = time.time()
if options["verbose"]:
print(
f"Finished updating analytics counts through {fill_to_time} in {time.time() - start:.3f}s"
)
logger.info("Finished updating analytics counts through %s", fill_to_time)
if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS:


@@ -0,0 +1,42 @@
import datetime
from argparse import ArgumentParser
from typing import Any
from django.core.management.base import BaseCommand, CommandError
from django.utils.timezone import now as timezone_now
from zerver.models import Message, Realm, Stream, UserProfile, get_realm
class Command(BaseCommand):
help = "Generate statistics on user activity."
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
help="realm to generate statistics for")
def messages_sent_by(self, user: UserProfile, week: int) -> int:
start = timezone_now() - datetime.timedelta(days=(week + 1)*7)
end = timezone_now() - datetime.timedelta(days=week*7)
return Message.objects.filter(sender=user, date_sent__gt=start, date_sent__lte=end).count()
def handle(self, *args: Any, **options: Any) -> None:
if options['realms']:
try:
realms = [get_realm(string_id) for string_id in options['realms']]
except Realm.DoesNotExist as e:
raise CommandError(e)
else:
realms = Realm.objects.all()
for realm in realms:
print(realm.string_id)
user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
print(f"{len(user_profiles)} users")
print(f"{len(Stream.objects.filter(realm=realm))} streams")
for user_profile in user_profiles:
print(f"{user_profile.email:>35}", end=' ')
for week in range(10):
print(f"{self.messages_sent_by(user_profile, week):5}", end=' ')
print("")


@@ -6,204 +6,105 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("zerver", "0030_realm_org_type"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="Anomaly",
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
("info", models.CharField(max_length=1000)),
],
bases=(models.Model,),
),
migrations.CreateModel(
name="HuddleCount",
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
(
"huddle",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient"
),
),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
("property", models.CharField(max_length=40)),
("end_time", models.DateTimeField()),
("interval", models.CharField(max_length=20)),
("value", models.BigIntegerField()),
(
"anomaly",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="analytics.Anomaly",
null=True,
),
),
],
bases=(models.Model,),
),
migrations.CreateModel(
name="InstallationCount",
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
("property", models.CharField(max_length=40)),
("end_time", models.DateTimeField()),
("interval", models.CharField(max_length=20)),
("value", models.BigIntegerField()),
(
"anomaly",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="analytics.Anomaly",
null=True,
),
),
],
bases=(models.Model,),
),
migrations.CreateModel(
name="RealmCount",
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
("property", models.CharField(max_length=40)),
("end_time", models.DateTimeField()),
("interval", models.CharField(max_length=20)),
("value", models.BigIntegerField()),
(
"anomaly",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="analytics.Anomaly",
null=True,
),
),
],
bases=(models.Model,),
),
migrations.CreateModel(
name="StreamCount",
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
(
"stream",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Stream"
),
),
("property", models.CharField(max_length=40)),
("end_time", models.DateTimeField()),
("interval", models.CharField(max_length=20)),
("value", models.BigIntegerField()),
(
"anomaly",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="analytics.Anomaly",
null=True,
),
),
],
bases=(models.Model,),
),
migrations.CreateModel(
name="UserCount",
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
("property", models.CharField(max_length=40)),
("end_time", models.DateTimeField()),
("interval", models.CharField(max_length=20)),
("value", models.BigIntegerField()),
(
"anomaly",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="analytics.Anomaly",
null=True,
),
),
],
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name="usercount",
unique_together={("user", "property", "end_time", "interval")},
),
migrations.AlterUniqueTogether(
name="streamcount",
unique_together={("stream", "property", "end_time", "interval")},
),
migrations.AlterUniqueTogether(
name="realmcount",
unique_together={("realm", "property", "end_time", "interval")},
),
migrations.AlterUniqueTogether(
name="installationcount",
unique_together={("property", "end_time", "interval")},
),
migrations.AlterUniqueTogether(
name="huddlecount",
unique_together={("huddle", "property", "end_time", "interval")},
),
]


@@ -4,27 +4,27 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("analytics", "0001_initial"),
]
operations = [
migrations.AlterUniqueTogether(
name="huddlecount",
unique_together=set(),
),
migrations.RemoveField(
model_name="huddlecount",
name="anomaly",
),
migrations.RemoveField(
model_name="huddlecount",
name="huddle",
),
migrations.RemoveField(
model_name="huddlecount",
name="user",
),
migrations.DeleteModel(
name="HuddleCount",
),
]


@@ -4,23 +4,18 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("analytics", "0002_remove_huddlecount"),
]
operations = [
migrations.CreateModel(
name="FillState",
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
("property", models.CharField(unique=True, max_length=40)),
("end_time", models.DateTimeField()),
("state", models.PositiveSmallIntegerField()),
("last_modified", models.DateTimeField(auto_now=True)),
],
bases=(models.Model,),
),


@@ -4,28 +4,28 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("analytics", "0003_fillstate"),
]
operations = [
migrations.AddField(
model_name="installationcount",
name="subgroup",
field=models.CharField(max_length=16, null=True),
),
migrations.AddField(
model_name="realmcount",
name="subgroup",
field=models.CharField(max_length=16, null=True),
),
migrations.AddField(
model_name="streamcount",
name="subgroup",
field=models.CharField(max_length=16, null=True),
),
migrations.AddField(
model_name="usercount",
name="subgroup",
field=models.CharField(max_length=16, null=True),
),
]


@@ -4,48 +4,48 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("analytics", "0004_add_subgroup"),
]
operations = [
migrations.AlterField(
model_name="installationcount",
name="interval",
field=models.CharField(max_length=8),
),
migrations.AlterField(
model_name="installationcount",
name="property",
field=models.CharField(max_length=32),
),
migrations.AlterField(
model_name="realmcount",
name="interval",
field=models.CharField(max_length=8),
),
migrations.AlterField(
model_name="realmcount",
name="property",
field=models.CharField(max_length=32),
),
migrations.AlterField(
model_name="streamcount",
name="interval",
field=models.CharField(max_length=8),
),
migrations.AlterField(
model_name="streamcount",
name="property",
field=models.CharField(max_length=32),
),
migrations.AlterField(
model_name="usercount",
name="interval",
field=models.CharField(max_length=8),
),
migrations.AlterField(
model_name="usercount",
name="property",
field=models.CharField(max_length=32),
),
]


@@ -4,24 +4,24 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("analytics", "0005_alter_field_size"),
]
operations = [
migrations.AlterUniqueTogether(
name="installationcount",
unique_together={("property", "subgroup", "end_time", "interval")},
),
migrations.AlterUniqueTogether(
name="realmcount",
unique_together={("realm", "property", "subgroup", "end_time", "interval")},
),
migrations.AlterUniqueTogether(
name="streamcount",
unique_together={("stream", "property", "subgroup", "end_time", "interval")},
),
migrations.AlterUniqueTogether(
name="usercount",
unique_together={("user", "property", "subgroup", "end_time", "interval")},
),
]


@@ -5,40 +5,40 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("analytics", "0006_add_subgroup_to_unique_constraints"),
]
operations = [
migrations.AlterUniqueTogether(
name="installationcount",
unique_together={("property", "subgroup", "end_time")},
),
migrations.RemoveField(
model_name="installationcount",
name="interval",
),
migrations.AlterUniqueTogether(
name="realmcount",
unique_together={("realm", "property", "subgroup", "end_time")},
),
migrations.RemoveField(
model_name="realmcount",
name="interval",
),
migrations.AlterUniqueTogether(
name="streamcount",
unique_together={("stream", "property", "subgroup", "end_time")},
),
migrations.RemoveField(
model_name="streamcount",
name="interval",
),
migrations.AlterUniqueTogether(
name="usercount",
unique_together={("user", "property", "subgroup", "end_time")},
),
migrations.RemoveField(
model_name="usercount",
name="interval",
),
]


@@ -5,21 +5,21 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("zerver", "0050_userprofile_avatar_version"),
("analytics", "0007_remove_interval"),
]
operations = [
migrations.AlterIndexTogether(
name="realmcount",
index_together={("property", "end_time")},
),
migrations.AlterIndexTogether(
name="streamcount",
index_together={("property", "realm", "end_time")},
),
migrations.AlterIndexTogether(
name="usercount",
index_together={("property", "realm", "end_time")},
),
]


@@ -3,27 +3,24 @@ from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def delete_messages_sent_to_stream_stat(
apps: StateApps, schema_editor: DatabaseSchemaEditor
) -> None:
UserCount = apps.get_model("analytics", "UserCount")
StreamCount = apps.get_model("analytics", "StreamCount")
RealmCount = apps.get_model("analytics", "RealmCount")
InstallationCount = apps.get_model("analytics", "InstallationCount")
FillState = apps.get_model("analytics", "FillState")
property = "messages_sent_to_stream:is_bot"
UserCount.objects.filter(property=property).delete()
StreamCount.objects.filter(property=property).delete()
RealmCount.objects.filter(property=property).delete()
InstallationCount.objects.filter(property=property).delete()
FillState.objects.filter(property=property).delete()
class Migration(migrations.Migration):
dependencies = [
("analytics", "0008_add_count_indexes"),
]
operations = [


@@ -3,26 +3,23 @@ from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def clear_message_sent_by_message_type_values(
apps: StateApps, schema_editor: DatabaseSchemaEditor
) -> None:
UserCount = apps.get_model("analytics", "UserCount")
StreamCount = apps.get_model("analytics", "StreamCount")
RealmCount = apps.get_model("analytics", "RealmCount")
InstallationCount = apps.get_model("analytics", "InstallationCount")
FillState = apps.get_model("analytics", "FillState")
property = "messages_sent:message_type:day"
UserCount.objects.filter(property=property).delete()
StreamCount.objects.filter(property=property).delete()
RealmCount.objects.filter(property=property).delete()
InstallationCount.objects.filter(property=property).delete()
FillState.objects.filter(property=property).delete()
class Migration(migrations.Migration):
dependencies = [("analytics", "0009_remove_messages_to_stream_stat")]
operations = [
migrations.RunPython(clear_message_sent_by_message_type_values),


@@ -4,11 +4,11 @@ from django.db.migrations.state import StateApps
def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserCount = apps.get_model("analytics", "UserCount")
StreamCount = apps.get_model("analytics", "StreamCount")
RealmCount = apps.get_model("analytics", "RealmCount")
InstallationCount = apps.get_model("analytics", "InstallationCount")
FillState = apps.get_model("analytics", "FillState")
UserCount.objects.all().delete()
StreamCount.objects.all().delete()
@@ -16,11 +16,10 @@ def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor)
InstallationCount.objects.all().delete()
FillState.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
("analytics", "0010_clear_messages_sent_values"),
]
operations = [


@@ -7,36 +7,28 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("analytics", "0011_clear_analytics_tables"),
]
operations = [
migrations.AlterField(
model_name="installationcount",
name="anomaly",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
),
),
migrations.AlterField(
model_name="realmcount",
name="anomaly",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
),
),
migrations.AlterField(
model_name="streamcount",
name="anomaly",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
),
),
migrations.AlterField(
model_name="usercount",
name="anomaly",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
),
),
]


@@ -6,27 +6,27 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("analytics", "0012_add_on_delete"),
]
operations = [
migrations.RemoveField(
model_name="installationcount",
name="anomaly",
),
migrations.RemoveField(
model_name="realmcount",
name="anomaly",
),
migrations.RemoveField(
model_name="streamcount",
name="anomaly",
),
migrations.RemoveField(
model_name="usercount",
name="anomaly",
),
migrations.DeleteModel(
name="Anomaly",
),
]


@@ -6,12 +6,12 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("analytics", "0013_remove_anomaly"),
]
operations = [
migrations.RemoveField(
model_name="fillstate",
name="last_modified",
),
]


@@ -20,30 +20,24 @@ def clear_duplicate_counts(apps: StateApps, schema_editor: DatabaseSchemaEditor)
this means deleting the extra rows, but for LoggingCountStat objects, we need to
additionally combine the sums.
"""
count_tables = dict(
realm=apps.get_model("analytics", "RealmCount"),
user=apps.get_model("analytics", "UserCount"),
stream=apps.get_model("analytics", "StreamCount"),
installation=apps.get_model("analytics", "InstallationCount"),
)
for name, count_table in count_tables.items():
value = [name, "property", "end_time"]
if name == "installation":
value = ["property", "end_time"]
counts = (
count_table.objects.filter(subgroup=None)
.values(*value)
.annotate(Count("id"), Sum("value"))
.filter(id__count__gt=1)
)
for count in counts:
count.pop("id__count")
total_value = count.pop("value__sum")
duplicate_counts = list(count_table.objects.filter(**count))
first_count = duplicate_counts[0]
if count["property"] in ["invites_sent::day", "active_users_log:is_bot:day"]:
# For LoggingCountStat objects, the right fix is to combine the totals;
# for other CountStat objects, we expect the duplicates to have the same value.
# And so all we need to do is delete them.
@@ -53,13 +47,13 @@ def clear_duplicate_counts(apps: StateApps, schema_editor: DatabaseSchemaEditor)
for duplicate_count in to_cleanup:
duplicate_count.delete()
class Migration(migrations.Migration):
dependencies = [
("analytics", "0014_remove_fillstate_last_modified"),
]
operations = [
migrations.RunPython(clear_duplicate_counts, reverse_code=migrations.RunPython.noop),
]
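For clarity, a sketch of the dedup semantics the docstring above describes; the lines between the two hunks are elided in this diff, so this is a hedged reconstruction rather than the migration's verbatim code:

    if count["property"] in ["invites_sent::day", "active_users_log:is_bot:day"]:
        # LoggingCountStat rows: fold the duplicates' sum into the survivor.
        first_count.value = total_value
        first_count.save()
    to_cleanup = duplicate_counts[1:]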


@@ -6,88 +6,56 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("analytics", "0015_clear_duplicate_counts"),
]
operations = [
migrations.AlterUniqueTogether(
name="installationcount",
unique_together=set(),
),
migrations.AlterUniqueTogether(
name="realmcount",
unique_together=set(),
),
migrations.AlterUniqueTogether(
name="streamcount",
unique_together=set(),
),
migrations.AlterUniqueTogether(
name="usercount",
unique_together=set(),
),
migrations.AddConstraint(
model_name="installationcount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=False),
fields=("property", "subgroup", "end_time"),
name="unique_installation_count",
),
),
migrations.AddConstraint(
model_name="installationcount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=True),
fields=("property", "end_time"),
name="unique_installation_count_null_subgroup",
),
),
migrations.AddConstraint(
model_name="realmcount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=False),
fields=("realm", "property", "subgroup", "end_time"),
name="unique_realm_count",
),
),
migrations.AddConstraint(
model_name="realmcount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=True),
fields=("realm", "property", "end_time"),
name="unique_realm_count_null_subgroup",
),
),
migrations.AddConstraint(
model_name="streamcount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=False),
fields=("stream", "property", "subgroup", "end_time"),
name="unique_stream_count",
),
),
migrations.AddConstraint(
model_name="streamcount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=True),
fields=("stream", "property", "end_time"),
name="unique_stream_count_null_subgroup",
),
),
migrations.AddConstraint(
model_name="usercount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=False),
fields=("user", "property", "subgroup", "end_time"),
name="unique_user_count",
),
),
migrations.AddConstraint(
model_name="usercount",
constraint=models.UniqueConstraint(
condition=models.Q(subgroup__isnull=True),
fields=("user", "property", "end_time"),
name="unique_user_count_null_subgroup",
),
),
]
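A note on the pattern above: PostgreSQL treats NULLs as distinct in unique indexes, so a single unique constraint over (..., subgroup, end_time) would never reject duplicate rows whose subgroup is NULL. Each table therefore gets a pair of partial constraints, one per nullability case. A minimal standalone sketch of the same idiom (illustrative model, not part of this migration):

    class ExampleCount(models.Model):
        property = models.CharField(max_length=32)
        subgroup = models.CharField(max_length=16, null=True)
        end_time = models.DateTimeField()

        class Meta:
            constraints = [
                models.UniqueConstraint(
                    fields=["property", "subgroup", "end_time"],
                    condition=models.Q(subgroup__isnull=False),
                    name="example_count_unique",
                ),
                models.UniqueConstraint(
                    fields=["property", "end_time"],
                    condition=models.Q(subgroup__isnull=True),
                    name="example_count_unique_null_subgroup",
                ),
            ]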


@@ -20,15 +20,19 @@ class FillState(models.Model):
def __str__(self) -> str:
return f"<FillState: {self.property} {self.end_time} {self.state}>"
# The earliest/starting end_time in FillState
# We assume there is at least one realm
def installation_epoch() -> datetime.datetime:
earliest_realm_creation = Realm.objects.aggregate(models.Min("date_created"))[
"date_created__min"
]
return floor_to_day(earliest_realm_creation)
def last_successful_fill(property: str) -> Optional[datetime.datetime]:
fillstate = FillState.objects.filter(property=property).first()
if fillstate is None:
return None
if fillstate.state == FillState.DONE:
return fillstate.end_time
return fillstate.end_time - datetime.timedelta(hours=1)
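# (If the fill is not yet DONE, the hour ending at end_time may still be in
# progress, so the last safely filled time is one hour earlier -- an hour
# being the finest CountStat frequency.)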
class BaseCount(models.Model):
# Note: When inheriting from BaseCount, you may want to rearrange
@@ -42,27 +46,24 @@ class BaseCount(models.Model):
class Meta:
abstract = True
class InstallationCount(BaseCount):
class Meta:
# Handles invalid duplicate InstallationCount data
constraints = [
UniqueConstraint(
fields=["property", "subgroup", "end_time"],
condition=Q(subgroup__isnull=False),
name="unique_installation_count",
),
UniqueConstraint(
fields=["property", "end_time"],
condition=Q(subgroup__isnull=True),
name="unique_installation_count_null_subgroup",
),
]
def __str__(self) -> str:
return f"<InstallationCount: {self.property} {self.subgroup} {self.value}>"
class RealmCount(BaseCount):
realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
@@ -72,20 +73,17 @@ class RealmCount(BaseCount):
UniqueConstraint(
fields=["realm", "property", "subgroup", "end_time"],
condition=Q(subgroup__isnull=False),
name="unique_realm_count",
),
UniqueConstraint(
fields=["realm", "property", "end_time"],
condition=Q(subgroup__isnull=True),
name="unique_realm_count_null_subgroup",
),
]
index_together = ["property", "end_time"]
def __str__(self) -> str:
return f"<RealmCount: {self.realm} {self.property} {self.subgroup} {self.value}>"
class UserCount(BaseCount):
user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
@@ -96,13 +94,11 @@ class UserCount(BaseCount):
UniqueConstraint(
fields=["user", "property", "subgroup", "end_time"],
condition=Q(subgroup__isnull=False),
name="unique_user_count",
),
UniqueConstraint(
fields=["user", "property", "end_time"],
condition=Q(subgroup__isnull=True),
name="unique_user_count_null_subgroup",
),
]
# This index dramatically improves the performance of
# aggregating from users to realms
@@ -111,7 +107,6 @@ class UserCount(BaseCount):
def __str__(self) -> str:
return f"<UserCount: {self.user} {self.property} {self.subgroup} {self.value}>"
class StreamCount(BaseCount):
stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
@@ -122,19 +117,15 @@ class StreamCount(BaseCount):
UniqueConstraint(
fields=["stream", "property", "subgroup", "end_time"],
condition=Q(subgroup__isnull=False),
name="unique_stream_count",
),
UniqueConstraint(
fields=["stream", "property", "end_time"],
condition=Q(subgroup__isnull=True),
name="unique_stream_count_null_subgroup",
),
]
# This index dramatically improves the performance of
# aggregating from streams to realms
index_together = ["property", "realm", "end_time"]
def __str__(self) -> str:
return (
f"<StreamCount: {self.stream} {self.property} {self.subgroup} {self.value} {self.id}>"
)

File diff suppressed because it is too large


@@ -9,32 +9,22 @@ class TestFixtures(ZulipTestCase):
# test basic business_hour / non_business_hour calculation
# test we get an array of the right length with frequency=CountStat.DAY
data = generate_time_series_data(
days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0
)
self.assertEqual(data, [400, 400, 400, 400, 400, 360, 360])
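# (Consistent with an 8-hour business day: 8 * 20 + 16 * 15 = 400 on each
# weekday and 24 * 15 = 360 on each weekend day.)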
data = generate_time_series_data(
days=1,
business_hours_base=2000,
non_business_hours_base=1500,
growth=2,
spikiness=0,
frequency=CountStat.HOUR,
)
# test we get an array of the right length with frequency=CountStat.HOUR
self.assertEqual(len(data), 24)
# test that growth doesn't affect the first data point
self.assertEqual(data[0], 2000)
# test that the last data point is growth times what it otherwise would be
self.assertEqual(data[-1], 1500 * 2)
# test autocorrelation == 1, since that's the easiest value to test
data = generate_time_series_data(
days=1,
business_hours_base=2000,
non_business_hours_base=2000,
autocorrelation=1,
frequency=CountStat.HOUR,
)
self.assertEqual(data[0], data[1])
self.assertEqual(data[0], data[-1])

File diff suppressed because it is too large


@@ -1,38 +1,34 @@
from django.conf.urls import include
from django.urls import path
from analytics.views import (
get_activity,
get_chart_data,
get_chart_data_for_installation,
get_chart_data_for_realm,
get_chart_data_for_remote_installation,
get_chart_data_for_remote_realm,
get_realm_activity,
get_user_activity,
stats,
stats_for_installation,
stats_for_realm,
stats_for_remote_installation,
stats_for_remote_realm,
support,
)
from zerver.lib.rest import rest_path
import analytics.views
from zerver.lib.rest import rest_dispatch
i18n_urlpatterns = [
# Server admin (user_profile.is_staff) visible stats pages
path("activity", get_activity),
path("activity/support", support, name="support"),
path("realm_activity/<realm_str>/", get_realm_activity),
path("user_activity/<email>/", get_user_activity),
path("stats/realm/<realm_str>/", stats_for_realm),
path("stats/installation", stats_for_installation),
path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation),
path(
"stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/", stats_for_remote_realm
),
path('activity', analytics.views.get_activity,
name='analytics.views.get_activity'),
path('activity/support', analytics.views.support,
name='analytics.views.support'),
path('realm_activity/<str:realm_str>/', analytics.views.get_realm_activity,
name='analytics.views.get_realm_activity'),
path('user_activity/<str:email>/', analytics.views.get_user_activity,
name='analytics.views.get_user_activity'),
path('stats/realm/<str:realm_str>/', analytics.views.stats_for_realm,
name='analytics.views.stats_for_realm'),
path('stats/installation', analytics.views.stats_for_installation,
name='analytics.views.stats_for_installation'),
path('stats/remote/<int:remote_server_id>/installation',
analytics.views.stats_for_remote_installation,
name='analytics.views.stats_for_remote_installation'),
path('stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/',
analytics.views.stats_for_remote_realm,
name='analytics.views.stats_for_remote_realm'),
# User-visible stats page
path("stats", stats, name="stats"),
path('stats', analytics.views.stats,
name='analytics.views.stats'),
]
# These endpoints are a part of the API (V1), which uses:
@@ -45,22 +41,22 @@ i18n_urlpatterns = [
# All of these paths are accessed by either a /json or /api prefix
v1_api_and_json_patterns = [
# get data for the graphs at /stats
rest_path("analytics/chart_data", GET=get_chart_data),
rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm),
rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation),
rest_path(
"analytics/chart_data/remote/<int:remote_server_id>/installation",
GET=get_chart_data_for_remote_installation,
),
rest_path(
"analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>",
GET=get_chart_data_for_remote_realm,
),
path('analytics/chart_data', rest_dispatch,
{'GET': 'analytics.views.get_chart_data'}),
path('analytics/chart_data/realm/<str:realm_str>', rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_realm'}),
path('analytics/chart_data/installation', rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_installation'}),
path('analytics/chart_data/remote/<int:remote_server_id>/installation', rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_remote_installation'}),
path('analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>',
rest_dispatch,
{'GET': 'analytics.views.get_chart_data_for_remote_realm'}),
]
i18n_urlpatterns += [
path("api/v1/", include(v1_api_and_json_patterns)),
path("json/", include(v1_api_and_json_patterns)),
path('api/v1/', include(v1_api_and_json_patterns)),
path('json/', include(v1_api_and_json_patterns)),
]
urlpatterns = i18n_urlpatterns
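The new-style routes above hand view functions directly to rest_path with per-method keyword arguments (GET=...), where the old style routed everything through rest_dispatch keyed by dotted view-name strings. A self-contained sketch of the underlying method-dispatch idea in plain Django; method_dispatch and the placeholder view are illustrative, not Zulip's actual rest_path implementation:

from django.http import HttpRequest, HttpResponse, HttpResponseNotAllowed
from django.urls import path

def method_dispatch(**handlers):
    # Build one view that fans out on request.method.
    def view(request: HttpRequest, **kwargs) -> HttpResponse:
        handler = handlers.get(request.method)
        if handler is None:
            return HttpResponseNotAllowed(sorted(handlers))
        return handler(request, **kwargs)
    return view

def get_chart_data(request: HttpRequest) -> HttpResponse:
    # Placeholder; the real handler lives in analytics.views.
    return HttpResponse("ok")

urlpatterns = [
    path("analytics/chart_data", method_dispatch(GET=get_chart_data)),
]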

File diff suppressed because it is too large


@@ -1,26 +1,18 @@
"use strict";
module.exports = {
plugins: [
[
"formatjs",
{
additionalFunctionNames: ["$t", "$t_html"],
overrideIdFn: (id, defaultMessage) => defaultMessage,
},
],
],
presets: [
[
"@babel/preset-env",
{
corejs: "3.6",
corejs: 3,
loose: true, // Loose mode for…of loops are 5× faster in Firefox
shippedProposals: true,
useBuiltIns: "usage",
},
],
"@babel/typescript",
],
plugins: [
"@babel/proposal-class-properties",
["@babel/plugin-proposal-unicode-property-regex", {useUnicodeFlag: false}],
],
sourceType: "unambiguous",
};


@@ -19,4 +19,4 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
VERSION = (0, 9, "pre")
VERSION = (0, 9, 'pre')


@@ -5,35 +5,22 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("contenttypes", "0001_initial"),
('contenttypes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name="Confirmation",
name='Confirmation',
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
("object_id", models.PositiveIntegerField()),
("date_sent", models.DateTimeField(verbose_name="sent")),
(
"confirmation_key",
models.CharField(max_length=40, verbose_name="activation key"),
),
(
"content_type",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="contenttypes.ContentType"
),
),
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('date_sent', models.DateTimeField(verbose_name='sent')),
('confirmation_key', models.CharField(max_length=40, verbose_name='activation key')),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
options={
"verbose_name": "confirmation email",
"verbose_name_plural": "confirmation emails",
'verbose_name': 'confirmation email',
'verbose_name_plural': 'confirmation emails',
},
bases=(models.Model,),
),


@@ -5,24 +5,16 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("confirmation", "0001_initial"),
('confirmation', '0001_initial'),
]
operations = [
migrations.CreateModel(
name="RealmCreationKey",
name='RealmCreationKey',
fields=[
(
"id",
models.AutoField(
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
),
),
("creation_key", models.CharField(max_length=40, verbose_name="activation key")),
(
"date_created",
models.DateTimeField(default=django.utils.timezone.now, verbose_name="created"),
),
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('creation_key', models.CharField(max_length=40, verbose_name='activation key')),
('date_created', models.DateTimeField(default=django.utils.timezone.now, verbose_name='created')),
],
),
]


@@ -5,16 +5,17 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("confirmation", "0002_realmcreationkey"),
('confirmation', '0002_realmcreationkey'),
]
operations = [
migrations.CreateModel(
name="EmailChangeConfirmation",
fields=[],
name='EmailChangeConfirmation',
fields=[
],
options={
"proxy": True,
'proxy': True,
},
bases=("confirmation.confirmation",),
bases=('confirmation.confirmation',),
),
]


@@ -5,31 +5,31 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("confirmation", "0003_emailchangeconfirmation"),
('confirmation', '0003_emailchangeconfirmation'),
]
operations = [
migrations.DeleteModel(
name="EmailChangeConfirmation",
name='EmailChangeConfirmation',
),
migrations.AlterModelOptions(
name="confirmation",
name='confirmation',
options={},
),
migrations.AddField(
model_name="confirmation",
name="type",
model_name='confirmation',
name='type',
field=models.PositiveSmallIntegerField(default=1),
preserve_default=False,
),
migrations.AlterField(
model_name="confirmation",
name="confirmation_key",
model_name='confirmation',
name='confirmation_key',
field=models.CharField(max_length=40),
),
migrations.AlterField(
model_name="confirmation",
name="date_sent",
model_name='confirmation',
name='date_sent',
field=models.DateTimeField(),
),
]


@@ -6,16 +6,14 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("zerver", "0124_stream_enable_notifications"),
("confirmation", "0004_remove_confirmationmanager"),
('zerver', '0124_stream_enable_notifications'),
('confirmation', '0004_remove_confirmationmanager'),
]
operations = [
migrations.AddField(
model_name="confirmation",
name="realm",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
model_name='confirmation',
name='realm',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm'),
),
]


@@ -6,13 +6,13 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("confirmation", "0005_confirmation_realm"),
('confirmation', '0005_confirmation_realm'),
]
operations = [
migrations.AddField(
model_name="realmcreationkey",
name="presume_email_valid",
model_name='realmcreationkey',
name='presume_email_valid',
field=models.BooleanField(default=False),
),
]


@@ -6,32 +6,32 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("confirmation", "0006_realmcreationkey_presume_email_valid"),
('confirmation', '0006_realmcreationkey_presume_email_valid'),
]
operations = [
migrations.AlterField(
model_name="confirmation",
name="confirmation_key",
model_name='confirmation',
name='confirmation_key',
field=models.CharField(db_index=True, max_length=40),
),
migrations.AlterField(
model_name="confirmation",
name="date_sent",
model_name='confirmation',
name='date_sent',
field=models.DateTimeField(db_index=True),
),
migrations.AlterField(
model_name="confirmation",
name="object_id",
model_name='confirmation',
name='object_id',
field=models.PositiveIntegerField(db_index=True),
),
migrations.AlterField(
model_name="realmcreationkey",
name="creation_key",
field=models.CharField(db_index=True, max_length=40, verbose_name="activation key"),
model_name='realmcreationkey',
name='creation_key',
field=models.CharField(db_index=True, max_length=40, verbose_name='activation key'),
),
migrations.AlterUniqueTogether(
name="confirmation",
unique_together={("type", "confirmation_key")},
name='confirmation',
unique_together={('type', 'confirmation_key')},
),
]


@@ -1,9 +1,9 @@
# Copyright: (c) 2008, Jarek Zgoda <jarek.zgoda@gmail.com>
__revision__ = "$Id: models.py 28 2009-10-22 15:03:02Z jarek.zgoda $"
__revision__ = '$Id: models.py 28 2009-10-22 15:03:02Z jarek.zgoda $'
import datetime
import secrets
from base64 import b32encode
import string
from random import SystemRandom
from typing import Mapping, Optional, Union
from urllib.parse import urljoin
@@ -29,35 +29,28 @@ class ConfirmationKeyException(Exception):
super().__init__()
self.error_type = error_type
def render_confirmation_key_error(
request: HttpRequest, exception: ConfirmationKeyException
) -> HttpResponse:
def render_confirmation_key_error(request: HttpRequest, exception: ConfirmationKeyException) -> HttpResponse:
if exception.error_type == ConfirmationKeyException.WRONG_LENGTH:
return render(request, "confirmation/link_malformed.html")
return render(request, 'confirmation/link_malformed.html')
if exception.error_type == ConfirmationKeyException.EXPIRED:
return render(request, "confirmation/link_expired.html")
return render(request, "confirmation/link_does_not_exist.html")
return render(request, 'confirmation/link_expired.html')
return render(request, 'confirmation/link_does_not_exist.html')
def generate_key() -> str:
generator = SystemRandom()
# 24 characters * 5 bits of entropy/character = 120 bits of entropy
return b32encode(secrets.token_bytes(15)).decode().lower()
return ''.join(generator.choice(string.ascii_lowercase + string.digits) for _ in range(24))
ConfirmationObjT = Union[MultiuseInvite, PreregistrationUser, EmailChangeStatus]
def get_object_from_key(
confirmation_key: str, confirmation_type: int, activate_object: bool = True
) -> ConfirmationObjT:
def get_object_from_key(confirmation_key: str,
confirmation_type: int,
activate_object: bool=True) -> ConfirmationObjT:
# Confirmation keys used to be 40 characters
if len(confirmation_key) not in (24, 40):
raise ConfirmationKeyException(ConfirmationKeyException.WRONG_LENGTH)
try:
confirmation = Confirmation.objects.get(
confirmation_key=confirmation_key, type=confirmation_type
)
confirmation = Confirmation.objects.get(confirmation_key=confirmation_key,
type=confirmation_type)
except Confirmation.DoesNotExist:
raise ConfirmationKeyException(ConfirmationKeyException.DOES_NOT_EXIST)
@@ -67,49 +60,38 @@ def get_object_from_key(
obj = confirmation.content_object
if activate_object and hasattr(obj, "status"):
obj.status = getattr(settings, "STATUS_ACTIVE", 1)
obj.save(update_fields=["status"])
obj.status = getattr(settings, 'STATUS_ACTIVE', 1)
obj.save(update_fields=['status'])
return obj
def create_confirmation_link(
obj: ContentType, confirmation_type: int, url_args: Mapping[str, str] = {}
) -> str:
def create_confirmation_link(obj: ContentType,
confirmation_type: int,
url_args: Mapping[str, str] = {}) -> str:
key = generate_key()
realm = None
if hasattr(obj, "realm"):
if hasattr(obj, 'realm'):
realm = obj.realm
elif isinstance(obj, Realm):
realm = obj
Confirmation.objects.create(
content_object=obj,
date_sent=timezone_now(),
confirmation_key=key,
realm=realm,
type=confirmation_type,
)
Confirmation.objects.create(content_object=obj, date_sent=timezone_now(), confirmation_key=key,
realm=realm, type=confirmation_type)
return confirmation_url(key, realm, confirmation_type, url_args)
def confirmation_url(
confirmation_key: str,
realm: Optional[Realm],
confirmation_type: int,
url_args: Mapping[str, str] = {},
) -> str:
def confirmation_url(confirmation_key: str, realm: Optional[Realm],
confirmation_type: int,
url_args: Mapping[str, str] = {}) -> str:
url_args = dict(url_args)
url_args["confirmation_key"] = confirmation_key
url_args['confirmation_key'] = confirmation_key
return urljoin(
settings.ROOT_DOMAIN_URI if realm is None else realm.uri,
reverse(_properties[confirmation_type].url_name, kwargs=url_args),
)
class Confirmation(models.Model):
content_type = models.ForeignKey(ContentType, on_delete=CASCADE)
object_id: int = models.PositiveIntegerField(db_index=True)
content_object = GenericForeignKey("content_type", "object_id")
content_object = GenericForeignKey('content_type', 'object_id')
date_sent: datetime.datetime = models.DateTimeField(db_index=True)
confirmation_key: str = models.CharField(max_length=40, db_index=True)
realm: Optional[Realm] = models.ForeignKey(Realm, null=True, on_delete=CASCADE)
@@ -126,49 +108,39 @@ class Confirmation(models.Model):
type: int = models.PositiveSmallIntegerField()
def __str__(self) -> str:
return f"<Confirmation: {self.content_object}>"
return f'<Confirmation: {self.content_object}>'
class Meta:
unique_together = ("type", "confirmation_key")
class ConfirmationType:
def __init__(
self,
url_name: str,
validity_in_days: int = settings.CONFIRMATION_LINK_DEFAULT_VALIDITY_DAYS,
) -> None:
def __init__(self, url_name: str,
validity_in_days: int=settings.CONFIRMATION_LINK_DEFAULT_VALIDITY_DAYS) -> None:
self.url_name = url_name
self.validity_in_days = validity_in_days
_properties = {
Confirmation.USER_REGISTRATION: ConfirmationType("check_prereg_key_and_redirect"),
Confirmation.INVITATION: ConfirmationType(
"check_prereg_key_and_redirect", validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS
),
Confirmation.EMAIL_CHANGE: ConfirmationType("confirm_email_change"),
Confirmation.UNSUBSCRIBE: ConfirmationType(
"unsubscribe",
validity_in_days=1000000, # should never expire
),
Confirmation.USER_REGISTRATION: ConfirmationType('check_prereg_key_and_redirect'),
Confirmation.INVITATION: ConfirmationType('check_prereg_key_and_redirect',
validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS),
Confirmation.EMAIL_CHANGE: ConfirmationType('zerver.views.user_settings.confirm_email_change'),
Confirmation.UNSUBSCRIBE: ConfirmationType('zerver.views.unsubscribe.email_unsubscribe',
validity_in_days=1000000), # should never expire
Confirmation.MULTIUSE_INVITE: ConfirmationType(
"join", validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS
),
Confirmation.REALM_CREATION: ConfirmationType("check_prereg_key_and_redirect"),
Confirmation.REALM_REACTIVATION: ConfirmationType("realm_reactivation"),
'zerver.views.registration.accounts_home_from_multiuse_invite',
validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS),
Confirmation.REALM_CREATION: ConfirmationType('check_prereg_key_and_redirect'),
Confirmation.REALM_REACTIVATION: ConfirmationType('zerver.views.realm.realm_reactivation'),
}
def one_click_unsubscribe_link(user_profile: UserProfile, email_type: str) -> str:
"""
Generate a unique link that a logged-out user can visit to unsubscribe from
Zulip e-mails without having to first log in.
"""
return create_confirmation_link(
user_profile, Confirmation.UNSUBSCRIBE, url_args={"email_type": email_type}
)
return create_confirmation_link(user_profile,
Confirmation.UNSUBSCRIBE,
url_args = {'email_type': email_type})
# Functions related to links generated by the generate_realm_creation_link.py
# management command.
@@ -178,8 +150,7 @@ def one_click_unsubscribe_link(user_profile: UserProfile, email_type: str) -> st
# Arguably RealmCreationKey should just be another ConfirmationObjT and we should
# add another Confirmation.type for this; it's this way for historical reasons.
def validate_key(creation_key: Optional[str]) -> Optional["RealmCreationKey"]:
def validate_key(creation_key: Optional[str]) -> Optional['RealmCreationKey']:
"""Get the record for this key, raising InvalidCreationKey if non-None but invalid."""
if creation_key is None:
return None
@@ -192,21 +163,19 @@ def validate_key(creation_key: Optional[str]) -> Optional["RealmCreationKey"]:
raise RealmCreationKey.Invalid()
return key_record
def generate_realm_creation_url(by_admin: bool = False) -> str:
def generate_realm_creation_url(by_admin: bool=False) -> str:
key = generate_key()
RealmCreationKey.objects.create(
creation_key=key, date_created=timezone_now(), presume_email_valid=by_admin
)
RealmCreationKey.objects.create(creation_key=key,
date_created=timezone_now(),
presume_email_valid=by_admin)
return urljoin(
settings.ROOT_DOMAIN_URI,
reverse("create_realm", kwargs={"creation_key": key}),
reverse('zerver.views.create_realm', kwargs={'creation_key': key}),
)
class RealmCreationKey(models.Model):
creation_key = models.CharField("activation key", db_index=True, max_length=40)
date_created = models.DateTimeField("created", default=timezone_now)
creation_key = models.CharField('activation key', db_index=True, max_length=40)
date_created = models.DateTimeField('created', default=timezone_now)
# True just if we should presume the email address the user enters
# is theirs, and skip sending mail to it to confirm that.
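As a standalone check of the entropy comment in generate_key above: 15 random bytes are 120 bits, and base32 encodes 5 bits per character, so the key is always exactly 24 characters with no '=' padding. A sketch, independent of the Zulip code:

import secrets
from base64 import b32encode

key = b32encode(secrets.token_bytes(15)).decode().lower()
# 15 bytes * 8 bits / 5 bits per base32 character = 24 characters,
# and 15 is a multiple of 5 bytes, so no padding is emitted.
assert len(key) == 24
assert set(key) <= set("abcdefghijklmnopqrstuvwxyz234567")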


@@ -1,6 +1,6 @@
# Copyright: (c) 2008, Jarek Zgoda <jarek.zgoda@gmail.com>
__revision__ = "$Id: settings.py 12 2008-11-23 19:38:52Z jarek.zgoda $"
__revision__ = '$Id: settings.py 12 2008-11-23 19:38:52Z jarek.zgoda $'
STATUS_ACTIVE = 1
STATUS_REVOKED = 2

File diff suppressed because it is too large


@@ -9,78 +9,43 @@ class Migration(migrations.Migration):
initial = True
dependencies = [
("zerver", "0189_userprofile_add_some_emojisets"),
('zerver', '0189_userprofile_add_some_emojisets'),
]
operations = [
migrations.CreateModel(
name="BillingProcessor",
name='BillingProcessor',
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("state", models.CharField(max_length=20)),
("last_modified", models.DateTimeField(auto_now=True)),
(
"log_row",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.RealmAuditLog"
),
),
(
"realm",
models.OneToOneField(
null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('state', models.CharField(max_length=20)),
('last_modified', models.DateTimeField(auto_now=True)),
('log_row', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.RealmAuditLog')),
('realm', models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
],
),
migrations.CreateModel(
name="Coupon",
name='Coupon',
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("percent_off", models.SmallIntegerField(unique=True)),
("stripe_coupon_id", models.CharField(max_length=255, unique=True)),
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('percent_off', models.SmallIntegerField(unique=True)),
('stripe_coupon_id', models.CharField(max_length=255, unique=True)),
],
),
migrations.CreateModel(
name="Customer",
name='Customer',
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("stripe_customer_id", models.CharField(max_length=255, unique=True)),
("has_billing_relationship", models.BooleanField(default=False)),
(
"realm",
models.OneToOneField(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('stripe_customer_id', models.CharField(max_length=255, unique=True)),
('has_billing_relationship', models.BooleanField(default=False)),
('realm', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
],
),
migrations.CreateModel(
name="Plan",
name='Plan',
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("nickname", models.CharField(max_length=40, unique=True)),
("stripe_plan_id", models.CharField(max_length=255, unique=True)),
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nickname', models.CharField(max_length=40, unique=True)),
('stripe_plan_id', models.CharField(max_length=255, unique=True)),
],
),
]


@@ -6,13 +6,13 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("corporate", "0001_initial"),
('corporate', '0001_initial'),
]
operations = [
migrations.AddField(
model_name="customer",
name="default_discount",
model_name='customer',
name='default_discount',
field=models.DecimalField(decimal_places=4, max_digits=7, null=True),
),
]


@@ -7,37 +7,27 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("corporate", "0002_customer_default_discount"),
('corporate', '0002_customer_default_discount'),
]
operations = [
migrations.CreateModel(
name="CustomerPlan",
name='CustomerPlan',
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("licenses", models.IntegerField()),
("automanage_licenses", models.BooleanField(default=False)),
("charge_automatically", models.BooleanField(default=False)),
("price_per_license", models.IntegerField(null=True)),
("fixed_price", models.IntegerField(null=True)),
("discount", models.DecimalField(decimal_places=4, max_digits=6, null=True)),
("billing_cycle_anchor", models.DateTimeField()),
("billing_schedule", models.SmallIntegerField()),
("billed_through", models.DateTimeField()),
("next_billing_date", models.DateTimeField(db_index=True)),
("tier", models.SmallIntegerField()),
("status", models.SmallIntegerField(default=1)),
(
"customer",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="corporate.Customer"
),
),
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('licenses', models.IntegerField()),
('automanage_licenses', models.BooleanField(default=False)),
('charge_automatically', models.BooleanField(default=False)),
('price_per_license', models.IntegerField(null=True)),
('fixed_price', models.IntegerField(null=True)),
('discount', models.DecimalField(decimal_places=4, max_digits=6, null=True)),
('billing_cycle_anchor', models.DateTimeField()),
('billing_schedule', models.SmallIntegerField()),
('billed_through', models.DateTimeField()),
('next_billing_date', models.DateTimeField(db_index=True)),
('tier', models.SmallIntegerField()),
('status', models.SmallIntegerField(default=1)),
('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='corporate.Customer')),
],
),
]


@@ -7,29 +7,19 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("corporate", "0003_customerplan"),
('corporate', '0003_customerplan'),
]
operations = [
migrations.CreateModel(
name="LicenseLedger",
name='LicenseLedger',
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("is_renewal", models.BooleanField(default=False)),
("event_time", models.DateTimeField()),
("licenses", models.IntegerField()),
("licenses_at_next_renewal", models.IntegerField(null=True)),
(
"plan",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="corporate.CustomerPlan"
),
),
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_renewal', models.BooleanField(default=False)),
('event_time', models.DateTimeField()),
('licenses', models.IntegerField()),
('licenses_at_next_renewal', models.IntegerField(null=True)),
('plan', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='corporate.CustomerPlan')),
],
),
]


@@ -7,32 +7,27 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("corporate", "0004_licenseledger"),
('corporate', '0004_licenseledger'),
]
operations = [
migrations.RenameField(
model_name="customerplan",
old_name="next_billing_date",
new_name="next_invoice_date",
model_name='customerplan',
old_name='next_billing_date',
new_name='next_invoice_date',
),
migrations.RemoveField(
model_name="customerplan",
name="billed_through",
model_name='customerplan',
name='billed_through',
),
migrations.AddField(
model_name="customerplan",
name="invoiced_through",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="+",
to="corporate.LicenseLedger",
),
model_name='customerplan',
name='invoiced_through',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='corporate.LicenseLedger'),
),
migrations.AddField(
model_name="customerplan",
name="invoicing_status",
model_name='customerplan',
name='invoicing_status',
field=models.SmallIntegerField(default=1),
),
]


@@ -6,13 +6,13 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("corporate", "0005_customerplan_invoicing"),
('corporate', '0005_customerplan_invoicing'),
]
operations = [
migrations.AlterField(
model_name="customer",
name="stripe_customer_id",
model_name='customer',
name='stripe_customer_id',
field=models.CharField(max_length=255, null=True, unique=True),
),
]


@@ -6,33 +6,33 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("corporate", "0006_nullable_stripe_customer_id"),
('corporate', '0006_nullable_stripe_customer_id'),
]
operations = [
migrations.RemoveField(
model_name="billingprocessor",
name="log_row",
model_name='billingprocessor',
name='log_row',
),
migrations.RemoveField(
model_name="billingprocessor",
name="realm",
model_name='billingprocessor',
name='realm',
),
migrations.DeleteModel(
name="Coupon",
name='Coupon',
),
migrations.DeleteModel(
name="Plan",
name='Plan',
),
migrations.RemoveField(
model_name="customer",
name="has_billing_relationship",
model_name='customer',
name='has_billing_relationship',
),
migrations.RemoveField(
model_name="customerplan",
name="licenses",
model_name='customerplan',
name='licenses',
),
migrations.DeleteModel(
name="BillingProcessor",
name='BillingProcessor',
),
]


@@ -6,13 +6,13 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("corporate", "0007_remove_deprecated_fields"),
('corporate', '0007_remove_deprecated_fields'),
]
operations = [
migrations.AlterField(
model_name="customerplan",
name="next_invoice_date",
model_name='customerplan',
name='next_invoice_date',
field=models.DateTimeField(db_index=True, null=True),
),
]


@@ -6,13 +6,13 @@ from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("corporate", "0008_nullable_next_invoice_date"),
('corporate', '0008_nullable_next_invoice_date'),
]
operations = [
migrations.AddField(
model_name="customer",
name="sponsorship_pending",
model_name='customer',
name='sponsorship_pending',
field=models.BooleanField(default=False),
),
]


@@ -13,18 +13,14 @@ class Customer(models.Model):
stripe_customer_id: str = models.CharField(max_length=255, null=True, unique=True)
sponsorship_pending: bool = models.BooleanField(default=False)
# A percentage, like 85.
default_discount: Optional[Decimal] = models.DecimalField(
decimal_places=4, max_digits=7, null=True
)
default_discount: Optional[Decimal] = models.DecimalField(decimal_places=4, max_digits=7, null=True)
def __str__(self) -> str:
return f"<Customer {self.realm} {self.stripe_customer_id}>"
def get_customer_by_realm(realm: Realm) -> Optional[Customer]:
return Customer.objects.filter(realm=realm).first()
class CustomerPlan(models.Model):
customer: Customer = models.ForeignKey(Customer, on_delete=CASCADE)
automanage_licenses: bool = models.BooleanField(default=False)
@@ -46,8 +42,7 @@ class CustomerPlan(models.Model):
next_invoice_date: Optional[datetime.datetime] = models.DateTimeField(db_index=True, null=True)
invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
"LicenseLedger", null=True, on_delete=CASCADE, related_name="+"
)
'LicenseLedger', null=True, on_delete=CASCADE, related_name='+')
DONE = 1
STARTED = 2
INITIAL_INVOICE_TO_BE_SENT = 3
@@ -71,29 +66,9 @@ class CustomerPlan(models.Model):
# TODO maybe override setattr to ensure billing_cycle_anchor, etc are immutable
@property
def name(self) -> str:
return {
CustomerPlan.STANDARD: "Zulip Standard",
CustomerPlan.PLUS: "Zulip Plus",
CustomerPlan.ENTERPRISE: "Zulip Enterprise",
}[self.tier]
def get_plan_status_as_text(self) -> str:
return {
self.ACTIVE: "Active",
self.DOWNGRADE_AT_END_OF_CYCLE: "Scheduled for downgrade at end of cycle",
self.FREE_TRIAL: "Free trial",
self.ENDED: "Ended",
self.NEVER_STARTED: "Never started",
}[self.status]
def get_current_plan_by_customer(customer: Customer) -> Optional[CustomerPlan]:
return CustomerPlan.objects.filter(
customer=customer, status__lt=CustomerPlan.LIVE_STATUS_THRESHOLD
).first()
customer=customer, status__lt=CustomerPlan.LIVE_STATUS_THRESHOLD).first()
def get_current_plan_by_realm(realm: Realm) -> Optional[CustomerPlan]:
customer = get_customer_by_realm(realm)
@@ -101,7 +76,6 @@ def get_current_plan_by_realm(realm: Realm) -> Optional[CustomerPlan]:
return None
return get_current_plan_by_customer(customer)
class LicenseLedger(models.Model):
plan: CustomerPlan = models.ForeignKey(CustomerPlan, on_delete=CASCADE)
# Also True for the initial upgrade.


@@ -1,6 +1,5 @@
{
"amount": 7200,
"amount_captured": 7200,
"amount_refunded": 0,
"application": null,
"application_fee": null,
@@ -19,7 +18,6 @@
"name": "Ada Starr",
"phone": null
},
"calculated_statement_descriptor": "ZULIP STANDARD",
"captured": true,
"created": 1000000000,
"currency": "usd",
@@ -27,7 +25,6 @@
"description": "Upgrade to Zulip Standard, $12.0 x 6",
"destination": null,
"dispute": null,
"disputed": false,
"failure_code": null,
"failure_message": null,
"fraud_details": {},
@@ -42,7 +39,7 @@
"network_status": "approved_by_network",
"reason": null,
"risk_level": "normal",
"risk_score": 0,
"risk_score": 00,
"seller_message": "Payment complete.",
"type": "authorized"
},
@@ -62,9 +59,7 @@
"exp_year": 2033,
"fingerprint": "NORMALIZED000001",
"funding": "credit",
"installments": null,
"last4": "4242",
"network": "visa",
"three_d_secure": null,
"wallet": null
},
@@ -110,7 +105,7 @@
},
"source_transfer": null,
"statement_descriptor": "Zulip Standard",
"statement_descriptor_suffix": null,
"statement_descriptor_suffix": "Zulip Standard",
"status": "succeeded",
"transfer_data": null,
"transfer_group": null


@@ -1,6 +1,5 @@
{
"amount": 36000,
"amount_captured": 36000,
"amount_refunded": 0,
"application": null,
"application_fee": null,
@@ -19,7 +18,6 @@
"name": "Ada Starr",
"phone": null
},
"calculated_statement_descriptor": "ZULIP STANDARD",
"captured": true,
"created": 1000000000,
"currency": "usd",
@@ -27,7 +25,6 @@
"description": "Upgrade to Zulip Standard, $60.0 x 6",
"destination": null,
"dispute": null,
"disputed": false,
"failure_code": null,
"failure_message": null,
"fraud_details": {},
@@ -42,7 +39,7 @@
"network_status": "approved_by_network",
"reason": null,
"risk_level": "normal",
"risk_score": 0,
"risk_score": 00,
"seller_message": "Payment complete.",
"type": "authorized"
},
@@ -62,9 +59,7 @@
"exp_year": 2033,
"fingerprint": "NORMALIZED000001",
"funding": "credit",
"installments": null,
"last4": "4242",
"network": "visa",
"three_d_secure": null,
"wallet": null
},
@@ -110,7 +105,7 @@
},
"source_transfer": null,
"statement_descriptor": "Zulip Standard",
"statement_descriptor_suffix": null,
"statement_descriptor_suffix": "Zulip Standard",
"status": "succeeded",
"transfer_data": null,
"transfer_group": null


@@ -2,7 +2,6 @@
"data": [
{
"amount": 7200,
"amount_captured": 7200,
"amount_refunded": 0,
"application": null,
"application_fee": null,
@@ -21,7 +20,6 @@
"name": "Ada Starr",
"phone": null
},
"calculated_statement_descriptor": "ZULIP STANDARD",
"captured": true,
"created": 1000000000,
"currency": "usd",
@@ -29,7 +27,6 @@
"description": "Upgrade to Zulip Standard, $12.0 x 6",
"destination": null,
"dispute": null,
"disputed": false,
"failure_code": null,
"failure_message": null,
"fraud_details": {},
@@ -44,7 +41,7 @@
"network_status": "approved_by_network",
"reason": null,
"risk_level": "normal",
"risk_score": 0,
"risk_score": 00,
"seller_message": "Payment complete.",
"type": "authorized"
},
@@ -64,9 +61,7 @@
"exp_year": 2033,
"fingerprint": "NORMALIZED000001",
"funding": "credit",
"installments": null,
"last4": "4242",
"network": "visa",
"three_d_secure": null,
"wallet": null
},
@@ -106,7 +101,7 @@
},
"source_transfer": null,
"statement_descriptor": "Zulip Standard",
"statement_descriptor_suffix": null,
"statement_descriptor_suffix": "Zulip Standard",
"status": "succeeded",
"transfer_data": null,
"transfer_group": null


@@ -2,7 +2,6 @@
"data": [
{
"amount": 36000,
"amount_captured": 36000,
"amount_refunded": 0,
"application": null,
"application_fee": null,
@@ -21,7 +20,6 @@
"name": "Ada Starr",
"phone": null
},
"calculated_statement_descriptor": "ZULIP STANDARD",
"captured": true,
"created": 1000000000,
"currency": "usd",
@@ -29,7 +27,6 @@
"description": "Upgrade to Zulip Standard, $60.0 x 6",
"destination": null,
"dispute": null,
"disputed": false,
"failure_code": null,
"failure_message": null,
"fraud_details": {},
@@ -44,7 +41,7 @@
"network_status": "approved_by_network",
"reason": null,
"risk_level": "normal",
"risk_score": 0,
"risk_score": 00,
"seller_message": "Payment complete.",
"type": "authorized"
},
@@ -64,9 +61,7 @@
"exp_year": 2033,
"fingerprint": "NORMALIZED000001",
"funding": "credit",
"installments": null,
"last4": "4242",
"network": "visa",
"three_d_secure": null,
"wallet": null
},
@@ -106,14 +101,13 @@
},
"source_transfer": null,
"statement_descriptor": "Zulip Standard",
"statement_descriptor_suffix": null,
"statement_descriptor_suffix": "Zulip Standard",
"status": "succeeded",
"transfer_data": null,
"transfer_group": null
},
{
"amount": 7200,
"amount_captured": 7200,
"amount_refunded": 0,
"application": null,
"application_fee": null,
@@ -132,7 +126,6 @@
"name": "Ada Starr",
"phone": null
},
"calculated_statement_descriptor": "ZULIP STANDARD",
"captured": true,
"created": 1000000000,
"currency": "usd",
@@ -140,7 +133,6 @@
"description": "Upgrade to Zulip Standard, $12.0 x 6",
"destination": null,
"dispute": null,
"disputed": false,
"failure_code": null,
"failure_message": null,
"fraud_details": {},
@@ -155,7 +147,7 @@
"network_status": "approved_by_network",
"reason": null,
"risk_level": "normal",
"risk_score": 0,
"risk_score": 00,
"seller_message": "Payment complete.",
"type": "authorized"
},
@@ -175,9 +167,7 @@
"exp_year": 2033,
"fingerprint": "NORMALIZED000001",
"funding": "credit",
"installments": null,
"last4": "4242",
"network": "visa",
"three_d_secure": null,
"wallet": null
},
@@ -217,7 +207,7 @@
},
"source_transfer": null,
"statement_descriptor": "Zulip Standard",
"statement_descriptor_suffix": null,
"statement_descriptor_suffix": "Zulip Standard",
"status": "succeeded",
"transfer_data": null,
"transfer_group": null


@@ -22,7 +22,6 @@
"realm_str": "zulip"
},
"name": null,
"next_invoice_sequence": 1,
"object": "customer",
"phone": null,
"preferred_locales": [],


@@ -46,7 +46,6 @@
"realm_str": "zulip"
},
"name": null,
"next_invoice_sequence": 2,
"object": "customer",
"phone": null,
"preferred_locales": [],


@@ -22,7 +22,6 @@
"realm_str": "zulip"
},
"name": null,
"next_invoice_sequence": 2,
"object": "customer",
"phone": null,
"preferred_locales": [],


@@ -1,11 +1,10 @@
{
"account_country": "US",
"account_name": "Vishnu Test",
"account_tax_ids": null,
"account_name": "Dev account",
"amount_due": 0,
"amount_paid": 0,
"amount_remaining": 0,
"application_fee": null,
"application_fee_amount": null,
"attempt_count": 0,
"attempted": false,
"auto_advance": true,
@@ -24,30 +23,24 @@
"customer_shipping": null,
"customer_tax_exempt": "none",
"customer_tax_ids": [],
"date": 1000000000,
"default_payment_method": null,
"default_source": null,
"default_tax_rates": [],
"description": null,
"description": "",
"discount": null,
"discounts": [],
"due_date": null,
"ending_balance": null,
"finalized_at": null,
"footer": null,
"hosted_invoice_url": null,
"id": "in_NORMALIZED00000000000001",
"invoice_pdf": null,
"last_finalization_error": null,
"lines": {
"data": [
{
"amount": 7200,
"currency": "usd",
"description": "Zulip Standard",
"discount_amounts": [],
"discountable": false,
"discounts": [],
"id": "ii_NORMALIZED00000000000001",
"invoice_item": "ii_NORMALIZED00000000000001",
"livemode": false,
@@ -58,40 +51,18 @@
"start": 1000000000
},
"plan": null,
"price": {
"active": false,
"billing_scheme": "per_unit",
"created": 1000000000,
"currency": "usd",
"id": "price_1HufhsD2X8vgpBNGtyNs4AI9",
"livemode": false,
"lookup_key": null,
"metadata": {},
"nickname": null,
"object": "price",
"product": "prod_IVh67i06KRHwdX",
"recurring": null,
"tiers_mode": null,
"transform_quantity": null,
"type": "one_time",
"unit_amount": 1200,
"unit_amount_decimal": "1200"
},
"proration": false,
"quantity": 6,
"subscription": null,
"tax_amounts": [],
"tax_rates": [],
"type": "invoiceitem",
"unique_id": "il_1HufhsD2X8vgpBNGtA08rM3i"
"type": "invoiceitem"
},
{
"amount": -7200,
"currency": "usd",
"description": "Payment (Card ending in 4242)",
"discount_amounts": [],
"discountable": false,
"discounts": [],
"id": "ii_NORMALIZED00000000000002",
"invoice_item": "ii_NORMALIZED00000000000002",
"livemode": false,
@@ -102,32 +73,12 @@
"start": 1000000000
},
"plan": null,
"price": {
"active": false,
"billing_scheme": "per_unit",
"created": 1000000000,
"currency": "usd",
"id": "price_1HufhrD2X8vgpBNGD9sFn8tJ",
"livemode": false,
"lookup_key": null,
"metadata": {},
"nickname": null,
"object": "price",
"product": "prod_IVh6pGP4ldOFFV",
"recurring": null,
"tiers_mode": null,
"transform_quantity": null,
"type": "one_time",
"unit_amount": -7200,
"unit_amount_decimal": "-7200"
},
"proration": false,
"quantity": 1,
"subscription": null,
"tax_amounts": [],
"tax_rates": [],
"type": "invoiceitem",
"unique_id": "il_1HufhrD2X8vgpBNGf4QcWhh8"
"type": "invoiceitem"
}
],
"has_more": false,
@@ -161,8 +112,6 @@
"tax": null,
"tax_percent": null,
"total": 0,
"total_discount_amounts": [],
"total_tax_amounts": [],
"transfer_data": null,
"webhooks_delivered_at": null
}


@@ -1,11 +1,10 @@
{
"account_country": "US",
"account_name": "Vishnu Test",
"account_tax_ids": null,
"account_name": "Dev account",
"amount_due": 0,
"amount_paid": 0,
"amount_remaining": 0,
"application_fee": null,
"application_fee_amount": null,
"attempt_count": 0,
"attempted": false,
"auto_advance": true,
@@ -24,74 +23,46 @@
"customer_shipping": null,
"customer_tax_exempt": "none",
"customer_tax_ids": [],
"date": 1000000000,
"default_payment_method": null,
"default_source": null,
"default_tax_rates": [],
"description": null,
"description": "",
"discount": null,
"discounts": [],
"due_date": null,
"ending_balance": null,
"finalized_at": null,
"footer": null,
"hosted_invoice_url": null,
"id": "in_NORMALIZED00000000000002",
"invoice_pdf": null,
"last_finalization_error": null,
"lines": {
"data": [
{
"amount": 36000,
"currency": "usd",
"description": "Zulip Standard",
"discount_amounts": [],
"discountable": false,
"discounts": [],
"id": "ii_NORMALIZED00000000000003",
"invoice_item": "ii_NORMALIZED00000000000003",
"livemode": false,
"metadata": {},
"object": "line_item",
"period": {
"end": 1357095845,
"start": 1325473445
"end": 1000000000,
"start": 1000000000
},
"plan": null,
"price": {
"active": false,
"billing_scheme": "per_unit",
"created": 1000000000,
"currency": "usd",
"id": "price_1HufhzD2X8vgpBNGlpQImV07",
"livemode": false,
"lookup_key": null,
"metadata": {},
"nickname": null,
"object": "price",
"product": "prod_IVh6VKlEd957ap",
"recurring": null,
"tiers_mode": null,
"transform_quantity": null,
"type": "one_time",
"unit_amount": 6000,
"unit_amount_decimal": "6000"
},
"proration": false,
"quantity": 6,
"subscription": null,
"tax_amounts": [],
"tax_rates": [],
"type": "invoiceitem",
"unique_id": "il_1HufhzD2X8vgpBNGwPaEObnC"
"type": "invoiceitem"
},
{
"amount": -36000,
"currency": "usd",
"description": "Payment (Card ending in 4242)",
"discount_amounts": [],
"discountable": false,
"discounts": [],
"id": "ii_NORMALIZED00000000000004",
"invoice_item": "ii_NORMALIZED00000000000004",
"livemode": false,
@@ -102,32 +73,12 @@
"start": 1000000000
},
"plan": null,
"price": {
"active": false,
"billing_scheme": "per_unit",
"created": 1000000000,
"currency": "usd",
"id": "price_1HufhyD2X8vgpBNG58auoETW",
"livemode": false,
"lookup_key": null,
"metadata": {},
"nickname": null,
"object": "price",
"product": "prod_IVh6Yrwv6xv7Bm",
"recurring": null,
"tiers_mode": null,
"transform_quantity": null,
"type": "one_time",
"unit_amount": -36000,
"unit_amount_decimal": "-36000"
},
"proration": false,
"quantity": 1,
"subscription": null,
"tax_amounts": [],
"tax_rates": [],
"type": "invoiceitem",
"unique_id": "il_1HufhyD2X8vgpBNGQAOpJ22e"
"type": "invoiceitem"
}
],
"has_more": false,
@@ -161,8 +112,6 @@
"tax": null,
"tax_percent": null,
"total": 0,
"total_discount_amounts": [],
"total_tax_amounts": [],
"transfer_data": null,
"webhooks_delivered_at": null
}


@@ -1,124 +0,0 @@
{
"account_country": "US",
"account_name": "Vishnu Test",
"account_tax_ids": null,
"amount_due": 24000,
"amount_paid": 0,
"amount_remaining": 24000,
"application_fee": null,
"attempt_count": 0,
"attempted": false,
"auto_advance": true,
"billing": "charge_automatically",
"billing_reason": "manual",
"charge": null,
"collection_method": "charge_automatically",
"created": 1000000000,
"currency": "usd",
"custom_fields": null,
"customer": "cus_NORMALIZED0001",
"customer_address": null,
"customer_email": "hamlet@zulip.com",
"customer_name": null,
"customer_phone": null,
"customer_shipping": null,
"customer_tax_exempt": "none",
"customer_tax_ids": [],
"date": 1000000000,
"default_payment_method": null,
"default_source": null,
"default_tax_rates": [],
"description": null,
"discount": null,
"discounts": [],
"due_date": null,
"ending_balance": null,
"finalized_at": null,
"footer": null,
"hosted_invoice_url": null,
"id": "in_NORMALIZED00000000000003",
"invoice_pdf": null,
"last_finalization_error": null,
"lines": {
"data": [
{
"amount": 24000,
"currency": "usd",
"description": "Zulip Standard - renewal",
"discount_amounts": [],
"discountable": false,
"discounts": [],
"id": "ii_NORMALIZED00000000000005",
"invoice_item": "ii_NORMALIZED00000000000005",
"livemode": false,
"metadata": {},
"object": "line_item",
"period": {
"end": 1388631845,
"start": 1357095845
},
"plan": null,
"price": {
"active": false,
"billing_scheme": "per_unit",
"created": 1000000000,
"currency": "usd",
"id": "price_1Hufi2D2X8vgpBNGLrDQYzwi",
"livemode": false,
"lookup_key": null,
"metadata": {},
"nickname": null,
"object": "price",
"product": "prod_IVh6pB9D73emPf",
"recurring": null,
"tiers_mode": null,
"transform_quantity": null,
"type": "one_time",
"unit_amount": 4000,
"unit_amount_decimal": "4000"
},
"proration": false,
"quantity": 6,
"subscription": null,
"tax_amounts": [],
"tax_rates": [],
"type": "invoiceitem",
"unique_id": "il_1Hufi2D2X8vgpBNGj13daEPu"
}
],
"has_more": false,
"object": "list",
"total_count": 1,
"url": "/v1/invoices/in_NORMALIZED00000000000003/lines"
},
"livemode": false,
"metadata": {},
"next_payment_attempt": 1000000000,
"number": "NORMALI-0003",
"object": "invoice",
"paid": false,
"payment_intent": null,
"period_end": 1000000000,
"period_start": 1000000000,
"post_payment_credit_notes_amount": 0,
"pre_payment_credit_notes_amount": 0,
"receipt_number": null,
"starting_balance": 0,
"statement_descriptor": "Zulip Standard",
"status": "draft",
"status_transitions": {
"finalized_at": null,
"marked_uncollectible_at": null,
"paid_at": null,
"voided_at": null
},
"subscription": null,
"subtotal": 24000,
"tax": null,
"tax_percent": null,
"total": 24000,
"total_discount_amounts": [],
"total_tax_amounts": [],
"transfer_data": null,
"webhooks_delivered_at": null
}


@@ -1,11 +1,10 @@
{
"account_country": "US",
"account_name": "Vishnu Test",
"account_tax_ids": null,
"account_name": "Dev account",
"amount_due": 0,
"amount_paid": 0,
"amount_remaining": 0,
"application_fee": null,
"application_fee_amount": null,
"attempt_count": 0,
"attempted": true,
"auto_advance": false,
@@ -24,30 +23,24 @@
"customer_shipping": null,
"customer_tax_exempt": "none",
"customer_tax_ids": [],
"date": 1000000000,
"default_payment_method": null,
"default_source": null,
"default_tax_rates": [],
"description": null,
"description": "",
"discount": null,
"discounts": [],
"due_date": null,
"due_date": 1000000000,
"ending_balance": 0,
"finalized_at": 1000000000,
"footer": null,
"hosted_invoice_url": "https://invoice.stripe.com/i/acct_NORMALIZED000001/invst_NORMALIZED0000000000000001jwmXq",
"hosted_invoice_url": "https://pay.stripe.com/invoice/invst_NORMALIZED0000000000000001",
"id": "in_NORMALIZED00000000000001",
"invoice_pdf": "https://pay.stripe.com/invoice/acct_NORMALIZED000001/invst_NORMALIZED0000000000000001jwmXq/pdf",
"last_finalization_error": null,
"invoice_pdf": "https://pay.stripe.com/invoice/invst_NORMALIZED0000000000000001/pdf",
"lines": {
"data": [
{
"amount": 7200,
"currency": "usd",
"description": "Zulip Standard",
"discount_amounts": [],
"discountable": false,
"discounts": [],
"id": "ii_NORMALIZED00000000000001",
"invoice_item": "ii_NORMALIZED00000000000001",
"livemode": false,
@@ -58,40 +51,18 @@
"start": 1000000000
},
"plan": null,
"price": {
"active": false,
"billing_scheme": "per_unit",
"created": 1000000000,
"currency": "usd",
"id": "price_1HufhsD2X8vgpBNGtyNs4AI9",
"livemode": false,
"lookup_key": null,
"metadata": {},
"nickname": null,
"object": "price",
"product": "prod_IVh67i06KRHwdX",
"recurring": null,
"tiers_mode": null,
"transform_quantity": null,
"type": "one_time",
"unit_amount": 1200,
"unit_amount_decimal": "1200"
},
"proration": false,
"quantity": 6,
"subscription": null,
"tax_amounts": [],
"tax_rates": [],
"type": "invoiceitem",
"unique_id": "il_1HufhsD2X8vgpBNGtA08rM3i"
"type": "invoiceitem"
},
{
"amount": -7200,
"currency": "usd",
"description": "Payment (Card ending in 4242)",
"discount_amounts": [],
"discountable": false,
"discounts": [],
"id": "ii_NORMALIZED00000000000002",
"invoice_item": "ii_NORMALIZED00000000000002",
"livemode": false,
@@ -102,32 +73,12 @@
"start": 1000000000
},
"plan": null,
"price": {
"active": false,
"billing_scheme": "per_unit",
"created": 1000000000,
"currency": "usd",
"id": "price_1HufhrD2X8vgpBNGD9sFn8tJ",
"livemode": false,
"lookup_key": null,
"metadata": {},
"nickname": null,
"object": "price",
"product": "prod_IVh6pGP4ldOFFV",
"recurring": null,
"tiers_mode": null,
"transform_quantity": null,
"type": "one_time",
"unit_amount": -7200,
"unit_amount_decimal": "-7200"
},
"proration": false,
"quantity": 1,
"subscription": null,
"tax_amounts": [],
"tax_rates": [],
"type": "invoiceitem",
"unique_id": "il_1HufhrD2X8vgpBNGf4QcWhh8"
"type": "invoiceitem"
}
],
"has_more": false,
@@ -161,8 +112,6 @@
"tax": null,
"tax_percent": null,
"total": 0,
"total_discount_amounts": [],
"total_tax_amounts": [],
"transfer_data": null,
"webhooks_delivered_at": null
"webhooks_delivered_at": 1000000000
}


@@ -1,11 +1,10 @@
{
"account_country": "US",
"account_name": "Vishnu Test",
"account_tax_ids": null,
"account_name": "Dev account",
"amount_due": 0,
"amount_paid": 0,
"amount_remaining": 0,
"application_fee": null,
"application_fee_amount": null,
"attempt_count": 0,
"attempted": true,
"auto_advance": false,
@@ -24,74 +23,46 @@
"customer_shipping": null,
"customer_tax_exempt": "none",
"customer_tax_ids": [],
"date": 1000000000,
"default_payment_method": null,
"default_source": null,
"default_tax_rates": [],
"description": null,
"description": "",
"discount": null,
"discounts": [],
"due_date": null,
"due_date": 1000000000,
"ending_balance": 0,
"finalized_at": 1000000000,
"footer": null,
"hosted_invoice_url": "https://invoice.stripe.com/i/acct_NORMALIZED000001/invst_NORMALIZED0000000000000002TO6zL",
"hosted_invoice_url": "https://pay.stripe.com/invoice/invst_NORMALIZED0000000000000002",
"id": "in_NORMALIZED00000000000002",
"invoice_pdf": "https://pay.stripe.com/invoice/acct_NORMALIZED000001/invst_NORMALIZED0000000000000002TO6zL/pdf",
"last_finalization_error": null,
"invoice_pdf": "https://pay.stripe.com/invoice/invst_NORMALIZED0000000000000002/pdf",
"lines": {
"data": [
{
"amount": 36000,
"currency": "usd",
"description": "Zulip Standard",
"discount_amounts": [],
"discountable": false,
"discounts": [],
"id": "ii_NORMALIZED00000000000003",
"invoice_item": "ii_NORMALIZED00000000000003",
"livemode": false,
"metadata": {},
"object": "line_item",
"period": {
"end": 1357095845,
"start": 1325473445
"end": 1000000000,
"start": 1000000000
},
"plan": null,
"price": {
"active": false,
"billing_scheme": "per_unit",
"created": 1000000000,
"currency": "usd",
"id": "price_1HufhzD2X8vgpBNGlpQImV07",
"livemode": false,
"lookup_key": null,
"metadata": {},
"nickname": null,
"object": "price",
"product": "prod_IVh6VKlEd957ap",
"recurring": null,
"tiers_mode": null,
"transform_quantity": null,
"type": "one_time",
"unit_amount": 6000,
"unit_amount_decimal": "6000"
},
"proration": false,
"quantity": 6,
"subscription": null,
"tax_amounts": [],
"tax_rates": [],
"type": "invoiceitem",
"unique_id": "il_1HufhzD2X8vgpBNGwPaEObnC"
"type": "invoiceitem"
},
{
"amount": -36000,
"currency": "usd",
"description": "Payment (Card ending in 4242)",
"discount_amounts": [],
"discountable": false,
"discounts": [],
"id": "ii_NORMALIZED00000000000004",
"invoice_item": "ii_NORMALIZED00000000000004",
"livemode": false,
@@ -102,32 +73,12 @@
"start": 1000000000
},
"plan": null,
"price": {
"active": false,
"billing_scheme": "per_unit",
"created": 1000000000,
"currency": "usd",
"id": "price_1HufhyD2X8vgpBNG58auoETW",
"livemode": false,
"lookup_key": null,
"metadata": {},
"nickname": null,
"object": "price",
"product": "prod_IVh6Yrwv6xv7Bm",
"recurring": null,
"tiers_mode": null,
"transform_quantity": null,
"type": "one_time",
"unit_amount": -36000,
"unit_amount_decimal": "-36000"
},
"proration": false,
"quantity": 1,
"subscription": null,
"tax_amounts": [],
"tax_rates": [],
"type": "invoiceitem",
"unique_id": "il_1HufhyD2X8vgpBNGQAOpJ22e"
"type": "invoiceitem"
}
],
"has_more": false,
@@ -161,8 +112,6 @@
"tax": null,
"tax_percent": null,
"total": 0,
"total_discount_amounts": [],
"total_tax_amounts": [],
"transfer_data": null,
"webhooks_delivered_at": null
"webhooks_delivered_at": 1000000000
}


@@ -1,124 +0,0 @@
{
"account_country": "US",
"account_name": "Vishnu Test",
"account_tax_ids": null,
"amount_due": 24000,
"amount_paid": 0,
"amount_remaining": 24000,
"application_fee": null,
"attempt_count": 0,
"attempted": false,
"auto_advance": true,
"billing": "charge_automatically",
"billing_reason": "manual",
"charge": null,
"collection_method": "charge_automatically",
"created": 1000000000,
"currency": "usd",
"custom_fields": null,
"customer": "cus_NORMALIZED0001",
"customer_address": null,
"customer_email": "hamlet@zulip.com",
"customer_name": null,
"customer_phone": null,
"customer_shipping": null,
"customer_tax_exempt": "none",
"customer_tax_ids": [],
"date": 1000000000,
"default_payment_method": null,
"default_source": null,
"default_tax_rates": [],
"description": null,
"discount": null,
"discounts": [],
"due_date": null,
"ending_balance": 0,
"finalized_at": 1000000000,
"footer": null,
"hosted_invoice_url": "https://invoice.stripe.com/i/acct_NORMALIZED000001/invst_NORMALIZED00000000000000039Nm5X",
"id": "in_NORMALIZED00000000000003",
"invoice_pdf": "https://pay.stripe.com/invoice/acct_NORMALIZED000001/invst_NORMALIZED00000000000000039Nm5X/pdf",
"last_finalization_error": null,
"lines": {
"data": [
{
"amount": 24000,
"currency": "usd",
"description": "Zulip Standard - renewal",
"discount_amounts": [],
"discountable": false,
"discounts": [],
"id": "ii_NORMALIZED00000000000005",
"invoice_item": "ii_NORMALIZED00000000000005",
"livemode": false,
"metadata": {},
"object": "line_item",
"period": {
"end": 1388631845,
"start": 1357095845
},
"plan": null,
"price": {
"active": false,
"billing_scheme": "per_unit",
"created": 1000000000,
"currency": "usd",
"id": "price_1Hufi2D2X8vgpBNGLrDQYzwi",
"livemode": false,
"lookup_key": null,
"metadata": {},
"nickname": null,
"object": "price",
"product": "prod_IVh6pB9D73emPf",
"recurring": null,
"tiers_mode": null,
"transform_quantity": null,
"type": "one_time",
"unit_amount": 4000,
"unit_amount_decimal": "4000"
},
"proration": false,
"quantity": 6,
"subscription": null,
"tax_amounts": [],
"tax_rates": [],
"type": "invoiceitem",
"unique_id": "il_1Hufi2D2X8vgpBNGj13daEPu"
}
],
"has_more": false,
"object": "list",
"total_count": 1,
"url": "/v1/invoices/in_NORMALIZED00000000000003/lines"
},
"livemode": false,
"metadata": {},
"next_payment_attempt": 1000000000,
"number": "NORMALI-0003",
"object": "invoice",
"paid": false,
"payment_intent": "pi_1Hufi3D2X8vgpBNGmAdVFaWD",
"period_end": 1000000000,
"period_start": 1000000000,
"post_payment_credit_notes_amount": 0,
"pre_payment_credit_notes_amount": 0,
"receipt_number": null,
"starting_balance": 0,
"statement_descriptor": "Zulip Standard",
"status": "open",
"status_transitions": {
"finalized_at": 1000000000,
"marked_uncollectible_at": null,
"paid_at": null,
"voided_at": null
},
"subscription": null,
"subtotal": 24000,
"tax": null,
"tax_percent": null,
"total": 24000,
"total_discount_amounts": [],
"total_tax_amounts": [],
"transfer_data": null,
"webhooks_delivered_at": null
}


@@ -2,12 +2,11 @@
"data": [
{
"account_country": "US",
"account_name": "Vishnu Test",
"account_tax_ids": null,
"account_name": "Dev account",
"amount_due": 0,
"amount_paid": 0,
"amount_remaining": 0,
"application_fee": null,
"application_fee_amount": null,
"attempt_count": 0,
"attempted": true,
"auto_advance": false,
@@ -26,30 +25,24 @@
"customer_shipping": null,
"customer_tax_exempt": "none",
"customer_tax_ids": [],
"date": 1000000000,
"default_payment_method": null,
"default_source": null,
"default_tax_rates": [],
"description": null,
"description": "",
"discount": null,
"discounts": [],
"due_date": null,
"due_date": 1000000000,
"ending_balance": 0,
"finalized_at": 1000000000,
"footer": null,
"hosted_invoice_url": "https://invoice.stripe.com/i/acct_NORMALIZED000001/invst_NORMALIZED0000000000000001jwmXq",
"hosted_invoice_url": "https://pay.stripe.com/invoice/invst_NORMALIZED0000000000000001",
"id": "in_NORMALIZED00000000000001",
"invoice_pdf": "https://pay.stripe.com/invoice/acct_NORMALIZED000001/invst_NORMALIZED0000000000000001jwmXq/pdf",
"last_finalization_error": null,
"invoice_pdf": "https://pay.stripe.com/invoice/invst_NORMALIZED0000000000000001/pdf",
"lines": {
"data": [
{
"amount": 7200,
"currency": "usd",
"description": "Zulip Standard",
"discount_amounts": [],
"discountable": false,
"discounts": [],
"id": "ii_NORMALIZED00000000000001",
"invoice_item": "ii_NORMALIZED00000000000001",
"livemode": false,
@@ -60,40 +53,18 @@
"start": 1000000000
},
"plan": null,
"price": {
"active": false,
"billing_scheme": "per_unit",
"created": 1000000000,
"currency": "usd",
"id": "price_1HufhsD2X8vgpBNGtyNs4AI9",
"livemode": false,
"lookup_key": null,
"metadata": {},
"nickname": null,
"object": "price",
"product": "prod_IVh67i06KRHwdX",
"recurring": null,
"tiers_mode": null,
"transform_quantity": null,
"type": "one_time",
"unit_amount": 1200,
"unit_amount_decimal": "1200"
},
"proration": false,
"quantity": 6,
"subscription": null,
"tax_amounts": [],
"tax_rates": [],
"type": "invoiceitem",
"unique_id": "il_1HufhsD2X8vgpBNGtA08rM3i"
"type": "invoiceitem"
},
{
"amount": -7200,
"currency": "usd",
"description": "Payment (Card ending in 4242)",
"discount_amounts": [],
"discountable": false,
"discounts": [],
"id": "ii_NORMALIZED00000000000002",
"invoice_item": "ii_NORMALIZED00000000000002",
"livemode": false,
@@ -104,32 +75,12 @@
"start": 1000000000
},
"plan": null,
"price": {
"active": false,
"billing_scheme": "per_unit",
"created": 1000000000,
"currency": "usd",
"id": "price_1HufhrD2X8vgpBNGD9sFn8tJ",
"livemode": false,
"lookup_key": null,
"metadata": {},
"nickname": null,
"object": "price",
"product": "prod_IVh6pGP4ldOFFV",
"recurring": null,
"tiers_mode": null,
"transform_quantity": null,
"type": "one_time",
"unit_amount": -7200,
"unit_amount_decimal": "-7200"
},
"proration": false,
"quantity": 1,
"subscription": null,
"tax_amounts": [],
"tax_rates": [],
"type": "invoiceitem",
"unique_id": "il_1HufhrD2X8vgpBNGf4QcWhh8"
"type": "invoiceitem"
}
],
"has_more": false,
@@ -163,9 +114,7 @@
"tax": null,
"tax_percent": null,
"total": 0,
"total_discount_amounts": [],
"total_tax_amounts": [],
"transfer_data": null,
"webhooks_delivered_at": 1000000000
}
],

Some files were not shown because too many files have changed in this diff