Mirror of https://github.com/zulip/zulip.git, synced 2025-10-23 04:52:12 +00:00

Comparing 4.0-rc1...dockertest (1 commit, e5cde05710)
@@ -1,5 +0,0 @@
> 0.15%
> 0.15% in US
last 2 versions
Firefox ESR
not dead
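These five deleted lines are Browserslist queries; the file name is not shown in this capture, but the syntax matches a Browserslist config. A query list like this resolves to a concrete set of browser versions, which you can inspect locally. A quick sketch, assuming Node.js with npx is available:

```bash
# Resolve the deleted query list to concrete browser versions.
# npx fetches the browserslist CLI on demand if it isn't installed.
npx browserslist "> 0.15%, > 0.15% in US, last 2 versions, Firefox ESR, not dead"
```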
.circleci/config.yml (new file, 146 lines)
@@ -0,0 +1,146 @@
# See CircleCI upstream's docs on this config format:
# https://circleci.com/docs/2.0/language-python/
#
version: 2
jobs:
  "trusty-python-3.4":
    docker:
      # This is built from tools/circleci/images/trusty/Dockerfile .
      - image: gregprice/circleci:trusty-python-4.test

    working_directory: ~/zulip

    steps:
      - checkout

      - run:
          name: create cache directories
          command: |
            dirs=(/srv/zulip-{npm,venv}-cache)
            sudo mkdir -p "${dirs[@]}"
            sudo chown -R circleci "${dirs[@]}"

      - restore_cache:
          keys:
            - v1-npm-base.trusty-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}
      - restore_cache:
          keys:
            - v1-venv-base.trusty-{{ checksum "requirements/thumbor.txt" }}-{{ checksum "requirements/dev.txt" }}

      - run:
          name: install dependencies
          command: |
            # Install moreutils so we can use `ts` and `mispipe` in the following.
            sudo apt-get install -y moreutils

            # CircleCI sets the following in Git config at clone time:
            # url.ssh://git@github.com.insteadOf https://github.com
            # This breaks the Git clones in the NVM `install.sh` we run
            # in `install-node`.
            # TODO: figure out why that breaks, and whether we want it.
            # (Is it an optimization?)
            rm -f /home/circleci/.gitconfig

            # This is the main setup job for the test suite
            mispipe "tools/travis/setup-backend" ts

            # Cleaning caches is mostly unnecessary in Circle, because
            # most builds don't get to write to the cache.
            # mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0" ts

      - save_cache:
          paths:
            - /srv/zulip-npm-cache
          key: v1-npm-base.trusty-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}
      - save_cache:
          paths:
            - /srv/zulip-venv-cache
          key: v1-venv-base.trusty-{{ checksum "requirements/thumbor.txt" }}-{{ checksum "requirements/dev.txt" }}
      # TODO: in Travis we also cache ~/zulip-emoji-cache, ~/node, ~/misc

      # The moment of truth! Run the tests.

      - run:
          name: run backend tests
          command: |
            . /srv/zulip-py3-venv/bin/activate
            mispipe ./tools/travis/backend ts

      - run:
          name: run frontend tests
          command: |
            . /srv/zulip-py3-venv/bin/activate
            mispipe ./tools/travis/frontend ts

      - run:
          name: upload coverage report
          command: |
            . /srv/zulip-py3-venv/bin/activate
            pip install codecov && codecov \
              || echo "Error in uploading coverage reports to codecov.io."

      # - store_artifacts: # TODO
      #     path: var/casper/
      #     # also /tmp/zulip-test-event-log/
      #     destination: test-reports

  "xenial-python-3.5":
    docker:
      # This is built from tools/circleci/images/xenial/Dockerfile .
      - image: gregprice/circleci:xenial-python-3.test

    working_directory: ~/zulip

    steps:
      - checkout

      - run:
          name: create cache directories
          command: |
            dirs=(/srv/zulip-{npm,venv}-cache)
            sudo mkdir -p "${dirs[@]}"
            sudo chown -R circleci "${dirs[@]}"

      - restore_cache:
          keys:
            - v1-npm-base.xenial-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}
      - restore_cache:
          keys:
            - v1-venv-base.xenial-{{ checksum "requirements/thumbor.txt" }}-{{ checksum "requirements/dev.txt" }}

      - run:
          name: install dependencies
          command: |
            sudo apt-get update
            sudo apt-get install -y moreutils
            rm -f /home/circleci/.gitconfig
            mispipe "tools/travis/setup-backend" ts

      - save_cache:
          paths:
            - /srv/zulip-npm-cache
          key: v1-npm-base.xenial-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }}
      - save_cache:
          paths:
            - /srv/zulip-venv-cache
          key: v1-venv-base.xenial-{{ checksum "requirements/thumbor.txt" }}-{{ checksum "requirements/dev.txt" }}

      - run:
          name: run backend tests
          command: |
            . /srv/zulip-py3-venv/bin/activate
            mispipe ./tools/travis/backend ts

      - run:
          name: upload coverage report
          command: |
            . /srv/zulip-py3-venv/bin/activate
            pip install codecov && codecov \
              || echo "Error in uploading coverage reports to codecov.io."

workflows:
  version: 2
  build:
    jobs:
      - "trusty-python-3.4"
      - "xenial-python-3.5"
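The config above leans on two moreutils helpers: `ts` prefixes each output line with a timestamp, and `mispipe cmd1 cmd2` runs `cmd1 | cmd2` but returns the exit status of `cmd1`, so a failing setup script still fails the CI step even though its output goes through a pipe. A minimal demonstration of why that matters (assumes moreutils is installed):

```bash
# With a plain pipe, the pipeline's status comes from the last command,
# so the failure of `false` is masked:
false | ts; echo "plain pipe exit: $?"     # prints 0

# mispipe reports the first command's status instead:
mispipe false ts; echo "mispipe exit: $?"  # prints 1
```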
@@ -5,8 +5,6 @@ coverage:
project:
default:
target: auto
# Codecov has the tendency to report a lot of false negatives,
# so we basically suppress comments completely.
threshold: 50%
threshold: 0.50
base: auto
patch: off
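A note on the two `threshold` lines above: in Codecov's configuration the threshold is measured in percentage points of coverage, so `threshold: 0.50` fails the project status only when coverage drops by more than half a point, while `threshold: 50%` would tolerate a fifty-point drop. The adjacent pair is the diff swapping one spelling for the other.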
@@ -3,22 +3,17 @@ root = true
[*]
end_of_line = lf
charset = utf-8
indent_size = 4
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true
insert_final_newline = true

binary_next_line = true # for shfmt
switch_case_indent = true # for shfmt
[*.{sh,py,pyi,js,json,yml,xml,css,md,markdown,handlebars,html}]
indent_style = space
indent_size = 4

[{*.{js,json,ts},check-openapi}]
max_line_length = 100

[*.{py,pyi}]
max_line_length = 110

[*.{svg,rb,pp,yaml,yml}]
[*.{svg,rb,pp,pl}]
indent_style = space
indent_size = 2

[package.json]
indent_size = 2
[*.{cfg}]
indent_style = space
indent_size = 8
@@ -1,15 +1,2 @@
# This is intended for generated files and vendored third-party files.
# For our source code, instead of adding files here, consider using
# specific eslint-disable comments in the files themselves.

/docs/_build
/static/generated
/static/third
/static/webpack-bundles
/var/*
!/var/puppeteer
/var/puppeteer/*
!/var/puppeteer/test_credentials.d.ts
/zulip-current-venv
/zulip-py3-venv
/zulip-thumbor-venv
static/js/blueslip.js
static/webpack-bundles
.eslintrc.json (555 lines changed)
@@ -1,227 +1,362 @@
{
"env": {
"es2020": true,
"node": true
"node": true,
"es6": true
},
"extends": [
"eslint:recommended",
"plugin:import/errors",
"plugin:import/warnings",
"plugin:unicorn/recommended",
"prettier"
],
"parser": "@babel/eslint-parser",
"parserOptions": {
"warnOnUnsupportedTypeScriptVersion": false,
"sourceType": "unambiguous"
"sourceType": "module"
},
"reportUnusedDisableDirectives": true,
"globals": {
"$": false,
"_": false,
"jQuery": false,
"Spinner": false,
"Handlebars": false,
"XDate": false,
"zxcvbn": false,
"SockJS": false,
"marked": false,
"md5": false,
"moment": false,
"i18n": false,
"LightboxCanvas": false,
"bridge": false,
"page_params": false,
"attachments_ui": false,
"csrf_token": false,
"typeahead_helper": false,
"pygments_data": false,
"popovers": false,
"server_events": false,
"server_events_dispatch": false,
"message_scroll": false,
"keydown_util": false,
"info_overlay": false,
"ui": false,
"ui_report": false,
"night_mode": false,
"ui_util": false,
"lightbox": false,
"input_pill": false,
"user_pill": false,
"compose_pm_pill": false,
"stream_color": false,
"people": false,
"user_groups": false,
"navigate": false,
"toMarkdown": false,
"settings_toggle": false,
"settings_account": false,
"settings_display": false,
"settings_notifications": false,
"settings_muting": false,
"settings_bots": false,
"settings_sections": false,
"settings_emoji": false,
"settings_org": false,
"settings_ui": false,
"settings_users": false,
"settings_streams": false,
"settings_filters": false,
"settings_invites": false,
"settings_user_groups": false,
"settings_profile_fields": false,
"settings": false,
"resize": false,
"loading": false,
"typing": false,
"typing_events": false,
"typing_data": false,
"typing_status": false,
"sent_messages": false,
"transmit": false,
"compose": false,
"compose_actions": false,
"compose_state": false,
"compose_fade": false,
"overlays": false,
"stream_create": false,
"stream_edit": false,
"subs": false,
"stream_muting": false,
"stream_events": false,
"timerender": false,
"message_live_update": false,
"message_edit": false,
"reload": false,
"composebox_typeahead": false,
"search": false,
"topic_list": false,
"topic_generator": false,
"gear_menu": false,
"hashchange": false,
"hash_util": false,
"FetchStatus": false,
"message_list": false,
"Filter": false,
"flatpickr": false,
"pointer": false,
"util": false,
"MessageListData": false,
"MessageListView": false,
"blueslip": false,
"rows": false,
"WinChan": false,
"muting_ui": false,
"Socket": false,
"channel": false,
"components": false,
"scroll_util": false,
"message_viewport": false,
"upload_widget": false,
"avatar": false,
"realm_icon": false,
"feature_flags": false,
"search_suggestion": false,
"notifications": false,
"message_flags": false,
"bot_data": false,
"top_left_corner": false,
"stream_sort": false,
"stream_list": false,
"stream_popover": false,
"narrow_state": false,
"narrow": false,
"admin_sections": false,
"admin": false,
"stream_data": false,
"topic_data": false,
"list_util": false,
"muting": false,
"Dict": false,
"unread": false,
"alert_words_ui": false,
"message_store": false,
"message_util": false,
"message_events": false,
"message_fetch": false,
"favicon": false,
"condense": false,
"list_render": false,
"floating_recipient_bar": false,
"tab_bar": false,
"emoji": false,
"presence": false,
"user_search": false,
"buddy_data": false,
"buddy_list": false,
"list_cursor": false,
"activity": false,
"invite": false,
"colorspace": false,
"reactions": false,
"tutorial": false,
"templates": false,
"alert_words": false,
"fenced_code": false,
"markdown": false,
"echo": false,
"localstorage": false,
"localStorage": false,
"current_msg_list": true,
"home_msg_list": false,
"pm_list": false,
"pm_conversations": false,
"recent_senders": false,
"unread_ui": false,
"unread_ops": false,
"upload": false,
"user_events": false,
"Plotly": false,
"emoji_codes": false,
"drafts": false,
"katex": false,
"ClipboardJS": false,
"emoji_picker": false,
"hotspots": false,
"compose_ui": false,
"common": false,
"panels": false,
"PerfectScrollbar": false
},
"plugins": [
"eslint-plugin-empty-returns"
],
"rules": {
"array-callback-return": "error",
"arrow-body-style": "error",
"block-scoped-var": "error",
"consistent-return": "error",
"curly": "error",
"dot-notation": "error",
"eqeqeq": "error",
"guard-for-in": "error",
"import/extensions": "error",
"import/first": "error",
"import/newline-after-import": "error",
"import/no-useless-path-segments": "error",
"import/order": [
"error",
"array-bracket-spacing": "error",
"arrow-spacing": [ "error", { "before": true, "after": true } ],
"block-scoped-var": 2,
"brace-style": [ "error", "1tbs", { "allowSingleLine": true } ],
"camelcase": 0,
"comma-dangle": [ "error",
{
"alphabetize": {"order": "asc"},
"newlines-between": "always"
"arrays": "always-multiline",
"objects": "always-multiline",
"imports": "always-multiline",
"exports": "always-multiline",
"functions": "never"
}
],
"import/unambiguous": "error",
"lines-around-directive": "error",
"new-cap": "error",
"no-alert": "error",
"no-array-constructor": "error",
"no-bitwise": "error",
"no-caller": "error",
"no-catch-shadow": "error",
"no-constant-condition": ["error", {"checkLoops": false}],
"no-div-regex": "error",
"no-duplicate-imports": "error",
"no-else-return": "error",
"no-eq-null": "error",
"no-eval": "error",
"no-implicit-coercion": "error",
"no-implied-eval": "error",
"no-inner-declarations": "off",
"no-iterator": "error",
"no-label-var": "error",
"no-labels": "error",
"no-loop-func": "error",
"no-multi-str": "error",
"no-native-reassign": "error",
"no-new-func": "error",
"no-new-object": "error",
"no-new-wrappers": "error",
"no-octal-escape": "error",
"no-plusplus": "error",
"no-proto": "error",
"no-return-assign": "error",
"no-script-url": "error",
"no-self-compare": "error",
"no-sync": "error",
"no-throw-literal": "error",
"no-undef-init": "error",
"no-unneeded-ternary": ["error", {"defaultAssignment": false}],
"no-unused-expressions": "error",
"no-use-before-define": ["error", {"functions": false}],
"no-useless-concat": "error",
"no-useless-constructor": "error",
"no-var": "error",
"object-shorthand": "error",
"one-var": ["error", "never"],
"prefer-arrow-callback": "error",
"prefer-const": [
"error",
"complexity": [ 0, 4 ],
"curly": 2,
"dot-notation": [ "error", { "allowKeywords": true } ],
"empty-returns/main": "error",
"eol-last": [ "error", "always" ],
"eqeqeq": 2,
"func-style": [ "off", "expression" ],
"guard-for-in": 2,
"indent": ["error", 4, {
"ArrayExpression": "first",
"outerIIFEBody": 0,
"ObjectExpression": "first",
"SwitchCase": 0,
"CallExpression": {"arguments": "first"},
"FunctionExpression": {"parameters": "first"},
"FunctionDeclaration": {"parameters": "first"}
}],
"keyword-spacing": [ "error",
{
"before": true,
"after": true,
"overrides": {
"return": { "after": true },
"throw": { "after": true },
"case": { "after": true }
}
}
],
"max-depth": [ 0, 4 ],
"max-len": [ "error", 100, 2,
{
"ignoreUrls": true,
"ignoreComments": false,
"ignoreRegExpLiterals": true,
"ignoreStrings": true,
"ignoreTemplateLiterals": true
}
],
"max-params": [ 0, 3 ],
"max-statements": [ 0, 10 ],
"new-cap": [ "error",
{
"newIsCap": true,
"capIsNew": false
}
],
"new-parens": 2,
"newline-per-chained-call": 0,
"no-alert": 2,
"no-array-constructor": "error",
"no-bitwise": 2,
"no-caller": 2,
"no-case-declarations": "error",
"no-catch-shadow": 2,
"no-console": 0,
"no-const-assign": "error",
"no-control-regex": 2,
"no-debugger": 2,
"no-delete-var": 2,
"no-div-regex": 2,
"no-dupe-class-members": "error",
"no-dupe-keys": 2,
"no-duplicate-imports": "error",
"no-else-return": 2,
"no-empty": 2,
"no-empty-character-class": 2,
"no-eq-null": 2,
"no-eval": 2,
"no-ex-assign": 2,
"no-extra-parens": [ "error", "functions" ],
"no-extra-semi": 2,
"no-fallthrough": 2,
"no-floating-decimal": 2,
"no-func-assign": 2,
"no-implied-eval": 2,
"no-iterator": "error",
"no-label-var": 2,
"no-labels": 2,
"no-loop-func": 2,
"no-mixed-requires": [ 0, false ],
"no-multi-str": 2,
"no-native-reassign": 2,
"no-nested-ternary": 0,
"no-new-func": "error",
"no-new-object": 2,
"no-new-wrappers": 2,
"no-obj-calls": 2,
"no-octal": 2,
"no-octal-escape": 2,
"no-param-reassign": 0,
"no-plusplus": 2,
"no-proto": 2,
"no-redeclare": 2,
"no-regex-spaces": 2,
"no-restricted-syntax": 0,
"no-return-assign": 2,
"no-script-url": 2,
"no-self-compare": 2,
"no-shadow": 0,
"no-sync": 2,
"no-ternary": 0,
"no-undef": "error",
"no-undef-init": 2,
"no-underscore-dangle": 0,
"no-unneeded-ternary": [ "error", { "defaultAssignment": false } ],
"no-unreachable": 2,
"no-unused-expressions": 2,
"no-unused-vars": [ "error",
{
"vars": "local",
"args": "after-used",
"varsIgnorePattern": "print_elapsed_time|check_duplicate_ids"
}
],
"no-use-before-define": 2,
"no-useless-constructor": "error",
// The Zulip codebase complies partially with the "no-useless-escape"
// rule; only regex expressions haven't been updated yet.
// Updated regex expressions are currently being tested in casper
// files and will decide about a potential future enforcement of this rule.
"no-useless-escape": 0,
"no-whitespace-before-property": 0,
"no-with": 2,
"one-var": [ "error", "never" ],
"padded-blocks": 0,
"prefer-const": [ "error",
{
"destructuring": "any",
"ignoreReadBeforeAssign": true
}
],
"radix": "error",
"sort-imports": ["error", {"ignoreDeclarationSort": true}],
"spaced-comment": ["error", "always", {"markers": ["/"]}],
"strict": "error",
"unicorn/consistent-function-scoping": "off",
"unicorn/explicit-length-check": "off",
"unicorn/filename-case": "off",
"unicorn/no-nested-ternary": "off",
"unicorn/no-null": "off",
"unicorn/no-process-exit": "off",
"unicorn/no-useless-undefined": "off",
"unicorn/number-literal-case": "off",
"unicorn/prefer-spread": "off",
"unicorn/prefer-ternary": "off",
"unicorn/prevent-abbreviations": "off",
"valid-typeof": ["error", {"requireStringLiterals": true}],
"yoda": "error"
},
"overrides": [
{
"files": ["frontend_tests/puppeteer_lib/**", "frontend_tests/puppeteer_tests/**"],
"globals": {
"$": false,
"zulip_test": false
"quote-props": [ "error", "as-needed",
{
"keywords": false,
"unnecessary": true,
"numbers": false
}
},
{
"files": ["static/js/**"],
"globals": {
"StripeCheckout": false
],
"quotes": [ 0, "single" ],
"radix": 2,
"semi": 2,
"space-before-blocks": 2,
"space-before-function-paren": [ "error",
{
"anonymous": "always",
"named": "never",
"asyncArrow": "always"
}
},
{
"files": ["**/*.ts"],
"extends": ["plugin:@typescript-eslint/recommended", "plugin:import/typescript"],
"parserOptions": {
"project": "tsconfig.json"
},
"rules": {
// Disable base rule to avoid conflict
"no-duplicate-imports": "off",
"no-unused-vars": "off",
"no-useless-constructor": "off",

"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/consistent-type-assertions": "error",
"@typescript-eslint/consistent-type-imports": "error",
"@typescript-eslint/explicit-function-return-type": [
"error",
{"allowExpressions": true}
],
"@typescript-eslint/member-ordering": "error",
"@typescript-eslint/no-duplicate-imports": "off",
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/no-extraneous-class": "error",
"@typescript-eslint/no-non-null-assertion": "off",
"@typescript-eslint/no-parameter-properties": "error",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-unused-vars": ["error", {"varsIgnorePattern": "^_"}],
"@typescript-eslint/no-use-before-define": "error",
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-regexp-exec": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/unified-signatures": "error",
"no-undef": "error"
}
},
{
"files": ["**/*.d.ts"],
"rules": {
"import/unambiguous": "off"
}
},
{
"files": ["frontend_tests/**"],
"globals": {
"CSS": false,
"document": false,
"navigator": false,
"window": false
},
"rules": {
"no-sync": "off"
}
},
{
"files": ["tools/debug-require.js"],
"env": {
"browser": true,
"es2020": false
},
"rules": {
// Don’t require ES features that PhantomJS doesn’t support
// TODO: Toggle these settings now that we don't use PhantomJS
"no-var": "off",
"object-shorthand": "off",
"prefer-arrow-callback": "off"
}
},
{
"files": ["static/**"],
"env": {
"browser": true,
"node": false
},
"rules": {
"no-console": "error"
},
"settings": {
"import/resolver": "webpack"
}
},
{
"files": ["static/shared/**"],
"env": {
"browser": false,
"shared-node-browser": true
},
"rules": {
"import/no-restricted-paths": [
"error",
{
"zones": [
{
"target": "./static/shared",
"from": ".",
"except": ["./node_modules", "./static/shared"]
}
]
}
]
}
}
]
],
"space-in-parens": 2,
"space-infix-ops": 0,
"spaced-comment": 0,
"strict": 0,
"template-curly-spacing": "error",
"unnecessary-strict": 0,
"use-isnan": 2,
"valid-typeof": [ "error", { "requireStringLiterals": true } ],
"wrap-iife": [ "error", "outside", { "functionPrototypeMethods": false } ],
"wrap-regex": 0,
"yoda": 2
}
}
.gitattributes (vendored; 1 line changed)
@@ -11,3 +11,4 @@
*.otf binary
*.tif binary
*.ogg binary
yarn.lock binary
.github/FUNDING.yml (vendored; deleted, 3 lines)
@@ -1,3 +0,0 @@
github: zulip
patreon: zulip
open_collective: zulip
.github/pull_request_template.md (vendored; 4 lines changed)
@@ -1,10 +1,10 @@
<!-- What's this PR for? (Just a link to an issue is fine.) -->


**Testing plan:** <!-- How have you tested? -->
**Testing Plan:** <!-- How have you tested? -->


**GIFs or screenshots:** <!-- If a UI change. See:
**GIFs or Screenshots:** <!-- If a UI change. See:
https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html
-->
.github/workflows/cancel-previous-runs.yml (vendored; deleted, 41 lines)
@@ -1,41 +0,0 @@
name: Cancel previous runs
on: [push, pull_request]

defaults:
  run:
    shell: bash

jobs:
  cancel:
    name: Cancel previous runs
    runs-on: ubuntu-latest
    timeout-minutes: 3

    # Don't run this job for zulip/zulip pushes since we
    # want to run those jobs.
    if: ${{ github.event_name != 'push' || github.event.repository.full_name != 'zulip/zulip' }}

    steps:
      # We get workflow IDs from GitHub API so we don't have to maintain
      # a hard-coded list of IDs which need to be updated when a workflow
      # is added or removed. And, workflow IDs are different for other forks
      # so this is required.
      - name: Get workflow IDs.
        id: workflow_ids
        env:
          # This is in <owner>/<repo> format e.g. zulip/zulip
          REPOSITORY: ${{ github.repository }}
        run: |
          workflow_api_url=https://api.github.com/repos/$REPOSITORY/actions/workflows
          curl $workflow_api_url -o workflows.json

          script="const {workflows} = require('./workflows'); \
            const ids = workflows.map(workflow => workflow.id); \
            console.log(ids.join(','));"
          ids=$(node -e "$script")
          echo "::set-output name=ids::$ids"

      - uses: styfle/cancel-workflow-action@0.4.1
        with:
          workflow_id: ${{ steps.workflow_ids.outputs.ids }}
          access_token: ${{ github.token }}
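The deleted workflow shells out to Node just to pluck the workflow IDs out of the API response. The same extraction can be written with `jq`; a sketch, assuming `jq` is installed and the repository is public (no auth header needed):

```bash
REPOSITORY=zulip/zulip
curl -s "https://api.github.com/repos/$REPOSITORY/actions/workflows" |
    jq -r '[.workflows[].id | tostring] | join(",")'
```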
.github/workflows/codeql-analysis.yml (vendored; deleted, 30 lines)
@@ -1,30 +0,0 @@
name: "Code scanning"

on: [push, pull_request]

jobs:
  CodeQL:
    runs-on: ubuntu-latest

    steps:
      - name: Check out repository
        uses: actions/checkout@v2
        with:
          # We must fetch at least the immediate parents so that if this is
          # a pull request then we can check out the head.
          fetch-depth: 2

      # If this run was triggered by a pull request event, then check out
      # the head of the pull request instead of the merge commit.
      - run: git checkout HEAD^2
        if: ${{ github.event_name == 'pull_request' }}

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1

      # Override language selection by uncommenting this and choosing your languages
      # with:
      #   languages: go, javascript, csharp, python, cpp, java
      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1
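The `git checkout HEAD^2` step works because on `pull_request` events the runner checks out a synthetic merge commit whose first parent is the base branch and whose second parent is the pull request's head; `HEAD^2` therefore selects the PR branch's actual tip.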
.github/workflows/legacy-os.yml (vendored; deleted, 24 lines)
@@ -1,24 +0,0 @@
name: Legacy OS

on: [push, pull_request]

jobs:
  xenial:
    name: Ubuntu 16.04 Xenial (Python 3.5, legacy)
    runs-on: ubuntu-16.04
    steps:
      - uses: actions/checkout@v2
      - name: Check tools/provision error message on xenial
        run: |
          { { ! tools/provision 2>&1 >&3; } | tee provision.err; } 3>&1 >&2
          grep -Fqx 'Error: ubuntu 16.04 is no longer a supported platform for Zulip.' provision.err
      - name: Check scripts/lib/upgrade-zulip-stage-2 error message on xenial
        run: |
          { { ! sudo scripts/lib/upgrade-zulip-stage-2 2>&1 >&3; } | tee upgrade.err; } 3>&1 >&2
          grep -Fq 'upgrade-zulip-stage-2: Unsupported platform: ubuntu 16.04' upgrade.err

      - name: Report status
        if: failure()
        env:
          ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
        run: tools/ci/send-failure-message
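The capture idiom in those `run` blocks swaps file descriptors so that only stderr flows through `tee` into the `.err` file while stdout still reaches the job log, and the leading `!` inverts the exit status because the step expects the command to fail. A stripped-down version of the trick, with a placeholder command name:

```bash
# fd 3 temporarily holds the real stdout: stderr (2) is sent into the pipe
# to tee, while stdout (1) is routed around the pipe via fd 3.
# `some-command` is a stand-in, not a real tool.
{ { ! some-command 2>&1 >&3; } | tee err.log; } 3>&1 >&2
grep -q 'expected error text' err.log
```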
.github/workflows/production-suite.yml (vendored; deleted, 208 lines)
@@ -1,208 +0,0 @@
name: Zulip production suite

on:
  push:
    paths:
      - "**/migrations/**"
      - puppet/**
      - requirements/**
      - scripts/**
      - static/**
      - tools/**
      - zproject/**
      - yarn.lock
      - .github/workflows/production-suite.yml
  pull_request:
    paths:
      - "**/migrations/**"
      - puppet/**
      - requirements/**
      - scripts/**
      - static/**
      - tools/**
      - zproject/**
      - yarn.lock
      - .github/workflows/production-suite.yml

defaults:
  run:
    shell: bash

jobs:
  production_build:
    name: Bionic production build
    runs-on: ubuntu-latest

    # This docker image was created by a generated Dockerfile at:
    # tools/ci/images/bionic/Dockerfile
    # Bionic ships with Python 3.6.
    container: zulip/ci:bionic
    steps:
      - name: Add required permissions
        run: |
          # The checkout action doesn't clone to ~/zulip or allow
          # us to use the path option to clone outside the current
          # /__w/zulip/zulip directory. Since this directory is owned
          # by root we need to change its ownership to allow the
          # github user to clone the code here.
          # Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE
          # which is /home/runner/work/.
          sudo chown -R github .

          # This is the GitHub Actions specific cache directory the
          # current github user must be able to access for the
          # cache action to work. It is owned by root currently.
          sudo chmod -R 0777 /__w/_temp/

      - uses: actions/checkout@v2

      - name: Create cache directories
        run: |
          dirs=(/srv/zulip-{npm,venv,emoji}-cache)
          sudo mkdir -p "${dirs[@]}"
          sudo chown -R github "${dirs[@]}"

      - name: Restore node_modules cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-npm-cache
          key: v1-yarn-deps-${{ github.job }}-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }}
          restore-keys: v1-yarn-deps-${{ github.job }}

      - name: Restore python cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-venv-cache
          key: v1-venv-${{ github.job }}-${{ hashFiles('requirements/thumbor-dev.txt') }}-${{ hashFiles('requirements/dev.txt') }}
          restore-keys: v1-venv-${{ github.job }}

      - name: Restore emoji cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-emoji-cache
          key: v1-emoji-${{ github.job }}-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
          restore-keys: v1-emoji-${{ github.job }}

      - name: Do Bionic hack
        run: |
          # Temporary hack till `sudo service redis-server start` gets fixed in Bionic. See
          # https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
          sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf

      - name: Build production tarball
        run: ./tools/ci/production-build

      - name: Upload production build artifacts for install jobs
        uses: actions/upload-artifact@v2
        with:
          name: production-tarball
          path: /tmp/production-build
          retention-days: 14

      - name: Report status
        if: failure()
        env:
          ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
        run: tools/ci/send-failure-message

  production_install:
    strategy:
      fail-fast: false
      matrix:
        include:
          # Base images are built using `tools/ci/Dockerfile.template`.
          # The comments at the top explain how to build and upload these images.
          - docker_image: zulip/ci:bionic
            name: Bionic production install
            is_bionic: true
            os: bionic

          - docker_image: zulip/ci:focal
            name: Focal production install
            is_focal: true
            os: focal

          - docker_image: zulip/ci:buster
            name: Buster production install
            is_buster: true
            os: buster

          - docker_image: zulip/ci:bullseye
            name: Bullseye production install
            is_bullseye: true
            os: bullseye

    name: ${{ matrix.name }}
    container: ${{ matrix.docker_image }}
    runs-on: ubuntu-latest
    needs: production_build

    steps:
      - name: Download built production tarball
        uses: actions/download-artifact@v2
        with:
          name: production-tarball
          path: /tmp

      - name: Add required permissions and setup
        run: |
          # This is the GitHub Actions specific cache directory the
          # current github user must be able to access for the
          # cache action to work. It is owned by root currently.
          sudo chmod -R 0777 /__w/_temp/

          # Create the zulip directory that the tools/ci/ scripts need
          mkdir -p /home/github/zulip

          # Since actions/download-artifact@v2 loses all the permissions
          # of the tarball uploaded by the upload-artifact action, fix those.
          chmod +x /tmp/production-extract-tarball
          chmod +x /tmp/production-upgrade-pg
          chmod +x /tmp/production-install
          chmod +x /tmp/production-verify
          chmod +x /tmp/send-failure-message

      - name: Create cache directories
        run: |
          dirs=(/srv/zulip-{npm,venv,emoji}-cache)
          sudo mkdir -p "${dirs[@]}"
          sudo chown -R github "${dirs[@]}"

      - name: Restore node_modules cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-npm-cache
          key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('/tmp/package.json') }}-${{ hashFiles('/tmp/yarn.lock') }}
          restore-keys: v1-yarn-deps-${{ matrix.os }}

      - name: Do Bionic hack
        if: ${{ matrix.is_bionic }}
        run: |
          # Temporary hack till `sudo service redis-server start` gets fixed in Bionic. See
          # https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
          sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf

      - name: Production extract tarball
        run: /tmp/production-extract-tarball

      - name: Install production
        run: |
          sudo service rabbitmq-server restart
          sudo /tmp/production-install

      - name: Verify install
        run: sudo /tmp/production-verify

      - name: Upgrade postgresql
        if: ${{ matrix.is_bionic }}
        run: sudo /tmp/production-upgrade-pg

      - name: Verify install after upgrading postgresql
        if: ${{ matrix.is_bionic }}
        run: sudo /tmp/production-verify

      - name: Report status
        if: failure()
        env:
          ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
        run: /tmp/send-failure-message
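A note on the `actions/cache` stanzas above: `key` is the exact cache key, and `restore-keys` is a prefix fallback. If nothing matches the exact key, the most recent cache whose key starts with the prefix is restored, and a fresh cache is saved under the exact key at the end of a successful job. That is why each key embeds `hashFiles(...)` of the dependency manifests: any change to those files yields a new key and a rebuilt cache.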
.github/workflows/update-oneclick-apps.yml (vendored; deleted, 24 lines)
@@ -1,24 +0,0 @@
name: Update one click apps
on:
  release:
    types: [published]
jobs:
  update-digitalocean-oneclick-app:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Update DigitalOcean one click app
        env:
          DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }}
          ZULIP_API_KEY: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_API_KEY }}
          ZULIP_EMAIL: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_EMAIL }}
          ZULIP_SITE: https://chat.zulip.org
          ONE_CLICK_ACTION_STREAM: kandra ops
          PYTHON_DIGITALOCEAN_REQUEST_TIMEOUT_SEC: 30
          RELEASE_VERSION: ${{ github.event.release.tag_name }}
        run: |
          export PATH="$HOME/.local/bin:$PATH"
          git clone https://github.com/zulip/marketplace-partners
          pip3 install python-digitalocean zulip fab-classic
          echo $PATH
          python3 tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py
.github/workflows/zulip-ci.yml (vendored; deleted, 250 lines)
@@ -1,250 +0,0 @@
# NOTE: Every test in this file should be in `tools/test-all`. If there's a
# reason not to run it there, it should be there as a comment
# explaining why.

name: Zulip CI

on: [push, pull_request]

defaults:
  run:
    shell: bash

jobs:
  tests:
    strategy:
      fail-fast: false
      matrix:
        include:
          # This docker image was created by a generated Dockerfile at:
          # tools/ci/images/bionic/Dockerfile
          # Bionic ships with Python 3.6.
          - docker_image: zulip/ci:bionic
            name: Ubuntu 18.04 Bionic (Python 3.6, backend + frontend)
            os: bionic
            is_bionic: true
            include_frontend_tests: true

          # This docker image was created by a generated Dockerfile at:
          # tools/ci/images/focal/Dockerfile
          # Focal ships with Python 3.8.2.
          - docker_image: zulip/ci:focal
            name: Ubuntu 20.04 Focal (Python 3.8, backend)
            os: focal
            is_focal: true
            include_frontend_tests: false

          # This docker image was created by a generated Dockerfile at:
          # tools/ci/images/focal/Dockerfile
          # Bullseye ships with Python 3.9.2.
          - docker_image: zulip/ci:bullseye
            name: Debian 11 Bullseye (Python 3.9, backend)
            os: bullseye
            is_bullseye: true
            include_frontend_tests: false

    runs-on: ubuntu-latest
    name: ${{ matrix.name }}
    container: ${{ matrix.docker_image }}
    env:
      # GitHub Actions sets HOME to /github/home, which causes
      # problems later in provision and the frontend tests that run
      # tools/setup/postgresql-init-dev-db because of the .pgpass
      # location. PostgreSQL (psql) expects .pgpass to be at
      # /home/github/.pgpass, and setting home to `/home/github/`
      # ensures it is written there because we write it to ~/.pgpass.
      HOME: /home/github/

    steps:
      - name: Add required permissions
        run: |
          # The checkout action doesn't clone to ~/zulip or allow
          # us to use the path option to clone outside the current
          # /__w/zulip/zulip directory. Since this directory is owned
          # by root we need to change its ownership to allow the
          # github user to clone the code here.
          # Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE
          # which is /home/runner/work/.
          sudo chown -R github .

          # This is the GitHub Actions specific cache directory the
          # current github user must be able to access for the
          # cache action to work. It is owned by root currently.
          sudo chmod -R 0777 /__w/_temp/

      - uses: actions/checkout@v2

      - name: Create cache directories
        run: |
          dirs=(/srv/zulip-{npm,venv,emoji}-cache)
          sudo mkdir -p "${dirs[@]}"
          sudo chown -R github "${dirs[@]}"

      - name: Restore node_modules cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-npm-cache
          key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }}
          restore-keys: v1-yarn-deps-${{ matrix.os }}

      - name: Restore python cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-venv-cache
          key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/thumbor-dev.txt') }}-${{ hashFiles('requirements/dev.txt') }}
          restore-keys: v1-venv-${{ matrix.os }}

      - name: Restore emoji cache
        uses: actions/cache@v2
        with:
          path: /srv/zulip-emoji-cache
          key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
          restore-keys: v1-emoji-${{ matrix.os }}

      - name: Do Bionic hack
        if: ${{ matrix.is_bionic }}
        run: |
          # Temporary hack till `sudo service redis-server start` gets fixed in Bionic. See
          # https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
          sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf

      - name: Install dependencies
        run: |
          # This is the main setup job for the test suite
          ./tools/ci/setup-backend --skip-dev-db-build

          # Cleaning caches is mostly unnecessary in GitHub Actions, because
          # most builds don't get to write to the cache.
          # scripts/lib/clean-unused-caches --verbose --threshold 0

      - name: Run tools test
        run: |
          source tools/ci/activate-venv
          ./tools/test-tools

      - name: Run backend lint
        run: |
          source tools/ci/activate-venv
          echo "Test suite is running under $(python --version)."
          ./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky

      - name: Run frontend lint
        if: ${{ matrix.include_frontend_tests }}
        run: |
          source tools/ci/activate-venv
          ./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky

      - name: Run backend tests
        run: |
          source tools/ci/activate-venv
          ./tools/test-backend --coverage --include-webhooks --no-cov-cleanup --ban-console-output

      - name: Run mypy
        run: |
          source tools/ci/activate-venv
          # We run mypy after the backend tests so we get output from the
          # backend tests, which tend to uncover more serious problems, first.
          ./tools/run-mypy --version
          ./tools/run-mypy

      - name: Run miscellaneous tests
        run: |
          source tools/ci/activate-venv

          # Currently our compiled requirements files will differ for different python versions
          # so we will run test-locked-requirements only for Bionic.
          # ./tools/test-locked-requirements
          # ./tools/test-run-dev # https://github.com/zulip/zulip/pull/14233
          #
          # This test has been persistently flaky at like 1% frequency, is slow,
          # and is for a very specific single feature, so we don't run it by default:
          # ./tools/test-queue-worker-reload

          ./tools/test-migrations
          ./tools/setup/optimize-svg --check
          ./tools/setup/generate_integration_bots_avatars.py --check-missing

      - name: Run documentation and api tests
        run: |
          source tools/ci/activate-venv
          # In CI, we only test links we control in test-documentation to avoid flakes
          ./tools/test-documentation --skip-external-links
          ./tools/test-help-documentation --skip-external-links
          ./tools/test-api

      - name: Run node tests
        if: ${{ matrix.include_frontend_tests }}
        run: |
          source tools/ci/activate-venv
          # Run the node tests first, since they're fast and deterministic
          ./tools/test-js-with-node --coverage

      - name: Check schemas
        if: ${{ matrix.include_frontend_tests }}
        run: |
          source tools/ci/activate-venv
          # Check that various schemas are consistent. (It's fast.)
          ./tools/check-schemas

      - name: Check capitalization of strings
        if: ${{ matrix.include_frontend_tests }}
        run: |
          source tools/ci/activate-venv
          ./manage.py makemessages --locale en
          PYTHONWARNINGS=ignore ./tools/check-capitalization --no-generate
          PYTHONWARNINGS=ignore ./tools/check-frontend-i18n --no-generate

      - name: Run puppeteer tests
        if: ${{ matrix.include_frontend_tests }}
        run: |
          source tools/ci/activate-venv
          ./tools/test-js-with-puppeteer

      - name: Check for untracked files
        run: |
          source tools/ci/activate-venv
          # This final check looks for untracked files that may have been
          # created by test-backend or provision.
          untracked="$(git ls-files --exclude-standard --others)"
          if [ -n "$untracked" ]; then
              printf >&2 "Error: untracked files:\n%s\n" "$untracked"
              exit 1
          fi

      - name: Test locked requirements
        if: ${{ matrix.is_bionic }}
        run: |
          . /srv/zulip-py3-venv/bin/activate && \
          ./tools/test-locked-requirements

      - name: Upload coverage reports

        # Only upload coverage when both frontend and backend
        # tests are run.
        if: ${{ matrix.include_frontend_tests }}
        run: |
          # Codecov requires the `.coverage` file to be stored in the
          # current working directory.
          mv ./var/.coverage ./.coverage
          . /srv/zulip-py3-venv/bin/activate || true

          pip install codecov && codecov || echo "Error in uploading coverage reports to codecov.io."

      - name: Store Puppeteer artifacts
        # Upload these on failure, as well
        if: ${{ always() && matrix.include_frontend_tests }}
        uses: actions/upload-artifact@v2
        with:
          name: puppeteer
          path: ./var/puppeteer
          retention-days: 60

      - name: Check development database build
        if: ${{ matrix.is_focal || matrix.is_bullseye }}
        run: ./tools/ci/setup-backend

      - name: Report status
        if: failure()
        env:
          ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
        run: tools/ci/send-failure-message
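Two details in this workflow are easy to miss: the Puppeteer artifact step's `always()` condition makes it run even after an earlier step has failed, which is exactly when the screenshots are most useful, and the coverage upload is gated on `include_frontend_tests`, so reports are only sent from the one matrix job that runs both suites.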
.gitignore (vendored; 22 lines changed)
@@ -27,21 +27,10 @@
package-lock.json

/.vagrant
/var/*
!/var/puppeteer
/var/puppeteer/*
!/var/puppeteer/test_credentials.d.ts
/var

/.dmypy.json

# Dockerfiles generated for continuous integration
/tools/ci/images

# Generated i18n data
/locale/en
/locale/language_options.json
/locale/language_name_map.json
/locale/*/mobile.json
# Dockerfiles generated for CircleCI
/tools/circleci/images

# Static build
*.mo
@@ -51,7 +40,6 @@ npm-debug.log
/staticfiles.json
/webpack-stats-production.json
/yarn-error.log
zulip-git-version

# Test / analysis tools
.coverage
@@ -60,7 +48,6 @@ zulip-git-version
/zproject/prod_settings.py
/zulip-current-venv
/zulip-py3-venv
/zulip-thumbor-venv

## Files left by various editors and local environments
# (Ideally these should be in everyone's respective personal gitignore files.)
@@ -80,9 +67,6 @@ zulip.kdev4
.cache/
.eslintcache

# Core dump files
core

## Miscellaneous
# (Ideally this section is empty.)
zthumbor/thumbor_local_settings.py
.gitlint (4 lines changed)
@@ -1,9 +1,9 @@
[general]
ignore=title-trailing-punctuation, body-min-length, body-is-missing
ignore=title-trailing-punctuation, body-min-length, body-is-missing, title-imperative-mood

extra-path=tools/lib/gitlint-rules.py

[title-match-regex]
[title-match-regex-allow-exception]
regex=^(.+:\ )?[A-Z].+\.$

[title-max-length]
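The `regex` line encodes Zulip's commit-title shape: an optional `area: ` prefix, then a summary that starts with a capital letter and ends with a period. For example, `provision: Improve error handling.` passes, while `fix stuff` does not.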
.isort.cfg (new file, 10 lines)
@@ -0,0 +1,10 @@
[settings]
line_length = 79
multi_line_output = 2
balanced_wrapping = true
known_third_party = django, ujson, sqlalchemy
known_first_party = zerver, zproject, version, confirmation, zilencer, analytics, frontend_tests, scripts, corporate
sections = FUTURE, STDLIB, THIRDPARTY, FIRSTPARTY, LOCALFOLDER
lines_after_imports = 1
# See the comment related to ioloop_logging for why this is skipped.
skip = zerver/management/commands/runtornado.py
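With `multi_line_output = 2` (hanging indent) and `balanced_wrapping`, isort wraps long import lines rather than letting them exceed `line_length = 79`. You can check a file against this configuration without modifying it; a sketch, using one repository file as an example:

```bash
pip install isort
# --check-only reports violations; --diff shows what isort would change.
isort --check-only --diff zerver/models.py
```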
.mailmap (deleted, 40 lines)
@@ -1,40 +0,0 @@
Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net>
Alex Vandiver <alexmv@zulip.com> <github@chmrr.net>
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com>
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@zulip.com>
Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in>
Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com>
Anders Kaseorg <anders@zulip.com> <andersk@mit.edu>
Brock Whittaker <brock@zulipchat.com> <bjwhitta@asu.edu>
Brock Whittaker <brock@zulipchat.com> <brockwhittaker@Brocks-MacBook.local>
Brock Whittaker <brock@zulipchat.com> <brock@zulipchat.org>
Chris Bobbe <cbobbe@zulip.com> <cbobbe@zulipchat.com>
Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com>
Greg Price <greg@zulip.com> <gnprice@gmail.com>
Greg Price <greg@zulip.com> <greg@zulipchat.com>
Greg Price <greg@zulip.com> <price@mit.edu>
Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com>
Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com>
Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com>
Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com>
Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com>
Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com>
Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com>
Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu>
Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com>
Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com>
Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com>
Scott Feeney <scott@oceanbase.org> <scott@humbughq.com>
Scott Feeney <scott@oceanbase.org> <scott@zulip.com>
Steve Howell <showell@zulip.com> <showell30@yahoo.com>
Steve Howell <showell@zulip.com> <showell@yahoo.com>
Steve Howell <showell@zulip.com> <showell@zulipchat.com>
Steve Howell <showell@zulip.com> <steve@humbughq.com>
Steve Howell <showell@zulip.com> <steve@zulip.com>
Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com>
Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com>
Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu>
Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com>
Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com>
Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com>
Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com>
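Each `.mailmap` line maps an alternate commit identity onto a canonical one, in the form `Canonical Name <canonical@email> <alternate@email>`, so history tools aggregate all of an author's addresses under a single entry:

```bash
# git shortlog consults .mailmap by default; with the file deleted,
# the same author can appear once per historical email address.
git shortlog -sne | head
```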
@@ -1,6 +0,0 @@
/corporate/tests/stripe_fixtures
/locale
/static/third
/tools/setup/emoji/emoji_map.json
/zerver/tests/fixtures
/zerver/webhooks/*/fixtures
@@ -1,15 +0,0 @@
{
    "source_directories": ["."],
    "taint_models_path": [
        "stubs/taint",
        "zulip-py3-venv/lib/pyre_check/taint/"
    ],
    "search_path": [
        "stubs/",
        "zulip-py3-venv/lib/pyre_check/stubs/"
    ],
    "typeshed": "zulip-py3-venv/lib/pyre_check/typeshed/",
    "exclude": [
        "/srv/zulip/zulip-py3-venv/.*"
    ]
}
@@ -1 +0,0 @@
sonar.inclusions=**/*.py,**/*.html
.travis.yml (new file, 67 lines)
@@ -0,0 +1,67 @@
# See https://zulip.readthedocs.io/en/latest/testing/travis.html for
# high-level documentation on our Travis CI setup.
dist: trusty
group: deprecated-2017Q4
install:
  # Disable sometimes-broken sources.list in Travis base images
  - sudo rm -vf /etc/apt/sources.list.d/*
  - sudo apt-get update

  # Disable Travis CI's built-in NVM installation
  - mispipe "mv ~/.nvm ~/.travis-nvm-disabled" ts

  # Install codecov, the library for the code coverage reporting tool we use
  # With a retry to minimize impact of transient networking errors.
  - mispipe "pip install codecov" ts || mispipe "pip install codecov" ts

  # This is the main setup job for the test suite
  - mispipe "tools/travis/setup-$TEST_SUITE" ts

  # Clean any caches that are not in use to avoid our cache
  # becoming huge.
  - mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0" ts

script:
  # We unset GEM_PATH here as a hack to work around Travis CI having
  # broken running their system puppet with Ruby. See
  # https://travis-ci.org/zulip/zulip/jobs/240120991 for an example traceback.
  - unset GEM_PATH
  - mispipe "./tools/travis/$TEST_SUITE" ts
cache:
  yarn: true
  apt: false
  directories:
    - $HOME/zulip-venv-cache
    - $HOME/zulip-npm-cache
    - $HOME/zulip-emoji-cache
    - $HOME/node
    - $HOME/misc
env:
  global:
    - BOTO_CONFIG=/tmp/nowhere
language: python
# Our test suites generally run on Python 3.4, the version in
# Ubuntu 14.04 trusty, which is the oldest OS release we support.
matrix:
  include:
    # Travis will actually run the jobs in the order they're listed here;
    # that doesn't seem to be documented, but it's what we see empirically.
    # We only get 4 jobs running at a time, so we try to make the first few
    # the most likely to break.
    - python: "3.4"
      env: TEST_SUITE=production
      # Other suites moved to CircleCI -- see .circleci/.
sudo: required
addons:
  artifacts:
    paths:
      # Casper debugging data (screenshots, etc.) is super useful for
      # debugging test flakes.
      - $(ls var/casper/* | tr "\n" ":")
      - $(ls /tmp/zulip-test-event-log/* | tr "\n" ":")
  postgresql: "9.3"
  apt:
    packages:
      - moreutils
after_success:
  - codecov
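One small shell trick in the artifacts section: the Travis artifacts addon expects a colon-separated list of paths, so the config builds one from a glob at runtime:

```bash
# Expand the glob, then turn the newline-separated listing into the
# colon-separated form the artifacts addon expects.
ls var/casper/* | tr "\n" ":"
```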
.tx/config (22 lines changed)
@@ -3,31 +3,31 @@ host = https://www.transifex.com
lang_map = zh-Hans: zh_Hans, zh-Hant: zh_Hant

[zulip.djangopo]
file_filter = locale/<lang>/LC_MESSAGES/django.po
source_file = locale/en/LC_MESSAGES/django.po
source_file = static/locale/en/LC_MESSAGES/django.po
source_lang = en
type = PO
file_filter = static/locale/<lang>/LC_MESSAGES/django.po

[zulip.translationsjson]
file_filter = locale/<lang>/translations.json
source_file = locale/en/translations.json
source_file = static/locale/en/translations.json
source_lang = en
type = KEYVALUEJSON
file_filter = static/locale/<lang>/translations.json

[zulip.mobile]
file_filter = locale/<lang>/mobile.json
source_file = locale/en/mobile.json
[zulip.messages]
source_file = static/locale/en/mobile.json
source_lang = en
type = KEYVALUEJSON
file_filter = static/locale/<lang>/mobile.json

[zulip-test.djangopo]
file_filter = locale/<lang>/LC_MESSAGES/django.po
source_file = locale/en/LC_MESSAGES/django.po
source_file = static/locale/en/LC_MESSAGES/django.po
source_lang = en
type = PO
file_filter = static/locale/<lang>/LC_MESSAGES/django.po

[zulip-test.translationsjson]
file_filter = locale/<lang>/translations.json
source_file = locale/en/translations.json
source_file = static/locale/en/translations.json
source_lang = en
type = KEYVALUEJSON
file_filter = static/locale/<lang>/translations.json
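Each stanza in this file maps one Transifex resource to a local source file and per-language `file_filter` paths. With the config in place, the Transifex client syncs translations in bulk; a sketch, assuming the legacy `transifex-client` that reads this config format:

```bash
pip install transifex-client
tx pull -a   # download every language into the file_filter paths
```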
@@ -14,7 +14,7 @@ This isn't an exhaustive list of things that you can't do. Rather, take it
in the spirit in which it's intended --- a guide to make it easier to enrich
all of us and the technical communities in which we participate.

## Expected behavior
## Expected Behavior

The following behaviors are expected and requested of all community members:

@@ -29,7 +29,7 @@ The following behaviors are expected and requested of all community members:
* Community event venues may be shared with members of the public; be
respectful to all patrons of these locations.

## Unacceptable behavior
## Unacceptable Behavior

The following behaviors are considered harassment and are unacceptable
within the Zulip community:
@@ -53,7 +53,7 @@ within the Zulip community:
presentations.
* Advocating for, or encouraging, any of the behaviors above.

## Reporting and enforcement
## Reporting and Enforcement

Harassment and other code of conduct violations reduce the value of the
community for everyone. If someone makes you or anyone else feel unsafe or
@@ -78,7 +78,7 @@ something you can do while a violation is happening, do it. A lot of the
harms of harassment and other violations can be mitigated by the victim
knowing that the other people present are on their side.

All reports will be kept confidential. In some cases, we may determine that a
All reports will be kept confidential. In some cases we may determine that a
public statement will need to be made. In such cases, the identities of all
victims and reporters will remain confidential unless those individuals
instruct us otherwise.
@@ -95,10 +95,11 @@ behavior occurring outside the scope of community activities when such
behavior has the potential to adversely affect the safety and well-being of
community members.

## License and attribution
## License and Attribution

This Code of Conduct is adapted from the
[Citizen Code of Conduct](http://citizencodeofconduct.org/) and the
[Django Code of Conduct](https://www.djangoproject.com/conduct/), and is
under a
[Creative Commons BY-SA](https://creativecommons.org/licenses/by-sa/4.0/)
[Creative Commons BY-SA](http://creativecommons.org/licenses/by-sa/4.0/)
license.
151
CONTRIBUTING.md
@@ -13,12 +13,10 @@ user, or anything else. Make sure to read the
before posting. The Zulip community is also governed by a
[code of conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html).

You can subscribe to
[zulip-devel-announce@googlegroups.com](https://groups.google.com/g/zulip-devel-announce)
or our [Twitter](https://twitter.com/zulip) account for a very low
traffic (<1 email/month) way to hear about things like mentorship
opportunities with Google Summer of Code, in-person sprints at
conferences, and other opportunities to contribute.
You can subscribe to zulip-devel@googlegroups.com for a lower traffic (~1
email/month) way to hear about things like mentorship opportunities with Google
Code-in, in-person sprints at conferences, and other opportunities to
contribute.

## Ways to contribute

@@ -30,11 +28,11 @@ needs doing:
[backend](https://github.com/zulip/zulip), web
[frontend](https://github.com/zulip/zulip), React Native
[mobile app](https://github.com/zulip/zulip-mobile), or Electron
[desktop app](https://github.com/zulip/zulip-desktop).
[desktop app](https://github.com/zulip/zulip-electron).
* Building out our
[Python API and bots](https://github.com/zulip/python-zulip-api) framework.
* [Writing an integration](https://zulip.com/api/integrations-overview).
* Improving our [user](https://zulip.com/help/) or
* [Writing an integration](https://zulipchat.com/api/integration-guide).
* Improving our [user](https://zulipchat.com/help/) or
[developer](https://zulip.readthedocs.io/en/latest/) documentation.
* [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html)
and manually testing pull requests.

@@ -45,11 +43,10 @@ don't require touching the codebase at all. We list a few of them below:
* [Reporting issues](#reporting-issues), including both feature requests and
bug reports.
* [Giving feedback](#user-feedback) if you are evaluating or using Zulip.
* [Sponsor Zulip](https://github.com/sponsors/zulip) through the GitHub sponsors program.
* [Translating](https://zulip.readthedocs.io/en/latest/translating/translating.html)
Zulip.
* [Outreach](#zulip-outreach): Star us on GitHub, upvote us
on product comparison sites, or write for [the Zulip blog](https://blog.zulip.org/).
on product comparison sites, or write for [the Zulip blog](http://blog.zulip.org/).

## Your first (codebase) contribution

@@ -61,22 +58,23 @@ to help.
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html),
paying special attention to the community norms. If you'd like, introduce
yourself in
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members), using
[#new members](https://chat.zulip.org/#narrow/stream/new.20members), using
your name as the topic. Bonus: tell us about your first impressions of
Zulip, and anything that felt confusing/broken as you started using the
product.
* Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor).
* [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html),
getting help in
[#development help](https://chat.zulip.org/#narrow/stream/49-development-help)
[#development help](https://chat.zulip.org/#narrow/stream/development.20help)
if you run into any troubles.
* Read the
[Zulip guide to Git](https://zulip.readthedocs.io/en/latest/git/index.html)
and do the Git tutorial (coming soon) if you are unfamiliar with
Git, getting help in
[#git help](https://chat.zulip.org/#narrow/stream/44-git-help) if
you run into any troubles. Be sure to check out the
[extremely useful Zulip-specific tools page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html).
and do the Git tutorial (coming soon) if you are unfamiliar with Git,
getting help in
[#git help](https://chat.zulip.org/#narrow/stream/git.20help) if you run
into any troubles.
* Sign the
[Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/).

### Picking an issue

@@ -86,53 +84,43 @@ on.

* If you're interested in
[mobile](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue),
[desktop](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue),
[desktop](https://github.com/zulip/zulip-electron/issues?q=is%3Aopen+is%3Aissue),
or
[bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue)
development, check the respective links for open issues, or post in
[#mobile](https://chat.zulip.org/#narrow/stream/48-mobile),
[#desktop](https://chat.zulip.org/#narrow/stream/16-desktop), or
[#integration](https://chat.zulip.org/#narrow/stream/127-integrations).
* For the main server and web repository, we recommend browsing
recently opened issues to look for issues you are confident you can
fix correctly in a way that clearly communicates why your changes
are the correct fix. Our GitHub workflow bot, zulipbot, limits
users who have 0 commits merged to claiming a single issue labeled
with "good first issue" or "help wanted".
[#mobile](https://chat.zulip.org/#narrow/stream/mobile),
[#electron](https://chat.zulip.org/#narrow/stream/electron), or
[#bots](https://chat.zulip.org/#narrow/stream/bots).
* For the main server and web repository, start by looking through issues
with the label
[good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A"good+first+issue").
These are smaller projects particularly suitable for a first contribution.
* We also partition all of our issues in the main repo into areas like
admin, compose, emoji, hotkeys, i18n, onboarding, search, etc. Look
through our [list of labels](https://github.com/zulip/zulip/labels), and
click on some of the `area:` labels to see all the issues related to your
areas of interest.
* If the lists of issues are overwhelming, post in
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with a
[#new members](https://chat.zulip.org/#narrow/stream/new.20members) with a
bit about your background and interests, and we'll help you out. The most
important thing to say is whether you're looking for a backend (Python),
frontend (JavaScript and TypeScript), mobile (React Native), desktop (Electron),
documentation (English) or visual design (JavaScript/TypeScript + CSS) issue, and a
frontend (JavaScript), mobile (React Native), desktop (Electron),
documentation (English) or visual design (JavaScript + CSS) issue, and a
bit about your programming experience and available time.

We also welcome suggestions of features that you feel would be valuable or
changes that you feel would make Zulip a better open source project. If you
have a new feature you'd like to add, we recommend you start by posting in
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with the
[#new members](https://chat.zulip.org/#narrow/stream/new.20members) with the
feature idea and the problem that you're hoping to solve.

Other notes:
* For a first pull request, it's better to aim for a smaller contribution
than a bigger one. Many first contributions have fewer than 10 lines of
changes (not counting changes to tests).
* The full list of issues explicitly looking for a contributor can be
found with the
[good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
and
* The full list of issues looking for a contributor can be found with the
[help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
labels. Avoid issues with the "difficult" label unless you
understand why it is difficult and are confident you can resolve the
issue correctly and completely. Issues without one of these labels
are fair game if Tim has written a clear technical design proposal
in the issue, or it is a bug that you can reproduce and you are
confident you can fix the issue correctly.
label.
* For most new contributors, there's a lot to learn while making your first
pull request. It's OK if it takes you a while; that's normal! You'll be
able to work a lot faster as you build experience.
@@ -144,12 +132,6 @@ the issue thread. [Zulipbot](https://github.com/zulip/zulipbot) is a GitHub
workflow bot; it will assign you to the issue and label the issue as "in
progress". Some additional notes:

* You can only claim issues with the
[good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
or
[help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
labels. Zulipbot will give you an error if you try to claim an issue
without one of those labels.
* You're encouraged to ask questions on how to best implement or debug your
changes -- the Zulip maintainers are excited to answer questions to help
you stay unblocked and working efficiently. You can ask questions on

@@ -172,8 +154,9 @@ labels.

## What makes a great Zulip contributor?

Zulip has a lot of experience working with new contributors. In our
experience, these are the best predictors of success:
Zulip runs a lot of [internship programs](#internship-programs), so we have
a lot of experience with new contributors. In our experience, these are the
best predictors of success:

* Posting good questions. This generally means explaining your current
understanding, saying what you've done or tried so far, and including

@@ -193,8 +176,8 @@ experience, these are the best predictors of success:
able to address things within a few days.
* Being helpful and friendly on chat.zulip.org.

These are also the main criteria we use to select candidates for all
of our outreach programs.
These are also the main criteria we use to select interns for all of our
internship programs.

## Reporting issues

@@ -203,9 +186,9 @@ bugs, feel free to just open an issue on the relevant project on GitHub.

If you have a feature request or are not yet sure what the underlying bug
is, the best place to post issues is
[#issues](https://chat.zulip.org/#narrow/stream/9-issues) (or
[#mobile](https://chat.zulip.org/#narrow/stream/48-mobile) or
[#desktop](https://chat.zulip.org/#narrow/stream/16-desktop)) on the
[#issues](https://chat.zulip.org/#narrow/stream/issues) (or
[#mobile](https://chat.zulip.org/#narrow/stream/mobile) or
[#electron](https://chat.zulip.org/#narrow/stream/electron)) on the
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html).
This allows us to interactively figure out what is going on, let you know if
a similar issue has already been opened, and collect any other information

@@ -215,9 +198,8 @@ and how to reproduce it if known, your browser/OS if relevant, and a
if appropriate.

**Reporting security issues**. Please do not report security issues
publicly, including on public streams on chat.zulip.org. You can
email security@zulip.com. We create a CVE for every security
issue in our released software.
publicly, including on public streams on chat.zulip.org. You can email
zulip-security@googlegroups.com. We create a CVE for every security issue.

## User feedback

@@ -232,7 +214,7 @@ to:
* Pros and cons: What are the pros and cons of Zulip for your organization,
and the pros and cons of other products you are evaluating?
* Features: What are the features that are most important for your
organization? In the best-case scenario, what would your chat solution do
organization? In the best case scenario, what would your chat solution do
for you?
* Onboarding: If you remember it, what was your impression during your first
few minutes of using Zulip? What did you notice, and how did you feel? Was

@@ -240,20 +222,21 @@ to:
* Organization: What does your organization do? How big is the organization?
A link to your organization's website?

## Outreach programs
## Internship programs

Zulip participates in [Google Summer of Code
(GSoC)](https://developers.google.com/open-source/gsoc/) every year.
In the past, we've also participated in
[Outreachy](https://www.outreachy.org/), [Google
Code-In](https://developers.google.com/open-source/gci/), and hosted
summer interns from Harvard, MIT, and Stanford.
Zulip runs internship programs with
[Outreachy](https://www.outreachy.org/),
[Google Summer of Code (GSoC)](https://developers.google.com/open-source/gsoc/)
[1], and the
[MIT Externship program](https://alum.mit.edu/students/NetworkwithAlumni/ExternshipProgram),
and has in the past taken summer interns from Harvard, MIT, and
Stanford.

While each third-party program has its own rules and requirements, the
Zulip community approaches all of these programs with these ideas in
mind:
* We try to make the application process as valuable for the applicant as
possible. Expect high-quality code reviews, a supportive community, and
possible. Expect high quality code reviews, a supportive community, and
publicly viewable patches you can link to from your resume, regardless of
whether you are selected.
* To apply, you'll have to submit at least one pull request to a Zulip

@@ -267,22 +250,26 @@ mind:
application to make mistakes in your first few PRs as long as your
work improves.

Most of our outreach program participants end up sticking around the
project long-term, and many have become core team members, maintaining
important parts of the project. We hope you apply!
Zulip also participates in
[Google Code-In](https://developers.google.com/open-source/gci/). Our
selection criteria for Finalists and Grand Prize Winners is the same as our
selection criteria for interns above.

Most of our interns end up sticking around the project long-term, and many
quickly become core team members. We hope you apply!

### Google Summer of Code

The largest outreach program Zulip participates in is GSoC (14
students in 2017; 11 in 2018; 17 in 2019; 18 in 2020). While we don't control how
many slots Google allocates to Zulip, we hope to mentor a similar
number of students in future summers.
GSoC is by far the largest of our internship programs (we had 14 GSoC
students in summer 2017). While we don't control how many slots
Google allocates to Zulip, we hope to mentor a similar number of
students in 2018.

If you're reading this well before the application deadline and want
to make your application strong, we recommend getting involved in the
community and fixing issues in Zulip now. Having good contributions
and building a reputation for doing good work is the best way to have
a strong application. About half of Zulip's GSoC students for Summer
and building a reputation for doing good work is best way to have a
strong application. About half of Zulip's GSoC students for Summer
2017 had made significant contributions to the project by February
2017, and about half had not. Our
[GSoC project ideas page][gsoc-guide] has lots more details on how

@@ -298,10 +285,14 @@ same as with GSoC, and it has no separate application process; your
GSoC application is your ZSoC application. If we'd like to select you
for ZSoC, we'll contact you when the GSoC results are announced.

[gsoc-guide]: https://zulip.readthedocs.io/en/latest/contributing/gsoc-ideas.html
[gsoc-guide]: https://zulip.readthedocs.io/en/latest/overview/gsoc-ideas.html
[gsoc-faq]: https://developers.google.com/open-source/gsoc/faq

## Zulip outreach
[1] Formally, [GSoC isn't an internship][gsoc-faq], but it is similar
enough that we're treating it as such for the purposes of this
documentation.

## Zulip Outreach

**Upvoting Zulip**. Upvotes and reviews make a big difference in the public
perception of projects like Zulip. We've collected a few sites below

@@ -310,7 +301,7 @@ list typically takes about 15 minutes.
* Star us on GitHub. There are four main repositories:
[server/web](https://github.com/zulip/zulip),
[mobile](https://github.com/zulip/zulip-mobile),
[desktop](https://github.com/zulip/zulip-desktop), and
[desktop](https://github.com/zulip/zulip-electron), and
[Python API](https://github.com/zulip/python-zulip-api).
* [Follow us](https://twitter.com/zulip) on Twitter.

@@ -335,7 +326,7 @@ have been using Zulip for a while and want to contribute more.
about a technical aspect of Zulip can be a great way to spread the word
about Zulip.

We also occasionally [publish](https://blog.zulip.org/) long-form
We also occasionally [publish](http://blog.zulip.org/) longer form
articles related to Zulip. Our posts typically get tens of thousands
of views, and we always have good ideas for blog posts that we can
outline but don't have time to write. If you are an experienced writer
17
Dockerfile-dev
Normal file
@@ -0,0 +1,17 @@
FROM ubuntu:trusty

EXPOSE 9991

RUN apt-get update && apt-get install -y wget

RUN locale-gen en_US.UTF-8

RUN useradd -d /home/zulip -m zulip && echo 'zulip ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers

USER zulip

RUN ln -nsf /srv/zulip ~/zulip

RUN echo 'export LC_ALL="en_US.UTF-8" LANG="en_US.UTF-8" LANGUAGE="en_US.UTF-8"' >> ~zulip/.bashrc

WORKDIR /srv/zulip
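For context, a development container built from this file would be driven with
commands along these lines (illustrative; the invocation is not part of this
diff):

    docker build -t zulip-dev -f Dockerfile-dev .
    docker run -it -p 9991:9991 -v "$PWD":/srv/zulip zulip-dev

mounting the checkout at /srv/zulip (the WORKDIR, and the target of the
~/zulip symlink above) and publishing the development server port 9991 that
the image EXPOSEs.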
@@ -1,15 +0,0 @@
# To build run `docker build -f Dockerfile-postgresql .` from the root of the
# zulip repo.

# Currently the PostgreSQL images do not support automatic upgrading of
# the on-disk data in volumes. So the base image can not currently be upgraded
# without users needing a manual pgdump and restore.

# Install hunspell, Zulip stop words, and run Zulip database
# init.
FROM groonga/pgroonga:latest-alpine-10-slim
RUN apk add -U --no-cache hunspell-en
RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix
COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/local/share/postgresql/tsearch_data/zulip_english.stop
COPY scripts/setup/create-db.sql /docker-entrypoint-initdb.d/zulip-create-db.sql
COPY scripts/setup/create-pgroonga.sql /docker-entrypoint-initdb.d/zulip-create-pgroonga.sql
46
LICENSE
@@ -1,3 +1,24 @@
Copyright 2011-2017 Dropbox, Inc., Kandra Labs, Inc., and contributors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

The software includes some works released by third parties under other
free and open source licenses. Those works are redistributed under the
license terms under which the works were received. For more details,
see the ``docs/THIRDPARTY`` file included with this distribution.


--------------------------------------------------------------------------------

Apache License
Version 2.0, January 2004

@@ -175,28 +196,3 @@
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
18
NOTICE
@@ -1,18 +0,0 @@
Copyright 2012–2015 Dropbox, Inc., 2015–2021 Kandra Labs, Inc., and contributors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this project except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

The software includes some works released by third parties under other
free and open source licenses. Those works are redistributed under the
license terms under which the works were received. For more details,
see the ``docs/THIRDPARTY`` file included with this distribution.
55
README.md
@@ -5,19 +5,16 @@ immediacy of real-time chat with the productivity benefits of threaded
conversations. Zulip is used by open source projects, Fortune 500 companies,
large standards bodies, and others who need a real-time chat system that
allows users to easily process hundreds or thousands of messages a day. With
over 700 contributors merging over 500 commits a month, Zulip is also the
over 300 contributors merging over 500 commits a month, Zulip is also the
largest and fastest growing open source group chat project.

[build status](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amaster)
[coverage status](https://codecov.io/gh/zulip/zulip/branch/master)
[CircleCI build status](https://circleci.com/gh/zulip/zulip)
[Travis build status](https://travis-ci.org/zulip/zulip)
[coverage status](https://codecov.io/gh/zulip/zulip)
[Mypy coverage][mypy-coverage]
[code style: black](https://github.com/psf/black)
[code style: prettier](https://github.com/prettier/prettier)
[GitHub release](https://github.com/zulip/zulip/releases/latest)
[docs](https://zulip.readthedocs.io/en/latest/)
[Zulip chat](https://chat.zulip.org)
[Twitter](https://twitter.com/zulip)
[GitHub Sponsors](https://github.com/sponsors/zulip)

[mypy-coverage]: https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/

@@ -32,12 +29,12 @@ You might be interested in:

* **Contributing code**. Check out our
[guide for new contributors](https://zulip.readthedocs.io/en/latest/overview/contributing.html)
to get started. Zulip prides itself on maintaining a clean and
to get started. Zulip prides itself on maintaining a clean and
well-tested codebase, and a stock of hundreds of
[beginner-friendly issues][beginner-friendly].

* **Contributing non-code**.
[Report an issue](https://zulip.readthedocs.io/en/latest/overview/contributing.html#reporting-issues),
[Report an issue](https://zulip.readthedocs.io/en/latest/overview/contributing.html#reporting-issue),
[translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) Zulip
into your language,
[write](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach)

@@ -45,7 +42,7 @@ You might be interested in:
[give us feedback](https://zulip.readthedocs.io/en/latest/overview/contributing.html#user-feedback). We
would love to hear from you, even if you're just trying the product out.

* **Supporting Zulip**. Advocate for your organization to use Zulip, become a [sponsor](https://github.com/sponsors/zulip), write a
* **Supporting Zulip**. Advocate for your organization to use Zulip, write a
review in the mobile app stores, or
[upvote Zulip](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach) on
product comparison sites.

@@ -54,26 +51,32 @@ You might be interested in:
the
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html). We
also recommend reading Zulip for
[open source](https://zulip.com/for/open-source/), Zulip for
[companies](https://zulip.com/for/companies/), or Zulip for
[working groups and part time communities](https://zulip.com/for/working-groups-and-communities/).
[open source](https://zulipchat.com/for/open-source/), Zulip for
[companies](https://zulipchat.com/for/companies/), or Zulip for
[working groups and part time communities](https://zulipchat.com/for/working-groups-and-communities/).

* **Running a Zulip server**. Use a preconfigured [DigitalOcean droplet](https://marketplace.digitalocean.com/apps/zulip),
[install Zulip](https://zulip.readthedocs.io/en/stable/production/install.html)
directly, or use Zulip's
experimental [Docker image](https://zulip.readthedocs.io/en/latest/production/deployment.html#zulip-in-docker).
Commercial support is available; see <https://zulip.com/plans> for details.
* **Running a Zulip server**. Setting up a server takes just a couple of
minutes. Zulip runs on Ubuntu 16.04 Xenial and Ubuntu 14.04 Trusty. The
installation process is
[documented here](https://zulip.readthedocs.io/en/stable/prod.html).
Commercial support is available; see <https://zulipchat.com/plans> for
details.

* **Using Zulip without setting up a server**. <https://zulip.com>
offers free and commercial hosting, including providing our paid
plan for free to fellow open source projects.
* **Using Zulip without setting up a server**. <https://zulipchat.com> offers
free and commercial hosting.

* **Participating in [outreach
programs](https://zulip.readthedocs.io/en/latest/overview/contributing.html#outreach-programs)**
like Google Summer of Code.
* **Applying for a Zulip internship**. Zulip runs internship programs with
[Outreachy](https://www.outreachy.org/),
[Google Summer of Code](https://developers.google.com/open-source/gsoc/),
and the
[MIT Externship program](https://alum.mit.edu/students/NetworkwithAlumni/ExternshipProgram). Zulip
also participates in
[Google Code-In](https://developers.google.com/open-source/gci/). More
information is available
[here](https://zulip.readthedocs.io/en/latest/overview/contributing.html#internship-programs).

You may also be interested in reading our [blog](https://blog.zulip.org/) or
following us on [Twitter](https://twitter.com/zulip).
You may also be interested in reading our [blog](http://blog.zulip.org/) or
following us on [twitter](https://twitter.com/zulip).
Zulip is distributed under the
[Apache 2.0](https://github.com/zulip/zulip/blob/master/LICENSE) license.
28
SECURITY.md
@@ -1,28 +0,0 @@
# Security policy

Security announcements are sent to zulip-announce@googlegroups.com,
so you should subscribe if you are running Zulip in production.

## Reporting a vulnerability

We love responsible reports of (potential) security issues in Zulip,
whether in the latest release or our development branch.

Our security contact is security@zulip.com. Reporters should expect a
response within 24 hours.

Please include details on the issue and how you'd like to be credited
in our release notes when we publish the fix.

Our [security
model](https://zulip.readthedocs.io/en/latest/production/security-model.html)
document may be a helpful resource.

## Supported versions

Zulip provides security support for the latest major release, in the
form of minor security/maintenance releases.

We work hard to make
[upgrades](https://zulip.readthedocs.io/en/latest/production/upgrade-or-modify.html#upgrading-to-a-release)
reliable, so that there's no reason to run older major releases.
191
Vagrantfile
vendored
@@ -7,15 +7,52 @@ def command?(name)
  $?.success?
end

if Vagrant::VERSION == "1.8.7"
  path = `which curl`
  if path.include?("/opt/vagrant/embedded/bin/curl")
    puts "In Vagrant 1.8.7, curl is broken. Please use Vagrant 2.0.2 " \
         "or run 'sudo rm -f /opt/vagrant/embedded/bin/curl' to fix the " \
         "issue before provisioning. See " \
         "https://github.com/mitchellh/vagrant/issues/7997 " \
         "for reference."
    exit
if Vagrant::VERSION == "1.8.7" then
  path = `which curl`
  if path.include?('/opt/vagrant/embedded/bin/curl') then
    puts "In Vagrant 1.8.7, curl is broken. Please use Vagrant 2.0.2 "\
         "or run 'sudo rm -f /opt/vagrant/embedded/bin/curl' to fix the "\
         "issue before provisioning. See "\
         "https://github.com/mitchellh/vagrant/issues/7997 "\
         "for reference."
    exit
  end
end

# Workaround: the lxc-config in vagrant-lxc is incompatible with changes in
# LXC 2.1.0, found in Ubuntu 17.10 artful. LXC 2.1.1 (in 18.04 LTS bionic)
# ignores the old config key, so this will only be needed for artful.
#
# vagrant-lxc upstream has an attempted fix:
# https://github.com/fgrehm/vagrant-lxc/issues/445
# but it didn't work in our testing. This is a temporary issue, so we just
# hack in a fix: we patch the skeleton `lxc-config` file right in the
# distribution of the vagrant-lxc "box" we use. If the user doesn't yet
# have the box (e.g. on first setup), Vagrant would download it but too
# late for us to patch it like this; so we prompt them to explicitly add it
# first and then rerun.
if ['up', 'provision'].include? ARGV[0]
  if command? "lxc-ls"
    LXC_VERSION = `lxc-ls --version`.strip unless defined? LXC_VERSION
    if LXC_VERSION == "2.1.0"
      lxc_config_file = ENV['HOME'] + "/.vagrant.d/boxes/fgrehm-VAGRANTSLASH-trusty64-lxc/1.2.0/lxc/lxc-config"
      if File.file?(lxc_config_file)
        lines = File.readlines(lxc_config_file)
        deprecated_line = "lxc.pivotdir = lxc_putold\n"
        if lines[1] == deprecated_line
          lines[1] = "# #{deprecated_line}"
          File.open(lxc_config_file, 'w') do |f|
            f.puts(lines)
          end
        end
      else
        puts 'You are running LXC 2.1.0, and fgrehm/trusty64-lxc box is incompatible '\
             "with it by default. First add the box by doing:\n"\
             " vagrant box add https://vagrantcloud.com/fgrehm/trusty64-lxc\n"\
             'Once this command succeeds, do "vagrant up" again.'
        exit
      end
    end
  end
end

@@ -26,59 +63,40 @@ end
# updating of boxes (since the old URL doesn't work). See
# https://github.com/hashicorp/vagrant/issues/9442
if Vagrant::DEFAULT_SERVER_URL == "atlas.hashicorp.com"
  Vagrant::DEFAULT_SERVER_URL.replace("https://vagrantcloud.com")
end

# Monkey patch https://github.com/hashicorp/vagrant/pull/10879 so we
# can fall back to another provider if docker is not installed.
begin
  require Vagrant.source_root.join("plugins", "providers", "docker", "provider")
rescue LoadError
else
  VagrantPlugins::DockerProvider::Provider.class_eval do
    method(:usable?).owner == singleton_class or def self.usable?(raise_error = false)
      VagrantPlugins::DockerProvider::Driver.new.execute("docker", "version")
      true
    rescue Vagrant::Errors::CommandUnavailable, VagrantPlugins::DockerProvider::Errors::ExecuteError
      raise if raise_error
      return false
    end
  end
  Vagrant::DEFAULT_SERVER_URL.replace('https://vagrantcloud.com')
end

Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|

  # For LXC. VirtualBox hosts use a different box, described below.
  config.vm.box = "fgrehm/trusty64-lxc"

  # The Zulip development environment runs on 9991 on the guest.
  host_port = 9991
  http_proxy = https_proxy = no_proxy = nil
  host_ip_addr = "127.0.0.1"

  # System settings for the virtual machine.
  vm_num_cpus = "2"
  vm_memory = "2048"

  ubuntu_mirror = ""
  vboxadd_version = nil

  config.vm.synced_folder ".", "/vagrant", disabled: true
  config.vm.synced_folder ".", "/srv/zulip"
  if (/darwin/ =~ RUBY_PLATFORM) != nil
    config.vm.synced_folder ".", "/srv/zulip", type: "nfs",
      linux__nfs_options: ['rw']
    config.vm.network "private_network", type: "dhcp"
  else
    config.vm.synced_folder ".", "/srv/zulip"
  end

  vagrant_config_file = ENV["HOME"] + "/.zulip-vagrant-config"
  vagrant_config_file = ENV['HOME'] + "/.zulip-vagrant-config"
  if File.file?(vagrant_config_file)
    IO.foreach(vagrant_config_file) do |line|
      line.chomp!
      key, value = line.split(nil, 2)
      case key
      when /^([#;]|$)/ # ignore comments
      when /^([#;]|$)/; # ignore comments
      when "HTTP_PROXY"; http_proxy = value
      when "HTTPS_PROXY"; https_proxy = value
      when "NO_PROXY"; no_proxy = value
      when "HOST_PORT"; host_port = value.to_i
      when "HOST_IP_ADDR"; host_ip_addr = value
      when "GUEST_CPUS"; vm_num_cpus = value
      when "GUEST_MEMORY_MB"; vm_memory = value
      when "UBUNTU_MIRROR"; ubuntu_mirror = value
      when "VBOXADD_VERSION"; vboxadd_version = value
      end
    end
  end
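Since the loop above splits each non-comment line into a whitespace-separated
key and value, a ~/.zulip-vagrant-config file would look something like this
(illustrative values):

    # Proxy and guest sizing for the Zulip development VM
    HTTP_PROXY http://proxy.example.com:8080
    HOST_PORT 9991
    GUEST_CPUS 4
    GUEST_MEMORY_MB 4096

Lines starting with # or ; (and blank lines) are skipped by the first when
clause.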
@@ -96,57 +114,46 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
  elsif !http_proxy.nil? or !https_proxy.nil?
    # This prints twice due to https://github.com/hashicorp/vagrant/issues/7504
    # We haven't figured out a workaround.
    puts "You have specified value for proxy in ~/.zulip-vagrant-config file but did not " \
         "install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install " \
         "vagrant-proxyconf` in a terminal. This error will appear twice."
    puts 'You have specified value for proxy in ~/.zulip-vagrant-config file but did not ' \
         'install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install ' \
         'vagrant-proxyconf` in a terminal. This error will appear twice.'
    exit
  end

  config.vm.network "forwarded_port", guest: 9991, host: host_port, host_ip: host_ip_addr
  config.vm.network "forwarded_port", guest: 9994, host: host_port + 3, host_ip: host_ip_addr
  # Specify Docker provider before VirtualBox provider so it's preferred.
  config.vm.provider "docker" do |d, override|
    d.build_dir = File.join(__dir__, "tools", "setup", "dev-vagrant-docker")
    d.build_args = ["--build-arg", "VAGRANT_UID=#{Process.uid}"]
    if !ubuntu_mirror.empty?
      d.build_args += ["--build-arg", "UBUNTU_MIRROR=#{ubuntu_mirror}"]
  # Specify LXC provider before VirtualBox provider so it's preferred.
  config.vm.provider "lxc" do |lxc|
    if command? "lxc-ls"
      LXC_VERSION = `lxc-ls --version`.strip unless defined? LXC_VERSION
      if LXC_VERSION >= "1.1.0" and LXC_VERSION < "3.0.0"
        # Allow start without AppArmor, otherwise Box will not Start on Ubuntu 14.10
        # see https://github.com/fgrehm/vagrant-lxc/issues/333
        lxc.customize 'aa_allow_incomplete', 1
      end
      if LXC_VERSION >= "3.0.0"
        lxc.customize 'apparmor.allow_incomplete', 1
      end
      if LXC_VERSION >= "2.0.0"
        lxc.backingstore = 'dir'
      end
    end
    d.has_ssh = true
    d.create_args = ["--ulimit", "nofile=1024:65536"]
  end

  config.vm.provider "virtualbox" do |vb, override|
    override.vm.box = "hashicorp/bionic64"
    override.vm.box = "ubuntu/trusty64"
    # It's possible we can get away with just 1.5GB; more testing needed
    vb.memory = vm_memory
    vb.cpus = vm_num_cpus

    if !vboxadd_version.nil?
      override.vbguest.installer = Class.new(VagrantVbguest::Installers::Ubuntu) do
        define_method(:host_version) do |reload = false|
          VagrantVbguest::Version(vboxadd_version)
        end
      end
      override.vbguest.allow_downgrade = true
      override.vbguest.iso_path = "https://download.virtualbox.org/virtualbox/#{vboxadd_version}/VBoxGuestAdditions_#{vboxadd_version}.iso"
    end
    vb.memory = 2048
    vb.cpus = 2
  end

  config.vm.provider "hyperv" do |h, override|
    override.vm.box = "bento/ubuntu-18.04"
    h.memory = vm_memory
    h.maxmemory = vm_memory
    h.cpus = vm_num_cpus
  config.vm.provider "vmware_fusion" do |vb, override|
    override.vm.box = "puphpet/ubuntu1404-x64"
    vb.vmx["memsize"] = "2048"
    vb.vmx["numvcpus"] = "2"
  end

  config.vm.provider "parallels" do |prl, override|
    override.vm.box = "bento/ubuntu-18.04"
    override.vm.box_version = "202005.21.0"
    prl.memory = vm_memory
    prl.cpus = vm_num_cpus
  end

$provision_script = <<SCRIPT
$provision_script = <<SCRIPT
set -x
set -e
set -o pipefail
@@ -155,15 +162,25 @@ set -o pipefail
# something that we don't want to happen when running provision in a
# development environment not using Vagrant.

# Set the Ubuntu mirror
[ ! '#{ubuntu_mirror}' ] || sudo sed -i 's|http://\\(\\w*\\.\\)*archive\\.ubuntu\\.com/ubuntu/\\? |#{ubuntu_mirror} |' /etc/apt/sources.list

# Set the MOTD on the system to have Zulip instructions
sudo ln -nsf /srv/zulip/tools/setup/dev-motd /etc/update-motd.d/99-zulip-dev
sudo rm -f /etc/update-motd.d/10-help-text
sudo dpkg --purge landscape-client landscape-common ubuntu-release-upgrader-core update-manager-core update-notifier-common ubuntu-server
sudo dpkg-divert --add --rename /etc/default/motd-news
sudo sh -c 'echo ENABLED=0 > /etc/default/motd-news'
sudo rm -f /etc/update-motd.d/*
sudo bash -c 'cat << EndOfMessage > /etc/motd
Welcome to the Zulip development environment! Popular commands:
* tools/provision - Update the development environment
* tools/run-dev.py - Run the development server
* tools/lint - Run the linter (quick and catches many problems)
* tools/test-* - Run tests (use --help to learn about options)

Read https://zulip.readthedocs.io/en/latest/testing/testing.html to learn
how to run individual test suites so that you can get a fast debug cycle.

EndOfMessage'

# If the host is running SELinux remount the /sys/fs/selinux directory as read only,
# needed for apt-get to work.
if [ -d "/sys/fs/selinux" ]; then
  sudo mount -o remount,ro /sys/fs/selinux
fi

# Set default locale, this prevents errors if the user has another locale set.
if ! grep -q 'LC_ALL=en_US.UTF-8' /etc/default/locale; then
@@ -185,7 +202,7 @@ if [ ! -w /srv/zulip ]; then
  # sudo is required since our uid is not 1000
  echo ' vagrant halt -f'
  echo ' rm -rf /PATH/TO/ZULIP/CLONE/.vagrant'
  echo ' sudo chown -R 1000:$(id -g) /PATH/TO/ZULIP/CLONE'
  echo ' sudo chown -R 1000:$(whoami) /PATH/TO/ZULIP/CLONE'
  echo "Replace /PATH/TO/ZULIP/CLONE with the path to where zulip code is cloned."
  echo "You can resume setting up your vagrant environment by running:"
  echo " vagrant up"
(File diff suppressed because it is too large.)
@@ -4,19 +4,11 @@ from typing import List

from analytics.lib.counts import CountStat


def generate_time_series_data(
    days: int = 100,
    business_hours_base: float = 10,
    non_business_hours_base: float = 10,
    growth: float = 1,
    autocorrelation: float = 0,
    spikiness: float = 1,
    holiday_rate: float = 0,
    frequency: str = CountStat.DAY,
    partial_sum: bool = False,
    random_seed: int = 26,
) -> List[int]:
def generate_time_series_data(days: int=100, business_hours_base: float=10,
                              non_business_hours_base: float=10, growth: float=1,
                              autocorrelation: float=0, spikiness: float=1,
                              holiday_rate: float=0, frequency: str=CountStat.DAY,
                              partial_sum: bool=False, random_seed: int=26) -> List[int]:
    """
    Generate semi-realistic looking time series data for testing analytics graphs.

@@ -37,43 +29,35 @@ def generate_time_series_data(
    random_seed -- Seed for random number generator.
    """
    if frequency == CountStat.HOUR:
        length = days * 24
        length = days*24
        seasonality = [non_business_hours_base] * 24 * 7
        for day in range(5):
            for hour in range(8):
                seasonality[24 * day + hour] = business_hours_base
        holidays = []
                seasonality[24*day + hour] = business_hours_base
        holidays = []
        for i in range(days):
            holidays.extend([random() < holiday_rate] * 24)
    elif frequency == CountStat.DAY:
        length = days
        seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [
            24 * non_business_hours_base
        ] * 2
        seasonality = [8*business_hours_base + 16*non_business_hours_base] * 5 + \
                      [24*non_business_hours_base] * 2
        holidays = [random() < holiday_rate for i in range(days)]
    else:
        raise AssertionError(f"Unknown frequency: {frequency}")
        raise AssertionError("Unknown frequency: %s" % (frequency,))
    if length < 2:
        raise AssertionError(
            f"Must be generating at least 2 data points. Currently generating {length}"
        )
    growth_base = growth ** (1.0 / (length - 1))
    values_no_noise = [
        seasonality[i % len(seasonality)] * (growth_base ** i) for i in range(length)
    ]
        raise AssertionError("Must be generating at least 2 data points. "
                             "Currently generating %s" % (length,))
    growth_base = growth ** (1. / (length-1))
    values_no_noise = [seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)]

    seed(random_seed)
    noise_scalars = [gauss(0, 1)]
    for i in range(1, length):
        noise_scalars.append(
            noise_scalars[-1] * autocorrelation + gauss(0, 1) * (1 - autocorrelation)
        )
        noise_scalars.append(noise_scalars[-1]*autocorrelation + gauss(0, 1)*(1-autocorrelation))

    values = [
        0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
        for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)
    ]
    values = [0 if holiday else int(v + sqrt(v)*noise_scalar*spikiness)
              for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)]
    if partial_sum:
        for i in range(1, length):
            values[i] = values[i - 1] + values[i]
            values[i] = values[i-1] + values[i]
    return [max(v, 0) for v in values]
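To make the knobs above concrete, a call like the following (illustrative, not
part of the diff) yields 30 daily values that follow the weekday/weekend
seasonality, roughly double over the period, and are zeroed on randomly chosen
holidays:

    data = generate_time_series_data(days=30, growth=2, holiday_rate=0.05)
    assert len(data) == 30 and all(v >= 0 for v in data)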
@@ -4,14 +4,12 @@ from typing import List, Optional
from analytics.lib.counts import CountStat
from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC


# If min_length is None, returns end_times from ceiling(start) to floor(end), inclusive.
# If min_length is greater than 0, pads the list to the left.
# So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22],
# and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
def time_range(
    start: datetime, end: datetime, frequency: str, min_length: Optional[int]
) -> List[datetime]:
def time_range(start: datetime, end: datetime, frequency: str,
               min_length: Optional[int]) -> List[datetime]:
    verify_UTC(start)
    verify_UTC(end)
    if frequency == CountStat.HOUR:
@@ -21,11 +19,11 @@ def time_range(
        end = floor_to_day(end)
        step = timedelta(days=1)
    else:
        raise AssertionError(f"Unknown frequency: {frequency}")
        raise AssertionError("Unknown frequency: %s" % (frequency,))

    times = []
    if min_length is not None:
        start = min(start, end - (min_length - 1) * step)
        start = min(start, end - (min_length-1)*step)
    current = end
    while current >= start:
        times.append(current)
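Connecting the comment to the padding logic: with day frequency, end = Sep 22,
and min_length = 5, the line start = min(start, end - (min_length - 1) * step)
computes min(Sep 20, Sep 22 - 4 days) = Sep 18, which is exactly how the
[Sep 18 .. Sep 22] example above ends up with five entries.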
81
analytics/management/commands/analyze_mit.py
Normal file
@@ -0,0 +1,81 @@
import datetime
import logging
import time
from typing import Any, Dict

from django.core.management.base import BaseCommand, CommandParser

from zerver.lib.timestamp import timestamp_to_datetime
from zerver.models import Message, Recipient

def compute_stats(log_level: int) -> None:
    logger = logging.getLogger()
    logger.setLevel(log_level)

    one_week_ago = timestamp_to_datetime(time.time()) - datetime.timedelta(weeks=1)
    mit_query = Message.objects.filter(sender__realm__string_id="zephyr",
                                       recipient__type=Recipient.STREAM,
                                       pub_date__gt=one_week_ago)
    for bot_sender_start in ["imap.", "rcmd.", "sys."]:
        mit_query = mit_query.exclude(sender__email__startswith=(bot_sender_start))
    # Filtering for "/" covers tabbott/extra@ and all the daemon/foo bots.
    mit_query = mit_query.exclude(sender__email__contains=("/"))
    mit_query = mit_query.exclude(sender__email__contains=("aim.com"))
    mit_query = mit_query.exclude(
        sender__email__in=["rss@mit.edu", "bash@mit.edu", "apache@mit.edu",
                           "bitcoin@mit.edu", "lp@mit.edu", "clocks@mit.edu",
                           "root@mit.edu", "nagios@mit.edu",
                           "www-data|local-realm@mit.edu"])
    user_counts = {}  # type: Dict[str, Dict[str, int]]
    for m in mit_query.select_related("sending_client", "sender"):
        email = m.sender.email
        user_counts.setdefault(email, {})
        user_counts[email].setdefault(m.sending_client.name, 0)
        user_counts[email][m.sending_client.name] += 1

    total_counts = {}  # type: Dict[str, int]
    total_user_counts = {}  # type: Dict[str, int]
    for email, counts in user_counts.items():
        total_user_counts.setdefault(email, 0)
        for client_name, count in counts.items():
            total_counts.setdefault(client_name, 0)
            total_counts[client_name] += count
            total_user_counts[email] += count

    logging.debug("%40s | %10s | %s" % ("User", "Messages", "Percentage Zulip"))
    top_percents = {}  # type: Dict[int, float]
    for size in [10, 25, 50, 100, 200, len(total_user_counts.keys())]:
        top_percents[size] = 0.0
    for i, email in enumerate(sorted(total_user_counts.keys(),
                                     key=lambda x: -total_user_counts[x])):
        percent_zulip = round(100 - (user_counts[email].get("zephyr_mirror", 0)) * 100. /
                              total_user_counts[email], 1)
        for size in top_percents.keys():
            top_percents.setdefault(size, 0)
            if i < size:
                top_percents[size] += (percent_zulip * 1.0 / size)

        logging.debug("%40s | %10s | %s%%" % (email, total_user_counts[email],
                                              percent_zulip))

    logging.info("")
    for size in sorted(top_percents.keys()):
        logging.info("Top %6s | %s%%" % (size, round(top_percents[size], 1)))

    grand_total = sum(total_counts.values())
    print(grand_total)
    logging.info("%15s | %s" % ("Client", "Percentage"))
    for client in total_counts.keys():
        logging.info("%15s | %s%%" % (client, round(100. * total_counts[client] / grand_total, 1)))

class Command(BaseCommand):
    help = "Compute statistics on MIT Zephyr usage."

    def add_arguments(self, parser: CommandParser) -> None:
        parser.add_argument('--verbose', default=False, action='store_true')

    def handle(self, *args: Any, **options: Any) -> None:
        level = logging.INFO
        if options["verbose"]:
            level = logging.DEBUG
        compute_stats(level)
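A worked example of the percent_zulip computation above (illustrative
numbers): a user with 100 counted messages, 80 of them sent via the
zephyr_mirror client, gets

    percent_zulip = round(100 - 80 * 100. / 100, 1)  # == 20.0

that is, 20% of their traffic came from Zulip clients rather than the Zephyr
mirror.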
56
analytics/management/commands/analyze_user_activity.py
Normal file
@@ -0,0 +1,56 @@
import datetime
from typing import Any, Dict

from django.core.management.base import BaseCommand, CommandParser
from django.utils.timezone import utc

from zerver.lib.statistics import seconds_usage_between
from zerver.models import UserProfile

def analyze_activity(options: Dict[str, Any]) -> None:
    day_start = datetime.datetime.strptime(options["date"], "%Y-%m-%d").replace(tzinfo=utc)
    day_end = day_start + datetime.timedelta(days=options["duration"])

    user_profile_query = UserProfile.objects.all()
    if options["realm"]:
        user_profile_query = user_profile_query.filter(realm__string_id=options["realm"])

    print("Per-user online duration:\n")
    total_duration = datetime.timedelta(0)
    for user_profile in user_profile_query:
        duration = seconds_usage_between(user_profile, day_start, day_end)

        if duration == datetime.timedelta(0):
            continue

        total_duration += duration
        print("%-*s%s" % (37, user_profile.email, duration,))

    print("\nTotal Duration: %s" % (total_duration,))
    print("\nTotal Duration in minutes: %s" % (total_duration.total_seconds() / 60.,))
    print("Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,))

class Command(BaseCommand):
    help = """Report analytics of user activity on a per-user and realm basis.

This command aggregates user activity data that is collected by each user using Zulip. It attempts
to approximate how much each user has been using Zulip per day, measured by recording each 15 minute
period where some activity has occurred (mouse move or keyboard activity).

It will correctly not count server-initiated reloads in the activity statistics.

The duration flag can be used to control how many days to show usage duration for

Usage: ./manage.py analyze_user_activity [--realm=zulip] [--date=2013-09-10] [--duration=1]

By default, if no date is selected 2013-09-10 is used. If no realm is provided, information
is shown for all realms"""

    def add_arguments(self, parser: CommandParser) -> None:
        parser.add_argument('--realm', action='store')
        parser.add_argument('--date', action='store', default="2013-09-06")
        parser.add_argument('--duration', action='store', default=1, type=int,
                            help="How many days to show usage information for")

    def handle(self, *args: Any, **options: Any) -> None:
        analyze_activity(options)
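One detail worth spelling out: "Total Duration amortized to a month"
multiplies the measured seconds by 30 and divides by 60, so it is a
minutes-per-month figure that assumes the default --duration of a single day.
For example, 2 hours of measured activity in one day prints as
7200 * 30 / 60 = 3600 minutes per month.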
analytics/management/commands/check_analytics_state.py
@@ -1,24 +1,29 @@
-import os
-import time
-from argparse import ArgumentParser
 from datetime import timedelta
-from typing import Any, Dict
 
 from django.core.management.base import BaseCommand
 from django.utils.timezone import now as timezone_now
 
 from analytics.lib.counts import COUNT_STATS, CountStat
-from analytics.models import installation_epoch
-from zerver.lib.timestamp import TimezoneNotUTCException, floor_to_day, floor_to_hour, verify_UTC
+from analytics.models import InstallationCount, installation_epoch, \
+    last_successful_fill
+from zerver.lib.timestamp import floor_to_hour, floor_to_day, verify_UTC, \
+    TimezoneNotUTCException
 from zerver.models import Realm
 
+import os
+import subprocess
+import sys
+import time
+from typing import Any, Dict
+
 states = {
     0: "OK",
     1: "WARNING",
     2: "CRITICAL",
-    3: "UNKNOWN",
+    3: "UNKNOWN"
 }
 
 
 class Command(BaseCommand):
     help = """Checks FillState table.
 
@@ -26,30 +31,31 @@ class Command(BaseCommand):
 
     def handle(self, *args: Any, **options: Any) -> None:
         fill_state = self.get_fill_state()
-        status = fill_state["status"]
-        message = fill_state["message"]
+        status = fill_state['status']
+        message = fill_state['message']
 
         state_file_path = "/var/lib/nagios_state/check-analytics-state"
         state_file_tmp = state_file_path + "-tmp"
 
         with open(state_file_tmp, "w") as f:
-            f.write(f"{int(time.time())}|{status}|{states[status]}|{message}\n")
-        os.rename(state_file_tmp, state_file_path)
+            f.write("%s|%s|%s|%s\n" % (
+                int(time.time()), status, states[status], message))
+        subprocess.check_call(["mv", state_file_tmp, state_file_path])
 
     def get_fill_state(self) -> Dict[str, Any]:
         if not Realm.objects.exists():
-            return {"status": 0, "message": "No realms exist, so not checking FillState."}
+            return {'status': 0, 'message': 'No realms exist, so not checking FillState.'}
 
         warning_unfilled_properties = []
         critical_unfilled_properties = []
         for property, stat in COUNT_STATS.items():
-            last_fill = stat.last_successful_fill()
+            last_fill = last_successful_fill(property)
            if last_fill is None:
                 last_fill = installation_epoch()
             try:
                 verify_UTC(last_fill)
             except TimezoneNotUTCException:
-                return {"status": 2, "message": f"FillState not in UTC for {property}"}
+                return {'status': 2, 'message': 'FillState not in UTC for %s' % (property,)}
 
             if stat.frequency == CountStat.DAY:
                 floor_function = floor_to_day
@@ -61,10 +67,8 @@ class Command(BaseCommand):
             critical_threshold = timedelta(minutes=150)
 
             if floor_function(last_fill) != last_fill:
-                return {
-                    "status": 2,
-                    "message": f"FillState not on {stat.frequency} boundary for {property}",
-                }
+                return {'status': 2, 'message': 'FillState not on %s boundary for %s' %
+                        (stat.frequency, property)}
 
             time_to_last_fill = timezone_now() - last_fill
             if time_to_last_fill > critical_threshold:
@@ -73,18 +77,9 @@ class Command(BaseCommand):
                 warning_unfilled_properties.append(property)
 
         if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0:
-            return {"status": 0, "message": "FillState looks fine."}
+            return {'status': 0, 'message': 'FillState looks fine.'}
         if len(critical_unfilled_properties) == 0:
-            return {
-                "status": 1,
-                "message": "Missed filling {} once.".format(
-                    ", ".join(warning_unfilled_properties),
-                ),
-            }
-        return {
-            "status": 2,
-            "message": "Missed filling {} once. Missed filling {} at least twice.".format(
-                ", ".join(warning_unfilled_properties),
-                ", ".join(critical_unfilled_properties),
-            ),
-        }
+            return {'status': 1, 'message': 'Missed filling %s once.' %
+                    (', '.join(warning_unfilled_properties),)}
+        return {'status': 2, 'message': 'Missed filling %s once. Missed filling %s at least twice.' %
+                (', '.join(warning_unfilled_properties), ', '.join(critical_unfilled_properties))}
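Both versions write the same pipe-delimited state line for Nagios; a hedged sketch of reading it back (pure stdlib, path and format taken from the code above):

import time

# The file contains "<epoch>|<status>|<state-name>|<message>".
with open("/var/lib/nagios_state/check-analytics-state") as f:
    epoch, status, state_name, message = f.read().strip().split("|", 3)
print(f"{state_name} ({message}); written {int(time.time()) - int(epoch)}s ago")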
analytics/management/commands/clear_analytics_tables.py
@@ -1,21 +1,22 @@
+import sys
 from argparse import ArgumentParser
 from typing import Any
 
-from django.core.management.base import BaseCommand, CommandError
+from django.core.management.base import BaseCommand
 
 from analytics.lib.counts import do_drop_all_analytics_tables
 
 
 class Command(BaseCommand):
     help = """Clear analytics tables."""
 
     def add_arguments(self, parser: ArgumentParser) -> None:
-        parser.add_argument("--force", action="store_true", help="Clear analytics tables.")
+        parser.add_argument('--force',
+                            action='store_true',
+                            help="Clear analytics tables.")
 
     def handle(self, *args: Any, **options: Any) -> None:
-        if options["force"]:
+        if options['force']:
             do_drop_all_analytics_tables()
         else:
-            raise CommandError(
-                "Would delete all data from analytics tables (!); use --force to do so."
-            )
+            print("Would delete all data from analytics tables (!); use --force to do so.")
+            sys.exit(1)
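Either variant can be invoked programmatically; a minimal sketch assuming a configured Zulip Django environment:

from django.core.management import call_command

# Without force=True, the command refuses to act (CommandError in the new
# version, a printed warning plus exit(1) in the old one).
call_command("clear_analytics_tables", force=True)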
analytics/management/commands/clear_single_stat.py
@@ -1,23 +1,29 @@
+import sys
 from argparse import ArgumentParser
 from typing import Any
 
-from django.core.management.base import BaseCommand, CommandError
+from django.core.management.base import BaseCommand
 
 from analytics.lib.counts import COUNT_STATS, do_drop_single_stat
 
 
 class Command(BaseCommand):
     help = """Clear analytics tables."""
 
     def add_arguments(self, parser: ArgumentParser) -> None:
-        parser.add_argument("--force", action="store_true", help="Actually do it.")
-        parser.add_argument("--property", help="The property of the stat to be cleared.")
+        parser.add_argument('--force',
+                            action='store_true',
+                            help="Actually do it.")
+        parser.add_argument('--property',
+                            type=str,
+                            help="The property of the stat to be cleared.")
 
     def handle(self, *args: Any, **options: Any) -> None:
-        property = options["property"]
+        property = options['property']
         if property not in COUNT_STATS:
-            raise CommandError(f"Invalid property: {property}")
-        if not options["force"]:
-            raise CommandError("No action taken. Use --force.")
+            print("Invalid property: %s" % (property,))
+            sys.exit(1)
+        if not options['force']:
+            print("No action taken. Use --force.")
+            sys.exit(1)
 
         do_drop_single_stat(property)
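The same call_command pattern works here; --property must be a key of COUNT_STATS (the stat name below appears in populate_analytics_db in both versions):

from django.core.management import call_command

call_command("clear_single_stat", property="messages_sent:is_bot:hour", force=True)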
analytics/management/commands/client_activity.py (new file, 73 lines)
@@ -0,0 +1,73 @@
import datetime
from argparse import ArgumentParser
from typing import Any

from django.db.models import Count, QuerySet
from django.utils.timezone import now as timezone_now

from zerver.lib.management import ZulipBaseCommand
from zerver.models import UserActivity

class Command(ZulipBaseCommand):
    help = """Report rough client activity globally, for a realm, or for a user

Usage examples:

./manage.py client_activity --target server
./manage.py client_activity --target realm --realm zulip
./manage.py client_activity --target user --user hamlet@zulip.com --realm zulip"""

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument('--target', dest='target', required=True, type=str,
                            help="'server' will calculate client activity of the entire server. "
                                 "'realm' will calculate client activity of realm. "
                                 "'user' will calculate client activity of the user.")
        parser.add_argument('--user', dest='user', type=str,
                            help="The email address of the user you want to calculate activity.")
        self.add_realm_args(parser)

    def compute_activity(self, user_activity_objects: QuerySet) -> None:
        # Report data from the past week.
        #
        # This is a rough report of client activity because we inconsistently
        # register activity from various clients; think of it as telling you
        # approximately how many people from a group have used a particular
        # client recently. For example, this might be useful to get a sense of
        # how popular different versions of a desktop client are.
        #
        # Importantly, this does NOT tell you anything about the relative
        # volumes of requests from clients.
        threshold = timezone_now() - datetime.timedelta(days=7)
        client_counts = user_activity_objects.filter(
            last_visit__gt=threshold).values("client__name").annotate(
            count=Count('client__name'))

        total = 0
        counts = []
        for client_type in client_counts:
            count = client_type["count"]
            client = client_type["client__name"]
            total += count
            counts.append((count, client))

        counts.sort()

        for count in counts:
            print("%25s %15d" % (count[1], count[0]))
        print("Total:", total)

    def handle(self, *args: Any, **options: str) -> None:
        realm = self.get_realm(options)
        if options["user"] is None:
            if options["target"] == "server" and realm is None:
                # Report global activity.
                self.compute_activity(UserActivity.objects.all())
            elif options["target"] == "realm" and realm is not None:
                self.compute_activity(UserActivity.objects.filter(user_profile__realm=realm))
            else:
                self.print_help("./manage.py", "client_activity")
        elif options["target"] == "user":
            user_profile = self.get_user(options["user"], realm)
            self.compute_activity(UserActivity.objects.filter(user_profile=user_profile))
        else:
            self.print_help("./manage.py", "client_activity")
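The values(...).annotate(Count(...)) chain in compute_activity is Django's group-by idiom; a standalone sketch of the same query, assuming Zulip's models are importable:

import datetime

from django.db.models import Count
from django.utils.timezone import now as timezone_now

from zerver.models import UserActivity

# Group the past week's UserActivity rows by client name and count each group.
threshold = timezone_now() - datetime.timedelta(days=7)
rows = (UserActivity.objects.filter(last_visit__gt=threshold)
        .values("client__name")
        .annotate(count=Count("client__name")))
for row in rows:
    print(row["client__name"], row["count"])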
analytics/management/commands/populate_analytics_db.py
@@ -1,26 +1,18 @@
-from datetime import timedelta
-from typing import Any, Dict, List, Mapping, Optional, Type
-from unittest import mock
+from datetime import datetime, timedelta
+from typing import Any, Dict, List, Mapping, Optional, Type, Union
 
 from django.core.management.base import BaseCommand
 from django.utils.timezone import now as timezone_now
 
-from analytics.lib.counts import COUNT_STATS, CountStat, do_drop_all_analytics_tables
+from analytics.lib.counts import COUNT_STATS, \
+    CountStat, do_drop_all_analytics_tables
 from analytics.lib.fixtures import generate_time_series_data
 from analytics.lib.time_utils import time_range
-from analytics.models import (
-    BaseCount,
-    FillState,
-    InstallationCount,
-    RealmCount,
-    StreamCount,
-    UserCount,
-)
-from zerver.lib.actions import STREAM_ASSIGNMENT_COLORS, do_change_user_role, do_create_realm
-from zerver.lib.create_user import create_user
+from analytics.models import BaseCount, FillState, RealmCount, UserCount, StreamCount
 from zerver.lib.timestamp import floor_to_day
-from zerver.models import Client, Realm, Recipient, Stream, Subscription, UserProfile
+from zerver.models import Realm, UserProfile, Stream, Message, Client, \
+    RealmAuditLog, Recipient
 
 
 class Command(BaseCommand):
     help = """Populates analytics tables with randomly generated data."""
@@ -28,273 +20,131 @@ class Command(BaseCommand):
     DAYS_OF_DATA = 100
     random_seed = 26
 
-    def generate_fixture_data(
-        self,
-        stat: CountStat,
-        business_hours_base: float,
-        non_business_hours_base: float,
-        growth: float,
-        autocorrelation: float,
-        spikiness: float,
-        holiday_rate: float = 0,
-        partial_sum: bool = False,
-    ) -> List[int]:
+    def create_user(self, email: str,
+                    full_name: str,
+                    is_staff: bool,
+                    date_joined: datetime,
+                    realm: Realm) -> UserProfile:
+        user = UserProfile.objects.create(
+            email=email, full_name=full_name, is_staff=is_staff,
+            realm=realm, short_name=full_name, pointer=-1, last_pointer_updater='none',
+            api_key='42', date_joined=date_joined)
+        RealmAuditLog.objects.create(
+            realm=realm, modified_user=user, event_type='user_created',
+            event_time=user.date_joined)
+        return user
+
+    def generate_fixture_data(self, stat: CountStat, business_hours_base: float,
+                              non_business_hours_base: float, growth: float,
+                              autocorrelation: float, spikiness: float,
+                              holiday_rate: float=0, partial_sum: bool=False) -> List[int]:
         self.random_seed += 1
         return generate_time_series_data(
-            days=self.DAYS_OF_DATA,
-            business_hours_base=business_hours_base,
-            non_business_hours_base=non_business_hours_base,
-            growth=growth,
-            autocorrelation=autocorrelation,
-            spikiness=spikiness,
-            holiday_rate=holiday_rate,
-            frequency=stat.frequency,
-            partial_sum=partial_sum,
-            random_seed=self.random_seed,
-        )
+            days=self.DAYS_OF_DATA, business_hours_base=business_hours_base,
+            non_business_hours_base=non_business_hours_base, growth=growth,
+            autocorrelation=autocorrelation, spikiness=spikiness, holiday_rate=holiday_rate,
+            frequency=stat.frequency, partial_sum=partial_sum, random_seed=self.random_seed)
 
     def handle(self, *args: Any, **options: Any) -> None:
         # TODO: This should arguably only delete the objects
         # associated with the "analytics" realm.
         do_drop_all_analytics_tables()
 
-        # This also deletes any objects with this realm as a foreign key
-        Realm.objects.filter(string_id="analytics").delete()
-
-        # Because we just deleted a bunch of objects in the database
-        # directly (rather than deleting individual objects in Django,
-        # in which case our post_save hooks would have flushed the
-        # individual objects from memcached for us), we need to flush
-        # memcached in order to ensure deleted objects aren't still
-        # present in the memcached cache.
-        from zerver.apps import flush_cache
-
-        flush_cache(None)
+        # I believe this also deletes any objects with this realm as a foreign key
+        Realm.objects.filter(string_id='analytics').delete()
 
         installation_time = timezone_now() - timedelta(days=self.DAYS_OF_DATA)
         last_end_time = floor_to_day(timezone_now())
-        realm = do_create_realm(
-            string_id="analytics", name="Analytics", date_created=installation_time
-        )
-
-        with mock.patch("zerver.lib.create_user.timezone_now", return_value=installation_time):
-            shylock = create_user(
-                "shylock@analytics.ds",
-                "Shylock",
-                realm,
-                full_name="Shylock",
-                role=UserProfile.ROLE_REALM_OWNER,
-            )
-        do_change_user_role(shylock, UserProfile.ROLE_REALM_OWNER, acting_user=None)
-        stream = Stream.objects.create(name="all", realm=realm, date_created=installation_time)
-        recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
-        stream.recipient = recipient
-        stream.save(update_fields=["recipient"])
-
-        # Subscribe shylock to the stream to avoid invariant failures.
-        # TODO: This should use subscribe_users_to_streams from populate_db.
-        subs = [
-            Subscription(
-                recipient=recipient,
-                user_profile=shylock,
-                is_user_active=shylock.is_active,
-                color=STREAM_ASSIGNMENT_COLORS[0],
-            ),
-        ]
-        Subscription.objects.bulk_create(subs)
-
-        def insert_fixture_data(
-            stat: CountStat, fixture_data: Mapping[Optional[str], List[int]], table: Type[BaseCount]
-        ) -> None:
-            end_times = time_range(
-                last_end_time, last_end_time, stat.frequency, len(list(fixture_data.values())[0])
-            )
-            if table == InstallationCount:
-                id_args: Dict[str, Any] = {}
+        realm = Realm.objects.create(
+            string_id='analytics', name='Analytics', date_created=installation_time)
+        shylock = self.create_user('shylock@analytics.ds', 'Shylock', True, installation_time, realm)
+        stream = Stream.objects.create(
+            name='all', realm=realm, date_created=installation_time)
+        Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
+
+        def insert_fixture_data(stat: CountStat,
+                                fixture_data: Mapping[Optional[str], List[int]],
+                                table: Type[BaseCount]) -> None:
+            end_times = time_range(last_end_time, last_end_time, stat.frequency,
+                                   len(list(fixture_data.values())[0]))
             if table == RealmCount:
-                id_args = {"realm": realm}
+                id_args = {'realm': realm}
             if table == UserCount:
-                id_args = {"realm": realm, "user": shylock}
+                id_args = {'realm': realm, 'user': shylock}
             if table == StreamCount:
-                id_args = {"stream": stream, "realm": realm}
+                id_args = {'stream': stream, 'realm': realm}
 
             for subgroup, values in fixture_data.items():
-                table.objects.bulk_create(
-                    table(
-                        property=stat.property,
-                        subgroup=subgroup,
-                        end_time=end_time,
-                        value=value,
-                        **id_args,
-                    )
-                    for end_time, value in zip(end_times, values)
-                    if value != 0
-                )
+                table.objects.bulk_create([
+                    table(property=stat.property, subgroup=subgroup, end_time=end_time,
+                          value=value, **id_args)
+                    for end_time, value in zip(end_times, values) if value != 0])
 
-        stat = COUNT_STATS["1day_actives::day"]
-        realm_data: Mapping[Optional[str], List[int]] = {
-            None: self.generate_fixture_data(stat, 0.08, 0.02, 3, 0.3, 6, partial_sum=True),
-        }
         insert_fixture_data(stat, realm_data, RealmCount)
-        installation_data: Mapping[Optional[str], List[int]] = {
-            None: self.generate_fixture_data(stat, 0.8, 0.2, 4, 0.3, 6, partial_sum=True),
-        }
         insert_fixture_data(stat, installation_data, InstallationCount)
-        FillState.objects.create(
-            property=stat.property, end_time=last_end_time, state=FillState.DONE
-        )
-
-        stat = COUNT_STATS["7day_actives::day"]
+        stat = COUNT_STATS['realm_active_humans::day']
         realm_data = {
-            None: self.generate_fixture_data(stat, 0.2, 0.07, 3, 0.3, 6, partial_sum=True),
-        }
+            None: self.generate_fixture_data(stat, .1, .03, 3, .5, 3, partial_sum=True),
+        }  # type: Mapping[Optional[str], List[int]]
         insert_fixture_data(stat, realm_data, RealmCount)
-        installation_data = {
-            None: self.generate_fixture_data(stat, 2, 0.7, 4, 0.3, 6, partial_sum=True),
-        }
         insert_fixture_data(stat, installation_data, InstallationCount)
-        FillState.objects.create(
-            property=stat.property, end_time=last_end_time, state=FillState.DONE
-        )
-
-        stat = COUNT_STATS["realm_active_humans::day"]
-        realm_data = {
-            None: self.generate_fixture_data(stat, 0.8, 0.08, 3, 0.5, 3, partial_sum=True),
-        }
-        insert_fixture_data(stat, realm_data, RealmCount)
-        installation_data = {
-            None: self.generate_fixture_data(stat, 1, 0.3, 4, 0.5, 3, partial_sum=True),
-        }
-        insert_fixture_data(stat, installation_data, InstallationCount)
-        FillState.objects.create(
-            property=stat.property, end_time=last_end_time, state=FillState.DONE
-        )
-
-        stat = COUNT_STATS["active_users_audit:is_bot:day"]
-        realm_data = {
-            "false": self.generate_fixture_data(stat, 1, 0.2, 3.5, 0.8, 2, partial_sum=True),
-            "true": self.generate_fixture_data(stat, 0.3, 0.05, 3, 0.3, 2, partial_sum=True),
-        }
-        insert_fixture_data(stat, realm_data, RealmCount)
-        installation_data = {
-            "false": self.generate_fixture_data(stat, 3, 1, 4, 0.8, 2, partial_sum=True),
-            "true": self.generate_fixture_data(stat, 1, 0.4, 4, 0.8, 2, partial_sum=True),
-        }
-        insert_fixture_data(stat, installation_data, InstallationCount)
-        FillState.objects.create(
-            property=stat.property, end_time=last_end_time, state=FillState.DONE
-        )
-
-        stat = COUNT_STATS["messages_sent:is_bot:hour"]
-        user_data: Mapping[Optional[str], List[int]] = {
-            "false": self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8, holiday_rate=0.1),
-        }
+        FillState.objects.create(property=stat.property, end_time=last_end_time,
+                                 state=FillState.DONE)
+
+        stat = COUNT_STATS['messages_sent:is_bot:hour']
+        user_data = {'false': self.generate_fixture_data(
+            stat, 2, 1, 1.5, .6, 8, holiday_rate=.1)}  # type: Mapping[Optional[str], List[int]]
         insert_fixture_data(stat, user_data, UserCount)
-        realm_data = {
-            "false": self.generate_fixture_data(stat, 35, 15, 6, 0.6, 4),
-            "true": self.generate_fixture_data(stat, 15, 15, 3, 0.4, 2),
-        }
+        realm_data = {'false': self.generate_fixture_data(stat, 35, 15, 6, .6, 4),
+                      'true': self.generate_fixture_data(stat, 15, 15, 3, .4, 2)}
         insert_fixture_data(stat, realm_data, RealmCount)
-        installation_data = {
-            "false": self.generate_fixture_data(stat, 350, 150, 6, 0.6, 4),
-            "true": self.generate_fixture_data(stat, 150, 150, 3, 0.4, 2),
-        }
-        insert_fixture_data(stat, installation_data, InstallationCount)
-        FillState.objects.create(
-            property=stat.property, end_time=last_end_time, state=FillState.DONE
-        )
+        FillState.objects.create(property=stat.property, end_time=last_end_time,
+                                 state=FillState.DONE)
 
-        stat = COUNT_STATS["messages_sent:message_type:day"]
+        stat = COUNT_STATS['messages_sent:message_type:day']
         user_data = {
-            "public_stream": self.generate_fixture_data(stat, 1.5, 1, 3, 0.6, 8),
-            "private_message": self.generate_fixture_data(stat, 0.5, 0.3, 1, 0.6, 8),
-            "huddle_message": self.generate_fixture_data(stat, 0.2, 0.2, 2, 0.6, 8),
-        }
+            'public_stream': self.generate_fixture_data(stat, 1.5, 1, 3, .6, 8),
+            'private_message': self.generate_fixture_data(stat, .5, .3, 1, .6, 8),
+            'huddle_message': self.generate_fixture_data(stat, .2, .2, 2, .6, 8)}
         insert_fixture_data(stat, user_data, UserCount)
         realm_data = {
-            "public_stream": self.generate_fixture_data(stat, 30, 8, 5, 0.6, 4),
-            "private_stream": self.generate_fixture_data(stat, 7, 7, 5, 0.6, 4),
-            "private_message": self.generate_fixture_data(stat, 13, 5, 5, 0.6, 4),
-            "huddle_message": self.generate_fixture_data(stat, 6, 3, 3, 0.6, 4),
-        }
+            'public_stream': self.generate_fixture_data(stat, 30, 8, 5, .6, 4),
+            'private_stream': self.generate_fixture_data(stat, 7, 7, 5, .6, 4),
+            'private_message': self.generate_fixture_data(stat, 13, 5, 5, .6, 4),
+            'huddle_message': self.generate_fixture_data(stat, 6, 3, 3, .6, 4)}
         insert_fixture_data(stat, realm_data, RealmCount)
-        installation_data = {
-            "public_stream": self.generate_fixture_data(stat, 300, 80, 5, 0.6, 4),
-            "private_stream": self.generate_fixture_data(stat, 70, 70, 5, 0.6, 4),
-            "private_message": self.generate_fixture_data(stat, 130, 50, 5, 0.6, 4),
-            "huddle_message": self.generate_fixture_data(stat, 60, 30, 3, 0.6, 4),
-        }
-        insert_fixture_data(stat, installation_data, InstallationCount)
-        FillState.objects.create(
-            property=stat.property, end_time=last_end_time, state=FillState.DONE
-        )
+        FillState.objects.create(property=stat.property, end_time=last_end_time,
+                                 state=FillState.DONE)
 
-        website, created = Client.objects.get_or_create(name="website")
-        old_desktop, created = Client.objects.get_or_create(name="desktop app Linux 0.3.7")
-        android, created = Client.objects.get_or_create(name="ZulipAndroid")
-        iOS, created = Client.objects.get_or_create(name="ZulipiOS")
-        react_native, created = Client.objects.get_or_create(name="ZulipMobile")
-        API, created = Client.objects.get_or_create(name="API: Python")
-        zephyr_mirror, created = Client.objects.get_or_create(name="zephyr_mirror")
-        unused, created = Client.objects.get_or_create(name="unused")
-        long_webhook, created = Client.objects.get_or_create(name="ZulipLooooooooooongNameWebhook")
+        website, created = Client.objects.get_or_create(name='website')
+        old_desktop, created = Client.objects.get_or_create(name='desktop app Linux 0.3.7')
+        android, created = Client.objects.get_or_create(name='ZulipAndroid')
+        iOS, created = Client.objects.get_or_create(name='ZulipiOS')
+        react_native, created = Client.objects.get_or_create(name='ZulipMobile')
+        API, created = Client.objects.get_or_create(name='API: Python')
+        zephyr_mirror, created = Client.objects.get_or_create(name='zephyr_mirror')
+        unused, created = Client.objects.get_or_create(name='unused')
+        long_webhook, created = Client.objects.get_or_create(name='ZulipLooooooooooongNameWebhook')
 
-        stat = COUNT_STATS["messages_sent:client:day"]
+        stat = COUNT_STATS['messages_sent:client:day']
         user_data = {
-            website.id: self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8),
-            zephyr_mirror.id: self.generate_fixture_data(stat, 0, 0.3, 1.5, 0.6, 8),
-        }
+            website.id: self.generate_fixture_data(stat, 2, 1, 1.5, .6, 8),
+            zephyr_mirror.id: self.generate_fixture_data(stat, 0, .3, 1.5, .6, 8)}
         insert_fixture_data(stat, user_data, UserCount)
         realm_data = {
-            website.id: self.generate_fixture_data(stat, 30, 20, 5, 0.6, 3),
-            old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, 0.6, 3),
-            android.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
-            iOS.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
-            react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
-            API.id: self.generate_fixture_data(stat, 5, 5, 5, 0.6, 3),
-            zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, 0.6, 3),
+            website.id: self.generate_fixture_data(stat, 30, 20, 5, .6, 3),
+            old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, .6, 3),
+            android.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3),
+            iOS.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3),
+            react_native.id: self.generate_fixture_data(stat, 5, 5, 10, .6, 3),
+            API.id: self.generate_fixture_data(stat, 5, 5, 5, .6, 3),
+            zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, .6, 3),
             unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
-            long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
-        }
+            long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3)}
         insert_fixture_data(stat, realm_data, RealmCount)
-        installation_data = {
-            website.id: self.generate_fixture_data(stat, 300, 200, 5, 0.6, 3),
-            old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, 0.6, 3),
-            android.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
-            iOS.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
-            react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
-            API.id: self.generate_fixture_data(stat, 50, 50, 5, 0.6, 3),
-            zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, 0.6, 3),
-            unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
-            long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
-        }
-        insert_fixture_data(stat, installation_data, InstallationCount)
-        FillState.objects.create(
-            property=stat.property, end_time=last_end_time, state=FillState.DONE
-        )
+        FillState.objects.create(property=stat.property, end_time=last_end_time,
+                                 state=FillState.DONE)
 
-        stat = COUNT_STATS["messages_in_stream:is_bot:day"]
-        realm_data = {
-            "false": self.generate_fixture_data(stat, 30, 5, 6, 0.6, 4),
-            "true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3),
-        }
+        stat = COUNT_STATS['messages_in_stream:is_bot:day']
+        realm_data = {'false': self.generate_fixture_data(stat, 30, 5, 6, .6, 4),
+                      'true': self.generate_fixture_data(stat, 20, 2, 3, .2, 3)}
         insert_fixture_data(stat, realm_data, RealmCount)
-        stream_data: Mapping[Optional[str], List[int]] = {
-            "false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4),
-            "true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2),
-        }
+        stream_data = {'false': self.generate_fixture_data(stat, 10, 7, 5, .6, 4),
+                       'true': self.generate_fixture_data(stat, 5, 3, 2, .4, 2)}  # type: Mapping[Optional[str], List[int]]
         insert_fixture_data(stat, stream_data, StreamCount)
-        FillState.objects.create(
-            property=stat.property, end_time=last_end_time, state=FillState.DONE
-        )
-
-        stat = COUNT_STATS["messages_read::hour"]
-        user_data = {
-            None: self.generate_fixture_data(stat, 7, 3, 2, 0.6, 8, holiday_rate=0.1),
-        }
-        insert_fixture_data(stat, user_data, UserCount)
-        realm_data = {None: self.generate_fixture_data(stat, 50, 35, 6, 0.6, 4)}
-        insert_fixture_data(stat, realm_data, RealmCount)
-        FillState.objects.create(
-            property=stat.property, end_time=last_end_time, state=FillState.DONE
-        )
+        FillState.objects.create(property=stat.property, end_time=last_end_time,
+                                 state=FillState.DONE)
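For reference, a hedged sketch of calling the fixture generator directly, using only keyword arguments visible above (defaults for the omitted holiday_rate/partial_sum are assumed):

from analytics.lib.counts import CountStat
from analytics.lib.fixtures import generate_time_series_data

# 100 days of synthetic daily counts: business-hours weighting, mild growth
# and autocorrelation, occasional spikes; the seed makes the data repeatable.
values = generate_time_series_data(
    days=100,
    business_hours_base=35,
    non_business_hours_base=15,
    growth=6,
    autocorrelation=0.6,
    spikiness=4,
    frequency=CountStat.DAY,
    random_seed=26,
)
print(len(values), values[:5])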
analytics/management/commands/realm_stats.py (new file, 153 lines)
@@ -0,0 +1,153 @@
import datetime
from argparse import ArgumentParser
from typing import Any, List

import pytz
from django.core.management.base import BaseCommand
from django.db.models import Count
from django.utils.timezone import now as timezone_now

from zerver.models import Message, Realm, Recipient, Stream, \
    Subscription, UserActivity, UserMessage, UserProfile, get_realm

MOBILE_CLIENT_LIST = ["Android", "ios"]
HUMAN_CLIENT_LIST = MOBILE_CLIENT_LIST + ["website"]

human_messages = Message.objects.filter(sending_client__name__in=HUMAN_CLIENT_LIST)

class Command(BaseCommand):
    help = "Generate statistics on realm activity."

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
                            help="realm to generate statistics for")

    def active_users(self, realm: Realm) -> List[UserProfile]:
        # Has been active (on the website, for now) in the last 7 days.
        activity_cutoff = timezone_now() - datetime.timedelta(days=7)
        return [activity.user_profile for activity in (
            UserActivity.objects.filter(user_profile__realm=realm,
                                        user_profile__is_active=True,
                                        last_visit__gt=activity_cutoff,
                                        query="/json/users/me/pointer",
                                        client__name="website"))]

    def messages_sent_by(self, user: UserProfile, days_ago: int) -> int:
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender=user, pub_date__gt=sent_time_cutoff).count()

    def total_messages(self, realm: Realm, days_ago: int) -> int:
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return Message.objects.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()

    def human_messages(self, realm: Realm, days_ago: int) -> int:
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()

    def api_messages(self, realm: Realm, days_ago: int) -> int:
        return (self.total_messages(realm, days_ago) - self.human_messages(realm, days_ago))

    def stream_messages(self, realm: Realm, days_ago: int) -> int:
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff,
                                     recipient__type=Recipient.STREAM).count()

    def private_messages(self, realm: Realm, days_ago: int) -> int:
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
            recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.HUDDLE).count()

    def group_private_messages(self, realm: Realm, days_ago: int) -> int:
        sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
            recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.PERSONAL).count()

    def report_percentage(self, numerator: float, denominator: float, text: str) -> None:
        if not denominator:
            fraction = 0.0
        else:
            fraction = numerator / float(denominator)
        print("%.2f%% of" % (fraction * 100,), text)

    def handle(self, *args: Any, **options: Any) -> None:
        if options['realms']:
            try:
                realms = [get_realm(string_id) for string_id in options['realms']]
            except Realm.DoesNotExist as e:
                print(e)
                exit(1)
        else:
            realms = Realm.objects.all()

        for realm in realms:
            print(realm.string_id)

            user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
            active_users = self.active_users(realm)
            num_active = len(active_users)

            print("%d active users (%d total)" % (num_active, len(user_profiles)))
            streams = Stream.objects.filter(realm=realm).extra(
                tables=['zerver_subscription', 'zerver_recipient'],
                where=['zerver_subscription.recipient_id = zerver_recipient.id',
                       'zerver_recipient.type = 2',
                       'zerver_recipient.type_id = zerver_stream.id',
                       'zerver_subscription.active = true']).annotate(count=Count("name"))
            print("%d streams" % (streams.count(),))

            for days_ago in (1, 7, 30):
                print("In last %d days, users sent:" % (days_ago,))
                sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles]
                for quantity in sorted(sender_quantities, reverse=True):
                    print(quantity, end=' ')
                print("")

                print("%d stream messages" % (self.stream_messages(realm, days_ago),))
                print("%d one-on-one private messages" % (self.private_messages(realm, days_ago),))
                print("%d messages sent via the API" % (self.api_messages(realm, days_ago),))
                print("%d group private messages" % (self.group_private_messages(realm, days_ago),))

            num_notifications_enabled = len([x for x in active_users if x.enable_desktop_notifications])
            self.report_percentage(num_notifications_enabled, num_active,
                                   "active users have desktop notifications enabled")

            num_enter_sends = len([x for x in active_users if x.enter_sends])
            self.report_percentage(num_enter_sends, num_active,
                                   "active users have enter-sends")

            all_message_count = human_messages.filter(sender__realm=realm).count()
            multi_paragraph_message_count = human_messages.filter(
                sender__realm=realm, content__contains="\n\n").count()
            self.report_percentage(multi_paragraph_message_count, all_message_count,
                                   "all messages are multi-paragraph")

            # Starred messages
            starrers = UserMessage.objects.filter(user_profile__in=user_profiles,
                                                  flags=UserMessage.flags.starred).values(
                "user_profile").annotate(count=Count("user_profile"))
            print("%d users have starred %d messages" % (
                len(starrers), sum([elt["count"] for elt in starrers])))

            active_user_subs = Subscription.objects.filter(
                user_profile__in=user_profiles, active=True)

            # Streams not in home view
            non_home_view = active_user_subs.filter(in_home_view=False).values(
                "user_profile").annotate(count=Count("user_profile"))
            print("%d users have %d streams not in home view" % (
                len(non_home_view), sum([elt["count"] for elt in non_home_view])))

            # Code block markup
            markup_messages = human_messages.filter(
                sender__realm=realm, content__contains="~~~").values(
                "sender").annotate(count=Count("sender"))
            print("%d users have used code block markup on %s messages" % (
                len(markup_messages), sum([elt["count"] for elt in markup_messages])))

            # Notifications for stream messages
            notifications = active_user_subs.filter(notifications=True).values(
                "user_profile").annotate(count=Count("user_profile"))
            print("%d users receive desktop notifications for %d streams" % (
                len(notifications), sum([elt["count"] for elt in notifications])))

            print("")
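report_percentage's zero-denominator guard is the one subtle piece here; the helper reduces to this standalone, runnable sketch:

def report_percentage(numerator: float, denominator: float, text: str) -> None:
    # Avoid ZeroDivisionError when a realm has no matching rows.
    fraction = numerator / float(denominator) if denominator else 0.0
    print("%.2f%% of" % (fraction * 100,), text)

report_percentage(3, 12, "active users have enter-sends")    # 25.00% of ...
report_percentage(0, 0, "messages are multi-paragraph")      # 0.00% of ...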
analytics/management/commands/stream_stats.py
@@ -1,61 +1,44 @@
 from argparse import ArgumentParser
 from typing import Any
 
-from django.core.management.base import BaseCommand, CommandError
+from django.core.management.base import BaseCommand
 from django.db.models import Q
 
-from zerver.models import Message, Realm, Recipient, Stream, Subscription, get_realm
+from zerver.models import Message, Realm, \
+    Recipient, Stream, Subscription, get_realm
 
 
 class Command(BaseCommand):
     help = "Generate statistics on the streams for a realm."
 
     def add_arguments(self, parser: ArgumentParser) -> None:
-        parser.add_argument(
-            "realms", metavar="<realm>", nargs="*", help="realm to generate statistics for"
-        )
+        parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
+                            help="realm to generate statistics for")
 
     def handle(self, *args: Any, **options: str) -> None:
-        if options["realms"]:
+        if options['realms']:
             try:
-                realms = [get_realm(string_id) for string_id in options["realms"]]
+                realms = [get_realm(string_id) for string_id in options['realms']]
             except Realm.DoesNotExist as e:
-                raise CommandError(e)
+                print(e)
+                exit(1)
         else:
             realms = Realm.objects.all()
 
         for realm in realms:
             print(realm.string_id)
+            print("------------")
+            print("%25s %15s %10s" % ("stream", "subscribers", "messages"))
             streams = Stream.objects.filter(realm=realm).exclude(Q(name__istartswith="tutorial-"))
-            # private stream count
-            private_count = 0
-            # public stream count
-            public_count = 0
+            invite_only_count = 0
             for stream in streams:
                 if stream.invite_only:
-                    private_count += 1
-                else:
-                    public_count += 1
-            print("------------")
-            print(realm.string_id, end=" ")
-            print("{:>10} {} public streams and".format("(", public_count), end=" ")
-            print(f"{private_count} private streams )")
-            print("------------")
-            print("{:>25} {:>15} {:>10} {:>12}".format("stream", "subscribers", "messages", "type"))
-
-            for stream in streams:
-                if stream.invite_only:
-                    stream_type = "private"
-                else:
-                    stream_type = "public"
-                print(f"{stream.name:>25}", end=" ")
+                    invite_only_count += 1
+                    continue
+                print("%25s" % (stream.name,), end=' ')
                 recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id)
-                print(
-                    "{:10}".format(
-                        len(Subscription.objects.filter(recipient=recipient, active=True))
-                    ),
-                    end=" ",
-                )
+                print("%10d" % (len(Subscription.objects.filter(recipient=recipient,
+                                                                active=True)),), end=' ')
                 num_messages = len(Message.objects.filter(recipient=recipient))
-                print(f"{num_messages:12}", end=" ")
-                print(f"{stream_type:>15}")
+                print("%12d" % (num_messages,))
+            print("%d invite-only streams" % (invite_only_count,))
             print("")
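The two formatting styles that change hands in this diff are equivalent for the column layout; for example:

# Both print "general" right-aligned in a 25-character column.
print("%25s" % ("general",))
print(f"{'general':>25}")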
analytics/management/commands/update_analytics_counts.py
@@ -1,50 +1,47 @@
 import os
 import time
 from argparse import ArgumentParser
-from datetime import timezone
 from typing import Any, Dict
 
 from django.conf import settings
 from django.core.management.base import BaseCommand
 from django.utils.dateparse import parse_datetime
 from django.utils.timezone import now as timezone_now
+from django.utils.timezone import utc as timezone_utc
 
 from analytics.lib.counts import COUNT_STATS, logger, process_count_stat
 from scripts.lib.zulip_tools import ENDC, WARNING
-from zerver.lib.remote_server import send_analytics_to_remote_server
 from zerver.lib.timestamp import floor_to_hour
 from zerver.models import Realm
 
 
 class Command(BaseCommand):
     help = """Fills Analytics tables.
 
     Run as a cron job that runs every hour."""
 
     def add_arguments(self, parser: ArgumentParser) -> None:
-        parser.add_argument(
-            "--time",
-            "-t",
-            help="Update stat tables from current state to "
-            "--time. Defaults to the current time.",
-            default=timezone_now().isoformat(),
-        )
-        parser.add_argument("--utc", action="store_true", help="Interpret --time in UTC.")
-        parser.add_argument(
-            "--stat", "-s", help="CountStat to process. If omitted, all stats are processed."
-        )
-        parser.add_argument(
-            "--verbose", action="store_true", help="Print timing information to stdout."
-        )
+        parser.add_argument('--time', '-t',
+                            type=str,
+                            help='Update stat tables from current state to'
+                                 '--time. Defaults to the current time.',
+                            default=timezone_now().isoformat())
+        parser.add_argument('--utc',
+                            action='store_true',
+                            help="Interpret --time in UTC.",
+                            default=False)
+        parser.add_argument('--stat', '-s',
+                            type=str,
+                            help="CountStat to process. If omitted, all stats are processed.")
+        parser.add_argument('--verbose',
+                            action='store_true',
+                            help="Print timing information to stdout.",
+                            default=False)
 
     def handle(self, *args: Any, **options: Any) -> None:
         try:
             os.mkdir(settings.ANALYTICS_LOCK_DIR)
         except OSError:
-            print(
-                f"{WARNING}Analytics lock {settings.ANALYTICS_LOCK_DIR} is unavailable;"
-                f" exiting.{ENDC}"
-            )
+            print(WARNING + "Analytics lock %s is unavailable; exiting... " + ENDC)
             return
 
         try:
@@ -59,37 +56,31 @@ class Command(BaseCommand):
             logger.info("No realms, stopping update_analytics_counts")
             return
 
-        fill_to_time = parse_datetime(options["time"])
-        if options["utc"]:
-            fill_to_time = fill_to_time.replace(tzinfo=timezone.utc)
+        fill_to_time = parse_datetime(options['time'])
+        if options['utc']:
+            fill_to_time = fill_to_time.replace(tzinfo=timezone_utc)
         if fill_to_time.tzinfo is None:
-            raise ValueError(
-                "--time must be timezone aware. Maybe you meant to use the --utc option?"
-            )
+            raise ValueError("--time must be timezone aware. Maybe you meant to use the --utc option?")
 
-        fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc))
+        fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone_utc))
 
-        if options["stat"] is not None:
-            stats = [COUNT_STATS[options["stat"]]]
+        if options['stat'] is not None:
+            stats = [COUNT_STATS[options['stat']]]
         else:
             stats = list(COUNT_STATS.values())
 
-        logger.info("Starting updating analytics counts through %s", fill_to_time)
-        if options["verbose"]:
+        logger.info("Starting updating analytics counts through %s" % (fill_to_time,))
+        if options['verbose']:
             start = time.time()
             last = start
 
         for stat in stats:
             process_count_stat(stat, fill_to_time)
-            if options["verbose"]:
-                print(f"Updated {stat.property} in {time.time() - last:.3f}s")
+            if options['verbose']:
+                print("Updated %s in %.3fs" % (stat.property, time.time() - last))
                 last = time.time()
 
-        if options["verbose"]:
-            print(
-                f"Finished updating analytics counts through {fill_to_time} in {time.time() - start:.3f}s"
-            )
-        logger.info("Finished updating analytics counts through %s", fill_to_time)
-
-        if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS:
-            send_analytics_to_remote_server()
+        if options['verbose']:
+            print("Finished updating analytics counts through %s in %.3fs" %
+                  (fill_to_time, time.time() - start))
+        logger.info("Finished updating analytics counts through %s" % (fill_to_time,))
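A hedged sketch of the --time/--utc handling above (floor_to_hour is inlined so the snippet runs outside Zulip; only Django is needed for parse_datetime):

from datetime import timezone

from django.utils.dateparse import parse_datetime

fill_to_time = parse_datetime("2013-09-10T12:34:56")
assert fill_to_time is not None  # parse_datetime returns None on malformed input
fill_to_time = fill_to_time.replace(tzinfo=timezone.utc)                 # the --utc path
fill_to_time = fill_to_time.replace(minute=0, second=0, microsecond=0)  # floor to the hour
print(fill_to_time)  # 2013-09-10 12:00:00+00:00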
analytics/management/commands/user_stats.py (new file, 42 lines)
@@ -0,0 +1,42 @@
import datetime
from argparse import ArgumentParser
from typing import Any

from django.core.management.base import BaseCommand
from django.utils.timezone import now as timezone_now

from zerver.models import Message, Realm, Stream, UserProfile, get_realm

class Command(BaseCommand):
    help = "Generate statistics on user activity."

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
                            help="realm to generate statistics for")

    def messages_sent_by(self, user: UserProfile, week: int) -> int:
        start = timezone_now() - datetime.timedelta(days=(week + 1)*7)
        end = timezone_now() - datetime.timedelta(days=week*7)
        return Message.objects.filter(sender=user, pub_date__gt=start, pub_date__lte=end).count()

    def handle(self, *args: Any, **options: Any) -> None:
        if options['realms']:
            try:
                realms = [get_realm(string_id) for string_id in options['realms']]
            except Realm.DoesNotExist as e:
                print(e)
                exit(1)
        else:
            realms = Realm.objects.all()

        for realm in realms:
            print(realm.string_id)
            user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
            print("%d users" % (len(user_profiles),))
            print("%d streams" % (len(Stream.objects.filter(realm=realm)),))

            for user_profile in user_profiles:
                print("%35s" % (user_profile.email,), end=' ')
                for week in range(10):
                    print("%5d" % (self.messages_sent_by(user_profile, week)), end=' ')
                print("")
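The weekly windows in messages_sent_by count backward from now; a pure-stdlib sketch of the same arithmetic:

import datetime

# Week 0 is the most recent 7 days, week 1 the 7 days before that, matching
# the (week + 1) * 7 .. week * 7 cutoffs above.
now = datetime.datetime.now(datetime.timezone.utc)
week = 1
start = now - datetime.timedelta(days=(week + 1) * 7)
end = now - datetime.timedelta(days=week * 7)
print(start, "->", end)  # a 7-day window ending one week ago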
analytics/migrations/0001_initial.py
@@ -1,209 +1,112 @@
+# -*- coding: utf-8 -*-
 import django.db.models.deletion
 from django.conf import settings
 from django.db import migrations, models
 
+import zerver.lib.str_utils
+
 
 class Migration(migrations.Migration):
 
     dependencies = [
-        ("zerver", "0030_realm_org_type"),
+        ('zerver', '0030_realm_org_type'),
         migrations.swappable_dependency(settings.AUTH_USER_MODEL),
     ]
 
     operations = [
         migrations.CreateModel(
-            name="Anomaly",
+            name='Anomaly',
             fields=[
-                (
-                    "id",
-                    models.AutoField(
-                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
-                    ),
-                ),
-                ("info", models.CharField(max_length=1000)),
+                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+                ('info', models.CharField(max_length=1000)),
             ],
-            bases=(models.Model,),
+            bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
         ),
         migrations.CreateModel(
-            name="HuddleCount",
+            name='HuddleCount',
             fields=[
-                (
-                    "id",
-                    models.AutoField(
-                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
-                    ),
-                ),
-                (
-                    "huddle",
-                    models.ForeignKey(
-                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient"
-                    ),
-                ),
-                (
-                    "user",
-                    models.ForeignKey(
-                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
-                    ),
-                ),
-                ("property", models.CharField(max_length=40)),
-                ("end_time", models.DateTimeField()),
-                ("interval", models.CharField(max_length=20)),
-                ("value", models.BigIntegerField()),
-                (
-                    "anomaly",
-                    models.ForeignKey(
-                        on_delete=django.db.models.deletion.CASCADE,
-                        to="analytics.Anomaly",
-                        null=True,
-                    ),
-                ),
+                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+                ('huddle', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Recipient')),
+                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
+                ('property', models.CharField(max_length=40)),
+                ('end_time', models.DateTimeField()),
+                ('interval', models.CharField(max_length=20)),
+                ('value', models.BigIntegerField()),
+                ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
             ],
-            bases=(models.Model,),
+            bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
         ),
         migrations.CreateModel(
-            name="InstallationCount",
+            name='InstallationCount',
             fields=[
-                (
-                    "id",
-                    models.AutoField(
-                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
-                    ),
-                ),
-                ("property", models.CharField(max_length=40)),
-                ("end_time", models.DateTimeField()),
-                ("interval", models.CharField(max_length=20)),
-                ("value", models.BigIntegerField()),
-                (
-                    "anomaly",
-                    models.ForeignKey(
-                        on_delete=django.db.models.deletion.CASCADE,
-                        to="analytics.Anomaly",
-                        null=True,
-                    ),
-                ),
+                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+                ('property', models.CharField(max_length=40)),
+                ('end_time', models.DateTimeField()),
+                ('interval', models.CharField(max_length=20)),
+                ('value', models.BigIntegerField()),
+                ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
            ],
-            bases=(models.Model,),
+            bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
        ),
        migrations.CreateModel(
-            name="RealmCount",
+            name='RealmCount',
            fields=[
-                (
-                    "id",
-                    models.AutoField(
-                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
-                    ),
-                ),
-                (
-                    "realm",
-                    models.ForeignKey(
-                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
-                    ),
-                ),
-                ("property", models.CharField(max_length=40)),
-                ("end_time", models.DateTimeField()),
-                ("interval", models.CharField(max_length=20)),
-                ("value", models.BigIntegerField()),
-                (
-                    "anomaly",
-                    models.ForeignKey(
-                        on_delete=django.db.models.deletion.CASCADE,
-                        to="analytics.Anomaly",
-                        null=True,
-                    ),
-                ),
+                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+                ('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
+                ('property', models.CharField(max_length=40)),
+                ('end_time', models.DateTimeField()),
+                ('interval', models.CharField(max_length=20)),
+                ('value', models.BigIntegerField()),
+                ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
+
            ],
-            bases=(models.Model,),
+            bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
        ),
        migrations.CreateModel(
-            name="StreamCount",
+            name='StreamCount',
            fields=[
-                (
-                    "id",
-                    models.AutoField(
-                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
-                    ),
-                ),
-                (
-                    "realm",
-                    models.ForeignKey(
-                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
-                    ),
-                ),
-                (
-                    "stream",
-                    models.ForeignKey(
-                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Stream"
-                    ),
-                ),
-                ("property", models.CharField(max_length=40)),
-                ("end_time", models.DateTimeField()),
-                ("interval", models.CharField(max_length=20)),
-                ("value", models.BigIntegerField()),
-                (
-                    "anomaly",
-                    models.ForeignKey(
-                        on_delete=django.db.models.deletion.CASCADE,
-                        to="analytics.Anomaly",
-                        null=True,
-                    ),
-                ),
+                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+                ('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
+                ('stream', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Stream')),
+                ('property', models.CharField(max_length=40)),
+                ('end_time', models.DateTimeField()),
+                ('interval', models.CharField(max_length=20)),
+                ('value', models.BigIntegerField()),
+                ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
            ],
-            bases=(models.Model,),
+            bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
        ),
        migrations.CreateModel(
-            name="UserCount",
+            name='UserCount',
            fields=[
-                (
-                    "id",
-                    models.AutoField(
-                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
-                    ),
-                ),
-                (
-                    "realm",
-                    models.ForeignKey(
-                        on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
-                    ),
-                ),
-                (
-                    "user",
-                    models.ForeignKey(
-                        on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
-                    ),
-                ),
-                ("property", models.CharField(max_length=40)),
-                ("end_time", models.DateTimeField()),
-                ("interval", models.CharField(max_length=20)),
-                ("value", models.BigIntegerField()),
-                (
-                    "anomaly",
-                    models.ForeignKey(
-                        on_delete=django.db.models.deletion.CASCADE,
-                        to="analytics.Anomaly",
-                        null=True,
-                    ),
-                ),
+                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+                ('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
+                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
+                ('property', models.CharField(max_length=40)),
+                ('end_time', models.DateTimeField()),
+                ('interval', models.CharField(max_length=20)),
+                ('value', models.BigIntegerField()),
+                ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
            ],
-            bases=(models.Model,),
+            bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
        ),
        migrations.AlterUniqueTogether(
-            name="usercount",
-            unique_together={("user", "property", "end_time", "interval")},
+            name='usercount',
+            unique_together=set([('user', 'property', 'end_time', 'interval')]),
        ),
        migrations.AlterUniqueTogether(
-            name="streamcount",
-            unique_together={("stream", "property", "end_time", "interval")},
+            name='streamcount',
+            unique_together=set([('stream', 'property', 'end_time', 'interval')]),
        ),
        migrations.AlterUniqueTogether(
-            name="realmcount",
-            unique_together={("realm", "property", "end_time", "interval")},
+            name='realmcount',
+            unique_together=set([('realm', 'property', 'end_time', 'interval')]),
        ),
        migrations.AlterUniqueTogether(
-            name="installationcount",
-            unique_together={("property", "end_time", "interval")},
+            name='installationcount',
+            unique_together=set([('property', 'end_time', 'interval')]),
        ),
        migrations.AlterUniqueTogether(
-            name="huddlecount",
-            unique_together={("huddle", "property", "end_time", "interval")},
+            name='huddlecount',
+            unique_together=set([('huddle', 'property', 'end_time', 'interval')]),
        ),
    ]
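The unique_together changes in this migration are purely notational; both spellings build the same set:

# Set literal vs. set() over a list: identical contents.
modern = {("user", "property", "end_time", "interval")}
legacy = set([("user", "property", "end_time", "interval")])
assert modern == legacy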
analytics/migrations/0002_remove_huddlecount.py
@@ -1,30 +1,30 @@
-from django.db import migrations
-
+# -*- coding: utf-8 -*-
+from django.db import migrations, models
 
 class Migration(migrations.Migration):
 
     dependencies = [
-        ("analytics", "0001_initial"),
+        ('analytics', '0001_initial'),
     ]
 
     operations = [
         migrations.AlterUniqueTogether(
-            name="huddlecount",
-            unique_together=set(),
+            name='huddlecount',
+            unique_together=set([]),
         ),
         migrations.RemoveField(
-            model_name="huddlecount",
-            name="anomaly",
+            model_name='huddlecount',
+            name='anomaly',
         ),
         migrations.RemoveField(
-            model_name="huddlecount",
-            name="huddle",
+            model_name='huddlecount',
+            name='huddle',
         ),
         migrations.RemoveField(
-            model_name="huddlecount",
-            name="user",
+            model_name='huddlecount',
+            name='user',
         ),
         migrations.DeleteModel(
-            name="HuddleCount",
+            name='HuddleCount',
         ),
     ]
@@ -1,27 +1,24 @@
+# -*- coding: utf-8 -*-
 from django.db import migrations, models

+import zerver.lib.str_utils
+

 class Migration(migrations.Migration):

     dependencies = [
-        ("analytics", "0002_remove_huddlecount"),
+        ('analytics', '0002_remove_huddlecount'),
     ]

     operations = [
         migrations.CreateModel(
-            name="FillState",
+            name='FillState',
             fields=[
-                (
-                    "id",
-                    models.AutoField(
-                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
-                    ),
-                ),
-                ("property", models.CharField(unique=True, max_length=40)),
-                ("end_time", models.DateTimeField()),
-                ("state", models.PositiveSmallIntegerField()),
-                ("last_modified", models.DateTimeField(auto_now=True)),
+                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+                ('property', models.CharField(unique=True, max_length=40)),
+                ('end_time', models.DateTimeField()),
+                ('state', models.PositiveSmallIntegerField()),
+                ('last_modified', models.DateTimeField(auto_now=True)),
             ],
-            bases=(models.Model,),
+            bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
         ),
     ]
@@ -1,31 +1,31 @@
+# -*- coding: utf-8 -*-
 from django.db import migrations, models


 class Migration(migrations.Migration):

     dependencies = [
-        ("analytics", "0003_fillstate"),
+        ('analytics', '0003_fillstate'),
     ]

     operations = [
         migrations.AddField(
-            model_name="installationcount",
-            name="subgroup",
+            model_name='installationcount',
+            name='subgroup',
             field=models.CharField(max_length=16, null=True),
         ),
         migrations.AddField(
-            model_name="realmcount",
-            name="subgroup",
+            model_name='realmcount',
+            name='subgroup',
             field=models.CharField(max_length=16, null=True),
         ),
         migrations.AddField(
-            model_name="streamcount",
-            name="subgroup",
+            model_name='streamcount',
+            name='subgroup',
             field=models.CharField(max_length=16, null=True),
         ),
         migrations.AddField(
-            model_name="usercount",
-            name="subgroup",
+            model_name='usercount',
+            name='subgroup',
             field=models.CharField(max_length=16, null=True),
         ),
     ]
@@ -1,51 +1,51 @@
+# -*- coding: utf-8 -*-
 from django.db import migrations, models


 class Migration(migrations.Migration):

     dependencies = [
-        ("analytics", "0004_add_subgroup"),
+        ('analytics', '0004_add_subgroup'),
     ]

     operations = [
         migrations.AlterField(
-            model_name="installationcount",
-            name="interval",
+            model_name='installationcount',
+            name='interval',
             field=models.CharField(max_length=8),
         ),
         migrations.AlterField(
-            model_name="installationcount",
-            name="property",
+            model_name='installationcount',
+            name='property',
             field=models.CharField(max_length=32),
         ),
         migrations.AlterField(
-            model_name="realmcount",
-            name="interval",
+            model_name='realmcount',
+            name='interval',
             field=models.CharField(max_length=8),
         ),
         migrations.AlterField(
-            model_name="realmcount",
-            name="property",
+            model_name='realmcount',
+            name='property',
             field=models.CharField(max_length=32),
         ),
         migrations.AlterField(
-            model_name="streamcount",
-            name="interval",
+            model_name='streamcount',
+            name='interval',
             field=models.CharField(max_length=8),
         ),
         migrations.AlterField(
-            model_name="streamcount",
-            name="property",
+            model_name='streamcount',
+            name='property',
             field=models.CharField(max_length=32),
         ),
         migrations.AlterField(
-            model_name="usercount",
-            name="interval",
+            model_name='usercount',
+            name='interval',
             field=models.CharField(max_length=8),
         ),
         migrations.AlterField(
-            model_name="usercount",
-            name="property",
+            model_name='usercount',
+            name='property',
             field=models.CharField(max_length=32),
         ),
     ]
@@ -1,27 +1,27 @@
-from django.db import migrations
+# -*- coding: utf-8 -*-
+from django.db import migrations, models


 class Migration(migrations.Migration):

     dependencies = [
-        ("analytics", "0005_alter_field_size"),
+        ('analytics', '0005_alter_field_size'),
     ]

     operations = [
         migrations.AlterUniqueTogether(
-            name="installationcount",
-            unique_together={("property", "subgroup", "end_time", "interval")},
+            name='installationcount',
+            unique_together=set([('property', 'subgroup', 'end_time', 'interval')]),
         ),
         migrations.AlterUniqueTogether(
-            name="realmcount",
-            unique_together={("realm", "property", "subgroup", "end_time", "interval")},
+            name='realmcount',
+            unique_together=set([('realm', 'property', 'subgroup', 'end_time', 'interval')]),
         ),
         migrations.AlterUniqueTogether(
-            name="streamcount",
-            unique_together={("stream", "property", "subgroup", "end_time", "interval")},
+            name='streamcount',
+            unique_together=set([('stream', 'property', 'subgroup', 'end_time', 'interval')]),
         ),
         migrations.AlterUniqueTogether(
-            name="usercount",
-            unique_together={("user", "property", "subgroup", "end_time", "interval")},
+            name='usercount',
+            unique_together=set([('user', 'property', 'subgroup', 'end_time', 'interval')]),
         ),
     ]
@@ -1,44 +1,45 @@
+# -*- coding: utf-8 -*-
 # Generated by Django 1.10.4 on 2017-01-16 20:50
 from django.conf import settings
 from django.db import migrations


 class Migration(migrations.Migration):

     dependencies = [
-        ("analytics", "0006_add_subgroup_to_unique_constraints"),
+        ('analytics', '0006_add_subgroup_to_unique_constraints'),
     ]

     operations = [
         migrations.AlterUniqueTogether(
-            name="installationcount",
-            unique_together={("property", "subgroup", "end_time")},
+            name='installationcount',
+            unique_together=set([('property', 'subgroup', 'end_time')]),
         ),
         migrations.RemoveField(
-            model_name="installationcount",
-            name="interval",
+            model_name='installationcount',
+            name='interval',
         ),
         migrations.AlterUniqueTogether(
-            name="realmcount",
-            unique_together={("realm", "property", "subgroup", "end_time")},
+            name='realmcount',
+            unique_together=set([('realm', 'property', 'subgroup', 'end_time')]),
         ),
         migrations.RemoveField(
-            model_name="realmcount",
-            name="interval",
+            model_name='realmcount',
+            name='interval',
         ),
         migrations.AlterUniqueTogether(
-            name="streamcount",
-            unique_together={("stream", "property", "subgroup", "end_time")},
+            name='streamcount',
+            unique_together=set([('stream', 'property', 'subgroup', 'end_time')]),
         ),
         migrations.RemoveField(
-            model_name="streamcount",
-            name="interval",
+            model_name='streamcount',
+            name='interval',
         ),
         migrations.AlterUniqueTogether(
-            name="usercount",
-            unique_together={("user", "property", "subgroup", "end_time")},
+            name='usercount',
+            unique_together=set([('user', 'property', 'subgroup', 'end_time')]),
         ),
         migrations.RemoveField(
-            model_name="usercount",
-            name="interval",
+            model_name='usercount',
+            name='interval',
         ),
     ]
@@ -1,25 +1,25 @@
+# -*- coding: utf-8 -*-
 # Generated by Django 1.10.5 on 2017-02-01 22:28
 from django.db import migrations


 class Migration(migrations.Migration):

     dependencies = [
-        ("zerver", "0050_userprofile_avatar_version"),
-        ("analytics", "0007_remove_interval"),
+        ('zerver', '0050_userprofile_avatar_version'),
+        ('analytics', '0007_remove_interval'),
     ]

     operations = [
         migrations.AlterIndexTogether(
-            name="realmcount",
-            index_together={("property", "end_time")},
+            name='realmcount',
+            index_together=set([('property', 'end_time')]),
         ),
         migrations.AlterIndexTogether(
-            name="streamcount",
-            index_together={("property", "realm", "end_time")},
+            name='streamcount',
+            index_together=set([('property', 'realm', 'end_time')]),
         ),
         migrations.AlterIndexTogether(
-            name="usercount",
-            index_together={("property", "realm", "end_time")},
+            name='usercount',
+            index_together=set([('property', 'realm', 'end_time')]),
         ),
     ]
@@ -1,29 +1,26 @@
+# -*- coding: utf-8 -*-
 from django.db import migrations
-from django.db.backends.postgresql.schema import DatabaseSchemaEditor
+from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
 from django.db.migrations.state import StateApps


-def delete_messages_sent_to_stream_stat(
-    apps: StateApps, schema_editor: DatabaseSchemaEditor
-) -> None:
-    UserCount = apps.get_model("analytics", "UserCount")
-    StreamCount = apps.get_model("analytics", "StreamCount")
-    RealmCount = apps.get_model("analytics", "RealmCount")
-    InstallationCount = apps.get_model("analytics", "InstallationCount")
-    FillState = apps.get_model("analytics", "FillState")
+def delete_messages_sent_to_stream_stat(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
+    UserCount = apps.get_model('analytics', 'UserCount')
+    StreamCount = apps.get_model('analytics', 'StreamCount')
+    RealmCount = apps.get_model('analytics', 'RealmCount')
+    InstallationCount = apps.get_model('analytics', 'InstallationCount')
+    FillState = apps.get_model('analytics', 'FillState')

-    property = "messages_sent_to_stream:is_bot"
+    property = 'messages_sent_to_stream:is_bot'
     UserCount.objects.filter(property=property).delete()
     StreamCount.objects.filter(property=property).delete()
     RealmCount.objects.filter(property=property).delete()
     InstallationCount.objects.filter(property=property).delete()
     FillState.objects.filter(property=property).delete()


 class Migration(migrations.Migration):

     dependencies = [
-        ("analytics", "0008_add_count_indexes"),
+        ('analytics', '0008_add_count_indexes'),
     ]

     operations = [
@@ -1,28 +1,25 @@
+# -*- coding: utf-8 -*-
 from django.db import migrations
-from django.db.backends.postgresql.schema import DatabaseSchemaEditor
+from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
 from django.db.migrations.state import StateApps


-def clear_message_sent_by_message_type_values(
-    apps: StateApps, schema_editor: DatabaseSchemaEditor
-) -> None:
-    UserCount = apps.get_model("analytics", "UserCount")
-    StreamCount = apps.get_model("analytics", "StreamCount")
-    RealmCount = apps.get_model("analytics", "RealmCount")
-    InstallationCount = apps.get_model("analytics", "InstallationCount")
-    FillState = apps.get_model("analytics", "FillState")
+def clear_message_sent_by_message_type_values(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
+    UserCount = apps.get_model('analytics', 'UserCount')
+    StreamCount = apps.get_model('analytics', 'StreamCount')
+    RealmCount = apps.get_model('analytics', 'RealmCount')
+    InstallationCount = apps.get_model('analytics', 'InstallationCount')
+    FillState = apps.get_model('analytics', 'FillState')

-    property = "messages_sent:message_type:day"
+    property = 'messages_sent:message_type:day'
     UserCount.objects.filter(property=property).delete()
     StreamCount.objects.filter(property=property).delete()
     RealmCount.objects.filter(property=property).delete()
     InstallationCount.objects.filter(property=property).delete()
     FillState.objects.filter(property=property).delete()


 class Migration(migrations.Migration):

-    dependencies = [("analytics", "0009_remove_messages_to_stream_stat")]
+    dependencies = [('analytics', '0009_remove_messages_to_stream_stat')]

     operations = [
         migrations.RunPython(clear_message_sent_by_message_type_values),
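The two hunks above follow Django's standard RunPython data-migration pattern: the models are looked up through `apps.get_model()`, so the function sees the historical schema as of this migration rather than the live classes in analytics/models.py, and the rows for one retired stat property are deleted. A minimal standalone sketch of the same pattern, using a hypothetical property name rather than one from this diff:

```python
from django.db import migrations


def delete_rows_for_property(apps, schema_editor):
    # apps.get_model() returns the historical model state for this
    # migration, not the current class from analytics/models.py.
    RealmCount = apps.get_model("analytics", "RealmCount")
    # "some_retired_stat" is a made-up property name for illustration.
    RealmCount.objects.filter(property="some_retired_stat").delete()


class Migration(migrations.Migration):
    dependencies = [("analytics", "0008_add_count_indexes")]

    operations = [
        # reverse_code=noop makes the migration formally reversible.
        migrations.RunPython(delete_rows_for_property, reverse_code=migrations.RunPython.noop),
    ]
```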
@@ -1,14 +1,14 @@
+# -*- coding: utf-8 -*-
 from django.db import migrations
-from django.db.backends.postgresql.schema import DatabaseSchemaEditor
+from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
 from django.db.migrations.state import StateApps


 def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
-    UserCount = apps.get_model("analytics", "UserCount")
-    StreamCount = apps.get_model("analytics", "StreamCount")
-    RealmCount = apps.get_model("analytics", "RealmCount")
-    InstallationCount = apps.get_model("analytics", "InstallationCount")
-    FillState = apps.get_model("analytics", "FillState")
+    UserCount = apps.get_model('analytics', 'UserCount')
+    StreamCount = apps.get_model('analytics', 'StreamCount')
+    RealmCount = apps.get_model('analytics', 'RealmCount')
+    InstallationCount = apps.get_model('analytics', 'InstallationCount')
+    FillState = apps.get_model('analytics', 'FillState')

     UserCount.objects.all().delete()
     StreamCount.objects.all().delete()
@@ -16,11 +16,10 @@ def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor)
     InstallationCount.objects.all().delete()
     FillState.objects.all().delete()


 class Migration(migrations.Migration):

     dependencies = [
-        ("analytics", "0010_clear_messages_sent_values"),
+        ('analytics', '0010_clear_messages_sent_values'),
     ]

     operations = [
@@ -1,42 +1,36 @@
+# -*- coding: utf-8 -*-
 # Generated by Django 1.11.6 on 2018-01-29 08:14
+from __future__ import unicode_literals

-import django.db.models.deletion
 from django.db import migrations, models
+import django.db.models.deletion


 class Migration(migrations.Migration):

     dependencies = [
-        ("analytics", "0011_clear_analytics_tables"),
+        ('analytics', '0011_clear_analytics_tables'),
     ]

     operations = [
         migrations.AlterField(
-            model_name="installationcount",
-            name="anomaly",
-            field=models.ForeignKey(
-                null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
-            ),
+            model_name='installationcount',
+            name='anomaly',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
         ),
         migrations.AlterField(
-            model_name="realmcount",
-            name="anomaly",
-            field=models.ForeignKey(
-                null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
-            ),
+            model_name='realmcount',
+            name='anomaly',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
         ),
         migrations.AlterField(
-            model_name="streamcount",
-            name="anomaly",
-            field=models.ForeignKey(
-                null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
-            ),
+            model_name='streamcount',
+            name='anomaly',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
         ),
         migrations.AlterField(
-            model_name="usercount",
-            name="anomaly",
-            field=models.ForeignKey(
-                null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
-            ),
+            model_name='usercount',
+            name='anomaly',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'),
         ),
     ]
@@ -1,32 +0,0 @@
-# Generated by Django 1.11.18 on 2019-02-02 02:47
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("analytics", "0012_add_on_delete"),
-    ]
-
-    operations = [
-        migrations.RemoveField(
-            model_name="installationcount",
-            name="anomaly",
-        ),
-        migrations.RemoveField(
-            model_name="realmcount",
-            name="anomaly",
-        ),
-        migrations.RemoveField(
-            model_name="streamcount",
-            name="anomaly",
-        ),
-        migrations.RemoveField(
-            model_name="usercount",
-            name="anomaly",
-        ),
-        migrations.DeleteModel(
-            name="Anomaly",
-        ),
-    ]
@@ -1,17 +0,0 @@
-# Generated by Django 1.11.26 on 2020-01-27 04:32
-
-from django.db import migrations
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("analytics", "0013_remove_anomaly"),
-    ]
-
-    operations = [
-        migrations.RemoveField(
-            model_name="fillstate",
-            name="last_modified",
-        ),
-    ]
@@ -1,65 +0,0 @@
-from django.db import migrations
-from django.db.backends.postgresql.schema import DatabaseSchemaEditor
-from django.db.migrations.state import StateApps
-from django.db.models import Count, Sum
-
-
-def clear_duplicate_counts(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
-    """This is a preparatory migration for our Analytics tables.
-
-    The backstory is that Django's unique_together indexes do not properly
-    handle the subgroup=None corner case (allowing duplicate rows that have a
-    subgroup of None), which meant that in race conditions, rather than updating
-    an existing row for the property/(realm, stream, user)/time with subgroup=None, Django would
-    create a duplicate row.
-
-    In the next migration, we'll add a proper constraint to fix this bug, but
-    we need to fix any existing problematic rows before we can add that constraint.
-
-    We fix this in an appropriate fashion for each type of CountStat object; mainly
-    this means deleting the extra rows, but for LoggingCountStat objects, we need to
-    additionally combine the sums.
-    """
-    count_tables = dict(
-        realm=apps.get_model("analytics", "RealmCount"),
-        user=apps.get_model("analytics", "UserCount"),
-        stream=apps.get_model("analytics", "StreamCount"),
-        installation=apps.get_model("analytics", "InstallationCount"),
-    )
-
-    for name, count_table in count_tables.items():
-        value = [name, "property", "end_time"]
-        if name == "installation":
-            value = ["property", "end_time"]
-        counts = (
-            count_table.objects.filter(subgroup=None)
-            .values(*value)
-            .annotate(Count("id"), Sum("value"))
-            .filter(id__count__gt=1)
-        )
-
-        for count in counts:
-            count.pop("id__count")
-            total_value = count.pop("value__sum")
-            duplicate_counts = list(count_table.objects.filter(**count))
-            first_count = duplicate_counts[0]
-            if count["property"] in ["invites_sent::day", "active_users_log:is_bot:day"]:
-                # For LoggingCountStat objects, the right fix is to combine the totals;
-                # for other CountStat objects, we expect the duplicates to have the same value.
-                # And so all we need to do is delete them.
-                first_count.value = total_value
-                first_count.save()
-            to_cleanup = duplicate_counts[1:]
-            for duplicate_count in to_cleanup:
-                duplicate_count.delete()
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("analytics", "0014_remove_fillstate_last_modified"),
-    ]
-
-    operations = [
-        migrations.RunPython(clear_duplicate_counts, reverse_code=migrations.RunPython.noop),
-    ]
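The duplicate detection in `clear_duplicate_counts` leans on a single ORM aggregation: `.values(...)` groups the subgroup=None rows by their natural key (a SQL GROUP BY), `.annotate(Count("id"), Sum("value"))` attaches a row count and a value total to each group, and the final `.filter(id__count__gt=1)` keeps only the groups with duplicates. The same pattern in isolation, assuming a `RealmCount`-like model is in scope:

```python
from django.db.models import Count, Sum

# Each dict in `duplicates` is one natural key plus its row count and
# summed value, e.g. {"realm": 7, "property": "...", "end_time": ...,
# "id__count": 2, "value__sum": 31}.
duplicates = (
    RealmCount.objects.filter(subgroup=None)
    .values("realm", "property", "end_time")   # GROUP BY these columns
    .annotate(Count("id"), Sum("value"))       # per-group aggregates
    .filter(id__count__gt=1)                   # only real duplicates
)
for group in duplicates:
    group.pop("id__count")
    total = group.pop("value__sum")
    # What remains in `group` is exactly the filter for that key, so it
    # can be passed back to .filter(**group) to fetch the rows to merge.
```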
@@ -1,93 +0,0 @@
-# Generated by Django 2.2.10 on 2020-02-29 19:40
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("analytics", "0015_clear_duplicate_counts"),
-    ]
-
-    operations = [
-        migrations.AlterUniqueTogether(
-            name="installationcount",
-            unique_together=set(),
-        ),
-        migrations.AlterUniqueTogether(
-            name="realmcount",
-            unique_together=set(),
-        ),
-        migrations.AlterUniqueTogether(
-            name="streamcount",
-            unique_together=set(),
-        ),
-        migrations.AlterUniqueTogether(
-            name="usercount",
-            unique_together=set(),
-        ),
-        migrations.AddConstraint(
-            model_name="installationcount",
-            constraint=models.UniqueConstraint(
-                condition=models.Q(subgroup__isnull=False),
-                fields=("property", "subgroup", "end_time"),
-                name="unique_installation_count",
-            ),
-        ),
-        migrations.AddConstraint(
-            model_name="installationcount",
-            constraint=models.UniqueConstraint(
-                condition=models.Q(subgroup__isnull=True),
-                fields=("property", "end_time"),
-                name="unique_installation_count_null_subgroup",
-            ),
-        ),
-        migrations.AddConstraint(
-            model_name="realmcount",
-            constraint=models.UniqueConstraint(
-                condition=models.Q(subgroup__isnull=False),
-                fields=("realm", "property", "subgroup", "end_time"),
-                name="unique_realm_count",
-            ),
-        ),
-        migrations.AddConstraint(
-            model_name="realmcount",
-            constraint=models.UniqueConstraint(
-                condition=models.Q(subgroup__isnull=True),
-                fields=("realm", "property", "end_time"),
-                name="unique_realm_count_null_subgroup",
-            ),
-        ),
-        migrations.AddConstraint(
-            model_name="streamcount",
-            constraint=models.UniqueConstraint(
-                condition=models.Q(subgroup__isnull=False),
-                fields=("stream", "property", "subgroup", "end_time"),
-                name="unique_stream_count",
-            ),
-        ),
-        migrations.AddConstraint(
-            model_name="streamcount",
-            constraint=models.UniqueConstraint(
-                condition=models.Q(subgroup__isnull=True),
-                fields=("stream", "property", "end_time"),
-                name="unique_stream_count_null_subgroup",
-            ),
-        ),
-        migrations.AddConstraint(
-            model_name="usercount",
-            constraint=models.UniqueConstraint(
-                condition=models.Q(subgroup__isnull=False),
-                fields=("user", "property", "subgroup", "end_time"),
-                name="unique_user_count",
-            ),
-        ),
-        migrations.AddConstraint(
-            model_name="usercount",
-            constraint=models.UniqueConstraint(
-                condition=models.Q(subgroup__isnull=True),
-                fields=("user", "property", "end_time"),
-                name="unique_user_count_null_subgroup",
-            ),
-        ),
-    ]
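The pair of constraints per table exists because a plain unique index (which is what `unique_together` maps to in SQL) never treats two rows that differ only in a NULL column as conflicting, so any number of subgroup=NULL duplicates could coexist. The migration therefore splits each uniqueness rule into one conditional constraint for non-NULL subgroups and a narrower one for NULL subgroups. A minimal sketch of the same idea on a hypothetical model:

```python
from django.db import models
from django.db.models import Q, UniqueConstraint


class Counter(models.Model):  # hypothetical model, for illustration only
    property = models.CharField(max_length=32)
    subgroup = models.CharField(max_length=16, null=True)
    end_time = models.DateTimeField()

    class Meta:
        constraints = [
            # Enforced only on rows where subgroup is present.
            UniqueConstraint(
                fields=["property", "subgroup", "end_time"],
                condition=Q(subgroup__isnull=False),
                name="uniq_counter",
            ),
            # Without this second, narrower constraint, rows with
            # subgroup=NULL could be duplicated freely.
            UniqueConstraint(
                fields=["property", "end_time"],
                condition=Q(subgroup__isnull=True),
                name="uniq_counter_null_subgroup",
            ),
        ]
```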
@@ -1,140 +1,100 @@
 import datetime
-from typing import Optional
+from typing import Any, Dict, Optional, Tuple, Union

 from django.db import models
-from django.db.models import Q, UniqueConstraint

 from zerver.lib.timestamp import floor_to_day
-from zerver.models import Realm, Stream, UserProfile
+from zerver.models import Realm, Recipient, Stream, UserProfile


 class FillState(models.Model):
-    property: str = models.CharField(max_length=40, unique=True)
-    end_time: datetime.datetime = models.DateTimeField()
+    property = models.CharField(max_length=40, unique=True)  # type: str
+    end_time = models.DateTimeField()  # type: datetime.datetime

     # Valid states are {DONE, STARTED}
     DONE = 1
     STARTED = 2
-    state: int = models.PositiveSmallIntegerField()
+    state = models.PositiveSmallIntegerField()  # type: int
+
+    last_modified = models.DateTimeField(auto_now=True)  # type: datetime.datetime

     def __str__(self) -> str:
-        return f"<FillState: {self.property} {self.end_time} {self.state}>"
+        return "<FillState: %s %s %s>" % (self.property, self.end_time, self.state)

 # The earliest/starting end_time in FillState
 # We assume there is at least one realm
 def installation_epoch() -> datetime.datetime:
-    earliest_realm_creation = Realm.objects.aggregate(models.Min("date_created"))[
-        "date_created__min"
-    ]
+    earliest_realm_creation = Realm.objects.aggregate(models.Min('date_created'))['date_created__min']
     return floor_to_day(earliest_realm_creation)

 def last_successful_fill(property: str) -> Optional[datetime.datetime]:
     fillstate = FillState.objects.filter(property=property).first()
     if fillstate is None:
         return None
     if fillstate.state == FillState.DONE:
         return fillstate.end_time
     return fillstate.end_time - datetime.timedelta(hours=1)

+# would only ever make entries here by hand
+class Anomaly(models.Model):
+    info = models.CharField(max_length=1000)  # type: str
+
+    def __str__(self) -> str:
+        return "<Anomaly: %s... %s>" % (self.info, self.id)

 class BaseCount(models.Model):
     # Note: When inheriting from BaseCount, you may want to rearrange
     # the order of the columns in the migration to make sure they
     # match how you'd like the table to be arranged.
-    property: str = models.CharField(max_length=32)
-    subgroup: Optional[str] = models.CharField(max_length=16, null=True)
-    end_time: datetime.datetime = models.DateTimeField()
-    value: int = models.BigIntegerField()
+    property = models.CharField(max_length=32)  # type: str
+    subgroup = models.CharField(max_length=16, null=True)  # type: Optional[str]
+    end_time = models.DateTimeField()  # type: datetime.datetime
+    value = models.BigIntegerField()  # type: int
+    anomaly = models.ForeignKey(Anomaly, on_delete=models.SET_NULL, null=True)  # type: Optional[Anomaly]

     class Meta:
         abstract = True


 class InstallationCount(BaseCount):

     class Meta:
-        # Handles invalid duplicate InstallationCount data
-        constraints = [
-            UniqueConstraint(
-                fields=["property", "subgroup", "end_time"],
-                condition=Q(subgroup__isnull=False),
-                name="unique_installation_count",
-            ),
-            UniqueConstraint(
-                fields=["property", "end_time"],
-                condition=Q(subgroup__isnull=True),
-                name="unique_installation_count_null_subgroup",
-            ),
-        ]
+        unique_together = ("property", "subgroup", "end_time")

     def __str__(self) -> str:
-        return f"<InstallationCount: {self.property} {self.subgroup} {self.value}>"
+        return "<InstallationCount: %s %s %s>" % (self.property, self.subgroup, self.value)

 class RealmCount(BaseCount):
     realm = models.ForeignKey(Realm, on_delete=models.CASCADE)

     class Meta:
-        # Handles invalid duplicate RealmCount data
-        constraints = [
-            UniqueConstraint(
-                fields=["realm", "property", "subgroup", "end_time"],
-                condition=Q(subgroup__isnull=False),
-                name="unique_realm_count",
-            ),
-            UniqueConstraint(
-                fields=["realm", "property", "end_time"],
-                condition=Q(subgroup__isnull=True),
-                name="unique_realm_count_null_subgroup",
-            ),
-        ]
+        unique_together = ("realm", "property", "subgroup", "end_time")
         index_together = ["property", "end_time"]

     def __str__(self) -> str:
-        return f"<RealmCount: {self.realm} {self.property} {self.subgroup} {self.value}>"
+        return "<RealmCount: %s %s %s %s>" % (self.realm, self.property, self.subgroup, self.value)

 class UserCount(BaseCount):
     user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
     realm = models.ForeignKey(Realm, on_delete=models.CASCADE)

     class Meta:
-        # Handles invalid duplicate UserCount data
-        constraints = [
-            UniqueConstraint(
-                fields=["user", "property", "subgroup", "end_time"],
-                condition=Q(subgroup__isnull=False),
-                name="unique_user_count",
-            ),
-            UniqueConstraint(
-                fields=["user", "property", "end_time"],
-                condition=Q(subgroup__isnull=True),
-                name="unique_user_count_null_subgroup",
-            ),
-        ]
+        unique_together = ("user", "property", "subgroup", "end_time")
         # This index dramatically improves the performance of
         # aggregating from users to realms
         index_together = ["property", "realm", "end_time"]

     def __str__(self) -> str:
-        return f"<UserCount: {self.user} {self.property} {self.subgroup} {self.value}>"
+        return "<UserCount: %s %s %s %s>" % (self.user, self.property, self.subgroup, self.value)

 class StreamCount(BaseCount):
     stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
     realm = models.ForeignKey(Realm, on_delete=models.CASCADE)

     class Meta:
-        # Handles invalid duplicate StreamCount data
-        constraints = [
-            UniqueConstraint(
-                fields=["stream", "property", "subgroup", "end_time"],
-                condition=Q(subgroup__isnull=False),
-                name="unique_stream_count",
-            ),
-            UniqueConstraint(
-                fields=["stream", "property", "end_time"],
-                condition=Q(subgroup__isnull=True),
-                name="unique_stream_count_null_subgroup",
-            ),
-        ]
+        unique_together = ("stream", "property", "subgroup", "end_time")
         # This index dramatically improves the performance of
         # aggregating from streams to realms
         index_together = ["property", "realm", "end_time"]

     def __str__(self) -> str:
-        return (
-            f"<StreamCount: {self.stream} {self.property} {self.subgroup} {self.value} {self.id}>"
-        )
+        return "<StreamCount: %s %s %s %s %s>" % (
+            self.stream, self.property, self.subgroup, self.value, self.id)
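Both sides of this hunk share the `FillState` bookkeeping: `installation_epoch()` anchors the time series at the creation day of the oldest realm, and `last_successful_fill()` reports how far a stat has been filled, backing a STARTED fill off by one hour since that hour may be incomplete. A plausible caller's-eye sketch (the helper below is hypothetical, not part of this diff):

```python
import datetime


def is_stat_current(property: str, now: datetime.datetime) -> bool:
    # Hypothetical usage: decide whether a stat is fresh enough to chart.
    last_fill = last_successful_fill(property)
    if last_fill is None:
        # No FillState row yet: the stat has never been filled.
        return False
    # A STARTED FillState already reports end_time minus one hour, so a
    # two-hour window tolerates one in-flight fill.
    return now - last_fill < datetime.timedelta(hours=2)
```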
File diff suppressed because it is too large
@@ -2,39 +2,28 @@ from analytics.lib.counts import CountStat
 from analytics.lib.fixtures import generate_time_series_data
 from zerver.lib.test_classes import ZulipTestCase


 # A very light test suite; the code being tested is not run in production.
 class TestFixtures(ZulipTestCase):
     def test_deterministic_settings(self) -> None:
         # test basic business_hour / non_business_hour calculation
         # test we get an array of the right length with frequency=CountStat.DAY
         data = generate_time_series_data(
-            days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0
-        )
+            days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0)
         self.assertEqual(data, [400, 400, 400, 400, 400, 360, 360])

         data = generate_time_series_data(
-            days=1,
-            business_hours_base=2000,
-            non_business_hours_base=1500,
-            growth=2,
-            spikiness=0,
-            frequency=CountStat.HOUR,
-        )
+            days=1, business_hours_base=2000, non_business_hours_base=1500,
+            growth=2, spikiness=0, frequency=CountStat.HOUR)
         # test we get an array of the right length with frequency=CountStat.HOUR
         self.assertEqual(len(data), 24)
         # test that growth doesn't affect the first data point
         self.assertEqual(data[0], 2000)
         # test that the last data point is growth times what it otherwise would be
-        self.assertEqual(data[-1], 1500 * 2)
+        self.assertEqual(data[-1], 1500*2)

         # test autocorrelation == 1, since that's the easiest value to test
         data = generate_time_series_data(
-            days=1,
-            business_hours_base=2000,
-            non_business_hours_base=2000,
-            autocorrelation=1,
-            frequency=CountStat.HOUR,
-        )
+            days=1, business_hours_base=2000, non_business_hours_base=2000,
+            autocorrelation=1, frequency=CountStat.HOUR)
         self.assertEqual(data[0], data[1])
         self.assertEqual(data[0], data[-1])
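The expected values in the first assertion follow from the fixture parameters on one consistent reading of the generator: each weekday counts 8 hours at business_hours_base and 16 at non_business_hours_base, while weekend days count all 24 hours at the non-business rate. The 8/16 split is an inference from the numbers, not something stated in this hunk:

```python
# Assumed split: 8 business + 16 non-business hours per weekday.
weekday_total = 8 * 20 + 16 * 15   # = 400
weekend_total = 24 * 15            # = 360
assert [weekday_total] * 5 + [weekend_total] * 2 == [400, 400, 400, 400, 400, 360, 360]
```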
File diff suppressed because it is too large
@@ -1,38 +1,22 @@
-from django.conf.urls import include
-from django.urls import path
+from django.conf.urls import include, url

-from analytics.views import (
-    get_activity,
-    get_chart_data,
-    get_chart_data_for_installation,
-    get_chart_data_for_realm,
-    get_chart_data_for_remote_installation,
-    get_chart_data_for_remote_realm,
-    get_realm_activity,
-    get_user_activity,
-    stats,
-    stats_for_installation,
-    stats_for_realm,
-    stats_for_remote_installation,
-    stats_for_remote_realm,
-    support,
-)
-from zerver.lib.rest import rest_path
+import analytics.views
+from zerver.lib.rest import rest_dispatch

 i18n_urlpatterns = [
     # Server admin (user_profile.is_staff) visible stats pages
-    path("activity", get_activity),
-    path("activity/support", support, name="support"),
-    path("realm_activity/<realm_str>/", get_realm_activity),
-    path("user_activity/<email>/", get_user_activity),
-    path("stats/realm/<realm_str>/", stats_for_realm),
-    path("stats/installation", stats_for_installation),
-    path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation),
-    path(
-        "stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/", stats_for_remote_realm
-    ),
+    url(r'^activity$', analytics.views.get_activity,
+        name='analytics.views.get_activity'),
+    url(r'^realm_activity/(?P<realm_str>[\S]+)/$', analytics.views.get_realm_activity,
+        name='analytics.views.get_realm_activity'),
+    url(r'^user_activity/(?P<email>[\S]+)/$', analytics.views.get_user_activity,
+        name='analytics.views.get_user_activity'),
+    url(r'^stats/realm/(?P<realm_str>[\S]+)/$', analytics.views.stats_for_realm,
+        name='analytics.views.stats_for_realm'),

     # User-visible stats page
-    path("stats", stats, name="stats"),
+    url(r'^stats$', analytics.views.stats,
+        name='analytics.views.stats'),
 ]

 # These endpoints are a part of the API (V1), which uses:
@@ -45,22 +29,15 @@ i18n_urlpatterns = [
 # All of these paths are accessed by either a /json or /api prefix
 v1_api_and_json_patterns = [
     # get data for the graphs at /stats
-    rest_path("analytics/chart_data", GET=get_chart_data),
-    rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm),
-    rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation),
-    rest_path(
-        "analytics/chart_data/remote/<int:remote_server_id>/installation",
-        GET=get_chart_data_for_remote_installation,
-    ),
-    rest_path(
-        "analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>",
-        GET=get_chart_data_for_remote_realm,
-    ),
+    url(r'^analytics/chart_data$', rest_dispatch,
+        {'GET': 'analytics.views.get_chart_data'}),
+    url(r'^analytics/chart_data/realm/(?P<realm_str>[\S]+)$', rest_dispatch,
+        {'GET': 'analytics.views.get_chart_data_for_realm'}),
 ]

 i18n_urlpatterns += [
-    path("api/v1/", include(v1_api_and_json_patterns)),
-    path("json/", include(v1_api_and_json_patterns)),
+    url(r'^api/v1/', include(v1_api_and_json_patterns)),
+    url(r'^json/', include(v1_api_and_json_patterns)),
 ]

 urlpatterns = i18n_urlpatterns
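The left side of this hunk is Django 2+ `path()` routing with typed converters; the right side is Django 1.x `url()` regexes plus string-based view lookup through `rest_dispatch`. The two styles express the same route. A minimal sketch with a stand-in view, using `re_path` (the modern spelling of `url()`):

```python
from django.urls import path, re_path


def stats_for_realm(request, realm_str):
    # Stand-in view for illustration; not the real analytics view.
    ...


urlpatterns = [
    # Django 2+ converter syntax: realm_str arrives as a str.
    path("stats/realm/<realm_str>/", stats_for_realm),
    # Equivalent regex form; named groups become keyword arguments.
    re_path(r"^stats/realm/(?P<realm_str>[\S]+)/$", stats_for_realm),
]
```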
analytics/views.py (1549 lines): file diff suppressed because it is too large
@@ -1,26 +0,0 @@
-"use strict";
-
-module.exports = {
-    plugins: [
-        [
-            "formatjs",
-            {
-                additionalFunctionNames: ["$t", "$t_html"],
-                overrideIdFn: (id, defaultMessage) => defaultMessage,
-            },
-        ],
-    ],
-    presets: [
-        [
-            "@babel/preset-env",
-            {
-                corejs: "3.6",
-                loose: true, // Loose mode for…of loops are 5× faster in Firefox
-                shippedProposals: true,
-                useBuiltIns: "usage",
-            },
-        ],
-        "@babel/typescript",
-    ],
-    sourceType: "unambiguous",
-};
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
 # Copyright: (c) 2008, Jarek Zgoda <jarek.zgoda@gmail.com>

 # Permission is hereby granted, free of charge, to any person obtaining a
@@ -19,4 +21,4 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 # IN THE SOFTWARE.

-VERSION = (0, 9, "pre")
+VERSION = (0, 9, 'pre')
@@ -1,39 +1,27 @@
-import django.db.models.deletion
-from django.db import migrations, models
+# -*- coding: utf-8 -*-
+from django.db import models, migrations
+import django.db.models.deletion


 class Migration(migrations.Migration):

     dependencies = [
-        ("contenttypes", "0001_initial"),
+        ('contenttypes', '0001_initial'),
     ]

     operations = [
         migrations.CreateModel(
-            name="Confirmation",
+            name='Confirmation',
             fields=[
-                (
-                    "id",
-                    models.AutoField(
-                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
-                    ),
-                ),
-                ("object_id", models.PositiveIntegerField()),
-                ("date_sent", models.DateTimeField(verbose_name="sent")),
-                (
-                    "confirmation_key",
-                    models.CharField(max_length=40, verbose_name="activation key"),
-                ),
-                (
-                    "content_type",
-                    models.ForeignKey(
-                        on_delete=django.db.models.deletion.CASCADE, to="contenttypes.ContentType"
-                    ),
-                ),
+                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+                ('object_id', models.PositiveIntegerField()),
+                ('date_sent', models.DateTimeField(verbose_name='sent')),
+                ('confirmation_key', models.CharField(max_length=40, verbose_name='activation key')),
+                ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
             ],
             options={
-                "verbose_name": "confirmation email",
-                "verbose_name_plural": "confirmation emails",
+                'verbose_name': 'confirmation email',
+                'verbose_name_plural': 'confirmation emails',
             },
             bases=(models.Model,),
         ),
@@ -1,28 +1,21 @@
-import django.utils.timezone
-from django.db import migrations, models
+# -*- coding: utf-8 -*-
+from django.db import models, migrations
+import django.utils.timezone


 class Migration(migrations.Migration):

     dependencies = [
-        ("confirmation", "0001_initial"),
+        ('confirmation', '0001_initial'),
     ]

     operations = [
         migrations.CreateModel(
-            name="RealmCreationKey",
+            name='RealmCreationKey',
             fields=[
-                (
-                    "id",
-                    models.AutoField(
-                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
-                    ),
-                ),
-                ("creation_key", models.CharField(max_length=40, verbose_name="activation key")),
-                (
-                    "date_created",
-                    models.DateTimeField(default=django.utils.timezone.now, verbose_name="created"),
-                ),
+                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
+                ('creation_key', models.CharField(max_length=40, verbose_name='activation key')),
+                ('date_created', models.DateTimeField(default=django.utils.timezone.now, verbose_name='created')),
             ],
         ),
     ]
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # Generated by Django 1.10.4 on 2017-01-17 09:16
 from django.db import migrations

@@ -5,16 +6,17 @@ from django.db import migrations
 class Migration(migrations.Migration):

     dependencies = [
-        ("confirmation", "0002_realmcreationkey"),
+        ('confirmation', '0002_realmcreationkey'),
     ]

     operations = [
         migrations.CreateModel(
-            name="EmailChangeConfirmation",
-            fields=[],
+            name='EmailChangeConfirmation',
+            fields=[
+            ],
             options={
-                "proxy": True,
+                'proxy': True,
             },
-            bases=("confirmation.confirmation",),
+            bases=('confirmation.confirmation',),
         ),
     ]
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 # Generated by Django 1.11.2 on 2017-07-08 04:23
 from django.db import migrations, models

@@ -5,31 +6,31 @@ from django.db import migrations, models
 class Migration(migrations.Migration):

     dependencies = [
-        ("confirmation", "0003_emailchangeconfirmation"),
+        ('confirmation', '0003_emailchangeconfirmation'),
     ]

     operations = [
         migrations.DeleteModel(
-            name="EmailChangeConfirmation",
+            name='EmailChangeConfirmation',
         ),
         migrations.AlterModelOptions(
-            name="confirmation",
+            name='confirmation',
             options={},
         ),
         migrations.AddField(
-            model_name="confirmation",
-            name="type",
+            model_name='confirmation',
+            name='type',
             field=models.PositiveSmallIntegerField(default=1),
             preserve_default=False,
         ),
         migrations.AlterField(
-            model_name="confirmation",
-            name="confirmation_key",
+            model_name='confirmation',
+            name='confirmation_key',
             field=models.CharField(max_length=40),
         ),
         migrations.AlterField(
-            model_name="confirmation",
-            name="date_sent",
+            model_name='confirmation',
+            name='date_sent',
             field=models.DateTimeField(),
         ),
     ]
@@ -1,21 +1,22 @@
+# -*- coding: utf-8 -*-
 # Generated by Django 1.11.6 on 2017-11-30 00:13
-import django.db.models.deletion
+from __future__ import unicode_literals

 from django.db import migrations, models
+import django.db.models.deletion


 class Migration(migrations.Migration):

     dependencies = [
-        ("zerver", "0124_stream_enable_notifications"),
-        ("confirmation", "0004_remove_confirmationmanager"),
+        ('zerver', '0124_stream_enable_notifications'),
+        ('confirmation', '0004_remove_confirmationmanager'),
     ]

     operations = [
         migrations.AddField(
-            model_name="confirmation",
-            name="realm",
-            field=models.ForeignKey(
-                null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
-            ),
+            model_name='confirmation',
+            name='realm',
+            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm'),
         ),
     ]
@@ -1,4 +1,6 @@
+# -*- coding: utf-8 -*-
 # Generated by Django 1.11.6 on 2018-01-29 18:39
+from __future__ import unicode_literals

 from django.db import migrations, models

@@ -6,13 +8,13 @@ from django.db import migrations, models
 class Migration(migrations.Migration):

     dependencies = [
-        ("confirmation", "0005_confirmation_realm"),
+        ('confirmation', '0005_confirmation_realm'),
     ]

     operations = [
         migrations.AddField(
-            model_name="realmcreationkey",
-            name="presume_email_valid",
+            model_name='realmcreationkey',
+            name='presume_email_valid',
             field=models.BooleanField(default=False),
         ),
     ]
@@ -1,37 +0,0 @@
-# Generated by Django 2.2.10 on 2020-03-27 09:02
-
-from django.db import migrations, models
-
-
-class Migration(migrations.Migration):
-
-    dependencies = [
-        ("confirmation", "0006_realmcreationkey_presume_email_valid"),
-    ]
-
-    operations = [
-        migrations.AlterField(
-            model_name="confirmation",
-            name="confirmation_key",
-            field=models.CharField(db_index=True, max_length=40),
-        ),
-        migrations.AlterField(
-            model_name="confirmation",
-            name="date_sent",
-            field=models.DateTimeField(db_index=True),
-        ),
-        migrations.AlterField(
-            model_name="confirmation",
-            name="object_id",
-            field=models.PositiveIntegerField(db_index=True),
-        ),
-        migrations.AlterField(
-            model_name="realmcreationkey",
-            name="creation_key",
-            field=models.CharField(db_index=True, max_length=40, verbose_name="activation key"),
-        ),
-        migrations.AlterUniqueTogether(
-            name="confirmation",
-            unique_together={("type", "confirmation_key")},
-        ),
-    ]
@@ -1,24 +1,28 @@
+# -*- coding: utf-8 -*-

 # Copyright: (c) 2008, Jarek Zgoda <jarek.zgoda@gmail.com>

-__revision__ = "$Id: models.py 28 2009-10-22 15:03:02Z jarek.zgoda $"
+__revision__ = '$Id: models.py 28 2009-10-22 15:03:02Z jarek.zgoda $'

 import datetime
-import secrets
-from base64 import b32encode
-from typing import Mapping, Optional, Union
-from urllib.parse import urljoin

 from django.conf import settings
-from django.contrib.contenttypes.fields import GenericForeignKey
 from django.contrib.contenttypes.models import ContentType
+from django.contrib.contenttypes.fields import GenericForeignKey
 from django.db import models
 from django.db.models import CASCADE
 from django.http import HttpRequest, HttpResponse
 from django.shortcuts import render
 from django.urls import reverse
 from django.utils.timezone import now as timezone_now

-from zerver.models import EmailChangeStatus, MultiuseInvite, PreregistrationUser, Realm, UserProfile
+from zerver.lib.send_email import send_email
+from zerver.lib.utils import generate_random_token
+from zerver.models import PreregistrationUser, EmailChangeStatus, MultiuseInvite, \
+    UserProfile, Realm
+from random import SystemRandom
+import string
+from typing import Any, Dict, Optional, Union

 class ConfirmationKeyException(Exception):
     WRONG_LENGTH = 1
@@ -29,35 +33,27 @@ class ConfirmationKeyException(Exception):
         super().__init__()
         self.error_type = error_type


-def render_confirmation_key_error(
-    request: HttpRequest, exception: ConfirmationKeyException
-) -> HttpResponse:
+def render_confirmation_key_error(request: HttpRequest, exception: ConfirmationKeyException) -> HttpResponse:
     if exception.error_type == ConfirmationKeyException.WRONG_LENGTH:
-        return render(request, "confirmation/link_malformed.html")
+        return render(request, 'confirmation/link_malformed.html')
     if exception.error_type == ConfirmationKeyException.EXPIRED:
-        return render(request, "confirmation/link_expired.html")
-    return render(request, "confirmation/link_does_not_exist.html")
+        return render(request, 'confirmation/link_expired.html')
+    return render(request, 'confirmation/link_does_not_exist.html')

 def generate_key() -> str:
+    generator = SystemRandom()
     # 24 characters * 5 bits of entropy/character = 120 bits of entropy
-    return b32encode(secrets.token_bytes(15)).decode().lower()
+    return ''.join(generator.choice(string.ascii_lowercase + string.digits) for _ in range(24))

 ConfirmationObjT = Union[MultiuseInvite, PreregistrationUser, EmailChangeStatus]

-def get_object_from_key(
-    confirmation_key: str, confirmation_type: int, activate_object: bool = True
-) -> ConfirmationObjT:
+def get_object_from_key(confirmation_key: str,
+                        confirmation_type: int) -> ConfirmationObjT:
     # Confirmation keys used to be 40 characters
     if len(confirmation_key) not in (24, 40):
         raise ConfirmationKeyException(ConfirmationKeyException.WRONG_LENGTH)
     try:
-        confirmation = Confirmation.objects.get(
-            confirmation_key=confirmation_key, type=confirmation_type
-        )
+        confirmation = Confirmation.objects.get(confirmation_key=confirmation_key,
+                                                type=confirmation_type)
     except Confirmation.DoesNotExist:
         raise ConfirmationKeyException(ConfirmationKeyException.DOES_NOT_EXIST)
@@ -66,53 +62,35 @@ def get_object_from_key(
         raise ConfirmationKeyException(ConfirmationKeyException.EXPIRED)

     obj = confirmation.content_object
-    if activate_object and hasattr(obj, "status"):
-        obj.status = getattr(settings, "STATUS_ACTIVE", 1)
-        obj.save(update_fields=["status"])
+    if hasattr(obj, "status"):
+        obj.status = getattr(settings, 'STATUS_ACTIVE', 1)
+        obj.save(update_fields=['status'])
     return obj


-def create_confirmation_link(
-    obj: ContentType, confirmation_type: int, url_args: Mapping[str, str] = {}
-) -> str:
+def create_confirmation_link(obj: ContentType, host: str,
+                             confirmation_type: int,
+                             url_args: Optional[Dict[str, str]]=None) -> str:
     key = generate_key()
-    realm = None
-    if hasattr(obj, "realm"):
-        realm = obj.realm
-    elif isinstance(obj, Realm):
-        realm = obj
-
-    Confirmation.objects.create(
-        content_object=obj,
-        date_sent=timezone_now(),
-        confirmation_key=key,
-        realm=realm,
-        type=confirmation_type,
-    )
-    return confirmation_url(key, realm, confirmation_type, url_args)
-
-
-def confirmation_url(
-    confirmation_key: str,
-    realm: Optional[Realm],
-    confirmation_type: int,
-    url_args: Mapping[str, str] = {},
-) -> str:
-    url_args = dict(url_args)
-    url_args["confirmation_key"] = confirmation_key
-    return urljoin(
-        settings.ROOT_DOMAIN_URI if realm is None else realm.uri,
-        reverse(_properties[confirmation_type].url_name, kwargs=url_args),
-    )
+    Confirmation.objects.create(content_object=obj, date_sent=timezone_now(), confirmation_key=key,
+                                realm=obj.realm, type=confirmation_type)
+    return confirmation_url(key, host, confirmation_type, url_args)
+
+def confirmation_url(confirmation_key: str, host: str,
+                     confirmation_type: int,
+                     url_args: Optional[Dict[str, str]]=None) -> str:
+    if url_args is None:
+        url_args = {}
+    url_args['confirmation_key'] = confirmation_key
+    return '%s%s%s' % (settings.EXTERNAL_URI_SCHEME, host,
+                       reverse(_properties[confirmation_type].url_name, kwargs=url_args))

 class Confirmation(models.Model):
     content_type = models.ForeignKey(ContentType, on_delete=CASCADE)
-    object_id: int = models.PositiveIntegerField(db_index=True)
-    content_object = GenericForeignKey("content_type", "object_id")
-    date_sent: datetime.datetime = models.DateTimeField(db_index=True)
-    confirmation_key: str = models.CharField(max_length=40, db_index=True)
-    realm: Optional[Realm] = models.ForeignKey(Realm, null=True, on_delete=CASCADE)
+    object_id = models.PositiveIntegerField()  # type: int
+    content_object = GenericForeignKey('content_type', 'object_id')
+    date_sent = models.DateTimeField()  # type: datetime.datetime
+    confirmation_key = models.CharField(max_length=40)  # type: str
+    realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE)  # type: Optional[Realm]

     # The following list is the set of valid types
     USER_REGISTRATION = 1
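Both versions of `confirmation_url` above build an absolute URL from a base plus a `reverse()`d path: the newer one joins against the realm's URI with `urljoin`, the older one concatenates scheme and host by hand. A small sketch of the `urljoin` behaviour with made-up values:

```python
from urllib.parse import urljoin

base = "https://chat.example.com"         # stand-in for realm.uri (made-up value)
path = "/accounts/do_confirm/abc123"      # stand-in for what reverse() returns (made-up)
# urljoin replaces the path of the base URL with the absolute path.
assert urljoin(base, path) == "https://chat.example.com/accounts/do_confirm/abc123"
```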
@@ -122,54 +100,30 @@ class Confirmation(models.Model):
     SERVER_REGISTRATION = 5
     MULTIUSE_INVITE = 6
     REALM_CREATION = 7
-    REALM_REACTIVATION = 8
-    type: int = models.PositiveSmallIntegerField()
+    type = models.PositiveSmallIntegerField()  # type: int

     def __str__(self) -> str:
-        return f"<Confirmation: {self.content_object}>"
-
-    class Meta:
-        unique_together = ("type", "confirmation_key")
+        return '<Confirmation: %s>' % (self.content_object,)

 class ConfirmationType:
-    def __init__(
-        self,
-        url_name: str,
-        validity_in_days: int = settings.CONFIRMATION_LINK_DEFAULT_VALIDITY_DAYS,
-    ) -> None:
+    def __init__(self, url_name: str,
+                 validity_in_days: int=settings.CONFIRMATION_LINK_DEFAULT_VALIDITY_DAYS) -> None:
         self.url_name = url_name
         self.validity_in_days = validity_in_days


 _properties = {
-    Confirmation.USER_REGISTRATION: ConfirmationType("check_prereg_key_and_redirect"),
-    Confirmation.INVITATION: ConfirmationType(
-        "check_prereg_key_and_redirect", validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS
-    ),
-    Confirmation.EMAIL_CHANGE: ConfirmationType("confirm_email_change"),
-    Confirmation.UNSUBSCRIBE: ConfirmationType(
-        "unsubscribe",
-        validity_in_days=1000000,  # should never expire
-    ),
+    Confirmation.USER_REGISTRATION: ConfirmationType('check_prereg_key_and_redirect'),
+    Confirmation.INVITATION: ConfirmationType('check_prereg_key_and_redirect',
+                                              validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS),
+    Confirmation.EMAIL_CHANGE: ConfirmationType('zerver.views.user_settings.confirm_email_change'),
+    Confirmation.UNSUBSCRIBE: ConfirmationType('zerver.views.unsubscribe.email_unsubscribe',
+                                               validity_in_days=1000000),  # should never expire
     Confirmation.MULTIUSE_INVITE: ConfirmationType(
-        "join", validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS
-    ),
-    Confirmation.REALM_CREATION: ConfirmationType("check_prereg_key_and_redirect"),
-    Confirmation.REALM_REACTIVATION: ConfirmationType("realm_reactivation"),
+        'zerver.views.registration.accounts_home_from_multiuse_invite',
+        validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS),
+    Confirmation.REALM_CREATION: ConfirmationType('check_prereg_key_and_redirect'),
 }


 def one_click_unsubscribe_link(user_profile: UserProfile, email_type: str) -> str:
     """
     Generate a unique link that a logged-out user can visit to unsubscribe from
     Zulip e-mails without having to first log in.
     """
-    return create_confirmation_link(
-        user_profile, Confirmation.UNSUBSCRIBE, url_args={"email_type": email_type}
-    )

 # Functions related to links generated by the generate_realm_creation_link.py
 # management command.
 # Note that being validated here will just allow the user to access the create_realm
@@ -178,8 +132,7 @@ def one_click_unsubscribe_link(user_profile: UserProfile, email_type: str) -> st
 # Arguably RealmCreationKey should just be another ConfirmationObjT and we should
 # add another Confirmation.type for this; it's this way for historical reasons.


-def validate_key(creation_key: Optional[str]) -> Optional["RealmCreationKey"]:
+def validate_key(creation_key: Optional[str]) -> Optional['RealmCreationKey']:
     """Get the record for this key, raising InvalidCreationKey if non-None but invalid."""
     if creation_key is None:
         return None
@@ -192,25 +145,23 @@ def validate_key(creation_key: Optional[str]) -> Optional["RealmCreationKey"]:
|
||||
raise RealmCreationKey.Invalid()
|
||||
return key_record
|
||||
|
||||
|
||||
def generate_realm_creation_url(by_admin: bool = False) -> str:
|
||||
def generate_realm_creation_url(by_admin: bool=False) -> str:
|
||||
key = generate_key()
|
||||
RealmCreationKey.objects.create(
|
||||
creation_key=key, date_created=timezone_now(), presume_email_valid=by_admin
|
||||
)
|
||||
return urljoin(
|
||||
settings.ROOT_DOMAIN_URI,
|
||||
reverse("create_realm", kwargs={"creation_key": key}),
|
||||
)
|
||||
|
||||
RealmCreationKey.objects.create(creation_key=key,
|
||||
date_created=timezone_now(),
|
||||
presume_email_valid=by_admin)
|
||||
return '%s%s%s' % (settings.EXTERNAL_URI_SCHEME,
|
||||
settings.EXTERNAL_HOST,
|
||||
reverse('zerver.views.create_realm',
|
||||
kwargs={'creation_key': key}))
|
||||
|
||||
class RealmCreationKey(models.Model):
|
||||
creation_key = models.CharField("activation key", db_index=True, max_length=40)
|
||||
date_created = models.DateTimeField("created", default=timezone_now)
|
||||
creation_key = models.CharField('activation key', max_length=40)
|
||||
date_created = models.DateTimeField('created', default=timezone_now)
|
||||
|
||||
# True just if we should presume the email address the user enters
|
||||
# is theirs, and skip sending mail to it to confirm that.
|
||||
presume_email_valid: bool = models.BooleanField(default=False)
|
||||
presume_email_valid = models.BooleanField(default=False) # type: bool
|
||||
|
||||
class Invalid(Exception):
|
||||
pass

@@ -1,6 +1,9 @@
# -*- coding: utf-8 -*-

# Copyright: (c) 2008, Jarek Zgoda <jarek.zgoda@gmail.com>

__revision__ = "$Id: settings.py 12 2008-11-23 19:38:52Z jarek.zgoda $"
from typing import Any, Dict

__revision__ = '$Id: settings.py 12 2008-11-23 19:38:52Z jarek.zgoda $'

STATUS_ACTIVE = 1
STATUS_REVOKED = 2

@@ -1,897 +0,0 @@
import logging
import math
import os
import secrets
from datetime import datetime, timedelta
from decimal import Decimal
from functools import wraps
from typing import Callable, Dict, Optional, Tuple, TypeVar, cast

import orjson
import stripe
from django.conf import settings
from django.core.signing import Signer
from django.db import transaction
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
from django.utils.translation import override as override_language

from corporate.models import (
    Customer,
    CustomerPlan,
    LicenseLedger,
    get_current_plan_by_customer,
    get_current_plan_by_realm,
    get_customer_by_realm,
)
from zerver.lib.logging_util import log_to_file
from zerver.lib.timestamp import datetime_to_timestamp, timestamp_to_datetime
from zerver.models import Realm, RealmAuditLog, UserProfile, get_system_bot
from zproject.config import get_secret

STRIPE_PUBLISHABLE_KEY = get_secret("stripe_publishable_key")
stripe.api_key = get_secret("stripe_secret_key")

BILLING_LOG_PATH = os.path.join(
    "/var/log/zulip" if not settings.DEVELOPMENT else settings.DEVELOPMENT_LOG_DIRECTORY,
    "billing.log",
)
billing_logger = logging.getLogger("corporate.stripe")
log_to_file(billing_logger, BILLING_LOG_PATH)
log_to_file(logging.getLogger("stripe"), BILLING_LOG_PATH)

CallableT = TypeVar("CallableT", bound=Callable[..., object])

MIN_INVOICED_LICENSES = 30
MAX_INVOICED_LICENSES = 1000
DEFAULT_INVOICE_DAYS_UNTIL_DUE = 30


def get_latest_seat_count(realm: Realm) -> int:
    non_guests = (
        UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False)
        .exclude(role=UserProfile.ROLE_GUEST)
        .count()
    )
    guests = UserProfile.objects.filter(
        realm=realm, is_active=True, is_bot=False, role=UserProfile.ROLE_GUEST
    ).count()
    return max(non_guests, math.ceil(guests / 5))
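
# A worked illustration of the seat-count rule above (hypothetical numbers,
# not from this diff): guests bill at a 5:1 ratio, so a realm pays for
# whichever is larger, its non-guest count or one-fifth of its guest count
# rounded up. Uses the module-level `math` import.
assert max(10, math.ceil(30 / 5)) == 10  # 10 members, 30 guests -> 10 seats
assert max(2, math.ceil(30 / 5)) == 6  # 2 members, 30 guests -> 6 seats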


def sign_string(string: str) -> Tuple[str, str]:
    salt = secrets.token_hex(32)
    signer = Signer(salt=salt)
    return signer.sign(string), salt


def unsign_string(signed_string: str, salt: str) -> str:
    signer = Signer(salt=salt)
    return signer.unsign(signed_string)
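
# Round-trip sketch for the two helpers above (hypothetical values; assumes
# Django settings with a SECRET_KEY are configured, since Signer signs with
# it). Unsigning with the same salt returns the original string; a tampered
# payload or wrong salt raises django.core.signing.BadSignature.
signed, salt = sign_string("42")
assert unsign_string(signed, salt) == "42"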


# Be extremely careful changing this function. Historical billing periods
# are not stored anywhere, and are just computed on the fly using this
# function. Any change you make here should return the same value (or be
# within a few seconds) for basically any value from when the billing system
# went online to within a year from now.
def add_months(dt: datetime, months: int) -> datetime:
    assert months >= 0
    # It's fine that the max day in Feb is 28 for leap years.
    MAX_DAY_FOR_MONTH = {
        1: 31,
        2: 28,
        3: 31,
        4: 30,
        5: 31,
        6: 30,
        7: 31,
        8: 31,
        9: 30,
        10: 31,
        11: 30,
        12: 31,
    }
    year = dt.year
    month = dt.month + months
    while month > 12:
        year += 1
        month -= 12
    day = min(dt.day, MAX_DAY_FOR_MONTH[month])
    # datetimes don't support leap seconds, so don't need to worry about those
    return dt.replace(year=year, month=month, day=day)
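
# Illustrative calls (hypothetical dates, not from this diff) showing the
# day-of-month clamping; February is always treated as 28 days, even in a
# leap year like 2020:
assert add_months(datetime(2020, 1, 31), 1) == datetime(2020, 2, 28)
assert add_months(datetime(2020, 1, 31), 2) == datetime(2020, 3, 31)
assert add_months(datetime(2020, 11, 30), 3) == datetime(2021, 2, 28)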


def next_month(billing_cycle_anchor: datetime, dt: datetime) -> datetime:
    estimated_months = round((dt - billing_cycle_anchor).days * 12.0 / 365)
    for months in range(max(estimated_months - 1, 0), estimated_months + 2):
        proposed_next_month = add_months(billing_cycle_anchor, months)
        if 20 < (proposed_next_month - dt).days < 40:
            return proposed_next_month
    raise AssertionError(
        "Something wrong in next_month calculation with "
        f"billing_cycle_anchor: {billing_cycle_anchor}, dt: {dt}"
    )
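
# A hypothetical illustration of the 20-40 day window above: anchored on
# Jan 31, the month after Feb 28 is Mar 31 (32 days later), so the cycle
# stays pinned to the anchor's day-of-month rather than drifting to the 28th.
assert next_month(datetime(2020, 1, 31), datetime(2020, 2, 28)) == datetime(2020, 3, 31)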


def start_of_next_billing_cycle(plan: CustomerPlan, event_time: datetime) -> datetime:
    if plan.status == CustomerPlan.FREE_TRIAL:
        assert plan.next_invoice_date is not None  # for mypy
        return plan.next_invoice_date

    months_per_period = {
        CustomerPlan.ANNUAL: 12,
        CustomerPlan.MONTHLY: 1,
    }[plan.billing_schedule]
    periods = 1
    dt = plan.billing_cycle_anchor
    while dt <= event_time:
        dt = add_months(plan.billing_cycle_anchor, months_per_period * periods)
        periods += 1
    return dt


def next_invoice_date(plan: CustomerPlan) -> Optional[datetime]:
    if plan.status == CustomerPlan.ENDED:
        return None
    assert plan.next_invoice_date is not None  # for mypy
    months_per_period = {
        CustomerPlan.ANNUAL: 12,
        CustomerPlan.MONTHLY: 1,
    }[plan.billing_schedule]
    if plan.automanage_licenses:
        months_per_period = 1
    periods = 1
    dt = plan.billing_cycle_anchor
    while dt <= plan.next_invoice_date:
        dt = add_months(plan.billing_cycle_anchor, months_per_period * periods)
        periods += 1
    return dt


def renewal_amount(plan: CustomerPlan, event_time: datetime) -> int:  # nocoverage: TODO
    if plan.fixed_price is not None:
        return plan.fixed_price
    new_plan, last_ledger_entry = make_end_of_cycle_updates_if_needed(plan, event_time)
    if last_ledger_entry is None:
        return 0
    if last_ledger_entry.licenses_at_next_renewal is None:
        return 0
    if new_plan is not None:
        plan = new_plan
    assert plan.price_per_license is not None  # for mypy
    return plan.price_per_license * last_ledger_entry.licenses_at_next_renewal


def get_idempotency_key(ledger_entry: LicenseLedger) -> Optional[str]:
    if settings.TEST_SUITE:
        return None
    return f"ledger_entry:{ledger_entry.id}"  # nocoverage


class BillingError(Exception):
    # error messages
    CONTACT_SUPPORT = gettext_lazy("Something went wrong. Please contact {email}.")
    TRY_RELOADING = gettext_lazy("Something went wrong. Please reload the page.")

    # description is used only for tests
    def __init__(self, description: str, message: Optional[str] = None) -> None:
        self.description = description
        if message is None:
            message = BillingError.CONTACT_SUPPORT.format(email=settings.ZULIP_ADMINISTRATOR)
        self.message = message


class StripeCardError(BillingError):
    pass


class StripeConnectionError(BillingError):
    pass


class InvalidBillingSchedule(Exception):
    def __init__(self, billing_schedule: int) -> None:
        self.message = f"Unknown billing_schedule: {billing_schedule}"
        super().__init__(self.message)


def catch_stripe_errors(func: CallableT) -> CallableT:
    @wraps(func)
    def wrapped(*args: object, **kwargs: object) -> object:
        if settings.DEVELOPMENT and not settings.TEST_SUITE:  # nocoverage
            if STRIPE_PUBLISHABLE_KEY is None:
                raise BillingError(
                    "missing stripe config",
                    "Missing Stripe config. "
                    "See https://zulip.readthedocs.io/en/latest/subsystems/billing.html.",
                )
        try:
            return func(*args, **kwargs)
        # See https://stripe.com/docs/api/python#error_handling, though
        # https://stripe.com/docs/api/ruby#error_handling suggests there are additional fields, and
        # https://stripe.com/docs/error-codes gives a more detailed set of error codes
        except stripe.error.StripeError as e:
            err = e.json_body.get("error", {})
            if isinstance(e, stripe.error.CardError):
                billing_logger.info(
                    "Stripe card error: %s %s %s %s",
                    e.http_status,
                    err.get("type"),
                    err.get("code"),
                    err.get("param"),
                )
                # TODO: Look into i18n for this
                raise StripeCardError("card error", err.get("message"))
            billing_logger.error(
                "Stripe error: %s %s %s %s",
                e.http_status,
                err.get("type"),
                err.get("code"),
                err.get("param"),
            )
            if isinstance(
                e, (stripe.error.RateLimitError, stripe.error.APIConnectionError)
            ):  # nocoverage TODO
                raise StripeConnectionError(
                    "stripe connection error",
                    _("Something went wrong. Please wait a few seconds and try again."),
                )
            raise BillingError("other stripe error")

    return cast(CallableT, wrapped)


@catch_stripe_errors
def stripe_get_customer(stripe_customer_id: str) -> stripe.Customer:
    return stripe.Customer.retrieve(stripe_customer_id, expand=["default_source"])


@catch_stripe_errors
def do_create_stripe_customer(user: UserProfile, stripe_token: Optional[str] = None) -> Customer:
    realm = user.realm
    # We could do a better job of handling race conditions here, but if two
    # people from a realm try to upgrade at exactly the same time, the main
    # bad thing that will happen is that we will create an extra stripe
    # customer that we can delete or ignore.
    stripe_customer = stripe.Customer.create(
        description=f"{realm.string_id} ({realm.name})",
        email=user.delivery_email,
        metadata={"realm_id": realm.id, "realm_str": realm.string_id},
        source=stripe_token,
    )
    event_time = timestamp_to_datetime(stripe_customer.created)
    with transaction.atomic():
        RealmAuditLog.objects.create(
            realm=user.realm,
            acting_user=user,
            event_type=RealmAuditLog.STRIPE_CUSTOMER_CREATED,
            event_time=event_time,
        )
        if stripe_token is not None:
            RealmAuditLog.objects.create(
                realm=user.realm,
                acting_user=user,
                event_type=RealmAuditLog.STRIPE_CARD_CHANGED,
                event_time=event_time,
            )
        customer, created = Customer.objects.update_or_create(
            realm=realm, defaults={"stripe_customer_id": stripe_customer.id}
        )
        user.is_billing_admin = True
        user.save(update_fields=["is_billing_admin"])
    return customer


@catch_stripe_errors
def do_replace_payment_source(
    user: UserProfile, stripe_token: str, pay_invoices: bool = False
) -> stripe.Customer:
    customer = get_customer_by_realm(user.realm)
    assert customer is not None  # for mypy

    stripe_customer = stripe_get_customer(customer.stripe_customer_id)
    stripe_customer.source = stripe_token
    # Deletes existing card: https://stripe.com/docs/api#update_customer-source
    updated_stripe_customer = stripe.Customer.save(stripe_customer)
    RealmAuditLog.objects.create(
        realm=user.realm,
        acting_user=user,
        event_type=RealmAuditLog.STRIPE_CARD_CHANGED,
        event_time=timezone_now(),
    )
    if pay_invoices:
        for stripe_invoice in stripe.Invoice.list(
            billing="charge_automatically", customer=stripe_customer.id, status="open"
        ):
            # The user will get either a receipt or a "failed payment" email, but the in-app
            # messaging could be clearer here (e.g. it could explicitly tell the user that there
            # were payment(s) and that they succeeded or failed).
            # Worth fixing if we notice that a lot of cards end up failing at this step.
            stripe.Invoice.pay(stripe_invoice)
    return updated_stripe_customer


# event_time should roughly be timezone_now(). Not designed to handle
# event_times in the past or future
@transaction.atomic
def make_end_of_cycle_updates_if_needed(
    plan: CustomerPlan, event_time: datetime
) -> Tuple[Optional[CustomerPlan], Optional[LicenseLedger]]:
    last_ledger_entry = LicenseLedger.objects.filter(plan=plan).order_by("-id").first()
    last_renewal = (
        LicenseLedger.objects.filter(plan=plan, is_renewal=True).order_by("-id").first().event_time
    )
    next_billing_cycle = start_of_next_billing_cycle(plan, last_renewal)
    if next_billing_cycle <= event_time:
        if plan.status == CustomerPlan.ACTIVE:
            return None, LicenseLedger.objects.create(
                plan=plan,
                is_renewal=True,
                event_time=next_billing_cycle,
                licenses=last_ledger_entry.licenses_at_next_renewal,
                licenses_at_next_renewal=last_ledger_entry.licenses_at_next_renewal,
            )
        if plan.status == CustomerPlan.FREE_TRIAL:
            plan.invoiced_through = last_ledger_entry
            assert plan.next_invoice_date is not None
            plan.billing_cycle_anchor = plan.next_invoice_date.replace(microsecond=0)
            plan.status = CustomerPlan.ACTIVE
            plan.save(update_fields=["invoiced_through", "billing_cycle_anchor", "status"])
            return None, LicenseLedger.objects.create(
                plan=plan,
                is_renewal=True,
                event_time=next_billing_cycle,
                licenses=last_ledger_entry.licenses_at_next_renewal,
                licenses_at_next_renewal=last_ledger_entry.licenses_at_next_renewal,
            )

        if plan.status == CustomerPlan.SWITCH_TO_ANNUAL_AT_END_OF_CYCLE:
            if plan.fixed_price is not None:  # nocoverage
                raise NotImplementedError("Can't switch fixed priced monthly plan to annual.")

            plan.status = CustomerPlan.ENDED
            plan.save(update_fields=["status"])

            discount = plan.customer.default_discount or plan.discount
            _, _, _, price_per_license = compute_plan_parameters(
                automanage_licenses=plan.automanage_licenses,
                billing_schedule=CustomerPlan.ANNUAL,
                discount=plan.discount,
            )

            new_plan = CustomerPlan.objects.create(
                customer=plan.customer,
                billing_schedule=CustomerPlan.ANNUAL,
                automanage_licenses=plan.automanage_licenses,
                charge_automatically=plan.charge_automatically,
                price_per_license=price_per_license,
                discount=discount,
                billing_cycle_anchor=next_billing_cycle,
                tier=plan.tier,
                status=CustomerPlan.ACTIVE,
                next_invoice_date=next_billing_cycle,
                invoiced_through=None,
                invoicing_status=CustomerPlan.INITIAL_INVOICE_TO_BE_SENT,
            )

            new_plan_ledger_entry = LicenseLedger.objects.create(
                plan=new_plan,
                is_renewal=True,
                event_time=next_billing_cycle,
                licenses=last_ledger_entry.licenses_at_next_renewal,
                licenses_at_next_renewal=last_ledger_entry.licenses_at_next_renewal,
            )

            RealmAuditLog.objects.create(
                realm=new_plan.customer.realm,
                event_time=event_time,
                event_type=RealmAuditLog.CUSTOMER_SWITCHED_FROM_MONTHLY_TO_ANNUAL_PLAN,
                extra_data=orjson.dumps(
                    {
                        "monthly_plan_id": plan.id,
                        "annual_plan_id": new_plan.id,
                    }
                ).decode(),
            )
            return new_plan, new_plan_ledger_entry

        if plan.status == CustomerPlan.DOWNGRADE_AT_END_OF_CYCLE:
            process_downgrade(plan)
        return None, None
    return None, last_ledger_entry


# Returns Customer instead of stripe_customer so that we don't make a Stripe
# API call if there's nothing to update
def update_or_create_stripe_customer(
    user: UserProfile, stripe_token: Optional[str] = None
) -> Customer:
    realm = user.realm
    customer = get_customer_by_realm(realm)
    if customer is None or customer.stripe_customer_id is None:
        return do_create_stripe_customer(user, stripe_token=stripe_token)
    if stripe_token is not None:
        do_replace_payment_source(user, stripe_token)
    return customer


def calculate_discounted_price_per_license(
    original_price_per_license: int, discount: Decimal
) -> int:
    # There are no fractional cents in Stripe, so round down to nearest integer.
    return int(float(original_price_per_license * (1 - discount / 100)) + 0.00001)
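
# Worked examples of the rounding above (standard prices, hypothetical
# discounts): prices are integer cents, so fractional cents truncate down.
assert calculate_discounted_price_per_license(8000, Decimal(85)) == 1200  # $80 -> $12
assert calculate_discounted_price_per_license(800, Decimal("33.33")) == 533  # 533.36 -> 533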


def get_price_per_license(
    tier: int, billing_schedule: int, discount: Optional[Decimal] = None
) -> int:
    # TODO use variables to account for Zulip Plus
    assert tier == CustomerPlan.STANDARD

    price_per_license: Optional[int] = None
    if billing_schedule == CustomerPlan.ANNUAL:
        price_per_license = 8000
    elif billing_schedule == CustomerPlan.MONTHLY:
        price_per_license = 800
    else:  # nocoverage
        raise InvalidBillingSchedule(billing_schedule)
    if discount is not None:
        price_per_license = calculate_discounted_price_per_license(price_per_license, discount)
    return price_per_license


def compute_plan_parameters(
    automanage_licenses: bool,
    billing_schedule: int,
    discount: Optional[Decimal],
    free_trial: bool = False,
) -> Tuple[datetime, datetime, datetime, int]:
    # Everything in Stripe is stored as timestamps with 1 second resolution,
    # so standardize on 1 second resolution.
    # TODO talk about leapseconds?
    billing_cycle_anchor = timezone_now().replace(microsecond=0)
    if billing_schedule == CustomerPlan.ANNUAL:
        period_end = add_months(billing_cycle_anchor, 12)
    elif billing_schedule == CustomerPlan.MONTHLY:
        period_end = add_months(billing_cycle_anchor, 1)
    else:  # nocoverage
        raise InvalidBillingSchedule(billing_schedule)

    price_per_license = get_price_per_license(CustomerPlan.STANDARD, billing_schedule, discount)

    next_invoice_date = period_end
    if automanage_licenses:
        next_invoice_date = add_months(billing_cycle_anchor, 1)
    if free_trial:
        period_end = billing_cycle_anchor + timedelta(days=settings.FREE_TRIAL_DAYS)
        next_invoice_date = period_end
    return billing_cycle_anchor, next_invoice_date, period_end, price_per_license


def decimal_to_float(obj: object) -> object:
    if isinstance(obj, Decimal):
        return float(obj)
    raise TypeError  # nocoverage


# Only used for cloud signups
@catch_stripe_errors
def process_initial_upgrade(
    user: UserProfile,
    licenses: int,
    automanage_licenses: bool,
    billing_schedule: int,
    stripe_token: Optional[str],
) -> None:
    realm = user.realm
    customer = update_or_create_stripe_customer(user, stripe_token=stripe_token)
    charge_automatically = stripe_token is not None
    free_trial = settings.FREE_TRIAL_DAYS not in (None, 0)

    if get_current_plan_by_customer(customer) is not None:
        # Unlikely race condition from two people upgrading (clicking "Make payment")
        # at exactly the same time. Doesn't fully resolve the race condition, but having
        # a check here reduces the likelihood.
        billing_logger.warning(
            "Customer %s trying to upgrade, but has an active subscription",
            customer,
        )
        raise BillingError(
            "subscribing with existing subscription", str(BillingError.TRY_RELOADING)
        )

    (
        billing_cycle_anchor,
        next_invoice_date,
        period_end,
        price_per_license,
    ) = compute_plan_parameters(
        automanage_licenses, billing_schedule, customer.default_discount, free_trial
    )
    # The main design constraint in this function is that if you upgrade with a credit card, and the
    # charge fails, everything should be rolled back as if nothing had happened. This is because we
    # expect frequent card failures on initial signup.
    # Hence, if we're going to charge a card, do it at the beginning, even if we later may have to
    # adjust the number of licenses.
    if charge_automatically:
        if not free_trial:
            stripe_charge = stripe.Charge.create(
                amount=price_per_license * licenses,
                currency="usd",
                customer=customer.stripe_customer_id,
                description=f"Upgrade to Zulip Standard, ${price_per_license/100} x {licenses}",
                receipt_email=user.delivery_email,
                statement_descriptor="Zulip Standard",
            )
            # Not setting a period start and end, but maybe we should? Unclear what will make things
            # most similar to the renewal case from an accounting perspective.
            assert isinstance(stripe_charge.source, stripe.Card)
            description = f"Payment (Card ending in {stripe_charge.source.last4})"
            stripe.InvoiceItem.create(
                amount=price_per_license * licenses * -1,
                currency="usd",
                customer=customer.stripe_customer_id,
                description=description,
                discountable=False,
            )

    # TODO: The correctness of this relies on user creation, deactivation, etc being
    # in a transaction.atomic() with the relevant RealmAuditLog entries
    with transaction.atomic():
        # billed_licenses can be greater than licenses if users are added between the start of
        # this function (process_initial_upgrade) and now
        billed_licenses = max(get_latest_seat_count(realm), licenses)
        plan_params = {
            "automanage_licenses": automanage_licenses,
            "charge_automatically": charge_automatically,
            "price_per_license": price_per_license,
            "discount": customer.default_discount,
            "billing_cycle_anchor": billing_cycle_anchor,
            "billing_schedule": billing_schedule,
            "tier": CustomerPlan.STANDARD,
        }
        if free_trial:
            plan_params["status"] = CustomerPlan.FREE_TRIAL
        plan = CustomerPlan.objects.create(
            customer=customer, next_invoice_date=next_invoice_date, **plan_params
        )
        ledger_entry = LicenseLedger.objects.create(
            plan=plan,
            is_renewal=True,
            event_time=billing_cycle_anchor,
            licenses=billed_licenses,
            licenses_at_next_renewal=billed_licenses,
        )
        plan.invoiced_through = ledger_entry
        plan.save(update_fields=["invoiced_through"])
        RealmAuditLog.objects.create(
            realm=realm,
            acting_user=user,
            event_time=billing_cycle_anchor,
            event_type=RealmAuditLog.CUSTOMER_PLAN_CREATED,
            extra_data=orjson.dumps(plan_params, default=decimal_to_float).decode(),
        )

    if not free_trial:
        stripe.InvoiceItem.create(
            currency="usd",
            customer=customer.stripe_customer_id,
            description="Zulip Standard",
            discountable=False,
            period={
                "start": datetime_to_timestamp(billing_cycle_anchor),
                "end": datetime_to_timestamp(period_end),
            },
            quantity=billed_licenses,
            unit_amount=price_per_license,
        )

        if charge_automatically:
            billing_method = "charge_automatically"
            days_until_due = None
        else:
            billing_method = "send_invoice"
            days_until_due = DEFAULT_INVOICE_DAYS_UNTIL_DUE

        stripe_invoice = stripe.Invoice.create(
            auto_advance=True,
            billing=billing_method,
            customer=customer.stripe_customer_id,
            days_until_due=days_until_due,
            statement_descriptor="Zulip Standard",
        )
        stripe.Invoice.finalize_invoice(stripe_invoice)

    from zerver.lib.actions import do_change_plan_type

    do_change_plan_type(realm, Realm.STANDARD, acting_user=user)


def update_license_ledger_for_automanaged_plan(
    realm: Realm, plan: CustomerPlan, event_time: datetime
) -> None:
    new_plan, last_ledger_entry = make_end_of_cycle_updates_if_needed(plan, event_time)
    if last_ledger_entry is None:
        return
    if new_plan is not None:
        plan = new_plan
    licenses_at_next_renewal = get_latest_seat_count(realm)
    licenses = max(licenses_at_next_renewal, last_ledger_entry.licenses)

    LicenseLedger.objects.create(
        plan=plan,
        event_time=event_time,
        licenses=licenses,
        licenses_at_next_renewal=licenses_at_next_renewal,
    )


def update_license_ledger_if_needed(realm: Realm, event_time: datetime) -> None:
    plan = get_current_plan_by_realm(realm)
    if plan is None:
        return
    if not plan.automanage_licenses:
        return
    update_license_ledger_for_automanaged_plan(realm, plan, event_time)


def invoice_plan(plan: CustomerPlan, event_time: datetime) -> None:
    if plan.invoicing_status == CustomerPlan.STARTED:
        raise NotImplementedError("Plan with invoicing_status==STARTED needs manual resolution.")
    make_end_of_cycle_updates_if_needed(plan, event_time)

    if plan.invoicing_status == CustomerPlan.INITIAL_INVOICE_TO_BE_SENT:
        invoiced_through_id = -1
        licenses_base = None
    else:
        assert plan.invoiced_through is not None
        licenses_base = plan.invoiced_through.licenses
        invoiced_through_id = plan.invoiced_through.id

    invoice_item_created = False
    for ledger_entry in LicenseLedger.objects.filter(
        plan=plan, id__gt=invoiced_through_id, event_time__lte=event_time
    ).order_by("id"):
        price_args: Dict[str, int] = {}
        if ledger_entry.is_renewal:
            if plan.fixed_price is not None:
                price_args = {"amount": plan.fixed_price}
            else:
                assert plan.price_per_license is not None  # needed for mypy
                price_args = {
                    "unit_amount": plan.price_per_license,
                    "quantity": ledger_entry.licenses,
                }
            description = "Zulip Standard - renewal"
        elif licenses_base is not None and ledger_entry.licenses != licenses_base:
            assert plan.price_per_license
            last_renewal = (
                LicenseLedger.objects.filter(
                    plan=plan, is_renewal=True, event_time__lte=ledger_entry.event_time
                )
                .order_by("-id")
                .first()
                .event_time
            )
            period_end = start_of_next_billing_cycle(plan, ledger_entry.event_time)
            proration_fraction = (period_end - ledger_entry.event_time) / (
                period_end - last_renewal
            )
            price_args = {
                "unit_amount": int(plan.price_per_license * proration_fraction + 0.5),
                "quantity": ledger_entry.licenses - licenses_base,
            }
            description = "Additional license ({} - {})".format(
                ledger_entry.event_time.strftime("%b %-d, %Y"), period_end.strftime("%b %-d, %Y")
            )

        if price_args:
            plan.invoiced_through = ledger_entry
            plan.invoicing_status = CustomerPlan.STARTED
            plan.save(update_fields=["invoicing_status", "invoiced_through"])
            stripe.InvoiceItem.create(
                currency="usd",
                customer=plan.customer.stripe_customer_id,
                description=description,
                discountable=False,
                period={
                    "start": datetime_to_timestamp(ledger_entry.event_time),
                    "end": datetime_to_timestamp(
                        start_of_next_billing_cycle(plan, ledger_entry.event_time)
                    ),
                },
                idempotency_key=get_idempotency_key(ledger_entry),
                **price_args,
            )
            invoice_item_created = True
        plan.invoiced_through = ledger_entry
        plan.invoicing_status = CustomerPlan.DONE
        plan.save(update_fields=["invoicing_status", "invoiced_through"])
        licenses_base = ledger_entry.licenses

    if invoice_item_created:
        if plan.charge_automatically:
            billing_method = "charge_automatically"
            days_until_due = None
        else:
            billing_method = "send_invoice"
            days_until_due = DEFAULT_INVOICE_DAYS_UNTIL_DUE
        stripe_invoice = stripe.Invoice.create(
            auto_advance=True,
            billing=billing_method,
            customer=plan.customer.stripe_customer_id,
            days_until_due=days_until_due,
            statement_descriptor="Zulip Standard",
        )
        stripe.Invoice.finalize_invoice(stripe_invoice)

    plan.next_invoice_date = next_invoice_date(plan)
    plan.save(update_fields=["next_invoice_date"])
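
# Worked proration example for the "Additional license" branch above
# (hypothetical dates): on an $80/year plan renewed Jan 1, 2021, a license
# added on Oct 1, 2021 is billed only for the 92 days left in the cycle,
# int(8000 * 92 / 365 + 0.5) == 2016 cents, about $20.16.
fraction = (datetime(2022, 1, 1) - datetime(2021, 10, 1)) / (
    datetime(2022, 1, 1) - datetime(2021, 1, 1)
)
assert int(8000 * fraction + 0.5) == 2016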
|
||||
|
||||
|
||||
def invoice_plans_as_needed(event_time: datetime = timezone_now()) -> None:
|
||||
for plan in CustomerPlan.objects.filter(next_invoice_date__lte=event_time):
|
||||
invoice_plan(plan, event_time)
|
||||
|
||||
|
||||
def attach_discount_to_realm(
|
||||
realm: Realm, discount: Decimal, *, acting_user: Optional[UserProfile]
|
||||
) -> None:
|
||||
customer = get_customer_by_realm(realm)
|
||||
old_discount: Optional[Decimal] = None
|
||||
if customer is not None:
|
||||
old_discount = customer.default_discount
|
||||
customer.default_discount = discount
|
||||
customer.save(update_fields=["default_discount"])
|
||||
else:
|
||||
Customer.objects.create(realm=realm, default_discount=discount)
|
||||
plan = get_current_plan_by_realm(realm)
|
||||
if plan is not None:
|
||||
plan.price_per_license = get_price_per_license(plan.tier, plan.billing_schedule, discount)
|
||||
plan.discount = discount
|
||||
plan.save(update_fields=["price_per_license", "discount"])
|
||||
RealmAuditLog.objects.create(
|
||||
realm=realm,
|
||||
acting_user=acting_user,
|
||||
event_type=RealmAuditLog.REALM_DISCOUNT_CHANGED,
|
||||
event_time=timezone_now(),
|
||||
extra_data={"old_discount": old_discount, "new_discount": discount},
|
||||
)
|
||||
|
||||
|
||||
def update_sponsorship_status(
|
||||
realm: Realm, sponsorship_pending: bool, *, acting_user: Optional[UserProfile]
|
||||
) -> None:
|
||||
customer, _ = Customer.objects.get_or_create(realm=realm)
|
||||
customer.sponsorship_pending = sponsorship_pending
|
||||
customer.save(update_fields=["sponsorship_pending"])
|
||||
RealmAuditLog.objects.create(
|
||||
realm=realm,
|
||||
acting_user=acting_user,
|
||||
event_type=RealmAuditLog.REALM_SPONSORSHIP_PENDING_STATUS_CHANGED,
|
||||
event_time=timezone_now(),
|
||||
extra_data={
|
||||
"sponsorship_pending": sponsorship_pending,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
def approve_sponsorship(realm: Realm, *, acting_user: Optional[UserProfile]) -> None:
|
||||
from zerver.lib.actions import do_change_plan_type, internal_send_private_message
|
||||
|
||||
do_change_plan_type(realm, Realm.STANDARD_FREE, acting_user=acting_user)
|
||||
customer = get_customer_by_realm(realm)
|
||||
if customer is not None and customer.sponsorship_pending:
|
||||
customer.sponsorship_pending = False
|
||||
customer.save(update_fields=["sponsorship_pending"])
|
||||
RealmAuditLog.objects.create(
|
||||
realm=realm,
|
||||
acting_user=acting_user,
|
||||
event_type=RealmAuditLog.REALM_SPONSORSHIP_APPROVED,
|
||||
event_time=timezone_now(),
|
||||
)
|
||||
notification_bot = get_system_bot(settings.NOTIFICATION_BOT)
|
||||
for billing_admin in realm.get_human_billing_admin_users():
|
||||
with override_language(billing_admin.default_language):
|
||||
# Using variable to make life easier for translators if these details change.
|
||||
plan_name = "Zulip Cloud Standard"
|
||||
emoji = ":tada:"
|
||||
message = _(
|
||||
f"Your organization's request for sponsored hosting has been approved! {emoji}.\n"
|
||||
f"You have been upgraded to {plan_name}, free of charge."
|
||||
)
|
||||
internal_send_private_message(notification_bot, billing_admin, message)
|
||||
|
||||
|
||||
def get_discount_for_realm(realm: Realm) -> Optional[Decimal]:
|
||||
customer = get_customer_by_realm(realm)
|
||||
if customer is not None:
|
||||
return customer.default_discount
|
||||
return None
|
||||
|
||||
|
||||
def do_change_plan_status(plan: CustomerPlan, status: int) -> None:
|
||||
plan.status = status
|
||||
plan.save(update_fields=["status"])
|
||||
billing_logger.info(
|
||||
"Change plan status: Customer.id: %s, CustomerPlan.id: %s, status: %s",
|
||||
plan.customer.id,
|
||||
plan.id,
|
||||
status,
|
||||
)
|
||||
|
||||
|
||||
def process_downgrade(plan: CustomerPlan) -> None:
|
||||
from zerver.lib.actions import do_change_plan_type
|
||||
|
||||
do_change_plan_type(plan.customer.realm, Realm.LIMITED, acting_user=None)
|
||||
plan.status = CustomerPlan.ENDED
|
||||
plan.save(update_fields=["status"])
|
||||
|
||||
|
||||
def estimate_annual_recurring_revenue_by_realm() -> Dict[str, int]: # nocoverage
|
||||
annual_revenue = {}
|
||||
for plan in CustomerPlan.objects.filter(status=CustomerPlan.ACTIVE).select_related(
|
||||
"customer__realm"
|
||||
):
|
||||
# TODO: figure out what to do for plans that don't automatically
|
||||
# renew, but which probably will renew
|
||||
renewal_cents = renewal_amount(plan, timezone_now())
|
||||
if plan.billing_schedule == CustomerPlan.MONTHLY:
|
||||
renewal_cents *= 12
|
||||
# TODO: Decimal stuff
|
||||
annual_revenue[plan.customer.realm.string_id] = int(renewal_cents / 100)
|
||||
return annual_revenue
|
||||
|
||||
|
||||
# During realm deactivation we instantly downgrade the plan to Limited.
|
||||
# Extra users added in the final month are not charged. Also used
|
||||
# for the cancellation of Free Trial.
|
||||
def downgrade_now_without_creating_additional_invoices(realm: Realm) -> None:
|
||||
plan = get_current_plan_by_realm(realm)
|
||||
if plan is None:
|
||||
return
|
||||
|
||||
process_downgrade(plan)
|
||||
plan.invoiced_through = LicenseLedger.objects.filter(plan=plan).order_by("id").last()
|
||||
plan.next_invoice_date = next_invoice_date(plan)
|
||||
plan.save(update_fields=["invoiced_through", "next_invoice_date"])
|
||||
|
||||
|
||||
def downgrade_at_the_end_of_billing_cycle(realm: Realm) -> None:
|
||||
plan = get_current_plan_by_realm(realm)
|
||||
assert plan is not None
|
||||
do_change_plan_status(plan, CustomerPlan.DOWNGRADE_AT_END_OF_CYCLE)
|
||||
|
||||
|
||||
def void_all_open_invoices(realm: Realm) -> int:
|
||||
customer = get_customer_by_realm(realm)
|
||||
if customer is None:
|
||||
return 0
|
||||
invoices = stripe.Invoice.list(customer=customer.stripe_customer_id)
|
||||
voided_invoices_count = 0
|
||||
for invoice in invoices:
|
||||
if invoice.status == "open":
|
||||
stripe.Invoice.void_invoice(invoice.id)
|
||||
voided_invoices_count += 1
|
||||
return voided_invoices_count
|
||||
|
||||
|
||||
def update_billing_method_of_current_plan(
|
||||
realm: Realm, charge_automatically: bool, *, acting_user: Optional[UserProfile]
|
||||
) -> None:
|
||||
plan = get_current_plan_by_realm(realm)
|
||||
if plan is not None:
|
||||
plan.charge_automatically = charge_automatically
|
||||
plan.save(update_fields=["charge_automatically"])
|
||||
RealmAuditLog.objects.create(
|
||||
realm=realm,
|
||||
acting_user=acting_user,
|
||||
event_type=RealmAuditLog.REALM_BILLING_METHOD_CHANGED,
|
||||
event_time=timezone_now(),
|
||||
extra_data={
|
||||
"charge_automatically": charge_automatically,
|
||||
},
|
||||
)
|
@@ -1,86 +0,0 @@
|
||||
# Generated by Django 1.11.14 on 2018-09-25 12:02
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("zerver", "0189_userprofile_add_some_emojisets"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="BillingProcessor",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("state", models.CharField(max_length=20)),
|
||||
("last_modified", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"log_row",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.RealmAuditLog"
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.OneToOneField(
|
||||
null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Coupon",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("percent_off", models.SmallIntegerField(unique=True)),
|
||||
("stripe_coupon_id", models.CharField(max_length=255, unique=True)),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Customer",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("stripe_customer_id", models.CharField(max_length=255, unique=True)),
|
||||
("has_billing_relationship", models.BooleanField(default=False)),
|
||||
(
|
||||
"realm",
|
||||
models.OneToOneField(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Plan",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("nickname", models.CharField(max_length=40, unique=True)),
|
||||
("stripe_plan_id", models.CharField(max_length=255, unique=True)),
|
||||
],
|
||||
),
|
||||
]
|
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 1.11.16 on 2018-12-12 20:19
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("corporate", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="customer",
|
||||
name="default_discount",
|
||||
field=models.DecimalField(decimal_places=4, max_digits=7, null=True),
|
||||
),
|
||||
]
|
@@ -1,43 +0,0 @@
|
||||
# Generated by Django 1.11.16 on 2018-12-22 21:05
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("corporate", "0002_customer_default_discount"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="CustomerPlan",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("licenses", models.IntegerField()),
|
||||
("automanage_licenses", models.BooleanField(default=False)),
|
||||
("charge_automatically", models.BooleanField(default=False)),
|
||||
("price_per_license", models.IntegerField(null=True)),
|
||||
("fixed_price", models.IntegerField(null=True)),
|
||||
("discount", models.DecimalField(decimal_places=4, max_digits=6, null=True)),
|
||||
("billing_cycle_anchor", models.DateTimeField()),
|
||||
("billing_schedule", models.SmallIntegerField()),
|
||||
("billed_through", models.DateTimeField()),
|
||||
("next_billing_date", models.DateTimeField(db_index=True)),
|
||||
("tier", models.SmallIntegerField()),
|
||||
("status", models.SmallIntegerField(default=1)),
|
||||
(
|
||||
"customer",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="corporate.Customer"
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
]
|
@@ -1,35 +0,0 @@
|
||||
# Generated by Django 1.11.18 on 2019-01-19 05:01
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("corporate", "0003_customerplan"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="LicenseLedger",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("is_renewal", models.BooleanField(default=False)),
|
||||
("event_time", models.DateTimeField()),
|
||||
("licenses", models.IntegerField()),
|
||||
("licenses_at_next_renewal", models.IntegerField(null=True)),
|
||||
(
|
||||
"plan",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="corporate.CustomerPlan"
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
]
|
@@ -1,38 +0,0 @@
|
||||
# Generated by Django 1.11.18 on 2019-01-28 13:04
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("corporate", "0004_licenseledger"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name="customerplan",
|
||||
old_name="next_billing_date",
|
||||
new_name="next_invoice_date",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="customerplan",
|
||||
name="billed_through",
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="customerplan",
|
||||
name="invoiced_through",
|
||||
field=models.ForeignKey(
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="+",
|
||||
to="corporate.LicenseLedger",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="customerplan",
|
||||
name="invoicing_status",
|
||||
field=models.SmallIntegerField(default=1),
|
||||
),
|
||||
]
|
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 1.11.18 on 2019-01-29 01:46
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("corporate", "0005_customerplan_invoicing"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="customer",
|
||||
name="stripe_customer_id",
|
||||
field=models.CharField(max_length=255, null=True, unique=True),
|
||||
),
|
||||
]
|
@@ -1,38 +0,0 @@
|
||||
# Generated by Django 1.11.18 on 2019-01-31 22:16
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("corporate", "0006_nullable_stripe_customer_id"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="billingprocessor",
|
||||
name="log_row",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="billingprocessor",
|
||||
name="realm",
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name="Coupon",
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name="Plan",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="customer",
|
||||
name="has_billing_relationship",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="customerplan",
|
||||
name="licenses",
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name="BillingProcessor",
|
||||
),
|
||||
]
|
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 1.11.20 on 2019-04-11 00:45
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("corporate", "0007_remove_deprecated_fields"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="customerplan",
|
||||
name="next_invoice_date",
|
||||
field=models.DateTimeField(db_index=True, null=True),
|
||||
),
|
||||
]
|
@@ -1,18 +0,0 @@
|
||||
# Generated by Django 2.2.13 on 2020-06-09 12:09
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("corporate", "0008_nullable_next_invoice_date"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="customer",
|
||||
name="sponsorship_pending",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
@@ -1,113 +0,0 @@
|
||||
import datetime
|
||||
from decimal import Decimal
|
||||
from typing import Optional
|
||||
|
||||
from django.db import models
|
||||
from django.db.models import CASCADE
|
||||
|
||||
from zerver.models import Realm
|
||||
|
||||
|
||||
class Customer(models.Model):
|
||||
realm: Realm = models.OneToOneField(Realm, on_delete=CASCADE)
|
||||
stripe_customer_id: str = models.CharField(max_length=255, null=True, unique=True)
|
||||
sponsorship_pending: bool = models.BooleanField(default=False)
|
||||
# A percentage, like 85.
|
||||
default_discount: Optional[Decimal] = models.DecimalField(
|
||||
decimal_places=4, max_digits=7, null=True
|
||||
)
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"<Customer {self.realm} {self.stripe_customer_id}>"
|
||||
|
||||
|
||||
def get_customer_by_realm(realm: Realm) -> Optional[Customer]:
|
||||
return Customer.objects.filter(realm=realm).first()
|
||||
|
||||
|
||||
class CustomerPlan(models.Model):
|
||||
customer: Customer = models.ForeignKey(Customer, on_delete=CASCADE)
|
||||
automanage_licenses: bool = models.BooleanField(default=False)
|
||||
charge_automatically: bool = models.BooleanField(default=False)
|
||||
|
||||
# Both of these are in cents. Exactly one of price_per_license or
|
||||
# fixed_price should be set. fixed_price is only for manual deals, and
|
||||
# can't be set via the self-serve billing system.
|
||||
price_per_license: Optional[int] = models.IntegerField(null=True)
|
||||
fixed_price: Optional[int] = models.IntegerField(null=True)
|
||||
|
||||
# Discount that was applied. For display purposes only.
|
||||
discount: Optional[Decimal] = models.DecimalField(decimal_places=4, max_digits=6, null=True)
|
||||
|
||||
billing_cycle_anchor: datetime.datetime = models.DateTimeField()
|
||||
ANNUAL = 1
|
||||
MONTHLY = 2
|
||||
billing_schedule: int = models.SmallIntegerField()
|
||||
|
||||
next_invoice_date: Optional[datetime.datetime] = models.DateTimeField(db_index=True, null=True)
|
||||
invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
|
||||
"LicenseLedger", null=True, on_delete=CASCADE, related_name="+"
|
||||
)
|
||||
DONE = 1
|
||||
STARTED = 2
|
||||
INITIAL_INVOICE_TO_BE_SENT = 3
|
||||
invoicing_status: int = models.SmallIntegerField(default=DONE)
|
||||
|
||||
STANDARD = 1
|
||||
PLUS = 2 # not available through self-serve signup
|
||||
ENTERPRISE = 10
|
||||
tier: int = models.SmallIntegerField()
|
||||
|
||||
ACTIVE = 1
|
||||
DOWNGRADE_AT_END_OF_CYCLE = 2
|
||||
FREE_TRIAL = 3
|
||||
SWITCH_TO_ANNUAL_AT_END_OF_CYCLE = 4
|
||||
# "Live" plans should have a value < LIVE_STATUS_THRESHOLD.
|
||||
# There should be at most one live plan per customer.
|
||||
LIVE_STATUS_THRESHOLD = 10
|
||||
ENDED = 11
|
||||
NEVER_STARTED = 12
|
||||
status: int = models.SmallIntegerField(default=ACTIVE)
|
||||
|
||||
# TODO maybe override setattr to ensure billing_cycle_anchor, etc are immutable
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return {
|
||||
CustomerPlan.STANDARD: "Zulip Standard",
|
||||
CustomerPlan.PLUS: "Zulip Plus",
|
||||
CustomerPlan.ENTERPRISE: "Zulip Enterprise",
|
||||
}[self.tier]
|
||||
|
||||
def get_plan_status_as_text(self) -> str:
|
||||
return {
|
||||
self.ACTIVE: "Active",
|
||||
self.DOWNGRADE_AT_END_OF_CYCLE: "Scheduled for downgrade at end of cycle",
|
||||
self.FREE_TRIAL: "Free trial",
|
||||
self.ENDED: "Ended",
|
||||
self.NEVER_STARTED: "Never started",
|
||||
}[self.status]
|
||||
|
||||
|
||||
def get_current_plan_by_customer(customer: Customer) -> Optional[CustomerPlan]:
|
||||
return CustomerPlan.objects.filter(
|
||||
customer=customer, status__lt=CustomerPlan.LIVE_STATUS_THRESHOLD
|
||||
).first()
|
||||
|
||||
|
||||
def get_current_plan_by_realm(realm: Realm) -> Optional[CustomerPlan]:
|
||||
customer = get_customer_by_realm(realm)
|
||||
if customer is None:
|
||||
return None
|
||||
return get_current_plan_by_customer(customer)
|
||||
|
||||
|
||||
class LicenseLedger(models.Model):
|
||||
plan: CustomerPlan = models.ForeignKey(CustomerPlan, on_delete=CASCADE)
|
||||
# Also True for the initial upgrade.
|
||||
is_renewal: bool = models.BooleanField(default=False)
|
||||
event_time: datetime.datetime = models.DateTimeField()
|
||||
licenses: int = models.IntegerField()
|
||||
# None means the plan does not automatically renew.
|
||||
# This cannot be None if plan.automanage_licenses.
|
||||
licenses_at_next_renewal: Optional[int] = models.IntegerField(null=True)
|
@@ -1,117 +0,0 @@
|
||||
{
|
||||
"amount": 7200,
|
||||
"amount_captured": 7200,
|
||||
"amount_refunded": 0,
|
||||
"application": null,
|
||||
"application_fee": null,
|
||||
"application_fee_amount": null,
|
||||
"balance_transaction": "txn_NORMALIZED00000000000001",
|
||||
"billing_details": {
|
||||
"address": {
|
||||
"city": "Pacific",
|
||||
"country": "United States",
|
||||
"line1": "Under the sea,",
|
||||
"line2": null,
|
||||
"postal_code": "33333",
|
||||
"state": null
|
||||
},
|
||||
"email": null,
|
||||
"name": "Ada Starr",
|
||||
"phone": null
|
||||
},
|
||||
"calculated_statement_descriptor": "ZULIP STANDARD",
|
||||
"captured": true,
|
||||
"created": 1000000000,
|
||||
"currency": "usd",
|
||||
"customer": "cus_NORMALIZED0001",
|
||||
"description": "Upgrade to Zulip Standard, $12.0 x 6",
|
||||
"destination": null,
|
||||
"dispute": null,
|
||||
"disputed": false,
|
||||
"failure_code": null,
|
||||
"failure_message": null,
|
||||
"fraud_details": {},
|
||||
"id": "ch_NORMALIZED00000000000001",
|
||||
"invoice": null,
|
||||
"livemode": false,
|
||||
"metadata": {},
|
||||
"object": "charge",
|
||||
"on_behalf_of": null,
|
||||
"order": null,
|
||||
"outcome": {
|
||||
"network_status": "approved_by_network",
|
||||
"reason": null,
|
||||
"risk_level": "normal",
|
||||
"risk_score": 0,
|
||||
"seller_message": "Payment complete.",
|
||||
"type": "authorized"
|
||||
},
|
||||
"paid": true,
|
||||
"payment_intent": null,
|
||||
"payment_method": "card_NORMALIZED00000000000001",
|
||||
"payment_method_details": {
|
||||
"card": {
|
||||
"brand": "visa",
|
||||
"checks": {
|
||||
"address_line1_check": "pass",
|
||||
"address_postal_code_check": "pass",
|
||||
"cvc_check": "pass"
|
||||
},
|
||||
"country": "US",
|
||||
"exp_month": 3,
|
||||
"exp_year": 2033,
|
||||
"fingerprint": "NORMALIZED000001",
|
||||
"funding": "credit",
|
||||
"installments": null,
|
||||
"last4": "4242",
|
||||
"network": "visa",
|
||||
"three_d_secure": null,
|
||||
"wallet": null
|
||||
},
|
||||
"type": "card"
|
||||
},
|
||||
"receipt_email": "hamlet@zulip.com",
|
||||
"receipt_number": null,
|
||||
"receipt_url": "https://pay.stripe.com/receipts/acct_NORMALIZED000001/ch_NORMALIZED00000000000001/rcpt_NORMALIZED000000000000000000001",
|
||||
"refunded": false,
|
||||
"refunds": {
|
||||
"data": [],
|
||||
"has_more": false,
|
||||
"object": "list",
|
||||
"total_count": 0,
|
||||
"url": "/v1/charges/ch_NORMALIZED00000000000001/refunds"
|
||||
},
|
||||
"review": null,
|
||||
"shipping": null,
|
||||
"source": {
|
||||
"address_city": "Pacific",
|
||||
"address_country": "United States",
|
||||
"address_line1": "Under the sea,",
|
||||
"address_line1_check": "pass",
|
||||
"address_line2": null,
|
||||
"address_state": null,
|
||||
"address_zip": "33333",
|
||||
"address_zip_check": "pass",
|
||||
"brand": "Visa",
|
||||
"country": "US",
|
||||
"customer": "cus_NORMALIZED0001",
|
||||
"cvc_check": "pass",
|
||||
"dynamic_last4": null,
|
||||
"exp_month": 3,
|
||||
"exp_year": 2033,
|
||||
"fingerprint": "NORMALIZED000001",
|
||||
"funding": "credit",
|
||||
"id": "card_NORMALIZED00000000000001",
|
||||
"last4": "4242",
|
||||
"metadata": {},
|
||||
"name": "Ada Starr",
|
||||
"object": "card",
|
||||
"tokenization_method": null
|
||||
},
|
||||
"source_transfer": null,
|
||||
"statement_descriptor": "Zulip Standard",
|
||||
"statement_descriptor_suffix": null,
|
||||
"status": "succeeded",
|
||||
"transfer_data": null,
|
||||
"transfer_group": null
|
||||
}
|
@@ -1,117 +0,0 @@
{
    "amount": 36000,
    "amount_captured": 36000,
    "amount_refunded": 0,
    "application": null,
    "application_fee": null,
    "application_fee_amount": null,
    "balance_transaction": "txn_NORMALIZED00000000000002",
    "billing_details": {
        "address": {
            "city": "Pacific",
            "country": "United States",
            "line1": "Under the sea,",
            "line2": null,
            "postal_code": "33333",
            "state": null
        },
        "email": null,
        "name": "Ada Starr",
        "phone": null
    },
    "calculated_statement_descriptor": "ZULIP STANDARD",
    "captured": true,
    "created": 1000000000,
    "currency": "usd",
    "customer": "cus_NORMALIZED0001",
    "description": "Upgrade to Zulip Standard, $60.0 x 6",
    "destination": null,
    "dispute": null,
    "disputed": false,
    "failure_code": null,
    "failure_message": null,
    "fraud_details": {},
    "id": "ch_NORMALIZED00000000000002",
    "invoice": null,
    "livemode": false,
    "metadata": {},
    "object": "charge",
    "on_behalf_of": null,
    "order": null,
    "outcome": {
        "network_status": "approved_by_network",
        "reason": null,
        "risk_level": "normal",
        "risk_score": 0,
        "seller_message": "Payment complete.",
        "type": "authorized"
    },
    "paid": true,
    "payment_intent": null,
    "payment_method": "card_NORMALIZED00000000000002",
    "payment_method_details": {
        "card": {
            "brand": "visa",
            "checks": {
                "address_line1_check": "pass",
                "address_postal_code_check": "pass",
                "cvc_check": "pass"
            },
            "country": "US",
            "exp_month": 3,
            "exp_year": 2033,
            "fingerprint": "NORMALIZED000001",
            "funding": "credit",
            "installments": null,
            "last4": "4242",
            "network": "visa",
            "three_d_secure": null,
            "wallet": null
        },
        "type": "card"
    },
    "receipt_email": "hamlet@zulip.com",
    "receipt_number": null,
    "receipt_url": "https://pay.stripe.com/receipts/acct_NORMALIZED000001/ch_NORMALIZED00000000000002/rcpt_NORMALIZED000000000000000000002",
    "refunded": false,
    "refunds": {
        "data": [],
        "has_more": false,
        "object": "list",
        "total_count": 0,
        "url": "/v1/charges/ch_NORMALIZED00000000000002/refunds"
    },
    "review": null,
    "shipping": null,
    "source": {
        "address_city": "Pacific",
        "address_country": "United States",
        "address_line1": "Under the sea,",
        "address_line1_check": "pass",
        "address_line2": null,
        "address_state": null,
        "address_zip": "33333",
        "address_zip_check": "pass",
        "brand": "Visa",
        "country": "US",
        "customer": "cus_NORMALIZED0001",
        "cvc_check": "pass",
        "dynamic_last4": null,
        "exp_month": 3,
        "exp_year": 2033,
        "fingerprint": "NORMALIZED000001",
        "funding": "credit",
        "id": "card_NORMALIZED00000000000002",
        "last4": "4242",
        "metadata": {},
        "name": "Ada Starr",
        "object": "card",
        "tokenization_method": null
    },
    "source_transfer": null,
    "statement_descriptor": "Zulip Standard",
    "statement_descriptor_suffix": null,
    "status": "succeeded",
    "transfer_data": null,
    "transfer_group": null
}
@@ -1,118 +0,0 @@
{
    "data": [
        {
            "amount": 7200,
            "amount_captured": 7200,
            "amount_refunded": 0,
            "application": null,
            "application_fee": null,
            "application_fee_amount": null,
            "balance_transaction": "txn_NORMALIZED00000000000001",
            "billing_details": {
                "address": {
                    "city": "Pacific",
                    "country": "United States",
                    "line1": "Under the sea,",
                    "line2": null,
                    "postal_code": "33333",
                    "state": null
                },
                "email": null,
                "name": "Ada Starr",
                "phone": null
            },
            "calculated_statement_descriptor": "ZULIP STANDARD",
            "captured": true,
            "created": 1000000000,
            "currency": "usd",
            "customer": "cus_NORMALIZED0001",
            "description": "Upgrade to Zulip Standard, $12.0 x 6",
            "destination": null,
            "dispute": null,
            "disputed": false,
            "failure_code": null,
            "failure_message": null,
            "fraud_details": {},
            "id": "ch_NORMALIZED00000000000001",
            "invoice": null,
            "livemode": false,
            "metadata": {},
            "object": "charge",
            "on_behalf_of": null,
            "order": null,
            "outcome": {
                "network_status": "approved_by_network",
                "reason": null,
                "risk_level": "normal",
                "risk_score": 0,
                "seller_message": "Payment complete.",
                "type": "authorized"
            },
            "paid": true,
            "payment_intent": null,
            "payment_method": "card_NORMALIZED00000000000001",
            "payment_method_details": {
                "card": {
                    "brand": "visa",
                    "checks": {
                        "address_line1_check": "pass",
                        "address_postal_code_check": "pass",
                        "cvc_check": "pass"
                    },
                    "country": "US",
                    "exp_month": 3,
                    "exp_year": 2033,
                    "fingerprint": "NORMALIZED000001",
                    "funding": "credit",
                    "installments": null,
                    "last4": "4242",
                    "network": "visa",
                    "three_d_secure": null,
                    "wallet": null
                },
                "type": "card"
            },
            "receipt_email": "hamlet@zulip.com",
            "receipt_number": null,
            "receipt_url": "https://pay.stripe.com/receipts/acct_NORMALIZED000001/ch_NORMALIZED00000000000001/rcpt_NORMALIZED000000000000000000001",
            "refunded": false,
            "refunds": {},
            "review": null,
            "shipping": null,
            "source": {
                "address_city": "Pacific",
                "address_country": "United States",
                "address_line1": "Under the sea,",
                "address_line1_check": "pass",
                "address_line2": null,
                "address_state": null,
                "address_zip": "33333",
                "address_zip_check": "pass",
                "brand": "Visa",
                "country": "US",
                "customer": "cus_NORMALIZED0001",
                "cvc_check": "pass",
                "dynamic_last4": null,
                "exp_month": 3,
                "exp_year": 2033,
                "fingerprint": "NORMALIZED000001",
                "funding": "credit",
                "id": "card_NORMALIZED00000000000001",
                "last4": "4242",
                "metadata": {},
                "name": "Ada Starr",
                "object": "card",
                "tokenization_method": null
            },
            "source_transfer": null,
            "statement_descriptor": "Zulip Standard",
            "statement_descriptor_suffix": null,
            "status": "succeeded",
            "transfer_data": null,
            "transfer_group": null
        }
    ],
    "has_more": false,
    "object": "list",
    "url": "/v1/charges"
}
@@ -1,229 +0,0 @@
{
    "data": [
        {
            "amount": 36000,
            "amount_captured": 36000,
            "amount_refunded": 0,
            "application": null,
            "application_fee": null,
            "application_fee_amount": null,
            "balance_transaction": "txn_NORMALIZED00000000000002",
            "billing_details": {
                "address": {
                    "city": "Pacific",
                    "country": "United States",
                    "line1": "Under the sea,",
                    "line2": null,
                    "postal_code": "33333",
                    "state": null
                },
                "email": null,
                "name": "Ada Starr",
                "phone": null
            },
            "calculated_statement_descriptor": "ZULIP STANDARD",
            "captured": true,
            "created": 1000000000,
            "currency": "usd",
            "customer": "cus_NORMALIZED0001",
            "description": "Upgrade to Zulip Standard, $60.0 x 6",
            "destination": null,
            "dispute": null,
            "disputed": false,
            "failure_code": null,
            "failure_message": null,
            "fraud_details": {},
            "id": "ch_NORMALIZED00000000000002",
            "invoice": null,
            "livemode": false,
            "metadata": {},
            "object": "charge",
            "on_behalf_of": null,
            "order": null,
            "outcome": {
                "network_status": "approved_by_network",
                "reason": null,
                "risk_level": "normal",
                "risk_score": 0,
                "seller_message": "Payment complete.",
                "type": "authorized"
            },
            "paid": true,
            "payment_intent": null,
            "payment_method": "card_NORMALIZED00000000000002",
            "payment_method_details": {
                "card": {
                    "brand": "visa",
                    "checks": {
                        "address_line1_check": "pass",
                        "address_postal_code_check": "pass",
                        "cvc_check": "pass"
                    },
                    "country": "US",
                    "exp_month": 3,
                    "exp_year": 2033,
                    "fingerprint": "NORMALIZED000001",
                    "funding": "credit",
                    "installments": null,
                    "last4": "4242",
                    "network": "visa",
                    "three_d_secure": null,
                    "wallet": null
                },
                "type": "card"
            },
            "receipt_email": "hamlet@zulip.com",
            "receipt_number": null,
            "receipt_url": "https://pay.stripe.com/receipts/acct_NORMALIZED000001/ch_NORMALIZED00000000000002/rcpt_NORMALIZED000000000000000000002",
            "refunded": false,
            "refunds": {},
            "review": null,
            "shipping": null,
            "source": {
                "address_city": "Pacific",
                "address_country": "United States",
                "address_line1": "Under the sea,",
                "address_line1_check": "pass",
                "address_line2": null,
                "address_state": null,
                "address_zip": "33333",
                "address_zip_check": "pass",
                "brand": "Visa",
                "country": "US",
                "customer": "cus_NORMALIZED0001",
                "cvc_check": "pass",
                "dynamic_last4": null,
                "exp_month": 3,
                "exp_year": 2033,
                "fingerprint": "NORMALIZED000001",
                "funding": "credit",
                "id": "card_NORMALIZED00000000000002",
                "last4": "4242",
                "metadata": {},
                "name": "Ada Starr",
                "object": "card",
                "tokenization_method": null
            },
            "source_transfer": null,
            "statement_descriptor": "Zulip Standard",
            "statement_descriptor_suffix": null,
            "status": "succeeded",
            "transfer_data": null,
            "transfer_group": null
        },
        {
            "amount": 7200,
            "amount_captured": 7200,
            "amount_refunded": 0,
            "application": null,
            "application_fee": null,
            "application_fee_amount": null,
            "balance_transaction": "txn_NORMALIZED00000000000001",
            "billing_details": {
                "address": {
                    "city": "Pacific",
                    "country": "United States",
                    "line1": "Under the sea,",
                    "line2": null,
                    "postal_code": "33333",
                    "state": null
                },
                "email": null,
                "name": "Ada Starr",
                "phone": null
            },
            "calculated_statement_descriptor": "ZULIP STANDARD",
            "captured": true,
            "created": 1000000000,
            "currency": "usd",
            "customer": "cus_NORMALIZED0001",
            "description": "Upgrade to Zulip Standard, $12.0 x 6",
            "destination": null,
            "dispute": null,
            "disputed": false,
            "failure_code": null,
            "failure_message": null,
            "fraud_details": {},
            "id": "ch_NORMALIZED00000000000001",
            "invoice": null,
            "livemode": false,
            "metadata": {},
            "object": "charge",
            "on_behalf_of": null,
            "order": null,
            "outcome": {
                "network_status": "approved_by_network",
                "reason": null,
                "risk_level": "normal",
                "risk_score": 0,
                "seller_message": "Payment complete.",
                "type": "authorized"
            },
            "paid": true,
            "payment_intent": null,
            "payment_method": "card_NORMALIZED00000000000001",
            "payment_method_details": {
                "card": {
                    "brand": "visa",
                    "checks": {
                        "address_line1_check": "pass",
                        "address_postal_code_check": "pass",
                        "cvc_check": "pass"
                    },
                    "country": "US",
                    "exp_month": 3,
                    "exp_year": 2033,
                    "fingerprint": "NORMALIZED000001",
                    "funding": "credit",
                    "installments": null,
                    "last4": "4242",
                    "network": "visa",
                    "three_d_secure": null,
                    "wallet": null
                },
                "type": "card"
            },
            "receipt_email": "hamlet@zulip.com",
            "receipt_number": null,
            "receipt_url": "https://pay.stripe.com/receipts/acct_NORMALIZED000001/ch_NORMALIZED00000000000001/rcpt_NORMALIZED000000000000000000001",
            "refunded": false,
            "refunds": {},
            "review": null,
            "shipping": null,
            "source": {
                "address_city": "Pacific",
                "address_country": "United States",
                "address_line1": "Under the sea,",
                "address_line1_check": "pass",
                "address_line2": null,
                "address_state": null,
                "address_zip": "33333",
                "address_zip_check": "pass",
                "brand": "Visa",
                "country": "US",
                "customer": "cus_NORMALIZED0001",
                "cvc_check": "pass",
                "dynamic_last4": null,
                "exp_month": 3,
                "exp_year": 2033,
                "fingerprint": "NORMALIZED000001",
                "funding": "credit",
                "id": "card_NORMALIZED00000000000001",
                "last4": "4242",
                "metadata": {},
                "name": "Ada Starr",
                "object": "card",
                "tokenization_method": null
            },
            "source_transfer": null,
            "statement_descriptor": "Zulip Standard",
            "statement_descriptor_suffix": null,
            "status": "succeeded",
            "transfer_data": null,
            "transfer_group": null
        }
    ],
    "has_more": false,
    "object": "list",
    "url": "/v1/charges"
}
@@ -1,80 +0,0 @@
{
    "account_balance": 0,
    "address": null,
    "balance": 0,
    "created": 1000000000,
    "currency": null,
    "default_source": "card_NORMALIZED00000000000001",
    "delinquent": false,
    "description": "zulip (Zulip Dev)",
    "discount": null,
    "email": "hamlet@zulip.com",
    "id": "cus_NORMALIZED0001",
    "invoice_prefix": "NORMA01",
    "invoice_settings": {
        "custom_fields": null,
        "default_payment_method": null,
        "footer": null
    },
    "livemode": false,
    "metadata": {
        "realm_id": "1",
        "realm_str": "zulip"
    },
    "name": null,
    "next_invoice_sequence": 1,
    "object": "customer",
    "phone": null,
    "preferred_locales": [],
    "shipping": null,
    "sources": {
        "data": [
            {
                "address_city": "Pacific",
                "address_country": "United States",
                "address_line1": "Under the sea,",
                "address_line1_check": "pass",
                "address_line2": null,
                "address_state": null,
                "address_zip": "33333",
                "address_zip_check": "pass",
                "brand": "Visa",
                "country": "US",
                "customer": "cus_NORMALIZED0001",
                "cvc_check": "pass",
                "dynamic_last4": null,
                "exp_month": 3,
                "exp_year": 2033,
                "fingerprint": "NORMALIZED000001",
                "funding": "credit",
                "id": "card_NORMALIZED00000000000001",
                "last4": "4242",
                "metadata": {},
                "name": "Ada Starr",
                "object": "card",
                "tokenization_method": null
            }
        ],
        "has_more": false,
        "object": "list",
        "total_count": 1,
        "url": "/v1/customers/cus_NORMALIZED0001/sources"
    },
    "subscriptions": {
        "data": [],
        "has_more": false,
        "object": "list",
        "total_count": 0,
        "url": "/v1/customers/cus_NORMALIZED0001/subscriptions"
    },
    "tax_exempt": "none",
    "tax_ids": {
        "data": [],
        "has_more": false,
        "object": "list",
        "total_count": 0,
        "url": "/v1/customers/cus_NORMALIZED0001/tax_ids"
    },
    "tax_info": null,
    "tax_info_verification": null
}
@@ -1,104 +0,0 @@
{
    "account_balance": 0,
    "address": null,
    "balance": 0,
    "created": 1000000000,
    "currency": "usd",
    "default_source": {
        "address_city": "Pacific",
        "address_country": "United States",
        "address_line1": "Under the sea,",
        "address_line1_check": "pass",
        "address_line2": null,
        "address_state": null,
        "address_zip": "33333",
        "address_zip_check": "pass",
        "brand": "Visa",
        "country": "US",
        "customer": "cus_NORMALIZED0001",
        "cvc_check": "pass",
        "dynamic_last4": null,
        "exp_month": 3,
        "exp_year": 2033,
        "fingerprint": "NORMALIZED000001",
        "funding": "credit",
        "id": "card_NORMALIZED00000000000001",
        "last4": "4242",
        "metadata": {},
        "name": "Ada Starr",
        "object": "card",
        "tokenization_method": null
    },
    "delinquent": false,
    "description": "zulip (Zulip Dev)",
    "discount": null,
    "email": "hamlet@zulip.com",
    "id": "cus_NORMALIZED0001",
    "invoice_prefix": "NORMA01",
    "invoice_settings": {
        "custom_fields": null,
        "default_payment_method": null,
        "footer": null
    },
    "livemode": false,
    "metadata": {
        "realm_id": "1",
        "realm_str": "zulip"
    },
    "name": null,
    "next_invoice_sequence": 2,
    "object": "customer",
    "phone": null,
    "preferred_locales": [],
    "shipping": null,
    "sources": {
        "data": [
            {
                "address_city": "Pacific",
                "address_country": "United States",
                "address_line1": "Under the sea,",
                "address_line1_check": "pass",
                "address_line2": null,
                "address_state": null,
                "address_zip": "33333",
                "address_zip_check": "pass",
                "brand": "Visa",
                "country": "US",
                "customer": "cus_NORMALIZED0001",
                "cvc_check": "pass",
                "dynamic_last4": null,
                "exp_month": 3,
                "exp_year": 2033,
                "fingerprint": "NORMALIZED000001",
                "funding": "credit",
                "id": "card_NORMALIZED00000000000001",
                "last4": "4242",
                "metadata": {},
                "name": "Ada Starr",
                "object": "card",
                "tokenization_method": null
            }
        ],
        "has_more": false,
        "object": "list",
        "total_count": 1,
        "url": "/v1/customers/cus_NORMALIZED0001/sources"
    },
    "subscriptions": {
        "data": [],
        "has_more": false,
        "object": "list",
        "total_count": 0,
        "url": "/v1/customers/cus_NORMALIZED0001/subscriptions"
    },
    "tax_exempt": "none",
    "tax_ids": {
        "data": [],
        "has_more": false,
        "object": "list",
        "total_count": 0,
        "url": "/v1/customers/cus_NORMALIZED0001/tax_ids"
    },
    "tax_info": null,
    "tax_info_verification": null
}