mirror of
https://github.com/zulip/zulip.git
synced 2025-10-24 00:23:49 +00:00
Compare commits
2 Commits
11.0-dev
...
buddy-list
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1d0e5b1b4e | ||
|
|
5c77244fb0 |
@@ -25,7 +25,3 @@ forin
|
||||
uper
|
||||
slac
|
||||
couldn
|
||||
ges
|
||||
assertIn
|
||||
thirdparty
|
||||
asend
|
||||
|
||||
@@ -8,11 +8,10 @@ indent_style = space
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
[[shell]]
|
||||
binary_next_line = true
|
||||
switch_case_indent = true
|
||||
binary_next_line = true # for shfmt
|
||||
switch_case_indent = true # for shfmt
|
||||
|
||||
[{*.{cjs,cts,js,json,mjs,mts,ts},check-openapi}]
|
||||
[{*.{js,json,ts},check-openapi}]
|
||||
max_line_length = 100
|
||||
|
||||
[*.{py,pyi}]
|
||||
|
||||
15
.eslintignore
Normal file
15
.eslintignore
Normal file
@@ -0,0 +1,15 @@
|
||||
# This is intended for generated files and vendored third-party files.
|
||||
# For our source code, instead of adding files here, consider using
|
||||
# specific eslint-disable comments in the files themselves.
|
||||
|
||||
/docs/_build
|
||||
/static/generated
|
||||
/static/webpack-bundles
|
||||
/var/*
|
||||
!/var/puppeteer
|
||||
/var/puppeteer/*
|
||||
!/var/puppeteer/test_credentials.d.ts
|
||||
/web/generated
|
||||
/web/third
|
||||
/zulip-current-venv
|
||||
/zulip-py3-venv
|
||||
282
.eslintrc.json
Normal file
282
.eslintrc.json
Normal file
@@ -0,0 +1,282 @@
|
||||
{
|
||||
"root": true,
|
||||
"env": {
|
||||
"es2020": true,
|
||||
"node": true
|
||||
},
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:import/errors",
|
||||
"plugin:import/warnings",
|
||||
"plugin:no-jquery/recommended",
|
||||
"plugin:no-jquery/deprecated",
|
||||
"plugin:unicorn/recommended",
|
||||
"prettier"
|
||||
],
|
||||
"parser": "@babel/eslint-parser",
|
||||
"parserOptions": {
|
||||
"requireConfigFile": false,
|
||||
"warnOnUnsupportedTypeScriptVersion": false,
|
||||
"sourceType": "unambiguous"
|
||||
},
|
||||
"plugins": ["formatjs", "no-jquery"],
|
||||
"settings": {
|
||||
"formatjs": {
|
||||
"additionalFunctionNames": ["$t", "$t_html"]
|
||||
},
|
||||
"no-jquery": {
|
||||
"collectionReturningPlugins": {
|
||||
"expectOne": "always"
|
||||
},
|
||||
"variablePattern": "^\\$(?!t$|t_html$)."
|
||||
}
|
||||
},
|
||||
"reportUnusedDisableDirectives": true,
|
||||
"rules": {
|
||||
"array-callback-return": "error",
|
||||
"arrow-body-style": "error",
|
||||
"block-scoped-var": "error",
|
||||
"consistent-return": "error",
|
||||
"curly": "error",
|
||||
"dot-notation": "error",
|
||||
"eqeqeq": "error",
|
||||
"formatjs/enforce-default-message": ["error", "literal"],
|
||||
"formatjs/enforce-placeholders": [
|
||||
"error",
|
||||
{"ignoreList": ["b", "code", "em", "i", "kbd", "p", "strong"]}
|
||||
],
|
||||
"formatjs/no-id": "error",
|
||||
"guard-for-in": "error",
|
||||
"import/extensions": "error",
|
||||
"import/first": "error",
|
||||
"import/newline-after-import": "error",
|
||||
"import/no-cycle": ["error", {"ignoreExternal": true}],
|
||||
"import/no-duplicates": "error",
|
||||
"import/no-self-import": "error",
|
||||
"import/no-unresolved": "off",
|
||||
"import/no-useless-path-segments": "error",
|
||||
"import/order": ["error", {"alphabetize": {"order": "asc"}, "newlines-between": "always"}],
|
||||
"import/unambiguous": "error",
|
||||
"lines-around-directive": "error",
|
||||
"new-cap": "error",
|
||||
"no-alert": "error",
|
||||
"no-array-constructor": "error",
|
||||
"no-bitwise": "error",
|
||||
"no-caller": "error",
|
||||
"no-catch-shadow": "error",
|
||||
"no-constant-condition": ["error", {"checkLoops": false}],
|
||||
"no-div-regex": "error",
|
||||
"no-else-return": "error",
|
||||
"no-eq-null": "error",
|
||||
"no-eval": "error",
|
||||
"no-implicit-coercion": "error",
|
||||
"no-implied-eval": "error",
|
||||
"no-inner-declarations": "off",
|
||||
"no-iterator": "error",
|
||||
"no-jquery/no-constructor-attributes": "error",
|
||||
"no-jquery/no-parse-html-literal": "error",
|
||||
"no-label-var": "error",
|
||||
"no-labels": "error",
|
||||
"no-loop-func": "error",
|
||||
"no-multi-str": "error",
|
||||
"no-native-reassign": "error",
|
||||
"no-new-func": "error",
|
||||
"no-new-object": "error",
|
||||
"no-new-wrappers": "error",
|
||||
"no-octal-escape": "error",
|
||||
"no-plusplus": "error",
|
||||
"no-proto": "error",
|
||||
"no-return-assign": "error",
|
||||
"no-script-url": "error",
|
||||
"no-self-compare": "error",
|
||||
"no-sync": "error",
|
||||
"no-throw-literal": "error",
|
||||
"no-undef-init": "error",
|
||||
"no-unneeded-ternary": ["error", {"defaultAssignment": false}],
|
||||
"no-unused-expressions": "error",
|
||||
"no-unused-vars": [
|
||||
"error",
|
||||
{"args": "all", "argsIgnorePattern": "^_", "ignoreRestSiblings": true}
|
||||
],
|
||||
"no-use-before-define": ["error", {"functions": false}],
|
||||
"no-useless-concat": "error",
|
||||
"no-useless-constructor": "error",
|
||||
"no-var": "error",
|
||||
"object-shorthand": ["error", "always", {"avoidExplicitReturnArrows": true}],
|
||||
"one-var": ["error", "never"],
|
||||
"prefer-arrow-callback": "error",
|
||||
"prefer-const": ["error", {"ignoreReadBeforeAssign": true}],
|
||||
"radix": "error",
|
||||
"sort-imports": ["error", {"ignoreDeclarationSort": true}],
|
||||
"spaced-comment": ["error", "always", {"markers": ["/"]}],
|
||||
"strict": "error",
|
||||
"unicorn/consistent-function-scoping": "off",
|
||||
"unicorn/explicit-length-check": "off",
|
||||
"unicorn/filename-case": "off",
|
||||
"unicorn/no-await-expression-member": "off",
|
||||
"unicorn/no-negated-condition": "off",
|
||||
"unicorn/no-null": "off",
|
||||
"unicorn/no-process-exit": "off",
|
||||
"unicorn/no-useless-undefined": "off",
|
||||
"unicorn/numeric-separators-style": "off",
|
||||
"unicorn/prefer-module": "off",
|
||||
"unicorn/prefer-node-protocol": "off",
|
||||
"unicorn/prefer-ternary": "off",
|
||||
"unicorn/prefer-top-level-await": "off",
|
||||
"unicorn/prevent-abbreviations": "off",
|
||||
"unicorn/switch-case-braces": "off",
|
||||
"valid-typeof": ["error", {"requireStringLiterals": true}],
|
||||
"yoda": "error"
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"files": ["web/tests/**"],
|
||||
"rules": {
|
||||
"no-jquery/no-selector-prop": "off"
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": ["web/e2e-tests/**"],
|
||||
"globals": {
|
||||
"zulip_test": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": ["web/src/**"],
|
||||
"globals": {
|
||||
"StripeCheckout": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": ["**/*.ts"],
|
||||
"extends": [
|
||||
"plugin:@typescript-eslint/recommended",
|
||||
"plugin:@typescript-eslint/recommended-requiring-type-checking",
|
||||
"plugin:@typescript-eslint/strict",
|
||||
"plugin:import/typescript"
|
||||
],
|
||||
"parserOptions": {
|
||||
"project": "tsconfig.json"
|
||||
},
|
||||
"settings": {
|
||||
"import/resolver": {
|
||||
"node": {
|
||||
"extensions": [".ts", ".d.ts", ".js"] // https://github.com/import-js/eslint-plugin-import/issues/2267
|
||||
}
|
||||
}
|
||||
},
|
||||
"globals": {
|
||||
"JQuery": false
|
||||
},
|
||||
"rules": {
|
||||
// Disable base rule to avoid conflict
|
||||
"no-use-before-define": "off",
|
||||
|
||||
"@typescript-eslint/consistent-type-assertions": [
|
||||
"error",
|
||||
{"assertionStyle": "never"}
|
||||
],
|
||||
"@typescript-eslint/consistent-type-definitions": ["error", "type"],
|
||||
"@typescript-eslint/consistent-type-imports": "error",
|
||||
"@typescript-eslint/explicit-function-return-type": [
|
||||
"error",
|
||||
{"allowExpressions": true}
|
||||
],
|
||||
"@typescript-eslint/member-ordering": "error",
|
||||
"@typescript-eslint/no-non-null-assertion": "off",
|
||||
"@typescript-eslint/no-unnecessary-condition": "off",
|
||||
"@typescript-eslint/no-unnecessary-qualifier": "error",
|
||||
"@typescript-eslint/no-unsafe-argument": "off",
|
||||
"@typescript-eslint/no-unsafe-assignment": "off",
|
||||
"@typescript-eslint/no-unsafe-call": "off",
|
||||
"@typescript-eslint/no-unsafe-member-access": "off",
|
||||
"@typescript-eslint/no-unsafe-return": "off",
|
||||
"@typescript-eslint/no-unused-vars": [
|
||||
"error",
|
||||
{"args": "all", "argsIgnorePattern": "^_", "ignoreRestSiblings": true}
|
||||
],
|
||||
"@typescript-eslint/no-use-before-define": ["error", {"functions": false}],
|
||||
"@typescript-eslint/parameter-properties": "error",
|
||||
"@typescript-eslint/promise-function-async": "error",
|
||||
"no-undef": "error"
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": ["**/*.d.ts"],
|
||||
"rules": {
|
||||
"import/unambiguous": "off"
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": ["web/e2e-tests/**", "web/tests/**"],
|
||||
"globals": {
|
||||
"CSS": false,
|
||||
"document": false,
|
||||
"navigator": false,
|
||||
"window": false
|
||||
},
|
||||
"rules": {
|
||||
"formatjs/no-id": "off",
|
||||
"new-cap": "off",
|
||||
"no-sync": "off",
|
||||
"unicorn/prefer-prototype-methods": "off"
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": ["web/debug-require.js"],
|
||||
"env": {
|
||||
"browser": true,
|
||||
"es2020": false
|
||||
},
|
||||
"rules": {
|
||||
// Don’t require ES features that PhantomJS doesn’t support
|
||||
// TODO: Toggle these settings now that we don't use PhantomJS
|
||||
"no-var": "off",
|
||||
"object-shorthand": "off",
|
||||
"prefer-arrow-callback": "off"
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": ["web/shared/**", "web/src/**", "web/third/**"],
|
||||
"env": {
|
||||
"browser": true,
|
||||
"node": false
|
||||
},
|
||||
"globals": {
|
||||
"ZULIP_VERSION": false
|
||||
},
|
||||
"rules": {
|
||||
"no-console": "error"
|
||||
},
|
||||
"settings": {
|
||||
"import/resolver": {
|
||||
"webpack": {
|
||||
"config": "./web/webpack.config.ts"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"files": ["web/shared/**"],
|
||||
"env": {
|
||||
"browser": false,
|
||||
"shared-node-browser": true
|
||||
},
|
||||
"rules": {
|
||||
"import/no-restricted-paths": [
|
||||
"error",
|
||||
{
|
||||
"zones": [
|
||||
{
|
||||
"target": "./web/shared",
|
||||
"from": ".",
|
||||
"except": ["./node_modules", "./web/shared"]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"unicorn/prefer-string-replace-all": "off"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
7
.github/ISSUE_TEMPLATE/2_bug_report.md
vendored
7
.github/ISSUE_TEMPLATE/2_bug_report.md
vendored
@@ -11,8 +11,7 @@ labels: ["bug"]
|
||||
**Zulip Server and web app version:**
|
||||
|
||||
- [ ] Zulip Cloud (`*.zulipchat.com`)
|
||||
- [ ] Zulip Server 9.x
|
||||
- [ ] Zulip Server 8.x
|
||||
- [ ] Zulip Server 7.x
|
||||
- [ ] Zulip Server 6.x or older
|
||||
- [ ] Zulip Server 7.0+
|
||||
- [ ] Zulip Server 6.0+
|
||||
- [ ] Zulip Server 5.0 or older
|
||||
- [ ] Other or not sure
|
||||
|
||||
82
.github/funding.json
vendored
82
.github/funding.json
vendored
@@ -1,82 +0,0 @@
|
||||
{
|
||||
"version": "v1.0.0",
|
||||
"entity": {
|
||||
"type": "organisation",
|
||||
"role": "steward",
|
||||
"name": "Kandra Labs, Inc.",
|
||||
"email": "support@zulip.com",
|
||||
"description": "Guiding the Zulip community in developing a world-class organized team chat product with apps for every major desktop and mobile platform requires leadership from a talented, dedicated team. We believe that the only sustainable model is for our core team to be compensated fairly for their time. We have thus founded a company (Kandra Labs) to steward and financially support Zulip’s development. We are growing our business sustainably, without venture capital funding. VCs are incentivized to push companies to gamble for explosive growth. Often, the result is that a company with a useful product burns rapidly through its resources and goes out of business. We have built Zulip as a sustainable business (also supported by SBIR grants from the US National Science Foundation), and are being thoughtful about our pace of spending. Funding our company without venture capital also allows us to live by our values, without investor pressure to compromise them when doing so might be “good business” or “what everyone does”.",
|
||||
"webpageUrl": {
|
||||
"url": "https://zulip.com/values/",
|
||||
"wellKnown": "https://zulip.com/.well-known/funding-manifest-urls"
|
||||
}
|
||||
},
|
||||
"projects": [
|
||||
{
|
||||
"guid": "zulip",
|
||||
"name": "Zulip",
|
||||
"description": "Zulip is an open-source team chat application designed for seamless remote and hybrid work. With conversations organized by topic, Zulip is ideal for both live and asynchronous communication. Zulip’s 100% open-source software is available as a cloud service or a self-hosted solution, and is used by thousands of organizations around the world. An important part of Zulip’s mission is ensuring that worthy organizations, from programming-language developers to research communities, are able to use Zulip whether or not they have funding. For this reason, we sponsor Zulip Cloud Standard for open source projects, non-profits, education, and academic research. This program has grown exponentially since its inception; today we are proud to fully sponsor Zulip hosting for several hundred organizations. Support from the community will help us continue to afford these programs as their popularity grows. ",
|
||||
"webpageUrl": {
|
||||
"url": "https://zulip.com/",
|
||||
"wellKnown": "https://zulip.com/.well-known/funding-manifest-urls"
|
||||
},
|
||||
"repositoryUrl": {
|
||||
"url": "https://github.com/zulip"
|
||||
},
|
||||
"licenses": ["spdx:Apache-2.0"],
|
||||
"tags": ["communication", "team-chat", "collaboration"]
|
||||
}
|
||||
],
|
||||
"funding": {
|
||||
"channels": [
|
||||
{
|
||||
"guid": "github-sponsors",
|
||||
"type": "payment-provider",
|
||||
"address": "https://github.com/sponsors/zulip",
|
||||
"description": "Preferred channel for sponsoring Zulip, since GitHub Sponsors does not charge any fees to sponsored projects."
|
||||
},
|
||||
{
|
||||
"guid": "patreon",
|
||||
"type": "payment-provider",
|
||||
"address": "https://patreon.com/zulip"
|
||||
},
|
||||
{
|
||||
"guid": "open-collective",
|
||||
"type": "payment-provider",
|
||||
"address": "https://opencollective.com/zulip"
|
||||
}
|
||||
],
|
||||
"plans": [
|
||||
{
|
||||
"guid": "github-sponsors",
|
||||
"status": "active",
|
||||
"name": "Support Zulip",
|
||||
"description": "Contribute to Zulip's development and free hosting for open source projects and other worthy organizations!",
|
||||
"amount": 0,
|
||||
"currency": "USD",
|
||||
"frequency": "monthly",
|
||||
"channels": ["github-sponsors"]
|
||||
},
|
||||
{
|
||||
"guid": "patreon",
|
||||
"status": "active",
|
||||
"name": "Support Zulip",
|
||||
"description": "Contribute to Zulip's development and free hosting for open source projects and other worthy organizations!",
|
||||
"amount": 0,
|
||||
"currency": "USD",
|
||||
"frequency": "monthly",
|
||||
"channels": ["patreon"]
|
||||
},
|
||||
{
|
||||
"guid": "open-collective",
|
||||
"status": "active",
|
||||
"name": "Support Zulip",
|
||||
"description": "Contribute to Zulip's development and free hosting for open source projects and other worthy organizations!",
|
||||
"amount": 0,
|
||||
"currency": "USD",
|
||||
"frequency": "monthly",
|
||||
"channels": ["open-collective"]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
6
.github/workflows/codeql-analysis.yml
vendored
6
.github/workflows/codeql-analysis.yml
vendored
@@ -26,15 +26,15 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v3
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
uses: github/codeql-action/init@v2
|
||||
|
||||
# Override language selection by uncommenting this and choosing your languages
|
||||
# with:
|
||||
# languages: go, javascript, csharp, python, cpp, java
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
uses: github/codeql-action/analyze@v2
|
||||
|
||||
110
.github/workflows/production-suite.yml
vendored
110
.github/workflows/production-suite.yml
vendored
@@ -11,16 +11,15 @@ on:
|
||||
- manage.py
|
||||
- pnpm-lock.yaml
|
||||
- puppet/**
|
||||
- requirements/**
|
||||
- scripts/**
|
||||
- tools/**
|
||||
- uv.lock
|
||||
- web/babel.config.js
|
||||
- web/postcss.config.js
|
||||
- web/third/**
|
||||
- web/webpack.config.ts
|
||||
- zerver/worker/queue_processors.py
|
||||
- zerver/lib/push_notifications.py
|
||||
- zerver/lib/storage.py
|
||||
- zerver/decorator.py
|
||||
- zproject/**
|
||||
workflow_dispatch:
|
||||
@@ -40,13 +39,13 @@ jobs:
|
||||
production_build:
|
||||
# This job builds a release tarball from the current commit, which
|
||||
# will be used for all of the following install/upgrade tests.
|
||||
name: Ubuntu 22.04 production build
|
||||
name: Ubuntu 20.04 production build
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
# Docker images are built from 'tools/ci/Dockerfile'; the comments at
|
||||
# the top explain how to build and upload these images.
|
||||
# Ubuntu 22.04 ships with Python 3.10.12.
|
||||
container: zulip/ci:jammy
|
||||
# Ubuntu 20.04 ships with Python 3.8.10.
|
||||
container: zulip/ci:focal
|
||||
|
||||
steps:
|
||||
- name: Add required permissions
|
||||
@@ -65,39 +64,39 @@ jobs:
|
||||
# cache action to work. It is owned by root currently.
|
||||
sudo chmod -R 0777 /__w/_temp/
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Create cache directories
|
||||
run: |
|
||||
dirs=(/srv/zulip-emoji-cache)
|
||||
dirs=(/srv/zulip-{venv,emoji}-cache)
|
||||
sudo mkdir -p "${dirs[@]}"
|
||||
sudo chown -R github "${dirs[@]}"
|
||||
|
||||
- name: Restore pnpm store
|
||||
uses: actions/cache@v4
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: /__w/.pnpm-store
|
||||
key: v1-pnpm-store-jammy-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
key: v1-pnpm-store-focal-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
|
||||
- name: Restore uv cache
|
||||
uses: actions/cache@v4
|
||||
- name: Restore python cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: ~/.cache/uv
|
||||
key: uv-jammy-${{ hashFiles('uv.lock') }}
|
||||
restore-keys: uv-jammy-
|
||||
path: /srv/zulip-venv-cache
|
||||
key: v1-venv-focal-${{ hashFiles('requirements/dev.txt') }}
|
||||
restore-keys: v1-venv-focal
|
||||
|
||||
- name: Restore emoji cache
|
||||
uses: actions/cache@v4
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: /srv/zulip-emoji-cache
|
||||
key: v1-emoji-jammy-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
|
||||
restore-keys: v1-emoji-jammy
|
||||
key: v1-emoji-focal-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
|
||||
restore-keys: v1-emoji-focal
|
||||
|
||||
- name: Build production tarball
|
||||
run: ./tools/ci/production-build
|
||||
|
||||
- name: Upload production build artifacts for install jobs
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: production-tarball
|
||||
path: /tmp/production-build
|
||||
@@ -109,9 +108,6 @@ jobs:
|
||||
path="$(pnpm store path)"
|
||||
[[ "$path" == /__w/.pnpm-store/* ]]
|
||||
|
||||
- name: Minimize uv cache
|
||||
run: uv cache prune --ci
|
||||
|
||||
- name: Generate failure report string
|
||||
id: failure_report_string
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
@@ -139,20 +135,25 @@ jobs:
|
||||
include:
|
||||
# Docker images are built from 'tools/ci/Dockerfile'; the comments at
|
||||
# the top explain how to build and upload these images.
|
||||
- docker_image: zulip/ci:focal
|
||||
name: Ubuntu 20.04 production install and PostgreSQL upgrade with pgroonga
|
||||
os: focal
|
||||
extra-args: ""
|
||||
|
||||
- docker_image: zulip/ci:jammy
|
||||
name: Ubuntu 22.04 production install and PostgreSQL upgrade with pgroonga
|
||||
name: Ubuntu 22.04 production install
|
||||
os: jammy
|
||||
extra-args: ""
|
||||
|
||||
- docker_image: zulip/ci:noble
|
||||
name: Ubuntu 24.04 production install
|
||||
os: noble
|
||||
extra-args: ""
|
||||
- docker_image: zulip/ci:bullseye
|
||||
name: Debian 11 production install with custom db name and user
|
||||
os: bullseye
|
||||
extra-args: --test-custom-db
|
||||
|
||||
- docker_image: zulip/ci:bookworm
|
||||
name: Debian 12 production install with custom db name and user
|
||||
name: Debian 12 production install
|
||||
os: bookworm
|
||||
extra-args: --test-custom-db
|
||||
extra-args: ""
|
||||
|
||||
name: ${{ matrix.name }}
|
||||
container:
|
||||
@@ -163,7 +164,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Download built production tarball
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: production-tarball
|
||||
path: /tmp
|
||||
@@ -175,7 +176,7 @@ jobs:
|
||||
# cache action to work. It is owned by root currently.
|
||||
sudo chmod -R 0777 /__w/_temp/
|
||||
|
||||
# Since actions/download-artifact@v4 loses all the permissions
|
||||
# Since actions/download-artifact@v2 loses all the permissions
|
||||
# of the tarball uploaded by the upload artifact fix those.
|
||||
chmod +x /tmp/production-upgrade-pg
|
||||
chmod +x /tmp/production-pgroonga
|
||||
@@ -185,7 +186,7 @@ jobs:
|
||||
|
||||
- name: Create cache directories
|
||||
run: |
|
||||
dirs=(/srv/zulip-emoji-cache)
|
||||
dirs=(/srv/zulip-{venv,emoji}-cache)
|
||||
sudo mkdir -p "${dirs[@]}"
|
||||
sudo chown -R github "${dirs[@]}"
|
||||
|
||||
@@ -196,19 +197,19 @@ jobs:
|
||||
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
||||
|
||||
- name: Install pgroonga
|
||||
if: ${{ matrix.os == 'jammy' }}
|
||||
if: ${{ matrix.os == 'focal' }}
|
||||
run: sudo /tmp/production-pgroonga
|
||||
|
||||
- name: Verify install after installing pgroonga
|
||||
if: ${{ matrix.os == 'jammy' }}
|
||||
if: ${{ matrix.os == 'focal' }}
|
||||
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
||||
|
||||
- name: Upgrade postgresql
|
||||
if: ${{ matrix.os == 'jammy' }}
|
||||
if: ${{ matrix.os == 'focal' }}
|
||||
run: sudo /tmp/production-upgrade-pg
|
||||
|
||||
- name: Verify install after upgrading postgresql
|
||||
if: ${{ matrix.os == 'jammy' }}
|
||||
if: ${{ matrix.os == 'focal' }}
|
||||
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
||||
|
||||
- name: Generate failure report string
|
||||
@@ -241,18 +242,18 @@ jobs:
|
||||
include:
|
||||
# Docker images are built from 'tools/ci/Dockerfile.prod'; the comments at
|
||||
# the top explain how to build and upload these images.
|
||||
- docker_image: zulip/ci:jammy-6.0
|
||||
- docker_image: zulip/ci:focal-3.2
|
||||
name: 3.2 Version Upgrade
|
||||
os: focal
|
||||
- docker_image: zulip/ci:bullseye-4.2
|
||||
name: 4.2 Version Upgrade
|
||||
os: bullseye
|
||||
- docker_image: zulip/ci:bullseye-5.0
|
||||
name: 5.0 Version Upgrade
|
||||
os: bullseye
|
||||
- docker_image: zulip/ci:bullseye-6.0
|
||||
name: 6.0 Version Upgrade
|
||||
os: jammy
|
||||
- docker_image: zulip/ci:bookworm-7.0
|
||||
name: 7.0 Version Upgrade
|
||||
os: bookworm
|
||||
- docker_image: zulip/ci:bookworm-8.0
|
||||
name: 8.0 Version Upgrade
|
||||
os: bookworm
|
||||
- docker_image: zulip/ci:noble-9.0
|
||||
name: 9.0 Version Upgrade
|
||||
os: noble
|
||||
os: bullseye
|
||||
|
||||
name: ${{ matrix.name }}
|
||||
container:
|
||||
@@ -263,7 +264,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Download built production tarball
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: production-tarball
|
||||
path: /tmp
|
||||
@@ -275,7 +276,7 @@ jobs:
|
||||
# cache action to work. It is owned by root currently.
|
||||
sudo chmod -R 0777 /__w/_temp/
|
||||
|
||||
# Since actions/download-artifact@v4 loses all the permissions
|
||||
# Since actions/download-artifact@v2 loses all the permissions
|
||||
# of the tarball uploaded by the upload artifact fix those.
|
||||
chmod +x /tmp/production-upgrade
|
||||
chmod +x /tmp/production-verify
|
||||
@@ -283,10 +284,21 @@ jobs:
|
||||
|
||||
- name: Create cache directories
|
||||
run: |
|
||||
dirs=(/srv/zulip-emoji-cache)
|
||||
dirs=(/srv/zulip-{venv,emoji}-cache)
|
||||
sudo mkdir -p "${dirs[@]}"
|
||||
sudo chown -R github "${dirs[@]}"
|
||||
|
||||
- name: Temporarily bootstrap PostgreSQL upgrades
|
||||
# https://chat.zulip.org/#narrow/stream/43-automated-testing/topic/postgres.20client.20upgrade.20failures/near/1640444
|
||||
# On Debian, there is an ordering issue with post-install maintainer
|
||||
# scripts when postgresql-client-common is upgraded at the same time as
|
||||
# postgresql-client and postgresql-client-15. Upgrade just
|
||||
# postgresql-client-common first, so the main upgrade process can
|
||||
# succeed. This is a _temporary_ work-around to improve CI signal, as
|
||||
# the failure does represent a real failure that production systems may
|
||||
# encounter.
|
||||
run: sudo apt-get update && sudo apt-get install -y --only-upgrade postgresql-client-common
|
||||
|
||||
- name: Upgrade production
|
||||
run: sudo /tmp/production-upgrade
|
||||
|
||||
|
||||
2
.github/workflows/update-oneclick-apps.yml
vendored
2
.github/workflows/update-oneclick-apps.yml
vendored
@@ -9,7 +9,7 @@ jobs:
|
||||
update-digitalocean-oneclick-app:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
- name: Update DigitalOcean one click app
|
||||
env:
|
||||
DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }}
|
||||
|
||||
69
.github/workflows/zulip-ci.yml
vendored
69
.github/workflows/zulip-ci.yml
vendored
@@ -30,22 +30,28 @@ jobs:
|
||||
include:
|
||||
# Base images are built using `tools/ci/Dockerfile.prod.template`.
|
||||
# The comments at the top explain how to build and upload these images.
|
||||
# Ubuntu 22.04 ships with Python 3.10.12.
|
||||
- docker_image: zulip/ci:jammy
|
||||
name: Ubuntu 22.04 (Python 3.10, backend + frontend)
|
||||
os: jammy
|
||||
# Ubuntu 20.04 ships with Python 3.8.10.
|
||||
- docker_image: zulip/ci:focal
|
||||
name: Ubuntu 20.04 (Python 3.8, backend + frontend)
|
||||
os: focal
|
||||
include_documentation_tests: false
|
||||
include_frontend_tests: true
|
||||
# Debian 12 ships with Python 3.11.2.
|
||||
- docker_image: zulip/ci:bookworm
|
||||
name: Debian 12 (Python 3.11, backend + documentation)
|
||||
os: bookworm
|
||||
# Debian 11 ships with Python 3.9.2.
|
||||
- docker_image: zulip/ci:bullseye
|
||||
name: Debian 11 (Python 3.9, backend + documentation)
|
||||
os: bullseye
|
||||
include_documentation_tests: true
|
||||
include_frontend_tests: false
|
||||
# Ubuntu 24.04 ships with Python 3.12.2.
|
||||
- docker_image: zulip/ci:noble
|
||||
name: Ubuntu 24.04 (Python 3.12, backend)
|
||||
os: noble
|
||||
# Ubuntu 22.04 ships with Python 3.10.4.
|
||||
- docker_image: zulip/ci:jammy
|
||||
name: Ubuntu 22.04 (Python 3.10, backend)
|
||||
os: jammy
|
||||
include_documentation_tests: false
|
||||
include_frontend_tests: false
|
||||
# Debian 12 ships with Python 3.11.2.
|
||||
- docker_image: zulip/ci:bookworm
|
||||
name: Debian 12 (Python 3.11, backend)
|
||||
os: bookworm
|
||||
include_documentation_tests: false
|
||||
include_frontend_tests: false
|
||||
|
||||
@@ -62,29 +68,29 @@ jobs:
|
||||
HOME: /home/github/
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Create cache directories
|
||||
run: |
|
||||
dirs=(/srv/zulip-emoji-cache)
|
||||
dirs=(/srv/zulip-{venv,emoji}-cache)
|
||||
sudo mkdir -p "${dirs[@]}"
|
||||
sudo chown -R github "${dirs[@]}"
|
||||
|
||||
- name: Restore pnpm store
|
||||
uses: actions/cache@v4
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: /__w/.pnpm-store
|
||||
key: v1-pnpm-store-${{ matrix.os }}-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
|
||||
- name: Restore uv cache
|
||||
uses: actions/cache@v4
|
||||
- name: Restore python cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: ~/.cache/uv
|
||||
key: uv-${{ matrix.os }}-${{ hashFiles('uv.lock') }}
|
||||
restore-keys: uv-${{ matrix.os }}-
|
||||
path: /srv/zulip-venv-cache
|
||||
key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/dev.txt') }}
|
||||
restore-keys: v1-venv-${{ matrix.os }}
|
||||
|
||||
- name: Restore emoji cache
|
||||
uses: actions/cache@v4
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: /srv/zulip-emoji-cache
|
||||
key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json', 'tools/setup/emoji/build_emoji', 'tools/setup/emoji/emoji_setup_utils.py', 'tools/setup/emoji/emoji_names.py', 'package.json') }}
|
||||
@@ -167,7 +173,7 @@ jobs:
|
||||
- name: Run backend tests
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
./tools/test-backend ${{ matrix.os != 'bookworm' && '--coverage' || '' }} --xml-report --no-html-report --include-webhooks --include-transaction-tests --no-cov-cleanup --ban-console-output
|
||||
./tools/test-backend --coverage --xml-report --no-html-report --include-webhooks --include-transaction-tests --no-cov-cleanup --ban-console-output
|
||||
|
||||
- name: Run mypy
|
||||
run: |
|
||||
@@ -180,8 +186,11 @@ jobs:
|
||||
- name: Run miscellaneous tests
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
uv lock --check
|
||||
|
||||
# Currently our compiled requirements files will differ for different
|
||||
# Python versions, so we will run test-locked-requirements only on the
|
||||
# platform with the oldest one.
|
||||
# ./tools/test-locked-requirements
|
||||
# ./tools/test-run-dev # https://github.com/zulip/zulip/pull/14233
|
||||
#
|
||||
# This test has been persistently flaky at like 1% frequency, is slow,
|
||||
@@ -211,20 +220,25 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Test locked requirements
|
||||
if: ${{ matrix.os == 'focal' }}
|
||||
run: |
|
||||
. /srv/zulip-py3-venv/bin/activate && \
|
||||
./tools/test-locked-requirements
|
||||
|
||||
- name: Upload coverage reports
|
||||
|
||||
# Only upload coverage when both frontend and backend
|
||||
# tests are run.
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
uses: codecov/codecov-action@v4
|
||||
uses: codecov/codecov-action@v3
|
||||
with:
|
||||
files: var/coverage.xml,var/node-coverage/lcov.info
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
- name: Store Puppeteer artifacts
|
||||
# Upload these on failure, as well
|
||||
if: ${{ always() && matrix.include_frontend_tests }}
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: puppeteer
|
||||
path: ./var/puppeteer
|
||||
@@ -239,9 +253,6 @@ jobs:
|
||||
path="$(pnpm store path)"
|
||||
[[ "$path" == /__w/.pnpm-store/* ]]
|
||||
|
||||
- name: Minimize uv cache
|
||||
run: uv cache prune --ci
|
||||
|
||||
- name: Generate failure report string
|
||||
id: failure_report_string
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
|
||||
12
.gitignore
vendored
12
.gitignore
vendored
@@ -17,25 +17,23 @@
|
||||
# See `git help ignore` for details on the format.
|
||||
|
||||
## Config files for the dev environment
|
||||
/zproject/apns-dev.pem
|
||||
/zproject/apns-dev-key.p8
|
||||
/zproject/dev-secrets.conf
|
||||
/zproject/custom_dev_settings.py
|
||||
/tools/conf.ini
|
||||
/tools/custom_provision
|
||||
/tools/droplets/conf.ini
|
||||
|
||||
## Byproducts of setting up and using the dev environment
|
||||
*.pyc
|
||||
*.tsbuildinfo
|
||||
package-lock.json
|
||||
|
||||
/.vagrant
|
||||
/var
|
||||
/var/*
|
||||
!/var/puppeteer
|
||||
/var/puppeteer/*
|
||||
!/var/puppeteer/test_credentials.d.ts
|
||||
|
||||
/.dmypy.json
|
||||
/.ruff_cache
|
||||
/.venv
|
||||
|
||||
# Generated i18n data
|
||||
/locale/en
|
||||
@@ -58,6 +56,8 @@ zulip-git-version
|
||||
|
||||
## Files (or really symlinks) created in a prod deployment
|
||||
/zproject/prod_settings.py
|
||||
/zulip-current-venv
|
||||
/zulip-py3-venv
|
||||
|
||||
## Files left by various editors and local environments
|
||||
# (Ideally these should be in everyone's respective personal gitignore files.)
|
||||
|
||||
58
.mailmap
58
.mailmap
@@ -18,9 +18,6 @@ acrefoot <acrefoot@zulip.com> <acrefoot@humbughq.com>
|
||||
Adam Benesh <Adam.Benesh@gmail.com>
|
||||
Adam Benesh <Adam.Benesh@gmail.com> <Adam-Daniel.Benesh@t-systems.com>
|
||||
Adarsh Tiwari <xoldyckk@gmail.com>
|
||||
Aditya Chaudhary <aditya.chaudhary1558@gmail.com>
|
||||
Adnan Shabbir Husain <generaladnan139@gmail.com>
|
||||
Adnan Shabbir Husain <generaladnan139@gmail.com> <78212328+adnan-td@users.noreply.github.com>
|
||||
Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net>
|
||||
Alex Vandiver <alexmv@zulip.com> <github@chmrr.net>
|
||||
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com>
|
||||
@@ -29,32 +26,21 @@ Alya Abbott <alya@zulip.com> <2090066+alya@users.noreply.github.com>
|
||||
Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com>
|
||||
Aman Agrawal <amanagr@zulip.com>
|
||||
Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in>
|
||||
Aman Vishwakarma <vishwakarmarambhawan572@gmail.com>
|
||||
Aman Vishwakarma <vishwakarmarambhawan572@gmail.com> <185982038+whilstsomebody@users.noreply.github.com>
|
||||
Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com>
|
||||
Anders Kaseorg <anders@zulip.com> <andersk@mit.edu>
|
||||
aparna-bhatt <aparnabhatt2001@gmail.com> <86338542+aparna-bhatt@users.noreply.github.com>
|
||||
Apoorva Pendse <apoorvavpendse@gmail.com>
|
||||
Aryan Bhokare <aryan1bhokare@gmail.com>
|
||||
Aryan Bhokare <aryan1bhokare@gmail.com> <92683836+aryan-bhokare@users.noreply.github.com>
|
||||
Aryan Shridhar <aryanshridhar7@gmail.com>
|
||||
Aryan Shridhar <aryanshridhar7@gmail.com> <53977614+aryanshridhar@users.noreply.github.com>
|
||||
Ashwat Kumar Singh <ashwat.kumarsingh.met20@itbhu.ac.in>
|
||||
Austin Riba <austin@zulip.com> <austin@m51.io>
|
||||
Bedo Khaled <bedokhaled66@gmail.com>
|
||||
Bedo Khaled <bedokhaled66@gmail.com> <64221784+abdelrahman725@users.noreply.github.com>
|
||||
BIKI DAS <bikid475@gmail.com>
|
||||
Brijmohan Siyag <brijsiyag@gmail.com>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <bjwhitta@asu.edu>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <brock@zulip.com>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <brock@zulip.org>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <brock@zulipchat.org>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <brockwhittaker@Brocks-MacBook.local>
|
||||
Brock Whittaker <brock@zulipchat.com> <bjwhitta@asu.edu>
|
||||
Brock Whittaker <brock@zulipchat.com> <brock@zulipchat.org>
|
||||
Brock Whittaker <brock@zulipchat.com> <brockwhittaker@Brocks-MacBook.local>
|
||||
Chris Bobbe <cbobbe@zulip.com> <cbobbe@zulipchat.com>
|
||||
Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com>
|
||||
codewithnick <nikhilsingh526452@gmail.com>
|
||||
Danny Su <contact@dannysu.com> <opensource@emailengine.org>
|
||||
Dhruv Goyal <dhruvgoyal.dev@gmail.com>
|
||||
Dinesh <chdinesh1089@gmail.com>
|
||||
Dinesh <chdinesh1089@gmail.com> <chdinesh1089>
|
||||
Eeshan Garg <eeshan@zulip.com> <jerryguitarist@gmail.com>
|
||||
@@ -67,9 +53,6 @@ Greg Price <greg@zulip.com> <gnprice@gmail.com>
|
||||
Greg Price <greg@zulip.com> <greg@zulipchat.com>
|
||||
Greg Price <greg@zulip.com> <price@mit.edu>
|
||||
Hardik Dharmani <Ddharmani99@gmail.com> <ddharmani99@gmail.com>
|
||||
Harsh Bansal <harsh@harshbansal.in>
|
||||
Harsh Meena <reharshmeena@gmail.com>
|
||||
Harsh Meena <reharshmeena@gmail.com> <116981900+reharsh@users.noreply.github.com>
|
||||
Hemant Umre <hemantumre12@gmail.com> <87542880+HemantUmre12@users.noreply.github.com>
|
||||
Jai soni <jai_s@me.iitr.ac.in>
|
||||
Jai soni <jai_s@me.iitr.ac.in> <76561593+jai2201@users.noreply.github.com>
|
||||
@@ -77,46 +60,27 @@ Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com>
|
||||
Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com>
|
||||
Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com>
|
||||
Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com>
|
||||
Jitendra Kumar <jk69854@gmail.com>
|
||||
Jitendra Kumar <jk69854@gmail.com> <36557466+jitendra-ky@users.noreply.github.com>
|
||||
John Lu <JohnLu10212004@gmail.com>
|
||||
John Lu <JohnLu10212004@gmail.com> <87673068+JohnLu2004@users.noreply.github.com>
|
||||
Joseph Ho <josephho678@gmail.com>
|
||||
Joseph Ho <josephho678@gmail.com> <62449508+Joelute@users.noreply.github.com>
|
||||
Julia Bichler <julia.bichler@tum.de> <74348920+juliaBichler01@users.noreply.github.com>
|
||||
Karl Stolley <karl@zulip.com> <karl@stolley.dev>
|
||||
Kartikay Sambher <kartikaysambher@gmail.com>
|
||||
Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com>
|
||||
Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com>
|
||||
Kevin Scott <kevin.scott.98@gmail.com>
|
||||
Kislay Verma <kislayuv27@gmail.com>
|
||||
Klara Brrettby <klara.bratteby@gmail.com>
|
||||
Klara Brrettby <klara.bratteby@gmail.com> <93648999+klarabratteby@users.noreply.github.com>
|
||||
Kumar Aniket <sachinaniket2004@gmail.com>
|
||||
Kumar Aniket <sachinaniket2004@gmail.com> <142340063+opmkumar@users.noreply.github.com>
|
||||
Kunal Sharma <v.shm.kunal@gmail.com>
|
||||
Lalit Kumar Singh <lalitkumarsingh3716@gmail.com>
|
||||
Lalit Kumar Singh <lalitkumarsingh3716@gmail.com> <lalits01@smartek21.com>
|
||||
Lauryn Menard <lauryn@zulip.com> <63245456+laurynmm@users.noreply.github.com>
|
||||
Lauryn Menard <lauryn@zulip.com> <lauryn.menard@gmail.com>
|
||||
m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in>
|
||||
m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in> <pururshottam.tiwari.cd.cse19@itbhu.ac.in>
|
||||
Maneesh Shukla <shuklamaneesh24@gmail.com> <143504391+shuklamaneesh23@users.noreply.github.com>
|
||||
Mateusz Mandera <mateusz.mandera@zulip.com> <mateusz.mandera@protonmail.com>
|
||||
Matt Keller <matt@zulip.com>
|
||||
Matt Keller <matt@zulip.com> <m@cognusion.com>
|
||||
Nehal Sharma <bablinaneh@gmail.com>
|
||||
Nehal Sharma <bablinaneh@gmail.com> <68962290+N-Shar-ma@users.noreply.github.com>
|
||||
Nimish Medatwal <medatwalnimish@gmail.com>
|
||||
Noble Mittal <noblemittal@outlook.com> <62551163+beingnoble03@users.noreply.github.com>
|
||||
nzai <nzaih18@gmail.com> <70953556+nzaih1999@users.noreply.github.com>
|
||||
Palash Baderia <palash.baderia@outlook.com>
|
||||
Palash Baderia <palash.baderia@outlook.com> <66828942+palashb01@users.noreply.github.com>
|
||||
Palash Raghuwanshi <singhpalash0@gmail.com>
|
||||
Parth <mittalparth22@gmail.com>
|
||||
Prakhar Pratyush <prakhar@zulip.com> <prakhar841301@gmail.com>
|
||||
Pratik Chanda <pratikchanda2000@gmail.com>
|
||||
Pratik Solanki <pratiksolanki2021@gmail.com>
|
||||
Priyam Seth <sethpriyam1@gmail.com> <b19188@students.iitmandi.ac.in>
|
||||
Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com>
|
||||
Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com>
|
||||
@@ -125,24 +89,16 @@ Rishabh Maheshwari <b20063@students.iitmandi.ac.in>
|
||||
Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu>
|
||||
Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com>
|
||||
Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com>
|
||||
Ritwik Patnaik <ritwikpatnaik@gmail.com>
|
||||
Rixant Rokaha <rixantrokaha@gmail.com>
|
||||
Rixant Rokaha <rixantrokaha@gmail.com> <rishantrokaha@gmail.com>
|
||||
Rixant Rokaha <rixantrokaha@gmail.com> <rrokaha@caldwell.edu>
|
||||
Rohan Gudimetla <rohan.gudimetla07@gmail.com>
|
||||
Sahil Batra <sahil@zulip.com> <35494118+sahil839@users.noreply.github.com>
|
||||
Sahil Batra <sahil@zulip.com> <sahilbatra839@gmail.com>
|
||||
Sanchit Sharma <ssharmas10662@gmail.com>
|
||||
Satyam Bansal <sbansal1999@gmail.com>
|
||||
Sayam Samal <samal.sayam@gmail.com>
|
||||
Scott Feeney <scott@oceanbase.org> <scott@humbughq.com>
|
||||
Scott Feeney <scott@oceanbase.org> <scott@zulip.com>
|
||||
Shashank Singh <21bec103@iiitdmj.ac.in>
|
||||
Shlok Patel <shlokcpatel2001@gmail.com>
|
||||
Shu Chen <shu@zulip.com>
|
||||
Shubham Padia <shubham@zulip.com>
|
||||
Shubham Padia <shubham@zulip.com> <shubham-padia@users.noreply.github.com>
|
||||
Shubham Padia <shubham@zulip.com> <shubham@glints.com>
|
||||
Somesh Ranjan <somesh.ranjan.met20@itbhu.ac.in> <77766761+somesh202@users.noreply.github.com>
|
||||
Steve Howell <showell@zulip.com> <showell30@yahoo.com>
|
||||
Steve Howell <showell@zulip.com> <showell@yahoo.com>
|
||||
@@ -150,24 +106,16 @@ Steve Howell <showell@zulip.com> <showell@zulipchat.com>
|
||||
Steve Howell <showell@zulip.com> <steve@humbughq.com>
|
||||
Steve Howell <showell@zulip.com> <steve@zulip.com>
|
||||
strifel <info@strifel.de>
|
||||
Sujal Shah <sujalshah28092004@gmail.com>
|
||||
Tanmay Kumar <tnmdotkr@gmail.com>
|
||||
Tanmay Kumar <tnmdotkr@gmail.com> <133781250+tnmkr@users.noreply.github.com>
|
||||
Tim Abbott <tabbott@zulip.com>
|
||||
Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com>
|
||||
Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com>
|
||||
Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu>
|
||||
Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com>
|
||||
Tomasz Kolek <tomasz-kolek@o2.pl> <tomasz-kolek@go2.pl>
|
||||
Ujjawal Modi <umodi2003@gmail.com> <99073049+Ujjawal3@users.noreply.github.com>
|
||||
umkay <ukhan@zulipchat.com> <umaimah.k@gmail.com>
|
||||
umkay <ukhan@zulipchat.com> <umkay@users.noreply.github.com>
|
||||
Viktor Illmer <1476338+v-ji@users.noreply.github.com>
|
||||
Vishesh Singh <vishesh.bhu1971@gmail.com>
|
||||
Vishesh Singh <vishesh.bhu1971@gmail.com> <142628839+NotVishesh@users.noreply.github.com>
|
||||
Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com>
|
||||
Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com>
|
||||
Vivek Tripathi <vivektripathi8005@gmail.com>
|
||||
Waseem Daher <wdaher@zulip.com> <wdaher@dropbox.com>
|
||||
Waseem Daher <wdaher@zulip.com> <wdaher@humbughq.com>
|
||||
Yash RE <33805964+YashRE42@users.noreply.github.com>
|
||||
|
||||
@@ -5,13 +5,7 @@ pnpm-lock.yaml
|
||||
/locale
|
||||
/templates/**/*.md
|
||||
/tools/setup/emoji/emoji_map.json
|
||||
/web/third/*
|
||||
!/web/third/marked
|
||||
/web/third/marked/*
|
||||
!/web/third/marked/lib
|
||||
/web/third/marked/lib/*
|
||||
!/web/third/marked/lib/marked.d.cts
|
||||
/web/third
|
||||
/zerver/tests/fixtures
|
||||
/zerver/webhooks/*/doc.md
|
||||
/zerver/webhooks/github/githubsponsors.md
|
||||
/zerver/webhooks/*/fixtures
|
||||
|
||||
15
.pyre_configuration
Normal file
15
.pyre_configuration
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"source_directories": ["."],
|
||||
"taint_models_path": [
|
||||
"stubs/taint",
|
||||
"zulip-py3-venv/lib/pyre_check/taint/"
|
||||
],
|
||||
"search_path": [
|
||||
"stubs/",
|
||||
"zulip-py3-venv/lib/pyre_check/stubs/"
|
||||
],
|
||||
"typeshed": "zulip-py3-venv/lib/pyre_check/typeshed/",
|
||||
"exclude": [
|
||||
"/srv/zulip/zulip-py3-venv/.*"
|
||||
]
|
||||
}
|
||||
@@ -5,15 +5,11 @@ build:
|
||||
os: ubuntu-22.04
|
||||
tools:
|
||||
python: "3.10"
|
||||
jobs:
|
||||
create_environment:
|
||||
- asdf plugin add uv
|
||||
- asdf install uv 0.6.6
|
||||
- asdf global uv 0.6.6
|
||||
- UV_PROJECT_ENVIRONMENT=$READTHEDOCS_VIRTUALENV_PATH uv venv
|
||||
install:
|
||||
- UV_PROJECT_ENVIRONMENT=$READTHEDOCS_VIRTUALENV_PATH uv sync --frozen --only-group=docs
|
||||
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
fail_on_warning: true
|
||||
|
||||
python:
|
||||
install:
|
||||
- requirements: requirements/docs.txt
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
|
||||
[main]
|
||||
host = https://www.transifex.com
|
||||
lang_map = zh-Hans: zh_Hans
|
||||
lang_map = zh-Hans: zh_Hans, zh-Hant: zh_Hant
|
||||
|
||||
[o:zulip:p:zulip:r:djangopo]
|
||||
file_filter = locale/<lang>/LC_MESSAGES/django.po
|
||||
|
||||
@@ -66,7 +66,7 @@ organizers may take any action they deem appropriate, up to and including a
|
||||
temporary ban or permanent expulsion from the community without warning (and
|
||||
without refund in the case of a paid event).
|
||||
|
||||
If someone outside the development community (e.g., a user of the Zulip
|
||||
If someone outside the development community (e.g. a user of the Zulip
|
||||
software) engages in unacceptable behavior that affects someone in the
|
||||
community, we still want to know. Even if we don't have direct control over
|
||||
the violator, the community organizers can still support the people
|
||||
|
||||
500
CONTRIBUTING.md
500
CONTRIBUTING.md
@@ -1,102 +1,107 @@
|
||||
# Contributing guide
|
||||
|
||||
Welcome! This is a step-by-step guide on how to get started contributing code to
|
||||
the [Zulip](https://zulip.com/) organized team chat [open-source
|
||||
project](https://github.com/zulip). Thousands of people use Zulip every day, and
|
||||
your work on Zulip will have a meaningful impact on their experience. We hope
|
||||
you'll join us!
|
||||
Welcome to the Zulip community!
|
||||
|
||||
To learn about ways to contribute without writing code, please see our
|
||||
suggestions for how you can [support the Zulip
|
||||
project](https://zulip.com/help/support-zulip-project).
|
||||
## Zulip development community
|
||||
|
||||
## Learning from the docs
|
||||
The primary communication forum for the Zulip community is the Zulip
|
||||
server hosted at [chat.zulip.org](https://chat.zulip.org/):
|
||||
|
||||
Zulip has a documentation-based approach to onboarding new contributors. As you
|
||||
are getting started, this page will be your go-to for figuring out what to do
|
||||
next. You will also explore other guides, learning about how to put together
|
||||
your first pull request, diving into [Zulip's
|
||||
subsystems](https://zulip.readthedocs.io/en/latest/subsystems/index.html), and
|
||||
much more. We hope you'll find this process to be a great learning experience.
|
||||
- **Users** and **administrators** of Zulip organizations stop by to
|
||||
ask questions, offer feedback, and participate in product design
|
||||
discussions.
|
||||
- **Contributors to the project**, including the **core Zulip
|
||||
development team**, discuss ongoing and future projects, brainstorm
|
||||
ideas, and generally help each other out.
|
||||
|
||||
This page will guide you through the following steps:
|
||||
Everyone is welcome to [sign up](https://chat.zulip.org/) and
|
||||
participate — we love hearing from our users! Public streams in the
|
||||
community receive thousands of messages a week. We recommend signing
|
||||
up using the special invite links for
|
||||
[users](https://chat.zulip.org/join/t5crtoe62bpcxyisiyglmtvb/),
|
||||
[self-hosters](https://chat.zulip.org/join/wnhv3jzm6afa4raenedanfno/)
|
||||
and
|
||||
[contributors](https://chat.zulip.org/join/npzwak7vpmaknrhxthna3c7p/)
|
||||
to get a curated list of initial stream subscriptions.
|
||||
|
||||
1. [Getting started](#getting-started)
|
||||
1. [Finding an issue to work on](#finding-an-issue-to-work-on)
|
||||
1. [Getting help](#getting-help) as you work on your first pull request
|
||||
1. Learning [what makes a great Zulip contributor](#what-makes-a-great-zulip-contributor)
|
||||
1. [Submitting a pull request](#submitting-a-pull-request)
|
||||
1. [Going beyond the first issue](#beyond-the-first-issue)
|
||||
|
||||
Any time you feel lost, come back to this guide. The information you need is
|
||||
likely somewhere on this page (perhaps in the list of [common
|
||||
questions](#common-questions)), or in one of the many references it points to.
|
||||
|
||||
If you've done all you can with the documentation and are still feeling stuck,
|
||||
join the [Zulip development community](https://zulip.com/development-community/)
|
||||
to ask for help! Before you post, be sure to review [community
|
||||
To learn how to get started participating in the community, including [community
|
||||
norms](https://zulip.com/development-community/#community-norms) and [where to
|
||||
post](https://zulip.com/development-community/#where-do-i-send-my-message) your
|
||||
question. The Zulip community is governed by a [code of
|
||||
post](https://zulip.com/development-community/#where-do-i-send-my-message),
|
||||
check out our [Zulip development community
|
||||
guide](https://zulip.com/development-community/). The Zulip community is
|
||||
governed by a [code of
|
||||
conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html).
|
||||
|
||||
## Getting started
|
||||
## Ways to contribute
|
||||
|
||||
### Learning how to use Git (the Zulip way)
|
||||
To make a code or documentation contribution, read our
|
||||
[step-by-step guide](#your-first-codebase-contribution) to getting
|
||||
started with the Zulip codebase. A small sample of the type of work that
|
||||
needs doing:
|
||||
|
||||
Zulip uses GitHub for source control and code review, and becoming familiar with
|
||||
Git is essential for navigating and contributing to the Zulip codebase. [Our
|
||||
guide to Git](https://zulip.readthedocs.io/en/latest/git/index.html) will help
|
||||
you get started even if you've never used Git before.
|
||||
- Bug squashing and feature development on our Python/Django
|
||||
[backend](https://github.com/zulip/zulip), web
|
||||
[frontend](https://github.com/zulip/zulip), React Native
|
||||
[mobile app](https://github.com/zulip/zulip-mobile), or Electron
|
||||
[desktop app](https://github.com/zulip/zulip-desktop).
|
||||
- Building out our
|
||||
[Python API and bots](https://github.com/zulip/python-zulip-api) framework.
|
||||
- [Writing an integration](https://zulip.com/api/integrations-overview).
|
||||
- Improving our [user](https://zulip.com/help/) or
|
||||
[developer](https://zulip.readthedocs.io/en/latest/) documentation.
|
||||
- [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html)
|
||||
and manually testing pull requests.
|
||||
|
||||
If you're familiar with Git, you'll still want to take a look at [our
|
||||
Zulip-specific Git
|
||||
tools](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html).
|
||||
**Non-code contributions**: Some of the most valuable ways to contribute
|
||||
don't require touching the codebase at all. For example, you can:
|
||||
|
||||
### Setting up your development environment and diving in
|
||||
- Report issues, including both [feature
|
||||
requests](https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html)
|
||||
and [bug
|
||||
reports](https://zulip.readthedocs.io/en/latest/contributing/reporting-bugs.html).
|
||||
- [Give feedback](#user-feedback) if you are evaluating or using Zulip.
|
||||
- [Participate
|
||||
thoughtfully](https://zulip.readthedocs.io/en/latest/contributing/design-discussions.html)
|
||||
in design discussions.
|
||||
- [Sponsor Zulip](https://github.com/sponsors/zulip) through the GitHub sponsors program.
|
||||
- [Translate](https://zulip.readthedocs.io/en/latest/translating/translating.html)
|
||||
Zulip into your language.
|
||||
- [Stay connected](#stay-connected) with Zulip, and [help others
|
||||
find us](#help-others-find-zulip).
|
||||
|
||||
To get started contributing code to Zulip, you will need to set up the
|
||||
development environment for the Zulip codebase you want to work on. You'll then
|
||||
want to take some time to familiarize yourself with the code.
|
||||
## Your first codebase contribution
|
||||
|
||||
#### Server and web app
|
||||
This section has a step by step guide to starting as a Zulip codebase
|
||||
contributor. It's long, but don't worry about doing all the steps perfectly;
|
||||
no one gets it right the first time, and there are a lot of people available
|
||||
to help.
|
||||
|
||||
1. [Install the development
|
||||
environment](https://zulip.readthedocs.io/en/latest/development/overview.html).
|
||||
1. Familiarize yourself with [using the development
|
||||
environment](https://zulip.readthedocs.io/en/latest/development/using.html).
|
||||
1. Go through the [new application feature
|
||||
tutorial](https://zulip.readthedocs.io/en/latest/tutorials/new-feature-tutorial.html)
|
||||
to get familiar with how the Zulip codebase is organized and how to find code
|
||||
in it.
|
||||
|
||||
#### Flutter-based mobile app
|
||||
|
||||
1. Set up a development environment following the instructions in [the project
|
||||
README](https://github.com/zulip/zulip-flutter).
|
||||
1. Start reading recent commits to see the code we're writing.
|
||||
Use either a [graphical Git viewer][] like `gitk`, or `git log -p`
|
||||
with [the "secret" to reading its output][git-log-secret].
|
||||
1. Pick some of the code that appears in those Git commits and that looks
|
||||
interesting. Use your IDE to visit that code and to navigate to related code,
|
||||
reading to see how it works and how the codebase is organized.
|
||||
|
||||
[graphical Git viewer]: https://zulip.readthedocs.io/en/latest/git/setup.html#get-a-graphical-client
|
||||
[git-log-secret]: https://github.com/zulip/zulip-mobile/blob/main/docs/howto/git.md#git-log-secret
|
||||
|
||||
#### Desktop app
|
||||
|
||||
Follow [this
|
||||
documentation](https://github.com/zulip/zulip-desktop/blob/main/development.md)
|
||||
to set up the Zulip Desktop development environment.
|
||||
|
||||
#### Terminal app
|
||||
|
||||
Follow [this
|
||||
documentation](https://github.com/zulip/zulip-terminal?tab=readme-ov-file#setting-up-a-development-environment)
|
||||
to set up the Zulip Terminal development environment.
|
||||
|
||||
## Finding an issue to work on
|
||||
- First, make an account on the
|
||||
[Zulip community server](https://zulip.com/development-community/),
|
||||
paying special attention to the
|
||||
[community norms](https://zulip.com/development-community/#community-norms).
|
||||
If you'd like, introduce yourself in
|
||||
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members), using
|
||||
your name as the topic. Bonus: tell us about your first impressions of
|
||||
Zulip, and anything that felt confusing/broken or interesting/helpful as you
|
||||
started using the product.
|
||||
- Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor).
|
||||
- [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html),
|
||||
getting help in
|
||||
[#provision help](https://chat.zulip.org/#narrow/stream/21-provision-help)
|
||||
if you run into any troubles.
|
||||
- Familiarize yourself with [using the development environment](https://zulip.readthedocs.io/en/latest/development/using.html).
|
||||
- Go through the [new application feature
|
||||
tutorial](https://zulip.readthedocs.io/en/latest/tutorials/new-feature-tutorial.html) to get familiar with
|
||||
how the Zulip codebase is organized and how to find code in it.
|
||||
- Read the [Zulip guide to
|
||||
Git](https://zulip.readthedocs.io/en/latest/git/index.html) if you
|
||||
are unfamiliar with Git or Zulip's rebase-based Git workflow,
|
||||
getting help in [#git
|
||||
help](https://chat.zulip.org/#narrow/stream/44-git-help) if you run
|
||||
into any troubles. Even Git experts should read the [Zulip-specific
|
||||
Git tools
|
||||
page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html).
|
||||
|
||||
### Where to look for an issue
|
||||
|
||||
@@ -107,20 +112,12 @@ repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3
|
||||
alone.
|
||||
|
||||
You can look through issues tagged with the "help wanted" label, which is used
|
||||
to indicate the issues that are open for contributions. You'll be able to claim
|
||||
unassigned issues, which you can find using the `no:assignee` filter in GitHub.
|
||||
You can also pick up issues that are assigned but are no longer being worked on.
|
||||
|
||||
Some repositories use the "good first issue" label to tag issues that are
|
||||
especially approachable for new contributors.
|
||||
|
||||
Here are some handy links for issues to look through:
|
||||
to indicate the issues that are ready for contributions. Some repositories also
|
||||
use the "good first issue" label to tag issues that are especially approachable
|
||||
for new contributors.
|
||||
|
||||
- [Server and web app](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||
- Mobile apps: no "help wanted" label, but see the
|
||||
[project board](https://github.com/orgs/zulip/projects/5/views/4)
|
||||
for the upcoming Flutter-based app. Look for issues up through the
|
||||
"Launch" milestone, and that aren't already assigned.
|
||||
- [Mobile apps](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||
- [Desktop app](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||
- [Terminal app](https://github.com/zulip/zulip-terminal/issues?q=is%3Aopen+is%3Aissue+label%3A"help+wanted")
|
||||
- [Python API bindings and bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||
@@ -133,29 +130,21 @@ changes to tests).
|
||||
|
||||
We recommend the following process for finding an issue to work on:
|
||||
|
||||
1. Find an issue tagged with the "help wanted" label that is either unassigned,
|
||||
or looks to be abandoned.
|
||||
1. Read the description of the issue and make sure you understand it.
|
||||
1. If it seems promising, poke around the product
|
||||
1. Read the description of an issue tagged with the "help wanted" label and make
|
||||
sure you understand it.
|
||||
2. If it seems promising, poke around the product
|
||||
(on [chat.zulip.org](https://chat.zulip.org) or in the development
|
||||
environment) until you know how the piece being
|
||||
described fits into the bigger picture. If after some exploration the
|
||||
description seems confusing or ambiguous, post a question on the GitHub
|
||||
issue, as others may benefit from the clarification as well.
|
||||
1. When you find an issue you like, try to get started working on it. See if you
|
||||
3. When you find an issue you like, try to get started working on it. See if you
|
||||
can find the part of the code you'll need to modify (`git grep` is your
|
||||
friend!) and get some idea of how you'll approach the problem.
|
||||
1. If you feel lost, that's OK! Go through these steps again with another issue.
|
||||
4. If you feel lost, that's OK! Go through these steps again with another issue.
|
||||
There's plenty to work on, and the exploration you do will help you learn
|
||||
more about the project.
|
||||
|
||||
An assigned issue can be considered abandoned if:
|
||||
|
||||
- There is no recent contributor activity.
|
||||
- There are no open PRs, or an open PR needs work in order to be ready for
|
||||
review. For example, a PR may need to be updated to address reviewer feedback
|
||||
or to pass tests.
|
||||
|
||||
Note that you are _not_ claiming an issue while you are iterating through steps
|
||||
1-4. _Before you claim an issue_, you should be confident that you will be able to
|
||||
tackle it effectively.
|
||||
@@ -190,10 +179,9 @@ are set up with a GitHub workflow bot called
|
||||
requests in order to create a better workflow for Zulip contributors.
|
||||
|
||||
To claim an issue in these repositories, simply post a comment that says
|
||||
`@zulipbot claim` to the issue thread. If the issue is [tagged with a help
|
||||
wanted label and is not assigned to someone
|
||||
else](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22+no%3Aassignee),
|
||||
Zulipbot will immediately assign the issue to you.
|
||||
`@zulipbot claim` to the issue thread. If the issue is tagged with a [help
|
||||
wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||
label, Zulipbot will immediately assign the issue to you.
|
||||
|
||||
Note that new contributors can only claim one issue until their first pull request is
|
||||
merged. This is to encourage folks to finish ongoing work before starting
|
||||
@@ -204,65 +192,112 @@ issue you're interested in.
|
||||
#### In other Zulip repositories
|
||||
|
||||
There is no bot for other Zulip repositories
|
||||
([`zulip/zulip-flutter`](https://github.com/zulip/zulip-flutter/), etc.). If
|
||||
([`zulip/zulip-mobile`](https://github.com/zulip/zulip-mobile/), etc.). If
|
||||
you are interested in claiming an issue in one of these repositories, simply
|
||||
post a comment on the issue thread saying that you've started work on the
|
||||
issue and would like to claim it. In your comment, describe what part of the
|
||||
code you're modifying and how you plan to approach the problem, based on
|
||||
what you learned in steps 1–4 [above](#picking-an-issue-to-work-on).
|
||||
|
||||
There is no need to @-mention the issue creator in your comment. There is
|
||||
also no need to post the same information in multiple places, for example in
|
||||
a chat thread in addition to the GitHub issue.
|
||||
post a comment on the issue thread saying that you'd like to work on it. There
|
||||
is no need to @-mention the issue creator in your comment.
|
||||
|
||||
Please follow the same guidelines as described above: find an issue labeled
|
||||
"help wanted", and only pick up one issue at a time to start with.
|
||||
|
||||
## Getting help
|
||||
### Working on an issue
|
||||
|
||||
You may have questions as you work on your pull request. For example, you might
|
||||
not be sure about some details of what's required, or have questions about your
|
||||
implementation approach. Zulip's maintainers are happy to answer thoughtfully
|
||||
posed questions, and discuss any difficulties that might arise as you work on
|
||||
your PR.
|
||||
You're encouraged to ask questions on how to best implement or debug your
|
||||
changes -- the Zulip maintainers are excited to answer questions to help you
|
||||
stay unblocked and working efficiently. You can ask questions in the [Zulip
|
||||
development community](https://zulip.com/development-community/), or on the
|
||||
GitHub issue or pull request.
|
||||
|
||||
If you haven't done so yet, now is the time to join the [Zulip development
|
||||
community](https://zulip.com/development-community/). If you'd like, introduce
|
||||
yourself in the [#new
|
||||
members](https://chat.zulip.org/#narrow/channel/95-new-members) channel, using
|
||||
your name as the [topic](https://zulip.com/help/introduction-to-topics).
|
||||
To get early feedback on any UI changes, we encourage you to post screenshots of
|
||||
your work in the [#design
|
||||
stream](https://chat.zulip.org/#narrow/stream/101-design) in the [Zulip
|
||||
development community](https://zulip.com/development-community/)
|
||||
|
||||
You can get help in public channels in the community:
|
||||
For more advice, see [What makes a great Zulip
|
||||
contributor?](#what-makes-a-great-zulip-contributor) below. It's OK if your
|
||||
first issue takes you a while; that's normal! You'll be able to work a lot
|
||||
faster as you build experience.
|
||||
|
||||
1. **Review** the [Zulip development community
|
||||
guidelines](https://zulip.com/development-community/#community-norms).
|
||||
### Submitting a pull request
|
||||
|
||||
1. **Decide where to post.** If there is a discussion thread linked from the
|
||||
issue you're working on, that's usually the best place to post any
|
||||
clarification questions about the issue. Otherwise, follow [these
|
||||
guidelines](https://zulip.com/development-community/#where-do-i-send-my-message)
|
||||
to figure out where to post your question. Don’t stress too much about
|
||||
picking the right place if you’re not sure, as moderators can [move your
|
||||
question thread to a different
|
||||
channel](https://zulip.com/help/move-content-to-another-channel) if needed.
|
||||
See the [pull request review
|
||||
process](https://zulip.readthedocs.io/en/latest/contributing/review-process.html)
|
||||
guide for detailed instructions on how to submit a pull request, and information
|
||||
on the stages of review your PR will go through.
|
||||
|
||||
1. **Write** up your question, being sure to follow our [guide on asking great
|
||||
questions](https://zulip.readthedocs.io/en/latest/contributing/asking-great-questions.html).
|
||||
The guide explains what you need to do make sure that folks will be able to
|
||||
help you out, and that you're making good use of maintainers' limited time.
|
||||
### Beyond the first issue
|
||||
|
||||
1. **Review** your message before you send it. Will your question make sense to
|
||||
someone who is familiar with Zulip, but might not have the details of what
|
||||
you are working on fresh in mind?
|
||||
To find a second issue to work on, we recommend looking through issues with the same
|
||||
`area:` label as the last issue you resolved. You'll be able to reuse the
|
||||
work you did learning how that part of the codebase works. Also, the path to
|
||||
becoming a core developer often involves taking ownership of one of these area
|
||||
labels.
|
||||
|
||||
Well-posed questions will generally get a response within 1-2 business days.
|
||||
There is no need to @-mention anyone when you ask a question, as maintainers
|
||||
keep a close eye on all the ongoing discussions.
|
||||
### Common questions
|
||||
|
||||
- **What if somebody is already working on the issue I want do claim?** There
|
||||
are lots of issue to work on! If somebody else is actively working on the
|
||||
issue, you can find a different one, or help with
|
||||
reviewing their work.
|
||||
- **What if somebody else claims an issue while I'm figuring out whether or not to
|
||||
work on it?** No worries! You can contribute by providing feedback on
|
||||
their pull request. If you've made good progress in understanding part of the
|
||||
codebase, you can also find another "help wanted" issue in the same area to
|
||||
work on.
|
||||
- **What if there is already a pull request for the issue I want to work on?**
|
||||
Start by reviewing the existing work. If you agree with the approach, you can
|
||||
use the existing pull request (PR) as a starting point for your contribution. If
|
||||
you think a different approach is needed, you can post a new PR, with a comment that clearly
|
||||
explains _why_ you decided to start from scratch.
|
||||
- **What if I ask if someone is still working on an issue, and they don't
|
||||
respond?** If you don't get a reply within 2-3 days, go ahead and post a comment
|
||||
that you are working on the issue, and submit a pull request. If the original
|
||||
assignee ends up submitting a pull request first, no worries! You can help by
|
||||
providing feedback on their work, or submit your own PR if you think a
|
||||
different approach is needed (as described above).
|
||||
- **Can I come up with my own feature idea and work on it?** We welcome
|
||||
suggestions of features or other improvements that you feel would be valuable. If you
|
||||
have a new feature you'd like to add, you can start a conversation [in our
|
||||
development community](https://zulip.com/development-community/#where-do-i-send-my-message)
|
||||
explaining the feature idea and the problem that you're hoping to solve.
|
||||
- **I'm waiting for the next round of review on my PR. Can I pick up
|
||||
another issue in the meantime?** Someone's first Zulip PR often
|
||||
requires quite a bit of iteration, so please [make sure your pull
|
||||
request is reviewable][reviewable-pull-requests] and go through at
|
||||
least one round of feedback from others before picking up a second
|
||||
issue. After that, sure! If
|
||||
[Zulipbot](https://github.com/zulip/zulipbot) does not allow you to
|
||||
claim an issue, you can post a comment describing the status of your
|
||||
other work on the issue you're interested in, and asking for the
|
||||
issue to be assigned to you. Note that addressing feedback on
|
||||
in-progress PRs should always take priority over starting a new PR.
|
||||
- **I think my PR is done, but it hasn't been merged yet. What's going on?**
|
||||
1. **Double-check that you have addressed all the feedback**, including any comments
|
||||
on [Git commit
|
||||
discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html).
|
||||
2. If all the feedback has been addressed, did you [leave a
|
||||
comment](https://zulip.readthedocs.io/en/latest/contributing/review-process.html#how-to-help-move-the-review-process-forward)
|
||||
explaining that you have done so and **requesting another review**? If not,
|
||||
it may not be clear to project maintainers or reviewers that your PR is
|
||||
ready for another look.
|
||||
3. There may be a pause between initial rounds of review for your PR and final
|
||||
review by project maintainers. This is normal, and we encourage you to **work
|
||||
on other issues** while you wait.
|
||||
4. If you think the PR is ready and haven't seen any updates for a couple
|
||||
of weeks, it can be helpful to **leave another comment**. Summarize the
|
||||
overall state of the review process and your work, and indicate that you
|
||||
are waiting for a review.
|
||||
5. Finally, **Zulip project maintainers are people too**! They may be busy
|
||||
with other work, and sometimes they might even take a vacation. ;) It can
|
||||
occasionally take a few weeks for a PR in the final stages of the review
|
||||
process to be merged.
|
||||
|
||||
[reviewable-pull-requests]: https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html
|
||||
|
||||
## What makes a great Zulip contributor?
|
||||
|
||||
As you're working on your first code contribution, here are some best practices
|
||||
to keep in mind.
|
||||
Zulip has a lot of experience working with new contributors. In our
|
||||
experience, these are the best predictors of success:
|
||||
|
||||
- [Asking great questions][great-questions]. It's very hard to answer a general
|
||||
question like, "How do I do this issue?" When asking for help, explain your
|
||||
@@ -291,114 +326,30 @@ to keep in mind.
|
||||
|
||||
[great-questions]: https://zulip.readthedocs.io/en/latest/contributing/asking-great-questions.html
|
||||
|
||||
## Submitting a pull request
|
||||
## User feedback
|
||||
|
||||
See the [guide on submitting a pull
|
||||
request](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html)
|
||||
for detailed instructions on how to present your proposed changes to Zulip.
|
||||
Nearly every feature we develop starts with a user request. If you are part
|
||||
of a group that is either using or considering using Zulip, we would love to
|
||||
hear about your experience with the product. If you're not sure what to
|
||||
write, here are some questions we're always very curious to know the answer
|
||||
to:
|
||||
|
||||
The [pull request review process
|
||||
guide](https://zulip.readthedocs.io/en/latest/contributing/review-process.html)
|
||||
explains the stages of review your PR will go through, and offers guidance on
|
||||
how to help the review process move forward.
|
||||
- Evaluation: What is the process by which your organization chose or will
|
||||
choose a group chat product?
|
||||
- Pros and cons: What are the pros and cons of Zulip for your organization,
|
||||
and the pros and cons of other products you are evaluating?
|
||||
- Features: What are the features that are most important for your
|
||||
organization? In the best-case scenario, what would your chat solution do
|
||||
for you?
|
||||
- Onboarding: If you remember it, what was your impression during your first
|
||||
few minutes of using Zulip? What did you notice, and how did you feel? Was
|
||||
there anything that stood out to you as confusing, or broken, or great?
|
||||
- Organization: What does your organization do? How big is the organization?
|
||||
A link to your organization's website?
|
||||
|
||||
It's OK if your first issue takes you a while; that's normal! You'll be able to
|
||||
work a lot faster as you build experience.
|
||||
|
||||
## Beyond the first issue
|
||||
|
||||
To find a second issue to work on, we recommend looking through issues with the same
|
||||
`area:` label as the last issue you resolved. You'll be able to reuse the
|
||||
work you did learning how that part of the codebase works. Also, the path to
|
||||
becoming a core developer often involves taking ownership of one of these area
|
||||
labels.
|
||||
|
||||
## Common questions
|
||||
|
||||
- **What if somebody is already working on the issue I want to claim?** There
|
||||
are lots of issues to work on (likely
|
||||
[hundreds](https://github.com/zulip/zulip/issues?q=is%3Aissue%20state%3Aopen%20label%3A%22help%20wanted%22%20no%3Aassignee)
|
||||
in the server repository)! If somebody else is actively working on the issue,
|
||||
you can find a different one, or help with reviewing their work.
|
||||
|
||||
- **What if it looks like the person who's assigned an issue is no longer
|
||||
working on it?** Post a comment on the issue, e.g., "Hi @ someone! Are you
|
||||
still working on this one? I'd like to pick it up if not." You can pick up the
|
||||
issue if they say they don't plan to work on it more.
|
||||
|
||||
- **What if I don't get a response?** If you don't get a reply within 2-3
|
||||
days, go ahead and post a comment that you are working on the issue, and
|
||||
submit a pull request. If the original assignee ends up submitting a pull
|
||||
request first, no worries! You can help by providing feedback on their work,
|
||||
or submit your own PR if you think a different approach is needed (as
|
||||
described above).
|
||||
|
||||
- **What if there is already a pull request for the issue I want to work on?**
|
||||
See our [guide on continuing unfinished
|
||||
work](https://zulip.readthedocs.io/en/latest/contributing/continuing-unfinished-work.html).
|
||||
|
||||
- **What if somebody else claims an issue while I'm figuring out whether or not to
|
||||
work on it?** No worries! You can contribute by providing feedback on
|
||||
their pull request. If you've made good progress in understanding part of the
|
||||
codebase, you can also find another "help wanted" issue in the same area to
|
||||
work on.
|
||||
|
||||
- **Can I work on an old issue?** Of course! Open issues marked as “help wanted”
|
||||
are generally eligible to be worked on. If you find that the context around
|
||||
the issue has changed (e.g., the UI looks different), do your best to apply
|
||||
the current patterns, and comment on any differences from the spec in your PR
|
||||
description.
|
||||
|
||||
If picking up a bug, start by checking if you can replicate it. If it no longer
|
||||
replicates, post a comment on the issue explaining how you tested the
|
||||
behavior, and what you saw, with screenshots as appropriate. And if you _can_
|
||||
replicate it, fixing it is great!
|
||||
|
||||
If you're starting a major project where the issue was filed more than a
|
||||
couple of years ago, it's a good idea to post to the development community
|
||||
discussion thread for that issue to check if the thinking around it has
|
||||
changed.
|
||||
|
||||
- **Can I come up with my own feature idea and work on it?** We welcome
|
||||
suggestions of features or other improvements that you feel would be valuable. If you
|
||||
have a new feature you'd like to add, you can start a conversation [in our
|
||||
development community](https://zulip.com/development-community/#where-do-i-send-my-message)
|
||||
explaining the feature idea and the problem that you're hoping to solve.
|
||||
- **I'm waiting for the next round of review on my PR. Can I pick up
|
||||
another issue in the meantime?** Someone's first Zulip PR often
|
||||
requires quite a bit of iteration, so please [make sure your pull
|
||||
request is reviewable][reviewable-pull-requests] and go through at
|
||||
least one round of feedback from others before picking up a second
|
||||
issue. After that, sure! If
|
||||
[Zulipbot](https://github.com/zulip/zulipbot) does not allow you to
|
||||
claim an issue, you can post a comment describing the status of your
|
||||
other work on the issue you're interested in (including links to all open
|
||||
PRs), and asking for the issue to be assigned to you. Note that addressing
|
||||
feedback on in-progress PRs should always take priority over starting a new
|
||||
PR.
|
||||
- **I think my PR is done, but it hasn't been merged yet. What's going on?**
|
||||
1. **Double-check that you have addressed all the feedback**, including any comments
|
||||
on [Git commit
|
||||
discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html),
|
||||
and that automated tests are passing.
|
||||
2. If all the feedback has been addressed, did you [leave a
|
||||
comment](https://zulip.readthedocs.io/en/latest/contributing/review-process.html#how-to-help-move-the-review-process-forward)
|
||||
explaining that you have done so and **requesting another review**? If not,
|
||||
it may not be clear to project maintainers or reviewers that your PR is
|
||||
ready for another look.
|
||||
3. There may be a pause between initial rounds of review for your PR and final
|
||||
review by project maintainers. This is normal, and we encourage you to **work
|
||||
on other issues** while you wait.
|
||||
4. If you think the PR is ready and haven't seen any updates for a couple
|
||||
of weeks, it can be helpful to **leave another comment**. Summarize the
|
||||
overall state of the review process and your work, and indicate that you
|
||||
are waiting for a review.
|
||||
5. Finally, **Zulip project maintainers are people too**! They may be busy
|
||||
with other work, and sometimes they might even take a vacation. ;) It can
|
||||
occasionally take a few weeks for a PR in the final stages of the review
|
||||
process to be merged.
|
||||
|
||||
[reviewable-pull-requests]: https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html
|
||||
You can contact us in the [#feedback stream of the Zulip development
|
||||
community](https://chat.zulip.org/#narrow/stream/137-feedback) or
|
||||
by emailing [support@zulip.com](mailto:support@zulip.com).
|
||||
|
||||
## Outreach programs
|
||||
|
||||
@@ -416,3 +367,34 @@ more about participating in an outreach program with Zulip. Most of our program
|
||||
participants end up sticking around the project long-term, and many have become
|
||||
core team members, maintaining important parts of the project. We hope you
|
||||
apply!
|
||||
|
||||
## Stay connected
|
||||
|
||||
Even if you are not logging into the development community on a regular basis,
|
||||
you can still stay connected with the project.
|
||||
|
||||
- Follow us [on Twitter](https://twitter.com/zulip).
|
||||
- Subscribe to [our blog](https://blog.zulip.org/).
|
||||
- Join or follow the project [on LinkedIn](https://www.linkedin.com/company/zulip-project/).
|
||||
|
||||
## Help others find Zulip
|
||||
|
||||
Here are some ways you can help others find Zulip:
|
||||
|
||||
- Star us on GitHub. There are four main repositories:
|
||||
[server/web](https://github.com/zulip/zulip),
|
||||
[mobile](https://github.com/zulip/zulip-mobile),
|
||||
[desktop](https://github.com/zulip/zulip-desktop), and
|
||||
[Python API](https://github.com/zulip/python-zulip-api).
|
||||
|
||||
- "Like" and retweet [our tweets](https://twitter.com/zulip).
|
||||
|
||||
- Upvote and post feedback on Zulip on comparison websites. A couple specific
|
||||
ones to highlight:
|
||||
|
||||
- [AlternativeTo](https://alternativeto.net/software/zulip-chat-server/). You can also
|
||||
[upvote Zulip](https://alternativeto.net/software/slack/) on their page
|
||||
for Slack.
|
||||
- [Add Zulip to your stack](https://stackshare.io/zulip) on StackShare, star
|
||||
it, and upvote the reasons why people like Zulip that you find most
|
||||
compelling.
|
||||
|
||||
@@ -18,6 +18,7 @@ Come find us on the [development community chat](https://zulip.com/development-c
|
||||
[](https://codecov.io/gh/zulip/zulip)
|
||||
[][mypy-coverage]
|
||||
[](https://github.com/astral-sh/ruff)
|
||||
[](https://github.com/psf/black)
|
||||
[](https://github.com/prettier/prettier)
|
||||
[](https://github.com/zulip/zulip/releases/latest)
|
||||
[](https://zulip.readthedocs.io/en/latest/)
|
||||
@@ -43,7 +44,7 @@ Come find us on the [development community chat](https://zulip.com/development-c
|
||||
issue](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#reporting-issues),
|
||||
[translate](https://zulip.readthedocs.io/en/latest/translating/translating.html)
|
||||
Zulip into your language, or [give us
|
||||
feedback](https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html).
|
||||
feedback](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#user-feedback).
|
||||
We'd love to hear from you, whether you've been using Zulip for years, or are just
|
||||
trying it out for the first time.
|
||||
|
||||
|
||||
2
Vagrantfile
vendored
2
Vagrantfile
vendored
@@ -15,7 +15,7 @@ Vagrant.configure("2") do |config|
|
||||
ubuntu_mirror = ""
|
||||
vboxadd_version = nil
|
||||
|
||||
config.vm.box = "bento/ubuntu-22.04"
|
||||
config.vm.box = "bento/ubuntu-20.04"
|
||||
|
||||
config.vm.synced_folder ".", "/vagrant", disabled: true
|
||||
config.vm.synced_folder ".", "/srv/zulip", docker_consistency: "z"
|
||||
|
||||
@@ -1,15 +1,14 @@
|
||||
import logging
|
||||
import time
|
||||
from collections import OrderedDict, defaultdict
|
||||
from collections.abc import Callable, Sequence
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import TypeAlias, Union
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import connection, models
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from django.db.models import F
|
||||
from psycopg2.sql import SQL, Composable, Identifier, Literal
|
||||
from typing_extensions import override
|
||||
from typing_extensions import TypeAlias, override
|
||||
|
||||
from analytics.models import (
|
||||
BaseCount,
|
||||
@@ -20,26 +19,18 @@ from analytics.models import (
|
||||
UserCount,
|
||||
installation_epoch,
|
||||
)
|
||||
from zerver.lib.logging_util import log_to_file
|
||||
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, floor_to_hour, verify_UTC
|
||||
from zerver.models import Message, Realm, Stream, UserActivityInterval, UserProfile
|
||||
from zerver.models.realm_audit_logs import AuditLogEventType
|
||||
from zerver.models import Message, Realm, RealmAuditLog, Stream, UserActivityInterval, UserProfile
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
from zilencer.models import (
|
||||
RemoteInstallationCount,
|
||||
RemoteRealm,
|
||||
RemoteRealmCount,
|
||||
RemoteZulipServer,
|
||||
)
|
||||
|
||||
|
||||
logger = logging.getLogger("zulip.analytics")
|
||||
## Logging setup ##
|
||||
|
||||
logger = logging.getLogger("zulip.management")
|
||||
log_to_file(logger, settings.ANALYTICS_LOG_PATH)
|
||||
|
||||
# You can't subtract timedelta.max from a datetime, so use this instead
|
||||
TIMEDELTA_MAX = timedelta(days=365 * 1000)
|
||||
|
||||
|
||||
## Class definitions ##
|
||||
|
||||
|
||||
@@ -59,7 +50,7 @@ class CountStat:
|
||||
property: str,
|
||||
data_collector: "DataCollector",
|
||||
frequency: str,
|
||||
interval: timedelta | None = None,
|
||||
interval: Optional[timedelta] = None,
|
||||
) -> None:
|
||||
self.property = property
|
||||
self.data_collector = data_collector
|
||||
@@ -76,7 +67,7 @@ class CountStat:
|
||||
def __repr__(self) -> str:
|
||||
return f"<CountStat: {self.property}>"
|
||||
|
||||
def last_successful_fill(self) -> datetime | None:
|
||||
def last_successful_fill(self) -> Optional[datetime]:
|
||||
fillstate = FillState.objects.filter(property=self.property).first()
|
||||
if fillstate is None:
|
||||
return None
|
||||
@@ -84,30 +75,9 @@ class CountStat:
|
||||
return fillstate.end_time
|
||||
return fillstate.end_time - self.time_increment
|
||||
|
||||
def current_month_accumulated_count_for_user(self, user: UserProfile) -> int:
|
||||
now = timezone_now()
|
||||
start_of_month = datetime(now.year, now.month, 1, tzinfo=timezone.utc)
|
||||
if now.month == 12: # nocoverage
|
||||
start_of_next_month = datetime(now.year + 1, 1, 1, tzinfo=timezone.utc)
|
||||
else: # nocoverage
|
||||
start_of_next_month = datetime(now.year, now.month + 1, 1, tzinfo=timezone.utc)
|
||||
|
||||
# We just want to check we are not using BaseCount, otherwise all
|
||||
# `output_table` have `objects` property.
|
||||
assert self.data_collector.output_table == UserCount
|
||||
result = self.data_collector.output_table.objects.filter( # type: ignore[attr-defined] # see above
|
||||
user=user,
|
||||
property=self.property,
|
||||
end_time__gt=start_of_month,
|
||||
end_time__lte=start_of_next_month,
|
||||
).aggregate(models.Sum("value"))
|
||||
|
||||
total_value = result["value__sum"] or 0
|
||||
return total_value
|
||||
|
||||
|
||||
class LoggingCountStat(CountStat):
|
||||
def __init__(self, property: str, output_table: type[BaseCount], frequency: str) -> None:
|
||||
def __init__(self, property: str, output_table: Type[BaseCount], frequency: str) -> None:
|
||||
CountStat.__init__(self, property, DataCollector(output_table, None), frequency)
|
||||
|
||||
|
||||
@@ -117,7 +87,7 @@ class DependentCountStat(CountStat):
|
||||
property: str,
|
||||
data_collector: "DataCollector",
|
||||
frequency: str,
|
||||
interval: timedelta | None = None,
|
||||
interval: Optional[timedelta] = None,
|
||||
dependencies: Sequence[str] = [],
|
||||
) -> None:
|
||||
CountStat.__init__(self, property, data_collector, frequency, interval=interval)
|
||||
@@ -127,20 +97,19 @@ class DependentCountStat(CountStat):
|
||||
class DataCollector:
|
||||
def __init__(
|
||||
self,
|
||||
output_table: type[BaseCount],
|
||||
pull_function: Callable[[str, datetime, datetime, Realm | None], int] | None,
|
||||
output_table: Type[BaseCount],
|
||||
pull_function: Optional[Callable[[str, datetime, datetime, Optional[Realm]], int]],
|
||||
) -> None:
|
||||
self.output_table = output_table
|
||||
self.pull_function = pull_function
|
||||
|
||||
def depends_on_realm(self) -> bool:
|
||||
return self.output_table in (UserCount, StreamCount)
|
||||
|
||||
|
||||
## CountStat-level operations ##
|
||||
|
||||
|
||||
def process_count_stat(stat: CountStat, fill_to_time: datetime, realm: Realm | None = None) -> None:
|
||||
def process_count_stat(
|
||||
stat: CountStat, fill_to_time: datetime, realm: Optional[Realm] = None
|
||||
) -> None:
|
||||
# TODO: The realm argument is not yet supported, in that we don't
|
||||
# have a solution for how to update FillState if it is passed. It
|
||||
# exists solely as partial plumbing for when we do fully implement
|
||||
@@ -182,7 +151,7 @@ def process_count_stat(stat: CountStat, fill_to_time: datetime, realm: Realm | N
|
||||
return
|
||||
fill_to_time = min(fill_to_time, dependency_fill_time)
|
||||
|
||||
currently_filled += stat.time_increment
|
||||
currently_filled = currently_filled + stat.time_increment
|
||||
while currently_filled <= fill_to_time:
|
||||
logger.info("START %s %s", stat.property, currently_filled)
|
||||
start = time.time()
|
||||
@@ -190,7 +159,7 @@ def process_count_stat(stat: CountStat, fill_to_time: datetime, realm: Realm | N
|
||||
do_fill_count_stat_at_hour(stat, currently_filled, realm)
|
||||
do_update_fill_state(fill_state, currently_filled, FillState.DONE)
|
||||
end = time.time()
|
||||
currently_filled += stat.time_increment
|
||||
currently_filled = currently_filled + stat.time_increment
|
||||
logger.info("DONE %s (%dms)", stat.property, (end - start) * 1000)
|
||||
|
||||
|
||||
@@ -203,7 +172,7 @@ def do_update_fill_state(fill_state: FillState, end_time: datetime, state: int)
|
||||
# We assume end_time is valid (e.g. is on a day or hour boundary as appropriate)
|
||||
# and is time-zone-aware. It is the caller's responsibility to enforce this!
|
||||
def do_fill_count_stat_at_hour(
|
||||
stat: CountStat, end_time: datetime, realm: Realm | None = None
|
||||
stat: CountStat, end_time: datetime, realm: Optional[Realm] = None
|
||||
) -> None:
|
||||
start_time = end_time - stat.interval
|
||||
if not isinstance(stat, LoggingCountStat):
|
||||
@@ -222,7 +191,7 @@ def do_fill_count_stat_at_hour(
|
||||
def do_delete_counts_at_hour(stat: CountStat, end_time: datetime) -> None:
|
||||
if isinstance(stat, LoggingCountStat):
|
||||
InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()
|
||||
if stat.data_collector.depends_on_realm():
|
||||
if stat.data_collector.output_table in [UserCount, StreamCount]:
|
||||
RealmCount.objects.filter(property=stat.property, end_time=end_time).delete()
|
||||
else:
|
||||
UserCount.objects.filter(property=stat.property, end_time=end_time).delete()
|
||||
@@ -232,7 +201,7 @@ def do_delete_counts_at_hour(stat: CountStat, end_time: datetime) -> None:
|
||||
|
||||
|
||||
def do_aggregate_to_summary_table(
|
||||
stat: CountStat, end_time: datetime, realm: Realm | None = None
|
||||
stat: CountStat, end_time: datetime, realm: Optional[Realm] = None
|
||||
) -> None:
|
||||
cursor = connection.cursor()
|
||||
|
||||
@@ -243,7 +212,7 @@ def do_aggregate_to_summary_table(
|
||||
else:
|
||||
realm_clause = SQL("")
|
||||
|
||||
if stat.data_collector.depends_on_realm():
|
||||
if output_table in (UserCount, StreamCount):
|
||||
realmcount_query = SQL(
|
||||
"""
|
||||
INSERT INTO analytics_realmcount
|
||||
@@ -324,9 +293,9 @@ def do_aggregate_to_summary_table(
|
||||
|
||||
# called from zerver.actions; should not throw any errors
|
||||
def do_increment_logging_stat(
|
||||
model_object_for_bucket: Union[Realm, UserProfile, Stream, "RemoteRealm", "RemoteZulipServer"],
|
||||
zerver_object: Union[Realm, UserProfile, Stream],
|
||||
stat: CountStat,
|
||||
subgroup: str | int | bool | None,
|
||||
subgroup: Optional[Union[str, int, bool]],
|
||||
event_time: datetime,
|
||||
increment: int = 1,
|
||||
) -> None:
|
||||
@@ -334,100 +303,31 @@ def do_increment_logging_stat(
|
||||
return
|
||||
|
||||
table = stat.data_collector.output_table
|
||||
id_args: dict[str, int | None] = {}
|
||||
conflict_args: list[str] = []
|
||||
if table == RealmCount:
|
||||
assert isinstance(model_object_for_bucket, Realm)
|
||||
id_args = {"realm_id": model_object_for_bucket.id}
|
||||
conflict_args = ["realm_id"]
|
||||
assert isinstance(zerver_object, Realm)
|
||||
id_args: Dict[str, Union[Realm, UserProfile, Stream]] = {"realm": zerver_object}
|
||||
elif table == UserCount:
|
||||
assert isinstance(model_object_for_bucket, UserProfile)
|
||||
id_args = {
|
||||
"realm_id": model_object_for_bucket.realm_id,
|
||||
"user_id": model_object_for_bucket.id,
|
||||
}
|
||||
conflict_args = ["user_id"]
|
||||
elif table == StreamCount:
|
||||
assert isinstance(model_object_for_bucket, Stream)
|
||||
id_args = {
|
||||
"realm_id": model_object_for_bucket.realm_id,
|
||||
"stream_id": model_object_for_bucket.id,
|
||||
}
|
||||
conflict_args = ["stream_id"]
|
||||
elif table == RemoteInstallationCount:
|
||||
assert isinstance(model_object_for_bucket, RemoteZulipServer)
|
||||
id_args = {"server_id": model_object_for_bucket.id, "remote_id": None}
|
||||
conflict_args = ["server_id"]
|
||||
elif table == RemoteRealmCount:
|
||||
assert isinstance(model_object_for_bucket, RemoteRealm)
|
||||
# For RemoteRealmCount (e.g. `mobile_pushes_forwarded::day`),
|
||||
# we have no `remote_id` nor `realm_id`, since they are not
|
||||
# imported from the remote server, which is the source of
|
||||
# truth of those two columns. Their "ON CONFLICT" is thus the
|
||||
# only unique key we have, which is `remote_realm_id`, and not
|
||||
# `server_id` / `realm_id`.
|
||||
id_args = {
|
||||
"server_id": model_object_for_bucket.server_id,
|
||||
"remote_realm_id": model_object_for_bucket.id,
|
||||
"remote_id": None,
|
||||
"realm_id": None,
|
||||
}
|
||||
conflict_args = [
|
||||
"remote_realm_id",
|
||||
]
|
||||
else:
|
||||
raise AssertionError("Unsupported CountStat output_table")
|
||||
assert isinstance(zerver_object, UserProfile)
|
||||
id_args = {"realm": zerver_object.realm, "user": zerver_object}
|
||||
else: # StreamCount
|
||||
assert isinstance(zerver_object, Stream)
|
||||
id_args = {"realm": zerver_object.realm, "stream": zerver_object}
|
||||
|
||||
if stat.frequency == CountStat.DAY:
|
||||
end_time = ceiling_to_day(event_time)
|
||||
elif stat.frequency == CountStat.HOUR:
|
||||
else: # CountStat.HOUR:
|
||||
end_time = ceiling_to_hour(event_time)
|
||||
else:
|
||||
raise AssertionError("Unsupported CountStat frequency")
|
||||
|
||||
is_subgroup: SQL = SQL("NULL")
|
||||
if subgroup is not None:
|
||||
is_subgroup = SQL("NOT NULL")
|
||||
# For backwards consistency, we cast the subgroup to a string
|
||||
# in Python; this emulates the behaviour of `get_or_create`,
|
||||
# which was previously used in this function, and performed
|
||||
# this cast because the `subgroup` column is defined as a
|
||||
# `CharField`. Omitting this explicit cast causes a subgroup
|
||||
# of the boolean False to be passed as the PostgreSQL false,
|
||||
# which it stringifies as the lower-case `'false'`, not the
|
||||
# initial-case `'False'` if Python stringifies it.
|
||||
#
|
||||
# Other parts of the system (e.g. count_message_by_user_query)
|
||||
# already use PostgreSQL to cast bools to strings, resulting
|
||||
# in `subgroup` values of lower-case `'false'` -- for example
|
||||
# in `messages_sent:is_bot:hour`. Fixing this inconsistency
|
||||
# via a migration is complicated by these records being
|
||||
# exchanged over the wire from remote servers.
|
||||
subgroup = str(subgroup)
|
||||
conflict_args.append("subgroup")
|
||||
|
||||
id_column_names = SQL(", ").join(map(Identifier, id_args.keys()))
|
||||
id_values = SQL(", ").join(map(Literal, id_args.values()))
|
||||
conflict_columns = SQL(", ").join(map(Identifier, conflict_args))
|
||||
|
||||
sql_query = SQL(
|
||||
"""
|
||||
INSERT INTO {table_name}(property, subgroup, end_time, value, {id_column_names})
|
||||
VALUES (%s, %s, %s, %s, {id_values})
|
||||
ON CONFLICT (property, end_time, {conflict_columns})
|
||||
WHERE subgroup IS {is_subgroup}
|
||||
DO UPDATE SET
|
||||
value = {table_name}.value + EXCLUDED.value
|
||||
"""
|
||||
).format(
|
||||
table_name=Identifier(table._meta.db_table),
|
||||
id_column_names=id_column_names,
|
||||
id_values=id_values,
|
||||
conflict_columns=conflict_columns,
|
||||
is_subgroup=is_subgroup,
|
||||
row, created = table._default_manager.get_or_create(
|
||||
property=stat.property,
|
||||
subgroup=subgroup,
|
||||
end_time=end_time,
|
||||
defaults={"value": increment},
|
||||
**id_args,
|
||||
)
|
||||
with connection.cursor() as cursor:
|
||||
cursor.execute(sql_query, [stat.property, subgroup, end_time, increment])
|
||||
if not created:
|
||||
row.value = F("value") + increment
|
||||
row.save(update_fields=["value"])
|
||||
|
||||
|
||||
def do_drop_all_analytics_tables() -> None:
|
||||
@@ -448,7 +348,7 @@ def do_drop_single_stat(property: str) -> None:
|
||||
|
||||
## DataCollector-level operations ##
|
||||
|
||||
QueryFn: TypeAlias = Callable[[dict[str, Composable]], Composable]
|
||||
QueryFn: TypeAlias = Callable[[Dict[str, Composable]], Composable]
|
||||
|
||||
|
||||
def do_pull_by_sql_query(
|
||||
@@ -456,7 +356,7 @@ def do_pull_by_sql_query(
|
||||
start_time: datetime,
|
||||
end_time: datetime,
|
||||
query: QueryFn,
|
||||
group_by: tuple[type[models.Model], str] | None,
|
||||
group_by: Optional[Tuple[Type[models.Model], str]],
|
||||
) -> int:
|
||||
if group_by is None:
|
||||
subgroup: Composable = SQL("NULL")
|
||||
@@ -490,12 +390,12 @@ def do_pull_by_sql_query(
|
||||
|
||||
|
||||
def sql_data_collector(
|
||||
output_table: type[BaseCount],
|
||||
output_table: Type[BaseCount],
|
||||
query: QueryFn,
|
||||
group_by: tuple[type[models.Model], str] | None,
|
||||
group_by: Optional[Tuple[Type[models.Model], str]],
|
||||
) -> DataCollector:
|
||||
def pull_function(
|
||||
property: str, start_time: datetime, end_time: datetime, realm: Realm | None = None
|
||||
property: str, start_time: datetime, end_time: datetime, realm: Optional[Realm] = None
|
||||
) -> int:
|
||||
# The pull function type needs to accept a Realm argument
|
||||
# because the 'minutes_active::day' CountStat uses
|
||||
@@ -508,42 +408,8 @@ def sql_data_collector(
|
||||
return DataCollector(output_table, pull_function)
|
||||
|
||||
|
||||
def count_upload_space_used_by_realm_query(realm: Realm | None) -> QueryFn:
|
||||
if realm is None:
|
||||
realm_clause: Composable = SQL("")
|
||||
else:
|
||||
realm_clause = SQL("zerver_attachment.realm_id = {} AND").format(Literal(realm.id))
|
||||
|
||||
# Note: This query currently has to go through the entire table,
|
||||
# summing all the sizes of attachments for every realm. This can be improved
|
||||
# by having a query which looks at the latest CountStat for each realm,
|
||||
# and sums it with only the new attachments.
|
||||
# There'd be additional complexity added by the fact that attachments can
|
||||
# also be deleted. Partially this can be accounted for by subtracting
|
||||
# ArchivedAttachment sizes, but there's still the issue of attachments
|
||||
# which can be directly deleted via the API.
|
||||
|
||||
return lambda kwargs: SQL(
|
||||
"""
|
||||
INSERT INTO analytics_realmcount (realm_id, property, end_time, value)
|
||||
SELECT
|
||||
zerver_attachment.realm_id,
|
||||
%(property)s,
|
||||
%(time_end)s,
|
||||
COALESCE(SUM(zerver_attachment.size), 0)
|
||||
FROM
|
||||
zerver_attachment
|
||||
WHERE
|
||||
{realm_clause}
|
||||
zerver_attachment.create_time < %(time_end)s
|
||||
GROUP BY
|
||||
zerver_attachment.realm_id
|
||||
"""
|
||||
).format(**kwargs, realm_clause=realm_clause)
|
||||
|
||||
|
||||
def do_pull_minutes_active(
|
||||
property: str, start_time: datetime, end_time: datetime, realm: Realm | None = None
|
||||
property: str, start_time: datetime, end_time: datetime, realm: Optional[Realm] = None
|
||||
) -> int:
|
||||
user_activity_intervals = (
|
||||
UserActivityInterval.objects.filter(
|
||||
@@ -556,7 +422,7 @@ def do_pull_minutes_active(
|
||||
.values_list("user_profile_id", "user_profile__realm_id", "start", "end")
|
||||
)
|
||||
|
||||
seconds_active: dict[tuple[int, int], float] = defaultdict(float)
|
||||
seconds_active: Dict[Tuple[int, int], float] = defaultdict(float)
|
||||
for user_id, realm_id, interval_start, interval_end in user_activity_intervals:
|
||||
if realm is None or realm.id == realm_id:
|
||||
start = max(start_time, interval_start)
|
||||
@@ -578,7 +444,7 @@ def do_pull_minutes_active(
|
||||
return len(rows)
|
||||
|
||||
|
||||
def count_message_by_user_query(realm: Realm | None) -> QueryFn:
|
||||
def count_message_by_user_query(realm: Optional[Realm]) -> QueryFn:
|
||||
if realm is None:
|
||||
realm_clause: Composable = SQL("")
|
||||
else:
|
||||
@@ -611,7 +477,7 @@ def count_message_by_user_query(realm: Realm | None) -> QueryFn:
|
||||
|
||||
|
||||
# Note: ignores the group_by / group_by_clause.
|
||||
def count_message_type_by_user_query(realm: Realm | None) -> QueryFn:
|
||||
def count_message_type_by_user_query(realm: Optional[Realm]) -> QueryFn:
|
||||
if realm is None:
|
||||
realm_clause: Composable = SQL("")
|
||||
else:
|
||||
@@ -666,7 +532,7 @@ def count_message_type_by_user_query(realm: Realm | None) -> QueryFn:
|
||||
# use this also subgroup on UserProfile.is_bot. If in the future there is a
|
||||
# stat that counts messages by stream and doesn't need the UserProfile
|
||||
# table, consider writing a new query for efficiency.
|
||||
def count_message_by_stream_query(realm: Realm | None) -> QueryFn:
|
||||
def count_message_by_stream_query(realm: Optional[Realm]) -> QueryFn:
|
||||
if realm is None:
|
||||
realm_clause: Composable = SQL("")
|
||||
else:
|
||||
@@ -701,51 +567,79 @@ def count_message_by_stream_query(realm: Realm | None) -> QueryFn:
|
||||
).format(**kwargs, realm_clause=realm_clause)
|
||||
|
||||
|
||||
# Hardcodes the query needed for active_users_audit:is_bot:day.
|
||||
# Assumes that a user cannot have two RealmAuditLog entries with the
|
||||
# same event_time and event_type in [AuditLogEventType.USER_CREATED,
|
||||
# USER_DEACTIVATED, etc]. In particular, it's important to ensure
|
||||
# that migrations don't cause that to happen.
|
||||
def check_realmauditlog_by_user_query(realm: Realm | None) -> QueryFn:
|
||||
# Hardcodes the query needed by active_users:is_bot:day, since that is
|
||||
# currently the only stat that uses this.
|
||||
def count_user_by_realm_query(realm: Optional[Realm]) -> QueryFn:
|
||||
if realm is None:
|
||||
realm_clause: Composable = SQL("")
|
||||
else:
|
||||
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
|
||||
return lambda kwargs: SQL(
|
||||
"""
|
||||
INSERT INTO analytics_realmcount
|
||||
(realm_id, value, property, subgroup, end_time)
|
||||
SELECT
|
||||
zerver_realm.id, count(*), %(property)s, {subgroup}, %(time_end)s
|
||||
FROM zerver_realm
|
||||
JOIN zerver_userprofile
|
||||
ON
|
||||
zerver_realm.id = zerver_userprofile.realm_id
|
||||
WHERE
|
||||
zerver_realm.date_created < %(time_end)s AND
|
||||
zerver_userprofile.date_joined >= %(time_start)s AND
|
||||
zerver_userprofile.date_joined < %(time_end)s AND
|
||||
{realm_clause}
|
||||
zerver_userprofile.is_active = TRUE
|
||||
GROUP BY zerver_realm.id {group_by_clause}
|
||||
"""
|
||||
).format(**kwargs, realm_clause=realm_clause)
|
||||
|
||||
|
||||
# Currently hardcodes the query needed for active_users_audit:is_bot:day.
|
||||
# Assumes that a user cannot have two RealmAuditLog entries with the same event_time and
|
||||
# event_type in [RealmAuditLog.USER_CREATED, USER_DEACTIVATED, etc].
|
||||
# In particular, it's important to ensure that migrations don't cause that to happen.
|
||||
def check_realmauditlog_by_user_query(realm: Optional[Realm]) -> QueryFn:
|
||||
if realm is None:
|
||||
realm_clause: Composable = SQL("")
|
||||
else:
|
||||
realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
|
||||
return lambda kwargs: SQL(
|
||||
"""
|
||||
INSERT INTO analytics_realmcount
|
||||
(realm_id, value, property, subgroup, end_time)
|
||||
INSERT INTO analytics_usercount
|
||||
(user_id, realm_id, value, property, subgroup, end_time)
|
||||
SELECT
|
||||
zerver_userprofile.realm_id, count(*), %(property)s, {subgroup}, %(time_end)s
|
||||
FROM zerver_userprofile
|
||||
ral1.modified_user_id, ral1.realm_id, 1, %(property)s, {subgroup}, %(time_end)s
|
||||
FROM zerver_realmauditlog ral1
|
||||
JOIN (
|
||||
SELECT DISTINCT ON (modified_user_id)
|
||||
modified_user_id, event_type
|
||||
FROM
|
||||
zerver_realmauditlog
|
||||
WHERE
|
||||
event_type IN ({user_created}, {user_activated}, {user_deactivated}, {user_reactivated}) AND
|
||||
{realm_clause}
|
||||
event_time < %(time_end)s
|
||||
ORDER BY
|
||||
modified_user_id,
|
||||
event_time DESC
|
||||
) last_user_event ON last_user_event.modified_user_id = zerver_userprofile.id
|
||||
SELECT modified_user_id, max(event_time) AS max_event_time
|
||||
FROM zerver_realmauditlog
|
||||
WHERE
|
||||
event_type in ({user_created}, {user_activated}, {user_deactivated}, {user_reactivated}) AND
|
||||
{realm_clause}
|
||||
event_time < %(time_end)s
|
||||
GROUP BY modified_user_id
|
||||
) ral2
|
||||
ON
|
||||
ral1.event_time = max_event_time AND
|
||||
ral1.modified_user_id = ral2.modified_user_id
|
||||
JOIN zerver_userprofile
|
||||
ON
|
||||
ral1.modified_user_id = zerver_userprofile.id
|
||||
WHERE
|
||||
last_user_event.event_type in ({user_created}, {user_activated}, {user_reactivated})
|
||||
GROUP BY zerver_userprofile.realm_id {group_by_clause}
|
||||
ral1.event_type in ({user_created}, {user_activated}, {user_reactivated})
|
||||
"""
|
||||
).format(
|
||||
**kwargs,
|
||||
user_created=Literal(AuditLogEventType.USER_CREATED),
|
||||
user_activated=Literal(AuditLogEventType.USER_ACTIVATED),
|
||||
user_deactivated=Literal(AuditLogEventType.USER_DEACTIVATED),
|
||||
user_reactivated=Literal(AuditLogEventType.USER_REACTIVATED),
|
||||
user_created=Literal(RealmAuditLog.USER_CREATED),
|
||||
user_activated=Literal(RealmAuditLog.USER_ACTIVATED),
|
||||
user_deactivated=Literal(RealmAuditLog.USER_DEACTIVATED),
|
||||
user_reactivated=Literal(RealmAuditLog.USER_REACTIVATED),
|
||||
realm_clause=realm_clause,
|
||||
)
|
||||
|
||||
|
||||
def check_useractivityinterval_by_user_query(realm: Realm | None) -> QueryFn:
|
||||
def check_useractivityinterval_by_user_query(realm: Optional[Realm]) -> QueryFn:
|
||||
if realm is None:
|
||||
realm_clause: Composable = SQL("")
|
||||
else:
|
||||
@@ -769,7 +663,7 @@ def check_useractivityinterval_by_user_query(realm: Realm | None) -> QueryFn:
|
||||
).format(**kwargs, realm_clause=realm_clause)
|
||||
|
||||
|
||||
def count_realm_active_humans_query(realm: Realm | None) -> QueryFn:
|
||||
def count_realm_active_humans_query(realm: Optional[Realm]) -> QueryFn:
|
||||
if realm is None:
|
||||
realm_clause: Composable = SQL("")
|
||||
else:
|
||||
@@ -779,46 +673,29 @@ def count_realm_active_humans_query(realm: Realm | None) -> QueryFn:
|
||||
INSERT INTO analytics_realmcount
|
||||
(realm_id, value, property, subgroup, end_time)
|
||||
SELECT
|
||||
active_usercount.realm_id, count(*), %(property)s, NULL, %(time_end)s
|
||||
usercount1.realm_id, count(*), %(property)s, NULL, %(time_end)s
|
||||
FROM (
|
||||
SELECT
|
||||
realm_id,
|
||||
user_id
|
||||
FROM
|
||||
analytics_usercount
|
||||
WHERE
|
||||
property = '15day_actives::day'
|
||||
{realm_clause}
|
||||
AND end_time = %(time_end)s
|
||||
) active_usercount
|
||||
JOIN zerver_userprofile ON active_usercount.user_id = zerver_userprofile.id
|
||||
AND active_usercount.realm_id = zerver_userprofile.realm_id
|
||||
SELECT realm_id, user_id
|
||||
FROM analytics_usercount
|
||||
WHERE
|
||||
property = 'active_users_audit:is_bot:day' AND
|
||||
subgroup = 'false' AND
|
||||
{realm_clause}
|
||||
end_time = %(time_end)s
|
||||
) usercount1
|
||||
JOIN (
|
||||
SELECT DISTINCT ON (modified_user_id)
|
||||
modified_user_id, event_type
|
||||
FROM
|
||||
zerver_realmauditlog
|
||||
WHERE
|
||||
event_type IN ({user_created}, {user_activated}, {user_deactivated}, {user_reactivated})
|
||||
AND event_time < %(time_end)s
|
||||
ORDER BY
|
||||
modified_user_id,
|
||||
event_time DESC
|
||||
) last_user_event ON last_user_event.modified_user_id = active_usercount.user_id
|
||||
WHERE
|
||||
NOT zerver_userprofile.is_bot
|
||||
AND event_type IN ({user_created}, {user_activated}, {user_reactivated})
|
||||
GROUP BY
|
||||
active_usercount.realm_id
|
||||
SELECT realm_id, user_id
|
||||
FROM analytics_usercount
|
||||
WHERE
|
||||
property = '15day_actives::day' AND
|
||||
{realm_clause}
|
||||
end_time = %(time_end)s
|
||||
) usercount2
|
||||
ON
|
||||
usercount1.user_id = usercount2.user_id
|
||||
GROUP BY usercount1.realm_id
|
||||
"""
|
||||
).format(
|
||||
**kwargs,
|
||||
user_created=Literal(AuditLogEventType.USER_CREATED),
|
||||
user_activated=Literal(AuditLogEventType.USER_ACTIVATED),
|
||||
user_deactivated=Literal(AuditLogEventType.USER_DEACTIVATED),
|
||||
user_reactivated=Literal(AuditLogEventType.USER_REACTIVATED),
|
||||
realm_clause=realm_clause,
|
||||
)
|
||||
).format(**kwargs, realm_clause=realm_clause)
|
||||
|
||||
|
||||
# Currently unused and untested
|
||||
@@ -841,7 +718,7 @@ count_stream_by_realm_query = lambda kwargs: SQL(
|
||||
).format(**kwargs)
|
||||
|
||||
|
||||
def get_count_stats(realm: Realm | None = None) -> dict[str, CountStat]:
|
||||
def get_count_stats(realm: Optional[Realm] = None) -> Dict[str, CountStat]:
|
||||
## CountStat declarations ##
|
||||
|
||||
count_stats_ = [
|
||||
@@ -874,22 +751,39 @@ def get_count_stats(realm: Realm | None = None) -> dict[str, CountStat]:
|
||||
),
|
||||
CountStat.DAY,
|
||||
),
|
||||
# AI credit usage stats for users, in units of $1/10^9, which is safe for
|
||||
# aggregation because we're using bigints for the values.
|
||||
LoggingCountStat("ai_credit_usage::day", UserCount, CountStat.DAY),
|
||||
# Counts the number of active users in the UserProfile.is_active sense.
|
||||
# Number of users stats
|
||||
# Stats that count the number of active users in the UserProfile.is_active sense.
|
||||
# 'active_users_audit:is_bot:day' is the canonical record of which users were
|
||||
# active on which days (in the UserProfile.is_active sense).
|
||||
# Important that this stay a daily stat, so that 'realm_active_humans::day' works as expected.
|
||||
CountStat(
|
||||
"active_users_audit:is_bot:day",
|
||||
sql_data_collector(
|
||||
RealmCount, check_realmauditlog_by_user_query(realm), (UserProfile, "is_bot")
|
||||
UserCount, check_realmauditlog_by_user_query(realm), (UserProfile, "is_bot")
|
||||
),
|
||||
CountStat.DAY,
|
||||
),
|
||||
# Important note: LoggingCountStat objects aren't passed the
|
||||
# Realm argument, because by nature they have a logging
|
||||
# structure, not a pull-from-database structure, so there's no
|
||||
# way to compute them for a single realm after the fact (the
|
||||
# use case for passing a Realm argument).
|
||||
# Sanity check on 'active_users_audit:is_bot:day', and a archetype for future LoggingCountStats.
|
||||
# In RealmCount, 'active_users_audit:is_bot:day' should be the partial
|
||||
# sum sequence of 'active_users_log:is_bot:day', for any realm that
|
||||
# started after the latter stat was introduced.
|
||||
LoggingCountStat("active_users_log:is_bot:day", RealmCount, CountStat.DAY),
|
||||
# Another sanity check on 'active_users_audit:is_bot:day'. Is only an
|
||||
# approximation, e.g. if a user is deactivated between the end of the
|
||||
# day and when this stat is run, they won't be counted. However, is the
|
||||
# simplest of the three to inspect by hand.
|
||||
CountStat(
|
||||
"upload_quota_used_bytes::day",
|
||||
sql_data_collector(RealmCount, count_upload_space_used_by_realm_query(realm), None),
|
||||
"active_users:is_bot:day",
|
||||
sql_data_collector(
|
||||
RealmCount, count_user_by_realm_query(realm), (UserProfile, "is_bot")
|
||||
),
|
||||
CountStat.DAY,
|
||||
interval=TIMEDELTA_MAX,
|
||||
),
|
||||
# Messages read stats. messages_read::hour is the total
|
||||
# number of messages read, whereas
|
||||
@@ -923,16 +817,8 @@ def get_count_stats(realm: Realm | None = None) -> dict[str, CountStat]:
|
||||
CountStat(
|
||||
"minutes_active::day", DataCollector(UserCount, do_pull_minutes_active), CountStat.DAY
|
||||
),
|
||||
# Tracks the number of push notifications requested by the server.
|
||||
# Included in LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER.
|
||||
LoggingCountStat(
|
||||
"mobile_pushes_sent::day",
|
||||
RealmCount,
|
||||
CountStat.DAY,
|
||||
),
|
||||
# Rate limiting stats
|
||||
# Used to limit the number of invitation emails sent by a realm.
|
||||
# Included in LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER.
|
||||
# Used to limit the number of invitation emails sent by a realm
|
||||
LoggingCountStat("invites_sent::day", RealmCount, CountStat.DAY),
|
||||
# Dependent stats
|
||||
# Must come after their dependencies.
|
||||
@@ -941,83 +827,12 @@ def get_count_stats(realm: Realm | None = None) -> dict[str, CountStat]:
|
||||
"realm_active_humans::day",
|
||||
sql_data_collector(RealmCount, count_realm_active_humans_query(realm), None),
|
||||
CountStat.DAY,
|
||||
dependencies=["15day_actives::day"],
|
||||
dependencies=["active_users_audit:is_bot:day", "15day_actives::day"],
|
||||
),
|
||||
]
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
# See also the remote_installation versions of these in REMOTE_INSTALLATION_COUNT_STATS.
|
||||
count_stats_.append(
|
||||
LoggingCountStat(
|
||||
"mobile_pushes_received::day",
|
||||
RemoteRealmCount,
|
||||
CountStat.DAY,
|
||||
)
|
||||
)
|
||||
count_stats_.append(
|
||||
LoggingCountStat(
|
||||
"mobile_pushes_forwarded::day",
|
||||
RemoteRealmCount,
|
||||
CountStat.DAY,
|
||||
)
|
||||
)
|
||||
|
||||
return OrderedDict((stat.property, stat) for stat in count_stats_)
|
||||
|
||||
|
||||
# These properties are tracked by the bouncer itself and therefore syncing them
|
||||
# from a remote server should not be allowed - or the server would be able to interfere
|
||||
# with our data.
|
||||
BOUNCER_ONLY_REMOTE_COUNT_STAT_PROPERTIES = [
|
||||
"mobile_pushes_received::day",
|
||||
"mobile_pushes_forwarded::day",
|
||||
]
|
||||
|
||||
# LoggingCountStats with a daily duration and that are directly stored on
|
||||
# the RealmCount table (instead of via aggregation in process_count_stat),
|
||||
# can be in a state, after the hourly cron job to update analytics counts,
|
||||
# where the logged value will be live-updated later (as the end time for
|
||||
# the stat is still in the future). As these logging counts are designed
|
||||
# to be used on the self-hosted installation for either debugging or rate
|
||||
# limiting, sending these incomplete counts to the bouncer has low value.
|
||||
LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER = {
|
||||
"invites_sent::day",
|
||||
"mobile_pushes_sent::day",
|
||||
"active_users_log:is_bot:day",
|
||||
"active_users:is_bot:day",
|
||||
}
|
||||
|
||||
# To avoid refactoring for now COUNT_STATS can be used as before
|
||||
COUNT_STATS = get_count_stats()
|
||||
|
||||
REMOTE_INSTALLATION_COUNT_STATS = OrderedDict()
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
# REMOTE_INSTALLATION_COUNT_STATS contains duplicates of the
|
||||
# RemoteRealmCount stats declared above; it is necessary because
|
||||
# pre-8.0 servers do not send the fields required to identify a
|
||||
# RemoteRealm.
|
||||
|
||||
# Tracks the number of push notifications requested to be sent
|
||||
# by a remote server.
|
||||
REMOTE_INSTALLATION_COUNT_STATS["mobile_pushes_received::day"] = LoggingCountStat(
|
||||
"mobile_pushes_received::day",
|
||||
RemoteInstallationCount,
|
||||
CountStat.DAY,
|
||||
)
|
||||
# Tracks the number of push notifications successfully sent to
|
||||
# mobile devices, as requested by the remote server. Therefore
|
||||
# this should be less than or equal to mobile_pushes_received -
|
||||
# with potential tiny offsets resulting from a request being
|
||||
# *received* by the bouncer right before midnight, but *sent* to
|
||||
# the mobile device right after midnight. This would cause the
|
||||
# increments to happen to CountStat records for different days.
|
||||
REMOTE_INSTALLATION_COUNT_STATS["mobile_pushes_forwarded::day"] = LoggingCountStat(
|
||||
"mobile_pushes_forwarded::day",
|
||||
RemoteInstallationCount,
|
||||
CountStat.DAY,
|
||||
)
|
||||
|
||||
ALL_COUNT_STATS = OrderedDict(
|
||||
list(COUNT_STATS.items()) + list(REMOTE_INSTALLATION_COUNT_STATS.items())
|
||||
)
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from math import sqrt
|
||||
from random import Random
|
||||
from typing import List
|
||||
|
||||
from analytics.lib.counts import CountStat
|
||||
|
||||
@@ -15,7 +16,7 @@ def generate_time_series_data(
|
||||
frequency: str = CountStat.DAY,
|
||||
partial_sum: bool = False,
|
||||
random_seed: int = 26,
|
||||
) -> list[int]:
|
||||
) -> List[int]:
|
||||
"""
|
||||
Generate semi-realistic looking time series data for testing analytics graphs.
|
||||
|
||||
@@ -59,7 +60,9 @@ def generate_time_series_data(
|
||||
f"Must be generating at least 2 data points. Currently generating {length}"
|
||||
)
|
||||
growth_base = growth ** (1.0 / (length - 1))
|
||||
values_no_noise = [seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)]
|
||||
values_no_noise = [
|
||||
seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)
|
||||
]
|
||||
|
||||
noise_scalars = [rng.gauss(0, 1)]
|
||||
for i in range(1, length):
|
||||
@@ -69,7 +72,7 @@ def generate_time_series_data(
|
||||
|
||||
values = [
|
||||
0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
|
||||
for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays, strict=False)
|
||||
for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)
|
||||
]
|
||||
if partial_sum:
|
||||
for i in range(1, length):
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Optional
|
||||
|
||||
from analytics.lib.counts import CountStat
|
||||
from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC
|
||||
@@ -9,8 +10,8 @@ from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC
|
||||
# So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22],
|
||||
# and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
|
||||
def time_range(
|
||||
start: datetime, end: datetime, frequency: str, min_length: int | None
|
||||
) -> list[datetime]:
|
||||
start: datetime, end: datetime, frequency: str, min_length: Optional[int]
|
||||
) -> List[datetime]:
|
||||
verify_UTC(start)
|
||||
verify_UTC(end)
|
||||
if frequency == CountStat.HOUR:
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
from dataclasses import dataclass
|
||||
import os
|
||||
import time
|
||||
from datetime import timedelta
|
||||
from typing import Any, Literal
|
||||
from typing import Any, Dict
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import ALL_COUNT_STATS, CountStat
|
||||
from analytics.lib.counts import COUNT_STATS, CountStat
|
||||
from analytics.models import installation_epoch
|
||||
from scripts.lib.zulip_tools import atomic_nagios_write
|
||||
from zerver.lib.management import ZulipBaseCommand
|
||||
from zerver.lib.timestamp import TimeZoneNotUTCError, floor_to_day, floor_to_hour, verify_UTC
|
||||
from zerver.models import Realm
|
||||
|
||||
@@ -20,13 +20,7 @@ states = {
|
||||
}
|
||||
|
||||
|
||||
@dataclass
|
||||
class NagiosResult:
|
||||
status: Literal["ok", "warning", "critical", "unknown"]
|
||||
message: str
|
||||
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
class Command(BaseCommand):
|
||||
help = """Checks FillState table.
|
||||
|
||||
Run as a cron job that runs every hour."""
|
||||
@@ -34,24 +28,30 @@ class Command(ZulipBaseCommand):
|
||||
@override
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
fill_state = self.get_fill_state()
|
||||
atomic_nagios_write("check-analytics-state", fill_state.status, fill_state.message)
|
||||
status = fill_state["status"]
|
||||
message = fill_state["message"]
|
||||
|
||||
def get_fill_state(self) -> NagiosResult:
|
||||
state_file_path = "/var/lib/nagios_state/check-analytics-state"
|
||||
state_file_tmp = state_file_path + "-tmp"
|
||||
|
||||
with open(state_file_tmp, "w") as f:
|
||||
f.write(f"{int(time.time())}|{status}|{states[status]}|{message}\n")
|
||||
os.rename(state_file_tmp, state_file_path)
|
||||
|
||||
def get_fill_state(self) -> Dict[str, Any]:
|
||||
if not Realm.objects.exists():
|
||||
return NagiosResult(status="ok", message="No realms exist, so not checking FillState.")
|
||||
return {"status": 0, "message": "No realms exist, so not checking FillState."}
|
||||
|
||||
warning_unfilled_properties = []
|
||||
critical_unfilled_properties = []
|
||||
for property, stat in ALL_COUNT_STATS.items():
|
||||
for property, stat in COUNT_STATS.items():
|
||||
last_fill = stat.last_successful_fill()
|
||||
if last_fill is None:
|
||||
last_fill = installation_epoch()
|
||||
try:
|
||||
verify_UTC(last_fill)
|
||||
except TimeZoneNotUTCError:
|
||||
return NagiosResult(
|
||||
status="critical", message=f"FillState not in UTC for {property}"
|
||||
)
|
||||
return {"status": 2, "message": f"FillState not in UTC for {property}"}
|
||||
|
||||
if stat.frequency == CountStat.DAY:
|
||||
floor_function = floor_to_day
|
||||
@@ -63,10 +63,10 @@ class Command(ZulipBaseCommand):
|
||||
critical_threshold = timedelta(minutes=150)
|
||||
|
||||
if floor_function(last_fill) != last_fill:
|
||||
return NagiosResult(
|
||||
status="critical",
|
||||
message=f"FillState not on {stat.frequency} boundary for {property}",
|
||||
)
|
||||
return {
|
||||
"status": 2,
|
||||
"message": f"FillState not on {stat.frequency} boundary for {property}",
|
||||
}
|
||||
|
||||
time_to_last_fill = timezone_now() - last_fill
|
||||
if time_to_last_fill > critical_threshold:
|
||||
@@ -75,18 +75,18 @@ class Command(ZulipBaseCommand):
|
||||
warning_unfilled_properties.append(property)
|
||||
|
||||
if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0:
|
||||
return NagiosResult(status="ok", message="FillState looks fine.")
|
||||
return {"status": 0, "message": "FillState looks fine."}
|
||||
if len(critical_unfilled_properties) == 0:
|
||||
return NagiosResult(
|
||||
status="warning",
|
||||
message="Missed filling {} once.".format(
|
||||
return {
|
||||
"status": 1,
|
||||
"message": "Missed filling {} once.".format(
|
||||
", ".join(warning_unfilled_properties),
|
||||
),
|
||||
)
|
||||
return NagiosResult(
|
||||
status="critical",
|
||||
message="Missed filling {} once. Missed filling {} at least twice.".format(
|
||||
}
|
||||
return {
|
||||
"status": 2,
|
||||
"message": "Missed filling {} once. Missed filling {} at least twice.".format(
|
||||
", ".join(warning_unfilled_properties),
|
||||
", ".join(critical_unfilled_properties),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
from argparse import ArgumentParser
|
||||
from typing import Any
|
||||
|
||||
from django.core.management.base import CommandError
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import do_drop_all_analytics_tables
|
||||
from zerver.lib.management import ZulipBaseCommand
|
||||
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
class Command(BaseCommand):
|
||||
help = """Clear analytics tables."""
|
||||
|
||||
@override
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
from argparse import ArgumentParser
|
||||
from typing import Any
|
||||
|
||||
from django.core.management.base import CommandError
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import ALL_COUNT_STATS, do_drop_single_stat
|
||||
from zerver.lib.management import ZulipBaseCommand
|
||||
from analytics.lib.counts import COUNT_STATS, do_drop_single_stat
|
||||
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
class Command(BaseCommand):
|
||||
help = """Clear analytics tables."""
|
||||
|
||||
@override
|
||||
@@ -19,7 +18,7 @@ class Command(ZulipBaseCommand):
|
||||
@override
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
property = options["property"]
|
||||
if property not in ALL_COUNT_STATS:
|
||||
if property not in COUNT_STATS:
|
||||
raise CommandError(f"Invalid property: {property}")
|
||||
if not options["force"]:
|
||||
raise CommandError("No action taken. Use --force.")
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
from collections.abc import Mapping
|
||||
import os
|
||||
from datetime import timedelta
|
||||
from typing import Any, TypeAlias
|
||||
from typing import Any, Dict, List, Mapping, Type, Union
|
||||
|
||||
from django.core.files.uploadedfile import UploadedFile
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
from typing_extensions import TypeAlias, override
|
||||
|
||||
from analytics.lib.counts import COUNT_STATS, CountStat, do_drop_all_analytics_tables
|
||||
from analytics.lib.fixtures import generate_time_series_data
|
||||
@@ -20,17 +21,23 @@ from analytics.models import (
|
||||
from zerver.actions.create_realm import do_create_realm
|
||||
from zerver.actions.users import do_change_user_role
|
||||
from zerver.lib.create_user import create_user
|
||||
from zerver.lib.management import ZulipBaseCommand
|
||||
from zerver.lib.storage import static_path
|
||||
from zerver.lib.stream_color import STREAM_ASSIGNMENT_COLORS
|
||||
from zerver.lib.streams import get_default_values_for_stream_permission_group_settings
|
||||
from zerver.lib.timestamp import floor_to_day
|
||||
from zerver.lib.upload import upload_message_attachment_from_request
|
||||
from zerver.models import Client, Realm, RealmAuditLog, Recipient, Stream, Subscription, UserProfile
|
||||
from zerver.models.realm_audit_logs import AuditLogEventType
|
||||
from zerver.models import (
|
||||
Client,
|
||||
Realm,
|
||||
RealmAuditLog,
|
||||
Recipient,
|
||||
Stream,
|
||||
Subscription,
|
||||
UserGroup,
|
||||
UserProfile,
|
||||
)
|
||||
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
class Command(BaseCommand):
|
||||
help = """Populates analytics tables with randomly generated data."""
|
||||
|
||||
DAYS_OF_DATA = 100
|
||||
@@ -46,7 +53,7 @@ class Command(ZulipBaseCommand):
|
||||
spikiness: float,
|
||||
holiday_rate: float = 0,
|
||||
partial_sum: bool = False,
|
||||
) -> list[int]:
|
||||
) -> List[int]:
|
||||
self.random_seed += 1
|
||||
return generate_time_series_data(
|
||||
days=self.DAYS_OF_DATA,
|
||||
@@ -106,11 +113,14 @@ class Command(ZulipBaseCommand):
|
||||
force_date_joined=installation_time,
|
||||
)
|
||||
|
||||
administrators_user_group = UserGroup.objects.get(
|
||||
name=UserGroup.ADMINISTRATORS_GROUP_NAME, realm=realm, is_system_group=True
|
||||
)
|
||||
stream = Stream.objects.create(
|
||||
name="all",
|
||||
realm=realm,
|
||||
date_created=installation_time,
|
||||
**get_default_values_for_stream_permission_group_settings(realm),
|
||||
can_remove_subscribers_group=administrators_user_group,
|
||||
)
|
||||
recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
|
||||
stream.recipient = recipient
|
||||
@@ -128,27 +138,29 @@ class Command(ZulipBaseCommand):
|
||||
modified_user=shylock,
|
||||
modified_stream=stream,
|
||||
event_last_message_id=0,
|
||||
event_type=AuditLogEventType.SUBSCRIPTION_CREATED,
|
||||
event_type=RealmAuditLog.SUBSCRIPTION_CREATED,
|
||||
event_time=installation_time,
|
||||
)
|
||||
|
||||
# Create an attachment in the database for set_storage_space_used_statistic.
|
||||
IMAGE_FILE_PATH = static_path("images/test-images/checkbox.png")
|
||||
file_info = os.stat(IMAGE_FILE_PATH)
|
||||
file_size = file_info.st_size
|
||||
with open(IMAGE_FILE_PATH, "rb") as fp:
|
||||
upload_message_attachment_from_request(UploadedFile(fp), shylock)
|
||||
upload_message_attachment_from_request(UploadedFile(fp), shylock, file_size)
|
||||
|
||||
FixtureData: TypeAlias = Mapping[str | int | None, list[int]]
|
||||
FixtureData: TypeAlias = Mapping[Union[str, int, None], List[int]]
|
||||
|
||||
def insert_fixture_data(
|
||||
stat: CountStat,
|
||||
fixture_data: FixtureData,
|
||||
table: type[BaseCount],
|
||||
table: Type[BaseCount],
|
||||
) -> None:
|
||||
end_times = time_range(
|
||||
last_end_time, last_end_time, stat.frequency, len(next(iter(fixture_data.values())))
|
||||
)
|
||||
if table == InstallationCount:
|
||||
id_args: dict[str, Any] = {}
|
||||
id_args: Dict[str, Any] = {}
|
||||
if table == RealmCount:
|
||||
id_args = {"realm": realm}
|
||||
if table == UserCount:
|
||||
@@ -165,7 +177,7 @@ class Command(ZulipBaseCommand):
|
||||
value=value,
|
||||
**id_args,
|
||||
)
|
||||
for end_time, value in zip(end_times, values, strict=False)
|
||||
for end_time, value in zip(end_times, values)
|
||||
if value != 0
|
||||
)
|
||||
|
||||
@@ -272,7 +284,6 @@ class Command(ZulipBaseCommand):
|
||||
android, created = Client.objects.get_or_create(name="ZulipAndroid")
|
||||
iOS, created = Client.objects.get_or_create(name="ZulipiOS")
|
||||
react_native, created = Client.objects.get_or_create(name="ZulipMobile")
|
||||
flutter, created = Client.objects.get_or_create(name="ZulipFlutter")
|
||||
API, created = Client.objects.get_or_create(name="API: Python")
|
||||
zephyr_mirror, created = Client.objects.get_or_create(name="zephyr_mirror")
|
||||
unused, created = Client.objects.get_or_create(name="unused")
|
||||
@@ -290,7 +301,6 @@ class Command(ZulipBaseCommand):
|
||||
android.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
||||
iOS.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
||||
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||
flutter.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||
API.id: self.generate_fixture_data(stat, 5, 5, 5, 0.6, 3),
|
||||
zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, 0.6, 3),
|
||||
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
|
||||
@@ -302,7 +312,6 @@ class Command(ZulipBaseCommand):
|
||||
old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, 0.6, 3),
|
||||
android.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
||||
iOS.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
||||
flutter.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||
API.id: self.generate_fixture_data(stat, 50, 50, 5, 0.6, 3),
|
||||
zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, 0.6, 3),
|
||||
@@ -320,7 +329,7 @@ class Command(ZulipBaseCommand):
|
||||
"true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3),
|
||||
}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
stream_data: Mapping[int | str | None, list[int]] = {
|
||||
stream_data: Mapping[Union[int, str, None], List[int]] = {
|
||||
"false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4),
|
||||
"true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2),
|
||||
}
|
||||
|
||||
@@ -1,22 +1,23 @@
|
||||
import hashlib
|
||||
import os
|
||||
import time
|
||||
from argparse import ArgumentParser
|
||||
from datetime import timezone
|
||||
from typing import Any
|
||||
from typing import Any, Dict
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils.dateparse import parse_datetime
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import ALL_COUNT_STATS, logger, process_count_stat
|
||||
from zerver.lib.management import ZulipBaseCommand, abort_unless_locked
|
||||
from zerver.lib.remote_server import send_server_data_to_push_bouncer, should_send_analytics_data
|
||||
from analytics.lib.counts import COUNT_STATS, logger, process_count_stat
|
||||
from scripts.lib.zulip_tools import ENDC, WARNING
|
||||
from zerver.lib.remote_server import send_analytics_to_push_bouncer
|
||||
from zerver.lib.timestamp import floor_to_hour
|
||||
from zerver.models import Realm
|
||||
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
class Command(BaseCommand):
|
||||
help = """Fills Analytics tables.
|
||||
|
||||
Run as a cron job that runs every hour."""
|
||||
@@ -26,7 +27,8 @@ class Command(ZulipBaseCommand):
|
||||
parser.add_argument(
|
||||
"--time",
|
||||
"-t",
|
||||
help="Update stat tables from current state to --time. Defaults to the current time.",
|
||||
help="Update stat tables from current state to "
|
||||
"--time. Defaults to the current time.",
|
||||
default=timezone_now().isoformat(),
|
||||
)
|
||||
parser.add_argument("--utc", action="store_true", help="Interpret --time in UTC.")
|
||||
@@ -38,11 +40,22 @@ class Command(ZulipBaseCommand):
|
||||
)
|
||||
|
||||
@override
|
||||
@abort_unless_locked
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
self.run_update_analytics_counts(options)
|
||||
try:
|
||||
os.mkdir(settings.ANALYTICS_LOCK_DIR)
|
||||
except OSError:
|
||||
print(
|
||||
f"{WARNING}Analytics lock {settings.ANALYTICS_LOCK_DIR} is unavailable;"
|
||||
f" exiting.{ENDC}"
|
||||
)
|
||||
return
|
||||
|
||||
def run_update_analytics_counts(self, options: dict[str, Any]) -> None:
|
||||
try:
|
||||
self.run_update_analytics_counts(options)
|
||||
finally:
|
||||
os.rmdir(settings.ANALYTICS_LOCK_DIR)
|
||||
|
||||
def run_update_analytics_counts(self, options: Dict[str, Any]) -> None:
|
||||
# installation_epoch relies on there being at least one realm; we
|
||||
# shouldn't run the analytics code if that condition isn't satisfied
|
||||
if not Realm.objects.exists():
|
||||
@@ -61,9 +74,9 @@ class Command(ZulipBaseCommand):
|
||||
fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc))
|
||||
|
||||
if options["stat"] is not None:
|
||||
stats = [ALL_COUNT_STATS[options["stat"]]]
|
||||
stats = [COUNT_STATS[options["stat"]]]
|
||||
else:
|
||||
stats = list(ALL_COUNT_STATS.values())
|
||||
stats = list(COUNT_STATS.values())
|
||||
|
||||
logger.info("Starting updating analytics counts through %s", fill_to_time)
|
||||
if options["verbose"]:
|
||||
@@ -82,17 +95,5 @@ class Command(ZulipBaseCommand):
|
||||
)
|
||||
logger.info("Finished updating analytics counts through %s", fill_to_time)
|
||||
|
||||
if should_send_analytics_data():
|
||||
# Based on the specific value of the setting, the exact details to send
|
||||
# will be decided. However, we proceed just based on this not being falsey.
|
||||
|
||||
# Skew 0-10 minutes based on a hash of settings.ZULIP_ORG_ID, so
|
||||
# that each server will report in at a somewhat consistent time.
|
||||
assert settings.ZULIP_ORG_ID
|
||||
delay = int.from_bytes(
|
||||
hashlib.sha256(settings.ZULIP_ORG_ID.encode()).digest(), byteorder="big"
|
||||
) % (60 * 10)
|
||||
logger.info("Sleeping %d seconds before reporting...", delay)
|
||||
time.sleep(delay)
|
||||
|
||||
send_server_data_to_push_bouncer(consider_usage_statistics=True, raise_on_error=True)
|
||||
if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS:
|
||||
send_analytics_to_push_bouncer()
|
||||
|
||||
@@ -1,224 +0,0 @@
|
||||
# Generated by Django 5.0.7 on 2024-08-13 20:16
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
replaces = [
|
||||
("analytics", "0001_initial"),
|
||||
("analytics", "0002_remove_huddlecount"),
|
||||
("analytics", "0003_fillstate"),
|
||||
("analytics", "0004_add_subgroup"),
|
||||
("analytics", "0005_alter_field_size"),
|
||||
("analytics", "0006_add_subgroup_to_unique_constraints"),
|
||||
("analytics", "0007_remove_interval"),
|
||||
("analytics", "0008_add_count_indexes"),
|
||||
("analytics", "0009_remove_messages_to_stream_stat"),
|
||||
("analytics", "0010_clear_messages_sent_values"),
|
||||
("analytics", "0011_clear_analytics_tables"),
|
||||
("analytics", "0012_add_on_delete"),
|
||||
("analytics", "0013_remove_anomaly"),
|
||||
("analytics", "0014_remove_fillstate_last_modified"),
|
||||
("analytics", "0015_clear_duplicate_counts"),
|
||||
("analytics", "0016_unique_constraint_when_subgroup_null"),
|
||||
("analytics", "0017_regenerate_partial_indexes"),
|
||||
("analytics", "0018_remove_usercount_active_users_audit"),
|
||||
("analytics", "0019_remove_unused_counts"),
|
||||
("analytics", "0020_alter_installationcount_id_alter_realmcount_id_and_more"),
|
||||
("analytics", "0021_alter_fillstate_id"),
|
||||
]
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
# Needed for foreign keys to core models like Realm.
|
||||
("zerver", "0001_initial"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="InstallationCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=32)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("value", models.BigIntegerField()),
|
||||
("subgroup", models.CharField(max_length=16, null=True)),
|
||||
],
|
||||
options={
|
||||
"unique_together": set(),
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", False)),
|
||||
fields=("property", "subgroup", "end_time"),
|
||||
name="unique_installation_count",
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", True)),
|
||||
fields=("property", "end_time"),
|
||||
name="unique_installation_count_null_subgroup",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="RealmCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.realm"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=32)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("value", models.BigIntegerField()),
|
||||
("subgroup", models.CharField(max_length=16, null=True)),
|
||||
],
|
||||
options={
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["property", "end_time"],
|
||||
name="analytics_realmcount_property_end_time_3b60396b_idx",
|
||||
)
|
||||
],
|
||||
"unique_together": set(),
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", False)),
|
||||
fields=("realm", "property", "subgroup", "end_time"),
|
||||
name="unique_realm_count",
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", True)),
|
||||
fields=("realm", "property", "end_time"),
|
||||
name="unique_realm_count_null_subgroup",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="StreamCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.realm"
|
||||
),
|
||||
),
|
||||
(
|
||||
"stream",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.stream"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=32)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("value", models.BigIntegerField()),
|
||||
("subgroup", models.CharField(max_length=16, null=True)),
|
||||
],
|
||||
options={
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_streamcount_property_realm_id_end_time_155ae930_idx",
|
||||
)
|
||||
],
|
||||
"unique_together": set(),
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", False)),
|
||||
fields=("stream", "property", "subgroup", "end_time"),
|
||||
name="unique_stream_count",
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", True)),
|
||||
fields=("stream", "property", "end_time"),
|
||||
name="unique_stream_count_null_subgroup",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="UserCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.realm"
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=32)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("value", models.BigIntegerField()),
|
||||
("subgroup", models.CharField(max_length=16, null=True)),
|
||||
],
|
||||
options={
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_usercount_property_realm_id_end_time_591dbec1_idx",
|
||||
)
|
||||
],
|
||||
"unique_together": set(),
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", False)),
|
||||
fields=("user", "property", "subgroup", "end_time"),
|
||||
name="unique_user_count",
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", True)),
|
||||
fields=("user", "property", "end_time"),
|
||||
name="unique_user_count_null_subgroup",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="FillState",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=40, unique=True)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("state", models.PositiveSmallIntegerField()),
|
||||
],
|
||||
),
|
||||
]
|
||||
@@ -26,5 +26,5 @@ class Migration(migrations.Migration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(delete_messages_sent_to_stream_stat, elidable=True),
|
||||
migrations.RunPython(delete_messages_sent_to_stream_stat),
|
||||
]
|
||||
|
||||
@@ -24,5 +24,5 @@ class Migration(migrations.Migration):
|
||||
dependencies = [("analytics", "0009_remove_messages_to_stream_stat")]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(clear_message_sent_by_message_type_values, elidable=True),
|
||||
migrations.RunPython(clear_message_sent_by_message_type_values),
|
||||
]
|
||||
|
||||
@@ -23,5 +23,5 @@ class Migration(migrations.Migration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(clear_analytics_tables, elidable=True),
|
||||
migrations.RunPython(clear_analytics_tables),
|
||||
]
|
||||
|
||||
@@ -60,7 +60,5 @@ class Migration(migrations.Migration):
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
clear_duplicate_counts, reverse_code=migrations.RunPython.noop, elidable=True
|
||||
),
|
||||
migrations.RunPython(clear_duplicate_counts, reverse_code=migrations.RunPython.noop),
|
||||
]
|
||||
|
||||
@@ -1,114 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0016_unique_constraint_when_subgroup_null"),
|
||||
]
|
||||
|
||||
# If the server was installed between 7.0 and 7.4 (or main between
|
||||
# 2c20028aa451 and 7807bff52635), it contains indexes which (when
|
||||
# running 7.5 or 7807bff52635 or higher) are never used, because
|
||||
# they contain an improper cast
|
||||
# (https://code.djangoproject.com/ticket/34840).
|
||||
#
|
||||
# We regenerate the indexes here, by dropping and re-creating
|
||||
# them, so that we know that they are properly formed.
|
||||
operations = [
|
||||
migrations.RemoveConstraint(
|
||||
model_name="installationcount",
|
||||
name="unique_installation_count",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="installationcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("property", "subgroup", "end_time"),
|
||||
name="unique_installation_count",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="installationcount",
|
||||
name="unique_installation_count_null_subgroup",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="installationcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("property", "end_time"),
|
||||
name="unique_installation_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="realmcount",
|
||||
name="unique_realm_count",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="realmcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("realm", "property", "subgroup", "end_time"),
|
||||
name="unique_realm_count",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="realmcount",
|
||||
name="unique_realm_count_null_subgroup",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="realmcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("realm", "property", "end_time"),
|
||||
name="unique_realm_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="streamcount",
|
||||
name="unique_stream_count",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="streamcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("stream", "property", "subgroup", "end_time"),
|
||||
name="unique_stream_count",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="streamcount",
|
||||
name="unique_stream_count_null_subgroup",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="streamcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("stream", "property", "end_time"),
|
||||
name="unique_stream_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="usercount",
|
||||
name="unique_user_count",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="usercount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("user", "property", "subgroup", "end_time"),
|
||||
name="unique_user_count",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="usercount",
|
||||
name="unique_user_count_null_subgroup",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="usercount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("user", "property", "end_time"),
|
||||
name="unique_user_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,16 +0,0 @@
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
elidable = True
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0017_regenerate_partial_indexes"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunSQL(
|
||||
"DELETE FROM analytics_usercount WHERE property = 'active_users_audit:is_bot:day'",
|
||||
elidable=True,
|
||||
)
|
||||
]
|
||||
@@ -1,27 +0,0 @@
|
||||
from django.db import migrations
|
||||
|
||||
REMOVED_COUNTS = (
|
||||
"active_users_log:is_bot:day",
|
||||
"active_users:is_bot:day",
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
elidable = True
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0018_remove_usercount_active_users_audit"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunSQL(
|
||||
[
|
||||
("DELETE FROM analytics_realmcount WHERE property IN %s", (REMOVED_COUNTS,)),
|
||||
(
|
||||
"DELETE FROM analytics_installationcount WHERE property IN %s",
|
||||
(REMOVED_COUNTS,),
|
||||
),
|
||||
],
|
||||
elidable=True,
|
||||
)
|
||||
]
|
||||
@@ -1,40 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0019_remove_unused_counts"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="installationcount",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="realmcount",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="streamcount",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="usercount",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,17 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0020_alter_installationcount_id_alter_realmcount_id_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="fillstate",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,4 +1,7 @@
|
||||
from datetime import datetime
|
||||
# https://github.com/typeddjango/django-stubs/issues/1698
|
||||
# mypy: disable-error-code="explicit-override"
|
||||
|
||||
import datetime
|
||||
|
||||
from django.db import models
|
||||
from django.db.models import Q, UniqueConstraint
|
||||
@@ -24,7 +27,7 @@ class FillState(models.Model):
|
||||
|
||||
# The earliest/starting end_time in FillState
|
||||
# We assume there is at least one realm
|
||||
def installation_epoch() -> datetime:
|
||||
def installation_epoch() -> datetime.datetime:
|
||||
earliest_realm_creation = Realm.objects.aggregate(models.Min("date_created"))[
|
||||
"date_created__min"
|
||||
]
|
||||
|
||||
53
analytics/tests/test_activity_views.py
Normal file
53
analytics/tests/test_activity_views.py
Normal file
@@ -0,0 +1,53 @@
|
||||
from unittest import mock
|
||||
|
||||
from django.utils.timezone import now as timezone_now
|
||||
|
||||
from zerver.lib.test_classes import ZulipTestCase
|
||||
from zerver.models import Client, UserActivity, UserProfile
|
||||
|
||||
|
||||
class ActivityTest(ZulipTestCase):
|
||||
@mock.patch("stripe.Customer.list", return_value=[])
|
||||
def test_activity(self, unused_mock: mock.Mock) -> None:
|
||||
self.login("hamlet")
|
||||
client, _ = Client.objects.get_or_create(name="website")
|
||||
query = "/json/messages/flags"
|
||||
last_visit = timezone_now()
|
||||
count = 150
|
||||
for activity_user_profile in UserProfile.objects.all():
|
||||
UserActivity.objects.get_or_create(
|
||||
user_profile=activity_user_profile,
|
||||
client=client,
|
||||
query=query,
|
||||
count=count,
|
||||
last_visit=last_visit,
|
||||
)
|
||||
|
||||
# Fails when not staff
|
||||
result = self.client_get("/activity")
|
||||
self.assertEqual(result.status_code, 302)
|
||||
|
||||
user_profile = self.example_user("hamlet")
|
||||
user_profile.is_staff = True
|
||||
user_profile.save(update_fields=["is_staff"])
|
||||
|
||||
with self.assert_database_query_count(12):
|
||||
result = self.client_get("/activity")
|
||||
self.assertEqual(result.status_code, 200)
|
||||
|
||||
with self.assert_database_query_count(4):
|
||||
result = self.client_get("/activity/remote")
|
||||
self.assertEqual(result.status_code, 200)
|
||||
|
||||
with self.assert_database_query_count(4):
|
||||
result = self.client_get("/activity/integrations")
|
||||
self.assertEqual(result.status_code, 200)
|
||||
|
||||
with self.assert_database_query_count(8):
|
||||
result = self.client_get("/realm_activity/zulip/")
|
||||
self.assertEqual(result.status_code, 200)
|
||||
|
||||
iago = self.example_user("iago")
|
||||
with self.assert_database_query_count(5):
|
||||
result = self.client_get(f"/user_activity/{iago.id}/")
|
||||
self.assertEqual(result.status_code, 200)
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,4 +1,5 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import List, Optional
|
||||
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
@@ -9,8 +10,7 @@ from analytics.models import FillState, RealmCount, StreamCount, UserCount
|
||||
from analytics.views.stats import rewrite_client_arrays, sort_by_totals, sort_client_labels
|
||||
from zerver.lib.test_classes import ZulipTestCase
|
||||
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, datetime_to_timestamp
|
||||
from zerver.models import Client
|
||||
from zerver.models.realms import get_realm
|
||||
from zerver.models import Client, get_realm
|
||||
|
||||
|
||||
class TestStatsEndpoint(ZulipTestCase):
|
||||
@@ -83,11 +83,11 @@ class TestGetChartData(ZulipTestCase):
|
||||
ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(4)
|
||||
]
|
||||
|
||||
def data(self, i: int) -> list[int]:
|
||||
def data(self, i: int) -> List[int]:
|
||||
return [0, 0, i, 0]
|
||||
|
||||
def insert_data(
|
||||
self, stat: CountStat, realm_subgroups: list[str | None], user_subgroups: list[str]
|
||||
self, stat: CountStat, realm_subgroups: List[Optional[str]], user_subgroups: List[str]
|
||||
) -> None:
|
||||
if stat.frequency == CountStat.HOUR:
|
||||
insert_time = self.end_times_hour[2]
|
||||
@@ -191,21 +191,21 @@ class TestGetChartData(ZulipTestCase):
|
||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
||||
"frequency": CountStat.DAY,
|
||||
"everyone": {
|
||||
"Public channels": self.data(100),
|
||||
"Private channels": self.data(0),
|
||||
"Public streams": self.data(100),
|
||||
"Private streams": self.data(0),
|
||||
"Direct messages": self.data(101),
|
||||
"Group direct messages": self.data(0),
|
||||
},
|
||||
"user": {
|
||||
"Public channels": self.data(200),
|
||||
"Private channels": self.data(201),
|
||||
"Public streams": self.data(200),
|
||||
"Private streams": self.data(201),
|
||||
"Direct messages": self.data(0),
|
||||
"Group direct messages": self.data(0),
|
||||
},
|
||||
"display_order": [
|
||||
"Direct messages",
|
||||
"Public channels",
|
||||
"Private channels",
|
||||
"Public streams",
|
||||
"Private streams",
|
||||
"Group direct messages",
|
||||
],
|
||||
"result": "success",
|
||||
@@ -305,7 +305,7 @@ class TestGetChartData(ZulipTestCase):
|
||||
},
|
||||
subdomain="zephyr",
|
||||
)
|
||||
self.assert_json_error(result, "Invalid channel ID")
|
||||
self.assert_json_error(result, "Invalid stream ID")
|
||||
|
||||
def test_include_empty_subgroups(self) -> None:
|
||||
FillState.objects.create(
|
||||
@@ -342,8 +342,8 @@ class TestGetChartData(ZulipTestCase):
|
||||
self.assertEqual(
|
||||
data["everyone"],
|
||||
{
|
||||
"Public channels": [0],
|
||||
"Private channels": [0],
|
||||
"Public streams": [0],
|
||||
"Private streams": [0],
|
||||
"Direct messages": [0],
|
||||
"Group direct messages": [0],
|
||||
},
|
||||
@@ -351,8 +351,8 @@ class TestGetChartData(ZulipTestCase):
|
||||
self.assertEqual(
|
||||
data["user"],
|
||||
{
|
||||
"Public channels": [0],
|
||||
"Private channels": [0],
|
||||
"Public streams": [0],
|
||||
"Private streams": [0],
|
||||
"Direct messages": [0],
|
||||
"Group direct messages": [0],
|
||||
},
|
||||
@@ -604,7 +604,7 @@ class TestGetChartData(ZulipTestCase):
|
||||
|
||||
class TestGetChartDataHelpers(ZulipTestCase):
|
||||
def test_sort_by_totals(self) -> None:
|
||||
empty: list[int] = []
|
||||
empty: List[int] = []
|
||||
value_arrays = {"c": [0, 1], "a": [9], "b": [1, 1, 1], "d": empty}
|
||||
self.assertEqual(sort_by_totals(value_arrays), ["a", "b", "c", "d"])
|
||||
|
||||
@@ -660,9 +660,7 @@ class TestMapArrays(ZulipTestCase):
|
||||
"website": [1, 2, 3],
|
||||
"ZulipiOS": [1, 2, 3],
|
||||
"ZulipElectron": [2, 5, 7],
|
||||
"ZulipMobile": [1, 2, 3],
|
||||
"ZulipMobile/flutter": [1, 1, 1],
|
||||
"ZulipFlutter": [1, 1, 1],
|
||||
"ZulipMobile": [1, 5, 7],
|
||||
"ZulipPython": [1, 2, 3],
|
||||
"API: Python": [1, 2, 3],
|
||||
"SomethingRandom": [4, 5, 6],
|
||||
@@ -677,8 +675,7 @@ class TestMapArrays(ZulipTestCase):
|
||||
"Old desktop app": [32, 36, 39],
|
||||
"Old iOS app": [1, 2, 3],
|
||||
"Desktop app": [2, 5, 7],
|
||||
"Mobile app (React Native)": [1, 2, 3],
|
||||
"Mobile app beta (Flutter)": [2, 2, 2],
|
||||
"Mobile app": [1, 5, 7],
|
||||
"Web app": [1, 2, 3],
|
||||
"Python API": [2, 4, 6],
|
||||
"SomethingRandom": [4, 5, 6],
|
||||
|
||||
791
analytics/tests/test_support_views.py
Normal file
791
analytics/tests/test_support_views.py
Normal file
@@ -0,0 +1,791 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
from unittest import mock
|
||||
|
||||
import orjson
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
|
||||
from corporate.lib.stripe import add_months, update_sponsorship_status
|
||||
from corporate.models import Customer, CustomerPlan, LicenseLedger, get_customer_by_realm
|
||||
from zerver.actions.invites import do_create_multiuse_invite_link
|
||||
from zerver.actions.realm_settings import do_change_realm_org_type, do_send_realm_reactivation_email
|
||||
from zerver.actions.user_settings import do_change_user_setting
|
||||
from zerver.lib.test_classes import ZulipTestCase
|
||||
from zerver.lib.test_helpers import reset_email_visibility_to_everyone_in_zulip_realm
|
||||
from zerver.models import (
|
||||
MultiuseInvite,
|
||||
PreregistrationUser,
|
||||
Realm,
|
||||
UserMessage,
|
||||
UserProfile,
|
||||
get_org_type_display_name,
|
||||
get_realm,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse
|
||||
|
||||
import uuid
|
||||
|
||||
from zilencer.models import RemoteZulipServer
|
||||
|
||||
|
||||
class TestRemoteServerSupportEndpoint(ZulipTestCase):
|
||||
@override
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
|
||||
# Set up some initial example data.
|
||||
for i in range(20):
|
||||
hostname = f"zulip-{i}.example.com"
|
||||
RemoteZulipServer.objects.create(
|
||||
hostname=hostname, contact_email=f"admin@{hostname}", plan_type=1, uuid=uuid.uuid4()
|
||||
)
|
||||
|
||||
def test_search(self) -> None:
|
||||
self.login("cordelia")
|
||||
|
||||
result = self.client_get("/activity/remote/support")
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
# Iago is the user with the appropriate permissions to access this page.
|
||||
self.login("iago")
|
||||
assert self.example_user("iago").is_staff
|
||||
|
||||
result = self.client_get("/activity/remote/support")
|
||||
self.assert_in_success_response(
|
||||
[
|
||||
'input type="text" name="q" class="input-xxlarge search-query" placeholder="hostname or contact email"'
|
||||
],
|
||||
result,
|
||||
)
|
||||
|
||||
result = self.client_get("/activity/remote/support", {"q": "zulip-1.example.com"})
|
||||
self.assert_in_success_response(["<h3>zulip-1.example.com</h3>"], result)
|
||||
self.assert_not_in_success_response(["<h3>zulip-2.example.com</h3>"], result)
|
||||
|
||||
result = self.client_get("/activity/remote/support", {"q": "example.com"})
|
||||
for i in range(20):
|
||||
self.assert_in_success_response([f"<h3>zulip-{i}.example.com</h3>"], result)
|
||||
|
||||
result = self.client_get("/activity/remote/support", {"q": "admin@zulip-2.example.com"})
|
||||
self.assert_in_success_response(["<h3>zulip-2.example.com</h3>"], result)
|
||||
self.assert_in_success_response(["<b>Contact email</b>: admin@zulip-2.example.com"], result)
|
||||
self.assert_not_in_success_response(["<h3>zulip-1.example.com</h3>"], result)
|
||||
|
||||
|
||||
class TestSupportEndpoint(ZulipTestCase):
|
||||
def test_search(self) -> None:
|
||||
reset_email_visibility_to_everyone_in_zulip_realm()
|
||||
lear_user = self.lear_user("king")
|
||||
lear_user.is_staff = True
|
||||
lear_user.save(update_fields=["is_staff"])
|
||||
lear_realm = get_realm("lear")
|
||||
|
||||
def assert_user_details_in_html_response(
|
||||
html_response: "TestHttpResponse", full_name: str, email: str, role: str
|
||||
) -> None:
|
||||
self.assert_in_success_response(
|
||||
[
|
||||
'<span class="label">user</span>\n',
|
||||
f"<h3>{full_name}</h3>",
|
||||
f"<b>Email</b>: {email}",
|
||||
"<b>Is active</b>: True<br />",
|
||||
f"<b>Role</b>: {role}<br />",
|
||||
],
|
||||
html_response,
|
||||
)
|
||||
|
||||
def create_invitation(
|
||||
stream: str, invitee_email: str, realm: Optional[Realm] = None
|
||||
) -> None:
|
||||
invite_expires_in_minutes = 10 * 24 * 60
|
||||
self.client_post(
|
||||
"/json/invites",
|
||||
{
|
||||
"invitee_emails": [invitee_email],
|
||||
"stream_ids": orjson.dumps([self.get_stream_id(stream, realm)]).decode(),
|
||||
"invite_expires_in_minutes": invite_expires_in_minutes,
|
||||
"invite_as": PreregistrationUser.INVITE_AS["MEMBER"],
|
||||
},
|
||||
subdomain=realm.string_id if realm is not None else "zulip",
|
||||
)
|
||||
|
||||
def check_hamlet_user_query_result(result: "TestHttpResponse") -> None:
|
||||
assert_user_details_in_html_response(
|
||||
result, "King Hamlet", self.example_email("hamlet"), "Member"
|
||||
)
|
||||
self.assert_in_success_response(
|
||||
[
|
||||
f"<b>Admins</b>: {self.example_email('iago')}\n",
|
||||
f"<b>Owners</b>: {self.example_email('desdemona')}\n",
|
||||
'class="copy-button" data-copytext="{}">'.format(self.example_email("iago")),
|
||||
'class="copy-button" data-copytext="{}">'.format(
|
||||
self.example_email("desdemona")
|
||||
),
|
||||
],
|
||||
result,
|
||||
)
|
||||
|
||||
def check_lear_user_query_result(result: "TestHttpResponse") -> None:
|
||||
assert_user_details_in_html_response(
|
||||
result, lear_user.full_name, lear_user.email, "Member"
|
||||
)
|
||||
|
||||
def check_othello_user_query_result(result: "TestHttpResponse") -> None:
|
||||
assert_user_details_in_html_response(
|
||||
result, "Othello, the Moor of Venice", self.example_email("othello"), "Member"
|
||||
)
|
||||
|
||||
def check_polonius_user_query_result(result: "TestHttpResponse") -> None:
|
||||
assert_user_details_in_html_response(
|
||||
result, "Polonius", self.example_email("polonius"), "Guest"
|
||||
)
|
||||
|
||||
def check_zulip_realm_query_result(result: "TestHttpResponse") -> None:
|
||||
zulip_realm = get_realm("zulip")
|
||||
first_human_user = zulip_realm.get_first_human_user()
|
||||
assert first_human_user is not None
|
||||
self.assert_in_success_response(
|
||||
[
|
||||
f"<b>First human user</b>: {first_human_user.delivery_email}\n",
|
||||
f'<input type="hidden" name="realm_id" value="{zulip_realm.id}"',
|
||||
"Zulip Dev</h3>",
|
||||
'<option value="1" selected>Self-hosted</option>',
|
||||
'<option value="2" >Limited</option>',
|
||||
'input type="number" name="discount" value="None"',
|
||||
'<option value="active" selected>Active</option>',
|
||||
'<option value="deactivated" >Deactivated</option>',
|
||||
f'<option value="{zulip_realm.org_type}" selected>',
|
||||
'scrub-realm-button">',
|
||||
'data-string-id="zulip"',
|
||||
],
|
||||
result,
|
||||
)
|
||||
|
||||
def check_lear_realm_query_result(result: "TestHttpResponse") -> None:
|
||||
self.assert_in_success_response(
|
||||
[
|
||||
f'<input type="hidden" name="realm_id" value="{lear_realm.id}"',
|
||||
"Lear & Co.</h3>",
|
||||
'<option value="1" selected>Self-hosted</option>',
|
||||
'<option value="2" >Limited</option>',
|
||||
'input type="number" name="discount" value="None"',
|
||||
'<option value="active" selected>Active</option>',
|
||||
'<option value="deactivated" >Deactivated</option>',
|
||||
'scrub-realm-button">',
|
||||
'data-string-id="lear"',
|
||||
"<b>Name</b>: Zulip Cloud Standard",
|
||||
"<b>Status</b>: Active",
|
||||
"<b>Billing schedule</b>: Annual",
|
||||
"<b>Licenses</b>: 2/10 (Manual)",
|
||||
"<b>Price per license</b>: $80.0",
|
||||
"<b>Next invoice date</b>: 02 January 2017",
|
||||
'<option value="send_invoice" selected>',
|
||||
'<option value="charge_automatically" >',
|
||||
],
|
||||
result,
|
||||
)
|
||||
|
||||
def check_preregistration_user_query_result(
|
||||
result: "TestHttpResponse", email: str, invite: bool = False
|
||||
) -> None:
|
||||
self.assert_in_success_response(
|
||||
[
|
||||
'<span class="label">preregistration user</span>\n',
|
||||
f"<b>Email</b>: {email}",
|
||||
],
|
||||
result,
|
||||
)
|
||||
if invite:
|
||||
self.assert_in_success_response(['<span class="label">invite</span>'], result)
|
||||
self.assert_in_success_response(
|
||||
[
|
||||
"<b>Expires in</b>: 1\xa0week, 3\xa0days",
|
||||
"<b>Status</b>: Link has not been used",
|
||||
],
|
||||
result,
|
||||
)
|
||||
self.assert_in_success_response([], result)
|
||||
else:
|
||||
self.assert_not_in_success_response(['<span class="label">invite</span>'], result)
|
||||
self.assert_in_success_response(
|
||||
[
|
||||
"<b>Expires in</b>: 1\xa0day",
|
||||
"<b>Status</b>: Link has not been used",
|
||||
],
|
||||
result,
|
||||
)
|
||||
|
||||
def check_realm_creation_query_result(result: "TestHttpResponse", email: str) -> None:
|
||||
self.assert_in_success_response(
|
||||
[
|
||||
'<span class="label">preregistration user</span>\n',
|
||||
'<span class="label">realm creation</span>\n',
|
||||
"<b>Link</b>: http://testserver/accounts/do_confirm/",
|
||||
"<b>Expires in</b>: 1\xa0day",
|
||||
],
|
||||
result,
|
||||
)
|
||||
|
||||
def check_multiuse_invite_link_query_result(result: "TestHttpResponse") -> None:
|
||||
self.assert_in_success_response(
|
||||
[
|
||||
'<span class="label">multiuse invite</span>\n',
|
||||
"<b>Link</b>: http://zulip.testserver/join/",
|
||||
"<b>Expires in</b>: 1\xa0week, 3\xa0days",
|
||||
],
|
||||
result,
|
||||
)
|
||||
|
||||
def check_realm_reactivation_link_query_result(result: "TestHttpResponse") -> None:
|
||||
self.assert_in_success_response(
|
||||
[
|
||||
'<span class="label">realm reactivation</span>\n',
|
||||
"<b>Link</b>: http://zulip.testserver/reactivate/",
|
||||
"<b>Expires in</b>: 1\xa0day",
|
||||
],
|
||||
result,
|
||||
)
|
||||
|
||||
def get_check_query_result(
|
||||
query: str, count: int, subdomain: str = "zulip"
|
||||
) -> "TestHttpResponse":
|
||||
result = self.client_get("/activity/support", {"q": query}, subdomain=subdomain)
|
||||
self.assertEqual(result.content.decode().count("support-query-result"), count)
|
||||
return result
|
||||
|
||||
self.login("cordelia")
|
||||
|
||||
result = self.client_get("/activity/support")
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
self.login("iago")
|
||||
|
||||
do_change_user_setting(
|
||||
self.example_user("hamlet"),
|
||||
"email_address_visibility",
|
||||
UserProfile.EMAIL_ADDRESS_VISIBILITY_NOBODY,
|
||||
acting_user=None,
|
||||
)
|
||||
|
||||
customer = Customer.objects.create(realm=lear_realm, stripe_customer_id="cus_123")
|
||||
now = datetime(2016, 1, 2, tzinfo=timezone.utc)
|
||||
plan = CustomerPlan.objects.create(
|
||||
customer=customer,
|
||||
billing_cycle_anchor=now,
|
||||
billing_schedule=CustomerPlan.ANNUAL,
|
||||
tier=CustomerPlan.STANDARD,
|
||||
price_per_license=8000,
|
||||
next_invoice_date=add_months(now, 12),
|
||||
)
|
||||
LicenseLedger.objects.create(
|
||||
licenses=10,
|
||||
licenses_at_next_renewal=10,
|
||||
event_time=timezone_now(),
|
||||
is_renewal=True,
|
||||
plan=plan,
|
||||
)
|
||||
|
||||
result = self.client_get("/activity/support")
|
||||
self.assert_in_success_response(
|
||||
['<input type="text" name="q" class="input-xxlarge search-query"'], result
|
||||
)
|
||||
|
||||
result = get_check_query_result(self.example_email("hamlet"), 1)
|
||||
check_hamlet_user_query_result(result)
|
||||
check_zulip_realm_query_result(result)
|
||||
|
||||
# Search should be case-insensitive:
|
||||
assert self.example_email("hamlet") != self.example_email("hamlet").upper()
|
||||
result = get_check_query_result(self.example_email("hamlet").upper(), 1)
|
||||
check_hamlet_user_query_result(result)
|
||||
check_zulip_realm_query_result(result)
|
||||
|
||||
result = get_check_query_result(lear_user.email, 1)
|
||||
check_lear_user_query_result(result)
|
||||
check_lear_realm_query_result(result)
|
||||
|
||||
result = get_check_query_result(self.example_email("polonius"), 1)
|
||||
check_polonius_user_query_result(result)
|
||||
check_zulip_realm_query_result(result)
|
||||
|
||||
result = get_check_query_result("lear", 1)
|
||||
check_lear_realm_query_result(result)
|
||||
|
||||
result = get_check_query_result("http://lear.testserver", 1)
|
||||
check_lear_realm_query_result(result)
|
||||
|
||||
with self.settings(REALM_HOSTS={"zulip": "localhost"}):
|
||||
result = get_check_query_result("http://localhost", 1)
|
||||
check_zulip_realm_query_result(result)
|
||||
|
||||
result = get_check_query_result("hamlet@zulip.com, lear", 2)
|
||||
check_hamlet_user_query_result(result)
|
||||
check_zulip_realm_query_result(result)
|
||||
check_lear_realm_query_result(result)
|
||||
|
||||
result = get_check_query_result("King hamlet,lear", 2)
|
||||
check_hamlet_user_query_result(result)
|
||||
check_zulip_realm_query_result(result)
|
||||
check_lear_realm_query_result(result)
|
||||
|
||||
result = get_check_query_result("Othello, the Moor of Venice", 1)
|
||||
check_othello_user_query_result(result)
|
||||
check_zulip_realm_query_result(result)
|
||||
|
||||
result = get_check_query_result("lear, Hamlet <hamlet@zulip.com>", 2)
|
||||
check_hamlet_user_query_result(result)
|
||||
check_zulip_realm_query_result(result)
|
||||
check_lear_realm_query_result(result)
|
||||
|
||||
with mock.patch(
|
||||
"analytics.views.support.timezone_now",
|
||||
return_value=timezone_now() - timedelta(minutes=50),
|
||||
):
|
||||
self.client_post("/accounts/home/", {"email": self.nonreg_email("test")})
|
||||
self.login("iago")
|
||||
result = get_check_query_result(self.nonreg_email("test"), 1)
|
||||
check_preregistration_user_query_result(result, self.nonreg_email("test"))
|
||||
check_zulip_realm_query_result(result)
|
||||
|
||||
create_invitation("Denmark", self.nonreg_email("test1"))
|
||||
result = get_check_query_result(self.nonreg_email("test1"), 1)
|
||||
check_preregistration_user_query_result(result, self.nonreg_email("test1"), invite=True)
|
||||
check_zulip_realm_query_result(result)
|
||||
|
||||
email = self.nonreg_email("alice")
|
||||
self.submit_realm_creation_form(
|
||||
email, realm_subdomain="custom-test", realm_name="Zulip test"
|
||||
)
|
||||
result = get_check_query_result(email, 1)
|
||||
check_realm_creation_query_result(result, email)
|
||||
|
||||
invite_expires_in_minutes = 10 * 24 * 60
|
||||
do_create_multiuse_invite_link(
|
||||
self.example_user("hamlet"),
|
||||
invited_as=1,
|
||||
invite_expires_in_minutes=invite_expires_in_minutes,
|
||||
)
|
||||
result = get_check_query_result("zulip", 2)
|
||||
check_multiuse_invite_link_query_result(result)
|
||||
check_zulip_realm_query_result(result)
|
||||
MultiuseInvite.objects.all().delete()
|
||||
|
||||
do_send_realm_reactivation_email(get_realm("zulip"), acting_user=None)
|
||||
result = get_check_query_result("zulip", 2)
|
||||
check_realm_reactivation_link_query_result(result)
|
||||
check_zulip_realm_query_result(result)
|
||||
|
||||
lear_nonreg_email = "newguy@lear.org"
|
||||
self.client_post("/accounts/home/", {"email": lear_nonreg_email}, subdomain="lear")
|
||||
result = get_check_query_result(lear_nonreg_email, 1)
|
||||
check_preregistration_user_query_result(result, lear_nonreg_email)
|
||||
check_lear_realm_query_result(result)
|
||||
|
||||
self.login_user(lear_user)
|
||||
create_invitation("general", "newguy2@lear.org", lear_realm)
|
||||
result = get_check_query_result("newguy2@lear.org", 1, lear_realm.string_id)
|
||||
check_preregistration_user_query_result(result, "newguy2@lear.org", invite=True)
|
||||
check_lear_realm_query_result(result)
|
||||
|
||||
def test_get_org_type_display_name(self) -> None:
|
||||
self.assertEqual(get_org_type_display_name(Realm.ORG_TYPES["business"]["id"]), "Business")
|
||||
self.assertEqual(get_org_type_display_name(883), "")
|
||||
|
||||
def test_unspecified_org_type_correctly_displayed(self) -> None:
|
||||
"""
|
||||
Unspecified org type is special in that it is marked to not be shown
|
||||
on the registration page (because organitions are not meant to be able to choose it),
|
||||
but should be correctly shown at the /support/ endpoint.
|
||||
"""
|
||||
realm = get_realm("zulip")
|
||||
|
||||
do_change_realm_org_type(realm, 0, acting_user=None)
|
||||
self.assertEqual(realm.org_type, 0)
|
||||
|
||||
self.login("iago")
|
||||
|
||||
result = self.client_get("/activity/support", {"q": "zulip"}, subdomain="zulip")
|
||||
self.assert_in_success_response(
|
||||
[
|
||||
f'<input type="hidden" name="realm_id" value="{realm.id}"',
|
||||
'<option value="0" selected>',
|
||||
],
|
||||
result,
|
||||
)
|
||||
|
||||
@mock.patch("analytics.views.support.update_billing_method_of_current_plan")
|
||||
def test_change_billing_method(self, m: mock.Mock) -> None:
|
||||
cordelia = self.example_user("cordelia")
|
||||
self.login_user(cordelia)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
iago = self.example_user("iago")
|
||||
self.login_user(iago)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support",
|
||||
{"realm_id": f"{iago.realm_id}", "billing_method": "charge_automatically"},
|
||||
)
|
||||
m.assert_called_once_with(get_realm("zulip"), charge_automatically=True, acting_user=iago)
|
||||
self.assert_in_success_response(
|
||||
["Billing method of zulip updated to charge automatically"], result
|
||||
)
|
||||
|
||||
m.reset_mock()
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{iago.realm_id}", "billing_method": "send_invoice"}
|
||||
)
|
||||
m.assert_called_once_with(get_realm("zulip"), charge_automatically=False, acting_user=iago)
|
||||
self.assert_in_success_response(
|
||||
["Billing method of zulip updated to pay by invoice"], result
|
||||
)
|
||||
|
||||
def test_change_realm_plan_type(self) -> None:
|
||||
cordelia = self.example_user("cordelia")
|
||||
self.login_user(cordelia)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
iago = self.example_user("iago")
|
||||
self.login_user(iago)
|
||||
|
||||
with mock.patch("analytics.views.support.do_change_realm_plan_type") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "2"}
|
||||
)
|
||||
m.assert_called_once_with(get_realm("zulip"), 2, acting_user=iago)
|
||||
self.assert_in_success_response(
|
||||
["Plan type of zulip changed from self-hosted to limited"], result
|
||||
)
|
||||
|
||||
with mock.patch("analytics.views.support.do_change_realm_plan_type") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "10"}
|
||||
)
|
||||
m.assert_called_once_with(get_realm("zulip"), 10, acting_user=iago)
|
||||
self.assert_in_success_response(
|
||||
["Plan type of zulip changed from self-hosted to plus"], result
|
||||
)
|
||||
|
||||
def test_change_org_type(self) -> None:
|
||||
cordelia = self.example_user("cordelia")
|
||||
self.login_user(cordelia)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{cordelia.realm_id}", "org_type": "70"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
iago = self.example_user("iago")
|
||||
self.login_user(iago)
|
||||
|
||||
with mock.patch("analytics.views.support.do_change_realm_org_type") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{iago.realm_id}", "org_type": "70"}
|
||||
)
|
||||
m.assert_called_once_with(get_realm("zulip"), 70, acting_user=iago)
|
||||
self.assert_in_success_response(
|
||||
["Org type of zulip changed from Business to Government"], result
|
||||
)
|
||||
|
||||
def test_attach_discount(self) -> None:
|
||||
cordelia = self.example_user("cordelia")
|
||||
lear_realm = get_realm("lear")
|
||||
self.login_user(cordelia)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
iago = self.example_user("iago")
|
||||
self.login("iago")
|
||||
|
||||
with mock.patch("analytics.views.support.attach_discount_to_realm") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}
|
||||
)
|
||||
m.assert_called_once_with(get_realm("lear"), 25, acting_user=iago)
|
||||
self.assert_in_success_response(["Discount of lear changed to 25% from 0%"], result)
|
||||
|
||||
def test_change_sponsorship_status(self) -> None:
|
||||
lear_realm = get_realm("lear")
|
||||
self.assertIsNone(get_customer_by_realm(lear_realm))
|
||||
|
||||
cordelia = self.example_user("cordelia")
|
||||
self.login_user(cordelia)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
iago = self.example_user("iago")
|
||||
self.login_user(iago)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"}
|
||||
)
|
||||
self.assert_in_success_response(["lear marked as pending sponsorship."], result)
|
||||
customer = get_customer_by_realm(lear_realm)
|
||||
assert customer is not None
|
||||
self.assertTrue(customer.sponsorship_pending)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "false"}
|
||||
)
|
||||
self.assert_in_success_response(["lear is no longer pending sponsorship."], result)
|
||||
customer = get_customer_by_realm(lear_realm)
|
||||
assert customer is not None
|
||||
self.assertFalse(customer.sponsorship_pending)
|
||||
|
||||
def test_approve_sponsorship(self) -> None:
|
||||
lear_realm = get_realm("lear")
|
||||
update_sponsorship_status(lear_realm, True, acting_user=None)
|
||||
king_user = self.lear_user("king")
|
||||
king_user.role = UserProfile.ROLE_REALM_OWNER
|
||||
king_user.save()
|
||||
|
||||
cordelia = self.example_user("cordelia")
|
||||
self.login_user(cordelia)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support",
|
||||
{"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"},
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
iago = self.example_user("iago")
|
||||
self.login_user(iago)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support",
|
||||
{"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"},
|
||||
)
|
||||
self.assert_in_success_response(["Sponsorship approved for lear"], result)
|
||||
lear_realm.refresh_from_db()
|
||||
self.assertEqual(lear_realm.plan_type, Realm.PLAN_TYPE_STANDARD_FREE)
|
||||
customer = get_customer_by_realm(lear_realm)
|
||||
assert customer is not None
|
||||
self.assertFalse(customer.sponsorship_pending)
|
||||
messages = UserMessage.objects.filter(user_profile=king_user)
|
||||
self.assertIn(
|
||||
"request for sponsored hosting has been approved", messages[0].message.content
|
||||
)
|
||||
self.assert_length(messages, 1)
|
||||
|
||||
def test_activate_or_deactivate_realm(self) -> None:
|
||||
cordelia = self.example_user("cordelia")
|
||||
lear_realm = get_realm("lear")
|
||||
self.login_user(cordelia)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
self.login("iago")
|
||||
|
||||
with mock.patch("analytics.views.support.do_deactivate_realm") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"}
|
||||
)
|
||||
m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago"))
|
||||
self.assert_in_success_response(["lear deactivated"], result)
|
||||
|
||||
with mock.patch("analytics.views.support.do_send_realm_reactivation_email") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "status": "active"}
|
||||
)
|
||||
m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago"))
|
||||
self.assert_in_success_response(
|
||||
["Realm reactivation email sent to admins of lear"], result
|
||||
)
|
||||
|
||||
def test_change_subdomain(self) -> None:
|
||||
cordelia = self.example_user("cordelia")
|
||||
lear_realm = get_realm("lear")
|
||||
self.login_user(cordelia)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new_name"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
self.login("iago")
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/activity/support?q=new-name")
|
||||
realm_id = lear_realm.id
|
||||
lear_realm = get_realm("new-name")
|
||||
self.assertEqual(lear_realm.id, realm_id)
|
||||
self.assertTrue(Realm.objects.filter(string_id="lear").exists())
|
||||
self.assertTrue(Realm.objects.filter(string_id="lear")[0].deactivated)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"}
|
||||
)
|
||||
self.assert_in_success_response(
|
||||
["Subdomain already in use. Please choose a different one."], result
|
||||
)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "zulip"}
|
||||
)
|
||||
self.assert_in_success_response(
|
||||
["Subdomain already in use. Please choose a different one."], result
|
||||
)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "lear"}
|
||||
)
|
||||
self.assert_in_success_response(
|
||||
["Subdomain already in use. Please choose a different one."], result
|
||||
)
|
||||
|
||||
# Test renaming to a "reserved" subdomain
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "your-org"}
|
||||
)
|
||||
self.assert_in_success_response(
|
||||
["Subdomain reserved. Please choose a different one."], result
|
||||
)
|
||||
|
||||
def test_downgrade_realm(self) -> None:
|
||||
cordelia = self.example_user("cordelia")
|
||||
self.login_user(cordelia)
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
iago = self.example_user("iago")
|
||||
self.login_user(iago)
|
||||
|
||||
with mock.patch("analytics.views.support.downgrade_at_the_end_of_billing_cycle") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support",
|
||||
{
|
||||
"realm_id": f"{iago.realm_id}",
|
||||
"modify_plan": "downgrade_at_billing_cycle_end",
|
||||
},
|
||||
)
|
||||
m.assert_called_once_with(get_realm("zulip"))
|
||||
self.assert_in_success_response(
|
||||
["zulip marked for downgrade at the end of billing cycle"], result
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"analytics.views.support.downgrade_now_without_creating_additional_invoices"
|
||||
) as m:
|
||||
result = self.client_post(
|
||||
"/activity/support",
|
||||
{
|
||||
"realm_id": f"{iago.realm_id}",
|
||||
"modify_plan": "downgrade_now_without_additional_licenses",
|
||||
},
|
||||
)
|
||||
m.assert_called_once_with(get_realm("zulip"))
|
||||
self.assert_in_success_response(
|
||||
["zulip downgraded without creating additional invoices"], result
|
||||
)
|
||||
|
||||
with mock.patch(
|
||||
"analytics.views.support.downgrade_now_without_creating_additional_invoices"
|
||||
) as m1:
|
||||
with mock.patch("analytics.views.support.void_all_open_invoices", return_value=1) as m2:
|
||||
result = self.client_post(
|
||||
"/activity/support",
|
||||
{
|
||||
"realm_id": f"{iago.realm_id}",
|
||||
"modify_plan": "downgrade_now_void_open_invoices",
|
||||
},
|
||||
)
|
||||
m1.assert_called_once_with(get_realm("zulip"))
|
||||
m2.assert_called_once_with(get_realm("zulip"))
|
||||
self.assert_in_success_response(
|
||||
["zulip downgraded and voided 1 open invoices"], result
|
||||
)
|
||||
|
||||
with mock.patch("analytics.views.support.switch_realm_from_standard_to_plus_plan") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support",
|
||||
{
|
||||
"realm_id": f"{iago.realm_id}",
|
||||
"modify_plan": "upgrade_to_plus",
|
||||
},
|
||||
)
|
||||
m.assert_called_once_with(get_realm("zulip"))
|
||||
self.assert_in_success_response(["zulip upgraded to Plus"], result)
|
||||
|
||||
def test_scrub_realm(self) -> None:
|
||||
cordelia = self.example_user("cordelia")
|
||||
lear_realm = get_realm("lear")
|
||||
self.login_user(cordelia)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
self.login("iago")
|
||||
|
||||
with mock.patch("analytics.views.support.do_scrub_realm") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{lear_realm.id}", "scrub_realm": "true"}
|
||||
)
|
||||
m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago"))
|
||||
self.assert_in_success_response(["lear scrubbed"], result)
|
||||
|
||||
with mock.patch("analytics.views.support.do_scrub_realm") as m:
|
||||
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}"})
|
||||
self.assert_json_error(result, "Invalid parameters")
|
||||
m.assert_not_called()
|
||||
|
||||
def test_delete_user(self) -> None:
|
||||
cordelia = self.example_user("cordelia")
|
||||
hamlet = self.example_user("hamlet")
|
||||
hamlet_email = hamlet.delivery_email
|
||||
realm = get_realm("zulip")
|
||||
self.login_user(cordelia)
|
||||
|
||||
result = self.client_post(
|
||||
"/activity/support", {"realm_id": f"{realm.id}", "delete_user_by_id": hamlet.id}
|
||||
)
|
||||
self.assertEqual(result.status_code, 302)
|
||||
self.assertEqual(result["Location"], "/login/")
|
||||
|
||||
self.login("iago")
|
||||
|
||||
with mock.patch("analytics.views.support.do_delete_user_preserving_messages") as m:
|
||||
result = self.client_post(
|
||||
"/activity/support",
|
||||
{"realm_id": f"{realm.id}", "delete_user_by_id": hamlet.id},
|
||||
)
|
||||
m.assert_called_once_with(hamlet)
|
||||
self.assert_in_success_response([f"{hamlet_email} in zulip deleted"], result)
|
||||
@@ -1,38 +1,51 @@
|
||||
from django.conf import settings
|
||||
from typing import List, Union
|
||||
|
||||
from django.conf.urls import include
|
||||
from django.urls import path
|
||||
from django.urls.resolvers import URLPattern, URLResolver
|
||||
|
||||
from analytics.views.installation_activity import (
|
||||
get_installation_activity,
|
||||
get_integrations_activity,
|
||||
)
|
||||
from analytics.views.realm_activity import get_realm_activity
|
||||
from analytics.views.remote_activity import get_remote_server_activity
|
||||
from analytics.views.stats import (
|
||||
get_chart_data,
|
||||
get_chart_data_for_installation,
|
||||
get_chart_data_for_realm,
|
||||
get_chart_data_for_remote_installation,
|
||||
get_chart_data_for_remote_realm,
|
||||
get_chart_data_for_stream,
|
||||
stats,
|
||||
stats_for_installation,
|
||||
stats_for_realm,
|
||||
stats_for_remote_installation,
|
||||
stats_for_remote_realm,
|
||||
)
|
||||
from analytics.views.support import remote_servers_support, support
|
||||
from analytics.views.user_activity import get_user_activity
|
||||
from zerver.lib.rest import rest_path
|
||||
|
||||
i18n_urlpatterns: list[URLPattern | URLResolver] = [
|
||||
i18n_urlpatterns: List[Union[URLPattern, URLResolver]] = [
|
||||
# Server admin (user_profile.is_staff) visible stats pages
|
||||
path("activity", get_installation_activity),
|
||||
path("activity/remote", get_remote_server_activity),
|
||||
path("activity/integrations", get_integrations_activity),
|
||||
path("activity/support", support, name="support"),
|
||||
path("activity/remote/support", remote_servers_support, name="remote_servers_support"),
|
||||
path("realm_activity/<realm_str>/", get_realm_activity),
|
||||
path("user_activity/<user_profile_id>/", get_user_activity),
|
||||
path("stats/realm/<realm_str>/", stats_for_realm),
|
||||
path("stats/installation", stats_for_installation),
|
||||
path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation),
|
||||
path(
|
||||
"stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/", stats_for_remote_realm
|
||||
),
|
||||
# User-visible stats page
|
||||
path("stats", stats, name="stats"),
|
||||
]
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
from analytics.views.stats import stats_for_remote_installation, stats_for_remote_realm
|
||||
|
||||
i18n_urlpatterns += [
|
||||
path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation),
|
||||
path(
|
||||
"stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/",
|
||||
stats_for_remote_realm,
|
||||
),
|
||||
]
|
||||
|
||||
# These endpoints are a part of the API (V1), which uses:
|
||||
# * REST verbs
|
||||
# * Basic auth (username:password is email:apiKey)
|
||||
@@ -47,25 +60,16 @@ v1_api_and_json_patterns = [
|
||||
rest_path("analytics/chart_data/stream/<stream_id>", GET=get_chart_data_for_stream),
|
||||
rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm),
|
||||
rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation),
|
||||
rest_path(
|
||||
"analytics/chart_data/remote/<int:remote_server_id>/installation",
|
||||
GET=get_chart_data_for_remote_installation,
|
||||
),
|
||||
rest_path(
|
||||
"analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>",
|
||||
GET=get_chart_data_for_remote_realm,
|
||||
),
|
||||
]
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
from analytics.views.stats import (
|
||||
get_chart_data_for_remote_installation,
|
||||
get_chart_data_for_remote_realm,
|
||||
)
|
||||
|
||||
v1_api_and_json_patterns += [
|
||||
rest_path(
|
||||
"analytics/chart_data/remote/<int:remote_server_id>/installation",
|
||||
GET=get_chart_data_for_remote_installation,
|
||||
),
|
||||
rest_path(
|
||||
"analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>",
|
||||
GET=get_chart_data_for_remote_realm,
|
||||
),
|
||||
]
|
||||
|
||||
i18n_urlpatterns += [
|
||||
path("api/v1/", include(v1_api_and_json_patterns)),
|
||||
path("json/", include(v1_api_and_json_patterns)),
|
||||
|
||||
198
analytics/views/activity_common.py
Normal file
198
analytics/views/activity_common.py
Normal file
@@ -0,0 +1,198 @@
|
||||
import re
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from typing import Any, Callable, Collection, Dict, List, Optional, Sequence, Union
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import connection
|
||||
from django.db.backends.utils import CursorWrapper
|
||||
from django.template import loader
|
||||
from django.urls import reverse
|
||||
from markupsafe import Markup
|
||||
from psycopg2.sql import Composable
|
||||
|
||||
from zerver.lib.url_encoding import append_url_query_string
|
||||
from zerver.models import UserActivity, get_realm
|
||||
|
||||
if sys.version_info < (3, 9): # nocoverage
|
||||
from backports import zoneinfo
|
||||
else: # nocoverage
|
||||
import zoneinfo
|
||||
|
||||
eastern_tz = zoneinfo.ZoneInfo("America/New_York")
|
||||
|
||||
|
||||
if settings.BILLING_ENABLED:
|
||||
pass
|
||||
|
||||
|
||||
def make_table(
|
||||
title: str, cols: Sequence[str], rows: Sequence[Any], has_row_class: bool = False
|
||||
) -> str:
|
||||
if not has_row_class:
|
||||
|
||||
def fix_row(row: Any) -> Dict[str, Any]:
|
||||
return dict(cells=row, row_class=None)
|
||||
|
||||
rows = list(map(fix_row, rows))
|
||||
|
||||
data = dict(title=title, cols=cols, rows=rows)
|
||||
|
||||
content = loader.render_to_string(
|
||||
"analytics/ad_hoc_query.html",
|
||||
dict(data=data),
|
||||
)
|
||||
|
||||
return content
|
||||
|
||||
|
||||
def get_page(
|
||||
query: Composable, cols: Sequence[str], title: str, totals_columns: Sequence[int] = []
|
||||
) -> Dict[str, str]:
|
||||
cursor = connection.cursor()
|
||||
cursor.execute(query)
|
||||
rows = cursor.fetchall()
|
||||
rows = list(map(list, rows))
|
||||
cursor.close()
|
||||
|
||||
def fix_rows(
|
||||
i: int, fixup_func: Union[Callable[[str], Markup], Callable[[datetime], str]]
|
||||
) -> None:
|
||||
for row in rows:
|
||||
row[i] = fixup_func(row[i])
|
||||
|
||||
total_row = []
|
||||
for i, col in enumerate(cols):
|
||||
if col == "Realm":
|
||||
fix_rows(i, realm_activity_link)
|
||||
elif col in ["Last time", "Last visit"]:
|
||||
fix_rows(i, format_date_for_activity_reports)
|
||||
elif col == "Hostname":
|
||||
for row in rows:
|
||||
row[i] = remote_installation_stats_link(row[0], row[i])
|
||||
if len(totals_columns) > 0:
|
||||
if i == 0:
|
||||
total_row.append("Total")
|
||||
elif i in totals_columns:
|
||||
total_row.append(str(sum(row[i] for row in rows if row[i] is not None)))
|
||||
else:
|
||||
total_row.append("")
|
||||
if len(totals_columns) > 0:
|
||||
rows.insert(0, total_row)
|
||||
|
||||
content = make_table(title, cols, rows)
|
||||
|
||||
return dict(
|
||||
content=content,
|
||||
title=title,
|
||||
)
|
||||
|
||||
|
||||
def dictfetchall(cursor: CursorWrapper) -> List[Dict[str, Any]]:
|
||||
"""Returns all rows from a cursor as a dict"""
|
||||
desc = cursor.description
|
||||
return [dict(zip((col[0] for col in desc), row)) for row in cursor.fetchall()]
|
||||
|
||||
|
||||
def format_date_for_activity_reports(date: Optional[datetime]) -> str:
|
||||
if date:
|
||||
return date.astimezone(eastern_tz).strftime("%Y-%m-%d %H:%M")
|
||||
else:
|
||||
return ""
|
||||
|
||||
|
||||
def user_activity_link(email: str, user_profile_id: int) -> Markup:
|
||||
from analytics.views.user_activity import get_user_activity
|
||||
|
||||
url = reverse(get_user_activity, kwargs=dict(user_profile_id=user_profile_id))
|
||||
return Markup('<a href="{url}">{email}</a>').format(url=url, email=email)
|
||||
|
||||
|
||||
def realm_activity_link(realm_str: str) -> Markup:
|
||||
from analytics.views.realm_activity import get_realm_activity
|
||||
|
||||
url = reverse(get_realm_activity, kwargs=dict(realm_str=realm_str))
|
||||
return Markup('<a href="{url}">{realm_str}</a>').format(url=url, realm_str=realm_str)
|
||||
|
||||
|
||||
def realm_stats_link(realm_str: str) -> Markup:
|
||||
from analytics.views.stats import stats_for_realm
|
||||
|
||||
url = reverse(stats_for_realm, kwargs=dict(realm_str=realm_str))
|
||||
return Markup('<a href="{url}"><i class="fa fa-pie-chart"></i></a>').format(url=url)
|
||||
|
||||
|
||||
def realm_support_link(realm_str: str) -> Markup:
|
||||
support_url = reverse("support")
|
||||
query = urlencode({"q": realm_str})
|
||||
url = append_url_query_string(support_url, query)
|
||||
return Markup('<a href="{url}">{realm_str}</a>').format(url=url, realm_str=realm_str)
|
||||
|
||||
|
||||
def realm_url_link(realm_str: str) -> Markup:
|
||||
url = get_realm(realm_str).uri
|
||||
return Markup('<a href="{url}"><i class="fa fa-home"></i></a>').format(url=url)
|
||||
|
||||
|
||||
def remote_installation_stats_link(server_id: int, hostname: str) -> Markup:
|
||||
from analytics.views.stats import stats_for_remote_installation
|
||||
|
||||
url = reverse(stats_for_remote_installation, kwargs=dict(remote_server_id=server_id))
|
||||
return Markup('<a href="{url}"><i class="fa fa-pie-chart"></i></a> {hostname}').format(
|
||||
url=url, hostname=hostname
|
||||
)
|
||||
|
||||
|
||||
def get_user_activity_summary(records: Collection[UserActivity]) -> Dict[str, Any]:
|
||||
#: The type annotation used above is clearly overly permissive.
|
||||
#: We should perhaps use TypedDict to clearly lay out the schema
|
||||
#: for the user activity summary.
|
||||
summary: Dict[str, Any] = {}
|
||||
|
||||
def update(action: str, record: UserActivity) -> None:
|
||||
if action not in summary:
|
||||
summary[action] = dict(
|
||||
count=record.count,
|
||||
last_visit=record.last_visit,
|
||||
)
|
||||
else:
|
||||
summary[action]["count"] += record.count
|
||||
summary[action]["last_visit"] = max(
|
||||
summary[action]["last_visit"],
|
||||
record.last_visit,
|
||||
)
|
||||
|
||||
if records:
|
||||
first_record = next(iter(records))
|
||||
summary["name"] = first_record.user_profile.full_name
|
||||
summary["user_profile_id"] = first_record.user_profile.id
|
||||
|
||||
for record in records:
|
||||
client = record.client.name
|
||||
query = str(record.query)
|
||||
|
||||
update("use", record)
|
||||
|
||||
if client == "API":
|
||||
m = re.match("/api/.*/external/(.*)", query)
|
||||
if m:
|
||||
client = m.group(1)
|
||||
update(client, record)
|
||||
|
||||
if client.startswith("desktop"):
|
||||
update("desktop", record)
|
||||
if client == "website":
|
||||
update("website", record)
|
||||
if ("send_message" in query) or re.search("/api/.*/external/.*", query):
|
||||
update("send", record)
|
||||
if query in [
|
||||
"/json/update_pointer",
|
||||
"/json/users/me/pointer",
|
||||
"/api/v1/update_pointer",
|
||||
"update_pointer_backend",
|
||||
]:
|
||||
update("pointer", record)
|
||||
update(client, record)
|
||||
|
||||
return summary
|
||||
420
analytics/views/installation_activity.py
Normal file
420
analytics/views/installation_activity.py
Normal file
@@ -0,0 +1,420 @@
|
||||
import itertools
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from contextlib import suppress
|
||||
from datetime import timedelta
|
||||
from typing import Dict, Optional, Tuple
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import connection
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.shortcuts import render
|
||||
from django.template import loader
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from markupsafe import Markup
|
||||
from psycopg2.sql import SQL
|
||||
|
||||
from analytics.lib.counts import COUNT_STATS
|
||||
from analytics.views.activity_common import (
|
||||
dictfetchall,
|
||||
get_page,
|
||||
realm_activity_link,
|
||||
realm_stats_link,
|
||||
realm_support_link,
|
||||
realm_url_link,
|
||||
)
|
||||
from analytics.views.support import get_plan_name
|
||||
from zerver.decorator import require_server_admin
|
||||
from zerver.lib.request import has_request_variables
|
||||
from zerver.lib.timestamp import timestamp_to_datetime
|
||||
from zerver.models import Realm, UserActivityInterval, get_org_type_display_name
|
||||
|
||||
if settings.BILLING_ENABLED:
|
||||
from corporate.lib.stripe import (
|
||||
estimate_annual_recurring_revenue_by_realm,
|
||||
get_realms_to_default_discount_dict,
|
||||
)
|
||||
|
||||
|
||||
def get_realm_day_counts() -> Dict[str, Dict[str, Markup]]:
|
||||
# To align with UTC days, we subtract an hour from end_time to
|
||||
# get the start_time, since the hour that starts at midnight was
|
||||
# on the previous day.
|
||||
query = SQL(
|
||||
"""
|
||||
select
|
||||
r.string_id,
|
||||
(now()::date - (end_time - interval '1 hour')::date) age,
|
||||
coalesce(sum(value), 0) cnt
|
||||
from zerver_realm r
|
||||
join analytics_realmcount rc on r.id = rc.realm_id
|
||||
where
|
||||
property = 'messages_sent:is_bot:hour'
|
||||
and
|
||||
subgroup = 'false'
|
||||
and
|
||||
end_time > now()::date - interval '8 day' - interval '1 hour'
|
||||
group by
|
||||
r.string_id,
|
||||
age
|
||||
"""
|
||||
)
|
||||
cursor = connection.cursor()
|
||||
cursor.execute(query)
|
||||
rows = dictfetchall(cursor)
|
||||
cursor.close()
|
||||
|
||||
counts: Dict[str, Dict[int, int]] = defaultdict(dict)
|
||||
for row in rows:
|
||||
counts[row["string_id"]][row["age"]] = row["cnt"]
|
||||
|
||||
def format_count(cnt: int, style: Optional[str] = None) -> Markup:
|
||||
if style is not None:
|
||||
good_bad = style
|
||||
elif cnt == min_cnt:
|
||||
good_bad = "bad"
|
||||
elif cnt == max_cnt:
|
||||
good_bad = "good"
|
||||
else:
|
||||
good_bad = "neutral"
|
||||
|
||||
return Markup('<td class="number {good_bad}">{cnt}</td>').format(good_bad=good_bad, cnt=cnt)
|
||||
|
||||
result = {}
|
||||
for string_id in counts:
|
||||
raw_cnts = [counts[string_id].get(age, 0) for age in range(8)]
|
||||
min_cnt = min(raw_cnts[1:])
|
||||
max_cnt = max(raw_cnts[1:])
|
||||
|
||||
cnts = format_count(raw_cnts[0], "neutral") + Markup().join(map(format_count, raw_cnts[1:]))
|
||||
result[string_id] = dict(cnts=cnts)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
|
||||
now = timezone_now()
|
||||
|
||||
query = SQL(
|
||||
"""
|
||||
SELECT
|
||||
realm.string_id,
|
||||
realm.date_created,
|
||||
realm.plan_type,
|
||||
realm.org_type,
|
||||
coalesce(wau_table.value, 0) wau_count,
|
||||
coalesce(dau_table.value, 0) dau_count,
|
||||
coalesce(user_count_table.value, 0) user_profile_count,
|
||||
coalesce(bot_count_table.value, 0) bot_count
|
||||
FROM
|
||||
zerver_realm as realm
|
||||
LEFT OUTER JOIN (
|
||||
SELECT
|
||||
value _14day_active_humans,
|
||||
realm_id
|
||||
from
|
||||
analytics_realmcount
|
||||
WHERE
|
||||
property = 'realm_active_humans::day'
|
||||
AND end_time = %(realm_active_humans_end_time)s
|
||||
) as _14day_active_humans_table ON realm.id = _14day_active_humans_table.realm_id
|
||||
LEFT OUTER JOIN (
|
||||
SELECT
|
||||
value,
|
||||
realm_id
|
||||
from
|
||||
analytics_realmcount
|
||||
WHERE
|
||||
property = '7day_actives::day'
|
||||
AND end_time = %(seven_day_actives_end_time)s
|
||||
) as wau_table ON realm.id = wau_table.realm_id
|
||||
LEFT OUTER JOIN (
|
||||
SELECT
|
||||
value,
|
||||
realm_id
|
||||
from
|
||||
analytics_realmcount
|
||||
WHERE
|
||||
property = '1day_actives::day'
|
||||
AND end_time = %(one_day_actives_end_time)s
|
||||
) as dau_table ON realm.id = dau_table.realm_id
|
||||
LEFT OUTER JOIN (
|
||||
SELECT
|
||||
value,
|
||||
realm_id
|
||||
from
|
||||
analytics_realmcount
|
||||
WHERE
|
||||
property = 'active_users_audit:is_bot:day'
|
||||
AND subgroup = 'false'
|
||||
AND end_time = %(active_users_audit_end_time)s
|
||||
) as user_count_table ON realm.id = user_count_table.realm_id
|
||||
LEFT OUTER JOIN (
|
||||
SELECT
|
||||
value,
|
||||
realm_id
|
||||
from
|
||||
analytics_realmcount
|
||||
WHERE
|
||||
property = 'active_users_audit:is_bot:day'
|
||||
AND subgroup = 'true'
|
||||
AND end_time = %(active_users_audit_end_time)s
|
||||
) as bot_count_table ON realm.id = bot_count_table.realm_id
|
||||
WHERE
|
||||
_14day_active_humans IS NOT NULL
|
||||
or realm.plan_type = 3
|
||||
ORDER BY
|
||||
dau_count DESC,
|
||||
string_id ASC
|
||||
"""
|
||||
)
|
||||
|
||||
cursor = connection.cursor()
|
||||
cursor.execute(
|
||||
query,
|
||||
{
|
||||
"realm_active_humans_end_time": COUNT_STATS[
|
||||
"realm_active_humans::day"
|
||||
].last_successful_fill(),
|
||||
"seven_day_actives_end_time": COUNT_STATS["7day_actives::day"].last_successful_fill(),
|
||||
"one_day_actives_end_time": COUNT_STATS["1day_actives::day"].last_successful_fill(),
|
||||
"active_users_audit_end_time": COUNT_STATS[
|
||||
"active_users_audit:is_bot:day"
|
||||
].last_successful_fill(),
|
||||
},
|
||||
)
|
||||
rows = dictfetchall(cursor)
|
||||
cursor.close()
|
||||
|
||||
for row in rows:
|
||||
row["date_created_day"] = row["date_created"].strftime("%Y-%m-%d")
|
||||
row["age_days"] = int((now - row["date_created"]).total_seconds() / 86400)
|
||||
row["is_new"] = row["age_days"] < 12 * 7
|
||||
|
||||
# get messages sent per day
|
||||
counts = get_realm_day_counts()
|
||||
for row in rows:
|
||||
try:
|
||||
row["history"] = counts[row["string_id"]]["cnts"]
|
||||
except Exception:
|
||||
row["history"] = ""
|
||||
|
||||
# estimate annual subscription revenue
|
||||
total_arr = 0
|
||||
if settings.BILLING_ENABLED:
|
||||
estimated_arrs = estimate_annual_recurring_revenue_by_realm()
|
||||
realms_to_default_discount = get_realms_to_default_discount_dict()
|
||||
|
||||
for row in rows:
|
||||
row["plan_type_string"] = get_plan_name(row["plan_type"])
|
||||
|
||||
string_id = row["string_id"]
|
||||
|
||||
if string_id in estimated_arrs:
|
||||
row["arr"] = estimated_arrs[string_id]
|
||||
|
||||
if row["plan_type"] in [Realm.PLAN_TYPE_STANDARD, Realm.PLAN_TYPE_PLUS]:
|
||||
row["effective_rate"] = 100 - int(realms_to_default_discount.get(string_id, 0))
|
||||
elif row["plan_type"] == Realm.PLAN_TYPE_STANDARD_FREE:
|
||||
row["effective_rate"] = 0
|
||||
elif (
|
||||
row["plan_type"] == Realm.PLAN_TYPE_LIMITED
|
||||
and string_id in realms_to_default_discount
|
||||
):
|
||||
row["effective_rate"] = 100 - int(realms_to_default_discount[string_id])
|
||||
else:
|
||||
row["effective_rate"] = ""
|
||||
|
||||
total_arr += sum(estimated_arrs.values())
|
||||
|
||||
for row in rows:
|
||||
row["org_type_string"] = get_org_type_display_name(row["org_type"])
|
||||
|
||||
# augment data with realm_minutes
|
||||
total_hours = 0.0
|
||||
for row in rows:
|
||||
string_id = row["string_id"]
|
||||
minutes = realm_minutes.get(string_id, 0.0)
|
||||
hours = minutes / 60.0
|
||||
total_hours += hours
|
||||
row["hours"] = str(int(hours))
|
||||
with suppress(Exception):
|
||||
row["hours_per_user"] = "{:.1f}".format(hours / row["dau_count"])
|
||||
|
||||
# formatting
|
||||
for row in rows:
|
||||
row["realm_url"] = realm_url_link(row["string_id"])
|
||||
row["stats_link"] = realm_stats_link(row["string_id"])
|
||||
row["support_link"] = realm_support_link(row["string_id"])
|
||||
row["string_id"] = realm_activity_link(row["string_id"])
|
||||
|
||||
# Count active sites
|
||||
num_active_sites = sum(row["dau_count"] >= 5 for row in rows)
|
||||
|
||||
# create totals
|
||||
total_dau_count = 0
|
||||
total_user_profile_count = 0
|
||||
total_bot_count = 0
|
||||
total_wau_count = 0
|
||||
for row in rows:
|
||||
total_dau_count += int(row["dau_count"])
|
||||
total_user_profile_count += int(row["user_profile_count"])
|
||||
total_bot_count += int(row["bot_count"])
|
||||
total_wau_count += int(row["wau_count"])
|
||||
|
||||
total_row = dict(
|
||||
string_id="Total",
|
||||
plan_type_string="",
|
||||
org_type_string="",
|
||||
effective_rate="",
|
||||
arr=total_arr,
|
||||
realm_url="",
|
||||
stats_link="",
|
||||
support_link="",
|
||||
date_created_day="",
|
||||
dau_count=total_dau_count,
|
||||
user_profile_count=total_user_profile_count,
|
||||
bot_count=total_bot_count,
|
||||
hours=int(total_hours),
|
||||
wau_count=total_wau_count,
|
||||
)
|
||||
|
||||
rows.insert(0, total_row)
|
||||
|
||||
content = loader.render_to_string(
|
||||
"analytics/realm_summary_table.html",
|
||||
dict(
|
||||
rows=rows,
|
||||
num_active_sites=num_active_sites,
|
||||
utctime=now.strftime("%Y-%m-%d %H:%M %Z"),
|
||||
billing_enabled=settings.BILLING_ENABLED,
|
||||
),
|
||||
)
|
||||
return content
|
||||
|
||||
|
||||
def user_activity_intervals() -> Tuple[Markup, Dict[str, float]]:
|
||||
day_end = timestamp_to_datetime(time.time())
|
||||
day_start = day_end - timedelta(hours=24)
|
||||
|
||||
output = Markup()
|
||||
output += "Per-user online duration for the last 24 hours:\n"
|
||||
total_duration = timedelta(0)
|
||||
|
||||
all_intervals = (
|
||||
UserActivityInterval.objects.filter(
|
||||
end__gte=day_start,
|
||||
start__lte=day_end,
|
||||
)
|
||||
.select_related(
|
||||
"user_profile",
|
||||
"user_profile__realm",
|
||||
)
|
||||
.only(
|
||||
"start",
|
||||
"end",
|
||||
"user_profile__delivery_email",
|
||||
"user_profile__realm__string_id",
|
||||
)
|
||||
.order_by(
|
||||
"user_profile__realm__string_id",
|
||||
"user_profile__delivery_email",
|
||||
)
|
||||
)
|
||||
|
||||
by_string_id = lambda row: row.user_profile.realm.string_id
|
||||
by_email = lambda row: row.user_profile.delivery_email
|
||||
|
||||
realm_minutes = {}
|
||||
|
||||
for string_id, realm_intervals in itertools.groupby(all_intervals, by_string_id):
|
||||
realm_duration = timedelta(0)
|
||||
output += Markup("<hr>") + f"{string_id}\n"
|
||||
for email, intervals in itertools.groupby(realm_intervals, by_email):
|
||||
duration = timedelta(0)
|
||||
for interval in intervals:
|
||||
start = max(day_start, interval.start)
|
||||
end = min(day_end, interval.end)
|
||||
duration += end - start
|
||||
|
||||
total_duration += duration
|
||||
realm_duration += duration
|
||||
output += f" {email:<37}{duration}\n"
|
||||
|
||||
realm_minutes[string_id] = realm_duration.total_seconds() / 60
|
||||
|
||||
output += f"\nTotal duration: {total_duration}\n"
|
||||
output += f"\nTotal duration in minutes: {total_duration.total_seconds() / 60.}\n"
|
||||
output += f"Total duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}"
|
||||
content = Markup("<pre>{}</pre>").format(output)
|
||||
return content, realm_minutes
|
||||
|
||||
|
||||
@require_server_admin
|
||||
@has_request_variables
|
||||
def get_installation_activity(request: HttpRequest) -> HttpResponse:
|
||||
duration_content, realm_minutes = user_activity_intervals()
|
||||
counts_content: str = realm_summary_table(realm_minutes)
|
||||
data = [
|
||||
("Counts", counts_content),
|
||||
("Durations", duration_content),
|
||||
]
|
||||
|
||||
title = "Activity"
|
||||
|
||||
return render(
|
||||
request,
|
||||
"analytics/activity.html",
|
||||
context=dict(data=data, title=title, is_home=True),
|
||||
)
|
||||
|
||||
|
||||
@require_server_admin
|
||||
def get_integrations_activity(request: HttpRequest) -> HttpResponse:
|
||||
title = "Integrations by client"
|
||||
|
||||
query = SQL(
|
||||
"""
|
||||
select
|
||||
case
|
||||
when query like '%%external%%' then split_part(query, '/', 5)
|
||||
else client.name
|
||||
end client_name,
|
||||
realm.string_id,
|
||||
sum(count) as hits,
|
||||
max(last_visit) as last_time
|
||||
from zerver_useractivity ua
|
||||
join zerver_client client on client.id = ua.client_id
|
||||
join zerver_userprofile up on up.id = ua.user_profile_id
|
||||
join zerver_realm realm on realm.id = up.realm_id
|
||||
where
|
||||
(query in ('send_message_backend', '/api/v1/send_message')
|
||||
and client.name not in ('Android', 'ZulipiOS')
|
||||
and client.name not like 'test: Zulip%%'
|
||||
)
|
||||
or
|
||||
query like '%%external%%'
|
||||
group by client_name, string_id
|
||||
having max(last_visit) > now() - interval '2 week'
|
||||
order by client_name, string_id
|
||||
"""
|
||||
)
|
||||
|
||||
cols = [
|
||||
"Client",
|
||||
"Realm",
|
||||
"Hits",
|
||||
"Last time",
|
||||
]
|
||||
|
||||
integrations_activity = get_page(query, cols, title)
|
||||
|
||||
return render(
|
||||
request,
|
||||
"analytics/activity_details_template.html",
|
||||
context=dict(
|
||||
data=integrations_activity["content"],
|
||||
title=integrations_activity["title"],
|
||||
is_home=False,
|
||||
),
|
||||
)
|
||||
245
analytics/views/realm_activity.py
Normal file
245
analytics/views/realm_activity.py
Normal file
@@ -0,0 +1,245 @@
|
||||
import itertools
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, List, Optional, Set, Tuple
|
||||
|
||||
from django.db import connection
|
||||
from django.db.models import QuerySet
|
||||
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
|
||||
from django.shortcuts import render
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from psycopg2.sql import SQL
|
||||
|
||||
from analytics.views.activity_common import (
|
||||
format_date_for_activity_reports,
|
||||
get_user_activity_summary,
|
||||
make_table,
|
||||
realm_stats_link,
|
||||
user_activity_link,
|
||||
)
|
||||
from zerver.decorator import require_server_admin
|
||||
from zerver.models import Realm, UserActivity
|
||||
|
||||
|
||||
def get_user_activity_records_for_realm(realm: str, is_bot: bool) -> QuerySet[UserActivity]:
|
||||
fields = [
|
||||
"user_profile__full_name",
|
||||
"user_profile__delivery_email",
|
||||
"query",
|
||||
"client__name",
|
||||
"count",
|
||||
"last_visit",
|
||||
]
|
||||
|
||||
records = UserActivity.objects.filter(
|
||||
user_profile__realm__string_id=realm,
|
||||
user_profile__is_active=True,
|
||||
user_profile__is_bot=is_bot,
|
||||
)
|
||||
records = records.order_by("user_profile__delivery_email", "-last_visit")
|
||||
records = records.select_related("user_profile", "client").only(*fields)
|
||||
return records
|
||||
|
||||
|
||||
def realm_user_summary_table(
|
||||
all_records: QuerySet[UserActivity], admin_emails: Set[str]
|
||||
) -> Tuple[Dict[str, Any], str]:
|
||||
user_records = {}
|
||||
|
||||
def by_email(record: UserActivity) -> str:
|
||||
return record.user_profile.delivery_email
|
||||
|
||||
for email, records in itertools.groupby(all_records, by_email):
|
||||
user_records[email] = get_user_activity_summary(list(records))
|
||||
|
||||
def get_last_visit(user_summary: Dict[str, Dict[str, datetime]], k: str) -> Optional[datetime]:
|
||||
if k in user_summary:
|
||||
return user_summary[k]["last_visit"]
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_count(user_summary: Dict[str, Dict[str, str]], k: str) -> str:
|
||||
if k in user_summary:
|
||||
return user_summary[k]["count"]
|
||||
else:
|
||||
return ""
|
||||
|
||||
def is_recent(val: datetime) -> bool:
|
||||
age = timezone_now() - val
|
||||
return age.total_seconds() < 5 * 60
|
||||
|
||||
rows = []
|
||||
for email, user_summary in user_records.items():
|
||||
email_link = user_activity_link(email, user_summary["user_profile_id"])
|
||||
sent_count = get_count(user_summary, "send")
|
||||
cells = [user_summary["name"], email_link, sent_count]
|
||||
row_class = ""
|
||||
for field in ["use", "send", "pointer", "desktop", "ZulipiOS", "Android"]:
|
||||
visit = get_last_visit(user_summary, field)
|
||||
if field == "use":
|
||||
if visit and is_recent(visit):
|
||||
row_class += " recently_active"
|
||||
if email in admin_emails:
|
||||
row_class += " admin"
|
||||
val = format_date_for_activity_reports(visit)
|
||||
cells.append(val)
|
||||
row = dict(cells=cells, row_class=row_class)
|
||||
rows.append(row)
|
||||
|
||||
def by_used_time(row: Dict[str, Any]) -> str:
|
||||
return row["cells"][3]
|
||||
|
||||
rows = sorted(rows, key=by_used_time, reverse=True)
|
||||
|
||||
cols = [
|
||||
"Name",
|
||||
"Email",
|
||||
"Total sent",
|
||||
"Heard from",
|
||||
"Message sent",
|
||||
"Pointer motion",
|
||||
"Desktop",
|
||||
"ZulipiOS",
|
||||
"Android",
|
||||
]
|
||||
|
||||
title = "Summary"
|
||||
|
||||
content = make_table(title, cols, rows, has_row_class=True)
|
||||
return user_records, content
|
||||
|
||||
|
||||
def realm_client_table(user_summaries: Dict[str, Dict[str, Any]]) -> str:
|
||||
exclude_keys = [
|
||||
"internal",
|
||||
"name",
|
||||
"user_profile_id",
|
||||
"use",
|
||||
"send",
|
||||
"pointer",
|
||||
"website",
|
||||
"desktop",
|
||||
]
|
||||
|
||||
rows = []
|
||||
for email, user_summary in user_summaries.items():
|
||||
email_link = user_activity_link(email, user_summary["user_profile_id"])
|
||||
name = user_summary["name"]
|
||||
for k, v in user_summary.items():
|
||||
if k in exclude_keys:
|
||||
continue
|
||||
client = k
|
||||
count = v["count"]
|
||||
last_visit = v["last_visit"]
|
||||
row = [
|
||||
format_date_for_activity_reports(last_visit),
|
||||
client,
|
||||
name,
|
||||
email_link,
|
||||
count,
|
||||
]
|
||||
rows.append(row)
|
||||
|
||||
rows = sorted(rows, key=lambda r: r[0], reverse=True)
|
||||
|
||||
cols = [
|
||||
"Last visit",
|
||||
"Client",
|
||||
"Name",
|
||||
"Email",
|
||||
"Count",
|
||||
]
|
||||
|
||||
title = "Clients"
|
||||
|
||||
return make_table(title, cols, rows)
|
||||
|
||||
|
||||
def sent_messages_report(realm: str) -> str:
|
||||
title = "Recently sent messages for " + realm
|
||||
|
||||
cols = [
|
||||
"Date",
|
||||
"Humans",
|
||||
"Bots",
|
||||
]
|
||||
|
||||
# Uses index: zerver_message_realm_date_sent
|
||||
query = SQL(
|
||||
"""
|
||||
select
|
||||
series.day::date,
|
||||
user_messages.humans,
|
||||
user_messages.bots
|
||||
from (
|
||||
select generate_series(
|
||||
(now()::date - interval '2 week'),
|
||||
now()::date,
|
||||
interval '1 day'
|
||||
) as day
|
||||
) as series
|
||||
left join (
|
||||
select
|
||||
date_sent::date date_sent,
|
||||
count(*) filter (where not up.is_bot) as humans,
|
||||
count(*) filter (where up.is_bot) as bots
|
||||
from zerver_message m
|
||||
join zerver_userprofile up on up.id = m.sender_id
|
||||
join zerver_realm r on r.id = up.realm_id
|
||||
where
|
||||
r.string_id = %s
|
||||
and
|
||||
date_sent > now() - interval '2 week'
|
||||
and
|
||||
m.realm_id = r.id
|
||||
group by
|
||||
date_sent::date
|
||||
order by
|
||||
date_sent::date
|
||||
) user_messages on
|
||||
series.day = user_messages.date_sent
|
||||
"""
|
||||
)
|
||||
cursor = connection.cursor()
|
||||
cursor.execute(query, [realm])
|
||||
rows = cursor.fetchall()
|
||||
cursor.close()
|
||||
|
||||
return make_table(title, cols, rows)
|
||||
|
||||
|
||||
@require_server_admin
|
||||
def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse:
|
||||
data: List[Tuple[str, str]] = []
|
||||
all_user_records: Dict[str, Any] = {}
|
||||
|
||||
try:
|
||||
admins = Realm.objects.get(string_id=realm_str).get_human_admin_users()
|
||||
except Realm.DoesNotExist:
|
||||
return HttpResponseNotFound()
|
||||
|
||||
admin_emails = {admin.delivery_email for admin in admins}
|
||||
|
||||
for is_bot, page_title in [(False, "Humans"), (True, "Bots")]:
|
||||
all_records = get_user_activity_records_for_realm(realm_str, is_bot)
|
||||
|
||||
user_records, content = realm_user_summary_table(all_records, admin_emails)
|
||||
all_user_records.update(user_records)
|
||||
|
||||
data += [(page_title, content)]
|
||||
|
||||
page_title = "Clients"
|
||||
content = realm_client_table(all_user_records)
|
||||
data += [(page_title, content)]
|
||||
|
||||
page_title = "History"
|
||||
content = sent_messages_report(realm_str)
|
||||
data += [(page_title, content)]
|
||||
|
||||
title = realm_str
|
||||
realm_stats = realm_stats_link(realm_str)
|
||||
|
||||
return render(
|
||||
request,
|
||||
"analytics/activity.html",
|
||||
context=dict(data=data, realm_stats_link=realm_stats, title=title),
|
||||
)
|
||||
59
analytics/views/remote_activity.py
Normal file
59
analytics/views/remote_activity.py
Normal file
@@ -0,0 +1,59 @@
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.shortcuts import render
|
||||
from psycopg2.sql import SQL
|
||||
|
||||
from analytics.views.activity_common import get_page
|
||||
from zerver.decorator import require_server_admin
|
||||
|
||||
|
||||
@require_server_admin
|
||||
def get_remote_server_activity(request: HttpRequest) -> HttpResponse:
|
||||
title = "Remote servers"
|
||||
|
||||
query = SQL(
|
||||
"""
|
||||
with icount as (
|
||||
select
|
||||
server_id,
|
||||
max(value) as max_value,
|
||||
max(end_time) as max_end_time
|
||||
from zilencer_remoteinstallationcount
|
||||
where
|
||||
property='active_users:is_bot:day'
|
||||
and subgroup='false'
|
||||
group by server_id
|
||||
),
|
||||
remote_push_devices as (
|
||||
select server_id, count(distinct(user_id)) as push_user_count from zilencer_remotepushdevicetoken
|
||||
group by server_id
|
||||
)
|
||||
select
|
||||
rserver.id,
|
||||
rserver.hostname,
|
||||
rserver.contact_email,
|
||||
max_value,
|
||||
push_user_count,
|
||||
max_end_time
|
||||
from zilencer_remotezulipserver rserver
|
||||
left join icount on icount.server_id = rserver.id
|
||||
left join remote_push_devices on remote_push_devices.server_id = rserver.id
|
||||
order by max_value DESC NULLS LAST, push_user_count DESC NULLS LAST
|
||||
"""
|
||||
)
|
||||
|
||||
cols = [
|
||||
"ID",
|
||||
"Hostname",
|
||||
"Contact email",
|
||||
"Analytics users",
|
||||
"Mobile users",
|
||||
"Last update time",
|
||||
]
|
||||
|
||||
remote_servers = get_page(query, cols, title, totals_columns=[3, 4])
|
||||
|
||||
return render(
|
||||
request,
|
||||
"analytics/activity_details_template.html",
|
||||
context=dict(data=remote_servers["content"], title=remote_servers["title"], is_home=False),
|
||||
)
|
||||
@@ -1,7 +1,7 @@
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Annotated, Any, Optional, TypeAlias, TypeVar, cast
|
||||
from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar, Union, cast
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.models import QuerySet
|
||||
@@ -10,7 +10,7 @@ from django.shortcuts import render
|
||||
from django.utils import translation
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from django.utils.translation import gettext as _
|
||||
from pydantic import BeforeValidator, Json, NonNegativeInt
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
from analytics.lib.counts import COUNT_STATS, CountStat
|
||||
from analytics.lib.time_utils import time_range
|
||||
@@ -31,12 +31,12 @@ from zerver.decorator import (
|
||||
)
|
||||
from zerver.lib.exceptions import JsonableError
|
||||
from zerver.lib.i18n import get_and_set_request_language, get_language_translation_data
|
||||
from zerver.lib.request import REQ, has_request_variables
|
||||
from zerver.lib.response import json_success
|
||||
from zerver.lib.streams import access_stream_by_id
|
||||
from zerver.lib.timestamp import convert_to_UTC
|
||||
from zerver.lib.typed_endpoint import PathOnly, typed_endpoint
|
||||
from zerver.models import Client, Realm, Stream, UserProfile
|
||||
from zerver.models.realms import get_realm
|
||||
from zerver.lib.validator import to_non_negative_int
|
||||
from zerver.models import Client, Realm, Stream, UserProfile, get_realm
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
from zilencer.models import RemoteInstallationCount, RemoteRealmCount, RemoteZulipServer
|
||||
@@ -51,9 +51,11 @@ def is_analytics_ready(realm: Realm) -> bool:
|
||||
def render_stats(
|
||||
request: HttpRequest,
|
||||
data_url_suffix: str,
|
||||
realm: Realm | None,
|
||||
realm: Optional[Realm],
|
||||
*,
|
||||
title: str | None = None,
|
||||
title: Optional[str] = None,
|
||||
for_installation: bool = False,
|
||||
remote: bool = False,
|
||||
analytics_ready: bool = True,
|
||||
) -> HttpResponse:
|
||||
assert request.user.is_authenticated
|
||||
@@ -73,20 +75,21 @@ def render_stats(
|
||||
guest_users = None
|
||||
space_used = None
|
||||
|
||||
page_params = dict(
|
||||
data_url_suffix=data_url_suffix,
|
||||
for_installation=for_installation,
|
||||
remote=remote,
|
||||
upload_space_used=space_used,
|
||||
guest_users=guest_users,
|
||||
)
|
||||
|
||||
request_language = get_and_set_request_language(
|
||||
request,
|
||||
request.user.default_language,
|
||||
translation.get_language_from_path(request.path_info),
|
||||
)
|
||||
|
||||
# Sync this with stats_params_schema in base_page_params.ts.
|
||||
page_params = dict(
|
||||
page_type="stats",
|
||||
data_url_suffix=data_url_suffix,
|
||||
upload_space_used=space_used,
|
||||
guest_users=guest_users,
|
||||
translation_data=get_language_translation_data(request_language),
|
||||
)
|
||||
page_params["translation_data"] = get_language_translation_data(request_language)
|
||||
|
||||
return render(
|
||||
request,
|
||||
@@ -111,8 +114,8 @@ def stats(request: HttpRequest) -> HttpResponse:
|
||||
|
||||
|
||||
@require_server_admin
|
||||
@typed_endpoint
|
||||
def stats_for_realm(request: HttpRequest, *, realm_str: PathOnly[str]) -> HttpResponse:
|
||||
@has_request_variables
|
||||
def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse:
|
||||
try:
|
||||
realm = get_realm(realm_str)
|
||||
except Realm.DoesNotExist:
|
||||
@@ -127,9 +130,9 @@ def stats_for_realm(request: HttpRequest, *, realm_str: PathOnly[str]) -> HttpRe
|
||||
|
||||
|
||||
@require_server_admin
|
||||
@typed_endpoint
|
||||
@has_request_variables
|
||||
def stats_for_remote_realm(
|
||||
request: HttpRequest, *, remote_server_id: PathOnly[int], remote_realm_id: PathOnly[int]
|
||||
request: HttpRequest, remote_server_id: int, remote_realm_id: int
|
||||
) -> HttpResponse:
|
||||
assert settings.ZILENCER_ENABLED
|
||||
server = RemoteZulipServer.objects.get(id=remote_server_id)
|
||||
@@ -142,96 +145,59 @@ def stats_for_remote_realm(
|
||||
|
||||
|
||||
@require_server_admin_api
|
||||
@typed_endpoint
|
||||
@has_request_variables
|
||||
def get_chart_data_for_realm(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
/,
|
||||
*,
|
||||
realm_str: PathOnly[str],
|
||||
chart_name: str,
|
||||
min_length: Json[NonNegativeInt] | None = None,
|
||||
start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
request: HttpRequest, /, user_profile: UserProfile, realm_str: str, **kwargs: Any
|
||||
) -> HttpResponse:
|
||||
try:
|
||||
realm = get_realm(realm_str)
|
||||
except Realm.DoesNotExist:
|
||||
raise JsonableError(_("Invalid organization"))
|
||||
|
||||
return do_get_chart_data(
|
||||
request,
|
||||
user_profile,
|
||||
realm=realm,
|
||||
chart_name=chart_name,
|
||||
min_length=min_length,
|
||||
start=start,
|
||||
end=end,
|
||||
)
|
||||
return get_chart_data(request, user_profile, realm=realm, **kwargs)
|
||||
|
||||
|
||||
@require_non_guest_user
|
||||
@typed_endpoint
|
||||
@has_request_variables
|
||||
def get_chart_data_for_stream(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
*,
|
||||
stream_id: PathOnly[int],
|
||||
chart_name: str,
|
||||
min_length: Json[NonNegativeInt] | None = None,
|
||||
start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
request: HttpRequest, /, user_profile: UserProfile, stream_id: int
|
||||
) -> HttpResponse:
|
||||
stream, ignored_sub = access_stream_by_id(
|
||||
user_profile,
|
||||
stream_id,
|
||||
require_content_access=False,
|
||||
require_active=True,
|
||||
allow_realm_admin=True,
|
||||
)
|
||||
|
||||
return do_get_chart_data(
|
||||
request,
|
||||
user_profile,
|
||||
stream=stream,
|
||||
chart_name=chart_name,
|
||||
min_length=min_length,
|
||||
start=start,
|
||||
end=end,
|
||||
)
|
||||
return get_chart_data(request, user_profile, stream=stream)
|
||||
|
||||
|
||||
@require_server_admin_api
|
||||
@typed_endpoint
|
||||
@has_request_variables
|
||||
def get_chart_data_for_remote_realm(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
/,
|
||||
*,
|
||||
remote_server_id: PathOnly[int],
|
||||
remote_realm_id: PathOnly[int],
|
||||
chart_name: str,
|
||||
min_length: Json[NonNegativeInt] | None = None,
|
||||
start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
user_profile: UserProfile,
|
||||
remote_server_id: int,
|
||||
remote_realm_id: int,
|
||||
**kwargs: Any,
|
||||
) -> HttpResponse:
|
||||
assert settings.ZILENCER_ENABLED
|
||||
server = RemoteZulipServer.objects.get(id=remote_server_id)
|
||||
return do_get_chart_data(
|
||||
return get_chart_data(
|
||||
request,
|
||||
user_profile,
|
||||
server=server,
|
||||
remote=True,
|
||||
remote_realm_id=remote_realm_id,
|
||||
chart_name=chart_name,
|
||||
min_length=min_length,
|
||||
start=start,
|
||||
end=end,
|
||||
remote_realm_id=int(remote_realm_id),
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
|
||||
@require_server_admin
|
||||
def stats_for_installation(request: HttpRequest) -> HttpResponse:
|
||||
assert request.user.is_authenticated
|
||||
return render_stats(request, "/installation", None, title="installation")
|
||||
return render_stats(request, "/installation", None, title="installation", for_installation=True)
|
||||
|
||||
|
||||
@require_server_admin
|
||||
@@ -243,106 +209,66 @@ def stats_for_remote_installation(request: HttpRequest, remote_server_id: int) -
|
||||
f"/remote/{server.id}/installation",
|
||||
None,
|
||||
title=f"remote installation {server.hostname}",
|
||||
)
|
||||
|
||||
|
||||
@require_server_admin_api
|
||||
@typed_endpoint
|
||||
def get_chart_data_for_installation(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
/,
|
||||
*,
|
||||
chart_name: str,
|
||||
min_length: Json[NonNegativeInt] | None = None,
|
||||
start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
) -> HttpResponse:
|
||||
return do_get_chart_data(
|
||||
request,
|
||||
user_profile,
|
||||
for_installation=True,
|
||||
chart_name=chart_name,
|
||||
min_length=min_length,
|
||||
start=start,
|
||||
end=end,
|
||||
remote=True,
|
||||
)
|
||||
|
||||
|
||||
@require_server_admin_api
|
||||
@typed_endpoint
|
||||
@has_request_variables
|
||||
def get_chart_data_for_installation(
|
||||
request: HttpRequest, /, user_profile: UserProfile, chart_name: str = REQ(), **kwargs: Any
|
||||
) -> HttpResponse:
|
||||
return get_chart_data(request, user_profile, for_installation=True, **kwargs)
|
||||
|
||||
|
||||
@require_server_admin_api
|
||||
@has_request_variables
|
||||
def get_chart_data_for_remote_installation(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
/,
|
||||
*,
|
||||
remote_server_id: PathOnly[int],
|
||||
chart_name: str,
|
||||
min_length: Json[NonNegativeInt] | None = None,
|
||||
start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
user_profile: UserProfile,
|
||||
remote_server_id: int,
|
||||
chart_name: str = REQ(),
|
||||
**kwargs: Any,
|
||||
) -> HttpResponse:
|
||||
assert settings.ZILENCER_ENABLED
|
||||
server = RemoteZulipServer.objects.get(id=remote_server_id)
|
||||
return do_get_chart_data(
|
||||
return get_chart_data(
|
||||
request,
|
||||
user_profile,
|
||||
for_installation=True,
|
||||
remote=True,
|
||||
server=server,
|
||||
chart_name=chart_name,
|
||||
min_length=min_length,
|
||||
start=start,
|
||||
end=end,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
|
||||
@require_non_guest_user
|
||||
@typed_endpoint
|
||||
@has_request_variables
|
||||
def get_chart_data(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
*,
|
||||
chart_name: str,
|
||||
min_length: Json[NonNegativeInt] | None = None,
|
||||
start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
) -> HttpResponse:
|
||||
return do_get_chart_data(
|
||||
request,
|
||||
user_profile,
|
||||
chart_name=chart_name,
|
||||
min_length=min_length,
|
||||
start=start,
|
||||
end=end,
|
||||
)
|
||||
|
||||
|
||||
@require_non_guest_user
|
||||
def do_get_chart_data(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
*,
|
||||
# Common parameters supported by all stats endpoints.
|
||||
chart_name: str,
|
||||
min_length: NonNegativeInt | None = None,
|
||||
start: datetime | None = None,
|
||||
end: datetime | None = None,
|
||||
# The following parameters are only used by wrapping functions for
|
||||
# various contexts; the callers are responsible for validating them.
|
||||
realm: Realm | None = None,
|
||||
chart_name: str = REQ(),
|
||||
min_length: Optional[int] = REQ(converter=to_non_negative_int, default=None),
|
||||
start: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
|
||||
end: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
|
||||
# These last several parameters are only used by functions
|
||||
# wrapping get_chart_data; the callers are responsible for
|
||||
# parsing/validation/authorization for them.
|
||||
realm: Optional[Realm] = None,
|
||||
for_installation: bool = False,
|
||||
remote: bool = False,
|
||||
remote_realm_id: int | None = None,
|
||||
remote_realm_id: Optional[int] = None,
|
||||
server: Optional["RemoteZulipServer"] = None,
|
||||
stream: Stream | None = None,
|
||||
stream: Optional[Stream] = None,
|
||||
) -> HttpResponse:
|
||||
TableType: TypeAlias = (
|
||||
type["RemoteInstallationCount"]
|
||||
| type[InstallationCount]
|
||||
| type["RemoteRealmCount"]
|
||||
| type[RealmCount]
|
||||
)
|
||||
TableType: TypeAlias = Union[
|
||||
Type["RemoteInstallationCount"],
|
||||
Type[InstallationCount],
|
||||
Type["RemoteRealmCount"],
|
||||
Type[RealmCount],
|
||||
]
|
||||
if for_installation:
|
||||
if remote:
|
||||
assert settings.ZILENCER_ENABLED
|
||||
@@ -359,9 +285,9 @@ def do_get_chart_data(
|
||||
else:
|
||||
aggregate_table = RealmCount
|
||||
|
||||
tables: (
|
||||
tuple[TableType] | tuple[TableType, type[UserCount]] | tuple[TableType, type[StreamCount]]
|
||||
)
|
||||
tables: Union[
|
||||
Tuple[TableType], Tuple[TableType, Type[UserCount]], Tuple[TableType, Type[StreamCount]]
|
||||
]
|
||||
|
||||
if chart_name == "number_of_humans":
|
||||
stats = [
|
||||
@@ -370,7 +296,7 @@ def do_get_chart_data(
|
||||
COUNT_STATS["active_users_audit:is_bot:day"],
|
||||
]
|
||||
tables = (aggregate_table,)
|
||||
subgroup_to_label: dict[CountStat, dict[str | None, str]] = {
|
||||
subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = {
|
||||
stats[0]: {None: "_1day"},
|
||||
stats[1]: {None: "_15day"},
|
||||
stats[2]: {"false": "all_time"},
|
||||
@@ -388,8 +314,8 @@ def do_get_chart_data(
|
||||
tables = (aggregate_table, UserCount)
|
||||
subgroup_to_label = {
|
||||
stats[0]: {
|
||||
"public_stream": _("Public channels"),
|
||||
"private_stream": _("Private channels"),
|
||||
"public_stream": _("Public streams"),
|
||||
"private_stream": _("Private streams"),
|
||||
"private_message": _("Direct messages"),
|
||||
"huddle_message": _("Group direct messages"),
|
||||
}
|
||||
@@ -414,7 +340,7 @@ def do_get_chart_data(
|
||||
elif chart_name == "messages_sent_by_stream":
|
||||
if stream is None:
|
||||
raise JsonableError(
|
||||
_("Missing channel for chart: {chart_name}").format(chart_name=chart_name)
|
||||
_("Missing stream for chart: {chart_name}").format(chart_name=chart_name)
|
||||
)
|
||||
stats = [COUNT_STATS["messages_in_stream:is_bot:day"]]
|
||||
tables = (aggregate_table, StreamCount)
|
||||
@@ -450,20 +376,18 @@ def do_get_chart_data(
|
||||
assert server is not None
|
||||
assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
|
||||
aggregate_table_remote = cast(
|
||||
type[RemoteInstallationCount] | type[RemoteRealmCount], aggregate_table
|
||||
Union[Type[RemoteInstallationCount], Type[RemoteRealmCount]], aggregate_table
|
||||
) # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
|
||||
if not aggregate_table_remote.objects.filter(server=server).exists():
|
||||
raise JsonableError(
|
||||
_("No analytics data available. Please contact your server administrator.")
|
||||
)
|
||||
if start is None:
|
||||
first = (
|
||||
aggregate_table_remote.objects.filter(server=server).order_by("remote_id").first()
|
||||
)
|
||||
first = aggregate_table_remote.objects.filter(server=server).first()
|
||||
assert first is not None
|
||||
start = first.end_time
|
||||
if end is None:
|
||||
last = aggregate_table_remote.objects.filter(server=server).order_by("remote_id").last()
|
||||
last = aggregate_table_remote.objects.filter(server=server).last()
|
||||
assert last is not None
|
||||
end = last.end_time
|
||||
else:
|
||||
@@ -496,7 +420,7 @@ def do_get_chart_data(
|
||||
|
||||
assert len({stat.frequency for stat in stats}) == 1
|
||||
end_times = time_range(start, end, stats[0].frequency, min_length)
|
||||
data: dict[str, Any] = {
|
||||
data: Dict[str, Any] = {
|
||||
"end_times": [int(end_time.timestamp()) for end_time in end_times],
|
||||
"frequency": stats[0].frequency,
|
||||
}
|
||||
@@ -549,7 +473,7 @@ def do_get_chart_data(
|
||||
return json_success(request, data=data)
|
||||
|
||||
|
||||
def sort_by_totals(value_arrays: dict[str, list[int]]) -> list[str]:
|
||||
def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
|
||||
totals = sorted(((sum(values), label) for label, values in value_arrays.items()), reverse=True)
|
||||
return [label for total, label in totals]
|
||||
|
||||
@@ -560,10 +484,12 @@ def sort_by_totals(value_arrays: dict[str, list[int]]) -> list[str]:
|
||||
# understanding the realm's traffic and the user's traffic. This function
|
||||
# tries to rank the clients so that taking the first N elements of the
|
||||
# sorted list has a reasonable chance of doing so.
|
||||
def sort_client_labels(data: dict[str, dict[str, list[int]]]) -> list[str]:
|
||||
def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
|
||||
realm_order = sort_by_totals(data["everyone"])
|
||||
user_order = sort_by_totals(data["user"])
|
||||
label_sort_values: dict[str, float] = {label: i for i, label in enumerate(realm_order)}
|
||||
label_sort_values: Dict[str, float] = {}
|
||||
for i, label in enumerate(realm_order):
|
||||
label_sort_values[label] = i
|
||||
for i, label in enumerate(user_order):
|
||||
label_sort_values[label] = min(i - 0.1, label_sort_values.get(label, i))
|
||||
return [label for label, sort_value in sorted(label_sort_values.items(), key=lambda x: x[1])]
|
||||
@@ -572,7 +498,7 @@ def sort_client_labels(data: dict[str, dict[str, list[int]]]) -> list[str]:
|
||||
CountT = TypeVar("CountT", bound=BaseCount)
|
||||
|
||||
|
||||
def table_filtered_to_id(table: type[CountT], key_id: int) -> QuerySet[CountT]:
|
||||
def table_filtered_to_id(table: Type[CountT], key_id: int) -> QuerySet[CountT]:
|
||||
if table == RealmCount:
|
||||
return table._default_manager.filter(realm_id=key_id)
|
||||
elif table == UserCount:
|
||||
@@ -603,18 +529,16 @@ def client_label_map(name: str) -> str:
|
||||
if name == "ZulipiOS":
|
||||
return "Old iOS app"
|
||||
if name == "ZulipMobile":
|
||||
return "Mobile app (React Native)"
|
||||
if name in ["ZulipFlutter", "ZulipMobile/flutter"]:
|
||||
return "Mobile app beta (Flutter)"
|
||||
return "Mobile app"
|
||||
if name in ["ZulipPython", "API: Python"]:
|
||||
return "Python API"
|
||||
if name.startswith("Zulip") and name.endswith("Webhook"):
|
||||
return name.removeprefix("Zulip").removesuffix("Webhook") + " webhook"
|
||||
return name[len("Zulip") : -len("Webhook")] + " webhook"
|
||||
return name
|
||||
|
||||
|
||||
def rewrite_client_arrays(value_arrays: dict[str, list[int]]) -> dict[str, list[int]]:
|
||||
mapped_arrays: dict[str, list[int]] = {}
|
||||
def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[int]]:
|
||||
mapped_arrays: Dict[str, List[int]] = {}
|
||||
for label, array in value_arrays.items():
|
||||
mapped_label = client_label_map(label)
|
||||
if mapped_label in mapped_arrays:
|
||||
@@ -627,18 +551,18 @@ def rewrite_client_arrays(value_arrays: dict[str, list[int]]) -> dict[str, list[
|
||||
|
||||
def get_time_series_by_subgroup(
|
||||
stat: CountStat,
|
||||
table: type[BaseCount],
|
||||
table: Type[BaseCount],
|
||||
key_id: int,
|
||||
end_times: list[datetime],
|
||||
subgroup_to_label: dict[str | None, str],
|
||||
end_times: List[datetime],
|
||||
subgroup_to_label: Dict[Optional[str], str],
|
||||
include_empty_subgroups: bool,
|
||||
) -> dict[str, list[int]]:
|
||||
) -> Dict[str, List[int]]:
|
||||
queryset = (
|
||||
table_filtered_to_id(table, key_id)
|
||||
.filter(property=stat.property)
|
||||
.values_list("subgroup", "end_time", "value")
|
||||
)
|
||||
value_dicts: dict[str | None, dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
|
||||
value_dicts: Dict[Optional[str], Dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
|
||||
for subgroup, end_time, value in queryset:
|
||||
value_dicts[subgroup][end_time] = value
|
||||
value_arrays = {}
|
||||
|
||||
451
analytics/views/support.py
Normal file
451
analytics/views/support.py
Normal file
@@ -0,0 +1,451 @@
|
||||
import urllib
|
||||
from contextlib import suppress
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
from decimal import Decimal
|
||||
from typing import Any, Dict, Iterable, List, Optional
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.validators import URLValidator
|
||||
from django.db.models import Q
|
||||
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
|
||||
from django.shortcuts import render
|
||||
from django.urls import reverse
|
||||
from django.utils.timesince import timesince
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from confirmation.models import Confirmation, confirmation_url
|
||||
from confirmation.settings import STATUS_USED
|
||||
from zerver.actions.create_realm import do_change_realm_subdomain
|
||||
from zerver.actions.realm_settings import (
|
||||
do_change_realm_org_type,
|
||||
do_change_realm_plan_type,
|
||||
do_deactivate_realm,
|
||||
do_scrub_realm,
|
||||
do_send_realm_reactivation_email,
|
||||
)
|
||||
from zerver.actions.users import do_delete_user_preserving_messages
|
||||
from zerver.decorator import require_server_admin
|
||||
from zerver.forms import check_subdomain_available
|
||||
from zerver.lib.exceptions import JsonableError
|
||||
from zerver.lib.realm_icon import realm_icon_url
|
||||
from zerver.lib.request import REQ, has_request_variables
|
||||
from zerver.lib.subdomains import get_subdomain_from_hostname
|
||||
from zerver.lib.validator import check_bool, check_string_in, to_decimal, to_non_negative_int
|
||||
from zerver.models import (
|
||||
MultiuseInvite,
|
||||
PreregistrationRealm,
|
||||
PreregistrationUser,
|
||||
Realm,
|
||||
RealmReactivationStatus,
|
||||
UserProfile,
|
||||
get_org_type_display_name,
|
||||
get_realm,
|
||||
get_user_profile_by_id,
|
||||
)
|
||||
from zerver.views.invite import get_invitee_emails_set
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
from zilencer.models import RemoteZulipServer
|
||||
|
||||
if settings.BILLING_ENABLED:
|
||||
from corporate.lib.stripe import approve_sponsorship as do_approve_sponsorship
|
||||
from corporate.lib.stripe import (
|
||||
attach_discount_to_realm,
|
||||
downgrade_at_the_end_of_billing_cycle,
|
||||
downgrade_now_without_creating_additional_invoices,
|
||||
get_discount_for_realm,
|
||||
get_latest_seat_count,
|
||||
make_end_of_cycle_updates_if_needed,
|
||||
switch_realm_from_standard_to_plus_plan,
|
||||
update_billing_method_of_current_plan,
|
||||
update_sponsorship_status,
|
||||
void_all_open_invoices,
|
||||
)
|
||||
from corporate.models import (
|
||||
Customer,
|
||||
CustomerPlan,
|
||||
get_current_plan_by_realm,
|
||||
get_customer_by_realm,
|
||||
)
|
||||
|
||||
|
||||
def get_plan_name(plan_type: int) -> str:
|
||||
return {
|
||||
Realm.PLAN_TYPE_SELF_HOSTED: "self-hosted",
|
||||
Realm.PLAN_TYPE_LIMITED: "limited",
|
||||
Realm.PLAN_TYPE_STANDARD: "standard",
|
||||
Realm.PLAN_TYPE_STANDARD_FREE: "open source",
|
||||
Realm.PLAN_TYPE_PLUS: "plus",
|
||||
}[plan_type]
|
||||
|
||||
|
||||
def get_confirmations(
|
||||
types: List[int], object_ids: Iterable[int], hostname: Optional[str] = None
|
||||
) -> List[Dict[str, Any]]:
|
||||
lowest_datetime = timezone_now() - timedelta(days=30)
|
||||
confirmations = Confirmation.objects.filter(
|
||||
type__in=types, object_id__in=object_ids, date_sent__gte=lowest_datetime
|
||||
)
|
||||
confirmation_dicts = []
|
||||
for confirmation in confirmations:
|
||||
realm = confirmation.realm
|
||||
content_object = confirmation.content_object
|
||||
|
||||
type = confirmation.type
|
||||
expiry_date = confirmation.expiry_date
|
||||
|
||||
assert content_object is not None
|
||||
if hasattr(content_object, "status"):
|
||||
if content_object.status == STATUS_USED:
|
||||
link_status = "Link has been used"
|
||||
else:
|
||||
link_status = "Link has not been used"
|
||||
else:
|
||||
link_status = ""
|
||||
|
||||
now = timezone_now()
|
||||
if expiry_date is None:
|
||||
expires_in = "Never"
|
||||
elif now < expiry_date:
|
||||
expires_in = timesince(now, expiry_date)
|
||||
else:
|
||||
expires_in = "Expired"
|
||||
|
||||
url = confirmation_url(confirmation.confirmation_key, realm, type)
|
||||
confirmation_dicts.append(
|
||||
{
|
||||
"object": confirmation.content_object,
|
||||
"url": url,
|
||||
"type": type,
|
||||
"link_status": link_status,
|
||||
"expires_in": expires_in,
|
||||
}
|
||||
)
|
||||
return confirmation_dicts
|
||||
|
||||
|
||||
VALID_MODIFY_PLAN_METHODS = [
|
||||
"downgrade_at_billing_cycle_end",
|
||||
"downgrade_now_without_additional_licenses",
|
||||
"downgrade_now_void_open_invoices",
|
||||
"upgrade_to_plus",
|
||||
]
|
||||
|
||||
VALID_STATUS_VALUES = [
|
||||
"active",
|
||||
"deactivated",
|
||||
]
|
||||
|
||||
VALID_BILLING_METHODS = [
|
||||
"send_invoice",
|
||||
"charge_automatically",
|
||||
]
|
||||
|
||||
|
||||
@dataclass
|
||||
class PlanData:
|
||||
customer: Optional["Customer"] = None
|
||||
current_plan: Optional["CustomerPlan"] = None
|
||||
licenses: Optional[int] = None
|
||||
licenses_used: Optional[int] = None
|
||||
|
||||
|
||||
@require_server_admin
@has_request_variables
def support(
    request: HttpRequest,
    realm_id: Optional[int] = REQ(default=None, converter=to_non_negative_int),
    plan_type: Optional[int] = REQ(default=None, converter=to_non_negative_int),
    discount: Optional[Decimal] = REQ(default=None, converter=to_decimal),
    new_subdomain: Optional[str] = REQ(default=None),
    status: Optional[str] = REQ(default=None, str_validator=check_string_in(VALID_STATUS_VALUES)),
    billing_method: Optional[str] = REQ(
        default=None, str_validator=check_string_in(VALID_BILLING_METHODS)
    ),
    sponsorship_pending: Optional[bool] = REQ(default=None, json_validator=check_bool),
    approve_sponsorship: bool = REQ(default=False, json_validator=check_bool),
    modify_plan: Optional[str] = REQ(
        default=None, str_validator=check_string_in(VALID_MODIFY_PLAN_METHODS)
    ),
    scrub_realm: bool = REQ(default=False, json_validator=check_bool),
    delete_user_by_id: Optional[int] = REQ(default=None, converter=to_non_negative_int),
    query: Optional[str] = REQ("q", default=None),
    org_type: Optional[int] = REQ(default=None, converter=to_non_negative_int),
) -> HttpResponse:
    """Server-administrator support page.

    GET with ``q`` searches for realms/users/confirmation links matching
    the query.  POST (only when billing is enabled) applies exactly one
    administrative action to the realm given by ``realm_id``; the action
    is determined by which single optional parameter was sent alongside
    ``realm_id``.

    Raises:
        JsonableError: if a POST contains more or fewer than the expected
            two parameters (``realm_id`` plus one action field).
    """
    context: Dict[str, Any] = {}

    # Display (once) a success message left in the session by a previous
    # request, e.g. the redirect in the subdomain-change branch below.
    if "success_message" in request.session:
        context["success_message"] = request.session["success_message"]
        del request.session["success_message"]

    if settings.BILLING_ENABLED and request.method == "POST":
        # We check that request.POST only has two keys in it: The
        # realm_id and a field to change.
        keys = set(request.POST.keys())
        if "csrfmiddlewaretoken" in keys:
            keys.remove("csrfmiddlewaretoken")
        if len(keys) != 2:
            raise JsonableError(_("Invalid parameters"))

        assert realm_id is not None
        realm = Realm.objects.get(id=realm_id)

        acting_user = request.user
        assert isinstance(acting_user, UserProfile)
        # Exactly one of the following branches runs, based on which
        # action parameter accompanied realm_id in the POST.
        if plan_type is not None:
            current_plan_type = realm.plan_type
            do_change_realm_plan_type(realm, plan_type, acting_user=acting_user)
            msg = f"Plan type of {realm.string_id} changed from {get_plan_name(current_plan_type)} to {get_plan_name(plan_type)} "
            context["success_message"] = msg
        elif org_type is not None:
            current_realm_type = realm.org_type
            do_change_realm_org_type(realm, org_type, acting_user=acting_user)
            msg = f"Org type of {realm.string_id} changed from {get_org_type_display_name(current_realm_type)} to {get_org_type_display_name(org_type)} "
            context["success_message"] = msg
        elif discount is not None:
            current_discount = get_discount_for_realm(realm) or 0
            attach_discount_to_realm(realm, discount, acting_user=acting_user)
            context[
                "success_message"
            ] = f"Discount of {realm.string_id} changed to {discount}% from {current_discount}%."
        elif new_subdomain is not None:
            old_subdomain = realm.string_id
            try:
                check_subdomain_available(new_subdomain)
            except ValidationError as error:
                context["error_message"] = error.message
            else:
                do_change_realm_subdomain(realm, new_subdomain, acting_user=acting_user)
                # Redirect so the search query tracks the realm's new
                # subdomain; the success message survives via the session.
                request.session[
                    "success_message"
                ] = f"Subdomain changed from {old_subdomain} to {new_subdomain}"
                return HttpResponseRedirect(
                    reverse("support") + "?" + urlencode({"q": new_subdomain})
                )
        elif status is not None:
            if status == "active":
                # Reactivation is done by the realm's admins via email,
                # not directly here.
                do_send_realm_reactivation_email(realm, acting_user=acting_user)
                context[
                    "success_message"
                ] = f"Realm reactivation email sent to admins of {realm.string_id}."
            elif status == "deactivated":
                do_deactivate_realm(realm, acting_user=acting_user)
                context["success_message"] = f"{realm.string_id} deactivated."
        elif billing_method is not None:
            if billing_method == "send_invoice":
                update_billing_method_of_current_plan(
                    realm, charge_automatically=False, acting_user=acting_user
                )
                context[
                    "success_message"
                ] = f"Billing method of {realm.string_id} updated to pay by invoice."
            elif billing_method == "charge_automatically":
                update_billing_method_of_current_plan(
                    realm, charge_automatically=True, acting_user=acting_user
                )
                context[
                    "success_message"
                ] = f"Billing method of {realm.string_id} updated to charge automatically."
        elif sponsorship_pending is not None:
            if sponsorship_pending:
                update_sponsorship_status(realm, True, acting_user=acting_user)
                context["success_message"] = f"{realm.string_id} marked as pending sponsorship."
            else:
                update_sponsorship_status(realm, False, acting_user=acting_user)
                context["success_message"] = f"{realm.string_id} is no longer pending sponsorship."
        elif approve_sponsorship:
            do_approve_sponsorship(realm, acting_user=acting_user)
            context["success_message"] = f"Sponsorship approved for {realm.string_id}"
        elif modify_plan is not None:
            if modify_plan == "downgrade_at_billing_cycle_end":
                downgrade_at_the_end_of_billing_cycle(realm)
                context[
                    "success_message"
                ] = f"{realm.string_id} marked for downgrade at the end of billing cycle"
            elif modify_plan == "downgrade_now_without_additional_licenses":
                downgrade_now_without_creating_additional_invoices(realm)
                context[
                    "success_message"
                ] = f"{realm.string_id} downgraded without creating additional invoices"
            elif modify_plan == "downgrade_now_void_open_invoices":
                downgrade_now_without_creating_additional_invoices(realm)
                voided_invoices_count = void_all_open_invoices(realm)
                context[
                    "success_message"
                ] = f"{realm.string_id} downgraded and voided {voided_invoices_count} open invoices"
            elif modify_plan == "upgrade_to_plus":
                switch_realm_from_standard_to_plus_plan(realm)
                context["success_message"] = f"{realm.string_id} upgraded to Plus"
        elif scrub_realm:
            do_scrub_realm(realm, acting_user=acting_user)
            context["success_message"] = f"{realm.string_id} scrubbed."
        elif delete_user_by_id:
            user_profile_for_deletion = get_user_profile_by_id(delete_user_by_id)
            user_email = user_profile_for_deletion.delivery_email
            # Guard against deleting a user from a different realm than
            # the one selected in the form.
            assert user_profile_for_deletion.realm == realm
            do_delete_user_preserving_messages(user_profile_for_deletion)
            context["success_message"] = f"{user_email} in {realm.subdomain} deleted."

    if query:
        # The query is parsed as a comma/whitespace-separated list of
        # search terms (emails, subdomains, realm URLs, or full names).
        key_words = get_invitee_emails_set(query)

        case_insensitive_users_q = Q()
        for key_word in key_words:
            case_insensitive_users_q |= Q(delivery_email__iexact=key_word)
        users = set(UserProfile.objects.filter(case_insensitive_users_q))
        realms = set(Realm.objects.filter(string_id__in=key_words))

        for key_word in key_words:
            try:
                # If the term is a valid URL, resolve its hostname to a
                # realm subdomain; otherwise fall through to a full-name
                # search in the except branch.
                URLValidator()(key_word)
                parse_result = urllib.parse.urlparse(key_word)
                hostname = parse_result.hostname
                assert hostname is not None
                if parse_result.port:
                    hostname = f"{hostname}:{parse_result.port}"
                subdomain = get_subdomain_from_hostname(hostname)
                with suppress(Realm.DoesNotExist):
                    realms.add(get_realm(subdomain))
            except ValidationError:
                users.update(UserProfile.objects.filter(full_name__iexact=key_word))

        # full_names can have , in them
        users.update(UserProfile.objects.filter(full_name__iexact=query))

        context["users"] = users
        context["realms"] = realms

        # Collect recent confirmation links (invitations, registrations,
        # realm creations/reactivations) related to the search terms.
        confirmations: List[Dict[str, Any]] = []

        preregistration_user_ids = [
            user.id for user in PreregistrationUser.objects.filter(email__in=key_words)
        ]
        confirmations += get_confirmations(
            [Confirmation.USER_REGISTRATION, Confirmation.INVITATION],
            preregistration_user_ids,
            hostname=request.get_host(),
        )

        preregistration_realm_ids = [
            user.id for user in PreregistrationRealm.objects.filter(email__in=key_words)
        ]
        confirmations += get_confirmations(
            [Confirmation.REALM_CREATION],
            preregistration_realm_ids,
            hostname=request.get_host(),
        )

        multiuse_invite_ids = [
            invite.id for invite in MultiuseInvite.objects.filter(realm__in=realms)
        ]
        confirmations += get_confirmations([Confirmation.MULTIUSE_INVITE], multiuse_invite_ids)

        realm_reactivation_status_objects = RealmReactivationStatus.objects.filter(realm__in=realms)
        confirmations += get_confirmations(
            [Confirmation.REALM_REACTIVATION], [obj.id for obj in realm_reactivation_status_objects]
        )

        context["confirmations"] = confirmations

        # We want a union of all realms that might appear in the search result,
        # but not necessary as a separate result item.
        # Therefore, we do not modify the realms object in the context.
        all_realms = realms.union(
            [
                confirmation["object"].realm
                for confirmation in confirmations
                # For confirmations, we only display realm details when the type is USER_REGISTRATION
                # or INVITATION.
                if confirmation["type"] in (Confirmation.USER_REGISTRATION, Confirmation.INVITATION)
            ]
            + [user.realm for user in users]
        )
        # Build per-realm billing data for the template, applying any
        # pending end-of-cycle plan updates first.
        plan_data: Dict[int, PlanData] = {}
        for realm in all_realms:
            current_plan = get_current_plan_by_realm(realm)
            plan_data[realm.id] = PlanData(
                customer=get_customer_by_realm(realm),
                current_plan=current_plan,
            )
            if current_plan is not None:
                new_plan, last_ledger_entry = make_end_of_cycle_updates_if_needed(
                    current_plan, timezone_now()
                )
                if last_ledger_entry is not None:
                    if new_plan is not None:
                        plan_data[realm.id].current_plan = new_plan
                    else:
                        plan_data[realm.id].current_plan = current_plan
                    plan_data[realm.id].licenses = last_ledger_entry.licenses
                    plan_data[realm.id].licenses_used = get_latest_seat_count(realm)
        context["plan_data"] = plan_data

    # Helper callables exposed to the template for rendering realm rows.
    def get_realm_owner_emails_as_string(realm: Realm) -> str:
        return ", ".join(
            realm.get_human_owner_users()
            .order_by("delivery_email")
            .values_list("delivery_email", flat=True)
        )

    def get_realm_admin_emails_as_string(realm: Realm) -> str:
        return ", ".join(
            realm.get_human_admin_users(include_realm_owners=False)
            .order_by("delivery_email")
            .values_list("delivery_email", flat=True)
        )

    context["get_realm_owner_emails_as_string"] = get_realm_owner_emails_as_string
    context["get_realm_admin_emails_as_string"] = get_realm_admin_emails_as_string
    context["get_discount_for_realm"] = get_discount_for_realm
    context["get_org_type_display_name"] = get_org_type_display_name
    context["realm_icon_url"] = realm_icon_url
    context["Confirmation"] = Confirmation
    context["sorted_realm_types"] = sorted(
        Realm.ORG_TYPES.values(), key=lambda d: d["display_order"]
    )

    return render(request, "analytics/support.html", context=context)
|
||||
|
||||
|
||||
def get_remote_servers_for_support(
    email_to_search: Optional[str], hostname_to_search: Optional[str]
) -> List["RemoteZulipServer"]:
    """Return remote Zulip servers matching a support search term.

    An email term matches the contact email exactly (case-insensitively)
    and takes precedence over a hostname term, which matches as a
    case-insensitive substring.  With no term at all, nothing is returned.
    """
    if not (email_to_search or hostname_to_search):
        return []

    servers = RemoteZulipServer.objects.order_by("id")
    if email_to_search:
        servers = servers.filter(contact_email__iexact=email_to_search)
    else:
        # The early return above guarantees hostname_to_search is set here.
        servers = servers.filter(hostname__icontains=hostname_to_search)
    return list(servers)
|
||||
|
||||
|
||||
@require_server_admin
@has_request_variables
def remote_servers_support(
    request: HttpRequest, query: Optional[str] = REQ("q", default=None)
) -> HttpResponse:
    """Render the remote-server support page, optionally filtered by ``q``.

    A query containing "@" is treated as a contact email search; any other
    non-empty query is treated as a hostname fragment.
    """
    # Classify the query: "@" means the admin searched by contact email.
    email_to_search = query if query and "@" in query else None
    hostname_to_search = query if query and "@" not in query else None

    remote_servers = get_remote_servers_for_support(
        email_to_search=email_to_search, hostname_to_search=hostname_to_search
    )
    return render(
        request,
        "analytics/remote_server_support.html",
        context=dict(remote_servers=remote_servers),
    )
|
||||
106
analytics/views/user_activity.py
Normal file
106
analytics/views/user_activity.py
Normal file
@@ -0,0 +1,106 @@
|
||||
from typing import Any, Dict, List, Tuple
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.models import QuerySet
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
from django.shortcuts import render
|
||||
|
||||
from analytics.views.activity_common import (
|
||||
format_date_for_activity_reports,
|
||||
get_user_activity_summary,
|
||||
make_table,
|
||||
)
|
||||
from zerver.decorator import require_server_admin
|
||||
from zerver.models import UserActivity, UserProfile, get_user_profile_by_id
|
||||
|
||||
if settings.BILLING_ENABLED:
|
||||
pass
|
||||
|
||||
|
||||
def get_user_activity_records(
    user_profile: UserProfile,
) -> QuerySet[UserActivity]:
    """Fetch one user's activity rows, most recent first.

    Joins the related user and client rows and restricts the SELECT to
    just the columns the activity report tables render.
    """
    wanted_columns = (
        "user_profile__full_name",
        "query",
        "client__name",
        "count",
        "last_visit",
    )
    return (
        UserActivity.objects.filter(user_profile=user_profile)
        .order_by("-last_visit")
        .select_related("user_profile", "client")
        .only(*wanted_columns)
    )
|
||||
|
||||
|
||||
def raw_user_activity_table(records: QuerySet[UserActivity]) -> str:
    """Render an HTML table with one row per raw UserActivity record."""
    column_names = ["query", "client", "count", "last_visit"]

    def as_row(activity: UserActivity) -> List[Any]:
        # One cell per column, with the timestamp formatted for display.
        return [
            activity.query,
            activity.client.name,
            activity.count,
            format_date_for_activity_reports(activity.last_visit),
        ]

    return make_table("Raw data", column_names, [as_row(record) for record in records])
|
||||
|
||||
|
||||
def user_activity_summary_table(user_summary: Dict[str, Dict[str, Any]]) -> str:
    """Render a per-client activity summary for one user as an HTML table."""
    # Every key in the summary other than these metadata entries is a
    # client name mapping to its usage details.
    metadata_keys = ("name", "user_profile_id")
    rows = [
        [
            format_date_for_activity_reports(details["last_visit"]),
            client_name,
            details["count"],
        ]
        for client_name, details in user_summary.items()
        if client_name not in metadata_keys
    ]
    # Most recently used clients first (ordering on the formatted date string).
    rows.sort(key=lambda row: row[0], reverse=True)

    return make_table("User activity", ["last_visit", "client", "count"], rows)
|
||||
|
||||
|
||||
@require_server_admin
def get_user_activity(request: HttpRequest, user_profile_id: int) -> HttpResponse:
    """Render the activity page for a single user.

    Shows a per-client summary table followed by the raw activity records.
    """
    user_profile = get_user_profile_by_id(user_profile_id)
    records = get_user_activity_records(user_profile)

    summary_html = user_activity_summary_table(get_user_activity_summary(records))
    raw_html = raw_user_activity_table(records)
    data: List[Tuple[str, str]] = [
        ("Summary", summary_html),
        ("Info", raw_html),
    ]

    return render(
        request,
        "analytics/activity.html",
        context=dict(data=data, title=user_profile.delivery_email),
    )
|
||||
@@ -1,89 +1,34 @@
|
||||
# API keys
|
||||
|
||||
An **API key** is how a bot identifies itself to Zulip. For the official
|
||||
clients, such as the Python bindings, we recommend [downloading a `zuliprc`
|
||||
file](/api/configuring-python-bindings#download-a-zuliprc-file). This file
|
||||
contains an API key and other necessary configuration values for using the
|
||||
Zulip API with a specific account on a Zulip server.
|
||||
An **API key** is how a bot identifies itself to Zulip. Anyone with a
|
||||
bot's API key can impersonate the bot, so be careful with it!
|
||||
|
||||
## Get a bot's API key
|
||||
|
||||
{start_tabs}
|
||||
|
||||
{tab|desktop-web}
|
||||
|
||||
{settings_tab|your-bots}
|
||||
|
||||
1. Click **Active bots**.
|
||||
|
||||
1. Find your bot. The bot's API key is under **API KEY**.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
!!! warn ""
|
||||
|
||||
Anyone with a bot's API key can impersonate the bot, so be careful with it!
|
||||
|
||||
## Get your API key
|
||||
|
||||
{start_tabs}
|
||||
|
||||
{tab|desktop-web}
|
||||
Anyone with your API key can impersonate you, so be doubly careful with it.
|
||||
|
||||
{settings_tab|account-and-privacy}
|
||||
|
||||
1. Under **API key**, click **Manage your API key**.
|
||||
1. Under **API key**, click **Show/change your API key**.
|
||||
|
||||
1. Enter your password, and click **Get API key**. If you don't know your
|
||||
password, click **reset it** and follow the instructions from there.
|
||||
password, click **reset it** and follow the
|
||||
instructions from there.
|
||||
|
||||
1. Copy your API key.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
!!! warn ""
|
||||
|
||||
Anyone with your API key can impersonate you, so be doubly careful with it.
|
||||
|
||||
|
||||
## Invalidate an API key
|
||||
|
||||
To invalidate an existing API key, you have to generate a new key.
|
||||
To invalidate a key, follow the instructions above, and click
|
||||
**Generate new API key** or click the **refresh**
|
||||
(<i class="fa fa-refresh"></i>) icon as appropriate.
|
||||
|
||||
### Invalidate a bot's API key
|
||||
|
||||
{start_tabs}
|
||||
|
||||
{tab|desktop-web}
|
||||
|
||||
{settings_tab|your-bots}
|
||||
|
||||
1. Click **Active bots**.
|
||||
|
||||
1. Find your bot.
|
||||
|
||||
1. Under **API KEY**, click the **refresh** (<i class="fa fa-refresh"></i>) icon
|
||||
to the right of the bot's API key.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
### Invalidate your API key
|
||||
|
||||
{start_tabs}
|
||||
|
||||
{tab|desktop-web}
|
||||
|
||||
{settings_tab|account-and-privacy}
|
||||
|
||||
1. Under **API key**, click **Manage your API key**.
|
||||
|
||||
1. Enter your password, and click **Get API key**. If you don't know your
|
||||
password, click **reset it** and follow the instructions from there.
|
||||
|
||||
1. Click **Generate new API key**
|
||||
|
||||
{end_tabs}
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Configuring the Python bindings](/api/configuring-python-bindings)
|
||||
This will generate a new key for you or the bot, and invalidate the old one.
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -37,7 +37,7 @@ topic][integrations-thread] in
|
||||
or submit a pull request [updating this
|
||||
page](https://zulip.readthedocs.io/en/latest/documentation/api.html).
|
||||
|
||||
[integrations-thread]: https://chat.zulip.org/#narrow/channel/127-integrations/topic/API.20client.20libraries/
|
||||
[integrations-thread]: https://chat.zulip.org/#narrow/stream/127-integrations/topic/API.20client.20libraries/
|
||||
|
||||
### Outdated
|
||||
|
||||
|
||||
@@ -5,65 +5,15 @@ easily, called the [Python bindings](https://pypi.python.org/pypi/zulip/).
|
||||
One of the most notable use cases for these bindings are bots developed
|
||||
using Zulip's [bot framework](/api/writing-bots).
|
||||
|
||||
In order to use them, you need to configure them with your identity
|
||||
(account, API key, and Zulip server URL). There are a few ways to
|
||||
achieve that:
|
||||
In order to use them, you need to configure them with your API key and other
|
||||
settings. There are two ways to achieve that:
|
||||
|
||||
- Using a `zuliprc` file, referenced via the `--config-file` option or
|
||||
the `config_file` option to the `zulip.Client` constructor
|
||||
(recommended for bots).
|
||||
- Using a `zuliprc` file in your home directory at `~/.zuliprc`
|
||||
(recommended for your own API key).
|
||||
- Using the [environment
|
||||
variables](https://en.wikipedia.org/wiki/Environment_variable)
|
||||
documented below.
|
||||
- Using the `--api-key`, `--email`, and `--site` variables as command
|
||||
line parameters.
|
||||
- Using the `api_key`, `email`, and `site` parameters to the
|
||||
`zulip.Client` constructor.
|
||||
- With a file called `.zuliprc`, located in your home directory.
|
||||
- With
|
||||
[environment variables](https://en.wikipedia.org/wiki/Environment_variable)
|
||||
set up in your host machine.
|
||||
|
||||
## Download a `zuliprc` file
|
||||
|
||||
{start_tabs}
|
||||
|
||||
{tab|for-a-bot}
|
||||
|
||||
{settings_tab|your-bots}
|
||||
|
||||
1. Click the **download** (<i class="fa fa-download"></i>) icon on the profile
|
||||
card of the desired bot to download the bot's `zuliprc` file.
|
||||
|
||||
!!! warn ""
|
||||
|
||||
Anyone with a bot's API key can impersonate the bot, so be careful with it!
|
||||
|
||||
{tab|for-yourself}
|
||||
|
||||
{settings_tab|account-and-privacy}
|
||||
|
||||
1. Under **API key**, click **Manage your API key**.
|
||||
|
||||
1. Enter your password, and click **Get API key**. If you don't know your
|
||||
password, click **reset it** and follow the
|
||||
instructions from there.
|
||||
|
||||
1. Click **Download zuliprc** to download your `zuliprc` file.
|
||||
|
||||
1. (optional) If you'd like your credentials to be used by default
|
||||
when using the Zulip API on your computer, move the `zuliprc` file
|
||||
to `~/.zuliprc` in your home directory.
|
||||
|
||||
!!! warn ""
|
||||
|
||||
Anyone with your API key can impersonate you, so be doubly careful with it.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
## Configuration keys and environment variables
|
||||
|
||||
`zuliprc` is a configuration file written in the
|
||||
[INI file format](https://en.wikipedia.org/wiki/INI_file),
|
||||
which contains key-value pairs as shown in the following example:
|
||||
A `.zuliprc` file is a plain text document that looks like this:
|
||||
|
||||
```
|
||||
[api]
|
||||
@@ -79,7 +29,7 @@ can be found in the following table:
|
||||
<table class="table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th><code>zuliprc</code> key</th>
|
||||
<th><code>.zuliprc</code> key</th>
|
||||
<th>Environment variable</th>
|
||||
<th>Required</th>
|
||||
<th>Description</th>
|
||||
@@ -152,10 +102,3 @@ can be found in the following table:
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Installation instructions](/api/installation-instructions)
|
||||
* [API keys](/api/api-keys)
|
||||
* [Running bots](/api/running-bots)
|
||||
* [Deploying bots](/api/deploying-bots)
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
# Construct a narrow
|
||||
|
||||
A **narrow** is a set of filters for Zulip messages, that can be based
|
||||
on many different factors (like sender, channel, topic, search
|
||||
keywords, etc.). Narrows are used in various places in the Zulip
|
||||
on many different factors (like sender, stream, topic, search
|
||||
keywords, etc.). Narrows are used in various places in the Zulip
|
||||
API (most importantly, in the API for fetching messages).
|
||||
|
||||
It is simplest to explain the algorithm for encoding a search as a
|
||||
narrow using a single example. Consider the following search query
|
||||
(written as it would be entered in the Zulip web app's search box).
|
||||
It filters for messages sent to channel `announce`, not sent by
|
||||
It filters for messages sent to stream `announce`, not sent by
|
||||
`iago@zulip.com`, and containing the words `cool` and `sunglasses`:
|
||||
|
||||
```
|
||||
channel:announce -sender:iago@zulip.com cool sunglasses
|
||||
stream:announce -sender:iago@zulip.com cool sunglasses
|
||||
```
|
||||
|
||||
This query would be JSON-encoded for use in the Zulip API using JSON
|
||||
@@ -21,7 +21,7 @@ as a list of simple objects, as follows:
|
||||
```json
|
||||
[
|
||||
{
|
||||
"operator": "channel",
|
||||
"operator": "stream",
|
||||
"operand": "announce"
|
||||
},
|
||||
{
|
||||
@@ -40,109 +40,42 @@ The Zulip help center article on [searching for messages](/help/search-for-messa
|
||||
documents the majority of the search/narrow options supported by the
|
||||
Zulip API.
|
||||
|
||||
Note that many narrows, including all that lack a `channel` or `channels`
|
||||
Note that many narrows, including all that lack a `stream` or `streams`
|
||||
operator, search the current user's personal message history. See
|
||||
[searching shared history](/help/search-for-messages#searching-shared-history)
|
||||
for details.
|
||||
|
||||
Clients should note that the `is:unread` filter takes advantage of the
|
||||
fact that there is a database index for unread messages, which can be an
|
||||
important optimization when fetching messages in certain cases (e.g.,
|
||||
when [adding the `read` flag to a user's personal
|
||||
messages](/api/update-message-flags-for-narrow)).
|
||||
**Changes**: In Zulip 7.0 (feature level 177), support was added
|
||||
for three filters related to direct messages: `is:dm`, `dm` and
|
||||
`dm-including`. The `dm` operator replaced and deprecated the
|
||||
`pm-with` operator. The `is:dm` filter replaced and deprecated
|
||||
the `is:private` filter. The `dm-including` operator replaced and
|
||||
deprecated the `group-pm-with` operator.
|
||||
|
||||
Note: When the value of `realm_empty_topic_display_name` found in
|
||||
the [POST /register](/api/register-queue) response is used as an operand
|
||||
for the `"topic"` operator in the narrow, it is interpreted
|
||||
as an empty string.
|
||||
The `dm-including` and `group-pm-with` operators return slightly
|
||||
different results. For example, `dm-including:1234` returns all
|
||||
direct messages (1-on-1 and group) that include the current user
|
||||
and the user with the unique user ID of `1234`. On the other hand,
|
||||
`group-pm-with:1234` returned only group direct messages that included
|
||||
the current user and the user with the unique user ID of `1234`.
|
||||
|
||||
## Changes
|
||||
|
||||
* In Zulip 10.0 (feature level ZF-f80735), support was added for a new
|
||||
`is:muted` operator combination, matching messages in topics and
|
||||
channels that the user has [muted](/help/mute-a-topic).
|
||||
|
||||
* Before Zulip 10.0 (feature level 334), empty string was not a valid
|
||||
topic name for channel messages.
|
||||
|
||||
* In Zulip 9.0 (feature level 271), support was added for a new filter
|
||||
operator, `with`, which uses a [message ID](#message-ids) for its
|
||||
operand, and is designed for creating permanent links to topics.
|
||||
|
||||
* In Zulip 9.0 (feature level 265), support was added for a new
|
||||
`is:followed` filter, matching messages in topics that the current
|
||||
user is [following](/help/follow-a-topic).
|
||||
|
||||
* In Zulip 9.0 (feature level 250), support was added for two filters
|
||||
related to stream messages: `channel` and `channels`. The `channel`
|
||||
operator is an alias for the `stream` operator. The `channels`
|
||||
operator is an alias for the `streams` operator. Both `channel` and
|
||||
`channels` return the same exact results as `stream` and `streams`
|
||||
respectively.
|
||||
|
||||
* In Zulip 9.0 (feature level 249), support was added for a new filter,
|
||||
`has:reaction`, which returns messages that have at least one [emoji
|
||||
reaction](/help/emoji-reactions).
|
||||
|
||||
* In Zulip 7.0 (feature level 177), support was added for three filters
|
||||
related to direct messages: `is:dm`, `dm` and `dm-including`. The
|
||||
`dm` operator replaced and deprecated the `pm-with` operator. The
|
||||
`is:dm` filter replaced and deprecated the `is:private` filter. The
|
||||
`dm-including` operator replaced and deprecated the `group-pm-with`
|
||||
operator.
|
||||
|
||||
* The `dm-including` and `group-pm-with` operators return slightly
|
||||
different results. For example, `dm-including:1234` returns all
|
||||
direct messages (1-on-1 and group) that include the current user
|
||||
and the user with the unique user ID of `1234`. On the other hand,
|
||||
`group-pm-with:1234` returned only group direct messages that
|
||||
included the current user and the user with the unique user ID of
|
||||
`1234`.
|
||||
|
||||
* Both `dm` and `is:dm` are aliases of `pm-with` and `is:private`
|
||||
respectively, and return the same exact results that the
|
||||
deprecated filters did.
|
||||
Both `dm` and `is:dm` are aliases of `pm-with` and `is:private`
|
||||
respectively, and return the same exact results that the deprecated
|
||||
filters did.
|
||||
|
||||
## Narrows that use IDs
|
||||
|
||||
### Message IDs
|
||||
|
||||
The `id` and `with` operators use message IDs for their operands. The
|
||||
message ID operand for these two operators may be encoded as either a
|
||||
number or a string.
|
||||
The `near` and `id` operators, documented in the help center, use message
|
||||
IDs for their operands.
|
||||
|
||||
* `id:12345`: Search for only the message with ID `12345`.
|
||||
* `with:12345`: Search for the conversation that contains the message
|
||||
with ID `12345`.
|
||||
* `near:12345`: Search messages around the message with ID `12345`.
|
||||
* `id:12345`: Search for only the message with ID `12345`.
|
||||
|
||||
The `id` operator returns the message with the specified ID if it exists,
|
||||
and if it can be accessed by the user.
|
||||
|
||||
The `with` operator is designed to be used for permanent links to
|
||||
topics, which means they should continue to work when the topic is
|
||||
[moved](/help/move-content-to-another-topic) or
|
||||
[resolved](/help/resolve-a-topic). If the message with the specified
|
||||
ID exists, and can be accessed by the user, then it will return
|
||||
messages with the `channel`/`topic`/`dm` operators corresponding to
|
||||
the current conversation containing that message, replacing any such
|
||||
operators included in the original narrow query.
|
||||
|
||||
If no such message exists, or the message ID represents a message that
|
||||
is inaccessible to the user, this operator will be ignored (rather
|
||||
than throwing an error) if the remaining operators uniquely identify a
|
||||
conversation (i.e., they contain `channel` and `topic` terms or `dm`
|
||||
term). This behavior is intended to provide the best possible
|
||||
experience for links to private channels with protected history.
|
||||
|
||||
The [help center](/help/search-for-messages#search-by-message-id) also
|
||||
documents the `near` operator for searching for messages by ID, but
|
||||
this narrow operator has no effect on filtering messages when sent to
|
||||
the server. In practice, when the `near` operator is used to search for
|
||||
messages, or is part of a URL fragment, the value of its operand should
|
||||
instead be used for the value of the `anchor` parameter in endpoints
|
||||
that also accept a `narrow` parameter; see
|
||||
[GET /messages][anchor-get-messages] and
|
||||
[POST /messages/flags/narrow][anchor-post-flags].
|
||||
The message ID operand for the `id` operator may be encoded as either a
|
||||
number or a string. The message ID operand for the `near` operator must
|
||||
be encoded as a string.
|
||||
|
||||
**Changes**: Prior to Zulip 8.0 (feature level 194), the message ID
|
||||
operand for the `id` operator needed to be encoded as a string.
|
||||
@@ -157,13 +90,13 @@ operand for the `id` operator needed to be encoded as a string.
|
||||
]
|
||||
```
|
||||
|
||||
### Channel and user IDs
|
||||
### Stream and user IDs
|
||||
|
||||
There are a few additional narrow/search options (new in Zulip 2.1)
|
||||
that use either channel IDs or user IDs that are not documented in the
|
||||
that use either stream IDs or user IDs that are not documented in the
|
||||
help center because they are primarily useful to API clients:
|
||||
|
||||
* `channel:1234`: Search messages sent to the channel with ID `1234`.
|
||||
* `stream:1234`: Search messages sent to the stream with ID `1234`.
|
||||
* `sender:1234`: Search messages sent by user ID `1234`.
|
||||
* `dm:1234`: Search the direct message conversation between
|
||||
you and user ID `1234`.
|
||||
@@ -172,12 +105,6 @@ help center because they are primarily useful to API clients:
|
||||
* `dm-including:1234`: Search all direct messages (1-on-1 and group)
|
||||
that include you and user ID `1234`.
|
||||
|
||||
!!! tip ""
|
||||
|
||||
A user ID can be found by [viewing a user's profile][view-profile]
|
||||
in the web or desktop apps. A channel ID can be found when [browsing
|
||||
channels][browse-channels] in the web or desktop apps.
|
||||
|
||||
The operands for these search options must be encoded either as an
|
||||
integer ID or a JSON list of integer IDs. For example, to query
|
||||
messages sent by a user 1234 to a direct message thread with yourself,
|
||||
@@ -195,8 +122,3 @@ user 1234, and user 5678, the correct JSON-encoded query is:
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
[view-profile]: /help/view-someones-profile
|
||||
[browse-channels]: /help/introduction-to-channels#browse-and-subscribe-to-channels
|
||||
[anchor-get-messages]: /api/get-messages#parameter-anchor
|
||||
[anchor-post-flags]: /api/update-message-flags-for-narrow#parameter-anchor
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
{tab|curl}
|
||||
|
||||
``` curl
|
||||
# Create a scheduled channel message
|
||||
# Create a scheduled stream message
|
||||
curl -X POST {{ api_url }}/v1/scheduled_messages \
|
||||
-u BOT_EMAIL_ADDRESS:BOT_API_KEY \
|
||||
--data-urlencode type=stream \
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Create a channel
|
||||
# Create a stream
|
||||
|
||||
You can create a channel using Zulip's REST API by submitting a
|
||||
[subscribe](/api/subscribe) request with a channel name that
|
||||
You can create a stream using Zulip's REST API by submitting a
|
||||
[subscribe](/api/subscribe) request with a stream name that
|
||||
doesn't yet exist and passing appropriate parameters to define
|
||||
the initial configuration of the new channel.
|
||||
the initial configuration of the new stream.
|
||||
|
||||
@@ -44,7 +44,7 @@ Botserver interaction are:
|
||||
|
||||
1. The Zulip server sends a POST request to the Botserver on `https://bot-server.example.com/`:
|
||||
|
||||
```
|
||||
```json
|
||||
{
|
||||
"message":{
|
||||
"content":"@**My Bot User** hello world",
|
||||
@@ -57,8 +57,9 @@ Botserver interaction are:
|
||||
|
||||
This URL is configured in the Zulip web-app in your Bot User's settings.
|
||||
|
||||
1. The Botserver searches for a bot to handle the message, and executes your
|
||||
bot's `handle_message` code.
|
||||
1. The Botserver searches for a bot to handle the message.
|
||||
|
||||
1. The Botserver executes your bot's `handle_message` code.
|
||||
|
||||
Your bot's code should work just like it does with `zulip-run-bot`;
|
||||
for example, you reply using
|
||||
@@ -74,7 +75,6 @@ pip3 install zulip_botserver
|
||||
|
||||
### Running a bot using the Zulip Botserver
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. Construct the URL for your bot, which will be of the form:
|
||||
|
||||
@@ -89,7 +89,7 @@ pip3 install zulip_botserver
|
||||
1. Register new bot users on the Zulip server's web interface.
|
||||
|
||||
* Log in to the Zulip server.
|
||||
* Navigate to *Personal settings (<i class="zulip-icon zulip-icon-gear"></i>)* -> *Bots* -> *Add a new bot*.
|
||||
* Navigate to *Personal settings (<i class="fa fa-cog"></i>)* -> *Bots* -> *Add a new bot*.
|
||||
Select *Outgoing webhook* for bot type, fill out the form (using
|
||||
the URL from above) and click on *Create bot*.
|
||||
* A new bot user should appear in the *Active bots* panel.
|
||||
@@ -108,15 +108,11 @@ pip3 install zulip_botserver
|
||||
1. Congrats, everything is set up! Test your Botserver like you would
|
||||
test a normal bot.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
### Running multiple bots using the Zulip Botserver
|
||||
|
||||
The Zulip Botserver also supports running multiple bots from a single
|
||||
Botserver process. You can do this with the following procedure.
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. Download the `botserverrc` from the `your-bots` settings page, using
|
||||
the "Download config of all active outgoing webhook bots in Zulip
|
||||
Botserver format." option at the top.
|
||||
@@ -164,8 +160,6 @@ Botserver process. You can do this with the following procedure.
|
||||
|
||||
If omitted, `hostname` defaults to `127.0.0.1` and `port` to `5002`.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
### Running Zulip Botserver with supervisord
|
||||
|
||||
[supervisord](http://supervisord.org/) is a popular tool for running
|
||||
@@ -176,9 +170,7 @@ section documents how to run the Zulip Botserver using *supervisord*.
|
||||
Running the Zulip Botserver with *supervisord* works almost like
|
||||
running it manually.
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. Install *supervisord* via your package manager; e.g., on Debian/Ubuntu:
|
||||
1. Install *supervisord* via your package manager; e.g. on Debian/Ubuntu:
|
||||
|
||||
```
|
||||
sudo apt-get install supervisor
|
||||
@@ -224,8 +216,6 @@ running it manually.
|
||||
The standard output of the Botserver will be logged to the path in
|
||||
your *supervisord* configuration.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
If you are hosting the Botserver yourself (as opposed to using a
|
||||
hosting service that provides SSL), we recommend securing your
|
||||
Botserver with SSL using an `nginx` or `Apache` reverse proxy and
|
||||
@@ -233,17 +223,18 @@ Botserver with SSL using an `nginx` or `Apache` reverse proxy and
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
- Make sure the API key you're using is for an [outgoing webhook
|
||||
bot](/api/outgoing-webhooks) and you've
|
||||
correctly configured the URL for your Botserver.
|
||||
1. Make sure the API key you're using is for an [outgoing webhook
|
||||
bot](/api/outgoing-webhooks) and you've
|
||||
correctly configured the URL for your Botserver.
|
||||
|
||||
- Your Botserver needs to be accessible from your Zulip server over
|
||||
HTTP(S). Make sure any firewall allows the connection. We
|
||||
recommend using [zulip-run-bot](running-bots) instead for
|
||||
development/testing on a laptop or other non-server system.
|
||||
If your Zulip server is self-hosted, you can test by running `curl
|
||||
http://zulipbotserver.example.com:5002` from your Zulip server;
|
||||
the output should be:
|
||||
1. Your Botserver needs to be accessible from your Zulip server over
|
||||
HTTP(S). Make sure any firewall allows the connection. We
|
||||
recommend using [zulip-run-bot](running-bots) instead for
|
||||
development/testing on a laptop or other non-server system.
|
||||
|
||||
If your Zulip server is self-hosted, you can test by running `curl
|
||||
http://zulipbotserver.example.com:5002` from your Zulip server;
|
||||
the output should be:
|
||||
|
||||
```
|
||||
$ curl http://zulipbotserver.example.com:5002/
|
||||
@@ -252,9 +243,3 @@ Botserver with SSL using an `nginx` or `Apache` reverse proxy and
|
||||
<h1>Method Not Allowed</h1>
|
||||
<p>The method is not allowed for the requested URL.</p>
|
||||
```
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Non-webhook integrations](/api/non-webhook-integrations)
|
||||
* [Running bots](/api/running-bots)
|
||||
* [Writing bots](/api/writing-bots)
|
||||
|
||||
@@ -1,122 +0,0 @@
|
||||
# Group-setting values
|
||||
|
||||
Settings defining permissions in Zulip are increasingly represented
|
||||
using [user groups](/help/user-groups), which offer much more flexible
|
||||
configuration than the older [roles](/api/roles-and-permissions) system.
|
||||
|
||||
!!! warn ""
|
||||
|
||||
**Note**: Many group-valued settings are configured to require
|
||||
a single system group for their value via
|
||||
`server_supported_permission_settings`, pending web app UI
|
||||
changes to fully support group-setting values.
|
||||
|
||||
**Changes**: Before Zulip 10.0 (feature level 309), only system
|
||||
groups were permitted values for group-setting values in
|
||||
production environments, regardless of the values in
|
||||
`server_supported_permission_settings`.
|
||||
|
||||
In the API, these settings are represented using a **group-setting
|
||||
value**, which can take two forms:
|
||||
|
||||
- An integer user group ID, which can be either a named user group
|
||||
visible in the UI or a [role-based system group](#system-groups).
|
||||
- An object with fields `direct_member_ids`, containing a list of
|
||||
integer user IDs, and `direct_subgroup_ids`, containing a list of
|
||||
integer group IDs. The setting's value is the union of the
|
||||
identified collection of users and groups.
|
||||
|
||||
Group-setting values in the object form can be thought of as an
|
||||
anonymous group. They function very much like a named user group
|
||||
object, and remove the naming and UI overhead involved in creating
|
||||
a visible user group just to store the value of a single setting.
|
||||
|
||||
The server will canonicalize an object with an empty `direct_member_ids`
|
||||
list and a `direct_subgroup_ids` list that contains just a single group
|
||||
ID to the integer format.
|
||||
|
||||
## System groups
|
||||
|
||||
The Zulip server maintains a collection of system groups that
|
||||
correspond to the users with a given role; this makes it convenient to
|
||||
store concepts like "all administrators" in a group-setting
|
||||
value. These use a special naming convention and can be recognized by
|
||||
the `is_system_group` property on their group object.
|
||||
|
||||
The following system groups are maintained by the Zulip server:
|
||||
|
||||
- `role:internet`: Everyone on the Internet has this permission; this
|
||||
is used to configure the [public access
|
||||
option](/help/public-access-option).
|
||||
- `role:everyone`: All users, including guests.
|
||||
- `role:members`: All users, excluding guests.
|
||||
- `role:fullmembers`: All [full
|
||||
members](https://zulip.com/api/roles-and-permissions#determining-if-a-user-is-a-full-member)
|
||||
of the organization.
|
||||
- `role:moderators`: All users with at least the moderator role.
|
||||
- `role:administrators`: All users with at least the administrator
|
||||
role.
|
||||
- `role:owners`: All users with the owner role.
|
||||
- `role:nobody`: The formal empty group. Used in the API to represent
|
||||
disabling a feature.
|
||||
|
||||
Client UI for setting a permission or displaying a group (when
|
||||
silently mentioned, for example) is encouraged to display system
|
||||
groups using their description, rather than using their `role:}`
|
||||
names, which are chosen to be unique and clear in the API.
|
||||
|
||||
System groups should generally not be displayed in UI for
|
||||
administering an organization's user groups, since they are not
|
||||
directly mutable.
|
||||
|
||||
## Updating group-setting values
|
||||
|
||||
The Zulip API uses a special format for modifying an existing setting
|
||||
using a group-setting value.
|
||||
|
||||
A **group-setting update** is an object with a `new` field and an
|
||||
optional `old` field, each containing a group-setting value. The
|
||||
setting's value will be set to the membership expressed by the `new`
|
||||
field.
|
||||
|
||||
The `old` field expresses the client's understanding of the current
|
||||
value of the setting. If the `old` field is present and does not match
|
||||
the actual current value of the setting, then the request will fail
|
||||
with error code `EXPECTATION_MISMATCH` and no changes will be applied.
|
||||
|
||||
When a user edits the setting in a UI, the resulting API request
|
||||
should generally always include the `old` field, giving the value
|
||||
the list had when the user started editing. This accurately expresses
|
||||
the user's intent, and if two users edit the same list around the
|
||||
same time, it prevents a situation where the second change
|
||||
accidentally reverts the first one without either user noticing.
|
||||
|
||||
Omitting `old` is appropriate where the intent really is a new complete
|
||||
list rather than an edit, for example in an integration that syncs the
|
||||
list from an external source of truth.
|
||||
|
||||
## Permitted values
|
||||
|
||||
Not every possible group-setting value is a valid configuration for a
|
||||
given group-based setting. For example, as a security hardening
|
||||
measure, some administrative permissions should never be exercised by
|
||||
guest users, and the system group for all users, including guests,
|
||||
should not be offered to users as an option for those settings.
|
||||
|
||||
Others have restrictions to only permit system groups due to UI
|
||||
components not yet having been migrated to support a broader set of
|
||||
values. In order to avoid this configuration ending up hardcoded in
|
||||
clients, every permission setting using this framework has an entry in
|
||||
the `server_supported_permission_settings` section of the [`POST
|
||||
/register`](/api/register-queue) response.
|
||||
|
||||
Clients that support mutating group-settings values must parse that
|
||||
part of the `register` payload in order to compute the set of
|
||||
permitted values to offer to the user and avoid server-side errors
|
||||
when trying to save a value.
|
||||
|
||||
Note specifically that the `allow_everyone_group` field, which
|
||||
determines whether the setting can have the value of "all user
|
||||
accounts, including guests" also controls whether guests users can
|
||||
exercise the permission regardless of their membership in the
|
||||
group-setting value.
|
||||
@@ -49,7 +49,7 @@ client = zulip.Client(
|
||||
If you are working on an integration that you plan to share outside
|
||||
your organization, you can get help picking a good name in
|
||||
`#integrations` in the [Zulip development
|
||||
community](https://zulip.com/development-community/).
|
||||
community](https://zulip.com/development-community).
|
||||
|
||||
## Rate-limiting response headers
|
||||
|
||||
|
||||
@@ -10,12 +10,12 @@
|
||||
* [Remove an emoji reaction](/api/remove-reaction)
|
||||
* [Render a message](/api/render-message)
|
||||
* [Fetch a single message](/api/get-message)
|
||||
* [Check if messages match a narrow](/api/check-messages-match-narrow)
|
||||
* [Check if messages match narrow](/api/check-messages-match-narrow)
|
||||
* [Get a message's edit history](/api/get-message-history)
|
||||
* [Update personal message flags](/api/update-message-flags)
|
||||
* [Update personal message flags for narrow](/api/update-message-flags-for-narrow)
|
||||
* [Mark all messages as read](/api/mark-all-as-read)
|
||||
* [Mark messages in a channel as read](/api/mark-stream-as-read)
|
||||
* [Mark messages in a stream as read](/api/mark-stream-as-read)
|
||||
* [Mark messages in a topic as read](/api/mark-topic-as-read)
|
||||
* [Get a message's read receipts](/api/get-read-receipts)
|
||||
|
||||
@@ -32,79 +32,61 @@
|
||||
* [Create drafts](/api/create-drafts)
|
||||
* [Edit a draft](/api/edit-draft)
|
||||
* [Delete a draft](/api/delete-draft)
|
||||
* [Get all saved snippets](/api/get-saved-snippets)
|
||||
* [Create a saved snippet](/api/create-saved-snippet)
|
||||
* [Edit a saved snippet](/api/edit-saved-snippet)
|
||||
* [Delete a saved snippet](/api/delete-saved-snippet)
|
||||
|
||||
#### Channels
|
||||
#### Streams
|
||||
|
||||
* [Get subscribed channels](/api/get-subscriptions)
|
||||
* [Subscribe to a channel](/api/subscribe)
|
||||
* [Unsubscribe from a channel](/api/unsubscribe)
|
||||
* [Get subscribed streams](/api/get-subscriptions)
|
||||
* [Subscribe to a stream](/api/subscribe)
|
||||
* [Unsubscribe from a stream](/api/unsubscribe)
|
||||
* [Get subscription status](/api/get-subscription-status)
|
||||
* [Get channel subscribers](/api/get-subscribers)
|
||||
* [Get all subscribers](/api/get-subscribers)
|
||||
* [Update subscription settings](/api/update-subscription-settings)
|
||||
* [Get all channels](/api/get-streams)
|
||||
* [Get a channel by ID](/api/get-stream-by-id)
|
||||
* [Get channel ID](/api/get-stream-id)
|
||||
* [Create a channel](/api/create-stream)
|
||||
* [Update a channel](/api/update-stream)
|
||||
* [Archive a channel](/api/archive-stream)
|
||||
* [Get channel's email address](/api/get-stream-email-address)
|
||||
* [Get topics in a channel](/api/get-stream-topics)
|
||||
* [Get all streams](/api/get-streams)
|
||||
* [Get a stream by ID](/api/get-stream-by-id)
|
||||
* [Get stream ID](/api/get-stream-id)
|
||||
* [Create a stream](/api/create-stream)
|
||||
* [Update a stream](/api/update-stream)
|
||||
* [Archive a stream](/api/archive-stream)
|
||||
* [Get topics in a stream](/api/get-stream-topics)
|
||||
* [Topic muting](/api/mute-topic)
|
||||
* [Update personal preferences for a topic](/api/update-user-topic)
|
||||
* [Delete a topic](/api/delete-topic)
|
||||
* [Add a default channel](/api/add-default-stream)
|
||||
* [Remove a default channel](/api/remove-default-stream)
|
||||
* [Add a default stream](/api/add-default-stream)
|
||||
* [Remove a default stream](/api/remove-default-stream)
|
||||
|
||||
#### Users
|
||||
|
||||
* [Get all users](/api/get-users)
|
||||
* [Get own user](/api/get-own-user)
|
||||
* [Get a user](/api/get-user)
|
||||
* [Get a user by email](/api/get-user-by-email)
|
||||
* [Get own user](/api/get-own-user)
|
||||
* [Get all users](/api/get-users)
|
||||
* [Create a user](/api/create-user)
|
||||
* [Update a user](/api/update-user)
|
||||
* [Update a user by email](/api/update-user-by-email)
|
||||
* [Deactivate a user](/api/deactivate-user)
|
||||
* [Deactivate own user](/api/deactivate-own-user)
|
||||
* [Reactivate a user](/api/reactivate-user)
|
||||
* [Get a user's status](/api/get-user-status)
|
||||
* [Update your status](/api/update-status)
|
||||
* [Create a user](/api/create-user)
|
||||
* [Deactivate a user](/api/deactivate-user)
|
||||
* [Reactivate a user](/api/reactivate-user)
|
||||
* [Deactivate own user](/api/deactivate-own-user)
|
||||
* [Set "typing" status](/api/set-typing-status)
|
||||
* [Set "typing" status for message editing](/api/set-typing-status-for-message-edit)
|
||||
* [Get a user's presence](/api/get-user-presence)
|
||||
* [Get user presence](/api/get-user-presence)
|
||||
* [Get presence of all users](/api/get-presence)
|
||||
* [Update your presence](/api/update-presence)
|
||||
* [Get attachments](/api/get-attachments)
|
||||
* [Delete an attachment](/api/remove-attachment)
|
||||
* [Update settings](/api/update-settings)
|
||||
* [Get user groups](/api/get-user-groups)
|
||||
* [Create a user group](/api/create-user-group)
|
||||
* [Update a user group](/api/update-user-group)
|
||||
* [Deactivate a user group](/api/deactivate-user-group)
|
||||
* [Delete a user group](/api/remove-user-group)
|
||||
* [Update user group members](/api/update-user-group-members)
|
||||
* [Update subgroups of a user group](/api/update-user-group-subgroups)
|
||||
* [Update user group subgroups](/api/update-user-group-subgroups)
|
||||
* [Get user group membership status](/api/get-is-user-group-member)
|
||||
* [Get user group members](/api/get-user-group-members)
|
||||
* [Get subgroups of a user group](/api/get-user-group-subgroups)
|
||||
* [Get subgroups of user group](/api/get-user-group-subgroups)
|
||||
* [Mute a user](/api/mute-user)
|
||||
* [Unmute a user](/api/unmute-user)
|
||||
* [Get all alert words](/api/get-alert-words)
|
||||
* [Add alert words](/api/add-alert-words)
|
||||
* [Remove alert words](/api/remove-alert-words)
|
||||
|
||||
#### Invitations
|
||||
|
||||
* [Get all invitations](/api/get-invites)
|
||||
* [Send invitations](/api/send-invites)
|
||||
* [Create a reusable invitation link](/api/create-invite-link)
|
||||
* [Resend an email invitation](/api/resend-email-invite)
|
||||
* [Revoke an email invitation](/api/revoke-email-invite)
|
||||
* [Revoke a reusable invitation link](/api/revoke-invite-link)
|
||||
|
||||
#### Server & organizations
|
||||
|
||||
* [Get server settings](/api/get-server-settings)
|
||||
@@ -122,9 +104,6 @@
|
||||
* [Reorder custom profile fields](/api/reorder-custom-profile-fields)
|
||||
* [Create a custom profile field](/api/create-custom-profile-field)
|
||||
* [Update realm-level defaults of user settings](/api/update-realm-user-settings-defaults)
|
||||
* [Get all data exports](/api/get-realm-exports)
|
||||
* [Create a data export](/api/export-realm)
|
||||
* [Get data export consent state](/api/get-realm-export-consents)
|
||||
|
||||
#### Real-time events
|
||||
|
||||
@@ -137,9 +116,3 @@
|
||||
|
||||
* [Fetch an API key (production)](/api/fetch-api-key)
|
||||
* [Fetch an API key (development only)](/api/dev-fetch-api-key)
|
||||
* [Send a test notification to mobile device(s)](/api/test-notify)
|
||||
* [Add an APNs device token](/api/add-apns-token)
|
||||
* [Remove an APNs device token](/api/remove-apns-token)
|
||||
* [Add an FCM registration token](/api/add-fcm-token)
|
||||
* [Remove an FCM registration token](/api/remove-fcm-token)
|
||||
* [Create BigBlueButton video call](/api/create-big-blue-button-video-call)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Incoming webhook integrations
|
||||
|
||||
An incoming webhook allows a third-party service to push data to Zulip when
|
||||
something happens. There are several ways to set up an incoming webhook in
|
||||
something happens. There's several ways to do an incoming webhook in
|
||||
Zulip:
|
||||
|
||||
* Use our [REST API](/api/rest) endpoint for [sending
|
||||
@@ -11,9 +11,9 @@ Zulip:
|
||||
* Use one of our supported [integration
|
||||
frameworks](/integrations/meta-integration), such as the
|
||||
[Slack-compatible incoming webhook](/integrations/doc/slack_incoming),
|
||||
[Zapier integration](/integrations/doc/zapier), or
|
||||
[Zapier integration](/integrations/docs/zapier), or
|
||||
[IFTTT integration](/integrations/doc/ifttt).
|
||||
* Implementing an incoming webhook integration (detailed on this page),
|
||||
* Adding an incoming webhook integration (detailed on this page),
|
||||
where all the logic for formatting the Zulip messages lives in the
|
||||
Zulip server. This is how most of [Zulip's official
|
||||
integrations](/integrations/) work, because they enable Zulip to
|
||||
@@ -22,7 +22,7 @@ Zulip:
|
||||
Zulip).
|
||||
|
||||
In an incoming webhook integration, the third-party service's
|
||||
"outgoing webhook" feature sends an `HTTP POST` to a special URL when
|
||||
"outgoing webhook" feature sends an `HTTP POST`s to a special URL when
|
||||
it has something for you, and then the Zulip "incoming webhook"
|
||||
integration handles that incoming data to format and send a message in
|
||||
Zulip.
|
||||
@@ -40,18 +40,18 @@ process.
|
||||
<https://webhook.site/>, or a similar site to capture an example
|
||||
webhook payload from the third-party service. Create a
|
||||
`zerver/webhooks/<mywebhook>/fixtures/` directory, and add the
|
||||
captured JSON payload as a test fixture.
|
||||
captured payload as a test fixture.
|
||||
|
||||
* Create an `Integration` object, and add it to the `WEBHOOK_INTEGRATIONS`
|
||||
list in `zerver/lib/integrations.py`. Search for `WebhookIntegration` in that
|
||||
file to find an existing one to copy.
|
||||
* Create an `Integration` object, and add it to `WEBHOOK_INTEGRATIONS` in
|
||||
`zerver/lib/integrations.py`. Search for `webhook` in that file to find an
|
||||
existing one to copy.
|
||||
|
||||
* Write a draft webhook handler in `zerver/webhooks/<mywebhook>/view.py`. There
|
||||
are a lot of examples in the `zerver/webhooks/` directory that you can copy.
|
||||
We recommend templating from a short one, like `zendesk`.
|
||||
* Write a draft webhook handler under `zerver/webhooks/`. There are a lot of
|
||||
examples in that directory that you can copy. We recommend templating off
|
||||
a short one, like `zendesk`.
|
||||
|
||||
* Write a test for your fixture in `zerver/webhooks/<mywebhook>/tests.py`.
|
||||
Run the test for your integration like this:
|
||||
* Add a test for your fixture at `zerver/webhooks/<mywebhook>/tests.py`.
|
||||
Run the tests for your integration like this:
|
||||
|
||||
```
|
||||
tools/test-backend zerver/webhooks/<mywebhook>/
|
||||
@@ -64,10 +64,10 @@ process.
|
||||
service will make, and add tests for them; usually this part of the
|
||||
process is pretty fast.
|
||||
|
||||
* Document the integration in `zerver/webhooks/<mywebhook>/doc.md`(required for
|
||||
getting it merged into Zulip). You can use existing documentation, like
|
||||
[this one](https://raw.githubusercontent.com/zulip/zulip/main/zerver/webhooks/github/doc.md),
|
||||
as a template. This should not take more than 15 minutes, even if you don't speak English
|
||||
* Document the integration (required for getting it merged into Zulip). You
|
||||
can template off an existing guide, like
|
||||
[this one](https://raw.githubusercontent.com/zulip/zulip/main/zerver/webhooks/github/doc.md).
|
||||
This should not take more than 15 minutes, even if you don't speak English
|
||||
as a first language (we'll clean up the text before merging).
|
||||
|
||||
## Hello world walkthrough
|
||||
@@ -84,9 +84,9 @@ below are for a webhook named `MyWebHook`.
|
||||
|
||||
* `zerver/webhooks/mywebhook/__init__.py`: Empty file that is an obligatory
|
||||
part of every python package. Remember to `git add` it.
|
||||
* `zerver/webhooks/mywebhook/view.py`: The main webhook integration function,
|
||||
called `api_mywebhook_webhook`, along with any necessary helper functions.
|
||||
* `zerver/webhooks/mywebhook/fixtures/message_type.json`: Sample JSON payload data
|
||||
* `zerver/webhooks/mywebhook/view.py`: The main webhook integration function
|
||||
as well as any needed helper functions.
|
||||
* `zerver/webhooks/mywebhook/fixtures/messagetype.json`: Sample json payload data
|
||||
used by tests. Add one fixture file per type of message supported by your
|
||||
integration.
|
||||
* `zerver/webhooks/mywebhook/tests.py`: Tests for your webhook.
|
||||
@@ -95,9 +95,9 @@ below are for a webhook named `MyWebHook`.
|
||||
* `static/images/integrations/logos/mywebhook.svg`: A square logo for the
|
||||
platform/server/product you are integrating. Used on the documentation
|
||||
pages as well as the sender's avatar for messages sent by the integration.
|
||||
* `static/images/integrations/mywebhook/001.png`: A screenshot of a message
|
||||
* `static/images/integrations/mywebhook/001.svg`: A screenshot of a message
|
||||
sent by the integration, used on the documentation page. This can be
|
||||
generated by running `tools/screenshots/generate-integration-docs-screenshot --integration mywebhook`.
|
||||
generated by running `tools/generate-integration-docs-screenshot --integration mywebhook`.
|
||||
* `static/images/integrations/bot_avatars/mywebhook.png`: A square logo for the
|
||||
platform/server/product you are integrating which is used to create the avatar
|
||||
for generating screenshots with. This can be generated automatically from
|
||||
@@ -113,7 +113,7 @@ below are for a webhook named `MyWebHook`.
|
||||
`zerver/webhooks/mywebhook/view.py`. Also add your integration to
|
||||
`DOC_SCREENSHOT_CONFIG`. This will allow you to automatically generate
|
||||
a screenshot for the documentation by running
|
||||
`tools/screenshots/generate-integration-docs-screenshot --integration mywebhook`.
|
||||
`tools/generate-integration-docs-screenshot --integration mywebhook`.
|
||||
|
||||
## Common Helpers
|
||||
|
||||
@@ -125,19 +125,19 @@ below are for a webhook named `MyWebHook`.
|
||||
## General advice
|
||||
|
||||
* Consider using our Zulip markup to make the output from your
|
||||
integration especially attractive or useful (e.g., emoji, Markdown
|
||||
emphasis, or @-mentions).
|
||||
integration especially attractive or useful (e.g. emoji, Markdown
|
||||
emphasis or @-mentions).
|
||||
|
||||
* Use topics effectively to ensure sequential messages about the same
|
||||
thing are threaded together; this makes for much better consumption
|
||||
by users. E.g., for a bug tracker integration, put the bug number in
|
||||
by users. E.g. for a bug tracker integration, put the bug number in
|
||||
the topic for all messages; for an integration like Nagios, put the
|
||||
service in the topic.
|
||||
|
||||
* Integrations that don't match a team's workflow can often be
|
||||
uselessly spammy. Give careful thought to providing options for
|
||||
triggering Zulip messages only for certain message types, certain
|
||||
projects, or sending different messages to different channels/topics,
|
||||
projects, or sending different messages to different streams/topics,
|
||||
to make it easy for teams to configure the integration to support
|
||||
their workflow.
|
||||
|
||||
@@ -155,69 +155,3 @@ below are for a webhook named `MyWebHook`.
|
||||
testing with live data from the service you're integrating and can help you
|
||||
spot why something isn't working or if the service is using custom HTTP
|
||||
headers.
|
||||
|
||||
## URL specification
|
||||
|
||||
The base URL for an incoming webhook integration bot, where
|
||||
`INTEGRATION_NAME` is the name of the specific webhook integration and
|
||||
`API_KEY` is the API key of the bot created by the user for the
|
||||
integration, is:
|
||||
|
||||
```
|
||||
{{ api_url }}/v1/external/INTEGRATION_NAME?api_key=API_KEY
|
||||
```
|
||||
|
||||
The list of existing webhook integrations can be found by browsing the
|
||||
[Integrations documentation](/integrations/) or in
|
||||
`zerver/lib/integrations.py` at `WEBHOOK_INTEGRATIONS`.
|
||||
|
||||
Parameters accepted in the URL include:
|
||||
|
||||
### api_key *(required)*
|
||||
|
||||
The API key of the bot created by the user for the integration. To get a
|
||||
bot's API key, see the [API keys](/api/api-keys) documentation.
|
||||
|
||||
### stream
|
||||
|
||||
The channel for the integration to send notifications to. Can be either
|
||||
the channel ID or the [URL-encoded][url-encoder] channel name. By default
|
||||
the integration will send direct messages to the bot's owner.
|
||||
|
||||
!!! tip ""
|
||||
|
||||
A channel ID can be found when [browsing channels][browse-channels]
|
||||
in the web or desktop apps.
|
||||
|
||||
### topic
|
||||
|
||||
The topic in the specified channel for the integration to send
|
||||
notifications to. The topic should also be [URL-encoded][url-encoder].
|
||||
By default the integration will have a topic configured for channel
|
||||
messages.
|
||||
|
||||
### only_events, exclude_events
|
||||
|
||||
Some incoming webhook integrations support these parameters to filter
|
||||
which events will trigger a notification. You can append either
|
||||
`&only_events=["event_a","event_b"]` or
|
||||
`&exclude_events=["event_a","event_b"]` (or both, with different events)
|
||||
to the URL, with an arbitrary number of supported events.
|
||||
|
||||
You can use UNIX-style wildcards like `*` to include multiple events.
|
||||
For example, `test*` matches every event that starts with `test`.
|
||||
|
||||
!!! tip ""
|
||||
|
||||
For a list of supported events, see a specific [integration's
|
||||
documentation](/integrations) page.
|
||||
|
||||
[browse-channels]: /help/introduction-to-channels#browse-and-subscribe-to-channels
|
||||
[add-bot]: /help/add-a-bot-or-integration
|
||||
[url-encoder]: https://www.urlencoder.org/
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Integrations overview](/api/integrations-overview)
|
||||
* [Incoming webhook walkthrough](/api/incoming-webhooks-walkthrough)
|
||||
* [Non-webhook integrations](/api/non-webhook-integrations)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
Below, we explain each part of a simple incoming webhook integration,
|
||||
called **Hello World**. This integration sends a "hello" message to the `test`
|
||||
channel and includes a link to the Wikipedia article of the day, which
|
||||
stream and includes a link to the Wikipedia article of the day, which
|
||||
it formats from JSON data it receives in the HTTP request.
|
||||
|
||||
Use this walkthrough to learn how to write your first webhook
|
||||
@@ -69,10 +69,10 @@ integration uses.
|
||||
## Step 1: Initialize your webhook python package
|
||||
|
||||
In the `zerver/webhooks/` directory, create new subdirectory that will
|
||||
contain all of the corresponding code. In our example, it will be
|
||||
contain all of corresponding code. In our example it will be
|
||||
`helloworld`. The new directory will be a python package, so you have
|
||||
to create an empty `__init__.py` file in that directory via, for
|
||||
example, `touch zerver/webhooks/helloworld/__init__.py`.
|
||||
to create an empty `__init__.py` file in that directory via e.g.
|
||||
`touch zerver/webhooks/helloworld/__init__.py`.
|
||||
|
||||
## Step 2: Create main webhook code
|
||||
|
||||
@@ -82,24 +82,25 @@ python file, `zerver/webhooks/mywebhook/view.py`.
|
||||
The Hello World integration is in `zerver/webhooks/helloworld/view.py`:
|
||||
|
||||
```python
|
||||
from typing import Any, Dict, Sequence
|
||||
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
|
||||
from zerver.decorator import webhook_view
|
||||
from zerver.lib.request import REQ, has_request_variables
|
||||
from zerver.lib.response import json_success
|
||||
from zerver.lib.typed_endpoint import JsonBodyPayload, typed_endpoint
|
||||
from zerver.lib.validator import WildValue, check_string
|
||||
from zerver.lib.webhooks.common import check_send_webhook_message
|
||||
from zerver.models import UserProfile
|
||||
|
||||
|
||||
@webhook_view("HelloWorld")
|
||||
@typed_endpoint
|
||||
@has_request_variables
|
||||
def api_helloworld_webhook(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
*,
|
||||
payload: JsonBodyPayload[WildValue],
|
||||
payload: Dict[str, Sequence[Dict[str, Any]]] = REQ(argument_type="body"),
|
||||
) -> HttpResponse:
|
||||
|
||||
# construct the body of the message
|
||||
body = "Hello! I am happy to be here! :smile:"
|
||||
|
||||
@@ -107,10 +108,7 @@ def api_helloworld_webhook(
|
||||
body_template = (
|
||||
"\nThe Wikipedia featured article for today is **[{featured_title}]({featured_url})**"
|
||||
)
|
||||
body += body_template.format(
|
||||
featured_title=payload["featured_title"].tame(check_string),
|
||||
featured_url=payload["featured_url"].tame(check_string),
|
||||
)
|
||||
body += body_template.format(**payload)
|
||||
|
||||
topic = "Hello World"
|
||||
|
||||
@@ -122,13 +120,14 @@ def api_helloworld_webhook(
|
||||
|
||||
The above code imports the required functions and defines the main webhook
|
||||
function `api_helloworld_webhook`, decorating it with `webhook_view` and
|
||||
`typed_endpoint`. The `typed_endpoint` decorator allows you to
|
||||
access request variables with `JsonBodyPayload()`. You can find more about `JsonBodyPayload` and request variables in [Writing views](
|
||||
`has_request_variables`. The `has_request_variables` decorator allows you to
|
||||
access request variables with `REQ()`. You can find more about `REQ` and request
|
||||
variables in [Writing views](
|
||||
https://zulip.readthedocs.io/en/latest/tutorials/writing-views.html#request-variables).
|
||||
|
||||
You must pass the name of your integration to the
|
||||
`webhook_view` decorator; that name will be used to
|
||||
describe your integration in Zulip's analytics (e.g., the `/stats`
|
||||
describe your integration in Zulip's analytics (e.g. the `/stats`
|
||||
page). Here we have used `HelloWorld`. To be consistent with other
|
||||
integrations, use the name of the product you are integrating in camel
|
||||
case, spelled as the product spells its own name (except always first
|
||||
@@ -144,14 +143,14 @@ You should name your webhook function as such
|
||||
integration and is always lower-case.
|
||||
|
||||
At minimum, the webhook function must accept `request` (Django
|
||||
[HttpRequest](https://docs.djangoproject.com/en/5.0/ref/request-response/#django.http.HttpRequest)
|
||||
[HttpRequest](https://docs.djangoproject.com/en/3.2/ref/request-response/#django.http.HttpRequest)
|
||||
object), and `user_profile` (Zulip's user object). You may also want to
|
||||
define additional parameters using the `typed_endpoint` decorator.
|
||||
define additional parameters using the `REQ` object.
|
||||
|
||||
In the example above, we have defined `payload` which is populated
|
||||
from the body of the http request, `stream` with a default of `test`
|
||||
(available by default in the Zulip development environment), and
|
||||
`topic` with a default of `Hello World`. If your webhook uses a custom channel,
|
||||
`topic` with a default of `Hello World`. If your webhook uses a custom stream,
|
||||
it must exist before a message can be created in it. (See
|
||||
[Step 4: Create automated tests](#step-5-create-automated-tests) for how to handle this in tests.)
|
||||
|
||||
@@ -170,7 +169,7 @@ link to the Wikipedia article of the day as provided by the json payload.
|
||||
Then we send a message with `check_send_webhook_message`, which will
|
||||
validate the message and do the following:
|
||||
|
||||
* Send a public (channel) message if the `stream` query parameter is
|
||||
* Send a public (stream) message if the `stream` query parameter is
|
||||
specified in the webhook URL.
|
||||
* If the `stream` query parameter isn't specified, it will send a direct
|
||||
message to the owner of the webhook bot.
|
||||
@@ -192,7 +191,7 @@ WEBHOOK_INTEGRATIONS: List[WebhookIntegration] = [
|
||||
And you'll find the entry for Hello World:
|
||||
|
||||
```python
|
||||
WebhookIntegration("helloworld", ["misc"], display_name="Hello World"),
|
||||
WebhookIntegration('helloworld', ['misc'], display_name='Hello World'),
|
||||
```
|
||||
|
||||
This tells the Zulip API to call the `api_helloworld_webhook` function in
|
||||
@@ -200,7 +199,7 @@ This tells the Zulip API to call the `api_helloworld_webhook` function in
|
||||
`/api/v1/external/helloworld`.
|
||||
|
||||
This line also tells Zulip to generate an entry for Hello World on the Zulip
|
||||
integrations page using `static/images/integrations/logos/helloworld.svg` as its
|
||||
integrations page using `static/images/integrations/logos/helloworld.png` as its
|
||||
icon. The second positional argument defines a list of categories for the
|
||||
integration.
|
||||
|
||||
@@ -259,7 +258,7 @@ After running the above command, you should see something similar to:
|
||||
Using `manage.py` from within the Zulip development environment:
|
||||
|
||||
```console
|
||||
(zulip-server) vagrant@vagrant:/srv/zulip$
|
||||
(zulip-py3-venv) vagrant@vagrant:/srv/zulip$
|
||||
./manage.py send_webhook_fixture_message \
|
||||
--fixture=zerver/webhooks/helloworld/fixtures/hello.json \
|
||||
'--url=http://localhost:9991/api/v1/external/helloworld?api_key=<api_key>'
|
||||
@@ -289,19 +288,15 @@ the [management commands][management-commands] documentation.
|
||||
### Integrations Dev Panel
|
||||
This is the GUI tool.
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. Run `./tools/run-dev` then go to http://localhost:9991/devtools/integrations/.
|
||||
|
||||
1. Set the following mandatory fields:
|
||||
2. Set the following mandatory fields:
|
||||
**Bot** - Any incoming webhook bot.
|
||||
**Integration** - One of the integrations.
|
||||
**Fixture** - Though not mandatory, it's recommended that you select one and then tweak it if necessary.
|
||||
The remaining fields are optional, and the URL will automatically be generated.
|
||||
|
||||
1. Click **Send**!
|
||||
|
||||
{end_tabs}
|
||||
3. Click **Send**!
|
||||
|
||||
By opening Zulip in one tab and then this tool in another, you can quickly tweak
|
||||
your code and send sample messages for many different test fixtures.
|
||||
@@ -328,34 +323,30 @@ class `HelloWorldHookTests`:
|
||||
|
||||
```python
|
||||
class HelloWorldHookTests(WebhookTestCase):
|
||||
CHANNEL_NAME = "test"
|
||||
URL_TEMPLATE = "/api/v1/external/helloworld?&api_key={api_key}&stream={stream}"
|
||||
DIRECT_MESSAGE_URL_TEMPLATE = "/api/v1/external/helloworld?&api_key={api_key}"
|
||||
WEBHOOK_DIR_NAME = "helloworld"
|
||||
STREAM_NAME = 'test'
|
||||
URL_TEMPLATE = "/api/v1/external/helloworld?&api_key={api_key}"
|
||||
WEBHOOK_DIR_NAME = 'helloworld'
|
||||
|
||||
# Note: Include a test function per each distinct message condition your integration supports
|
||||
def test_hello_message(self) -> None:
|
||||
expected_topic = "Hello World"
|
||||
expected_message = "Hello! I am happy to be here! :smile:\nThe Wikipedia featured article for today is **[Marilyn Monroe](https://en.wikipedia.org/wiki/Marilyn_Monroe)**"
|
||||
expected_topic = "Hello World";
|
||||
expected_message = "Hello! I am happy to be here! :smile: \nThe Wikipedia featured article for today is **[Marilyn Monroe](https://en.wikipedia.org/wiki/Marilyn_Monroe)**";
|
||||
|
||||
# use fixture named helloworld_hello
|
||||
self.check_webhook(
|
||||
"hello",
|
||||
expected_topic,
|
||||
expected_message,
|
||||
content_type="application/x-www-form-urlencoded",
|
||||
)
|
||||
self.check_webhook('hello', expected_topic, expected_message,
|
||||
content_type="application/x-www-form-urlencoded")
|
||||
|
||||
```
|
||||
|
||||
In the above example, `CHANNEL_NAME`, `URL_TEMPLATE`, and `WEBHOOK_DIR_NAME` refer
|
||||
In the above example, `STREAM_NAME`, `URL_TEMPLATE`, and `WEBHOOK_DIR_NAME` refer
|
||||
to class attributes from the base class, `WebhookTestCase`. These are needed by
|
||||
the helper function `check_webhook` to determine how to execute
|
||||
your test. `CHANNEL_NAME` should be set to your default channel. If it doesn't exist,
|
||||
your test. `STREAM_NAME` should be set to your default stream. If it doesn't exist,
|
||||
`check_webhook` will create it while executing your test.
|
||||
|
||||
If your test expects a channel name from a test fixture, the value in the fixture
|
||||
and the value you set for `CHANNEL_NAME` must match. The test helpers use `CHANNEL_NAME`
|
||||
to create the destination channel, and then create the message to send using the
|
||||
If your test expects a stream name from a test fixture, the value in the fixture
|
||||
and the value you set for `STREAM_NAME` must match. The test helpers use `STREAM_NAME`
|
||||
to create the destination stream, and then create the message to send using the
|
||||
value from the fixture. If these don't match, the test will fail.
|
||||
|
||||
`URL_TEMPLATE` defines how the test runner will call your incoming webhook, in the same way
|
||||
@@ -372,16 +363,12 @@ class called something like `test_goodbye_message`:
|
||||
|
||||
```python
|
||||
def test_goodbye_message(self) -> None:
|
||||
expected_topic = "Hello World"
|
||||
expected_message = "Hello! I am happy to be here! :smile:\nThe Wikipedia featured article for today is **[Goodbye](https://en.wikipedia.org/wiki/Goodbye)**"
|
||||
expected_topic = "Hello World";
|
||||
expected_message = "Hello! I am happy to be here! :smile:\nThe Wikipedia featured article for today is **[Goodbye](https://en.wikipedia.org/wiki/Goodbye)**";
|
||||
|
||||
# use fixture named helloworld_goodbye
|
||||
self.check_webhook(
|
||||
"goodbye",
|
||||
expected_topic,
|
||||
expected_message,
|
||||
content_type="application/x-www-form-urlencoded",
|
||||
)
|
||||
self.check_webhook('goodbye', expected_topic, expected_message,
|
||||
content_type="application/x-www-form-urlencoded")
|
||||
```
|
||||
|
||||
As well as a new fixture `goodbye.json` in
|
||||
@@ -402,7 +389,7 @@ Once you have written some tests, you can run just these new tests from within
|
||||
the Zulip development environment with this command:
|
||||
|
||||
```console
|
||||
(zulip-server) vagrant@vagrant:/srv/zulip$
|
||||
(zulip-py3-venv) vagrant@vagrant:/srv/zulip$
|
||||
./tools/test-backend zerver/webhooks/helloworld
|
||||
```
|
||||
|
||||
@@ -438,20 +425,19 @@ Second, you need to write the actual documentation content in
|
||||
```md
|
||||
Learn how Zulip integrations work with this simple Hello World example!
|
||||
|
||||
1. The Hello World webhook will use the `test` channel, which is created
|
||||
1. The Hello World webhook will use the `test` stream, which is created
|
||||
by default in the Zulip development environment. If you are running
|
||||
Zulip in production, you should make sure that this channel exists.
|
||||
Zulip in production, you should make sure that this stream exists.
|
||||
|
||||
1. {!create-an-incoming-webhook.md!}
|
||||
1. {!create-bot-construct-url.md!}
|
||||
|
||||
1. {!generate-webhook-url-basic.md!}
|
||||
|
||||
1. To trigger a notification using this example webhook, you can use
|
||||
`send_webhook_fixture_message` from a [Zulip development
|
||||
environment](https://zulip.readthedocs.io/en/latest/development/overview.html):
|
||||
|
||||
```
|
||||
(zulip-server) vagrant@vagrant:/srv/zulip$
|
||||
(zulip-py3-venv) vagrant@vagrant:/srv/zulip$
|
||||
./manage.py send_webhook_fixture_message \
|
||||
> --fixture=zerver/tests/fixtures/helloworld/hello.json \
|
||||
> '--url=http://localhost:9991/api/v1/external/helloworld?api_key=abcdefgh&stream=stream%20name;'
|
||||
@@ -469,7 +455,7 @@ Learn how Zulip integrations work with this simple Hello World example!
|
||||
|
||||
```
|
||||
|
||||
`{!create-an-incoming-webhook.md!}` and `{!congrats.md!}` are examples of
|
||||
`{!create-bot-construct-url.md!}` and `{!congrats.md!}` are examples of
|
||||
a Markdown macro. Zulip has a macro-based Markdown/Jinja2 framework that
|
||||
includes macros for common instructions in Zulip's webhooks/integrations
|
||||
documentation.
|
||||
@@ -484,24 +470,27 @@ screenshot. Mostly you should plan on templating off an existing guide, like
|
||||
|
||||
## Step 7: Preparing a pull request to zulip/zulip
|
||||
|
||||
When you have finished your webhook integration, follow these guidelines before
|
||||
pushing the code to your fork and submitting a pull request to zulip/zulip:
|
||||
When you have finished your webhook integration and are ready for it to be
|
||||
available in the Zulip product, follow these steps to prepare your pull
|
||||
request:
|
||||
|
||||
- Run tests including linters and ensure you have addressed any issues they
|
||||
report. See [Testing](https://zulip.readthedocs.io/en/latest/testing/testing.html)
|
||||
and [Linters](https://zulip.readthedocs.io/en/latest/testing/linters.html) for details.
|
||||
- Read through [Code styles and conventions](
|
||||
https://zulip.readthedocs.io/en/latest/contributing/code-style.html) and take a look
|
||||
through your code to double-check that you've followed Zulip's guidelines.
|
||||
- Take a look at your Git history to ensure your commits have been clear and
|
||||
logical (see [Commit discipline](
|
||||
https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html) for tips). If not,
|
||||
consider revising them with `git rebase --interactive`. For most incoming webhooks,
|
||||
you'll want to squash your changes into a single commit and include a good,
|
||||
clear commit message.
|
||||
1. Run tests including linters and ensure you have addressed any issues they
|
||||
report. See [Testing](https://zulip.readthedocs.io/en/latest/testing/testing.html)
|
||||
and [Linters](https://zulip.readthedocs.io/en/latest/testing/linters.html) for details.
|
||||
2. Read through [Code styles and conventions](
|
||||
https://zulip.readthedocs.io/en/latest/contributing/code-style.html) and take a look
|
||||
through your code to double-check that you've followed Zulip's guidelines.
|
||||
3. Take a look at your Git history to ensure your commits have been clear and
|
||||
logical (see [Commit discipline](
|
||||
https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html) for tips). If not,
|
||||
consider revising them with `git rebase --interactive`. For most incoming webhooks,
|
||||
you'll want to squash your changes into a single commit and include a good,
|
||||
clear commit message.
|
||||
4. Push code to your fork.
|
||||
5. Submit a pull request to zulip/zulip.
|
||||
|
||||
If you would like feedback on your integration as you go, feel free to post a
|
||||
message on the [public Zulip instance](https://chat.zulip.org/#narrow/channel/integrations).
|
||||
message on the [public Zulip instance](https://chat.zulip.org/#narrow/stream/bots).
|
||||
You can also create a [draft pull request](
|
||||
https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests#draft-pull-requests) while you
|
||||
are still working on your integration. See the
|
||||
@@ -531,11 +520,11 @@ def test_unknown_action_no_data(self) -> None:
|
||||
# we are testing. The value of result is the error message the webhook should
|
||||
# return if no params are sent. The fixture for this test is an empty file.
|
||||
|
||||
# subscribe to the target channel
|
||||
self.subscribe(self.test_user, self.CHANNEL_NAME)
|
||||
# subscribe to the target stream
|
||||
self.subscribe(self.test_user, self.STREAM_NAME)
|
||||
|
||||
# post to the webhook url
|
||||
post_params = {'stream_name': self.CHANNEL_NAME,
|
||||
post_params = {'stream_name': self.STREAM_NAME,
|
||||
'content_type': 'application/x-www-form-urlencoded'}
|
||||
result = self.client_post(self.url, 'unknown_action', **post_params)
|
||||
|
||||
@@ -549,8 +538,8 @@ the webhook returns an error, the test fails. Instead, explicitly do the
|
||||
setup it would have done, and check the result yourself.
|
||||
|
||||
Here, `subscribe_to_stream` is a test helper that uses `TEST_USER_EMAIL` and
|
||||
`CHANNEL_NAME` (attributes from the base class) to register the user to receive
|
||||
messages in the given channel. If the channel doesn't exist, it creates it.
|
||||
`STREAM_NAME` (attributes from the base class) to register the user to receive
|
||||
messages in the given stream. If the stream doesn't exist, it creates it.
|
||||
|
||||
`client_post`, another helper, performs the HTTP POST that calls the incoming
|
||||
webhook. As long as `self.url` is correct, you don't need to construct the webhook
|
||||
@@ -569,11 +558,10 @@ For example, here is the definition of a webhook function that gets both `stream
|
||||
and `topic` from the query parameters:
|
||||
|
||||
```python
|
||||
@typed_endpoint
|
||||
def api_querytest_webhook(request: HttpRequest, user_profile: UserProfile,
|
||||
payload: Annotated[str, ApiParamConfig(argument_type_is_body=True)],
|
||||
stream: str = "test",
|
||||
topic: str = "Default Alert"):
|
||||
payload: str=REQ(argument_type='body'),
|
||||
stream: str=REQ(default='test'),
|
||||
topic: str=REQ(default='Default Alert')):
|
||||
```
|
||||
|
||||
In actual use, you might configure the 3rd party service to call your Zulip
|
||||
@@ -584,7 +572,7 @@ http://myhost/api/v1/external/querytest?api_key=abcdefgh&stream=alerts&topic=que
|
||||
```
|
||||
|
||||
It provides values for `stream` and `topic`, and the webhook can get those
|
||||
using `@typed_endpoint` without any special handling. How does this work in a test?
|
||||
using `REQ` without any special handling. How does this work in a test?
|
||||
|
||||
The new attribute `TOPIC` exists only in our class so far. In order to
|
||||
construct a URL with a query parameter for `topic`, you can pass the
|
||||
@@ -593,7 +581,7 @@ attribute `TOPIC` as a keyword argument to `build_webhook_url`, like so:
|
||||
```python
|
||||
class QuerytestHookTests(WebhookTestCase):
|
||||
|
||||
CHANNEL_NAME = 'querytest'
|
||||
STREAM_NAME = 'querytest'
|
||||
TOPIC = "Default topic"
|
||||
URL_TEMPLATE = "/api/v1/external/querytest?api_key={api_key}&stream={stream}"
|
||||
FIXTURE_DIR_NAME = 'querytest'
|
||||
@@ -654,8 +642,3 @@ with a string describing the unsupported event type, like so:
|
||||
```
|
||||
raise UnsupportedWebhookEventTypeError(event_type)
|
||||
```
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Integrations overview](/api/integrations-overview)
|
||||
* [Incoming webhook integrations](/api/incoming-webhooks-overview)
|
||||
|
||||
@@ -40,7 +40,3 @@ No download required!
|
||||
|
||||
See also [user-contributed client libraries](/api/client-libraries)
|
||||
for many other languages.
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Configuring the Python bindings](/api/configuring-python-bindings)
|
||||
|
||||
@@ -1,106 +1,47 @@
|
||||
# Integrations overview
|
||||
|
||||
Integrations let you connect Zulip with other products. For example, you can get
|
||||
notification messages in Zulip when an issue in your tracker is updated, or for
|
||||
alerts from your monitoring tool.
|
||||
Integrations allow you to send data from other products into or out of
|
||||
Zulip. Zulip natively integrates with dozens of products, and with hundreds
|
||||
more through Zapier and IFTTT.
|
||||
|
||||
Zulip offers [over 120 native integrations](/integrations/), which take
|
||||
advantage of Zulip's [topics](/help/introduction-to-topics) to organize
|
||||
notification messages. Additionally, thousands of integrations are available
|
||||
through [Zapier](https://zapier.com/apps) and [IFTTT](https://ifttt.com/search).
|
||||
You can also [connect any webhook designed to work with
|
||||
Slack](/integrations/doc/slack_incoming) to Zulip.
|
||||
Zulip also makes it very easy to write your own integration, and (if you'd
|
||||
like) to get it merged into the main Zulip repository.
|
||||
|
||||
If you don't find an integration you need, you can:
|
||||
Integrations are one of the most important parts of a group chat tool like
|
||||
Zulip, and we are committed to making integrating with Zulip as easy as
|
||||
possible.
|
||||
|
||||
- [Write your own integration](#write-your-own-integration). You can [submit a
|
||||
pull
|
||||
request](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html)
|
||||
to get your integration merged into the main Zulip repository.
|
||||
## Set up an existing integration
|
||||
|
||||
- [File an issue](https://github.com/zulip/zulip/issues/new/choose) to request
|
||||
an integration (if it's a nice-to-have).
|
||||
Most existing integrations send content from a third-party product into
|
||||
Zulip.
|
||||
|
||||
- [Contact Zulip Sales](mailto:sales@zulip.com) to inquire about a custom
|
||||
development contract.
|
||||
* Search Zulip's [list of native integrations](/integrations/) for the
|
||||
third-party product. Each integration has a page describing how to set it
|
||||
up.
|
||||
|
||||
## Set up an integration
|
||||
* Check if [Zapier](https://zapier.com/apps) has an integration with the
|
||||
product. If it does, follow [these instructions](/integrations/doc/zapier)
|
||||
to set it up.
|
||||
|
||||
### Native integrations
|
||||
* Check if [IFTTT](https://ifttt.com/search) has an integration with the
|
||||
product. If it does, follow [these instructions](/integrations/doc/ifttt)
|
||||
to set it up.
|
||||
|
||||
{start_tabs}
|
||||
* Use a third-party webhook integration designed to work with
|
||||
[Slack's webhook API](https://api.slack.com/messaging/webhooks)
|
||||
pointed at Zulip's
|
||||
[Slack-compatible webhook API](/integrations/slack/slack_incoming).
|
||||
|
||||
1. [Search Zulip's integrations](/integrations/) for the product you'd like to
|
||||
connect to Zulip.
|
||||
|
||||
1. Click on the card for the product, and follow the instructions on the page.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
### Integrate via Zapier or IFTTT
|
||||
|
||||
If you don't see a native Zulip integration, you can access thousands of
|
||||
additional integrations through [Zapier](https://zapier.com/apps) and
|
||||
[IFTTT](https://ifttt.com/search).
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. Search [Zapier](https://zapier.com/apps) or [IFTTT](https://ifttt.com/search)
|
||||
for the product you'd like to connect to Zulip.
|
||||
|
||||
1. Follow the integration instructions for [Zapier](/integrations/doc/zapier) or
|
||||
[IFTTT](/integrations/doc/ifttt).
|
||||
|
||||
{end_tabs}
|
||||
|
||||
### Integrate via Slack-compatible webhook API
|
||||
|
||||
Zulip can process incoming webhook messages written to work with [Slack's
|
||||
webhook API](https://api.slack.com/messaging/webhooks). This makes it easy to
|
||||
quickly move your integrations when [migrating your
|
||||
organization](/help/import-from-slack) from Slack to Zulip, or integrate any
|
||||
product that has a Slack webhook integration with Zulip.
|
||||
|
||||
!!! warn ""
|
||||
|
||||
**Note:** In the long term, the recommended approach is to use
|
||||
Zulip's native integrations, which take advantage of Zulip's topics.
|
||||
There may also be some quirks when Slack's formatting system is
|
||||
translated into Zulip's.
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. [Create a bot](/help/add-a-bot-or-integration) for the Slack-compatible
|
||||
webhook. Make sure that you select **Incoming webhook** as the **Bot type**.
|
||||
|
||||
1. Decide where to send Slack-compatible webhook notifications, and [generate
|
||||
the integration URL](https://zulip.com/help/generate-integration-url).
|
||||
|
||||
1. Use the generated URL anywhere you would use a Slack webhook.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
### Integrate via email
|
||||
|
||||
If the product you'd like to integrate can send email notifications, you can
|
||||
[send those emails to a Zulip channel](/help/message-a-channel-by-email). The
|
||||
email subject will become the Zulip topic, and the email body will become the
|
||||
Zulip message.
|
||||
|
||||
For example, you can configure your personal GitHub notifications to go to a
|
||||
Zulip channel rather than your email inbox. Notifications for each issue or pull
|
||||
request will be grouped into a single topic.
|
||||
* If the product can send email notifications, you can
|
||||
[send those emails to a stream](/help/message-a-stream-by-email).
|
||||
|
||||
## Write your own integration
|
||||
|
||||
You can write your own Zulip integrations using the well-documented APIs below.
|
||||
For example, if your company develops software, you can create a custom
|
||||
integration to connect your product to Zulip.
|
||||
|
||||
If you need help, best-effort community support is available in the [Zulip
|
||||
development community](https://zulip.com/development-community/). To inquire
|
||||
about options for custom development, [contact Zulip
|
||||
Sales](mailto:sales@zulip.com).
|
||||
We've put a lot of effort into making this as easy as possible, but
|
||||
all of the options below do require some comfort writing code. If you
|
||||
need an integration and don't have an engineer on staff, [contact
|
||||
us](/help/contact-support) and we'll see what we can do.
|
||||
|
||||
### Sending content into Zulip
|
||||
|
||||
@@ -110,12 +51,8 @@ Sales](mailto:sales@zulip.com).
|
||||
* If it doesn't, you may want to write a
|
||||
[script or plugin integration](/api/non-webhook-integrations).
|
||||
|
||||
* The [`zulip-send` tool](/api/send-message) makes it easy to send Zulip
|
||||
messages from shell scripts.
|
||||
|
||||
* Finally, you can
|
||||
[send messages using Zulip's API](/api/send-message), with bindings for
|
||||
Python, JavaScript and [other languages](/api/client-libraries).
|
||||
[send messages using Zulip's API](/api/send-message).
|
||||
|
||||
### Sending and receiving content
|
||||
|
||||
@@ -128,12 +65,5 @@ Sales](mailto:sales@zulip.com).
|
||||
built on top of this API, so it can do anything a human user can do. Most
|
||||
but not all of the endpoints are documented on this site; if you need
|
||||
something that isn't there check out Zulip's
|
||||
[REST endpoints](https://github.com/zulip/zulip/blob/main/zproject/urls.py).
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Bots overview](/help/bots-overview)
|
||||
* [Set up integrations](/help/set-up-integrations)
|
||||
* [Add a bot or integration](/help/add-a-bot-or-integration)
|
||||
* [Generate integration URL](/help/generate-integration-url)
|
||||
* [Request an integration](/help/request-an-integration)
|
||||
[REST endpoints](https://github.com/zulip/zulip/blob/main/zproject/urls.py)
|
||||
or [contact us](/help/contact-support) and we'll help you out.
|
||||
|
||||
@@ -1,364 +0,0 @@
|
||||
# Message formatting
|
||||
|
||||
Zulip supports an extended version of Markdown for messages, as well as
|
||||
some HTML level special behavior. The Zulip help center article on [message
|
||||
formatting](/help/format-your-message-using-markdown) is the primary
|
||||
documentation for Zulip's markup features. This article is currently a
|
||||
changelog for updates to these features.
|
||||
|
||||
The [render a message](/api/render-message) endpoint can be used to get
|
||||
the current HTML version of any Markdown syntax for message content.
|
||||
|
||||
## Code blocks
|
||||
|
||||
**Changes**: As of Zulip 4.0 (feature level 33), [code blocks][help-code]
|
||||
can have a `data-code-language` attribute attached to the outer HTML
|
||||
`div` element, which records the programming language that was selected
|
||||
for syntax highlighting. This field is used in the
|
||||
[playgrounds][help-playgrounds] feature for code blocks.
|
||||
|
||||
## Global times
|
||||
|
||||
**Changes**: In Zulip 3.0 (feature level 8), added [global time
|
||||
mentions][help-global-time] to supported Markdown message formatting
|
||||
features.
|
||||
|
||||
## Links to channels, topics, and messages
|
||||
|
||||
Zulip's markup supports special readable Markdown syntax for [linking
|
||||
to channels, topics, and messages](/help/link-to-a-message-or-conversation).
|
||||
|
||||
Sample HTML formats are as follows:
|
||||
``` html
|
||||
<!-- Syntax: #**announce** -->
|
||||
<a class="stream" data-stream-id="9"
|
||||
href="/#narrow/channel/9-announce">
|
||||
#announce
|
||||
</a>
|
||||
|
||||
<!-- Syntax: #**announce>Zulip updates** -->
|
||||
<a class="stream-topic" data-stream-id="9"
|
||||
href="/#narrow/channel/9-announce/topic/Zulip.20updates/with/214">
|
||||
#announce > Zulip updates
|
||||
</a>
|
||||
|
||||
<!-- Syntax: #**announce>Zulip updates**
|
||||
Generated only if topic had no messages or the link was rendered
|
||||
before Zulip 10.0 (feature level 347) -->
|
||||
<a class="stream-topic" data-stream-id="9"
|
||||
href="/#narrow/channel/9-announce/topic/Zulip.20updates">
|
||||
#announce > Zulip updates
|
||||
</a>
|
||||
|
||||
<!-- Syntax: #**announce>Zulip updates@214** -->
|
||||
<a class="message-link"
|
||||
href="/#narrow/channel/9-announce/topic/Zulip.20updates/near/214">
|
||||
#announce > Zulip updates @ 💬
|
||||
</a>
|
||||
```
|
||||
|
||||
The `near` and `with` operators are documented in more detail in the
|
||||
[search and URL documentation](/api/construct-narrow). When rendering
|
||||
topic links with the `with` operator, the code doing the rendering may
|
||||
pick the ID arbitrarily among messages accessible to the client and/or
|
||||
acting user at the time of rendering. Currently, the server chooses
|
||||
the message ID to use for `with` operators as the oldest message ID in
|
||||
the topic accessible to the user who wrote the message. In channels
|
||||
with protected history, this means the same Markdown syntax may be
|
||||
rendered differently for users who joined at different times.
|
||||
|
||||
The older stream/topic link elements include a `data-stream-id`, which
|
||||
historically was used in order to display the current channel name if
|
||||
the channel had been renamed. That field is **deprecated**, because
|
||||
displaying an updated value for the most common forms of this syntax
|
||||
requires parsing the URL to get the topic to use anyway.
|
||||
|
||||
When a topic is an empty string, it is replaced with
|
||||
`realm_empty_topic_display_name` found in the [`POST /register`](/api/register-queue)
|
||||
response and wrapped with the `<em>` tag.
|
||||
|
||||
Sample HTML formats with `"realm_empty_topic_display_name": "general chat"`
|
||||
are as follows:
|
||||
```html
|
||||
<!-- Syntax: #**announce>** -->
|
||||
<a class="stream-topic" data-stream-id="9"
|
||||
href="/#narrow/channel/9-announce/topic/with/214">
|
||||
#announce > <em>general chat</em>
|
||||
</a>
|
||||
|
||||
<!-- Syntax: #**announce>**
|
||||
Generated only if topic had no messages or the link was rendered
|
||||
before Zulip 10.0 (feature level 347) -->
|
||||
<a class="stream-topic" data-stream-id="9"
|
||||
href="/#narrow/channel/9-announce/topic/">
|
||||
#announce > <em>general chat</em>
|
||||
</a>
|
||||
|
||||
<!-- Syntax: #**announce>@214** -->
|
||||
<a class="message-link"
|
||||
href="/#narrow/channel/9-announce/topic//near/214">
|
||||
#announce > <em>general chat</em> @ 💬
|
||||
</a>
|
||||
```
|
||||
|
||||
**Changes**: Before Zulip 10.0 (feature level 347), the `with` field
|
||||
was never used in topic link URLs generated by the server; the markup
|
||||
currently used only for empty topics was used for all topic links.
|
||||
|
||||
Before Zulip 10.0 (feature level 346), empty string
|
||||
was not a valid topic name in syntaxes for linking to topics and
|
||||
messages.
|
||||
|
||||
In Zulip 10.0 (feature level 319), added Markdown syntax
|
||||
for linking to a specific message in a conversation. Declared the
|
||||
`data-stream-id` field to be deprecated as detailed above.
|
||||
|
||||
## Image previews
|
||||
|
||||
When a Zulip message is sent linking to an uploaded image, Zulip will
|
||||
generate an image preview element with the following format.
|
||||
|
||||
``` html
|
||||
<div class="message_inline_image">
|
||||
<a href="/user_uploads/path/to/image.png" title="image.png">
|
||||
<img data-original-dimensions="1920x1080"
|
||||
data-original-content-type="image/png"
|
||||
src="/user_uploads/thumbnail/path/to/image.png/840x560.webp">
|
||||
</a>
|
||||
</div>
|
||||
```
|
||||
|
||||
If the server has not yet generated thumbnails for the image at
|
||||
the time the message is sent, the `img` element will be a temporary
|
||||
loading indicator image and have the `image-loading-placeholder`
|
||||
class, which clients can use to identify loading indicators and
|
||||
replace them with a more native loading indicator element if
|
||||
desired. For example:
|
||||
|
||||
``` html
|
||||
<div class="message_inline_image">
|
||||
<a href="/user_uploads/path/to/image.png" title="image.png">
|
||||
<img class="image-loading-placeholder"
|
||||
data-original-dimensions="1920x1080"
|
||||
data-original-content-type="image/png"
|
||||
src="/path/to/spinner.png">
|
||||
</a>
|
||||
</div>
|
||||
```
|
||||
|
||||
Once the server has a working thumbnail, such messages will be updated
|
||||
via an `update_message` event, with the `rendering_only: true` flag
|
||||
(telling clients not to adjust message edit history), with appropriate
|
||||
adjusted `rendered_content`. A client should process those events by
|
||||
just using the updated rendering. If thumbnailing failed, the same
|
||||
type of event will edit the message's rendered form to remove the
|
||||
image preview element, so no special client-side logic should be
|
||||
required to process such errors.
|
||||
|
||||
Note that in the uncommon situation that the thumbnailing system is
|
||||
backlogged, an individual message containing multiple image previews
|
||||
may be re-rendered multiple times as each image finishes thumbnailing
|
||||
and triggers a message update.
|
||||
|
||||
Clients are recommended to do the following when processing image
|
||||
previews:
|
||||
|
||||
- Clients that would like to use the image's aspect ratio to lay out
|
||||
one or more images in the message feed may use the
|
||||
`data-original-dimensions` attribute, which is present even if the
|
||||
image is a placeholder spinner. This attribute encodes the
|
||||
dimensions of the original image as `{width}x{height}`. These
|
||||
dimensions are for the image as rendered, _after_ any EXIF rotation
|
||||
and mirroring has been applied.
|
||||
- If the client would like to control the thumbnail resolution used,
|
||||
it can replace the final section of the URL (`840x560.webp` in the
|
||||
example above) with the `name` of its preferred format from the set
|
||||
of supported formats provided by the server in the
|
||||
`server_thumbnail_formats` portion of the `register`
|
||||
response. Clients should not make any assumptions about what format
|
||||
the server will use as the "default" thumbnail resolution, as it may
|
||||
change over time.
|
||||
- Download button type elements should provide the original image
|
||||
(encoded via the `href` of the containing `a` tag).
|
||||
- The content-type of the original image is provided on a
|
||||
`data-original-content-type` attribute, so clients can decide if
|
||||
they are capable of rendering the original image.
|
||||
- For images whose formats are not widely accepted by browsers
|
||||
(e.g., HEIC and TIFF), the image may contain a
|
||||
`data-transcoded-image` attribute, which specifies a high-resolution
|
||||
thumbnail format which clients may use instead of the original
|
||||
image.
|
||||
- Lightbox elements for viewing an image should be designed to
|
||||
immediately display any already-downloaded thumbnail while fetching
|
||||
the original-quality image or an appropriate higher-quality
|
||||
thumbnail from the server, to be transparently swapped in once it is
|
||||
available. Clients that would like to size the lightbox based on the
|
||||
size of the original image can use the `data-original-dimensions`
|
||||
attribute, as described above.
|
||||
- Animated images will have a `data-animated` attribute on the `img`
|
||||
tag. As detailed in `server_thumbnail_formats`, both animated and
|
||||
still images are available for clients to use, depending on their
|
||||
preference. See, for example, the [web setting][help-previews]
|
||||
to control whether animated images are autoplayed in the message
|
||||
feed.
|
||||
- Clients should not assume that the requested format is the format
|
||||
that they will receive; in rare cases where the client has an
|
||||
out-of-date list of `server_thumbnail_formats`, the server will
|
||||
provide an approximation of the client's requested format. Because
|
||||
of this, clients should not assume that the pixel dimensions or file
|
||||
format match what they requested.
|
||||
- No other processing of the URLs is recommended.
|
||||
|
||||
**Changes**: In Zulip 10.0 (feature level 336), added
|
||||
`data-original-content-type` attribute to convey the type of the
|
||||
original image, and optional `data-transcoded-image` attribute for
|
||||
images with formats which are not widely supported by browsers.
|
||||
|
||||
**Changes**: In Zulip 9.2 (feature levels 278-279, and 287+), added
|
||||
`data-original-dimensions` to the `image-loading-placeholder` spinner
|
||||
images, containing the dimensions of the original image.
|
||||
|
||||
In Zulip 9.0 (feature level 276), added `data-original-dimensions`
|
||||
attribute to images that have been thumbnailed, containing the
|
||||
dimensions of the full-size version of the image. Thumbnailing itself
|
||||
was reintroduced at feature level 275.
|
||||
|
||||
Previously, with the exception of Zulip servers that used the beta
|
||||
Thumbor-based implementation years ago, all image previews in Zulip
|
||||
messages were not thumbnailed; the `a` tag and the `img` tag would both
|
||||
point to the original image.
|
||||
|
||||
Clients that correctly implement the current API should handle
|
||||
Thumbor-based older thumbnails correctly, as long as they do not
|
||||
assume that `data-original-dimensions` is present. Clients should not
|
||||
assume that messages sent prior to the introduction of thumbnailing
|
||||
have been re-rendered to use the new format or have thumbnails
|
||||
available.
|
||||
|
||||
## Mentions and silent mentions
|
||||
|
||||
Zulip markup supports [mentioning](/help/mention-a-user-or-group)
|
||||
users, user groups, and a few special "wildcard" mentions (the three
|
||||
spellings of a channel wildcard mention: `@**all**`, `@**everyone**`,
|
||||
`@**channel**` and the topic wildcard mention `@**topic**`).
|
||||
|
||||
Mentions result in a message being highlighted for the target user(s),
|
||||
both in the UI and in notifications, and may also result in the target
|
||||
user(s) following the conversation, [depending on their
|
||||
settings](/help/follow-a-topic#follow-topics-where-you-are-mentioned).
|
||||
|
||||
Silent mentions of users or groups have none of those side effects,
|
||||
but nonetheless uniquely identify the user or group
|
||||
identified. (There's no such thing as a silent wildcard mention).
|
||||
|
||||
Permissions for mentioning users work as follows:
|
||||
|
||||
- Any user can mention any other user, though mentions by [muted
|
||||
users](/help/mute-a-user) are automatically marked as read and thus do
|
||||
not trigger notifications or otherwise get highlighted like unread
|
||||
mentions.
|
||||
|
||||
- Wildcard mentions are permitted except where [organization-level
|
||||
restrictions](/help/restrict-wildcard-mentions) apply.
|
||||
|
||||
- User groups can be mentioned if and only if the acting user is in
|
||||
the `can_mention_group` group for that group. All user groups can be
|
||||
silently mentioned by any user.
|
||||
|
||||
- System groups, when (silently) mentioned, should be displayed using
|
||||
their description, not their `role:nobody` style API names; see the
|
||||
main [system group
|
||||
documentation](/api/group-setting-values#system-groups) for
|
||||
details. System groups can only be silently mentioned right now,
|
||||
because they happen to all use the empty `Nobody` group for
|
||||
`can_mention_group`; clients should just use `can_mention_group` to
|
||||
determine which groups to offer in typeahead in similar contexts.
|
||||
|
||||
- Requests to send or edit a message that are impermissible due to
|
||||
including a mention where the acting user does not have permission to
|
||||
mention the target will return an error. Mention syntax that does not
|
||||
correspond to a real user or group is ignored.
|
||||
|
||||
Sample markup for `@**Example User**`:
|
||||
|
||||
``` html
|
||||
<span class="user-mention" data-user-id="31">@Example User</span>
|
||||
```
|
||||
|
||||
Sample markup for `@_**Example User**`:
|
||||
|
||||
``` html
|
||||
<span class="user-mention silent" data-user-id="31">Example User</span>
|
||||
```
|
||||
|
||||
Sample markup for `@**topic**`:
|
||||
|
||||
``` html
|
||||
<span class="topic-mention">@topic</span>
|
||||
```
|
||||
|
||||
Sample markup for `@**channel**`:
|
||||
|
||||
``` html
|
||||
<span class="user-mention channel-wildcard-mention"
|
||||
data-user-id="*">@channel</span>
|
||||
```
|
||||
|
||||
Sample markup for `@*support*`, assuming "support" is a valid group:
|
||||
``` html
|
||||
<span class="user-group-mention"
|
||||
data-user-group-id="17">@support</span>
|
||||
```
|
||||
|
||||
Sample markup for `@_*support*`, assuming "support" is a valid group:
|
||||
``` html
|
||||
<span class="user-group-mention silent"
|
||||
data-user-group-id="17">support</span>
|
||||
```
|
||||
|
||||
Sample markup for `@_*role:administrators*`:
|
||||
``` html
|
||||
<span class="user-group-mention silent"
|
||||
data-user-group-id="5">Administrators</span>
|
||||
```
|
||||
|
||||
When processing mentions, clients should look up the user or group
|
||||
referenced by ID, and update the textual name for the mention to the
|
||||
current name for the user or group with that ID. Note that for system
|
||||
groups, this requires special logic to look up the user-facing name
|
||||
for that group; see [system
|
||||
groups](/api/group-setting-values#system-groups) for details.
|
||||
|
||||
**Changes**: Prior to Zulip 10.0 (feature level 333), it was not
|
||||
possible to silently mention [system
|
||||
groups](/api/group-setting-values#system-groups).
|
||||
|
||||
In Zulip 9.0 (feature level 247), `channel` was added to the supported
|
||||
[wildcard][help-mention-all] options used in the
|
||||
[mentions][help-mentions] Markdown message formatting feature.
|
||||
|
||||
## Spoilers
|
||||
|
||||
**Changes**: In Zulip 3.0 (feature level 15), added
|
||||
[spoilers][help-spoilers] to supported Markdown message formatting
|
||||
features.
|
||||
|
||||
## Removed features
|
||||
|
||||
**Changes**: In Zulip 4.0 (feature level 24), the rarely used `!avatar()`
|
||||
and `!gravatar()` markup syntax, which was never documented and had an
|
||||
inconsistent syntax, were removed.
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Markdown formatting](/help/format-your-message-using-markdown)
|
||||
* [Send a message](/api/send-message)
|
||||
* [Render a message](/api/render-message)
|
||||
|
||||
[help-code]: /help/code-blocks
|
||||
[help-playgrounds]: /help/code-blocks#code-playgrounds
|
||||
[help-spoilers]: /help/spoilers
|
||||
[help-global-time]: /help/global-times
|
||||
[help-mentions]: /help/mention-a-user-or-group
|
||||
[help-mention-all]: /help/mention-a-user-or-group#mention-everyone-on-a-channel
|
||||
[help-previews]: /help/image-video-and-website-previews#configure-how-animated-images-are-played
|
||||
@@ -4,14 +4,14 @@
|
||||
fastest to write, but sometimes a third-party product just doesn't support
|
||||
them. Zulip supports several other types of integrations.
|
||||
|
||||
* **Python script integrations**
|
||||
1. **Python script integrations**
|
||||
(examples: SVN, Git), where we can get the service to call our integration
|
||||
(by shelling out or otherwise), passing in the required data. Our preferred
|
||||
model for these is to ship these integrations in the
|
||||
[Zulip Python API distribution](https://github.com/zulip/python-zulip-api/tree/main/zulip),
|
||||
within the `integrations` directory there.
|
||||
|
||||
* **Plugin integrations** (examples:
|
||||
1. **Plugin integrations** (examples:
|
||||
Jenkins, Hubot, Trac) where the user needs to install a plugin into their
|
||||
existing software. These are often more work, but for some products are the
|
||||
only way to integrate with the product at all.
|
||||
@@ -20,7 +20,7 @@ them. Zulip supports several other types of integrations.
|
||||
documentation for the third party software in order to learn how to
|
||||
write the integration.
|
||||
|
||||
* **Interactive bots**. See [Writing bots](/api/writing-bots).
|
||||
1. **Interactive bots**. See [Writing bots](/api/writing-bots).
|
||||
|
||||
A few notes on how to do these:
|
||||
|
||||
@@ -51,9 +51,3 @@ examples of ideal UAs are:
|
||||
|
||||
* The [general advice](/api/incoming-webhooks-overview#general-advice) for
|
||||
webhook integrations applies here as well.
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Running bots](/api/running-bots)
|
||||
* [Deploying bots](/api/deploying-bots)
|
||||
* [Writing bots](/api/writing-bots)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
Outgoing webhooks allow you to build or set up Zulip integrations
|
||||
which are notified when certain types of messages are sent in
|
||||
Zulip. When one of those events is triggered, we'll send an HTTP POST
|
||||
Zulip. When one of those events is triggered, we'll send a HTTP POST
|
||||
payload to the webhook's configured URL. Webhooks can be used to
|
||||
power a wide range of Zulip integrations. For example, the
|
||||
[Zulip Botserver][zulip-botserver] is built on top of this API.
|
||||
@@ -18,7 +18,7 @@ with porting an existing Slack integration to work with Zulip.
|
||||
To register an outgoing webhook:
|
||||
|
||||
* Log in to the Zulip server.
|
||||
* Navigate to *Personal settings (<i class="zulip-icon zulip-icon-gear"></i>)* -> *Bots* ->
|
||||
* Navigate to *Personal settings (<i class="fa fa-cog"></i>)* -> *Bots* ->
|
||||
*Add a new bot*. Select *Outgoing webhook* for bot type, the URL
|
||||
you'd like Zulip to post to as the **Endpoint URL**, the format you
|
||||
want, and click on *Create bot* to submit the form.
|
||||
@@ -29,9 +29,9 @@ To register an outgoing webhook:
|
||||
|
||||
There are currently two ways to trigger an outgoing webhook:
|
||||
|
||||
* **@-mention** the bot user in a channel. If the bot replies, its
|
||||
reply will be sent to that channel and topic.
|
||||
* **Send a direct message** with the bot as one of the recipients.
|
||||
1. **@-mention** the bot user in a stream. If the bot replies, its
|
||||
reply will be sent to that stream and topic.
|
||||
2. **Send a direct message** with the bot as one of the recipients.
|
||||
If the bot replies, its reply will be sent to that thread.
|
||||
|
||||
## Timeouts
|
||||
@@ -124,11 +124,11 @@ Here's how we fill in the fields that a Slack-format webhook expects:
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>channel_id</code></td>
|
||||
<td>Channel ID prefixed by "C"</td>
|
||||
<td>Stream ID prefixed by "C"</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>channel_name</code></td>
|
||||
<td>Channel name</td>
|
||||
<td>Stream name</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>thread_ts</code></td>
|
||||
|
||||
@@ -4,7 +4,7 @@ Zulip's real-time events API lets you write software that reacts
|
||||
immediately to events happening in Zulip. This API is what powers the
|
||||
real-time updates in the Zulip web and mobile apps. As a result, the
|
||||
events available via this API cover all changes to data displayed in
|
||||
the Zulip product, from new messages to channel descriptions to
|
||||
the Zulip product, from new messages to stream descriptions to
|
||||
emoji reactions to changes in user or organization-level settings.
|
||||
|
||||
## Using the events API
|
||||
|
||||
@@ -2,48 +2,20 @@
|
||||
|
||||
Zulip's API will always return a JSON format response.
|
||||
The HTTP status code indicates whether the request was successful
|
||||
(200 = success, 4xx = user error, 5xx = server error).
|
||||
(200 = success, 40x = user error, 50x = server error). Every response
|
||||
will contain at least two keys: `msg` (a human-readable error message)
|
||||
and `result`, which will be either `error` or `success` (this is
|
||||
redundant with the HTTP status code, but is convenient when printing
|
||||
responses while debugging).
|
||||
|
||||
Every response, both success and error responses, will contain at least
|
||||
two keys:
|
||||
For some common errors, Zulip provides a `code` attribute. Where
|
||||
present, clients should check `code`, rather than `msg`, when looking
|
||||
for specific error conditions, since the `msg` strings are
|
||||
internationalized (e.g. the server will send the error message
|
||||
translated into French if the user has a French locale).
|
||||
|
||||
- `msg`: an internationalized, human-readable error message string.
|
||||
|
||||
- `result`: either `"error"` or `"success"`, which is redundant with the
|
||||
HTTP status code, but is convenient when print debugging.
|
||||
|
||||
Every error response will also contain an additional key:
|
||||
|
||||
- `code`: a machine-readable error string, with a default value of
|
||||
`"BAD_REQUEST"` for general errors.
|
||||
|
||||
Clients should always check `code`, rather than `msg`, when looking for
|
||||
specific error conditions. The string values for `msg` are
|
||||
internationalized (e.g., the server will send the error message
|
||||
translated into French if the user has a French locale), so checking
|
||||
those strings will result in buggy code.
|
||||
|
||||
!!! tip ""
|
||||
|
||||
If a client needs information that is only present in the string value
|
||||
of `msg` for a particular error response, then the developers
|
||||
implementing the client should [start a conversation here][api-design]
|
||||
in order to discuss getting a specific error `code` and/or relevant
|
||||
additional key/value pairs for that error response.
|
||||
|
||||
In addition to the keys described above, some error responses will
|
||||
contain other keys with further details that are useful for clients. The
|
||||
specific keys present depend on the error `code`, and are documented at
|
||||
the API endpoints where these particular errors appear.
|
||||
|
||||
**Changes**: Before Zulip 5.0 (feature level 76), all error responses
|
||||
did not contain a `code` key, and its absence indicated that no specific
|
||||
error `code` had been allocated for that error.
|
||||
|
||||
## Common error responses
|
||||
|
||||
Documented below are some error responses that are common to many
|
||||
endpoints:
|
||||
Each endpoint documents its own unique errors; documented below are
|
||||
errors common to many endpoints:
|
||||
|
||||
{generate_code_example|/rest-error-handling:post|fixture}
|
||||
|
||||
@@ -53,12 +25,10 @@ In JSON success responses, all Zulip REST API endpoints may return
|
||||
an array of parameters sent in the request that are not supported
|
||||
by that specific endpoint.
|
||||
|
||||
While this can be expected, e.g., when sending both current and legacy
|
||||
While this can be expected, e.g. when sending both current and legacy
|
||||
names for a parameter to a Zulip server of unknown version, this often
|
||||
indicates either a bug in the client implementation or an attempt to
|
||||
configure a new feature while connected to an older Zulip server that
|
||||
does not support said feature.
|
||||
|
||||
{generate_code_example|/settings:patch|fixture}
|
||||
|
||||
[api-design]: https://chat.zulip.org/#narrow/channel/378-api-design
|
||||
|
||||
@@ -6,7 +6,7 @@ you can do in Zulip, you can do with Zulip's REST API. To use this API:
|
||||
* You'll need to [get an API key](/api/api-keys). You will likely
|
||||
want to [create a bot](/help/add-a-bot-or-integration), unless you're
|
||||
using the API to interact with
|
||||
your own account (e.g., exporting your personal message history).
|
||||
your own account (e.g. exporting your personal message history).
|
||||
* Choose what language you'd like to use. You can download the
|
||||
[Python or JavaScript bindings](/api/installation-instructions), projects in
|
||||
[other languages](/api/client-libraries), or
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Roles and permissions
|
||||
|
||||
Zulip offers several levels of permissions based on a
|
||||
[user's role](/help/user-roles) in a Zulip organization.
|
||||
[user's role](/help/roles-and-permissions) in a Zulip organization.
|
||||
|
||||
Here are some important details to note when working with these
|
||||
roles and permissions in Zulip's API:
|
||||
@@ -75,9 +75,9 @@ event](/api/get-events#realm_user-add), and the
|
||||
|
||||
Many areas of Zulip are customizable by the roles
|
||||
above, such as (but not limited to) [restricting message editing and
|
||||
deletion](/help/restrict-message-editing-and-deletion) and various
|
||||
permissions for different [channel types](/help/channel-permissions).
|
||||
The potential permission levels are:
|
||||
deletion](/help/restrict-message-editing-and-deletion) and
|
||||
[streams permissions](/help/stream-permissions). The potential
|
||||
permission levels are:
|
||||
|
||||
* Everyone / Any user including Guests (least restrictive)
|
||||
|
||||
@@ -102,11 +102,6 @@ and owners.
|
||||
Note that specific settings and policies in the Zulip API that use these
|
||||
permission levels will likely support a subset of those listed above.
|
||||
|
||||
## Group-based permissions
|
||||
|
||||
Some settings have been migrated to a more flexible system based on
|
||||
[user groups](/api/group-setting-values).
|
||||
|
||||
## Determining if a user is a full member
|
||||
|
||||
When a Zulip organization has set up a [waiting period before new members
|
||||
|
||||
@@ -12,7 +12,7 @@ https://github.com/zulip/python-zulip-api/tree/main/zulip_bots/zulip_bots/bots).
|
||||
You'll need:
|
||||
|
||||
* An account in a Zulip organization
|
||||
(e.g., [the Zulip development community](https://zulip.com/development-community/),
|
||||
(e.g. [the Zulip development community](https://zulip.com/development-community/),
|
||||
`<yourSubdomain>.zulipchat.com`, or a Zulip organization on your own
|
||||
[development](https://zulip.readthedocs.io/en/latest/development/overview.html) or
|
||||
[production](https://zulip.readthedocs.io/en/latest/production/install.html) server).
|
||||
@@ -20,42 +20,39 @@ You'll need:
|
||||
|
||||
**Note: Please be considerate when testing experimental bots on public servers such as chat.zulip.org.**
|
||||
|
||||
{start_tabs}
|
||||
1. Go to your Zulip account and
|
||||
[add a bot](/help/add-a-bot-or-integration). Use **Generic bot** as the bot type.
|
||||
|
||||
1. [Create a bot](/help/add-a-bot-or-integration), making sure to select
|
||||
**Generic bot** as the **Bot type**.
|
||||
1. Download the bot's `zuliprc` configuration file to your computer.
|
||||
|
||||
1. [Download the bot's `zuliprc` file](/api/configuring-python-bindings#download-a-zuliprc-file).
|
||||
1. Download the `zulip_bots` Python package to your computer using `pip3 install zulip_bots`.
|
||||
|
||||
1. Use the following command to install the
|
||||
[`zulip_bots` Python package](https://pypi.org/project/zulip-bots/):
|
||||
*Note: Click
|
||||
[here](
|
||||
writing-bots#installing-a-development-version-of-the-zulip-bots-package)
|
||||
to install the latest development version of the package.*
|
||||
|
||||
pip3 install zulip_bots
|
||||
1. Start the bot process on your computer.
|
||||
|
||||
1. Use the following command to start the bot process *(replacing
|
||||
`~/path/to/zuliprc` with the path to the `zuliprc` file you downloaded above)*:
|
||||
* Run
|
||||
```
|
||||
zulip-run-bot <bot-name> --config-file ~/path/to/zuliprc
|
||||
```
|
||||
|
||||
zulip-run-bot <bot-name> --config-file ~/path/to/zuliprc
|
||||
(replacing `~/path/to/zuliprc` with the path to the `zuliprc` file you downloaded above).
|
||||
|
||||
1. Check the output of the command above to make sure your bot is running.
|
||||
It should include the following line:
|
||||
* Check the output of the command. It should include the following line:
|
||||
|
||||
INFO:root:starting message handling...
|
||||
INFO:root:starting message handling...
|
||||
|
||||
1. Test your setup by [starting a new direct message](/help/starting-a-new-direct-message)
|
||||
with the bot or [mentioning](/help/mention-a-user-or-group) the bot on a channel.
|
||||
Congrats! Your bot is running.
|
||||
|
||||
!!! tip ""
|
||||
|
||||
To use the latest development version of the `zulip_bots` package, follow
|
||||
[these steps](writing-bots#installing-a-development-version-of-the-zulip-bots-package).
|
||||
|
||||
{end_tabs}
|
||||
1. To talk with the bot, at-mention its name, like `@**bot-name**`.
|
||||
|
||||
You can now play around with the bot and get it configured the way you
|
||||
like. Eventually, you'll probably want to run it in a production
|
||||
environment where it'll stay up, by [deploying](/api/deploying-bots) it on a
|
||||
server using the Zulip Botserver.
|
||||
environment where it'll stay up, by [deploying](/api/deploying-bots) it on a server using the
|
||||
Zulip Botserver.
|
||||
|
||||
## Common problems
|
||||
|
||||
@@ -66,9 +63,3 @@ server using the Zulip Botserver.
|
||||
the Vagrant environment.
|
||||
* Some bots require Python 3. Try switching to a Python 3 environment before running
|
||||
your bot.
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Non-webhook integrations](/api/non-webhook-integrations)
|
||||
* [Deploying bots](/api/deploying-bots)
|
||||
* [Writing bots](/api/writing-bots)
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
{tab|curl}
|
||||
|
||||
``` curl
|
||||
# For channel messages
|
||||
# For stream messages
|
||||
curl -X POST {{ api_url }}/v1/messages \
|
||||
-u BOT_EMAIL_ADDRESS:BOT_API_KEY \
|
||||
--data-urlencode type=stream \
|
||||
@@ -34,7 +34,7 @@ You can use `zulip-send`
|
||||
the command-line, providing the message content via STDIN.
|
||||
|
||||
```bash
|
||||
# For channel messages
|
||||
# For stream messages
|
||||
zulip-send --stream Denmark --subject Castle \
|
||||
--user othello-bot@example.com --api-key a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5
|
||||
|
||||
|
||||
@@ -21,8 +21,6 @@
|
||||
* [HTTP headers](/api/http-headers)
|
||||
* [Error handling](/api/rest-error-handling)
|
||||
* [Roles and permissions](/api/roles-and-permissions)
|
||||
* [Group-setting values](/api/group-setting-values)
|
||||
* [Message formatting](/api/message-formatting)
|
||||
* [Client libraries](/api/client-libraries)
|
||||
* [API changelog](/api/changelog)
|
||||
|
||||
|
||||
@@ -17,41 +17,23 @@ On this page you'll find:
|
||||
|
||||
## Installing a development version of the Zulip bots package
|
||||
|
||||
{start_tabs}
|
||||
1. `git clone https://github.com/zulip/python-zulip-api.git` - clone the [python-zulip-api](
|
||||
https://github.com/zulip/python-zulip-api) repository.
|
||||
|
||||
1. Clone the [python-zulip-api](https://github.com/zulip/python-zulip-api)
|
||||
repository:
|
||||
2. `cd python-zulip-api` - navigate into your cloned repository.
|
||||
|
||||
```
|
||||
git clone https://github.com/zulip/python-zulip-api.git
|
||||
```
|
||||
3. `python3 ./tools/provision` - install all requirements in a Python virtualenv.
|
||||
|
||||
1. Navigate into your cloned repository:
|
||||
4. The output of `provision` will end with a command of the form `source .../activate`;
|
||||
run that command to enter the new virtualenv.
|
||||
|
||||
```
|
||||
cd python-zulip-api
|
||||
```
|
||||
5. *Finished*. You should now see the name of your venv preceding your prompt,
|
||||
e.g. `(zulip-api-py3-venv)`.
|
||||
|
||||
1. Install all requirements in a Python virtualenv:
|
||||
|
||||
```
|
||||
python3 ./tools/provision
|
||||
```
|
||||
|
||||
1. Run the command provided in the final output of the `provision` process to
|
||||
enter the new virtualenv. The command will be of the form `source .../activate`.
|
||||
|
||||
1. You should now see the name of your virtualenv preceding your prompt (e.g.,
|
||||
`(zulip-api-py3-venv)`).
|
||||
|
||||
!!! tip ""
|
||||
|
||||
`provision` installs the `zulip`, `zulip_bots`, and
|
||||
`zulip_botserver` packages in developer mode. This enables you to
|
||||
modify these packages and then run your modified code without
|
||||
having to first re-install the packages or re-provision.
|
||||
|
||||
{end_tabs}
|
||||
*Hint: `provision` installs the `zulip`, `zulip_bots`, and
|
||||
`zulip_botserver` packages in developer mode. This enables you to
|
||||
modify these packages and then run your modified code without
|
||||
having to first re-install the packages or re-provision.*
|
||||
|
||||
## Writing a bot
|
||||
|
||||
@@ -151,7 +133,7 @@ Response: stream: followup topic: foo_sender@zulip.com
|
||||
```
|
||||
|
||||
Note that the `-b` (aka `--bot-config-file`) argument is for an optional third party
|
||||
config file (e.g., ~/giphy.conf), which only applies to certain types of bots.
|
||||
config file (e.g. ~/giphy.conf), which only applies to certain types of bots.
|
||||
|
||||
## Bot API
|
||||
|
||||
@@ -191,7 +173,7 @@ def usage(self):
|
||||
This plugin will allow users to flag messages
|
||||
as being follow-up items. Users should preface
|
||||
messages with "@followup".
|
||||
Before running this, make sure to create a channel
|
||||
Before running this, make sure to create a stream
|
||||
called "followup" that your API user can send to.
|
||||
'''
|
||||
```
|
||||
@@ -208,7 +190,7 @@ handles user message.
|
||||
|
||||
* message - a dictionary describing a Zulip message
|
||||
|
||||
* bot_handler - used to interact with the server, e.g., to send a message
|
||||
* bot_handler - used to interact with the server, e.g. to send a message
|
||||
|
||||
#### Return values
|
||||
|
||||
@@ -247,7 +229,7 @@ about where the message is sent to.
|
||||
```python
|
||||
bot_handler.send_message(dict(
|
||||
type='stream', # can be 'stream' or 'private'
|
||||
to=channel_name, # either the channel name or user's email
|
||||
to=stream_name, # either the stream name or user's email
|
||||
subject=subject, # message subject
|
||||
content=message, # content of the sent message
|
||||
))
|
||||
@@ -290,58 +272,24 @@ bot_handler.update_message(dict(
|
||||
### bot_handler.storage
|
||||
|
||||
A common problem when writing an interactive bot is that you want to
|
||||
be able to store a bit of persistent state for the bot (e.g., for an
|
||||
be able to store a bit of persistent state for the bot (e.g. for an
|
||||
RSVP bot, the RSVPs). For a sufficiently complex bot, you want need
|
||||
your own database, but for simpler bots, we offer a convenient way for
|
||||
bot code to persistently store data.
|
||||
|
||||
The interface for doing this is `bot_handler.storage`.
|
||||
|
||||
The data is stored in the Zulip Server's database. Each bot user has
|
||||
The data is stored in the Zulip Server's database. Each bot user has
|
||||
an independent storage quota available to it.
|
||||
|
||||
#### Performance considerations
|
||||
|
||||
You can use `bot_handler.storage` in one of two ways:
|
||||
|
||||
- **Direct access**: You can use bot_handler.storage directly, which
|
||||
will result in a round-trip to the server for each `get`, `put`, and
|
||||
`contains` call.
|
||||
- **Context manager**: Alternatively, you can use the `use_storage`
|
||||
context manager to minimize the number of round-trips to the server. We
|
||||
recommend writing bots with the context manager such that they
|
||||
automatically fetch data at the start of `handle_message` and submit the
|
||||
state to the server at the end.
|
||||
|
||||
#### Context manager use_storage
|
||||
|
||||
`use_storage(storage: BotStorage, keys: List[str])`
|
||||
|
||||
The context manager fetches the data for the specified keys and stores
|
||||
them in a `CachedStorage` object with a `bot_handler.storage.get` call for
|
||||
each key, at the start. This object will not communicate with the server
|
||||
until manually calling flush or getting some values that are not previously
|
||||
fetched. After the context manager block is exited, it will automatically
|
||||
flush any changes made to the `CachedStorage` object to the server.
|
||||
|
||||
##### Arguments
|
||||
* storage - a BotStorage object, i.e., `bot_handler.storage`
|
||||
* keys - a list of keys to fetch
|
||||
|
||||
##### Example
|
||||
|
||||
```python
|
||||
with use_storage(bot_handler.storage, ["foo", "bar"]) as cache:
|
||||
print(cache.get("foo")) # print the value of "foo"
|
||||
cache.put("foo", "new value") # update the value of "foo"
|
||||
# changes are automatically flushed to the server on exiting the block
|
||||
```
|
||||
|
||||
#### bot_handler.storage methods
|
||||
|
||||
When using the `use_storage` context manager, the `bot_handler.storage`
|
||||
methods on the yielded object will only operate on a cached version of the
|
||||
storage.
|
||||
Since each access to `bot_handler.storage` will involve a round-trip
|
||||
to the server, we recommend writing bots so that they do a single
|
||||
`bot_handler.storage.get` at the start of `handle_message`, and a
|
||||
single `bot_handler.storage.put` at the end to submit the state to the
|
||||
server. We plan to offer a context manager that takes care of this
|
||||
automatically.
|
||||
|
||||
#### bot_handler.storage.put
|
||||
|
||||
@@ -383,10 +331,6 @@ print(bot_handler.storage.get("foo")) # print "bar"
|
||||
|
||||
will check if the entry `key` exists.
|
||||
|
||||
Note that this will only check the cache, so it would return `False` if no
|
||||
previous call to `bot_handler.storage.get()` or `bot_handler.storage.put()`
|
||||
was made for `key`, since the bot was restarted.
|
||||
|
||||
##### Arguments
|
||||
|
||||
* key - a UTF-8 string
|
||||
@@ -408,43 +352,6 @@ by setting the functions `bot_handler.storage.marshal` and
|
||||
`bot_handler.storage.demarshal`. These functions parse your data on
|
||||
every call to `put` and `get`, respectively.
|
||||
|
||||
#### Flushing cached data to the server
|
||||
|
||||
When using the `use_storage` context manager, you can manually flush
|
||||
changes made to the cache to the server, using the below methods.
|
||||
|
||||
#### cache.flush
|
||||
|
||||
`cache.flush()`
|
||||
|
||||
will flush all changes to the cache to the server.
|
||||
|
||||
##### Example
|
||||
```python
|
||||
with use_storage(bot_handler.storage, ["foo", "bar"]) as cache:
|
||||
cache.put("foo", "foo_value") # update the value of "foo"
|
||||
cache.put("bar", "bar_value") # update the value of "bar"
|
||||
cache.flush() # manually flush both the changes to the server
|
||||
```
|
||||
|
||||
#### cache.flush_one
|
||||
|
||||
`cache.flush_one(key)`
|
||||
|
||||
will flush the changes for the specified key to the server.
|
||||
|
||||
##### Arguments
|
||||
|
||||
- key - a UTF-8 string
|
||||
|
||||
##### Example
|
||||
```python
|
||||
with use_storage(bot_handler.storage, ["foo", "bar"]) as cache:
|
||||
cache.put("foo", "baz") # update the value of "foo"
|
||||
cache.put("bar", "bar_value") # update the value of "bar"
|
||||
cache.flush_one("foo") # flush the changes to "foo" to the server
|
||||
```
|
||||
|
||||
### Configuration file
|
||||
|
||||
```
|
||||
@@ -457,7 +364,7 @@ with use_storage(bot_handler.storage, ["foo", "bar"]) as cache:
|
||||
* key - the API key you created for the bot; this is how Zulip knows
|
||||
the request is from an authorized user.
|
||||
|
||||
* email - the email address of the bot, e.g., `some-bot@zulip.com`
|
||||
* email - the email address of the bot, e.g. `some-bot@zulip.com`
|
||||
|
||||
* site - your development environment URL; if you are working on a
|
||||
development environment hosted on your computer, use
|
||||
@@ -601,14 +508,7 @@ The long-term plan for this bot system is to allow the same
|
||||
* Run directly using the Zulip `call_on_each_message` API, which is
|
||||
how the implementation above works. This is great for quick
|
||||
development with minimal setup.
|
||||
* Run in a simple Python web server, processing messages
|
||||
* Run in a simple Python webserver server, processing messages
|
||||
received from Zulip's outgoing webhooks integration.
|
||||
* For bots merged into the mainline Zulip codebase, enabled via a
|
||||
button in the Zulip web UI, with no code deployment effort required.
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Non-webhook integrations](/api/non-webhook-integrations)
|
||||
* [Running bots](/api/running-bots)
|
||||
* [Deploying bots](/api/deploying-bots)
|
||||
* [Configuring the Python bindings](/api/configuring-python-bindings)
|
||||
|
||||
@@ -1,90 +0,0 @@
|
||||
# Generated by Django 5.0.7 on 2024-08-13 19:41
|
||||
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
replaces = [
|
||||
("confirmation", "0001_initial"),
|
||||
("confirmation", "0002_realmcreationkey"),
|
||||
("confirmation", "0003_emailchangeconfirmation"),
|
||||
("confirmation", "0004_remove_confirmationmanager"),
|
||||
("confirmation", "0005_confirmation_realm"),
|
||||
("confirmation", "0006_realmcreationkey_presume_email_valid"),
|
||||
("confirmation", "0007_add_indexes"),
|
||||
("confirmation", "0008_confirmation_expiry_date"),
|
||||
("confirmation", "0009_confirmation_expiry_date_backfill"),
|
||||
("confirmation", "0010_alter_confirmation_expiry_date"),
|
||||
("confirmation", "0011_alter_confirmation_expiry_date"),
|
||||
("confirmation", "0012_alter_confirmation_id"),
|
||||
("confirmation", "0013_alter_realmcreationkey_id"),
|
||||
("confirmation", "0014_confirmation_confirmatio_content_80155a_idx"),
|
||||
]
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("contenttypes", "0001_initial"),
|
||||
("zerver", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="RealmCreationKey",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
(
|
||||
"creation_key",
|
||||
models.CharField(db_index=True, max_length=40, verbose_name="activation key"),
|
||||
),
|
||||
(
|
||||
"date_created",
|
||||
models.DateTimeField(default=django.utils.timezone.now, verbose_name="created"),
|
||||
),
|
||||
("presume_email_valid", models.BooleanField(default=False)),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Confirmation",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("object_id", models.PositiveIntegerField(db_index=True)),
|
||||
("date_sent", models.DateTimeField(db_index=True)),
|
||||
("confirmation_key", models.CharField(db_index=True, max_length=40)),
|
||||
(
|
||||
"content_type",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="contenttypes.contenttype"
|
||||
),
|
||||
),
|
||||
("type", models.PositiveSmallIntegerField()),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.realm"
|
||||
),
|
||||
),
|
||||
("expiry_date", models.DateTimeField(db_index=True, null=True)),
|
||||
],
|
||||
options={
|
||||
"unique_together": {("type", "confirmation_key")},
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["content_type", "object_id"], name="confirmatio_content_80155a_idx"
|
||||
)
|
||||
],
|
||||
},
|
||||
),
|
||||
]
|
||||
@@ -5,7 +5,7 @@ from django.db import migrations, models
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("zerver", "0001_initial"),
|
||||
("zerver", "0124_stream_enable_notifications"),
|
||||
("confirmation", "0004_remove_confirmationmanager"),
|
||||
]
|
||||
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("confirmation", "0011_alter_confirmation_expiry_date"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="confirmation",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,17 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("confirmation", "0012_alter_confirmation_id"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="realmcreationkey",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,20 +0,0 @@
|
||||
from django.contrib.postgres.operations import AddIndexConcurrently
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("confirmation", "0013_alter_realmcreationkey_id"),
|
||||
("contenttypes", "0002_remove_content_type_name"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
AddIndexConcurrently(
|
||||
model_name="confirmation",
|
||||
index=models.Index(
|
||||
fields=["content_type", "object_id"], name="confirmatio_content_80155a_idx"
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,17 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("confirmation", "0014_confirmation_confirmatio_content_80155a_idx"),
|
||||
# We want to be linking to tables that are already bigints
|
||||
("zerver", "0531_convert_most_ids_to_bigints"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="confirmation",
|
||||
name="object_id",
|
||||
field=models.PositiveBigIntegerField(db_index=True),
|
||||
),
|
||||
]
|
||||
@@ -1,11 +1,10 @@
|
||||
# Copyright: (c) 2008, Jarek Zgoda <jarek.zgoda@gmail.com>
|
||||
|
||||
__revision__ = "$Id: models.py 28 2009-10-22 15:03:02Z jarek.zgoda $"
|
||||
import datetime
|
||||
import secrets
|
||||
from base64 import b32encode
|
||||
from collections.abc import Mapping
|
||||
from datetime import timedelta
|
||||
from typing import Optional, TypeAlias, Union, cast
|
||||
from typing import List, Mapping, Optional, Union
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from django.conf import settings
|
||||
@@ -17,10 +16,10 @@ from django.http import HttpRequest, HttpResponse
|
||||
from django.template.response import TemplateResponse
|
||||
from django.urls import reverse
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
from typing_extensions import TypeAlias, override
|
||||
|
||||
from confirmation import settings as confirmation_settings
|
||||
from zerver.lib.types import UNSET, Unset
|
||||
from zerver.lib.types import UnspecifiedValue
|
||||
from zerver.models import (
|
||||
EmailChangeStatus,
|
||||
MultiuseInvite,
|
||||
@@ -31,12 +30,6 @@ from zerver.models import (
|
||||
UserProfile,
|
||||
)
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
from zilencer.models import (
|
||||
PreregistrationRemoteRealmBillingUser,
|
||||
PreregistrationRemoteServerBillingUser,
|
||||
)
|
||||
|
||||
|
||||
class ConfirmationKeyError(Exception):
|
||||
WRONG_LENGTH = 1
|
||||
@@ -63,29 +56,18 @@ def generate_key() -> str:
|
||||
return b32encode(secrets.token_bytes(15)).decode().lower()
|
||||
|
||||
|
||||
NoZilencerConfirmationObjT: TypeAlias = (
|
||||
MultiuseInvite
|
||||
| PreregistrationRealm
|
||||
| PreregistrationUser
|
||||
| EmailChangeStatus
|
||||
| UserProfile
|
||||
| RealmReactivationStatus
|
||||
)
|
||||
ZilencerConfirmationObjT: TypeAlias = Union[
|
||||
NoZilencerConfirmationObjT,
|
||||
"PreregistrationRemoteServerBillingUser",
|
||||
"PreregistrationRemoteRealmBillingUser",
|
||||
ConfirmationObjT: TypeAlias = Union[
|
||||
MultiuseInvite,
|
||||
PreregistrationRealm,
|
||||
PreregistrationUser,
|
||||
EmailChangeStatus,
|
||||
UserProfile,
|
||||
RealmReactivationStatus,
|
||||
]
|
||||
|
||||
ConfirmationObjT: TypeAlias = NoZilencerConfirmationObjT | ZilencerConfirmationObjT
|
||||
|
||||
|
||||
def get_object_from_key(
|
||||
confirmation_key: str,
|
||||
confirmation_types: list[int],
|
||||
*,
|
||||
mark_object_used: bool,
|
||||
allow_used: bool = False,
|
||||
confirmation_key: str, confirmation_types: List[int], *, mark_object_used: bool
|
||||
) -> ConfirmationObjT:
|
||||
"""Access a confirmation object from one of the provided confirmation
|
||||
types with the provided key.
|
||||
@@ -94,9 +76,6 @@ def get_object_from_key(
|
||||
confirmation object as used (which generally prevents it from
|
||||
being used again). It should always be False for MultiuseInvite
|
||||
objects, since they are intended to be used multiple times.
|
||||
|
||||
By default, used confirmation objects cannot be used again as part
|
||||
of their security model.
|
||||
"""
|
||||
|
||||
# Confirmation keys used to be 40 characters
|
||||
@@ -115,13 +94,11 @@ def get_object_from_key(
|
||||
obj = confirmation.content_object
|
||||
assert obj is not None
|
||||
|
||||
forbidden_statuses = {confirmation_settings.STATUS_REVOKED}
|
||||
if not allow_used:
|
||||
forbidden_statuses.add(confirmation_settings.STATUS_USED)
|
||||
|
||||
if hasattr(obj, "status") and obj.status in forbidden_statuses:
|
||||
used_value = confirmation_settings.STATUS_USED
|
||||
revoked_value = confirmation_settings.STATUS_REVOKED
|
||||
if hasattr(obj, "status") and obj.status in [used_value, revoked_value]:
|
||||
# Confirmations where the object has the status attribute are one-time use
|
||||
# and are marked after being revoked (or used).
|
||||
# and are marked after being used (or revoked).
|
||||
raise ConfirmationKeyError(ConfirmationKeyError.EXPIRED)
|
||||
|
||||
if mark_object_used:
|
||||
@@ -134,40 +111,38 @@ def get_object_from_key(
|
||||
return obj
|
||||
|
||||
|
||||
def create_confirmation_object(
|
||||
def create_confirmation_link(
|
||||
obj: ConfirmationObjT,
|
||||
confirmation_type: int,
|
||||
*,
|
||||
validity_in_minutes: int | None | Unset = UNSET,
|
||||
no_associated_realm_object: bool = False,
|
||||
) -> "Confirmation":
|
||||
validity_in_minutes: Union[Optional[int], UnspecifiedValue] = UnspecifiedValue(),
|
||||
url_args: Mapping[str, str] = {},
|
||||
realm_creation: bool = False,
|
||||
) -> str:
|
||||
# validity_in_minutes is an override for the default values which are
|
||||
# determined by the confirmation_type - its main purpose is for use
|
||||
# in tests which may want to have control over the exact expiration time.
|
||||
key = generate_key()
|
||||
|
||||
# Some confirmation objects, like those for realm creation or those used
|
||||
# for the self-hosted management flows, are not associated with a realm
|
||||
# hosted by this Zulip server.
|
||||
if no_associated_realm_object:
|
||||
if realm_creation:
|
||||
realm = None
|
||||
else:
|
||||
obj = cast(NoZilencerConfirmationObjT, obj)
|
||||
assert not isinstance(obj, PreregistrationRealm)
|
||||
realm = obj.realm
|
||||
|
||||
current_time = timezone_now()
|
||||
expiry_date = None
|
||||
if not isinstance(validity_in_minutes, Unset):
|
||||
if not isinstance(validity_in_minutes, UnspecifiedValue):
|
||||
if validity_in_minutes is None:
|
||||
expiry_date = None
|
||||
else:
|
||||
assert validity_in_minutes is not None
|
||||
expiry_date = current_time + timedelta(minutes=validity_in_minutes)
|
||||
expiry_date = current_time + datetime.timedelta(minutes=validity_in_minutes)
|
||||
else:
|
||||
expiry_date = current_time + timedelta(days=_properties[confirmation_type].validity_in_days)
|
||||
expiry_date = current_time + datetime.timedelta(
|
||||
days=_properties[confirmation_type].validity_in_days
|
||||
)
|
||||
|
||||
return Confirmation.objects.create(
|
||||
Confirmation.objects.create(
|
||||
content_object=obj,
|
||||
date_sent=current_time,
|
||||
confirmation_key=key,
|
||||
@@ -175,50 +150,26 @@ def create_confirmation_object(
|
||||
expiry_date=expiry_date,
|
||||
type=confirmation_type,
|
||||
)
|
||||
|
||||
|
||||
def create_confirmation_link(
|
||||
obj: ConfirmationObjT,
|
||||
confirmation_type: int,
|
||||
*,
|
||||
validity_in_minutes: int | None | Unset = UNSET,
|
||||
url_args: Mapping[str, str] = {},
|
||||
no_associated_realm_object: bool = False,
|
||||
) -> str:
|
||||
return confirmation_url_for(
|
||||
create_confirmation_object(
|
||||
obj,
|
||||
confirmation_type,
|
||||
validity_in_minutes=validity_in_minutes,
|
||||
no_associated_realm_object=no_associated_realm_object,
|
||||
),
|
||||
url_args=url_args,
|
||||
)
|
||||
|
||||
|
||||
def confirmation_url_for(confirmation_obj: "Confirmation", url_args: Mapping[str, str] = {}) -> str:
|
||||
return confirmation_url(
|
||||
confirmation_obj.confirmation_key, confirmation_obj.realm, confirmation_obj.type, url_args
|
||||
)
|
||||
return confirmation_url(key, realm, confirmation_type, url_args)
|
||||
|
||||
|
||||
def confirmation_url(
|
||||
confirmation_key: str,
|
||||
realm: Realm | None,
|
||||
realm: Optional[Realm],
|
||||
confirmation_type: int,
|
||||
url_args: Mapping[str, str] = {},
|
||||
) -> str:
|
||||
url_args = dict(url_args)
|
||||
url_args["confirmation_key"] = confirmation_key
|
||||
return urljoin(
|
||||
settings.ROOT_DOMAIN_URI if realm is None else realm.url,
|
||||
settings.ROOT_DOMAIN_URI if realm is None else realm.uri,
|
||||
reverse(_properties[confirmation_type].url_name, kwargs=url_args),
|
||||
)
|
||||
|
||||
|
||||
class Confirmation(models.Model):
|
||||
content_type = models.ForeignKey(ContentType, on_delete=CASCADE)
|
||||
object_id = models.PositiveBigIntegerField(db_index=True)
|
||||
object_id = models.PositiveIntegerField(db_index=True)
|
||||
content_object = GenericForeignKey("content_type", "object_id")
|
||||
date_sent = models.DateTimeField(db_index=True)
|
||||
confirmation_key = models.CharField(max_length=40, db_index=True)
|
||||
@@ -234,15 +185,10 @@ class Confirmation(models.Model):
|
||||
MULTIUSE_INVITE = 6
|
||||
REALM_CREATION = 7
|
||||
REALM_REACTIVATION = 8
|
||||
REMOTE_SERVER_BILLING_LEGACY_LOGIN = 9
|
||||
REMOTE_REALM_BILLING_LEGACY_LOGIN = 10
|
||||
type = models.PositiveSmallIntegerField()
|
||||
|
||||
class Meta:
|
||||
unique_together = ("type", "confirmation_key")
|
||||
indexes = [
|
||||
models.Index(fields=["content_type", "object_id"]),
|
||||
]
|
||||
|
||||
@override
|
||||
def __str__(self) -> str:
|
||||
@@ -275,13 +221,6 @@ _properties = {
|
||||
Confirmation.REALM_CREATION: ConfirmationType("get_prereg_key_and_redirect"),
|
||||
Confirmation.REALM_REACTIVATION: ConfirmationType("realm_reactivation"),
|
||||
}
|
||||
if settings.ZILENCER_ENABLED:
|
||||
_properties[Confirmation.REMOTE_SERVER_BILLING_LEGACY_LOGIN] = ConfirmationType(
|
||||
"remote_billing_legacy_server_from_login_confirmation_link"
|
||||
)
|
||||
_properties[Confirmation.REMOTE_REALM_BILLING_LEGACY_LOGIN] = ConfirmationType(
|
||||
"remote_realm_billing_from_login_confirmation_link"
|
||||
)
|
||||
|
||||
|
||||
def one_click_unsubscribe_link(user_profile: UserProfile, email_type: str) -> str:
|
||||
@@ -303,7 +242,7 @@ def one_click_unsubscribe_link(user_profile: UserProfile, email_type: str) -> st
|
||||
# add another Confirmation.type for this; it's this way for historical reasons.
|
||||
|
||||
|
||||
def validate_key(creation_key: str | None) -> Optional["RealmCreationKey"]:
|
||||
def validate_key(creation_key: Optional[str]) -> Optional["RealmCreationKey"]:
|
||||
"""Get the record for this key, raising InvalidCreationKey if non-None but invalid."""
|
||||
if creation_key is None:
|
||||
return None
|
||||
|
||||
@@ -1,417 +0,0 @@
|
||||
from collections import defaultdict
|
||||
from collections.abc import Callable, Sequence
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from decimal import Decimal
|
||||
from typing import Any
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import connection
|
||||
from django.db.backends.utils import CursorWrapper
|
||||
from django.db.models import Prefetch
|
||||
from django.template import loader
|
||||
from django.urls import reverse
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from markupsafe import Markup
|
||||
from psycopg2.sql import Composable
|
||||
|
||||
from corporate.models import CustomerPlan, LicenseLedger
|
||||
from zerver.lib.pysa import mark_sanitized
|
||||
from zerver.lib.url_encoding import append_url_query_string
|
||||
from zerver.models import Realm
|
||||
from zilencer.models import (
|
||||
RemoteCustomerUserCount,
|
||||
RemoteRealm,
|
||||
RemoteRealmAuditLog,
|
||||
RemoteZulipServer,
|
||||
get_remote_customer_user_count,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RemoteActivityPlanData:
|
||||
current_status: str
|
||||
current_plan_name: str
|
||||
annual_revenue: int
|
||||
rate: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class ActivityHeaderEntry:
|
||||
name: str
|
||||
value: str | Markup
|
||||
|
||||
|
||||
def make_table(
|
||||
title: str,
|
||||
cols: Sequence[str],
|
||||
rows: Sequence[Any],
|
||||
*,
|
||||
header: list[ActivityHeaderEntry] | None = None,
|
||||
totals: Any | None = None,
|
||||
title_link: Markup | None = None,
|
||||
has_row_class: bool = False,
|
||||
) -> str:
|
||||
if not has_row_class:
|
||||
|
||||
def fix_row(row: Any) -> dict[str, Any]:
|
||||
return dict(cells=row, row_class=None)
|
||||
|
||||
rows = list(map(fix_row, rows))
|
||||
|
||||
data = dict(
|
||||
title=title, cols=cols, rows=rows, header=header, totals=totals, title_link=title_link
|
||||
)
|
||||
|
||||
content = loader.render_to_string(
|
||||
"corporate/activity/activity_table.html",
|
||||
dict(data=data),
|
||||
)
|
||||
|
||||
return content
|
||||
|
||||
|
||||
def fix_rows(
|
||||
rows: list[list[Any]],
|
||||
i: int,
|
||||
fixup_func: Callable[[str], Markup] | Callable[[datetime], str] | Callable[[int], int],
|
||||
) -> None:
|
||||
for row in rows:
|
||||
row[i] = fixup_func(row[i])
|
||||
|
||||
|
||||
def get_query_data(query: Composable) -> list[list[Any]]:
|
||||
cursor = connection.cursor()
|
||||
cursor.execute(query)
|
||||
rows = cursor.fetchall()
|
||||
rows = list(map(list, rows))
|
||||
cursor.close()
|
||||
return rows
|
||||
|
||||
|
||||
def dictfetchall(cursor: CursorWrapper) -> list[dict[str, Any]]:
|
||||
"""Returns all rows from a cursor as a dict"""
|
||||
desc = cursor.description
|
||||
return [dict(zip((col[0] for col in desc), row, strict=False)) for row in cursor.fetchall()]
|
||||
|
||||
|
||||
def format_optional_datetime(date: datetime | None, display_none: bool = False) -> str:
|
||||
if date:
|
||||
return date.strftime("%Y-%m-%d %H:%M")
|
||||
elif display_none:
|
||||
return "None"
|
||||
else:
|
||||
return ""
|
||||
|
||||
|
||||
def format_datetime_as_date(date: datetime) -> str:
|
||||
return date.strftime("%Y-%m-%d")
|
||||
|
||||
|
||||
def format_none_as_zero(value: int | None) -> int:
|
||||
if value:
|
||||
return value
|
||||
else:
|
||||
return 0
|
||||
|
||||
|
||||
def user_activity_link(email: str, user_profile_id: int) -> Markup:
|
||||
from corporate.views.user_activity import get_user_activity
|
||||
|
||||
url = reverse(get_user_activity, kwargs=dict(user_profile_id=user_profile_id))
|
||||
return Markup('<a href="{url}">{email}</a>').format(url=url, email=email)
|
||||
|
||||
|
||||
def realm_activity_link(realm_str: str) -> Markup:
|
||||
from corporate.views.realm_activity import get_realm_activity
|
||||
|
||||
url = reverse(get_realm_activity, kwargs=dict(realm_str=realm_str))
|
||||
return Markup('<a href="{url}"><i class="fa fa-table"></i></a>').format(url=url)
|
||||
|
||||
|
||||
def realm_stats_link(realm_str: str) -> Markup:
|
||||
from analytics.views.stats import stats_for_realm
|
||||
|
||||
url = reverse(stats_for_realm, kwargs=dict(realm_str=realm_str))
|
||||
return Markup('<a href="{url}"><i class="fa fa-pie-chart"></i></a>').format(url=url)
|
||||
|
||||
|
||||
def user_support_link(email: str) -> Markup:
|
||||
support_url = reverse("support")
|
||||
query = urlencode({"q": email})
|
||||
url = append_url_query_string(support_url, query)
|
||||
return Markup('<a href="{url}"><i class="fa fa-gear"></i></a>').format(url=url)
|
||||
|
||||
|
||||
def realm_support_link(realm_str: str) -> Markup:
|
||||
support_url = reverse("support")
|
||||
query = urlencode({"q": realm_str})
|
||||
url = append_url_query_string(support_url, query)
|
||||
return Markup('<a href="{url}">{realm}</i></a>').format(url=url, realm=realm_str)
|
||||
|
||||
|
||||
def realm_url_link(realm_str: str) -> Markup:
|
||||
host = Realm.host_for_subdomain(realm_str)
|
||||
url = settings.EXTERNAL_URI_SCHEME + mark_sanitized(host)
|
||||
return Markup('<a href="{url}"><i class="fa fa-home"></i></a>').format(url=url)
|
||||
|
||||
|
||||
def remote_installation_stats_link(server_id: int) -> Markup:
|
||||
from analytics.views.stats import stats_for_remote_installation
|
||||
|
||||
url = reverse(stats_for_remote_installation, kwargs=dict(remote_server_id=server_id))
|
||||
return Markup('<a href="{url}"><i class="fa fa-pie-chart"></i></a>').format(url=url)
|
||||
|
||||
|
||||
def remote_installation_support_link(hostname: str) -> Markup:
|
||||
support_url = reverse("remote_servers_support")
|
||||
query = urlencode({"q": hostname})
|
||||
url = append_url_query_string(support_url, query)
|
||||
return Markup('<a href="{url}"><i class="fa fa-gear"></i></a>').format(url=url)
|
||||
|
||||
|
||||
def get_plan_rate_percentage(discount: str | None) -> str:
|
||||
# CustomerPlan.discount is a string field that stores the discount.
|
||||
if discount is None or discount == "0":
|
||||
return "100%"
|
||||
|
||||
rate = 100 - Decimal(discount)
|
||||
if rate * 100 % 100 == 0:
|
||||
precision = 0
|
||||
else:
|
||||
precision = 2
|
||||
return f"{rate:.{precision}f}%"
|
||||
|
||||
|
||||
def get_remote_activity_plan_data(
|
||||
plan: CustomerPlan,
|
||||
license_ledger: LicenseLedger,
|
||||
*,
|
||||
remote_realm: RemoteRealm | None = None,
|
||||
remote_server: RemoteZulipServer | None = None,
|
||||
) -> RemoteActivityPlanData:
|
||||
from corporate.lib.stripe import RemoteRealmBillingSession, RemoteServerBillingSession
|
||||
|
||||
if plan.tier == CustomerPlan.TIER_SELF_HOSTED_LEGACY or plan.status in (
|
||||
CustomerPlan.DOWNGRADE_AT_END_OF_FREE_TRIAL,
|
||||
CustomerPlan.DOWNGRADE_AT_END_OF_CYCLE,
|
||||
):
|
||||
renewal_cents = 0
|
||||
current_rate = "---"
|
||||
elif plan.tier == CustomerPlan.TIER_SELF_HOSTED_COMMUNITY:
|
||||
renewal_cents = 0
|
||||
current_rate = "0%"
|
||||
elif remote_realm is not None:
|
||||
renewal_cents = RemoteRealmBillingSession(
|
||||
remote_realm=remote_realm
|
||||
).get_annual_recurring_revenue_for_support_data(plan, license_ledger)
|
||||
current_rate = get_plan_rate_percentage(plan.discount)
|
||||
else:
|
||||
assert remote_server is not None
|
||||
renewal_cents = RemoteServerBillingSession(
|
||||
remote_server=remote_server
|
||||
).get_annual_recurring_revenue_for_support_data(plan, license_ledger)
|
||||
current_rate = get_plan_rate_percentage(plan.discount)
|
||||
|
||||
return RemoteActivityPlanData(
|
||||
current_status=plan.get_plan_status_as_text(),
|
||||
current_plan_name=plan.name,
|
||||
annual_revenue=renewal_cents,
|
||||
rate=current_rate,
|
||||
)
|
||||
|
||||
|
||||
def get_estimated_arr_and_rate_by_realm() -> tuple[dict[str, int], dict[str, str]]: # nocoverage
|
||||
from corporate.lib.stripe import RealmBillingSession
|
||||
|
||||
# NOTE: Customers without a plan might still have a discount attached to them which
|
||||
# are not included in `plan_rate`.
|
||||
annual_revenue = {}
|
||||
plan_rate = {}
|
||||
plans = (
|
||||
CustomerPlan.objects.filter(
|
||||
status=CustomerPlan.ACTIVE,
|
||||
customer__remote_realm__isnull=True,
|
||||
customer__remote_server__isnull=True,
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"licenseledger_set",
|
||||
queryset=LicenseLedger.objects.order_by("plan", "-id").distinct("plan"),
|
||||
to_attr="latest_ledger_entry",
|
||||
)
|
||||
)
|
||||
.select_related("customer__realm")
|
||||
)
|
||||
|
||||
for plan in plans:
|
||||
assert plan.customer.realm is not None
|
||||
latest_ledger_entry = plan.latest_ledger_entry[0] # type: ignore[attr-defined] # attribute from prefetch_related query
|
||||
assert latest_ledger_entry is not None
|
||||
renewal_cents = RealmBillingSession(
|
||||
realm=plan.customer.realm
|
||||
).get_annual_recurring_revenue_for_support_data(plan, latest_ledger_entry)
|
||||
annual_revenue[plan.customer.realm.string_id] = renewal_cents
|
||||
plan_rate[plan.customer.realm.string_id] = get_plan_rate_percentage(plan.discount)
|
||||
return annual_revenue, plan_rate
|
||||
|
||||
|
||||
def get_plan_data_by_remote_server() -> dict[int, RemoteActivityPlanData]: # nocoverage
|
||||
remote_server_plan_data: dict[int, RemoteActivityPlanData] = {}
|
||||
plans = (
|
||||
CustomerPlan.objects.filter(
|
||||
status__lt=CustomerPlan.LIVE_STATUS_THRESHOLD,
|
||||
customer__realm__isnull=True,
|
||||
customer__remote_realm__isnull=True,
|
||||
customer__remote_server__deactivated=False,
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"licenseledger_set",
|
||||
queryset=LicenseLedger.objects.order_by("plan", "-id").distinct("plan"),
|
||||
to_attr="latest_ledger_entry",
|
||||
)
|
||||
)
|
||||
.select_related("customer__remote_server")
|
||||
)
|
||||
|
||||
for plan in plans:
|
||||
server_id = None
|
||||
assert plan.customer.remote_server is not None
|
||||
server_id = plan.customer.remote_server.id
|
||||
assert server_id is not None
|
||||
|
||||
latest_ledger_entry = plan.latest_ledger_entry[0] # type: ignore[attr-defined] # attribute from prefetch_related query
|
||||
assert latest_ledger_entry is not None
|
||||
|
||||
plan_data = get_remote_activity_plan_data(
|
||||
plan, latest_ledger_entry, remote_server=plan.customer.remote_server
|
||||
)
|
||||
|
||||
current_data = remote_server_plan_data.get(server_id)
|
||||
if current_data is not None:
|
||||
current_revenue = remote_server_plan_data[server_id].annual_revenue
|
||||
current_plans = remote_server_plan_data[server_id].current_plan_name
|
||||
# There should only ever be one CustomerPlan for a remote server with
|
||||
# a status that is less than the CustomerPlan.LIVE_STATUS_THRESHOLD.
|
||||
remote_server_plan_data[server_id] = RemoteActivityPlanData(
|
||||
current_status="ERROR: MULTIPLE PLANS",
|
||||
current_plan_name=f"{current_plans}, {plan_data.current_plan_name}",
|
||||
annual_revenue=current_revenue + plan_data.annual_revenue,
|
||||
rate="",
|
||||
)
|
||||
else:
|
||||
remote_server_plan_data[server_id] = plan_data
|
||||
return remote_server_plan_data
|
||||
|
||||
|
||||
def get_plan_data_by_remote_realm() -> dict[int, dict[int, RemoteActivityPlanData]]: # nocoverage
|
||||
remote_server_plan_data_by_realm: dict[int, dict[int, RemoteActivityPlanData]] = {}
|
||||
plans = (
|
||||
CustomerPlan.objects.filter(
|
||||
status__lt=CustomerPlan.LIVE_STATUS_THRESHOLD,
|
||||
customer__realm__isnull=True,
|
||||
customer__remote_server__isnull=True,
|
||||
customer__remote_realm__is_system_bot_realm=False,
|
||||
customer__remote_realm__realm_deactivated=False,
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"licenseledger_set",
|
||||
queryset=LicenseLedger.objects.order_by("plan", "-id").distinct("plan"),
|
||||
to_attr="latest_ledger_entry",
|
||||
)
|
||||
)
|
||||
.select_related("customer__remote_realm")
|
||||
)
|
||||
|
||||
for plan in plans:
|
||||
server_id = None
|
||||
assert plan.customer.remote_realm is not None
|
||||
server_id = plan.customer.remote_realm.server_id
|
||||
assert server_id is not None
|
||||
|
||||
latest_ledger_entry = plan.latest_ledger_entry[0] # type: ignore[attr-defined] # attribute from prefetch_related query
|
||||
assert latest_ledger_entry is not None
|
||||
|
||||
plan_data = get_remote_activity_plan_data(
|
||||
plan, latest_ledger_entry, remote_realm=plan.customer.remote_realm
|
||||
)
|
||||
|
||||
current_server_data = remote_server_plan_data_by_realm.get(server_id)
|
||||
realm_id = plan.customer.remote_realm.id
|
||||
|
||||
if current_server_data is None:
|
||||
realm_dict = {realm_id: plan_data}
|
||||
remote_server_plan_data_by_realm[server_id] = realm_dict
|
||||
else:
|
||||
assert current_server_data is not None
|
||||
current_realm_data = current_server_data.get(realm_id)
|
||||
if current_realm_data is not None:
|
||||
# There should only ever be one CustomerPlan for a remote realm with
|
||||
# a status that is less than the CustomerPlan.LIVE_STATUS_THRESHOLD.
|
||||
current_revenue = current_realm_data.annual_revenue
|
||||
current_plans = current_realm_data.current_plan_name
|
||||
current_server_data[realm_id] = RemoteActivityPlanData(
|
||||
current_status="ERROR: MULTIPLE PLANS",
|
||||
current_plan_name=f"{current_plans}, {plan_data.current_plan_name}",
|
||||
annual_revenue=current_revenue + plan_data.annual_revenue,
|
||||
rate="",
|
||||
)
|
||||
else:
|
||||
current_server_data[realm_id] = plan_data
|
||||
|
||||
return remote_server_plan_data_by_realm
|
||||
|
||||
|
||||
def get_remote_realm_user_counts(
|
||||
event_time: datetime | None = None,
|
||||
) -> dict[int, RemoteCustomerUserCount]: # nocoverage
|
||||
user_counts_by_realm: dict[int, RemoteCustomerUserCount] = {}
|
||||
for log in (
|
||||
RemoteRealmAuditLog.objects.filter(
|
||||
event_type__in=RemoteRealmAuditLog.SYNCED_BILLING_EVENTS,
|
||||
event_time__lte=timezone_now() if event_time is None else event_time,
|
||||
remote_realm__isnull=False,
|
||||
)
|
||||
# Important: extra_data is empty for some pre-2020 audit logs
|
||||
# prior to the introduction of realm_user_count_by_role
|
||||
# logging. Meanwhile, modern Zulip servers using
|
||||
# bulk_create_users to create the users in the system bot
|
||||
# realm also generate such audit logs. Such audit logs should
|
||||
# never be the latest in a normal realm.
|
||||
.exclude(extra_data={})
|
||||
.order_by("remote_realm", "-event_time")
|
||||
.distinct("remote_realm")
|
||||
.select_related("remote_realm")
|
||||
):
|
||||
assert log.remote_realm is not None
|
||||
user_counts_by_realm[log.remote_realm.id] = get_remote_customer_user_count([log])
|
||||
|
||||
return user_counts_by_realm
|
||||
|
||||
|
||||
def get_remote_server_audit_logs(
|
||||
event_time: datetime | None = None,
|
||||
) -> dict[int, list[RemoteRealmAuditLog]]:
|
||||
logs_per_server: dict[int, list[RemoteRealmAuditLog]] = defaultdict(list)
|
||||
for log in (
|
||||
RemoteRealmAuditLog.objects.filter(
|
||||
event_type__in=RemoteRealmAuditLog.SYNCED_BILLING_EVENTS,
|
||||
event_time__lte=timezone_now() if event_time is None else event_time,
|
||||
)
|
||||
# Important: extra_data is empty for some pre-2020 audit logs
|
||||
# prior to the introduction of realm_user_count_by_role
|
||||
# logging. Meanwhile, modern Zulip servers using
|
||||
# bulk_create_users to create the users in the system bot
|
||||
# realm also generate such audit logs. Such audit logs should
|
||||
# never be the latest in a normal realm.
|
||||
.exclude(extra_data={})
|
||||
.order_by("server_id", "realm_id", "-event_time")
|
||||
.distinct("server_id", "realm_id")
|
||||
.select_related("server")
|
||||
):
|
||||
logs_per_server[log.server.id].append(log)
|
||||
|
||||
return logs_per_server
|
||||
@@ -1,5 +0,0 @@
|
||||
from typing import Literal
|
||||
|
||||
BillingModality = Literal["send_invoice", "charge_automatically"]
|
||||
BillingSchedule = Literal["annual", "monthly"]
|
||||
LicenseManagement = Literal["automatic", "manual"]
|
||||
@@ -1,230 +0,0 @@
|
||||
import inspect
|
||||
from collections.abc import Callable
|
||||
from functools import wraps
|
||||
from typing import TYPE_CHECKING, Concatenate
|
||||
from urllib.parse import urlencode, urljoin
|
||||
|
||||
from django.conf import settings
|
||||
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect, JsonResponse
|
||||
from django.shortcuts import render
|
||||
from django.urls import reverse
|
||||
from typing_extensions import ParamSpec
|
||||
|
||||
from corporate.lib.remote_billing_util import (
|
||||
RemoteBillingIdentityExpiredError,
|
||||
get_remote_realm_and_user_from_session,
|
||||
get_remote_server_and_user_from_session,
|
||||
)
|
||||
from zerver.lib.exceptions import RemoteBillingAuthenticationError
|
||||
from zerver.lib.subdomains import get_subdomain
|
||||
from zerver.lib.url_encoding import append_url_query_string
|
||||
from zilencer.models import RemoteRealm
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from corporate.lib.stripe import RemoteRealmBillingSession, RemoteServerBillingSession
|
||||
|
||||
ParamT = ParamSpec("ParamT")
|
||||
|
||||
|
||||
def session_expired_ajax_response(login_url: str) -> JsonResponse: # nocoverage
|
||||
return JsonResponse(
|
||||
{
|
||||
"error_message": "Remote billing authentication expired",
|
||||
"login_url": login_url,
|
||||
},
|
||||
status=401,
|
||||
)
|
||||
|
||||
|
||||
def is_self_hosting_management_subdomain(request: HttpRequest) -> bool:
|
||||
subdomain = get_subdomain(request)
|
||||
return subdomain == settings.SELF_HOSTING_MANAGEMENT_SUBDOMAIN
|
||||
|
||||
|
||||
def self_hosting_management_endpoint(
|
||||
view_func: Callable[Concatenate[HttpRequest, ParamT], HttpResponse],
|
||||
) -> Callable[Concatenate[HttpRequest, ParamT], HttpResponse]:
|
||||
@wraps(view_func)
|
||||
def _wrapped_view_func(
|
||||
request: HttpRequest, /, *args: ParamT.args, **kwargs: ParamT.kwargs
|
||||
) -> HttpResponse:
|
||||
if not is_self_hosting_management_subdomain(request): # nocoverage
|
||||
return render(request, "404.html", status=404)
|
||||
return view_func(request, *args, **kwargs)
|
||||
|
||||
return _wrapped_view_func
|
||||
|
||||
|
||||
def authenticated_remote_realm_management_endpoint(
|
||||
view_func: Callable[
|
||||
Concatenate[HttpRequest, "RemoteRealmBillingSession", ParamT], HttpResponse
|
||||
],
|
||||
) -> Callable[Concatenate[HttpRequest, ParamT], HttpResponse]:
|
||||
@wraps(view_func)
|
||||
def _wrapped_view_func(
|
||||
request: HttpRequest,
|
||||
/,
|
||||
*args: ParamT.args,
|
||||
**kwargs: ParamT.kwargs,
|
||||
) -> HttpResponse:
|
||||
from corporate.lib.stripe import RemoteRealmBillingSession
|
||||
|
||||
if not is_self_hosting_management_subdomain(request): # nocoverage
|
||||
return render(request, "404.html", status=404)
|
||||
|
||||
realm_uuid = kwargs.pop("realm_uuid")
|
||||
if realm_uuid is not None and not isinstance(realm_uuid, str): # nocoverage
|
||||
raise TypeError("realm_uuid must be a string or None")
|
||||
|
||||
try:
|
||||
remote_realm, remote_billing_user = get_remote_realm_and_user_from_session(
|
||||
request, realm_uuid
|
||||
)
|
||||
except RemoteBillingIdentityExpiredError as e:
|
||||
# The user had an authenticated session with an identity_dict,
|
||||
# but it expired.
|
||||
# We want to redirect back to the start of their login flow
|
||||
# at their {realm.host}/self-hosted-billing/ with a proper
|
||||
# next parameter to take them back to what they're trying
|
||||
# to access after re-authing.
|
||||
# Note: Theoretically we could take the realm_uuid from the request
|
||||
# path or params to figure out the remote_realm.host for the redirect,
|
||||
# but that would mean leaking that .host value to anyone who knows
|
||||
# the uuid. Therefore we limit ourselves to taking the realm_uuid
|
||||
# from the identity_dict - since that proves that the user at least
|
||||
# previously was successfully authenticated as a billing admin of that
|
||||
# realm.
|
||||
realm_uuid = e.realm_uuid
|
||||
server_uuid = e.server_uuid
|
||||
uri_scheme = e.uri_scheme
|
||||
if realm_uuid is None:
|
||||
# This doesn't make sense - if get_remote_realm_and_user_from_session
|
||||
# found an expired identity dict, it should have had a realm_uuid.
|
||||
raise AssertionError
|
||||
|
||||
assert server_uuid is not None, "identity_dict with realm_uuid must have server_uuid"
|
||||
assert uri_scheme is not None, "identity_dict with realm_uuid must have uri_scheme"
|
||||
|
||||
try:
|
||||
remote_realm = RemoteRealm.objects.get(uuid=realm_uuid, server__uuid=server_uuid)
|
||||
except RemoteRealm.DoesNotExist:
|
||||
# This should be impossible - unless the RemoteRealm existed and somehow the row
|
||||
# was deleted.
|
||||
raise AssertionError
|
||||
|
||||
# Using EXTERNAL_URI_SCHEME means we'll do https:// in production, which is
|
||||
# the sane default - while having http:// in development, which will allow
|
||||
# these redirects to work there for testing.
|
||||
url = urljoin(uri_scheme + remote_realm.host, "/self-hosted-billing/")
|
||||
|
||||
page_type = get_next_page_param_from_request_path(request)
|
||||
if page_type is not None:
|
||||
query = urlencode({"next_page": page_type})
|
||||
url = append_url_query_string(url, query)
|
||||
|
||||
# Return error for AJAX requests with url.
|
||||
if request.headers.get("x-requested-with") == "XMLHttpRequest": # nocoverage
|
||||
return session_expired_ajax_response(url)
|
||||
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
billing_session = RemoteRealmBillingSession(
|
||||
remote_realm, remote_billing_user=remote_billing_user
|
||||
)
|
||||
return view_func(request, billing_session, *args, **kwargs)
|
||||
|
||||
signature = inspect.signature(view_func)
|
||||
request_parameter, billing_session_parameter, *other_parameters = signature.parameters.values()
|
||||
_wrapped_view_func.__signature__ = signature.replace( # type: ignore[attr-defined] # too magic
|
||||
parameters=[request_parameter, *other_parameters]
|
||||
)
|
||||
_wrapped_view_func.__annotations__ = {
|
||||
k: v for k, v in view_func.__annotations__.items() if k != billing_session_parameter.name
|
||||
}
|
||||
|
||||
return _wrapped_view_func
|
||||
|
||||
|
||||
def get_next_page_param_from_request_path(request: HttpRequest) -> str | None:
|
||||
# Our endpoint URLs in this subsystem end with something like
|
||||
# /sponsorship or /plans etc.
|
||||
# Therefore we can use this nice property to figure out easily what
|
||||
# kind of page the user is trying to access and find the right value
|
||||
# for the `next` query parameter.
|
||||
path = request.path.removesuffix("/")
|
||||
page_type = path.split("/")[-1]
|
||||
|
||||
from corporate.views.remote_billing_page import (
|
||||
VALID_NEXT_PAGES as REMOTE_BILLING_VALID_NEXT_PAGES,
|
||||
)
|
||||
|
||||
if page_type in REMOTE_BILLING_VALID_NEXT_PAGES:
|
||||
return page_type
|
||||
|
||||
# page_type is not where we want user to go after a login, so just render the default page.
|
||||
return None # nocoverage
|
||||
|
||||
|
||||
def authenticated_remote_server_management_endpoint(
|
||||
view_func: Callable[
|
||||
Concatenate[HttpRequest, "RemoteServerBillingSession", ParamT], HttpResponse
|
||||
],
|
||||
) -> Callable[Concatenate[HttpRequest, ParamT], HttpResponse]:
|
||||
@wraps(view_func)
|
||||
def _wrapped_view_func(
|
||||
request: HttpRequest,
|
||||
/,
|
||||
*args: ParamT.args,
|
||||
**kwargs: ParamT.kwargs,
|
||||
) -> HttpResponse:
|
||||
from corporate.lib.stripe import RemoteServerBillingSession
|
||||
|
||||
if not is_self_hosting_management_subdomain(request): # nocoverage
|
||||
return render(request, "404.html", status=404)
|
||||
|
||||
server_uuid = kwargs.pop("server_uuid")
|
||||
if not isinstance(server_uuid, str):
|
||||
raise TypeError("server_uuid must be a string") # nocoverage
|
||||
|
||||
try:
|
||||
remote_server, remote_billing_user = get_remote_server_and_user_from_session(
|
||||
request, server_uuid=server_uuid
|
||||
)
|
||||
if remote_billing_user is None:
|
||||
# This should only be possible if the user hasn't finished the confirmation flow
|
||||
# and doesn't have a fully authenticated session yet. They should not be attempting
|
||||
# to access this endpoint yet.
|
||||
raise RemoteBillingAuthenticationError
|
||||
except (RemoteBillingIdentityExpiredError, RemoteBillingAuthenticationError):
|
||||
# In this flow, we can only redirect to our local "legacy server flow login" page.
|
||||
# That means that we can do it universally whether the user has an expired
|
||||
# identity_dict, or just lacks any form of authentication info at all - there
|
||||
# are no security concerns since this is just a local redirect.
|
||||
url = reverse("remote_billing_legacy_server_login")
|
||||
page_type = get_next_page_param_from_request_path(request)
|
||||
if page_type is not None:
|
||||
query = urlencode({"next_page": page_type})
|
||||
url = append_url_query_string(url, query)
|
||||
|
||||
# Return error for AJAX requests with url.
|
||||
if request.headers.get("x-requested-with") == "XMLHttpRequest": # nocoverage
|
||||
return session_expired_ajax_response(url)
|
||||
|
||||
return HttpResponseRedirect(url)
|
||||
|
||||
assert remote_billing_user is not None
|
||||
billing_session = RemoteServerBillingSession(
|
||||
remote_server, remote_billing_user=remote_billing_user
|
||||
)
|
||||
return view_func(request, billing_session, *args, **kwargs)
|
||||
|
||||
signature = inspect.signature(view_func)
|
||||
request_parameter, billing_session_parameter, *other_parameters = signature.parameters.values()
|
||||
_wrapped_view_func.__signature__ = signature.replace( # type: ignore[attr-defined] # too magic
|
||||
parameters=[request_parameter, *other_parameters]
|
||||
)
|
||||
_wrapped_view_func.__annotations__ = {
|
||||
k: v for k, v in view_func.__annotations__.items() if k != billing_session_parameter.name
|
||||
}
|
||||
|
||||
return _wrapped_view_func
|
||||
@@ -1,24 +1,18 @@
|
||||
from typing import Optional
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from corporate.lib.stripe import LicenseLimitError, get_latest_seat_count, get_seat_count
|
||||
from corporate.models import CustomerPlan, get_current_plan_by_realm
|
||||
from zerver.actions.create_user import send_group_direct_message_to_admins
|
||||
from zerver.lib.exceptions import InvitationError, JsonableError
|
||||
from zerver.models import Realm, UserProfile
|
||||
from zerver.models.users import get_system_bot
|
||||
from corporate.models import get_current_plan_by_realm
|
||||
from zerver.actions.create_user import send_message_to_signup_notification_stream
|
||||
from zerver.lib.exceptions import InvitationError
|
||||
from zerver.models import Realm, UserProfile, get_system_bot
|
||||
|
||||
|
||||
def get_plan_if_manual_license_management_enforced(realm: Realm) -> CustomerPlan | None:
|
||||
def generate_licenses_low_warning_message_if_required(realm: Realm) -> Optional[str]:
|
||||
plan = get_current_plan_by_realm(realm)
|
||||
if plan is None or plan.automanage_licenses or plan.customer.exempt_from_license_number_check:
|
||||
return None
|
||||
return plan
|
||||
|
||||
|
||||
def generate_licenses_low_warning_message_if_required(realm: Realm) -> str | None:
|
||||
plan = get_plan_if_manual_license_management_enforced(realm)
|
||||
if plan is None:
|
||||
if plan is None or plan.automanage_licenses:
|
||||
return None
|
||||
|
||||
licenses_remaining = plan.licenses() - get_latest_seat_count(realm)
|
||||
@@ -26,7 +20,7 @@ def generate_licenses_low_warning_message_if_required(realm: Realm) -> str | Non
|
||||
return None
|
||||
|
||||
format_kwargs = {
|
||||
"billing_page_link": "/billing/",
|
||||
"billing_page_link": "/billing/#settings",
|
||||
"deactivate_user_help_page_link": "/help/deactivate-or-reactivate-a-user",
|
||||
}
|
||||
|
||||
@@ -56,33 +50,34 @@ def generate_licenses_low_warning_message_if_required(realm: Realm) -> str | Non
|
||||
}[licenses_remaining].format(**format_kwargs)
|
||||
|
||||
|
||||
def send_user_unable_to_signup_group_direct_message_to_admins(
|
||||
def send_user_unable_to_signup_message_to_signup_notification_stream(
|
||||
realm: Realm, user_email: str
|
||||
) -> None:
|
||||
message = _(
|
||||
"A new user ({email}) was unable to join because your organization does not have enough "
|
||||
"Zulip licenses. To allow new users to join, make sure that the [number of licenses for "
|
||||
"the current and next billing period]({billing_page_link}) is greater than the current "
|
||||
"number of users."
|
||||
"A new member ({email}) was unable to join your organization because all Zulip licenses "
|
||||
"are in use. Please [increase the number of licenses]({billing_page_link}) or "
|
||||
"[deactivate inactive users]({deactivate_user_help_page_link}) to allow new members to join."
|
||||
).format(
|
||||
email=user_email,
|
||||
billing_page_link="/billing/",
|
||||
billing_page_link="/billing/#settings",
|
||||
deactivate_user_help_page_link="/help/deactivate-or-reactivate-a-user",
|
||||
)
|
||||
|
||||
send_group_direct_message_to_admins(
|
||||
send_message_to_signup_notification_stream(
|
||||
get_system_bot(settings.NOTIFICATION_BOT, realm.id), realm, message
|
||||
)
|
||||
|
||||
|
||||
def check_spare_licenses_available(
|
||||
realm: Realm, plan: CustomerPlan, extra_non_guests_count: int = 0, extra_guests_count: int = 0
|
||||
def check_spare_licenses_available_for_adding_new_users(
|
||||
realm: Realm, extra_non_guests_count: int = 0, extra_guests_count: int = 0
|
||||
) -> None:
|
||||
seat_count = get_seat_count(
|
||||
plan = get_current_plan_by_realm(realm)
|
||||
if plan is None or plan.automanage_licenses or plan.customer.exempt_from_license_number_check:
|
||||
return
|
||||
|
||||
if plan.licenses() < get_seat_count(
|
||||
realm, extra_non_guests_count=extra_non_guests_count, extra_guests_count=extra_guests_count
|
||||
)
|
||||
current_licenses = plan.licenses()
|
||||
renewal_licenses = plan.licenses_at_next_renewal()
|
||||
if current_licenses < seat_count or (renewal_licenses and renewal_licenses < seat_count):
|
||||
):
|
||||
raise LicenseLimitError
|
||||
|
||||
|
||||
@@ -91,45 +86,29 @@ def check_spare_licenses_available_for_registering_new_user(
|
||||
user_email_to_add: str,
|
||||
role: int,
|
||||
) -> None:
|
||||
plan = get_plan_if_manual_license_management_enforced(realm)
|
||||
if plan is None:
|
||||
return
|
||||
|
||||
try:
|
||||
if role == UserProfile.ROLE_GUEST:
|
||||
check_spare_licenses_available(realm, plan, extra_guests_count=1)
|
||||
check_spare_licenses_available_for_adding_new_users(realm, extra_guests_count=1)
|
||||
else:
|
||||
check_spare_licenses_available(realm, plan, extra_non_guests_count=1)
|
||||
check_spare_licenses_available_for_adding_new_users(realm, extra_non_guests_count=1)
|
||||
except LicenseLimitError:
|
||||
send_user_unable_to_signup_group_direct_message_to_admins(realm, user_email_to_add)
|
||||
send_user_unable_to_signup_message_to_signup_notification_stream(realm, user_email_to_add)
|
||||
raise
|
||||
|
||||
|
||||
def check_spare_licenses_available_for_inviting_new_users(
|
||||
realm: Realm, extra_non_guests_count: int = 0, extra_guests_count: int = 0
|
||||
) -> None:
|
||||
plan = get_plan_if_manual_license_management_enforced(realm)
|
||||
if plan is None:
|
||||
return
|
||||
|
||||
num_invites = extra_non_guests_count + extra_guests_count
|
||||
try:
|
||||
check_spare_licenses_available(realm, plan, extra_non_guests_count, extra_guests_count)
|
||||
except LicenseLimitError:
|
||||
message = _(
|
||||
"Your organization does not have enough Zulip licenses. Invitations were not sent."
|
||||
check_spare_licenses_available_for_adding_new_users(
|
||||
realm, extra_non_guests_count, extra_guests_count
|
||||
)
|
||||
except LicenseLimitError:
|
||||
if num_invites == 1:
|
||||
message = _("All Zulip licenses for this organization are currently in use.")
|
||||
else:
|
||||
message = _(
|
||||
"Your organization does not have enough unused Zulip licenses to invite {num_invites} users."
|
||||
).format(num_invites=num_invites)
|
||||
raise InvitationError(message, [], sent_invitations=False, license_limit_reached=True)
|
||||
|
||||
|
||||
def check_spare_license_available_for_changing_guest_user_role(realm: Realm) -> None:
|
||||
plan = get_plan_if_manual_license_management_enforced(realm)
|
||||
if plan is None:
|
||||
return
|
||||
|
||||
try:
|
||||
check_spare_licenses_available(realm, plan, extra_non_guests_count=1)
|
||||
except LicenseLimitError:
|
||||
error_message = _(
|
||||
"Your organization does not have enough Zulip licenses to change a guest user's role."
|
||||
)
|
||||
raise JsonableError(error_message)
|
||||
|
||||
@@ -1,182 +0,0 @@
|
||||
import logging
|
||||
from typing import Literal, TypedDict, cast
|
||||
|
||||
from django.http import HttpRequest
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from zerver.lib.exceptions import JsonableError, RemoteBillingAuthenticationError
|
||||
from zerver.lib.timestamp import datetime_to_timestamp
|
||||
from zilencer.models import (
|
||||
RemoteRealm,
|
||||
RemoteRealmBillingUser,
|
||||
RemoteServerBillingUser,
|
||||
RemoteZulipServer,
|
||||
)
|
||||
|
||||
billing_logger = logging.getLogger("corporate.stripe")
|
||||
|
||||
# The sessions are relatively short-lived, so that we can avoid issues
|
||||
# with users who have their privileges revoked on the remote server
|
||||
# maintaining access to the billing page for too long.
|
||||
REMOTE_BILLING_SESSION_VALIDITY_SECONDS = 2 * 60 * 60
|
||||
|
||||
|
||||
class RemoteBillingUserDict(TypedDict):
|
||||
user_uuid: str
|
||||
user_email: str
|
||||
user_full_name: str
|
||||
|
||||
|
||||
class RemoteBillingIdentityDict(TypedDict):
|
||||
user: RemoteBillingUserDict
|
||||
remote_server_uuid: str
|
||||
remote_realm_uuid: str
|
||||
|
||||
remote_billing_user_id: int | None
|
||||
authenticated_at: int
|
||||
uri_scheme: Literal["http://", "https://"]
|
||||
|
||||
next_page: str | None
|
||||
|
||||
|
||||
class LegacyServerIdentityDict(TypedDict):
|
||||
# Currently this has only one field. We can extend this
|
||||
# to add more information as appropriate.
|
||||
remote_server_uuid: str
|
||||
|
||||
remote_billing_user_id: int | None
|
||||
authenticated_at: int
|
||||
|
||||
|
||||
class RemoteBillingIdentityExpiredError(Exception):
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
realm_uuid: str | None = None,
|
||||
server_uuid: str | None = None,
|
||||
uri_scheme: Literal["http://", "https://"] | None = None,
|
||||
) -> None:
|
||||
self.realm_uuid = realm_uuid
|
||||
self.server_uuid = server_uuid
|
||||
self.uri_scheme = uri_scheme
|
||||
|
||||
|
||||
def get_identity_dict_from_session(
|
||||
request: HttpRequest,
|
||||
*,
|
||||
realm_uuid: str | None,
|
||||
server_uuid: str | None,
|
||||
) -> RemoteBillingIdentityDict | LegacyServerIdentityDict | None:
|
||||
if not (realm_uuid or server_uuid):
|
||||
return None
|
||||
|
||||
identity_dicts = request.session.get("remote_billing_identities")
|
||||
if identity_dicts is None:
|
||||
return None
|
||||
|
||||
if realm_uuid is not None:
|
||||
result = identity_dicts.get(f"remote_realm:{realm_uuid}")
|
||||
else:
|
||||
assert server_uuid is not None
|
||||
result = identity_dicts.get(f"remote_server:{server_uuid}")
|
||||
|
||||
if result is None:
|
||||
return None
|
||||
if (
|
||||
datetime_to_timestamp(timezone_now()) - result["authenticated_at"]
|
||||
> REMOTE_BILLING_SESSION_VALIDITY_SECONDS
|
||||
):
|
||||
# In this case we raise, because callers want to catch this as an explicitly
|
||||
# different scenario from the user not being authenticated, to handle it nicely
|
||||
# by redirecting them to their login page.
|
||||
raise RemoteBillingIdentityExpiredError(
|
||||
realm_uuid=result.get("remote_realm_uuid"),
|
||||
server_uuid=result.get("remote_server_uuid"),
|
||||
uri_scheme=result.get("uri_scheme"),
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def get_remote_realm_and_user_from_session(
|
||||
request: HttpRequest,
|
||||
realm_uuid: str | None,
|
||||
) -> tuple[RemoteRealm, RemoteRealmBillingUser]:
|
||||
# Cannot use isinstance with TypedDicts, to make mypy know
|
||||
# which of the TypedDicts in the Union this is - so just cast it.
|
||||
identity_dict = cast(
|
||||
RemoteBillingIdentityDict | None,
|
||||
get_identity_dict_from_session(request, realm_uuid=realm_uuid, server_uuid=None),
|
||||
)
|
||||
|
||||
if identity_dict is None:
|
||||
raise RemoteBillingAuthenticationError
|
||||
|
||||
remote_server_uuid = identity_dict["remote_server_uuid"]
|
||||
remote_realm_uuid = identity_dict["remote_realm_uuid"]
|
||||
|
||||
try:
|
||||
remote_realm = RemoteRealm.objects.get(
|
||||
uuid=remote_realm_uuid, server__uuid=remote_server_uuid
|
||||
)
|
||||
except RemoteRealm.DoesNotExist:
|
||||
raise AssertionError(
|
||||
"The remote realm is missing despite being in the RemoteBillingIdentityDict"
|
||||
)
|
||||
|
||||
if (
|
||||
remote_realm.registration_deactivated
|
||||
or remote_realm.realm_deactivated
|
||||
or remote_realm.server.deactivated
|
||||
):
|
||||
raise JsonableError(_("Registration is deactivated"))
|
||||
|
||||
remote_billing_user_id = identity_dict["remote_billing_user_id"]
|
||||
# We only put IdentityDicts with remote_billing_user_id in the session in this flow,
|
||||
# because the RemoteRealmBillingUser already exists when this is inserted into the session
|
||||
# at the end of authentication.
|
||||
assert remote_billing_user_id is not None
|
||||
|
||||
try:
|
||||
remote_billing_user = RemoteRealmBillingUser.objects.get(
|
||||
id=remote_billing_user_id, remote_realm=remote_realm
|
||||
)
|
||||
except RemoteRealmBillingUser.DoesNotExist:
|
||||
raise AssertionError
|
||||
|
||||
return remote_realm, remote_billing_user
|
||||
|
||||
|
||||
def get_remote_server_and_user_from_session(
|
||||
request: HttpRequest,
|
||||
server_uuid: str,
|
||||
) -> tuple[RemoteZulipServer, RemoteServerBillingUser | None]:
|
||||
identity_dict: LegacyServerIdentityDict | None = get_identity_dict_from_session(
|
||||
request, realm_uuid=None, server_uuid=server_uuid
|
||||
)
|
||||
|
||||
if identity_dict is None:
|
||||
raise RemoteBillingAuthenticationError
|
||||
|
||||
remote_server_uuid = identity_dict["remote_server_uuid"]
|
||||
try:
|
||||
remote_server = RemoteZulipServer.objects.get(uuid=remote_server_uuid)
|
||||
except RemoteZulipServer.DoesNotExist:
|
||||
raise JsonableError(_("Invalid remote server."))
|
||||
|
||||
if remote_server.deactivated:
|
||||
raise JsonableError(_("Registration is deactivated"))
|
||||
|
||||
remote_billing_user_id = identity_dict.get("remote_billing_user_id")
|
||||
if remote_billing_user_id is None:
|
||||
return remote_server, None
|
||||
|
||||
try:
|
||||
remote_billing_user = RemoteServerBillingUser.objects.get(
|
||||
id=remote_billing_user_id, remote_server=remote_server
|
||||
)
|
||||
except RemoteServerBillingUser.DoesNotExist:
|
||||
remote_billing_user = None
|
||||
|
||||
return remote_server, remote_billing_user
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,38 +1,28 @@
|
||||
import logging
|
||||
from collections.abc import Callable
|
||||
from typing import Any
|
||||
from contextlib import suppress
|
||||
from typing import Any, Callable, Dict, Union
|
||||
|
||||
import stripe
|
||||
from django.conf import settings
|
||||
|
||||
from corporate.lib.stripe import (
|
||||
BILLING_SUPPORT_EMAIL,
|
||||
BillingError,
|
||||
RealmBillingSession,
|
||||
RemoteRealmBillingSession,
|
||||
RemoteServerBillingSession,
|
||||
get_configured_fixed_price_plan_offer,
|
||||
UpgradeWithExistingPlanError,
|
||||
ensure_realm_does_not_have_active_plan,
|
||||
process_initial_upgrade,
|
||||
update_or_create_stripe_customer,
|
||||
)
|
||||
from corporate.models import (
|
||||
Customer,
|
||||
CustomerPlan,
|
||||
Event,
|
||||
Invoice,
|
||||
Session,
|
||||
get_current_plan_by_customer,
|
||||
)
|
||||
from zerver.lib.send_email import FromAddress, send_email
|
||||
from zerver.models.users import get_active_user_profile_by_id_in_realm
|
||||
from corporate.models import Event, PaymentIntent, Session
|
||||
from zerver.models import get_active_user_profile_by_id_in_realm
|
||||
|
||||
billing_logger = logging.getLogger("corporate.stripe")
|
||||
|
||||
|
||||
def stripe_event_handler_decorator(
|
||||
def error_handler(
|
||||
func: Callable[[Any, Any], None],
|
||||
) -> Callable[[stripe.checkout.Session | stripe.Invoice, Event], None]:
|
||||
) -> Callable[[Union[stripe.checkout.Session, stripe.PaymentIntent], Event], None]:
|
||||
def wrapper(
|
||||
stripe_object: stripe.checkout.Session | stripe.Invoice,
|
||||
event: Event,
|
||||
stripe_object: Union[stripe.checkout.Session, stripe.PaymentIntent], event: Event
|
||||
) -> None:
|
||||
event.status = Event.EVENT_HANDLER_STARTED
|
||||
event.save(update_fields=["status"])
|
||||
@@ -40,7 +30,7 @@ def stripe_event_handler_decorator(
|
||||
try:
|
||||
func(stripe_object, event.content_object)
|
||||
except BillingError as e:
|
||||
message = (
|
||||
billing_logger.warning(
|
||||
"BillingError in %s event handler: %s. stripe_object_id=%s, customer_id=%s metadata=%s",
|
||||
event.type,
|
||||
e.error_description,
|
||||
@@ -48,23 +38,12 @@ def stripe_event_handler_decorator(
|
||||
stripe_object.customer,
|
||||
stripe_object.metadata,
|
||||
)
|
||||
billing_logger.warning(message)
|
||||
event.status = Event.EVENT_HANDLER_FAILED
|
||||
event.handler_error = {
|
||||
"message": e.msg,
|
||||
"description": e.error_description,
|
||||
}
|
||||
event.save(update_fields=["status", "handler_error"])
|
||||
if isinstance(stripe_object, stripe.Invoice):
|
||||
# For Invoice processing errors, send email to billing support.
|
||||
send_email(
|
||||
"zerver/emails/error_processing_invoice",
|
||||
to_emails=[BILLING_SUPPORT_EMAIL],
|
||||
from_address=FromAddress.tokenized_no_reply_address(),
|
||||
context={
|
||||
"message": message,
|
||||
},
|
||||
)
|
||||
except Exception:
|
||||
billing_logger.exception(
|
||||
"Uncaught exception in %s event handler:",
|
||||
@@ -84,129 +63,120 @@ def stripe_event_handler_decorator(
|
||||
return wrapper
|
||||
|
||||
|
||||
def get_billing_session_for_stripe_webhook(
|
||||
customer: Customer, user_id: str | None
|
||||
) -> RealmBillingSession | RemoteRealmBillingSession | RemoteServerBillingSession:
|
||||
if customer.remote_realm is not None:
|
||||
return RemoteRealmBillingSession(customer.remote_realm)
|
||||
elif customer.remote_server is not None:
|
||||
return RemoteServerBillingSession(customer.remote_server)
|
||||
else:
|
||||
assert customer.realm is not None
|
||||
if user_id:
|
||||
user = get_active_user_profile_by_id_in_realm(int(user_id), customer.realm)
|
||||
return RealmBillingSession(user)
|
||||
else:
|
||||
return RealmBillingSession(user=None, realm=customer.realm) # nocoverage
|
||||
|
||||
|
||||
@stripe_event_handler_decorator
|
||||
@error_handler
|
||||
def handle_checkout_session_completed_event(
|
||||
stripe_session: stripe.checkout.Session, session: Session
|
||||
) -> None:
|
||||
session.status = Session.COMPLETED
|
||||
session.save()
|
||||
|
||||
assert isinstance(stripe_session.setup_intent, str)
|
||||
assert stripe_session.metadata is not None
|
||||
stripe_setup_intent = stripe.SetupIntent.retrieve(stripe_session.setup_intent)
|
||||
billing_session = get_billing_session_for_stripe_webhook(
|
||||
session.customer, stripe_session.metadata.get("user_id")
|
||||
)
|
||||
assert session.customer.realm is not None
|
||||
user_id = stripe_session.metadata.get("user_id")
|
||||
assert user_id is not None
|
||||
user = get_active_user_profile_by_id_in_realm(user_id, session.customer.realm)
|
||||
payment_method = stripe_setup_intent.payment_method
|
||||
assert isinstance(payment_method, str | None)
|
||||
|
||||
if session.type in [
|
||||
Session.CARD_UPDATE_FROM_BILLING_PAGE,
|
||||
Session.CARD_UPDATE_FROM_UPGRADE_PAGE,
|
||||
Session.UPGRADE_FROM_BILLING_PAGE,
|
||||
Session.RETRY_UPGRADE_WITH_ANOTHER_PAYMENT_METHOD,
|
||||
]:
|
||||
billing_session.update_or_create_stripe_customer(payment_method)
|
||||
|
||||
|
||||
@stripe_event_handler_decorator
|
||||
def handle_invoice_paid_event(stripe_invoice: stripe.Invoice, invoice: Invoice) -> None:
|
||||
invoice.status = Invoice.PAID
|
||||
invoice.save(update_fields=["status"])
|
||||
|
||||
customer = invoice.customer
|
||||
|
||||
configured_fixed_price_plan = None
|
||||
if customer.required_plan_tier:
|
||||
configured_fixed_price_plan = get_configured_fixed_price_plan_offer(
|
||||
customer, customer.required_plan_tier
|
||||
)
|
||||
|
||||
if (
|
||||
stripe_invoice.collection_method == "send_invoice"
|
||||
and configured_fixed_price_plan
|
||||
and configured_fixed_price_plan.sent_invoice_id == invoice.stripe_invoice_id
|
||||
):
|
||||
billing_session = get_billing_session_for_stripe_webhook(customer, user_id=None)
|
||||
complimentary_access_plan = billing_session.get_complimentary_access_plan(customer)
|
||||
assert customer.required_plan_tier is not None
|
||||
billing_session.process_initial_upgrade(
|
||||
plan_tier=customer.required_plan_tier,
|
||||
# TODO: Currently licenses don't play any role for fixed price plan.
|
||||
# We plan to introduce max_licenses allowed soon.
|
||||
licenses=0,
|
||||
automanage_licenses=True,
|
||||
billing_schedule=CustomerPlan.BILLING_SCHEDULE_ANNUAL,
|
||||
charge_automatically=False,
|
||||
free_trial=False,
|
||||
complimentary_access_plan=complimentary_access_plan,
|
||||
stripe_invoice_paid=True,
|
||||
)
|
||||
else:
|
||||
metadata = stripe_invoice.metadata
|
||||
# Only process upgrade required if metadata has the required keys.
|
||||
# This is a safeguard to avoid processing custom invoices.
|
||||
if (
|
||||
metadata is None
|
||||
or metadata.get("billing_schedule") is None
|
||||
or metadata.get("plan_tier") is None
|
||||
): # nocoverage
|
||||
return
|
||||
|
||||
billing_session = get_billing_session_for_stripe_webhook(customer, metadata.get("user_id"))
|
||||
complimentary_access_plan = billing_session.get_complimentary_access_plan(customer)
|
||||
billing_schedule = int(metadata["billing_schedule"])
|
||||
plan_tier = int(metadata["plan_tier"])
|
||||
charge_automatically = stripe_invoice.collection_method != "send_invoice"
|
||||
if configured_fixed_price_plan and customer.required_plan_tier == plan_tier:
|
||||
assert customer.required_plan_tier is not None
|
||||
billing_session.process_initial_upgrade(
|
||||
plan_tier=customer.required_plan_tier,
|
||||
# TODO: Currently licenses don't play any role for fixed price plan.
|
||||
# We plan to introduce max_licenses allowed soon.
|
||||
licenses=0,
|
||||
automanage_licenses=True,
|
||||
billing_schedule=billing_schedule,
|
||||
charge_automatically=charge_automatically,
|
||||
free_trial=False,
|
||||
complimentary_access_plan=complimentary_access_plan,
|
||||
stripe_invoice_paid=True,
|
||||
ensure_realm_does_not_have_active_plan(user.realm)
|
||||
update_or_create_stripe_customer(user, payment_method)
|
||||
assert session.payment_intent is not None
|
||||
session.payment_intent.status = PaymentIntent.PROCESSING
|
||||
session.payment_intent.last_payment_error = ()
|
||||
session.payment_intent.save(update_fields=["status", "last_payment_error"])
|
||||
with suppress(stripe.error.CardError):
|
||||
stripe.PaymentIntent.confirm(
|
||||
session.payment_intent.stripe_payment_intent_id,
|
||||
payment_method=payment_method,
|
||||
off_session=True,
|
||||
)
|
||||
return
|
||||
elif metadata.get("on_free_trial") and invoice.is_created_for_free_trial_upgrade:
|
||||
free_trial_plan = invoice.plan
|
||||
assert free_trial_plan is not None
|
||||
if free_trial_plan.is_free_trial():
|
||||
# We don't need to do anything here. When the free trial ends we will
|
||||
# check if user has paid the invoice, if not we downgrade the user.
|
||||
return
|
||||
|
||||
# If customer paid after end of free trial, we just upgrade via default method below.
|
||||
assert free_trial_plan.status == CustomerPlan.ENDED
|
||||
# Also check if customer is not on any other active plan.
|
||||
assert get_current_plan_by_customer(customer) is None
|
||||
|
||||
billing_session.process_initial_upgrade(
|
||||
plan_tier,
|
||||
int(metadata["licenses"]),
|
||||
metadata["license_management"] == "automatic",
|
||||
billing_schedule=billing_schedule,
|
||||
charge_automatically=charge_automatically,
|
||||
free_trial=False,
|
||||
complimentary_access_plan=complimentary_access_plan,
|
||||
stripe_invoice_paid=True,
|
||||
elif session.type in [
|
||||
Session.FREE_TRIAL_UPGRADE_FROM_BILLING_PAGE,
|
||||
Session.FREE_TRIAL_UPGRADE_FROM_ONBOARDING_PAGE,
|
||||
]:
|
||||
ensure_realm_does_not_have_active_plan(user.realm)
|
||||
update_or_create_stripe_customer(user, payment_method)
|
||||
process_initial_upgrade(
|
||||
user,
|
||||
int(stripe_setup_intent.metadata["licenses"]),
|
||||
stripe_setup_intent.metadata["license_management"] == "automatic",
|
||||
int(stripe_setup_intent.metadata["billing_schedule"]),
|
||||
charge_automatically=True,
|
||||
free_trial=True,
|
||||
)
|
||||
elif session.type in [Session.CARD_UPDATE_FROM_BILLING_PAGE]:
|
||||
update_or_create_stripe_customer(user, payment_method)
|
||||
|
||||
|
||||
@error_handler
|
||||
def handle_payment_intent_succeeded_event(
|
||||
stripe_payment_intent: stripe.PaymentIntent, payment_intent: PaymentIntent
|
||||
) -> None:
|
||||
payment_intent.status = PaymentIntent.SUCCEEDED
|
||||
payment_intent.save()
|
||||
metadata: Dict[str, Any] = stripe_payment_intent.metadata
|
||||
assert payment_intent.customer.realm is not None
|
||||
user_id = metadata.get("user_id")
|
||||
assert user_id is not None
|
||||
user = get_active_user_profile_by_id_in_realm(user_id, payment_intent.customer.realm)
|
||||
|
||||
description = ""
|
||||
for charge in stripe_payment_intent.charges:
|
||||
description = f"Payment (Card ending in {charge.payment_method_details.card.last4})"
|
||||
break
|
||||
|
||||
stripe.InvoiceItem.create(
|
||||
amount=stripe_payment_intent.amount * -1,
|
||||
currency="usd",
|
||||
customer=stripe_payment_intent.customer,
|
||||
description=description,
|
||||
discountable=False,
|
||||
)
|
||||
try:
|
||||
ensure_realm_does_not_have_active_plan(user.realm)
|
||||
except UpgradeWithExistingPlanError as e:
|
||||
stripe_invoice = stripe.Invoice.create(
|
||||
auto_advance=True,
|
||||
collection_method="charge_automatically",
|
||||
customer=stripe_payment_intent.customer,
|
||||
days_until_due=None,
|
||||
statement_descriptor="Zulip Cloud Standard Credit",
|
||||
)
|
||||
stripe.Invoice.finalize_invoice(stripe_invoice)
|
||||
raise e
|
||||
|
||||
process_initial_upgrade(
|
||||
user,
|
||||
int(metadata["licenses"]),
|
||||
metadata["license_management"] == "automatic",
|
||||
int(metadata["billing_schedule"]),
|
||||
True,
|
||||
False,
|
||||
)
|
||||
|
||||
|
||||
@error_handler
|
||||
def handle_payment_intent_payment_failed_event(
|
||||
stripe_payment_intent: stripe.PaymentIntent, payment_intent: PaymentIntent
|
||||
) -> None:
|
||||
payment_intent.status = PaymentIntent.get_status_integer_from_status_text(
|
||||
stripe_payment_intent.status
|
||||
)
|
||||
assert payment_intent.customer.realm is not None
|
||||
billing_logger.info(
|
||||
"Stripe payment intent failed: %s %s %s %s",
|
||||
payment_intent.customer.realm.string_id,
|
||||
stripe_payment_intent.last_payment_error.get("type"),
|
||||
stripe_payment_intent.last_payment_error.get("code"),
|
||||
stripe_payment_intent.last_payment_error.get("param"),
|
||||
)
|
||||
payment_intent.last_payment_error = {
|
||||
"description": stripe_payment_intent.last_payment_error.get("type"),
|
||||
}
|
||||
payment_intent.last_payment_error["message"] = stripe_payment_intent.last_payment_error.get(
|
||||
"message"
|
||||
)
|
||||
payment_intent.save(update_fields=["status", "last_payment_error"])
|
||||
|
||||
@@ -1,484 +1,15 @@
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional, TypedDict, Union
|
||||
from urllib.parse import urlencode, urljoin, urlunsplit
|
||||
|
||||
from django.db.models import Sum
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from django.conf import settings
|
||||
from django.urls import reverse
|
||||
|
||||
from corporate.lib.stripe import (
|
||||
BillingSession,
|
||||
RealmBillingSession,
|
||||
RemoteRealmBillingSession,
|
||||
RemoteServerBillingSession,
|
||||
get_configured_fixed_price_plan_offer,
|
||||
get_guest_user_count,
|
||||
get_non_guest_user_count,
|
||||
get_price_per_license,
|
||||
get_push_status_for_remote_request,
|
||||
start_of_next_billing_cycle,
|
||||
)
|
||||
from corporate.models import (
|
||||
Customer,
|
||||
CustomerPlan,
|
||||
CustomerPlanOffer,
|
||||
LicenseLedger,
|
||||
ZulipSponsorshipRequest,
|
||||
get_current_plan_by_customer,
|
||||
)
|
||||
from zerver.lib.timestamp import timestamp_to_datetime
|
||||
from zerver.models import Realm
|
||||
from zerver.models.realm_audit_logs import AuditLogEventType
|
||||
from zerver.models.realms import get_org_type_display_name
|
||||
from zilencer.lib.remote_counts import MissingDataError
|
||||
from zilencer.models import (
|
||||
RemoteCustomerUserCount,
|
||||
RemoteInstallationCount,
|
||||
RemotePushDeviceToken,
|
||||
RemoteRealm,
|
||||
RemoteRealmCount,
|
||||
RemoteZulipServer,
|
||||
RemoteZulipServerAuditLog,
|
||||
get_remote_realm_guest_and_non_guest_count,
|
||||
get_remote_server_guest_and_non_guest_count,
|
||||
has_stale_audit_log,
|
||||
)
|
||||
|
||||
USER_DATA_STALE_WARNING = "Recent audit log missing: No data for used licenses."
|
||||
from zerver.models import Realm, get_realm
|
||||
|
||||
|
||||
class SponsorshipRequestDict(TypedDict):
|
||||
org_type: str
|
||||
org_website: str
|
||||
org_description: str
|
||||
plan_to_use_zulip: str
|
||||
total_users: str
|
||||
paid_users: str
|
||||
paid_users_description: str
|
||||
requested_plan: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class SponsorshipData:
|
||||
sponsorship_pending: bool = False
|
||||
has_discount: bool = False
|
||||
monthly_discounted_price: int | None = None
|
||||
annual_discounted_price: int | None = None
|
||||
original_monthly_plan_price: int | None = None
|
||||
original_annual_plan_price: int | None = None
|
||||
minimum_licenses: int | None = None
|
||||
required_plan_tier: int | None = None
|
||||
latest_sponsorship_request: SponsorshipRequestDict | None = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class NextPlanData:
|
||||
plan: Union["CustomerPlan", "CustomerPlanOffer", None] = None
|
||||
estimated_revenue: int | None = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class PlanData:
|
||||
customer: Optional["Customer"] = None
|
||||
current_plan: Optional["CustomerPlan"] = None
|
||||
next_plan: Union["CustomerPlan", "CustomerPlanOffer", None] = None
|
||||
licenses: int | None = None
|
||||
licenses_used: int | None = None
|
||||
next_billing_cycle_start: datetime | None = None
|
||||
is_complimentary_access_plan: bool = False
|
||||
has_fixed_price: bool = False
|
||||
is_current_plan_billable: bool = False
|
||||
stripe_customer_url: str | None = None
|
||||
warning: str = ""
|
||||
annual_recurring_revenue: int | None = None
|
||||
estimated_next_plan_revenue: int | None = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class PushNotificationsStatus:
|
||||
can_push: bool
|
||||
expected_end: datetime | None
|
||||
message: str
|
||||
|
||||
|
||||
@dataclass
|
||||
class MobilePushData:
|
||||
total_mobile_users: int
|
||||
push_notification_status: PushNotificationsStatus
|
||||
uncategorized_mobile_users: int | None = None
|
||||
mobile_pushes_forwarded: int | None = None
|
||||
last_mobile_push_sent: str = ""
|
||||
|
||||
|
||||
@dataclass
|
||||
class RemoteSupportData:
|
||||
date_created: datetime
|
||||
has_stale_audit_log: bool
|
||||
plan_data: PlanData
|
||||
sponsorship_data: SponsorshipData
|
||||
user_data: RemoteCustomerUserCount
|
||||
mobile_push_data: MobilePushData
|
||||
|
||||
|
||||
@dataclass
|
||||
class UserData:
|
||||
guest_user_count: int
|
||||
non_guest_user_count: int
|
||||
|
||||
|
||||
@dataclass
|
||||
class CloudSupportData:
|
||||
plan_data: PlanData
|
||||
sponsorship_data: SponsorshipData
|
||||
user_data: UserData
|
||||
|
||||
|
||||
def get_stripe_customer_url(stripe_id: str) -> str:
|
||||
return f"https://dashboard.stripe.com/customers/{stripe_id}" # nocoverage
|
||||
|
||||
|
||||
def get_realm_user_data(realm: Realm) -> UserData:
|
||||
non_guests = get_non_guest_user_count(realm)
|
||||
guests = get_guest_user_count(realm)
|
||||
return UserData(
|
||||
guest_user_count=guests,
|
||||
non_guest_user_count=non_guests,
|
||||
)
|
||||
|
||||
|
||||
def get_customer_sponsorship_data(customer: Customer) -> SponsorshipData:
|
||||
pending = customer.sponsorship_pending
|
||||
licenses = customer.minimum_licenses
|
||||
plan_tier = customer.required_plan_tier
|
||||
has_discount = False
|
||||
sponsorship_request = None
|
||||
monthly_discounted_price = None
|
||||
annual_discounted_price = None
|
||||
original_monthly_plan_price = None
|
||||
original_annual_plan_price = None
|
||||
if customer.monthly_discounted_price:
|
||||
has_discount = True
|
||||
monthly_discounted_price = customer.monthly_discounted_price
|
||||
if customer.annual_discounted_price:
|
||||
has_discount = True
|
||||
annual_discounted_price = customer.annual_discounted_price
|
||||
if plan_tier is not None:
|
||||
original_monthly_plan_price = get_price_per_license(
|
||||
plan_tier, CustomerPlan.BILLING_SCHEDULE_MONTHLY
|
||||
)
|
||||
original_annual_plan_price = get_price_per_license(
|
||||
plan_tier, CustomerPlan.BILLING_SCHEDULE_ANNUAL
|
||||
)
|
||||
if pending:
|
||||
last_sponsorship_request = (
|
||||
ZulipSponsorshipRequest.objects.filter(customer=customer).order_by("id").last()
|
||||
)
|
||||
if last_sponsorship_request is not None:
|
||||
org_type_name = get_org_type_display_name(last_sponsorship_request.org_type)
|
||||
if (
|
||||
last_sponsorship_request.org_website is None
|
||||
or last_sponsorship_request.org_website == ""
|
||||
):
|
||||
website = "No website submitted"
|
||||
else:
|
||||
website = last_sponsorship_request.org_website
|
||||
sponsorship_request = SponsorshipRequestDict(
|
||||
org_type=org_type_name,
|
||||
org_website=website,
|
||||
org_description=last_sponsorship_request.org_description,
|
||||
total_users=last_sponsorship_request.expected_total_users,
|
||||
plan_to_use_zulip=last_sponsorship_request.plan_to_use_zulip,
|
||||
paid_users=last_sponsorship_request.paid_users_count,
|
||||
paid_users_description=last_sponsorship_request.paid_users_description,
|
||||
requested_plan=last_sponsorship_request.requested_plan,
|
||||
)
|
||||
|
||||
return SponsorshipData(
|
||||
sponsorship_pending=pending,
|
||||
has_discount=has_discount,
|
||||
monthly_discounted_price=monthly_discounted_price,
|
||||
annual_discounted_price=annual_discounted_price,
|
||||
original_monthly_plan_price=original_monthly_plan_price,
|
||||
original_annual_plan_price=original_annual_plan_price,
|
||||
minimum_licenses=licenses,
|
||||
required_plan_tier=plan_tier,
|
||||
latest_sponsorship_request=sponsorship_request,
|
||||
)
|
||||
|
||||
|
||||
def get_annual_invoice_count(billing_schedule: int) -> int:
|
||||
if billing_schedule == CustomerPlan.BILLING_SCHEDULE_MONTHLY:
|
||||
return 12
|
||||
else: # nocoverage
|
||||
return 1
|
||||
|
||||
|
||||
def get_next_plan_data(
|
||||
billing_session: BillingSession,
|
||||
customer: Customer,
|
||||
current_plan: CustomerPlan | None = None,
|
||||
) -> NextPlanData:
|
||||
plan_offer: CustomerPlanOffer | None = None
|
||||
|
||||
# A customer can have a CustomerPlanOffer with or without a current plan.
|
||||
if customer.required_plan_tier:
|
||||
plan_offer = get_configured_fixed_price_plan_offer(customer, customer.required_plan_tier)
|
||||
|
||||
if plan_offer is not None:
|
||||
next_plan_data = NextPlanData(plan=plan_offer)
|
||||
elif current_plan is not None:
|
||||
next_plan_data = NextPlanData(plan=billing_session.get_next_plan(current_plan))
|
||||
else:
|
||||
next_plan_data = NextPlanData()
|
||||
|
||||
if next_plan_data.plan is not None:
|
||||
if next_plan_data.plan.fixed_price is not None:
|
||||
next_plan_data.estimated_revenue = next_plan_data.plan.fixed_price
|
||||
return next_plan_data
|
||||
|
||||
if current_plan is not None:
|
||||
licenses_at_next_renewal = current_plan.licenses_at_next_renewal()
|
||||
if licenses_at_next_renewal is not None:
|
||||
assert type(next_plan_data.plan) is CustomerPlan
|
||||
assert next_plan_data.plan.price_per_license is not None
|
||||
invoice_count = get_annual_invoice_count(next_plan_data.plan.billing_schedule)
|
||||
next_plan_data.estimated_revenue = (
|
||||
next_plan_data.plan.price_per_license * licenses_at_next_renewal * invoice_count
|
||||
)
|
||||
else:
|
||||
next_plan_data.estimated_revenue = 0 # nocoverage
|
||||
return next_plan_data
|
||||
|
||||
return next_plan_data
|
||||
|
||||
|
||||
def get_plan_data_for_support_view(
|
||||
billing_session: BillingSession, user_count: int | None = None, stale_user_data: bool = False
|
||||
) -> PlanData:
|
||||
customer = billing_session.get_customer()
|
||||
plan = None
|
||||
if customer is not None:
|
||||
plan = get_current_plan_by_customer(customer)
|
||||
plan_data = PlanData(
|
||||
customer=customer,
|
||||
current_plan=plan,
|
||||
)
|
||||
|
||||
if plan_data.current_plan is not None:
|
||||
last_ledger_entry = (
|
||||
LicenseLedger.objects.filter(
|
||||
plan=plan_data.current_plan, event_time__lte=timezone_now()
|
||||
)
|
||||
.order_by("-id")
|
||||
.first()
|
||||
)
|
||||
|
||||
if last_ledger_entry is None: # nocoverage
|
||||
# This shouldn't be possible because at least one
|
||||
# license ledger entry should exist when a plan's
|
||||
# status is less than CustomerPlan.LIVE_STATUS_THRESHOLD.
|
||||
# But since we have a warning feature in the support
|
||||
# view for plan data, we use that instead of raising
|
||||
# an assertion error so that support staff can debug
|
||||
# if this does ever occur.
|
||||
plan_data.warning += "License ledger missing: No data for total licenses and revenue. "
|
||||
plan_data.licenses = None
|
||||
plan_data.annual_recurring_revenue = 0
|
||||
else:
|
||||
plan_data.licenses = last_ledger_entry.licenses
|
||||
plan_data.annual_recurring_revenue = (
|
||||
billing_session.get_annual_recurring_revenue_for_support_data(
|
||||
plan_data.current_plan, last_ledger_entry
|
||||
)
|
||||
)
|
||||
|
||||
# If we already have user count data, we use that
|
||||
# instead of querying the database again to get
|
||||
# the number of currently used licenses.
|
||||
if stale_user_data:
|
||||
plan_data.warning += USER_DATA_STALE_WARNING
|
||||
plan_data.licenses_used = None
|
||||
elif user_count is None:
|
||||
try:
|
||||
plan_data.licenses_used = billing_session.current_count_for_billed_licenses()
|
||||
except MissingDataError: # nocoverage
|
||||
plan_data.warning += USER_DATA_STALE_WARNING
|
||||
plan_data.licenses_used = None
|
||||
else: # nocoverage
|
||||
assert user_count is not None
|
||||
plan_data.licenses_used = user_count
|
||||
|
||||
if plan_data.current_plan.status in (
|
||||
CustomerPlan.FREE_TRIAL,
|
||||
CustomerPlan.DOWNGRADE_AT_END_OF_FREE_TRIAL,
|
||||
): # nocoverage
|
||||
assert plan_data.current_plan.next_invoice_date is not None
|
||||
plan_data.next_billing_cycle_start = plan_data.current_plan.next_invoice_date
|
||||
else:
|
||||
plan_data.next_billing_cycle_start = start_of_next_billing_cycle(
|
||||
plan_data.current_plan, timezone_now()
|
||||
)
|
||||
|
||||
if isinstance(billing_session, RealmBillingSession):
|
||||
# TODO implement a complimentary access plan/tier for Zulip Cloud.
|
||||
plan_data.is_complimentary_access_plan = False
|
||||
else:
|
||||
plan_data.is_complimentary_access_plan = (
|
||||
plan_data.current_plan.tier == CustomerPlan.TIER_SELF_HOSTED_LEGACY
|
||||
)
|
||||
plan_data.has_fixed_price = plan_data.current_plan.fixed_price is not None
|
||||
plan_data.is_current_plan_billable = billing_session.check_plan_tier_is_billable(
|
||||
plan_tier=plan_data.current_plan.tier
|
||||
)
|
||||
|
||||
# Check for a non-active/scheduled CustomerPlan or CustomerPlanOffer
|
||||
if customer is not None:
|
||||
next_plan_data = get_next_plan_data(billing_session, customer, plan_data.current_plan)
|
||||
plan_data.next_plan = next_plan_data.plan
|
||||
plan_data.estimated_next_plan_revenue = next_plan_data.estimated_revenue
|
||||
|
||||
# If customer has a stripe ID, add link to stripe customer dashboard
|
||||
if customer is not None and customer.stripe_customer_id is not None:
|
||||
plan_data.stripe_customer_url = get_stripe_customer_url(
|
||||
customer.stripe_customer_id
|
||||
) # nocoverage
|
||||
|
||||
return plan_data
|
||||
|
||||
|
||||
def get_mobile_push_data(remote_entity: RemoteZulipServer | RemoteRealm) -> MobilePushData:
|
||||
if isinstance(remote_entity, RemoteZulipServer):
|
||||
total_users = (
|
||||
RemotePushDeviceToken.objects.filter(server=remote_entity)
|
||||
.distinct("user_id", "user_uuid")
|
||||
.count()
|
||||
)
|
||||
uncategorized_users = (
|
||||
RemotePushDeviceToken.objects.filter(server=remote_entity, remote_realm__isnull=True)
|
||||
.distinct("user_id", "user_uuid")
|
||||
.count()
|
||||
)
|
||||
mobile_pushes = RemoteInstallationCount.objects.filter(
|
||||
server=remote_entity,
|
||||
property="mobile_pushes_forwarded::day",
|
||||
subgroup=None,
|
||||
end_time__gte=timezone_now() - timedelta(days=7),
|
||||
).aggregate(total_forwarded=Sum("value", default=0))
|
||||
latest_remote_server_push_forwarded_count = RemoteInstallationCount.objects.filter(
|
||||
server=remote_entity,
|
||||
subgroup=None,
|
||||
property="mobile_pushes_forwarded::day",
|
||||
).last()
|
||||
if latest_remote_server_push_forwarded_count is not None: # nocoverage
|
||||
# mobile_pushes_forwarded is a CountStat with a day frequency,
|
||||
# so we want to show the start of the latest day interval.
|
||||
push_forwarded_interval_start = (
|
||||
latest_remote_server_push_forwarded_count.end_time - timedelta(days=1)
|
||||
).strftime("%Y-%m-%d")
|
||||
else:
|
||||
push_forwarded_interval_start = "None"
|
||||
push_status = get_push_status_for_remote_request(
|
||||
remote_server=remote_entity, remote_realm=None
|
||||
)
|
||||
push_notification_status = PushNotificationsStatus(
|
||||
can_push=push_status.can_push,
|
||||
expected_end=timestamp_to_datetime(push_status.expected_end_timestamp)
|
||||
if push_status.expected_end_timestamp
|
||||
else None,
|
||||
message=push_status.message,
|
||||
)
|
||||
return MobilePushData(
|
||||
total_mobile_users=total_users,
|
||||
push_notification_status=push_notification_status,
|
||||
uncategorized_mobile_users=uncategorized_users,
|
||||
mobile_pushes_forwarded=mobile_pushes["total_forwarded"],
|
||||
last_mobile_push_sent=push_forwarded_interval_start,
|
||||
)
|
||||
else:
|
||||
assert isinstance(remote_entity, RemoteRealm)
|
||||
mobile_users = (
|
||||
RemotePushDeviceToken.objects.filter(remote_realm=remote_entity)
|
||||
.distinct("user_id", "user_uuid")
|
||||
.count()
|
||||
)
|
||||
mobile_pushes = RemoteRealmCount.objects.filter(
|
||||
remote_realm=remote_entity,
|
||||
property="mobile_pushes_forwarded::day",
|
||||
subgroup=None,
|
||||
end_time__gte=timezone_now() - timedelta(days=7),
|
||||
).aggregate(total_forwarded=Sum("value", default=0))
|
||||
latest_remote_realm_push_forwarded_count = RemoteRealmCount.objects.filter(
|
||||
remote_realm=remote_entity,
|
||||
subgroup=None,
|
||||
property="mobile_pushes_forwarded::day",
|
||||
).last()
|
||||
if latest_remote_realm_push_forwarded_count is not None: # nocoverage
|
||||
# mobile_pushes_forwarded is a CountStat with a day frequency,
|
||||
# so we want to show the start of the latest day interval.
|
||||
push_forwarded_interval_start = (
|
||||
latest_remote_realm_push_forwarded_count.end_time - timedelta(days=1)
|
||||
).strftime("%Y-%m-%d")
|
||||
else:
|
||||
push_forwarded_interval_start = "None"
|
||||
push_status = get_push_status_for_remote_request(remote_entity.server, remote_entity)
|
||||
push_notification_status = PushNotificationsStatus(
|
||||
can_push=push_status.can_push,
|
||||
expected_end=timestamp_to_datetime(push_status.expected_end_timestamp)
|
||||
if push_status.expected_end_timestamp
|
||||
else None,
|
||||
message=push_status.message,
|
||||
)
|
||||
return MobilePushData(
|
||||
total_mobile_users=mobile_users,
|
||||
push_notification_status=push_notification_status,
|
||||
uncategorized_mobile_users=None,
|
||||
mobile_pushes_forwarded=mobile_pushes["total_forwarded"],
|
||||
last_mobile_push_sent=push_forwarded_interval_start,
|
||||
)
|
||||
|
||||
|
||||
def get_data_for_remote_support_view(billing_session: BillingSession) -> RemoteSupportData:
|
||||
if isinstance(billing_session, RemoteServerBillingSession):
|
||||
user_data = get_remote_server_guest_and_non_guest_count(billing_session.remote_server.id)
|
||||
stale_audit_log_data = has_stale_audit_log(billing_session.remote_server)
|
||||
date_created = RemoteZulipServerAuditLog.objects.get(
|
||||
event_type=AuditLogEventType.REMOTE_SERVER_CREATED,
|
||||
server__id=billing_session.remote_server.id,
|
||||
).event_time
|
||||
mobile_data = get_mobile_push_data(billing_session.remote_server)
|
||||
else:
|
||||
assert isinstance(billing_session, RemoteRealmBillingSession)
|
||||
user_data = get_remote_realm_guest_and_non_guest_count(billing_session.remote_realm)
|
||||
stale_audit_log_data = has_stale_audit_log(billing_session.remote_realm.server)
|
||||
date_created = billing_session.remote_realm.realm_date_created
|
||||
mobile_data = get_mobile_push_data(billing_session.remote_realm)
|
||||
user_count = user_data.guest_user_count + user_data.non_guest_user_count
|
||||
plan_data = get_plan_data_for_support_view(billing_session, user_count, stale_audit_log_data)
|
||||
if plan_data.customer is not None:
|
||||
sponsorship_data = get_customer_sponsorship_data(plan_data.customer)
|
||||
else:
|
||||
sponsorship_data = SponsorshipData()
|
||||
|
||||
return RemoteSupportData(
|
||||
date_created=date_created,
|
||||
has_stale_audit_log=stale_audit_log_data,
|
||||
plan_data=plan_data,
|
||||
sponsorship_data=sponsorship_data,
|
||||
user_data=user_data,
|
||||
mobile_push_data=mobile_data,
|
||||
)
|
||||
|
||||
|
||||
def get_data_for_cloud_support_view(billing_session: BillingSession) -> CloudSupportData:
|
||||
assert isinstance(billing_session, RealmBillingSession)
|
||||
user_data = get_realm_user_data(billing_session.realm)
|
||||
plan_data = get_plan_data_for_support_view(billing_session)
|
||||
if plan_data.customer is not None:
|
||||
sponsorship_data = get_customer_sponsorship_data(plan_data.customer)
|
||||
else:
|
||||
sponsorship_data = SponsorshipData()
|
||||
|
||||
return CloudSupportData(
|
||||
plan_data=plan_data,
|
||||
sponsorship_data=sponsorship_data,
|
||||
user_data=user_data,
|
||||
def get_support_url(realm: Realm) -> str:
|
||||
support_realm_uri = get_realm(settings.STAFF_SUBDOMAIN).uri
|
||||
support_url = urljoin(
|
||||
support_realm_uri,
|
||||
urlunsplit(("", "", reverse("support"), urlencode({"q": realm.string_id}), "")),
|
||||
)
|
||||
return support_url
|
||||
|
||||
@@ -1,378 +0,0 @@
|
||||
# Generated by Django 5.0.7 on 2024-08-13 20:29
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
replaces = [
|
||||
("corporate", "0001_initial"),
|
||||
("corporate", "0002_customer_default_discount"),
|
||||
("corporate", "0003_customerplan"),
|
||||
("corporate", "0004_licenseledger"),
|
||||
("corporate", "0005_customerplan_invoicing"),
|
||||
("corporate", "0006_nullable_stripe_customer_id"),
|
||||
("corporate", "0007_remove_deprecated_fields"),
|
||||
("corporate", "0008_nullable_next_invoice_date"),
|
||||
("corporate", "0009_customer_sponsorship_pending"),
|
||||
("corporate", "0010_customerplan_exempt_from_from_license_number_check"),
|
||||
("corporate", "0011_move_exempt_from_from_license_number_check_to_customer_model"),
|
||||
("corporate", "0012_zulipsponsorshiprequest"),
|
||||
("corporate", "0013_alter_zulipsponsorshiprequest_org_website"),
|
||||
("corporate", "0014_customerplan_end_date"),
|
||||
("corporate", "0015_event_paymentintent_session"),
|
||||
("corporate", "0016_customer_add_remote_server_field"),
|
||||
(
|
||||
"corporate",
|
||||
"0017_rename_exempt_from_from_license_number_check_customer_exempt_from_license_number_check",
|
||||
),
|
||||
("corporate", "0018_customer_cloud_xor_self_hosted"),
|
||||
("corporate", "0019_zulipsponsorshiprequest_expected_total_users_and_more"),
|
||||
("corporate", "0020_add_remote_realm_customers"),
|
||||
("corporate", "0021_remove_session_payment_intent"),
|
||||
("corporate", "0022_session_is_manual_license_management_upgrade_session"),
|
||||
("corporate", "0023_zulipsponsorshiprequest_customer"),
|
||||
("corporate", "0024_zulipsponsorshiprequest_fill_customer_data"),
|
||||
("corporate", "0025_alter_zulipsponsorshiprequest_customer"),
|
||||
("corporate", "0026_remove_zulipsponsorshiprequest_realm"),
|
||||
("corporate", "0027_alter_zulipsponsorshiprequest_requested_by"),
|
||||
("corporate", "0028_zulipsponsorshiprequest_requested_plan"),
|
||||
("corporate", "0029_session_tier"),
|
||||
("corporate", "0030_alter_zulipsponsorshiprequest_requested_plan"),
|
||||
("corporate", "0031_customer_flat_discount_and_more"),
|
||||
("corporate", "0032_customer_minimum_licenses"),
|
||||
("corporate", "0033_customerplan_invoice_overdue_email_sent"),
|
||||
("corporate", "0034_customer_discount_required_tier"),
|
||||
("corporate", "0035_update_legacy_plan_next_invoice_date"),
|
||||
("corporate", "0036_fix_customer_plans_scheduled_after_legacy_plan"),
|
||||
("corporate", "0037_customerplanoffer"),
|
||||
("corporate", "0038_customerplanoffer_sent_invoice_id_invoice"),
|
||||
("corporate", "0039_backfill_end_date_for_fixed_price_plans"),
|
||||
("corporate", "0040_customerplan_reminder_to_review_plan_email_sent"),
|
||||
("corporate", "0041_fix_plans_on_free_trial_with_changes_in_schedule"),
|
||||
("corporate", "0042_invoice_is_created_for_free_trial_upgrade_and_more"),
|
||||
("corporate", "0043_remove_customer_default_discount_and_more"),
|
||||
("corporate", "0044_convert_ids_to_bigints"),
|
||||
]
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("contenttypes", "0002_remove_content_type_name"),
|
||||
("zerver", "0001_initial"),
|
||||
# Requires RemoteRealm model for foreign keys.
|
||||
("zilencer", "0035_remoterealmcount_remote_realm_and_more"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Customer",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("stripe_customer_id", models.CharField(max_length=255, null=True, unique=True)),
|
||||
(
|
||||
"realm",
|
||||
models.OneToOneField(
|
||||
null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.realm"
|
||||
),
|
||||
),
|
||||
("sponsorship_pending", models.BooleanField(default=False)),
|
||||
("exempt_from_license_number_check", models.BooleanField(default=False)),
|
||||
(
|
||||
"remote_server",
|
||||
models.OneToOneField(
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="zilencer.remotezulipserver",
|
||||
),
|
||||
),
|
||||
(
|
||||
"remote_realm",
|
||||
models.OneToOneField(
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="zilencer.remoterealm",
|
||||
),
|
||||
),
|
||||
("flat_discount", models.IntegerField(default=2000)),
|
||||
("flat_discounted_months", models.IntegerField(default=0)),
|
||||
("minimum_licenses", models.PositiveIntegerField(null=True)),
|
||||
("required_plan_tier", models.SmallIntegerField(null=True)),
|
||||
("annual_discounted_price", models.IntegerField(default=0)),
|
||||
("monthly_discounted_price", models.IntegerField(default=0)),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="CustomerPlan",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("automanage_licenses", models.BooleanField(default=False)),
|
||||
("charge_automatically", models.BooleanField(default=False)),
|
||||
("price_per_license", models.IntegerField(null=True)),
|
||||
("fixed_price", models.IntegerField(null=True)),
|
||||
("discount", models.TextField(null=True)),
|
||||
("billing_cycle_anchor", models.DateTimeField()),
|
||||
("billing_schedule", models.SmallIntegerField()),
|
||||
("next_invoice_date", models.DateTimeField(db_index=True, null=True)),
|
||||
("tier", models.SmallIntegerField()),
|
||||
("status", models.SmallIntegerField(default=1)),
|
||||
(
|
||||
"customer",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="corporate.customer"
|
||||
),
|
||||
),
|
||||
("invoicing_status", models.SmallIntegerField(default=1)),
|
||||
("end_date", models.DateTimeField(null=True)),
|
||||
("invoice_overdue_email_sent", models.BooleanField(default=False)),
|
||||
("reminder_to_review_plan_email_sent", models.BooleanField(default=False)),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="CustomerPlanOffer",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("fixed_price", models.IntegerField(null=True)),
|
||||
("tier", models.SmallIntegerField()),
|
||||
("status", models.SmallIntegerField()),
|
||||
(
|
||||
"customer",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="corporate.customer"
|
||||
),
|
||||
),
|
||||
("sent_invoice_id", models.CharField(max_length=255, null=True)),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Event",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("stripe_event_id", models.CharField(max_length=255)),
|
||||
("type", models.CharField(max_length=255)),
|
||||
("status", models.SmallIntegerField(default=1)),
|
||||
("object_id", models.PositiveIntegerField(db_index=True)),
|
||||
("handler_error", models.JSONField(default=None, null=True)),
|
||||
(
|
||||
"content_type",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="contenttypes.contenttype"
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Invoice",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("stripe_invoice_id", models.CharField(max_length=255, unique=True)),
|
||||
("status", models.SmallIntegerField()),
|
||||
(
|
||||
"customer",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="corporate.customer"
|
||||
),
|
||||
),
|
||||
("is_created_for_free_trial_upgrade", models.BooleanField(default=False)),
|
||||
(
|
||||
"plan",
|
||||
models.ForeignKey(
|
||||
default=None,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
to="corporate.customerplan",
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="LicenseLedger",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("is_renewal", models.BooleanField(default=False)),
|
||||
("event_time", models.DateTimeField()),
|
||||
("licenses", models.IntegerField()),
|
||||
("licenses_at_next_renewal", models.IntegerField(null=True)),
|
||||
(
|
||||
"plan",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="corporate.customerplan"
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="PaymentIntent",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("stripe_payment_intent_id", models.CharField(max_length=255, unique=True)),
|
||||
("status", models.SmallIntegerField()),
|
||||
("last_payment_error", models.JSONField(default=None, null=True)),
|
||||
(
|
||||
"customer",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="corporate.customer"
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Session",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("stripe_session_id", models.CharField(max_length=255, unique=True)),
|
||||
("type", models.SmallIntegerField()),
|
||||
("status", models.SmallIntegerField(default=1)),
|
||||
(
|
||||
"customer",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="corporate.customer"
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_manual_license_management_upgrade_session",
|
||||
models.BooleanField(default=False),
|
||||
),
|
||||
("tier", models.SmallIntegerField(null=True)),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ZulipSponsorshipRequest",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
(
|
||||
"org_type",
|
||||
models.PositiveSmallIntegerField(
|
||||
choices=[
|
||||
(0, "Unspecified"),
|
||||
(10, "Business"),
|
||||
(20, "Open-source project"),
|
||||
(30, "Education (non-profit)"),
|
||||
(35, "Education (for-profit)"),
|
||||
(40, "Research"),
|
||||
(50, "Event or conference"),
|
||||
(60, "Non-profit (registered)"),
|
||||
(70, "Government"),
|
||||
(80, "Political group"),
|
||||
(90, "Community"),
|
||||
(100, "Personal"),
|
||||
(1000, "Other"),
|
||||
],
|
||||
default=0,
|
||||
),
|
||||
),
|
||||
("org_website", models.URLField(blank=True, null=True)),
|
||||
("org_description", models.TextField(default="")),
|
||||
(
|
||||
"requested_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
("expected_total_users", models.TextField(default="")),
|
||||
("paid_users_count", models.TextField(default="")),
|
||||
("paid_users_description", models.TextField(default="")),
|
||||
(
|
||||
"customer",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="corporate.customer"
|
||||
),
|
||||
),
|
||||
(
|
||||
"requested_plan",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("", "UNSPECIFIED"),
|
||||
("Community", "COMMUNITY"),
|
||||
("Basic", "BASIC"),
|
||||
("Business", "BUSINESS"),
|
||||
],
|
||||
default="",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
# Operation cannot be squashed, because it's a reverse foreign
|
||||
# key (`LicenseLedger` has a `CustomerPlan` column), and
|
||||
# squashing it into the CreateModel would require creating a
|
||||
# dependency loop.
|
||||
migrations.AddField(
|
||||
model_name="customerplan",
|
||||
name="invoiced_through",
|
||||
field=models.ForeignKey(
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="+",
|
||||
to="corporate.LicenseLedger",
|
||||
),
|
||||
),
|
||||
# Django's squashmigrations tooling seems unable to squash
|
||||
# this operation into CreateModel. Likely it's possible to do
|
||||
# so manually, but it's just a single operation.
|
||||
migrations.AddConstraint(
|
||||
model_name="customer",
|
||||
constraint=models.CheckConstraint(
|
||||
condition=models.Q(
|
||||
("realm__isnull", False),
|
||||
("remote_server__isnull", False),
|
||||
("remote_realm__isnull", False),
|
||||
_connector="OR",
|
||||
),
|
||||
name="has_associated_model_object",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -15,7 +15,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AddConstraint(
|
||||
model_name="customer",
|
||||
constraint=models.CheckConstraint(
|
||||
condition=models.Q(
|
||||
check=models.Q(
|
||||
("realm__isnull", False), ("remote_server__isnull", False), _connector="XOR"
|
||||
),
|
||||
name="cloud_xor_self_hosted",
|
||||
|
||||
@@ -1,27 +0,0 @@
|
||||
# Generated by Django 4.2.6 on 2023-11-11 14:16
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("corporate", "0018_customer_cloud_xor_self_hosted"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="zulipsponsorshiprequest",
|
||||
name="expected_total_users",
|
||||
field=models.TextField(default=""),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="zulipsponsorshiprequest",
|
||||
name="paid_users_count",
|
||||
field=models.TextField(default=""),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="zulipsponsorshiprequest",
|
||||
name="paid_users_description",
|
||||
field=models.TextField(default=""),
|
||||
),
|
||||
]
|
||||
@@ -1,37 +0,0 @@
|
||||
# Generated by Django 4.2.7 on 2023-11-17 20:11
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("zilencer", "0035_remoterealmcount_remote_realm_and_more"),
|
||||
("corporate", "0019_zulipsponsorshiprequest_expected_total_users_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveConstraint(
|
||||
model_name="customer",
|
||||
name="cloud_xor_self_hosted",
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="customer",
|
||||
name="remote_realm",
|
||||
field=models.OneToOneField(
|
||||
null=True, on_delete=django.db.models.deletion.CASCADE, to="zilencer.remoterealm"
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="customer",
|
||||
constraint=models.CheckConstraint(
|
||||
condition=models.Q(
|
||||
("realm__isnull", False),
|
||||
("remote_server__isnull", False),
|
||||
("remote_realm__isnull", False),
|
||||
_connector="OR",
|
||||
),
|
||||
name="has_associated_model_object",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,16 +0,0 @@
|
||||
# Generated by Django 4.2.7 on 2023-11-18 14:54
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("corporate", "0020_add_remote_realm_customers"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="session",
|
||||
name="payment_intent",
|
||||
),
|
||||
]
|
||||
@@ -1,17 +0,0 @@
|
||||
# Generated by Django 4.2.7 on 2023-11-21 11:20
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("corporate", "0021_remove_session_payment_intent"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="session",
|
||||
name="is_manual_license_management_upgrade_session",
|
||||
field=models.BooleanField(default=False),
|
||||
),
|
||||
]
|
||||
@@ -1,20 +0,0 @@
|
||||
# Generated by Django 4.2.7 on 2023-11-26 16:00
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("corporate", "0022_session_is_manual_license_management_upgrade_session"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="zulipsponsorshiprequest",
|
||||
name="customer",
|
||||
field=models.ForeignKey(
|
||||
null=True, on_delete=django.db.models.deletion.CASCADE, to="corporate.customer"
|
||||
),
|
||||
),
|
||||
]
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user