mirror of
https://github.com/zulip/zulip.git
synced 2025-10-26 09:34:02 +00:00
Compare commits
178 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5e78618309 | ||
|
|
305c13faeb | ||
|
|
c6a5903280 | ||
|
|
82adae451e | ||
|
|
3f0919cc65 | ||
|
|
e61ffc5bd7 | ||
|
|
a303c27a16 | ||
|
|
6b3399d7e6 | ||
|
|
ad692da6aa | ||
|
|
5ebfb6aae5 | ||
|
|
cc1244afa3 | ||
|
|
b6c8acbf14 | ||
|
|
82155e15a5 | ||
|
|
c9e00e6391 | ||
|
|
51d0886f60 | ||
|
|
c74f3c247c | ||
|
|
86a37e6956 | ||
|
|
5cef03280a | ||
|
|
bb3cc8eae8 | ||
|
|
2e7738470f | ||
|
|
b5d75b9dba | ||
|
|
f604124622 | ||
|
|
0e613f724f | ||
|
|
2aa3695d41 | ||
|
|
214df3ea1f | ||
|
|
a4134e183f | ||
|
|
3c7fdf8a82 | ||
|
|
b031537fe9 | ||
|
|
9d3fb85897 | ||
|
|
b5e64dd1ef | ||
|
|
b1156e6d67 | ||
|
|
d918a09db8 | ||
|
|
70aed5e26c | ||
|
|
30ef55ca6c | ||
|
|
09bd546210 | ||
|
|
8619f858f6 | ||
|
|
97f49cc555 | ||
|
|
096e7af06d | ||
|
|
e6f52eb2a0 | ||
|
|
51ff34083e | ||
|
|
41038c3510 | ||
|
|
25c87d9823 | ||
|
|
14e60fd203 | ||
|
|
236508f61e | ||
|
|
4bbcfd0499 | ||
|
|
80bf880d6f | ||
|
|
6a3488d7ed | ||
|
|
7039f1d182 | ||
|
|
4fa62a25e2 | ||
|
|
09678193c9 | ||
|
|
28a8655a9d | ||
|
|
cf86e7b3d8 | ||
|
|
472e216cec | ||
|
|
345939dc64 | ||
|
|
029b72c496 | ||
|
|
602984f73e | ||
|
|
fcf4ede700 | ||
|
|
318da92b59 | ||
|
|
5de2969275 | ||
|
|
44bee53f30 | ||
|
|
1593ab6082 | ||
|
|
3bc1ad05f7 | ||
|
|
e124464fea | ||
|
|
9362158e04 | ||
|
|
0ccc706f7a | ||
|
|
b4a0684201 | ||
|
|
ad9187d9f7 | ||
|
|
edda368670 | ||
|
|
f7f750e7a8 | ||
|
|
ce8d8f3846 | ||
|
|
d632e2c6bf | ||
|
|
ac5e31ce04 | ||
|
|
5f474e8425 | ||
|
|
33d43b695e | ||
|
|
acf90db8b6 | ||
|
|
40968fda49 | ||
|
|
4b3f68382c | ||
|
|
b20797ed9c | ||
|
|
e637ff626d | ||
|
|
ca4cf94e79 | ||
|
|
789e960672 | ||
|
|
572138d983 | ||
|
|
df8ac69d90 | ||
|
|
9a9c6730ff | ||
|
|
5ff82c82ae | ||
|
|
00b3da0a0c | ||
|
|
9ded5be2a7 | ||
|
|
0d0aaf3c92 | ||
|
|
26907e1c2e | ||
|
|
953f3c8c1d | ||
|
|
abf82392a3 | ||
|
|
fb9cdf0f56 | ||
|
|
df80303a64 | ||
|
|
d7fb2292eb | ||
|
|
827d1d9d3b | ||
|
|
64b563e1dc | ||
|
|
92fdfffa4d | ||
|
|
1767a0bcb1 | ||
|
|
6736b35f5f | ||
|
|
c34110f88c | ||
|
|
fc4102d779 | ||
|
|
4d0ddf483d | ||
|
|
9c927e40d6 | ||
|
|
4d21bad033 | ||
|
|
472428621a | ||
|
|
37b40df30c | ||
|
|
87c58f8e23 | ||
|
|
eb5832f7a4 | ||
|
|
a9b6d9990a | ||
|
|
f4fe1660f3 | ||
|
|
3bb3a415a8 | ||
|
|
cca19fedf0 | ||
|
|
c59eb24674 | ||
|
|
c530f1b582 | ||
|
|
3b48bcca95 | ||
|
|
50ca78447e | ||
|
|
b4d9cd4e0f | ||
|
|
7c5e017c14 | ||
|
|
7ba639960d | ||
|
|
76641a5f21 | ||
|
|
b54240d6cf | ||
|
|
508c676f61 | ||
|
|
b60ba10351 | ||
|
|
b8567d8d8f | ||
|
|
025219da16 | ||
|
|
5bcb52390c | ||
|
|
90cbf900d4 | ||
|
|
ddf76baf89 | ||
|
|
be7169bed0 | ||
|
|
bdc67055b1 | ||
|
|
f5b96c8551 | ||
|
|
2e48056a9c | ||
|
|
e0f9f58411 | ||
|
|
f29b1d3192 | ||
|
|
b1e8ead908 | ||
|
|
5cb7acec36 | ||
|
|
0cb261ac6b | ||
|
|
b58a5b3bf3 | ||
|
|
76dc8bc9f7 | ||
|
|
bf5f006971 | ||
|
|
500bd04e11 | ||
|
|
d35bdd312f | ||
|
|
51d9bbca1e | ||
|
|
f3c9a5019b | ||
|
|
36fa5e0385 | ||
|
|
faea77d03f | ||
|
|
ea9ba8b24c | ||
|
|
051f1c3120 | ||
|
|
fb03c3205e | ||
|
|
662396d2c5 | ||
|
|
4a5204a967 | ||
|
|
44a3cd8dd3 | ||
|
|
efddda2609 | ||
|
|
ecfcc20351 | ||
|
|
fa68acd669 | ||
|
|
9c88f6c4ce | ||
|
|
8aa6958923 | ||
|
|
88e2f64869 | ||
|
|
c7df68eb48 | ||
|
|
599094bcf5 | ||
|
|
4a4be8620c | ||
|
|
3dc29fbc76 | ||
|
|
c49dfc5679 | ||
|
|
08c2d9a766 | ||
|
|
d9e7feae0a | ||
|
|
58e29a9ca0 | ||
|
|
266dbad737 | ||
|
|
4db1aa75ce | ||
|
|
fc0d5fcfd5 | ||
|
|
00382078ad | ||
|
|
9313e8f909 | ||
|
|
9bb31433f1 | ||
|
|
4c313ff652 | ||
|
|
9ba6664c44 | ||
|
|
8616c2e092 | ||
|
|
6ca04586c1 | ||
|
|
04fc7e293e | ||
|
|
7809ecd38e |
@@ -3,6 +3,3 @@
|
|||||||
last 2 versions
|
last 2 versions
|
||||||
Firefox ESR
|
Firefox ESR
|
||||||
not dead and supports async-functions
|
not dead and supports async-functions
|
||||||
|
|
||||||
[test]
|
|
||||||
current Node
|
|
||||||
@@ -16,15 +16,3 @@ fpr
|
|||||||
alls
|
alls
|
||||||
nd
|
nd
|
||||||
ot
|
ot
|
||||||
womens
|
|
||||||
vise
|
|
||||||
falsy
|
|
||||||
ro
|
|
||||||
derails
|
|
||||||
forin
|
|
||||||
uper
|
|
||||||
slac
|
|
||||||
couldn
|
|
||||||
ges
|
|
||||||
assertIn
|
|
||||||
thirdparty
|
|
||||||
|
|||||||
@@ -8,9 +8,8 @@ indent_style = space
|
|||||||
insert_final_newline = true
|
insert_final_newline = true
|
||||||
trim_trailing_whitespace = true
|
trim_trailing_whitespace = true
|
||||||
|
|
||||||
[[shell]]
|
binary_next_line = true # for shfmt
|
||||||
binary_next_line = true
|
switch_case_indent = true # for shfmt
|
||||||
switch_case_indent = true
|
|
||||||
|
|
||||||
[{*.{js,json,ts},check-openapi}]
|
[{*.{js,json,ts},check-openapi}]
|
||||||
max_line_length = 100
|
max_line_length = 100
|
||||||
|
|||||||
@@ -4,12 +4,11 @@
|
|||||||
|
|
||||||
/docs/_build
|
/docs/_build
|
||||||
/static/generated
|
/static/generated
|
||||||
|
/static/third
|
||||||
/static/webpack-bundles
|
/static/webpack-bundles
|
||||||
/var/*
|
/var/*
|
||||||
!/var/puppeteer
|
!/var/puppeteer
|
||||||
/var/puppeteer/*
|
/var/puppeteer/*
|
||||||
!/var/puppeteer/test_credentials.d.ts
|
!/var/puppeteer/test_credentials.d.ts
|
||||||
/web/generated
|
|
||||||
/web/third
|
|
||||||
/zulip-current-venv
|
/zulip-current-venv
|
||||||
/zulip-py3-venv
|
/zulip-py3-venv
|
||||||
|
|||||||
293
.eslintrc.js
293
.eslintrc.js
@@ -1,293 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
|
|
||||||
const confusingBrowserGlobals = require("confusing-browser-globals");
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
root: true,
|
|
||||||
env: {
|
|
||||||
es2020: true,
|
|
||||||
node: true,
|
|
||||||
},
|
|
||||||
extends: [
|
|
||||||
"eslint:recommended",
|
|
||||||
"plugin:import/errors",
|
|
||||||
"plugin:import/warnings",
|
|
||||||
"plugin:no-jquery/recommended",
|
|
||||||
"plugin:no-jquery/deprecated",
|
|
||||||
"plugin:unicorn/recommended",
|
|
||||||
"prettier",
|
|
||||||
],
|
|
||||||
parser: "@babel/eslint-parser",
|
|
||||||
parserOptions: {
|
|
||||||
requireConfigFile: false,
|
|
||||||
warnOnUnsupportedTypeScriptVersion: false,
|
|
||||||
sourceType: "unambiguous",
|
|
||||||
},
|
|
||||||
plugins: ["formatjs", "no-jquery"],
|
|
||||||
settings: {
|
|
||||||
formatjs: {
|
|
||||||
additionalFunctionNames: ["$t", "$t_html"],
|
|
||||||
},
|
|
||||||
"no-jquery": {
|
|
||||||
collectionReturningPlugins: {
|
|
||||||
expectOne: "always",
|
|
||||||
},
|
|
||||||
variablePattern: "^\\$(?!t$|t_html$).",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
reportUnusedDisableDirectives: true,
|
|
||||||
rules: {
|
|
||||||
"array-callback-return": "error",
|
|
||||||
"arrow-body-style": "error",
|
|
||||||
"block-scoped-var": "error",
|
|
||||||
"consistent-return": "error",
|
|
||||||
curly: "error",
|
|
||||||
"dot-notation": "error",
|
|
||||||
eqeqeq: "error",
|
|
||||||
"formatjs/enforce-default-message": ["error", "literal"],
|
|
||||||
"formatjs/enforce-placeholders": [
|
|
||||||
"error",
|
|
||||||
{ignoreList: ["b", "code", "em", "i", "kbd", "p", "strong"]},
|
|
||||||
],
|
|
||||||
"formatjs/no-id": "error",
|
|
||||||
"guard-for-in": "error",
|
|
||||||
"import/extensions": "error",
|
|
||||||
"import/first": "error",
|
|
||||||
"import/newline-after-import": "error",
|
|
||||||
"import/no-cycle": ["error", {ignoreExternal: true}],
|
|
||||||
"import/no-duplicates": "error",
|
|
||||||
"import/no-self-import": "error",
|
|
||||||
"import/no-unresolved": "off",
|
|
||||||
"import/no-useless-path-segments": "error",
|
|
||||||
"import/order": ["error", {alphabetize: {order: "asc"}, "newlines-between": "always"}],
|
|
||||||
"import/unambiguous": "error",
|
|
||||||
"lines-around-directive": "error",
|
|
||||||
"new-cap": "error",
|
|
||||||
"no-alert": "error",
|
|
||||||
"no-array-constructor": "error",
|
|
||||||
"no-bitwise": "error",
|
|
||||||
"no-caller": "error",
|
|
||||||
"no-catch-shadow": "error",
|
|
||||||
"no-constant-condition": ["error", {checkLoops: false}],
|
|
||||||
"no-div-regex": "error",
|
|
||||||
"no-else-return": "error",
|
|
||||||
"no-eq-null": "error",
|
|
||||||
"no-eval": "error",
|
|
||||||
"no-implicit-coercion": "error",
|
|
||||||
"no-implied-eval": "error",
|
|
||||||
"no-inner-declarations": "off",
|
|
||||||
"no-iterator": "error",
|
|
||||||
"no-jquery/no-append-html": "error",
|
|
||||||
"no-jquery/no-constructor-attributes": "error",
|
|
||||||
"no-jquery/no-parse-html-literal": "error",
|
|
||||||
"no-label-var": "error",
|
|
||||||
"no-labels": "error",
|
|
||||||
"no-loop-func": "error",
|
|
||||||
"no-multi-str": "error",
|
|
||||||
"no-native-reassign": "error",
|
|
||||||
"no-new-func": "error",
|
|
||||||
"no-new-object": "error",
|
|
||||||
"no-new-wrappers": "error",
|
|
||||||
"no-octal-escape": "error",
|
|
||||||
"no-plusplus": "error",
|
|
||||||
"no-proto": "error",
|
|
||||||
"no-restricted-globals": ["error", ...confusingBrowserGlobals],
|
|
||||||
"no-return-assign": "error",
|
|
||||||
"no-script-url": "error",
|
|
||||||
"no-self-compare": "error",
|
|
||||||
"no-sync": "error",
|
|
||||||
"no-throw-literal": "error",
|
|
||||||
"no-undef-init": "error",
|
|
||||||
"no-unneeded-ternary": ["error", {defaultAssignment: false}],
|
|
||||||
"no-unused-expressions": "error",
|
|
||||||
"no-unused-vars": [
|
|
||||||
"error",
|
|
||||||
{args: "all", argsIgnorePattern: "^_", ignoreRestSiblings: true},
|
|
||||||
],
|
|
||||||
"no-use-before-define": ["error", {functions: false}],
|
|
||||||
"no-useless-concat": "error",
|
|
||||||
"no-useless-constructor": "error",
|
|
||||||
"no-var": "error",
|
|
||||||
"object-shorthand": ["error", "always", {avoidExplicitReturnArrows: true}],
|
|
||||||
"one-var": ["error", "never"],
|
|
||||||
"prefer-arrow-callback": "error",
|
|
||||||
"prefer-const": ["error", {ignoreReadBeforeAssign: true}],
|
|
||||||
radix: "error",
|
|
||||||
"sort-imports": ["error", {ignoreDeclarationSort: true}],
|
|
||||||
"spaced-comment": ["error", "always", {markers: ["/"]}],
|
|
||||||
strict: "error",
|
|
||||||
"unicorn/consistent-function-scoping": "off",
|
|
||||||
"unicorn/explicit-length-check": "off",
|
|
||||||
"unicorn/filename-case": "off",
|
|
||||||
"unicorn/no-await-expression-member": "off",
|
|
||||||
"unicorn/no-negated-condition": "off",
|
|
||||||
"unicorn/no-null": "off",
|
|
||||||
"unicorn/no-process-exit": "off",
|
|
||||||
"unicorn/no-useless-undefined": "off",
|
|
||||||
"unicorn/numeric-separators-style": "off",
|
|
||||||
"unicorn/prefer-module": "off",
|
|
||||||
"unicorn/prefer-node-protocol": "off",
|
|
||||||
"unicorn/prefer-string-raw": "off",
|
|
||||||
"unicorn/prefer-ternary": "off",
|
|
||||||
"unicorn/prefer-top-level-await": "off",
|
|
||||||
"unicorn/prevent-abbreviations": "off",
|
|
||||||
"unicorn/switch-case-braces": "off",
|
|
||||||
"valid-typeof": ["error", {requireStringLiterals: true}],
|
|
||||||
yoda: "error",
|
|
||||||
},
|
|
||||||
overrides: [
|
|
||||||
{
|
|
||||||
files: ["web/tests/**"],
|
|
||||||
rules: {
|
|
||||||
"no-jquery/no-selector-prop": "off",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
files: ["web/e2e-tests/**"],
|
|
||||||
globals: {
|
|
||||||
zulip_test: false,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
files: ["web/src/**"],
|
|
||||||
globals: {
|
|
||||||
StripeCheckout: false,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
files: ["**/*.ts"],
|
|
||||||
extends: [
|
|
||||||
"plugin:@typescript-eslint/strict-type-checked",
|
|
||||||
"plugin:@typescript-eslint/stylistic-type-checked",
|
|
||||||
"plugin:import/typescript",
|
|
||||||
],
|
|
||||||
parserOptions: {
|
|
||||||
project: "tsconfig.json",
|
|
||||||
},
|
|
||||||
settings: {
|
|
||||||
"import/resolver": {
|
|
||||||
node: {
|
|
||||||
extensions: [".ts", ".d.ts", ".js"], // https://github.com/import-js/eslint-plugin-import/issues/2267
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
globals: {
|
|
||||||
JQuery: false,
|
|
||||||
},
|
|
||||||
rules: {
|
|
||||||
// Disable base rule to avoid conflict
|
|
||||||
"no-use-before-define": "off",
|
|
||||||
|
|
||||||
"@typescript-eslint/consistent-type-assertions": [
|
|
||||||
"error",
|
|
||||||
{assertionStyle: "never"},
|
|
||||||
],
|
|
||||||
"@typescript-eslint/consistent-type-definitions": ["error", "type"],
|
|
||||||
"@typescript-eslint/consistent-type-imports": "error",
|
|
||||||
"@typescript-eslint/explicit-function-return-type": [
|
|
||||||
"error",
|
|
||||||
{allowExpressions: true},
|
|
||||||
],
|
|
||||||
"@typescript-eslint/member-ordering": "error",
|
|
||||||
"@typescript-eslint/method-signature-style": "error",
|
|
||||||
"@typescript-eslint/no-non-null-assertion": "off",
|
|
||||||
"@typescript-eslint/no-unnecessary-condition": "off",
|
|
||||||
"@typescript-eslint/no-unnecessary-qualifier": "error",
|
|
||||||
"@typescript-eslint/no-unused-vars": [
|
|
||||||
"error",
|
|
||||||
{args: "all", argsIgnorePattern: "^_", ignoreRestSiblings: true},
|
|
||||||
],
|
|
||||||
"@typescript-eslint/no-use-before-define": ["error", {functions: false}],
|
|
||||||
"@typescript-eslint/parameter-properties": "error",
|
|
||||||
"@typescript-eslint/promise-function-async": "error",
|
|
||||||
"@typescript-eslint/restrict-plus-operands": ["error", {}],
|
|
||||||
"@typescript-eslint/restrict-template-expressions": ["error", {}],
|
|
||||||
"no-undef": "error",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
files: ["**/*.d.ts"],
|
|
||||||
rules: {
|
|
||||||
"import/unambiguous": "off",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
files: ["web/e2e-tests/**", "web/tests/**"],
|
|
||||||
globals: {
|
|
||||||
CSS: false,
|
|
||||||
document: false,
|
|
||||||
navigator: false,
|
|
||||||
window: false,
|
|
||||||
},
|
|
||||||
rules: {
|
|
||||||
"formatjs/no-id": "off",
|
|
||||||
"new-cap": "off",
|
|
||||||
"no-sync": "off",
|
|
||||||
"unicorn/prefer-prototype-methods": "off",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
files: ["web/debug-require.js"],
|
|
||||||
env: {
|
|
||||||
browser: true,
|
|
||||||
es2020: false,
|
|
||||||
},
|
|
||||||
rules: {
|
|
||||||
// Don’t require ES features that PhantomJS doesn’t support
|
|
||||||
// TODO: Toggle these settings now that we don't use PhantomJS
|
|
||||||
"no-var": "off",
|
|
||||||
"object-shorthand": "off",
|
|
||||||
"prefer-arrow-callback": "off",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
files: ["web/shared/**", "web/src/**", "web/third/**"],
|
|
||||||
env: {
|
|
||||||
browser: true,
|
|
||||||
node: false,
|
|
||||||
},
|
|
||||||
globals: {
|
|
||||||
DEVELOPMENT: false,
|
|
||||||
ZULIP_VERSION: false,
|
|
||||||
},
|
|
||||||
rules: {
|
|
||||||
"no-console": "error",
|
|
||||||
},
|
|
||||||
settings: {
|
|
||||||
"import/resolver": {
|
|
||||||
webpack: {
|
|
||||||
config: "./web/webpack.config.ts",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
files: ["web/shared/**"],
|
|
||||||
env: {
|
|
||||||
browser: false,
|
|
||||||
"shared-node-browser": true,
|
|
||||||
},
|
|
||||||
rules: {
|
|
||||||
"import/no-restricted-paths": [
|
|
||||||
"error",
|
|
||||||
{
|
|
||||||
zones: [
|
|
||||||
{
|
|
||||||
target: "./web/shared",
|
|
||||||
from: ".",
|
|
||||||
except: ["./node_modules", "./web/shared"],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
],
|
|
||||||
"unicorn/prefer-string-replace-all": "off",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
files: ["web/server/**"],
|
|
||||||
env: {
|
|
||||||
node: true,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
276
.eslintrc.json
Normal file
276
.eslintrc.json
Normal file
@@ -0,0 +1,276 @@
|
|||||||
|
{
|
||||||
|
"env": {
|
||||||
|
"es2020": true,
|
||||||
|
"node": true
|
||||||
|
},
|
||||||
|
"extends": [
|
||||||
|
"eslint:recommended",
|
||||||
|
"plugin:import/errors",
|
||||||
|
"plugin:import/warnings",
|
||||||
|
"plugin:no-jquery/recommended",
|
||||||
|
"plugin:no-jquery/deprecated",
|
||||||
|
"plugin:unicorn/recommended",
|
||||||
|
"prettier"
|
||||||
|
],
|
||||||
|
"parser": "@babel/eslint-parser",
|
||||||
|
"parserOptions": {
|
||||||
|
"warnOnUnsupportedTypeScriptVersion": false,
|
||||||
|
"sourceType": "unambiguous"
|
||||||
|
},
|
||||||
|
"plugins": ["formatjs", "no-jquery"],
|
||||||
|
"settings": {
|
||||||
|
"additionalFunctionNames": ["$t", "$t_html"],
|
||||||
|
"no-jquery": {
|
||||||
|
"collectionReturningPlugins": {
|
||||||
|
"expectOne": "always"
|
||||||
|
},
|
||||||
|
"variablePattern": "^\\$(?!t$|t_html$)."
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"reportUnusedDisableDirectives": true,
|
||||||
|
"rules": {
|
||||||
|
"array-callback-return": "error",
|
||||||
|
"arrow-body-style": "error",
|
||||||
|
"block-scoped-var": "error",
|
||||||
|
"consistent-return": "error",
|
||||||
|
"curly": "error",
|
||||||
|
"dot-notation": "error",
|
||||||
|
"eqeqeq": "error",
|
||||||
|
"formatjs/enforce-default-message": ["error", "literal"],
|
||||||
|
"formatjs/enforce-placeholders": [
|
||||||
|
"error",
|
||||||
|
{"ignoreList": ["b", "code", "em", "i", "kbd", "p", "strong"]}
|
||||||
|
],
|
||||||
|
"formatjs/no-id": "error",
|
||||||
|
"guard-for-in": "error",
|
||||||
|
"import/extensions": "error",
|
||||||
|
"import/first": "error",
|
||||||
|
"import/newline-after-import": "error",
|
||||||
|
"import/no-self-import": "error",
|
||||||
|
"import/no-useless-path-segments": "error",
|
||||||
|
"import/order": [
|
||||||
|
"error",
|
||||||
|
{
|
||||||
|
"alphabetize": {"order": "asc"},
|
||||||
|
"newlines-between": "always"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"import/unambiguous": "error",
|
||||||
|
"lines-around-directive": "error",
|
||||||
|
"new-cap": "error",
|
||||||
|
"no-alert": "error",
|
||||||
|
"no-array-constructor": "error",
|
||||||
|
"no-bitwise": "error",
|
||||||
|
"no-caller": "error",
|
||||||
|
"no-catch-shadow": "error",
|
||||||
|
"no-constant-condition": ["error", {"checkLoops": false}],
|
||||||
|
"no-div-regex": "error",
|
||||||
|
"no-duplicate-imports": "error",
|
||||||
|
"no-else-return": "error",
|
||||||
|
"no-eq-null": "error",
|
||||||
|
"no-eval": "error",
|
||||||
|
"no-implicit-coercion": "error",
|
||||||
|
"no-implied-eval": "error",
|
||||||
|
"no-inner-declarations": "off",
|
||||||
|
"no-iterator": "error",
|
||||||
|
"no-jquery/no-parse-html-literal": "error",
|
||||||
|
"no-label-var": "error",
|
||||||
|
"no-labels": "error",
|
||||||
|
"no-loop-func": "error",
|
||||||
|
"no-multi-str": "error",
|
||||||
|
"no-native-reassign": "error",
|
||||||
|
"no-new-func": "error",
|
||||||
|
"no-new-object": "error",
|
||||||
|
"no-new-wrappers": "error",
|
||||||
|
"no-octal-escape": "error",
|
||||||
|
"no-plusplus": "error",
|
||||||
|
"no-proto": "error",
|
||||||
|
"no-return-assign": "error",
|
||||||
|
"no-script-url": "error",
|
||||||
|
"no-self-compare": "error",
|
||||||
|
"no-sync": "error",
|
||||||
|
"no-throw-literal": "error",
|
||||||
|
"no-undef-init": "error",
|
||||||
|
"no-unneeded-ternary": ["error", {"defaultAssignment": false}],
|
||||||
|
"no-unused-expressions": "error",
|
||||||
|
"no-use-before-define": ["error", {"functions": false}],
|
||||||
|
"no-useless-concat": "error",
|
||||||
|
"no-useless-constructor": "error",
|
||||||
|
"no-var": "error",
|
||||||
|
"object-shorthand": "error",
|
||||||
|
"one-var": ["error", "never"],
|
||||||
|
"prefer-arrow-callback": "error",
|
||||||
|
"prefer-const": [
|
||||||
|
"error",
|
||||||
|
{
|
||||||
|
"ignoreReadBeforeAssign": true
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"radix": "error",
|
||||||
|
"sort-imports": ["error", {"ignoreDeclarationSort": true}],
|
||||||
|
"spaced-comment": ["error", "always", {"markers": ["/"]}],
|
||||||
|
"strict": "error",
|
||||||
|
"unicorn/consistent-function-scoping": "off",
|
||||||
|
"unicorn/explicit-length-check": "off",
|
||||||
|
"unicorn/filename-case": "off",
|
||||||
|
"unicorn/no-await-expression-member": "off",
|
||||||
|
"unicorn/no-nested-ternary": "off",
|
||||||
|
"unicorn/no-null": "off",
|
||||||
|
"unicorn/no-process-exit": "off",
|
||||||
|
"unicorn/no-useless-undefined": "off",
|
||||||
|
"unicorn/number-literal-case": "off",
|
||||||
|
"unicorn/numeric-separators-style": "off",
|
||||||
|
"unicorn/prefer-module": "off",
|
||||||
|
"unicorn/prefer-node-protocol": "off",
|
||||||
|
"unicorn/prefer-spread": "off",
|
||||||
|
"unicorn/prefer-ternary": "off",
|
||||||
|
"unicorn/prevent-abbreviations": "off",
|
||||||
|
"valid-typeof": ["error", {"requireStringLiterals": true}],
|
||||||
|
"yoda": "error"
|
||||||
|
},
|
||||||
|
"overrides": [
|
||||||
|
{
|
||||||
|
"files": ["frontend_tests/node_tests/**", "frontend_tests/zjsunit/**"],
|
||||||
|
"rules": {
|
||||||
|
"no-jquery/no-selector-prop": "off"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"files": ["frontend_tests/puppeteer_lib/**", "frontend_tests/puppeteer_tests/**"],
|
||||||
|
"globals": {
|
||||||
|
"$": false,
|
||||||
|
"zulip_test": false
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"files": ["static/js/**"],
|
||||||
|
"globals": {
|
||||||
|
"StripeCheckout": false
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"files": ["**/*.ts"],
|
||||||
|
"extends": [
|
||||||
|
"plugin:@typescript-eslint/recommended-requiring-type-checking",
|
||||||
|
"plugin:import/typescript"
|
||||||
|
],
|
||||||
|
"parserOptions": {
|
||||||
|
"project": "tsconfig.json"
|
||||||
|
},
|
||||||
|
"settings": {
|
||||||
|
"import/resolver": {
|
||||||
|
"node": {
|
||||||
|
"extensions": [".ts", ".d.ts", ".js"] // https://github.com/import-js/eslint-plugin-import/issues/2267
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"globals": {
|
||||||
|
"JQuery": false
|
||||||
|
},
|
||||||
|
"rules": {
|
||||||
|
// Disable base rule to avoid conflict
|
||||||
|
"no-duplicate-imports": "off",
|
||||||
|
"no-unused-vars": "off",
|
||||||
|
"no-useless-constructor": "off",
|
||||||
|
"no-use-before-define": "off",
|
||||||
|
|
||||||
|
"@typescript-eslint/array-type": "error",
|
||||||
|
"@typescript-eslint/consistent-type-assertions": "error",
|
||||||
|
"@typescript-eslint/consistent-type-imports": "error",
|
||||||
|
"@typescript-eslint/explicit-function-return-type": [
|
||||||
|
"error",
|
||||||
|
{"allowExpressions": true}
|
||||||
|
],
|
||||||
|
"@typescript-eslint/member-ordering": "error",
|
||||||
|
"@typescript-eslint/no-duplicate-imports": "off",
|
||||||
|
"@typescript-eslint/no-explicit-any": "off",
|
||||||
|
"@typescript-eslint/no-extraneous-class": "error",
|
||||||
|
"@typescript-eslint/no-non-null-assertion": "off",
|
||||||
|
"@typescript-eslint/no-parameter-properties": "error",
|
||||||
|
"@typescript-eslint/no-unnecessary-qualifier": "error",
|
||||||
|
"@typescript-eslint/no-unused-vars": ["error", {"varsIgnorePattern": "^_"}],
|
||||||
|
"@typescript-eslint/no-unsafe-argument": "off",
|
||||||
|
"@typescript-eslint/no-unsafe-assignment": "off",
|
||||||
|
"@typescript-eslint/no-unsafe-call": "off",
|
||||||
|
"@typescript-eslint/no-unsafe-member-access": "off",
|
||||||
|
"@typescript-eslint/no-unsafe-return": "off",
|
||||||
|
"@typescript-eslint/no-use-before-define": "error",
|
||||||
|
"@typescript-eslint/no-useless-constructor": "error",
|
||||||
|
"@typescript-eslint/prefer-includes": "error",
|
||||||
|
"@typescript-eslint/prefer-string-starts-ends-with": "error",
|
||||||
|
"@typescript-eslint/promise-function-async": "error",
|
||||||
|
"@typescript-eslint/unified-signatures": "error",
|
||||||
|
"no-undef": "error"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"files": ["**/*.d.ts"],
|
||||||
|
"rules": {
|
||||||
|
"import/unambiguous": "off"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"files": ["frontend_tests/**"],
|
||||||
|
"globals": {
|
||||||
|
"CSS": false,
|
||||||
|
"document": false,
|
||||||
|
"navigator": false,
|
||||||
|
"window": false
|
||||||
|
},
|
||||||
|
"rules": {
|
||||||
|
"formatjs/no-id": "off",
|
||||||
|
"new-cap": "off",
|
||||||
|
"no-sync": "off",
|
||||||
|
"unicorn/prefer-prototype-methods": "off"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"files": ["tools/debug-require.js"],
|
||||||
|
"env": {
|
||||||
|
"browser": true,
|
||||||
|
"es2020": false
|
||||||
|
},
|
||||||
|
"rules": {
|
||||||
|
// Don’t require ES features that PhantomJS doesn’t support
|
||||||
|
// TODO: Toggle these settings now that we don't use PhantomJS
|
||||||
|
"no-var": "off",
|
||||||
|
"object-shorthand": "off",
|
||||||
|
"prefer-arrow-callback": "off"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"files": ["static/**"],
|
||||||
|
"env": {
|
||||||
|
"browser": true,
|
||||||
|
"node": false
|
||||||
|
},
|
||||||
|
"rules": {
|
||||||
|
"no-console": "error"
|
||||||
|
},
|
||||||
|
"settings": {
|
||||||
|
"import/resolver": "webpack"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"files": ["static/shared/**"],
|
||||||
|
"env": {
|
||||||
|
"browser": false,
|
||||||
|
"shared-node-browser": true
|
||||||
|
},
|
||||||
|
"rules": {
|
||||||
|
"import/no-restricted-paths": [
|
||||||
|
"error",
|
||||||
|
{
|
||||||
|
"zones": [
|
||||||
|
{
|
||||||
|
"target": "./static/shared",
|
||||||
|
"from": ".",
|
||||||
|
"except": ["./node_modules", "./static/shared"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
10
.github/ISSUE_TEMPLATE/1_discussed_on_czo.md
vendored
10
.github/ISSUE_TEMPLATE/1_discussed_on_czo.md
vendored
@@ -1,10 +0,0 @@
|
|||||||
---
|
|
||||||
name: Issue discussed in the Zulip development community
|
|
||||||
about: Bug report, feature or improvement already discussed on chat.zulip.org.
|
|
||||||
---
|
|
||||||
|
|
||||||
<!-- Issue description -->
|
|
||||||
|
|
||||||
<!-- Link to a message in the chat.zulip.org discussion. Message links will still work even if the topic is renamed or resolved. Link back to this issue from the chat.zulip.org thread. -->
|
|
||||||
|
|
||||||
CZO thread
|
|
||||||
18
.github/ISSUE_TEMPLATE/2_bug_report.md
vendored
18
.github/ISSUE_TEMPLATE/2_bug_report.md
vendored
@@ -1,18 +0,0 @@
|
|||||||
---
|
|
||||||
name: Bug report
|
|
||||||
about: A concrete bug report with steps to reproduce the behavior. (See also "Possible bug" below.)
|
|
||||||
labels: ["bug"]
|
|
||||||
---
|
|
||||||
|
|
||||||
<!-- Describe what you were expecting to see, what you saw instead, and steps to take in order to reproduce the buggy behavior. Screenshots can be helpful. -->
|
|
||||||
|
|
||||||
<!-- Check the box for the version of Zulip you are using (see https://zulip.com/help/view-zulip-version).-->
|
|
||||||
|
|
||||||
**Zulip Server and web app version:**
|
|
||||||
|
|
||||||
- [ ] Zulip Cloud (`*.zulipchat.com`)
|
|
||||||
- [ ] Zulip Server 8.0+
|
|
||||||
- [ ] Zulip Server 7.0+
|
|
||||||
- [ ] Zulip Server 6.0+
|
|
||||||
- [ ] Zulip Server 5.0 or older
|
|
||||||
- [ ] Other or not sure
|
|
||||||
6
.github/ISSUE_TEMPLATE/3_feature_request.md
vendored
6
.github/ISSUE_TEMPLATE/3_feature_request.md
vendored
@@ -1,6 +0,0 @@
|
|||||||
---
|
|
||||||
name: Feature or improvement request
|
|
||||||
about: A specific proposal for a new feature of improvement. (See also "Feature suggestion or feedback" below.)
|
|
||||||
---
|
|
||||||
|
|
||||||
<!-- Describe the proposal, including how it would help you or your organization. -->
|
|
||||||
14
.github/ISSUE_TEMPLATE/config.yml
vendored
14
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,14 +0,0 @@
|
|||||||
blank_issues_enabled: true
|
|
||||||
contact_links:
|
|
||||||
- name: Possible bug
|
|
||||||
url: https://zulip.readthedocs.io/en/latest/contributing/reporting-bugs.html
|
|
||||||
about: Report unexpected behavior that may be a bug.
|
|
||||||
- name: Feature suggestion or feedback
|
|
||||||
url: https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html
|
|
||||||
about: Start a discussion about your idea for improving Zulip.
|
|
||||||
- name: Issue with running or upgrading a Zulip server
|
|
||||||
url: https://zulip.readthedocs.io/en/latest/production/troubleshooting.html
|
|
||||||
about: We provide free, interactive support for the vast majority of questions about running a Zulip server.
|
|
||||||
- name: Other support requests and sales questions
|
|
||||||
url: https://zulip.com/help/contact-support
|
|
||||||
about: Contact us — we're happy to help!
|
|
||||||
48
.github/pull_request_template.md
vendored
48
.github/pull_request_template.md
vendored
@@ -1,43 +1,11 @@
|
|||||||
<!-- Describe your pull request here.-->
|
<!-- What's this PR for? (Just a link to an issue is fine.) -->
|
||||||
|
|
||||||
Fixes: <!-- Issue link, or clear description.-->
|
**Testing plan:** <!-- How have you tested? -->
|
||||||
|
|
||||||
<!-- If the PR makes UI changes, always include one or more still screenshots to demonstrate your changes. If it seems helpful, add a screen capture of the new functionality as well.
|
**GIFs or screenshots:** <!-- If a UI change. See:
|
||||||
|
https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html
|
||||||
|
-->
|
||||||
|
|
||||||
Tooling tips: https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html
|
<!-- Also be sure to make clear, coherent commits:
|
||||||
-->
|
https://zulip.readthedocs.io/en/latest/contributing/version-control.html
|
||||||
|
-->
|
||||||
**Screenshots and screen captures:**
|
|
||||||
|
|
||||||
<details>
|
|
||||||
<summary>Self-review checklist</summary>
|
|
||||||
|
|
||||||
<!-- Prior to submitting a PR, follow our step-by-step guide to review your own code:
|
|
||||||
https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code -->
|
|
||||||
|
|
||||||
<!-- Once you create the PR, check off all the steps below that you have completed.
|
|
||||||
If any of these steps are not relevant or you have not completed, leave them unchecked.-->
|
|
||||||
|
|
||||||
- [ ] [Self-reviewed](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code) the changes for clarity and maintainability
|
|
||||||
(variable names, code reuse, readability, etc.).
|
|
||||||
|
|
||||||
Communicate decisions, questions, and potential concerns.
|
|
||||||
|
|
||||||
- [ ] Explains differences from previous plans (e.g., issue description).
|
|
||||||
- [ ] Highlights technical choices and bugs encountered.
|
|
||||||
- [ ] Calls out remaining decisions and concerns.
|
|
||||||
- [ ] Automated tests verify logic where appropriate.
|
|
||||||
|
|
||||||
Individual commits are ready for review (see [commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html)).
|
|
||||||
|
|
||||||
- [ ] Each commit is a coherent idea.
|
|
||||||
- [ ] Commit message(s) explain reasoning and motivation for changes.
|
|
||||||
|
|
||||||
Completed manual review and testing of the following:
|
|
||||||
|
|
||||||
- [ ] Visual appearance of the changes.
|
|
||||||
- [ ] Responsiveness and internationalization.
|
|
||||||
- [ ] Strings and tooltips.
|
|
||||||
- [ ] End-to-end functionality of buttons, interactions and flows.
|
|
||||||
- [ ] Corner cases, error conditions, and easily imagined bugs.
|
|
||||||
</details>
|
|
||||||
|
|||||||
43
.github/workflows/cancel-previous-runs.yml
vendored
Normal file
43
.github/workflows/cancel-previous-runs.yml
vendored
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
name: Cancel previous runs
|
||||||
|
on: [push, pull_request]
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
cancel:
|
||||||
|
name: Cancel previous runs
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 3
|
||||||
|
|
||||||
|
# Don't run this job for zulip/zulip pushes since we
|
||||||
|
# want to run those jobs.
|
||||||
|
if: ${{ github.event_name != 'push' || github.event.repository.full_name != 'zulip/zulip' }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
# We get workflow IDs from GitHub API so we don't have to maintain
|
||||||
|
# a hard-coded list of IDs which need to be updated when a workflow
|
||||||
|
# is added or removed. And, workflow IDs are different for other forks
|
||||||
|
# so this is required.
|
||||||
|
- name: Get workflow IDs.
|
||||||
|
id: workflow_ids
|
||||||
|
continue-on-error: true # Don't fail this job on failure
|
||||||
|
env:
|
||||||
|
# This is in <owner>/<repo> format e.g. zulip/zulip
|
||||||
|
REPOSITORY: ${{ github.repository }}
|
||||||
|
run: |
|
||||||
|
workflow_api_url=https://api.github.com/repos/$REPOSITORY/actions/workflows
|
||||||
|
curl -fL $workflow_api_url -o workflows.json
|
||||||
|
|
||||||
|
script="const {workflows} = require('./workflows'); \
|
||||||
|
const ids = workflows.map(workflow => workflow.id); \
|
||||||
|
console.log(ids.join(','));"
|
||||||
|
ids=$(node -e "$script")
|
||||||
|
echo "::set-output name=ids::$ids"
|
||||||
|
|
||||||
|
- uses: styfle/cancel-workflow-action@0.9.0
|
||||||
|
continue-on-error: true # Don't fail this job on failure
|
||||||
|
with:
|
||||||
|
workflow_id: ${{ steps.workflow_ids.outputs.ids }}
|
||||||
|
access_token: ${{ github.token }}
|
||||||
25
.github/workflows/codeql-analysis.yml
vendored
25
.github/workflows/codeql-analysis.yml
vendored
@@ -2,39 +2,26 @@ name: "Code scanning"
|
|||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: ["*.x", chat.zulip.org, main]
|
branches-ignore:
|
||||||
tags: ["*"]
|
- dependabot/** # https://github.com/github/codeql-action/pull/435
|
||||||
pull_request:
|
pull_request: {}
|
||||||
branches: ["*.x", chat.zulip.org, main]
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}"
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
CodeQL:
|
CodeQL:
|
||||||
permissions:
|
|
||||||
actions: read # for github/codeql-action/init to get workflow details
|
|
||||||
contents: read # for actions/checkout to fetch code
|
|
||||||
security-events: write # for github/codeql-action/analyze to upload SARIF results
|
|
||||||
if: ${{!github.event.repository.private}}
|
if: ${{!github.event.repository.private}}
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository
|
- name: Check out repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v2
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v3
|
uses: github/codeql-action/init@v1
|
||||||
|
|
||||||
# Override language selection by uncommenting this and choosing your languages
|
# Override language selection by uncommenting this and choosing your languages
|
||||||
# with:
|
# with:
|
||||||
# languages: go, javascript, csharp, python, cpp, java
|
# languages: go, javascript, csharp, python, cpp, java
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v3
|
uses: github/codeql-action/analyze@v1
|
||||||
|
|||||||
204
.github/workflows/production-suite.yml
vendored
204
.github/workflows/production-suite.yml
vendored
@@ -1,52 +1,42 @@
|
|||||||
name: Zulip production suite
|
name: Zulip production suite
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push: {}
|
||||||
branches: ["*.x", chat.zulip.org, main]
|
|
||||||
tags: ["*"]
|
|
||||||
pull_request:
|
pull_request:
|
||||||
paths:
|
paths:
|
||||||
- .github/workflows/production-suite.yml
|
- .github/workflows/production-suite.yml
|
||||||
- "**/migrations/**"
|
- "**/migrations/**"
|
||||||
|
- babel.config.js
|
||||||
- manage.py
|
- manage.py
|
||||||
- pnpm-lock.yaml
|
- postcss.config.js
|
||||||
- puppet/**
|
- puppet/**
|
||||||
- requirements/**
|
- requirements/**
|
||||||
- scripts/**
|
- scripts/**
|
||||||
|
- static/assets/**
|
||||||
|
- static/third/**
|
||||||
- tools/**
|
- tools/**
|
||||||
- web/babel.config.js
|
- webpack.config.ts
|
||||||
- web/postcss.config.js
|
- yarn.lock
|
||||||
- web/third/**
|
|
||||||
- web/webpack.config.ts
|
|
||||||
- zerver/worker/queue_processors.py
|
- zerver/worker/queue_processors.py
|
||||||
- zerver/lib/push_notifications.py
|
- zerver/lib/push_notifications.py
|
||||||
- zerver/decorator.py
|
- zerver/decorator.py
|
||||||
- zproject/**
|
- zproject/**
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}"
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
production_build:
|
production_build:
|
||||||
# This job builds a release tarball from the current commit, which
|
# This job builds a release tarball from the current commit, which
|
||||||
# will be used for all of the following install/upgrade tests.
|
# will be used for all of the following install/upgrade tests.
|
||||||
name: Ubuntu 22.04 production build
|
name: Debian 10 production build
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
# Docker images are built from 'tools/ci/Dockerfile'; the comments at
|
# Docker images are built from 'tools/ci/Dockerfile'; the comments at
|
||||||
# the top explain how to build and upload these images.
|
# the top explain how to build and upload these images.
|
||||||
# Ubuntu 22.04 ships with Python 3.10.12.
|
# Debian 10 ships with Python 3.7.3.
|
||||||
container: zulip/ci:jammy
|
container: zulip/ci:buster
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Add required permissions
|
- name: Add required permissions
|
||||||
run: |
|
run: |
|
||||||
@@ -64,66 +54,50 @@ jobs:
|
|||||||
# cache action to work. It is owned by root currently.
|
# cache action to work. It is owned by root currently.
|
||||||
sudo chmod -R 0777 /__w/_temp/
|
sudo chmod -R 0777 /__w/_temp/
|
||||||
|
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Create cache directories
|
- name: Create cache directories
|
||||||
run: |
|
run: |
|
||||||
dirs=(/srv/zulip-{venv,emoji}-cache)
|
dirs=(/srv/zulip-{npm,venv,emoji}-cache)
|
||||||
sudo mkdir -p "${dirs[@]}"
|
sudo mkdir -p "${dirs[@]}"
|
||||||
sudo chown -R github "${dirs[@]}"
|
sudo chown -R github "${dirs[@]}"
|
||||||
|
|
||||||
- name: Restore pnpm store
|
- name: Restore node_modules cache
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v2
|
||||||
with:
|
with:
|
||||||
path: /__w/.pnpm-store
|
path: /srv/zulip-npm-cache
|
||||||
key: v1-pnpm-store-jammy-${{ hashFiles('pnpm-lock.yaml') }}
|
key: v1-yarn-deps-buster-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }}
|
||||||
|
restore-keys: v1-yarn-deps-buster
|
||||||
|
|
||||||
- name: Restore python cache
|
- name: Restore python cache
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v2
|
||||||
with:
|
with:
|
||||||
path: /srv/zulip-venv-cache
|
path: /srv/zulip-venv-cache
|
||||||
key: v1-venv-jammy-${{ hashFiles('requirements/dev.txt') }}
|
key: v1-venv-buster-${{ hashFiles('requirements/dev.txt') }}
|
||||||
restore-keys: v1-venv-jammy
|
restore-keys: v1-venv-buster
|
||||||
|
|
||||||
- name: Restore emoji cache
|
- name: Restore emoji cache
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v2
|
||||||
with:
|
with:
|
||||||
path: /srv/zulip-emoji-cache
|
path: /srv/zulip-emoji-cache
|
||||||
key: v1-emoji-jammy-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
|
key: v1-emoji-buster-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
|
||||||
restore-keys: v1-emoji-jammy
|
restore-keys: v1-emoji-buster
|
||||||
|
|
||||||
- name: Build production tarball
|
- name: Build production tarball
|
||||||
run: ./tools/ci/production-build
|
run: ./tools/ci/production-build
|
||||||
|
|
||||||
- name: Upload production build artifacts for install jobs
|
- name: Upload production build artifacts for install jobs
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v2
|
||||||
with:
|
with:
|
||||||
name: production-tarball
|
name: production-tarball
|
||||||
path: /tmp/production-build
|
path: /tmp/production-build
|
||||||
retention-days: 1
|
retention-days: 14
|
||||||
|
|
||||||
- name: Verify pnpm store path
|
- name: Report status
|
||||||
run: |
|
if: failure()
|
||||||
set -x
|
env:
|
||||||
path="$(pnpm store path)"
|
ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
|
||||||
[[ "$path" == /__w/.pnpm-store/* ]]
|
run: tools/ci/send-failure-message
|
||||||
|
|
||||||
- name: Generate failure report string
|
|
||||||
id: failure_report_string
|
|
||||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
|
||||||
run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Report status to CZO
|
|
||||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
|
||||||
uses: zulip/github-actions-zulip/send-message@v1
|
|
||||||
with:
|
|
||||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
|
||||||
email: "github-actions-bot@chat.zulip.org"
|
|
||||||
organization-url: "https://chat.zulip.org"
|
|
||||||
to: "automated testing"
|
|
||||||
topic: ${{ steps.failure_report_string.outputs.topic }}
|
|
||||||
type: "stream"
|
|
||||||
content: ${{ steps.failure_report_string.outputs.content }}
|
|
||||||
|
|
||||||
production_install:
|
production_install:
|
||||||
# This job installs the server release tarball built above on a
|
# This job installs the server release tarball built above on a
|
||||||
@@ -132,23 +106,26 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
|
extra_args: [""]
|
||||||
include:
|
include:
|
||||||
# Docker images are built from 'tools/ci/Dockerfile'; the comments at
|
# Docker images are built from 'tools/ci/Dockerfile'; the comments at
|
||||||
# the top explain how to build and upload these images.
|
# the top explain how to build and upload these images.
|
||||||
|
- docker_image: zulip/ci:focal
|
||||||
|
name: Ubuntu 20.04 production install
|
||||||
|
os: focal
|
||||||
|
|
||||||
- docker_image: zulip/ci:jammy
|
- docker_image: zulip/ci:jammy
|
||||||
name: Ubuntu 22.04 production install and PostgreSQL upgrade with pgroonga
|
name: Ubuntu 22.04 production install
|
||||||
os: jammy
|
os: jammy
|
||||||
extra-args: ""
|
|
||||||
|
|
||||||
- docker_image: zulip/ci:noble
|
- docker_image: zulip/ci:buster
|
||||||
name: Ubuntu 24.04 production install
|
name: Debian 10 production install with custom db name and user
|
||||||
os: noble
|
os: buster
|
||||||
extra-args: ""
|
extra_args: --test-custom-db
|
||||||
|
|
||||||
- docker_image: zulip/ci:bookworm
|
- docker_image: zulip/ci:bullseye
|
||||||
name: Debian 12 production install with custom db name and user
|
name: Debian 11 production install
|
||||||
os: bookworm
|
os: bullseye
|
||||||
extra-args: --test-custom-db
|
|
||||||
|
|
||||||
name: ${{ matrix.name }}
|
name: ${{ matrix.name }}
|
||||||
container:
|
container:
|
||||||
@@ -159,7 +136,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Download built production tarball
|
- name: Download built production tarball
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v2
|
||||||
with:
|
with:
|
||||||
name: production-tarball
|
name: production-tarball
|
||||||
path: /tmp
|
path: /tmp
|
||||||
@@ -171,58 +148,56 @@ jobs:
|
|||||||
# cache action to work. It is owned by root currently.
|
# cache action to work. It is owned by root currently.
|
||||||
sudo chmod -R 0777 /__w/_temp/
|
sudo chmod -R 0777 /__w/_temp/
|
||||||
|
|
||||||
# Since actions/download-artifact@v4 loses all the permissions
|
# Since actions/download-artifact@v2 loses all the permissions
|
||||||
# of the tarball uploaded by the upload artifact fix those.
|
# of the tarball uploaded by the upload artifact fix those.
|
||||||
chmod +x /tmp/production-upgrade-pg
|
chmod +x /tmp/production-upgrade-pg
|
||||||
chmod +x /tmp/production-pgroonga
|
chmod +x /tmp/production-pgroonga
|
||||||
chmod +x /tmp/production-install
|
chmod +x /tmp/production-install
|
||||||
chmod +x /tmp/production-verify
|
chmod +x /tmp/production-verify
|
||||||
chmod +x /tmp/generate-failure-message
|
chmod +x /tmp/send-failure-message
|
||||||
|
|
||||||
- name: Create cache directories
|
- name: Create cache directories
|
||||||
run: |
|
run: |
|
||||||
dirs=(/srv/zulip-{venv,emoji}-cache)
|
dirs=(/srv/zulip-{npm,venv,emoji}-cache)
|
||||||
sudo mkdir -p "${dirs[@]}"
|
sudo mkdir -p "${dirs[@]}"
|
||||||
sudo chown -R github "${dirs[@]}"
|
sudo chown -R github "${dirs[@]}"
|
||||||
|
|
||||||
|
- name: Restore node_modules cache
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: /srv/zulip-npm-cache
|
||||||
|
key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('/tmp/package.json') }}-${{ hashFiles('/tmp/yarn.lock') }}
|
||||||
|
restore-keys: v1-yarn-deps-${{ matrix.os }}
|
||||||
|
|
||||||
- name: Install production
|
- name: Install production
|
||||||
run: sudo /tmp/production-install ${{ matrix.extra-args }}
|
run: |
|
||||||
|
sudo service rabbitmq-server restart
|
||||||
|
sudo /tmp/production-install ${{ matrix.extra-args }}
|
||||||
|
|
||||||
- name: Verify install
|
- name: Verify install
|
||||||
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
||||||
|
|
||||||
- name: Install pgroonga
|
- name: Install pgroonga
|
||||||
if: ${{ matrix.os == 'jammy' }}
|
if: ${{ matrix.os == 'focal' }}
|
||||||
run: sudo /tmp/production-pgroonga
|
run: sudo /tmp/production-pgroonga
|
||||||
|
|
||||||
- name: Verify install after installing pgroonga
|
- name: Verify install after installing pgroonga
|
||||||
if: ${{ matrix.os == 'jammy' }}
|
if: ${{ matrix.os == 'focal' }}
|
||||||
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
||||||
|
|
||||||
- name: Upgrade postgresql
|
- name: Upgrade postgresql
|
||||||
if: ${{ matrix.os == 'jammy' }}
|
if: ${{ matrix.os == 'focal' }}
|
||||||
run: sudo /tmp/production-upgrade-pg
|
run: sudo /tmp/production-upgrade-pg
|
||||||
|
|
||||||
- name: Verify install after upgrading postgresql
|
- name: Verify install after upgrading postgresql
|
||||||
if: ${{ matrix.os == 'jammy' }}
|
if: ${{ matrix.os == 'focal' }}
|
||||||
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
||||||
|
|
||||||
- name: Generate failure report string
|
- name: Report status
|
||||||
id: failure_report_string
|
if: failure()
|
||||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
env:
|
||||||
run: /tmp/generate-failure-message >> $GITHUB_OUTPUT
|
ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
|
||||||
|
run: /tmp/send-failure-message
|
||||||
- name: Report status to CZO
|
|
||||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
|
||||||
uses: zulip/github-actions-zulip/send-message@v1
|
|
||||||
with:
|
|
||||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
|
||||||
email: "github-actions-bot@chat.zulip.org"
|
|
||||||
organization-url: "https://chat.zulip.org"
|
|
||||||
to: "automated testing"
|
|
||||||
topic: ${{ steps.failure_report_string.outputs.topic }}
|
|
||||||
type: "stream"
|
|
||||||
content: ${{ steps.failure_report_string.outputs.content }}
|
|
||||||
|
|
||||||
production_upgrade:
|
production_upgrade:
|
||||||
# The production upgrade job starts with a container with a
|
# The production upgrade job starts with a container with a
|
||||||
@@ -235,17 +210,15 @@ jobs:
|
|||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
include:
|
include:
|
||||||
# Docker images are built from 'tools/ci/Dockerfile.prod'; the comments at
|
# Docker images are built from 'tools/ci/Dockerfile'; the comments at
|
||||||
# the top explain how to build and upload these images.
|
# the top explain how to build and upload these images.
|
||||||
- docker_image: zulip/ci:jammy-6.0
|
- docker_image: zulip/ci:buster-3.4
|
||||||
name: 6.0 Version Upgrade
|
name: 3.4 Version Upgrade
|
||||||
os: jammy
|
os: buster
|
||||||
- docker_image: zulip/ci:bookworm-7.0
|
|
||||||
name: 7.0 Version Upgrade
|
- docker_image: zulip/ci:bullseye-4.11
|
||||||
os: bookworm
|
name: 4.11 Version Upgrade
|
||||||
- docker_image: zulip/ci:bookworm-8.0
|
os: bullseye
|
||||||
name: 8.0 Version Upgrade
|
|
||||||
os: bookworm
|
|
||||||
|
|
||||||
name: ${{ matrix.name }}
|
name: ${{ matrix.name }}
|
||||||
container:
|
container:
|
||||||
@@ -256,7 +229,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Download built production tarball
|
- name: Download built production tarball
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v2
|
||||||
with:
|
with:
|
||||||
name: production-tarball
|
name: production-tarball
|
||||||
path: /tmp
|
path: /tmp
|
||||||
@@ -268,15 +241,15 @@ jobs:
|
|||||||
# cache action to work. It is owned by root currently.
|
# cache action to work. It is owned by root currently.
|
||||||
sudo chmod -R 0777 /__w/_temp/
|
sudo chmod -R 0777 /__w/_temp/
|
||||||
|
|
||||||
# Since actions/download-artifact@v4 loses all the permissions
|
# Since actions/download-artifact@v2 loses all the permissions
|
||||||
# of the tarball uploaded by the upload artifact fix those.
|
# of the tarball uploaded by the upload artifact fix those.
|
||||||
chmod +x /tmp/production-upgrade
|
chmod +x /tmp/production-upgrade
|
||||||
chmod +x /tmp/production-verify
|
chmod +x /tmp/production-verify
|
||||||
chmod +x /tmp/generate-failure-message
|
chmod +x /tmp/send-failure-message
|
||||||
|
|
||||||
- name: Create cache directories
|
- name: Create cache directories
|
||||||
run: |
|
run: |
|
||||||
dirs=(/srv/zulip-{venv,emoji}-cache)
|
dirs=(/srv/zulip-{npm,venv,emoji}-cache)
|
||||||
sudo mkdir -p "${dirs[@]}"
|
sudo mkdir -p "${dirs[@]}"
|
||||||
sudo chown -R github "${dirs[@]}"
|
sudo chown -R github "${dirs[@]}"
|
||||||
|
|
||||||
@@ -289,19 +262,8 @@ jobs:
|
|||||||
# - name: Verify install
|
# - name: Verify install
|
||||||
# run: sudo /tmp/production-verify
|
# run: sudo /tmp/production-verify
|
||||||
|
|
||||||
- name: Generate failure report string
|
- name: Report status
|
||||||
id: failure_report_string
|
if: failure()
|
||||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
env:
|
||||||
run: /tmp/generate-failure-message >> $GITHUB_OUTPUT
|
ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
|
||||||
|
run: /tmp/send-failure-message
|
||||||
- name: Report status to CZO
|
|
||||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
|
||||||
uses: zulip/github-actions-zulip/send-message@v1
|
|
||||||
with:
|
|
||||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
|
||||||
email: "github-actions-bot@chat.zulip.org"
|
|
||||||
organization-url: "https://chat.zulip.org"
|
|
||||||
to: "automated testing"
|
|
||||||
topic: ${{ steps.failure_report_string.outputs.topic }}
|
|
||||||
type: "stream"
|
|
||||||
content: ${{ steps.failure_report_string.outputs.content }}
|
|
||||||
|
|||||||
7
.github/workflows/update-oneclick-apps.yml
vendored
7
.github/workflows/update-oneclick-apps.yml
vendored
@@ -2,14 +2,11 @@ name: Update one click apps
|
|||||||
on:
|
on:
|
||||||
release:
|
release:
|
||||||
types: [published]
|
types: [published]
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
update-digitalocean-oneclick-app:
|
update-digitalocean-oneclick-app:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v2
|
||||||
- name: Update DigitalOcean one click app
|
- name: Update DigitalOcean one click app
|
||||||
env:
|
env:
|
||||||
DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }}
|
DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }}
|
||||||
@@ -22,6 +19,6 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
export PATH="$HOME/.local/bin:$PATH"
|
export PATH="$HOME/.local/bin:$PATH"
|
||||||
git clone https://github.com/zulip/marketplace-partners
|
git clone https://github.com/zulip/marketplace-partners
|
||||||
pip3 install python-digitalocean zulip fab-classic PyNaCl
|
pip3 install python-digitalocean zulip fab-classic
|
||||||
echo $PATH
|
echo $PATH
|
||||||
python3 tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py
|
python3 tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py
|
||||||
|
|||||||
217
.github/workflows/zulip-ci.yml
vendored
217
.github/workflows/zulip-ci.yml
vendored
@@ -4,50 +4,38 @@
|
|||||||
|
|
||||||
name: Zulip CI
|
name: Zulip CI
|
||||||
|
|
||||||
on:
|
on: [push, pull_request]
|
||||||
push:
|
|
||||||
branches: ["*.x", chat.zulip.org, main]
|
|
||||||
tags: ["*"]
|
|
||||||
pull_request:
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}"
|
|
||||||
cancel-in-progress: true
|
|
||||||
|
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: read
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
tests:
|
tests:
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
|
include_frontend_tests: [false]
|
||||||
include:
|
include:
|
||||||
# Base images are built using `tools/ci/Dockerfile.prod.template`.
|
# Base images are built using `tools/ci/Dockerfile.prod.template`.
|
||||||
# The comments at the top explain how to build and upload these images.
|
# The comments at the top explain how to build and upload these images.
|
||||||
# Ubuntu 22.04 ships with Python 3.10.12.
|
# Debian 10 ships with Python 3.7.3.
|
||||||
- docker_image: zulip/ci:jammy
|
- docker_image: zulip/ci:buster
|
||||||
name: Ubuntu 22.04 (Python 3.10, backend + frontend)
|
name: Debian 10 (Python 3.7, backend + frontend)
|
||||||
os: jammy
|
os: buster
|
||||||
include_documentation_tests: false
|
|
||||||
include_frontend_tests: true
|
include_frontend_tests: true
|
||||||
# Debian 12 ships with Python 3.11.2.
|
# Ubuntu 20.04 ships with Python 3.8.2.
|
||||||
- docker_image: zulip/ci:bookworm
|
- docker_image: zulip/ci:focal
|
||||||
name: Debian 12 (Python 3.11, backend + documentation)
|
name: Ubuntu 20.04 (Python 3.8, backend)
|
||||||
os: bookworm
|
os: focal
|
||||||
include_documentation_tests: true
|
# Debian 11 ships with Python 3.9.2.
|
||||||
include_frontend_tests: false
|
- docker_image: zulip/ci:bullseye
|
||||||
# Ubuntu 24.04 ships with Python 3.12.2.
|
name: Debian 11 (Python 3.9, backend)
|
||||||
- docker_image: zulip/ci:noble
|
os: bullseye
|
||||||
name: Ubuntu 24.04 (Python 3.12, backend)
|
# Ubuntu 22.04 ships with Python 3.10.4.
|
||||||
os: noble
|
- docker_image: zulip/ci:jammy
|
||||||
include_documentation_tests: false
|
name: Ubuntu 22.04 (Python 3.10, backend)
|
||||||
include_frontend_tests: false
|
os: jammy
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
name: ${{ matrix.name }}
|
name: ${{ matrix.name }}
|
||||||
@@ -62,39 +50,43 @@ jobs:
|
|||||||
HOME: /home/github/
|
HOME: /home/github/
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Create cache directories
|
- name: Create cache directories
|
||||||
run: |
|
run: |
|
||||||
dirs=(/srv/zulip-{venv,emoji}-cache)
|
dirs=(/srv/zulip-{npm,venv,emoji}-cache)
|
||||||
sudo mkdir -p "${dirs[@]}"
|
sudo mkdir -p "${dirs[@]}"
|
||||||
sudo chown -R github "${dirs[@]}"
|
sudo chown -R github "${dirs[@]}"
|
||||||
|
|
||||||
- name: Restore pnpm store
|
- name: Restore node_modules cache
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v2
|
||||||
with:
|
with:
|
||||||
path: /__w/.pnpm-store
|
path: /srv/zulip-npm-cache
|
||||||
key: v1-pnpm-store-${{ matrix.os }}-${{ hashFiles('pnpm-lock.yaml') }}
|
key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }}
|
||||||
|
restore-keys: v1-yarn-deps-${{ matrix.os }}
|
||||||
|
|
||||||
- name: Restore python cache
|
- name: Restore python cache
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v2
|
||||||
with:
|
with:
|
||||||
path: /srv/zulip-venv-cache
|
path: /srv/zulip-venv-cache
|
||||||
key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/dev.txt') }}
|
key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/dev.txt') }}
|
||||||
restore-keys: v1-venv-${{ matrix.os }}
|
restore-keys: v1-venv-${{ matrix.os }}
|
||||||
|
|
||||||
- name: Restore emoji cache
|
- name: Restore emoji cache
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v2
|
||||||
with:
|
with:
|
||||||
path: /srv/zulip-emoji-cache
|
path: /srv/zulip-emoji-cache
|
||||||
key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json', 'tools/setup/emoji/build_emoji', 'tools/setup/emoji/emoji_setup_utils.py', 'tools/setup/emoji/emoji_names.py', 'package.json') }}
|
key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
|
||||||
restore-keys: v1-emoji-${{ matrix.os }}
|
restore-keys: v1-emoji-${{ matrix.os }}
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: |
|
run: |
|
||||||
# This is the main setup job for the test suite
|
# This is the main setup job for the test suite
|
||||||
./tools/ci/setup-backend --skip-dev-db-build
|
./tools/ci/setup-backend --skip-dev-db-build
|
||||||
scripts/lib/clean_unused_caches.py --verbose --threshold=0
|
|
||||||
|
# Cleaning caches is mostly unnecessary in GitHub Actions, because
|
||||||
|
# most builds don't get to write to the cache.
|
||||||
|
# scripts/lib/clean_unused_caches.py --verbose --threshold 0
|
||||||
|
|
||||||
- name: Run tools test
|
- name: Run tools test
|
||||||
run: |
|
run: |
|
||||||
@@ -106,13 +98,56 @@ jobs:
|
|||||||
source tools/ci/activate-venv
|
source tools/ci/activate-venv
|
||||||
./tools/run-codespell
|
./tools/run-codespell
|
||||||
|
|
||||||
# We run the tests that are only run in a specific job early, so
|
- name: Run backend lint
|
||||||
# that we get feedback to the developer about likely failures as
|
run: |
|
||||||
# quickly as possible. Backend/mypy failures that aren't
|
source tools/ci/activate-venv
|
||||||
# identical across different versions are much more rare than
|
echo "Test suite is running under $(python --version)."
|
||||||
# frontend linter or node test failures.
|
./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky
|
||||||
|
|
||||||
|
- name: Run frontend lint
|
||||||
|
if: ${{ matrix.include_frontend_tests }}
|
||||||
|
run: |
|
||||||
|
source tools/ci/activate-venv
|
||||||
|
./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky
|
||||||
|
|
||||||
|
- name: Run backend tests
|
||||||
|
run: |
|
||||||
|
source tools/ci/activate-venv
|
||||||
|
./tools/test-backend --coverage --include-webhooks --no-cov-cleanup --ban-console-output
|
||||||
|
|
||||||
|
- name: Run mypy
|
||||||
|
run: |
|
||||||
|
source tools/ci/activate-venv
|
||||||
|
# We run mypy after the backend tests so we get output from the
|
||||||
|
# backend tests, which tend to uncover more serious problems, first.
|
||||||
|
./tools/run-mypy --version
|
||||||
|
./tools/run-mypy
|
||||||
|
|
||||||
|
- name: Run miscellaneous tests
|
||||||
|
run: |
|
||||||
|
source tools/ci/activate-venv
|
||||||
|
|
||||||
|
# Currently our compiled requirements files will differ for different python versions
|
||||||
|
# so we will run test-locked-requirements only for Debian 10.
|
||||||
|
# ./tools/test-locked-requirements
|
||||||
|
# ./tools/test-run-dev # https://github.com/zulip/zulip/pull/14233
|
||||||
|
#
|
||||||
|
# This test has been persistently flaky at like 1% frequency, is slow,
|
||||||
|
# and is for a very specific single feature, so we don't run it by default:
|
||||||
|
# ./tools/test-queue-worker-reload
|
||||||
|
|
||||||
|
./tools/test-migrations
|
||||||
|
./tools/setup/optimize-svg --check
|
||||||
|
./tools/setup/generate_integration_bots_avatars.py --check-missing
|
||||||
|
|
||||||
|
# Ban check-database-compatibility.py from transitively
|
||||||
|
# relying on static/generated, because it might not be
|
||||||
|
# up-to-date at that point in upgrade-zulip-stage-2.
|
||||||
|
chmod 000 static/generated
|
||||||
|
./scripts/lib/check-database-compatibility.py
|
||||||
|
chmod 755 static/generated
|
||||||
|
|
||||||
- name: Run documentation and api tests
|
- name: Run documentation and api tests
|
||||||
if: ${{ matrix.include_documentation_tests }}
|
|
||||||
run: |
|
run: |
|
||||||
source tools/ci/activate-venv
|
source tools/ci/activate-venv
|
||||||
# In CI, we only test links we control in test-documentation to avoid flakes
|
# In CI, we only test links we control in test-documentation to avoid flakes
|
||||||
@@ -127,12 +162,6 @@ jobs:
|
|||||||
# Run the node tests first, since they're fast and deterministic
|
# Run the node tests first, since they're fast and deterministic
|
||||||
./tools/test-js-with-node --coverage --parallel=1
|
./tools/test-js-with-node --coverage --parallel=1
|
||||||
|
|
||||||
- name: Run frontend lint
|
|
||||||
if: ${{ matrix.include_frontend_tests }}
|
|
||||||
run: |
|
|
||||||
source tools/ci/activate-venv
|
|
||||||
./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky
|
|
||||||
|
|
||||||
- name: Check schemas
|
- name: Check schemas
|
||||||
if: ${{ matrix.include_frontend_tests }}
|
if: ${{ matrix.include_frontend_tests }}
|
||||||
run: |
|
run: |
|
||||||
@@ -154,55 +183,6 @@ jobs:
|
|||||||
source tools/ci/activate-venv
|
source tools/ci/activate-venv
|
||||||
./tools/test-js-with-puppeteer
|
./tools/test-js-with-puppeteer
|
||||||
|
|
||||||
- name: Check pnpm dedupe
|
|
||||||
if: ${{ matrix.include_frontend_tests }}
|
|
||||||
run: pnpm dedupe --check
|
|
||||||
|
|
||||||
- name: Run backend lint
|
|
||||||
run: |
|
|
||||||
source tools/ci/activate-venv
|
|
||||||
echo "Test suite is running under $(python --version)."
|
|
||||||
./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky
|
|
||||||
|
|
||||||
- name: Run backend tests
|
|
||||||
run: |
|
|
||||||
source tools/ci/activate-venv
|
|
||||||
./tools/test-backend ${{ matrix.os != 'bookworm' && '--coverage' || '' }} --xml-report --no-html-report --include-webhooks --include-transaction-tests --no-cov-cleanup --ban-console-output
|
|
||||||
|
|
||||||
- name: Run mypy
|
|
||||||
run: |
|
|
||||||
source tools/ci/activate-venv
|
|
||||||
# We run mypy after the backend tests so we get output from the
|
|
||||||
# backend tests, which tend to uncover more serious problems, first.
|
|
||||||
./tools/run-mypy --version
|
|
||||||
./tools/run-mypy
|
|
||||||
|
|
||||||
- name: Run miscellaneous tests
|
|
||||||
run: |
|
|
||||||
source tools/ci/activate-venv
|
|
||||||
|
|
||||||
# Currently our compiled requirements files will differ for different
|
|
||||||
# Python versions, so we will run test-locked-requirements only on the
|
|
||||||
# platform with the oldest one.
|
|
||||||
# ./tools/test-locked-requirements
|
|
||||||
# ./tools/test-run-dev # https://github.com/zulip/zulip/pull/14233
|
|
||||||
#
|
|
||||||
# This test has been persistently flaky at like 1% frequency, is slow,
|
|
||||||
# and is for a very specific single feature, so we don't run it by default:
|
|
||||||
# ./tools/test-queue-worker-reload
|
|
||||||
|
|
||||||
./tools/test-migrations
|
|
||||||
./tools/setup/optimize-svg --check
|
|
||||||
./tools/setup/generate_integration_bots_avatars.py --check-missing
|
|
||||||
./tools/ci/check-executables
|
|
||||||
|
|
||||||
# Ban check-database-compatibility from transitively
|
|
||||||
# relying on static/generated, because it might not be
|
|
||||||
# up-to-date at that point in upgrade-zulip-stage-2.
|
|
||||||
chmod 000 static/generated web/generated
|
|
||||||
./scripts/lib/check-database-compatibility
|
|
||||||
chmod 755 static/generated web/generated
|
|
||||||
|
|
||||||
- name: Check for untracked files
|
- name: Check for untracked files
|
||||||
run: |
|
run: |
|
||||||
source tools/ci/activate-venv
|
source tools/ci/activate-venv
|
||||||
@@ -215,7 +195,7 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
- name: Test locked requirements
|
- name: Test locked requirements
|
||||||
if: ${{ matrix.os == 'jammy' }}
|
if: ${{ matrix.os == 'buster' }}
|
||||||
run: |
|
run: |
|
||||||
. /srv/zulip-py3-venv/bin/activate && \
|
. /srv/zulip-py3-venv/bin/activate && \
|
||||||
./tools/test-locked-requirements
|
./tools/test-locked-requirements
|
||||||
@@ -225,42 +205,25 @@ jobs:
|
|||||||
# Only upload coverage when both frontend and backend
|
# Only upload coverage when both frontend and backend
|
||||||
# tests are run.
|
# tests are run.
|
||||||
if: ${{ matrix.include_frontend_tests }}
|
if: ${{ matrix.include_frontend_tests }}
|
||||||
uses: codecov/codecov-action@v4
|
uses: codecov/codecov-action@v2
|
||||||
with:
|
with:
|
||||||
files: var/coverage.xml,var/node-coverage/lcov.info
|
files: var/coverage.xml,var/node-coverage/lcov.info
|
||||||
token: ${{ secrets.CODECOV_TOKEN }}
|
|
||||||
|
|
||||||
- name: Store Puppeteer artifacts
|
- name: Store Puppeteer artifacts
|
||||||
# Upload these on failure, as well
|
# Upload these on failure, as well
|
||||||
if: ${{ always() && matrix.include_frontend_tests }}
|
if: ${{ always() && matrix.include_frontend_tests }}
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v2
|
||||||
with:
|
with:
|
||||||
name: puppeteer
|
name: puppeteer
|
||||||
path: ./var/puppeteer
|
path: ./var/puppeteer
|
||||||
retention-days: 60
|
retention-days: 60
|
||||||
|
|
||||||
- name: Check development database build
|
- name: Check development database build
|
||||||
|
if: ${{ matrix.os == 'focal' || matrix.os == 'bullseye' || matrix.os == 'jammy' }}
|
||||||
run: ./tools/ci/setup-backend
|
run: ./tools/ci/setup-backend
|
||||||
|
|
||||||
- name: Verify pnpm store path
|
- name: Report status
|
||||||
run: |
|
if: failure()
|
||||||
set -x
|
env:
|
||||||
path="$(pnpm store path)"
|
ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
|
||||||
[[ "$path" == /__w/.pnpm-store/* ]]
|
run: tools/ci/send-failure-message
|
||||||
|
|
||||||
- name: Generate failure report string
|
|
||||||
id: failure_report_string
|
|
||||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
|
||||||
run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT
|
|
||||||
|
|
||||||
- name: Report status to CZO
|
|
||||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
|
||||||
uses: zulip/github-actions-zulip/send-message@v1
|
|
||||||
with:
|
|
||||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
|
||||||
email: "github-actions-bot@chat.zulip.org"
|
|
||||||
organization-url: "https://chat.zulip.org"
|
|
||||||
to: "automated testing"
|
|
||||||
topic: ${{ steps.failure_report_string.outputs.topic }}
|
|
||||||
type: "stream"
|
|
||||||
content: ${{ steps.failure_report_string.outputs.content }}
|
|
||||||
|
|||||||
10
.gitignore
vendored
10
.gitignore
vendored
@@ -17,17 +17,13 @@
|
|||||||
# See `git help ignore` for details on the format.
|
# See `git help ignore` for details on the format.
|
||||||
|
|
||||||
## Config files for the dev environment
|
## Config files for the dev environment
|
||||||
/zproject/apns-dev.pem
|
|
||||||
/zproject/apns-dev-key.p8
|
|
||||||
/zproject/dev-secrets.conf
|
/zproject/dev-secrets.conf
|
||||||
/zproject/custom_dev_settings.py
|
|
||||||
/tools/conf.ini
|
/tools/conf.ini
|
||||||
/tools/custom_provision
|
/tools/custom_provision
|
||||||
/tools/droplets/conf.ini
|
/tools/droplets/conf.ini
|
||||||
|
|
||||||
## Byproducts of setting up and using the dev environment
|
## Byproducts of setting up and using the dev environment
|
||||||
*.pyc
|
*.pyc
|
||||||
*.tsbuildinfo
|
|
||||||
package-lock.json
|
package-lock.json
|
||||||
|
|
||||||
/.vagrant
|
/.vagrant
|
||||||
@@ -37,7 +33,6 @@ package-lock.json
|
|||||||
!/var/puppeteer/test_credentials.d.ts
|
!/var/puppeteer/test_credentials.d.ts
|
||||||
|
|
||||||
/.dmypy.json
|
/.dmypy.json
|
||||||
/.ruff_cache
|
|
||||||
|
|
||||||
# Generated i18n data
|
# Generated i18n data
|
||||||
/locale/en
|
/locale/en
|
||||||
@@ -48,11 +43,11 @@ package-lock.json
|
|||||||
# Static build
|
# Static build
|
||||||
*.mo
|
*.mo
|
||||||
npm-debug.log
|
npm-debug.log
|
||||||
/.pnpm-store
|
|
||||||
/node_modules
|
/node_modules
|
||||||
/prod-static
|
/prod-static
|
||||||
/staticfiles.json
|
/staticfiles.json
|
||||||
/webpack-stats-production.json
|
/webpack-stats-production.json
|
||||||
|
/yarn-error.log
|
||||||
zulip-git-version
|
zulip-git-version
|
||||||
|
|
||||||
# Test / analysis tools
|
# Test / analysis tools
|
||||||
@@ -87,9 +82,6 @@ zulip.kdev4
|
|||||||
# Core dump files
|
# Core dump files
|
||||||
core
|
core
|
||||||
|
|
||||||
# Static generated files for landing page.
|
|
||||||
/static/images/landing-page/hello/generated
|
|
||||||
|
|
||||||
## Miscellaneous
|
## Miscellaneous
|
||||||
# (Ideally this section is empty.)
|
# (Ideally this section is empty.)
|
||||||
.transifexrc
|
.transifexrc
|
||||||
|
|||||||
4
.gitlint
4
.gitlint
@@ -1,13 +1,13 @@
|
|||||||
[general]
|
[general]
|
||||||
ignore=title-trailing-punctuation, body-min-length, body-is-missing
|
ignore=title-trailing-punctuation, body-min-length, body-is-missing
|
||||||
|
|
||||||
extra-path=tools/lib/gitlint_rules.py
|
extra-path=tools/lib/gitlint-rules.py
|
||||||
|
|
||||||
[title-match-regex]
|
[title-match-regex]
|
||||||
regex=^(.+:\ )?[A-Z].+\.$
|
regex=^(.+:\ )?[A-Z].+\.$
|
||||||
|
|
||||||
[title-max-length]
|
[title-max-length]
|
||||||
line-length=72
|
line-length=76
|
||||||
|
|
||||||
[body-max-line-length]
|
[body-max-line-length]
|
||||||
line-length=76
|
line-length=76
|
||||||
|
|||||||
107
.mailmap
107
.mailmap
@@ -12,153 +12,62 @@
|
|||||||
# # shows raw names/emails, filtered by mapped name:
|
# # shows raw names/emails, filtered by mapped name:
|
||||||
# $ git log --format='%an %ae' --author=$NAME | uniq -c
|
# $ git log --format='%an %ae' --author=$NAME | uniq -c
|
||||||
|
|
||||||
acrefoot <acrefoot@zulip.com> <acrefoot@alum.mit.edu>
|
|
||||||
acrefoot <acrefoot@zulip.com> <acrefoot@dropbox.com>
|
|
||||||
acrefoot <acrefoot@zulip.com> <acrefoot@humbughq.com>
|
|
||||||
Adam Benesh <Adam.Benesh@gmail.com>
|
|
||||||
Adam Benesh <Adam.Benesh@gmail.com> <Adam-Daniel.Benesh@t-systems.com>
|
|
||||||
Adarsh Tiwari <xoldyckk@gmail.com>
|
|
||||||
Aditya Chaudhary <aditya.chaudhary1558@gmail.com>
|
|
||||||
Adnan Shabbir Husain <generaladnan139@gmail.com>
|
|
||||||
Adnan Shabbir Husain <generaladnan139@gmail.com> <78212328+adnan-td@users.noreply.github.com>
|
|
||||||
Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net>
|
Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net>
|
||||||
Alex Vandiver <alexmv@zulip.com> <github@chmrr.net>
|
Alex Vandiver <alexmv@zulip.com> <github@chmrr.net>
|
||||||
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com>
|
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com>
|
||||||
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@zulip.com>
|
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@zulip.com>
|
||||||
Alya Abbott <alya@zulip.com> <2090066+alya@users.noreply.github.com>
|
Alya Abbott <alya@zulip.com> <2090066+alya@users.noreply.github.com>
|
||||||
Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com>
|
|
||||||
Aman Agrawal <amanagr@zulip.com>
|
|
||||||
Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in>
|
Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in>
|
||||||
Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com>
|
Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com>
|
||||||
Anders Kaseorg <anders@zulip.com> <andersk@mit.edu>
|
Anders Kaseorg <anders@zulip.com> <andersk@mit.edu>
|
||||||
aparna-bhatt <aparnabhatt2001@gmail.com> <86338542+aparna-bhatt@users.noreply.github.com>
|
|
||||||
Aryan Bhokare <aryan1bhokare@gmail.com>
|
|
||||||
Aryan Bhokare <aryan1bhokare@gmail.com> <92683836+aryan-bhokare@users.noreply.github.com>
|
|
||||||
Aryan Shridhar <aryanshridhar7@gmail.com>
|
|
||||||
Aryan Shridhar <aryanshridhar7@gmail.com> <53977614+aryanshridhar@users.noreply.github.com>
|
|
||||||
Ashwat Kumar Singh <ashwat.kumarsingh.met20@itbhu.ac.in>
|
|
||||||
Austin Riba <austin@zulip.com> <austin@m51.io>
|
Austin Riba <austin@zulip.com> <austin@m51.io>
|
||||||
Bedo Khaled <bedokhaled66@gmail.com>
|
|
||||||
Bedo Khaled <bedokhaled66@gmail.com> <64221784+abdelrahman725@users.noreply.github.com>
|
|
||||||
BIKI DAS <bikid475@gmail.com>
|
BIKI DAS <bikid475@gmail.com>
|
||||||
Brijmohan Siyag <brijsiyag@gmail.com>
|
Brock Whittaker <brock@zulipchat.com> <bjwhitta@asu.edu>
|
||||||
Brock Whittaker <whittakerbrock@gmail.com> <bjwhitta@asu.edu>
|
Brock Whittaker <brock@zulipchat.com> <brockwhittaker@Brocks-MacBook.local>
|
||||||
Brock Whittaker <whittakerbrock@gmail.com> <brock@zulip.com>
|
Brock Whittaker <brock@zulipchat.com> <brock@zulipchat.org>
|
||||||
Brock Whittaker <whittakerbrock@gmail.com> <brock@zulip.org>
|
|
||||||
Brock Whittaker <whittakerbrock@gmail.com> <brock@zulipchat.org>
|
|
||||||
Brock Whittaker <whittakerbrock@gmail.com> <brockwhittaker@Brocks-MacBook.local>
|
|
||||||
Chris Bobbe <cbobbe@zulip.com> <cbobbe@zulipchat.com>
|
Chris Bobbe <cbobbe@zulip.com> <cbobbe@zulipchat.com>
|
||||||
Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com>
|
Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com>
|
||||||
codewithnick <nikhilsingh526452@gmail.com>
|
|
||||||
Danny Su <contact@dannysu.com> <opensource@emailengine.org>
|
|
||||||
Dhruv Goyal <dhruvgoyal.dev@gmail.com>
|
|
||||||
Dinesh <chdinesh1089@gmail.com>
|
|
||||||
Dinesh <chdinesh1089@gmail.com> <chdinesh1089>
|
|
||||||
Eeshan Garg <eeshan@zulip.com> <jerryguitarist@gmail.com>
|
Eeshan Garg <eeshan@zulip.com> <jerryguitarist@gmail.com>
|
||||||
Eric Smith <erwsmith@gmail.com> <99841919+erwsmith@users.noreply.github.com>
|
|
||||||
Evy Kassirer <evy@zulip.com>
|
|
||||||
Evy Kassirer <evy@zulip.com> <evy.kassirer@gmail.com>
|
|
||||||
Evy Kassirer <evy@zulip.com> <evykassirer@users.noreply.github.com>
|
|
||||||
Ganesh Pawar <pawarg256@gmail.com> <58626718+ganpa3@users.noreply.github.com>
|
|
||||||
Greg Price <greg@zulip.com> <gnprice@gmail.com>
|
Greg Price <greg@zulip.com> <gnprice@gmail.com>
|
||||||
Greg Price <greg@zulip.com> <greg@zulipchat.com>
|
Greg Price <greg@zulip.com> <greg@zulipchat.com>
|
||||||
Greg Price <greg@zulip.com> <price@mit.edu>
|
Greg Price <greg@zulip.com> <price@mit.edu>
|
||||||
Hardik Dharmani <Ddharmani99@gmail.com> <ddharmani99@gmail.com>
|
|
||||||
Hemant Umre <hemantumre12@gmail.com> <87542880+HemantUmre12@users.noreply.github.com>
|
|
||||||
Jai soni <jai_s@me.iitr.ac.in>
|
Jai soni <jai_s@me.iitr.ac.in>
|
||||||
Jai soni <jai_s@me.iitr.ac.in> <76561593+jai2201@users.noreply.github.com>
|
Jai soni <jai_s@me.iitr.ac.in> <76561593+jai2201@users.noreply.github.com>
|
||||||
Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com>
|
Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com>
|
||||||
Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com>
|
Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com>
|
||||||
Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com>
|
Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com>
|
||||||
Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com>
|
Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com>
|
||||||
John Lu <JohnLu10212004@gmail.com>
|
|
||||||
John Lu <JohnLu10212004@gmail.com> <87673068+JohnLu2004@users.noreply.github.com>
|
|
||||||
Joseph Ho <josephho678@gmail.com>
|
|
||||||
Joseph Ho <josephho678@gmail.com> <62449508+Joelute@users.noreply.github.com>
|
|
||||||
Julia Bichler <julia.bichler@tum.de> <74348920+juliaBichler01@users.noreply.github.com>
|
|
||||||
Karl Stolley <karl@zulip.com> <karl@stolley.dev>
|
|
||||||
Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com>
|
Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com>
|
||||||
Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com>
|
Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com>
|
||||||
Kevin Scott <kevin.scott.98@gmail.com>
|
Kevin Scott <kevin.scott.98@gmail.com>
|
||||||
Kislay Verma <kislayuv27@gmail.com>
|
|
||||||
Kunal Sharma <v.shm.kunal@gmail.com>
|
|
||||||
Lalit Kumar Singh <lalitkumarsingh3716@gmail.com>
|
|
||||||
Lalit Kumar Singh <lalitkumarsingh3716@gmail.com> <lalits01@smartek21.com>
|
|
||||||
Lauryn Menard <lauryn@zulip.com> <63245456+laurynmm@users.noreply.github.com>
|
|
||||||
Lauryn Menard <lauryn@zulip.com> <lauryn.menard@gmail.com>
|
Lauryn Menard <lauryn@zulip.com> <lauryn.menard@gmail.com>
|
||||||
m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in>
|
|
||||||
m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in> <pururshottam.tiwari.cd.cse19@itbhu.ac.in>
|
|
||||||
Mateusz Mandera <mateusz.mandera@zulip.com> <mateusz.mandera@protonmail.com>
|
Mateusz Mandera <mateusz.mandera@zulip.com> <mateusz.mandera@protonmail.com>
|
||||||
Matt Keller <matt@zulip.com>
|
m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in>
|
||||||
Matt Keller <matt@zulip.com> <m@cognusion.com>
|
|
||||||
Nehal Sharma <bablinaneh@gmail.com>
|
|
||||||
Nehal Sharma <bablinaneh@gmail.com> <68962290+N-Shar-ma@users.noreply.github.com>
|
|
||||||
Nimish Medatwal <medatwalnimish@gmail.com>
|
|
||||||
Noble Mittal <noblemittal@outlook.com> <62551163+beingnoble03@users.noreply.github.com>
|
|
||||||
nzai <nzaih18@gmail.com> <70953556+nzaih1999@users.noreply.github.com>
|
|
||||||
Palash Baderia <palash.baderia@outlook.com>
|
|
||||||
Palash Baderia <palash.baderia@outlook.com> <66828942+palashb01@users.noreply.github.com>
|
|
||||||
Palash Raghuwanshi <singhpalash0@gmail.com>
|
Palash Raghuwanshi <singhpalash0@gmail.com>
|
||||||
Parth <mittalparth22@gmail.com>
|
Parth <mittalparth22@gmail.com>
|
||||||
Pratik Chanda <pratikchanda2000@gmail.com>
|
|
||||||
Pratik Solanki <pratiksolanki2021@gmail.com>
|
|
||||||
Priyam Seth <sethpriyam1@gmail.com> <b19188@students.iitmandi.ac.in>
|
|
||||||
Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com>
|
Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com>
|
||||||
Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com>
|
|
||||||
Rein Zustand (rht) <rhtbot@protonmail.com>
|
|
||||||
Rishabh Maheshwari <b20063@students.iitmandi.ac.in>
|
|
||||||
Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu>
|
Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu>
|
||||||
Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com>
|
Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com>
|
||||||
Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com>
|
Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com>
|
||||||
Rixant Rokaha <rixantrokaha@gmail.com>
|
Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com>
|
||||||
Rixant Rokaha <rixantrokaha@gmail.com> <rishantrokaha@gmail.com>
|
|
||||||
Rixant Rokaha <rixantrokaha@gmail.com> <rrokaha@caldwell.edu>
|
|
||||||
Rohan Gudimetla <rohan.gudimetla07@gmail.com>
|
|
||||||
Sahil Batra <sahil@zulip.com> <35494118+sahil839@users.noreply.github.com>
|
|
||||||
Sahil Batra <sahil@zulip.com> <sahilbatra839@gmail.com>
|
|
||||||
Sanchit Sharma <ssharmas10662@gmail.com>
|
|
||||||
Satyam Bansal <sbansal1999@gmail.com>
|
|
||||||
Sayam Samal <samal.sayam@gmail.com>
|
Sayam Samal <samal.sayam@gmail.com>
|
||||||
Scott Feeney <scott@oceanbase.org> <scott@humbughq.com>
|
Scott Feeney <scott@oceanbase.org> <scott@humbughq.com>
|
||||||
Scott Feeney <scott@oceanbase.org> <scott@zulip.com>
|
Scott Feeney <scott@oceanbase.org> <scott@zulip.com>
|
||||||
Shashank Singh <21bec103@iiitdmj.ac.in>
|
|
||||||
Shlok Patel <shlokcpatel2001@gmail.com>
|
Shlok Patel <shlokcpatel2001@gmail.com>
|
||||||
Shu Chen <shu@zulip.com>
|
|
||||||
Shubham Padia <shubham@zulip.com>
|
|
||||||
Shubham Padia <shubham@zulip.com> <shubham@glints.com>
|
|
||||||
Somesh Ranjan <somesh.ranjan.met20@itbhu.ac.in> <77766761+somesh202@users.noreply.github.com>
|
|
||||||
Steve Howell <showell@zulip.com> <showell30@yahoo.com>
|
Steve Howell <showell@zulip.com> <showell30@yahoo.com>
|
||||||
Steve Howell <showell@zulip.com> <showell@yahoo.com>
|
Steve Howell <showell@zulip.com> <showell@yahoo.com>
|
||||||
Steve Howell <showell@zulip.com> <showell@zulipchat.com>
|
Steve Howell <showell@zulip.com> <showell@zulipchat.com>
|
||||||
Steve Howell <showell@zulip.com> <steve@humbughq.com>
|
Steve Howell <showell@zulip.com> <steve@humbughq.com>
|
||||||
Steve Howell <showell@zulip.com> <steve@zulip.com>
|
Steve Howell <showell@zulip.com> <steve@zulip.com>
|
||||||
strifel <info@strifel.de>
|
strifel <info@strifel.de>
|
||||||
Sujal Shah <sujalshah28092004@gmail.com>
|
|
||||||
Tanmay Kumar <tnmdotkr@gmail.com>
|
|
||||||
Tanmay Kumar <tnmdotkr@gmail.com> <133781250+tnmkr@users.noreply.github.com>
|
|
||||||
Tim Abbott <tabbott@zulip.com>
|
|
||||||
Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com>
|
Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com>
|
||||||
Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com>
|
Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com>
|
||||||
Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu>
|
Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu>
|
||||||
Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com>
|
Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com>
|
||||||
Tomasz Kolek <tomasz-kolek@o2.pl> <tomasz-kolek@go2.pl>
|
|
||||||
Ujjawal Modi <umodi2003@gmail.com> <99073049+Ujjawal3@users.noreply.github.com>
|
|
||||||
umkay <ukhan@zulipchat.com> <umaimah.k@gmail.com>
|
|
||||||
umkay <ukhan@zulipchat.com> <umkay@users.noreply.github.com>
|
|
||||||
Viktor Illmer <1476338+v-ji@users.noreply.github.com>
|
|
||||||
Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com>
|
Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com>
|
||||||
Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com>
|
Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com>
|
||||||
Waseem Daher <wdaher@zulip.com> <wdaher@dropbox.com>
|
Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com>
|
||||||
Waseem Daher <wdaher@zulip.com> <wdaher@humbughq.com>
|
Sahil Batra <sahil@zulip.com> <sahilbatra839@gmail.com>
|
||||||
Yash RE <33805964+YashRE42@users.noreply.github.com>
|
|
||||||
Yash RE <33805964+YashRE42@users.noreply.github.com> <YashRE42@github.com>
|
Yash RE <33805964+YashRE42@users.noreply.github.com> <YashRE42@github.com>
|
||||||
|
Yash RE <33805964+YashRE42@users.noreply.github.com>
|
||||||
Yogesh Sirsat <yogeshsirsat56@gmail.com>
|
Yogesh Sirsat <yogeshsirsat56@gmail.com>
|
||||||
Yogesh Sirsat <yogeshsirsat56@gmail.com> <41695888+yogesh-sirsat@users.noreply.github.com>
|
|
||||||
Zeeshan Equbal <equbalzeeshan@gmail.com>
|
|
||||||
Zeeshan Equbal <equbalzeeshan@gmail.com> <54993043+zee-bit@users.noreply.github.com>
|
|
||||||
Zev Benjamin <zev@zulip.com> <zev@dropbox.com>
|
|
||||||
Zev Benjamin <zev@zulip.com> <zev@humbughq.com>
|
|
||||||
Zev Benjamin <zev@zulip.com> <zev@mit.edu>
|
|
||||||
Zixuan James Li <p359101898@gmail.com>
|
|
||||||
Zixuan James Li <p359101898@gmail.com> <359101898@qq.com>
|
|
||||||
Zixuan James Li <p359101898@gmail.com> <39874143+PIG208@users.noreply.github.com>
|
|
||||||
|
|||||||
@@ -1,17 +1,8 @@
|
|||||||
pnpm-lock.yaml
|
|
||||||
/api_docs/**/*.md
|
|
||||||
/corporate/tests/stripe_fixtures
|
/corporate/tests/stripe_fixtures
|
||||||
/help/**/*.md
|
|
||||||
/locale
|
/locale
|
||||||
|
/static/third
|
||||||
/templates/**/*.md
|
/templates/**/*.md
|
||||||
/tools/setup/emoji/emoji_map.json
|
/tools/setup/emoji/emoji_map.json
|
||||||
/web/third/*
|
|
||||||
!/web/third/marked
|
|
||||||
/web/third/marked/*
|
|
||||||
!/web/third/marked/lib
|
|
||||||
/web/third/marked/lib/*
|
|
||||||
!/web/third/marked/lib/marked.d.ts
|
|
||||||
/zerver/tests/fixtures
|
/zerver/tests/fixtures
|
||||||
/zerver/webhooks/*/doc.md
|
/zerver/webhooks/*/doc.md
|
||||||
/zerver/webhooks/github/githubsponsors.md
|
|
||||||
/zerver/webhooks/*/fixtures
|
/zerver/webhooks/*/fixtures
|
||||||
|
|||||||
@@ -1,15 +0,0 @@
|
|||||||
# https://docs.readthedocs.io/en/stable/config-file/v2.html
|
|
||||||
version: 2
|
|
||||||
|
|
||||||
build:
|
|
||||||
os: ubuntu-22.04
|
|
||||||
tools:
|
|
||||||
python: "3.10"
|
|
||||||
|
|
||||||
sphinx:
|
|
||||||
configuration: docs/conf.py
|
|
||||||
fail_on_warning: true
|
|
||||||
|
|
||||||
python:
|
|
||||||
install:
|
|
||||||
- requirements: requirements/docs.txt
|
|
||||||
27
.tx/config
27
.tx/config
@@ -1,39 +1,32 @@
|
|||||||
# Migrated from transifex-client format with `tx migrate`
|
|
||||||
#
|
|
||||||
# See https://developers.transifex.com/docs/using-the-client which hints at
|
|
||||||
# this format, but in general, the headings are in the format of:
|
|
||||||
#
|
|
||||||
# [o:<org>:p:<project>:r:<resource>]
|
|
||||||
|
|
||||||
[main]
|
[main]
|
||||||
host = https://www.transifex.com
|
host = https://www.transifex.com
|
||||||
lang_map = zh-Hans: zh_Hans, zh-Hant: zh_Hant
|
lang_map = zh-Hans: zh_Hans, zh-Hant: zh_Hant
|
||||||
|
|
||||||
[o:zulip:p:zulip:r:djangopo]
|
[zulip.djangopo]
|
||||||
file_filter = locale/<lang>/LC_MESSAGES/django.po
|
file_filter = locale/<lang>/LC_MESSAGES/django.po
|
||||||
source_file = locale/en/LC_MESSAGES/django.po
|
source_file = locale/en/LC_MESSAGES/django.po
|
||||||
source_lang = en
|
source_lang = en
|
||||||
type = PO
|
type = PO
|
||||||
|
|
||||||
[o:zulip:p:zulip:r:mobile]
|
[zulip.translationsjson]
|
||||||
|
file_filter = locale/<lang>/translations.json
|
||||||
|
source_file = locale/en/translations.json
|
||||||
|
source_lang = en
|
||||||
|
type = KEYVALUEJSON
|
||||||
|
|
||||||
|
[zulip.mobile]
|
||||||
file_filter = locale/<lang>/mobile.json
|
file_filter = locale/<lang>/mobile.json
|
||||||
source_file = locale/en/mobile.json
|
source_file = locale/en/mobile.json
|
||||||
source_lang = en
|
source_lang = en
|
||||||
type = KEYVALUEJSON
|
type = KEYVALUEJSON
|
||||||
|
|
||||||
[o:zulip:p:zulip:r:translationsjson]
|
[zulip-test.djangopo]
|
||||||
file_filter = locale/<lang>/translations.json
|
|
||||||
source_file = locale/en/translations.json
|
|
||||||
source_lang = en
|
|
||||||
type = KEYVALUEJSON
|
|
||||||
|
|
||||||
[o:zulip:p:zulip-test:r:djangopo]
|
|
||||||
file_filter = locale/<lang>/LC_MESSAGES/django.po
|
file_filter = locale/<lang>/LC_MESSAGES/django.po
|
||||||
source_file = locale/en/LC_MESSAGES/django.po
|
source_file = locale/en/LC_MESSAGES/django.po
|
||||||
source_lang = en
|
source_lang = en
|
||||||
type = PO
|
type = PO
|
||||||
|
|
||||||
[o:zulip:p:zulip-test:r:translationsjson]
|
[zulip-test.translationsjson]
|
||||||
file_filter = locale/<lang>/translations.json
|
file_filter = locale/<lang>/translations.json
|
||||||
source_file = locale/en/translations.json
|
source_file = locale/en/translations.json
|
||||||
source_lang = en
|
source_lang = en
|
||||||
|
|||||||
@@ -66,7 +66,7 @@ organizers may take any action they deem appropriate, up to and including a
|
|||||||
temporary ban or permanent expulsion from the community without warning (and
|
temporary ban or permanent expulsion from the community without warning (and
|
||||||
without refund in the case of a paid event).
|
without refund in the case of a paid event).
|
||||||
|
|
||||||
If someone outside the development community (e.g., a user of the Zulip
|
If someone outside the development community (e.g. a user of the Zulip
|
||||||
software) engages in unacceptable behavior that affects someone in the
|
software) engages in unacceptable behavior that affects someone in the
|
||||||
community, we still want to know. Even if we don't have direct control over
|
community, we still want to know. Even if we don't have direct control over
|
||||||
the violator, the community organizers can still support the people
|
the violator, the community organizers can still support the people
|
||||||
@@ -102,72 +102,3 @@ This Code of Conduct is adapted from the
|
|||||||
under a
|
under a
|
||||||
[Creative Commons BY-SA](https://creativecommons.org/licenses/by-sa/4.0/)
|
[Creative Commons BY-SA](https://creativecommons.org/licenses/by-sa/4.0/)
|
||||||
license.
|
license.
|
||||||
|
|
||||||
## Moderating the Zulip community
|
|
||||||
|
|
||||||
Anyone can help moderate the Zulip community by helping make sure that folks are
|
|
||||||
aware of the [community guidelines](https://zulip.com/development-community/)
|
|
||||||
and this Code of Conduct, and that we maintain a positive and respectful
|
|
||||||
atmosphere.
|
|
||||||
|
|
||||||
Here are some guidelines for you how can help:
|
|
||||||
|
|
||||||
- Be friendly! Welcoming folks, thanking them for their feedback, ideas and effort,
|
|
||||||
and just trying to keep the atmosphere warm make the whole community function
|
|
||||||
more smoothly. New participants who feel accepted, listened to and respected
|
|
||||||
are likely to treat others the same way.
|
|
||||||
|
|
||||||
- Be familiar with the [community
|
|
||||||
guidelines](https://zulip.com/development-community/), and cite them liberally
|
|
||||||
when a user violates them. Be polite but firm. Some examples:
|
|
||||||
|
|
||||||
- @user please note that there is no need to @-mention @\_**Tim Abbott** when
|
|
||||||
you ask a question. As noted in the [guidelines for this
|
|
||||||
community](https://zulip.com/development-community/):
|
|
||||||
|
|
||||||
> Use @-mentions sparingly… there is generally no need to @-mention a
|
|
||||||
> core contributor unless you need their timely attention.
|
|
||||||
|
|
||||||
- @user, please keep in mind the following [community
|
|
||||||
guideline](https://zulip.com/development-community/):
|
|
||||||
|
|
||||||
> Don’t ask the same question in multiple places. Moderators read every
|
|
||||||
> public stream, and make sure every question gets a reply.
|
|
||||||
|
|
||||||
I’ve gone ahead and moved the other copy of this message to this thread.
|
|
||||||
|
|
||||||
- If asked a question in a direct message that is better discussed in a public
|
|
||||||
stream:
|
|
||||||
> Hi @user! Please start by reviewing
|
|
||||||
> https://zulip.com/development-community/#community-norms to learn how to
|
|
||||||
> get help in this community.
|
|
||||||
|
|
||||||
- Users sometimes think chat.zulip.org is a testing instance. When this happens,
|
|
||||||
kindly direct them to use the **#test here** stream.
|
|
||||||
|
|
||||||
- If you see a message that’s posted in the wrong place, go ahead and move it if
|
|
||||||
you have permissions to do so, even if you don’t plan to respond to it.
|
|
||||||
Leaving the “Send automated notice to new topic” option enabled helps make it
|
|
||||||
clear what happened to the person who sent the message.
|
|
||||||
|
|
||||||
If you are responding to a message that's been moved, mention the user in your
|
|
||||||
reply, so that the mention serves as a notification of the new location for
|
|
||||||
their conversation.
|
|
||||||
|
|
||||||
- If a user is posting spam, please report it to an administrator. They will:
|
|
||||||
|
|
||||||
- Change the user's name to `<name> (spammer)` and deactivate them.
|
|
||||||
- Delete any spam messages they posted in public streams.
|
|
||||||
|
|
||||||
- We care very much about maintaining a respectful tone in our community. If you
|
|
||||||
see someone being mean or rude, point out that their tone is inappropriate,
|
|
||||||
and ask them to communicate their perspective in a respectful way in the
|
|
||||||
future. If you don’t feel comfortable doing so yourself, feel free to ask a
|
|
||||||
member of Zulip's core team to take care of the situation.
|
|
||||||
|
|
||||||
- Try to assume the best intentions from others (given the range of
|
|
||||||
possibilities presented by their visible behavior), and stick with a friendly
|
|
||||||
and positive tone even when someone’s behavior is poor or disrespectful.
|
|
||||||
Everyone has bad days and stressful situations that can result in them
|
|
||||||
behaving not their best, and while we should be firm about our community
|
|
||||||
rules, we should also enforce them with kindness.
|
|
||||||
|
|||||||
386
CONTRIBUTING.md
386
CONTRIBUTING.md
@@ -1,36 +1,17 @@
|
|||||||
# Contributing guide
|
# Contributing to Zulip
|
||||||
|
|
||||||
Welcome to the Zulip community!
|
Welcome to the Zulip community!
|
||||||
|
|
||||||
## Zulip development community
|
## Community
|
||||||
|
|
||||||
The primary communication forum for the Zulip community is the Zulip
|
The
|
||||||
server hosted at [chat.zulip.org](https://chat.zulip.org/):
|
[Zulip community server](https://zulip.com/development-community/)
|
||||||
|
is the primary communication forum for the Zulip community. It is a good
|
||||||
- **Users** and **administrators** of Zulip organizations stop by to
|
place to start whether you have a question, are a new contributor, are a new
|
||||||
ask questions, offer feedback, and participate in product design
|
user, or anything else. Please review our
|
||||||
discussions.
|
[community norms](https://zulip.com/development-community/#community-norms)
|
||||||
- **Contributors to the project**, including the **core Zulip
|
before posting. The Zulip community is also governed by a
|
||||||
development team**, discuss ongoing and future projects, brainstorm
|
[code of conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html).
|
||||||
ideas, and generally help each other out.
|
|
||||||
|
|
||||||
Everyone is welcome to [sign up](https://chat.zulip.org/) and
|
|
||||||
participate — we love hearing from our users! Public streams in the
|
|
||||||
community receive thousands of messages a week. We recommend signing
|
|
||||||
up using the special invite links for
|
|
||||||
[users](https://chat.zulip.org/join/t5crtoe62bpcxyisiyglmtvb/),
|
|
||||||
[self-hosters](https://chat.zulip.org/join/wnhv3jzm6afa4raenedanfno/)
|
|
||||||
and
|
|
||||||
[contributors](https://chat.zulip.org/join/npzwak7vpmaknrhxthna3c7p/)
|
|
||||||
to get a curated list of initial stream subscriptions.
|
|
||||||
|
|
||||||
To learn how to get started participating in the community, including [community
|
|
||||||
norms](https://zulip.com/development-community/#community-norms) and [where to
|
|
||||||
post](https://zulip.com/development-community/#where-do-i-send-my-message),
|
|
||||||
check out our [Zulip development community
|
|
||||||
guide](https://zulip.com/development-community/). The Zulip community is
|
|
||||||
governed by a [code of
|
|
||||||
conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html).
|
|
||||||
|
|
||||||
## Ways to contribute
|
## Ways to contribute
|
||||||
|
|
||||||
@@ -41,9 +22,9 @@ needs doing:
|
|||||||
|
|
||||||
- Bug squashing and feature development on our Python/Django
|
- Bug squashing and feature development on our Python/Django
|
||||||
[backend](https://github.com/zulip/zulip), web
|
[backend](https://github.com/zulip/zulip), web
|
||||||
[frontend](https://github.com/zulip/zulip),
|
[frontend](https://github.com/zulip/zulip), React Native
|
||||||
Flutter [mobile app](https://github.com/zulip/zulip-flutter) in beta,
|
[mobile app](https://github.com/zulip/zulip-mobile), or Electron
|
||||||
or Electron [desktop app](https://github.com/zulip/zulip-desktop).
|
[desktop app](https://github.com/zulip/zulip-desktop).
|
||||||
- Building out our
|
- Building out our
|
||||||
[Python API and bots](https://github.com/zulip/python-zulip-api) framework.
|
[Python API and bots](https://github.com/zulip/python-zulip-api) framework.
|
||||||
- [Writing an integration](https://zulip.com/api/integrations-overview).
|
- [Writing an integration](https://zulip.com/api/integrations-overview).
|
||||||
@@ -55,14 +36,9 @@ needs doing:
|
|||||||
**Non-code contributions**: Some of the most valuable ways to contribute
|
**Non-code contributions**: Some of the most valuable ways to contribute
|
||||||
don't require touching the codebase at all. For example, you can:
|
don't require touching the codebase at all. For example, you can:
|
||||||
|
|
||||||
- Report issues, including both [feature
|
- [Report issues](#reporting-issues), including both feature requests and
|
||||||
requests](https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html)
|
bug reports.
|
||||||
and [bug
|
|
||||||
reports](https://zulip.readthedocs.io/en/latest/contributing/reporting-bugs.html).
|
|
||||||
- [Give feedback](#user-feedback) if you are evaluating or using Zulip.
|
- [Give feedback](#user-feedback) if you are evaluating or using Zulip.
|
||||||
- [Participate
|
|
||||||
thoughtfully](https://zulip.readthedocs.io/en/latest/contributing/design-discussions.html)
|
|
||||||
in design discussions.
|
|
||||||
- [Sponsor Zulip](https://github.com/sponsors/zulip) through the GitHub sponsors program.
|
- [Sponsor Zulip](https://github.com/sponsors/zulip) through the GitHub sponsors program.
|
||||||
- [Translate](https://zulip.readthedocs.io/en/latest/translating/translating.html)
|
- [Translate](https://zulip.readthedocs.io/en/latest/translating/translating.html)
|
||||||
Zulip into your language.
|
Zulip into your language.
|
||||||
@@ -78,53 +54,30 @@ to help.
|
|||||||
|
|
||||||
- First, make an account on the
|
- First, make an account on the
|
||||||
[Zulip community server](https://zulip.com/development-community/),
|
[Zulip community server](https://zulip.com/development-community/),
|
||||||
paying special attention to the
|
paying special attention to the community norms. If you'd like, introduce
|
||||||
[community norms](https://zulip.com/development-community/#community-norms).
|
yourself in
|
||||||
If you'd like, introduce yourself in
|
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members), using
|
||||||
[#new members](https://chat.zulip.org/#narrow/channel/95-new-members), using
|
|
||||||
your name as the topic. Bonus: tell us about your first impressions of
|
your name as the topic. Bonus: tell us about your first impressions of
|
||||||
Zulip, and anything that felt confusing/broken or interesting/helpful as you
|
Zulip, and anything that felt confusing/broken as you started using the
|
||||||
started using the product.
|
product.
|
||||||
|
|
||||||
- Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor).
|
- Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor).
|
||||||
|
- [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html),
|
||||||
- Set up the development environment for the Zulip codebase you want
|
getting help in
|
||||||
to work on, and start getting familiar with the code.
|
[#provision help](https://chat.zulip.org/#narrow/stream/21-provision-help)
|
||||||
|
if you run into any troubles.
|
||||||
- For the server and web app:
|
- Familiarize yourself with [using the development environment](https://zulip.readthedocs.io/en/latest/development/using.html).
|
||||||
|
- Go through the [new application feature
|
||||||
- [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html),
|
tutorial](https://zulip.readthedocs.io/en/latest/tutorials/new-feature-tutorial.html) to get familiar with
|
||||||
getting help in
|
how the Zulip codebase is organized and how to find code in it.
|
||||||
[#provision help](https://chat.zulip.org/#narrow/channel/21-provision-help)
|
|
||||||
if you run into any troubles.
|
|
||||||
- Familiarize yourself with [using the development environment](https://zulip.readthedocs.io/en/latest/development/using.html).
|
|
||||||
- Go through the [new application feature
|
|
||||||
tutorial](https://zulip.readthedocs.io/en/latest/tutorials/new-feature-tutorial.html) to get familiar with
|
|
||||||
how the Zulip codebase is organized and how to find code in it.
|
|
||||||
|
|
||||||
- For the upcoming Flutter-based mobile app:
|
|
||||||
- Set up a development environment following the instructions in
|
|
||||||
[the project README](https://github.com/zulip/zulip-flutter).
|
|
||||||
- Start reading recent commits to see the code we're writing.
|
|
||||||
Use either a [graphical Git viewer][] like `gitk`, or `git log -p`
|
|
||||||
with [the "secret" to reading its output][git-log-secret].
|
|
||||||
- Pick some of the code that appears in those Git commits and
|
|
||||||
that looks interesting. Use your IDE to visit that code
|
|
||||||
and to navigate to related code, reading to see how it works
|
|
||||||
and how the codebase is organized.
|
|
||||||
|
|
||||||
- Read the [Zulip guide to
|
- Read the [Zulip guide to
|
||||||
Git](https://zulip.readthedocs.io/en/latest/git/index.html) if you
|
Git](https://zulip.readthedocs.io/en/latest/git/index.html) if you
|
||||||
are unfamiliar with Git or Zulip's rebase-based Git workflow,
|
are unfamiliar with Git or Zulip's rebase-based Git workflow,
|
||||||
getting help in [#git
|
getting help in [#git
|
||||||
help](https://chat.zulip.org/#narrow/channel/44-git-help) if you run
|
help](https://chat.zulip.org/#narrow/stream/44-git-help) if you run
|
||||||
into any troubles. Even Git experts should read the [Zulip-specific
|
into any troubles. Even Git experts should read the [Zulip-specific
|
||||||
Git tools
|
Git tools
|
||||||
page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html).
|
page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html).
|
||||||
|
|
||||||
[graphical Git viewer]: https://zulip.readthedocs.io/en/latest/git/setup.html#get-a-graphical-client
|
|
||||||
[git-log-secret]: https://github.com/zulip/zulip-mobile/blob/main/docs/howto/git.md#git-log-secret
|
|
||||||
|
|
||||||
### Where to look for an issue
|
### Where to look for an issue
|
||||||
|
|
||||||
Now you're ready to pick your first issue! Zulip has several repositories you
|
Now you're ready to pick your first issue! Zulip has several repositories you
|
||||||
@@ -139,10 +92,7 @@ use the "good first issue" label to tag issues that are especially approachable
|
|||||||
for new contributors.
|
for new contributors.
|
||||||
|
|
||||||
- [Server and web app](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
- [Server and web app](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||||
- Mobile apps: no "help wanted" label, but see the
|
- [Mobile apps](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||||
[project board](https://github.com/orgs/zulip/projects/5/views/4)
|
|
||||||
for the upcoming Flutter-based app. Look for issues up through the
|
|
||||||
"Launch" milestone, and that aren't already assigned.
|
|
||||||
- [Desktop app](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
- [Desktop app](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||||
- [Terminal app](https://github.com/zulip/zulip-terminal/issues?q=is%3Aopen+is%3Aissue+label%3A"help+wanted")
|
- [Terminal app](https://github.com/zulip/zulip-terminal/issues?q=is%3Aopen+is%3Aissue+label%3A"help+wanted")
|
||||||
- [Python API bindings and bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
- [Python API bindings and bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||||
@@ -174,15 +124,19 @@ Note that you are _not_ claiming an issue while you are iterating through steps
|
|||||||
1-4. _Before you claim an issue_, you should be confident that you will be able to
|
1-4. _Before you claim an issue_, you should be confident that you will be able to
|
||||||
tackle it effectively.
|
tackle it effectively.
|
||||||
|
|
||||||
|
If the lists of issues are overwhelming, you can post in
|
||||||
|
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with a
|
||||||
|
bit about your background and interests, and we'll help you out. The most
|
||||||
|
important thing to say is whether you're looking for a backend (Python),
|
||||||
|
frontend (JavaScript and TypeScript), mobile (React Native), desktop (Electron),
|
||||||
|
documentation (English) or visual design (JavaScript/TypeScript + CSS) issue, and a
|
||||||
|
bit about your programming experience and available time.
|
||||||
|
|
||||||
Additional tips for the [main server and web app
|
Additional tips for the [main server and web app
|
||||||
repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22):
|
repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22):
|
||||||
|
|
||||||
- We especially recommend browsing recently opened issues, as there are more
|
- We especially recommend browsing recently opened issues, as there are more
|
||||||
likely to be easy ones for you to find.
|
likely to be easy ones for you to find.
|
||||||
- Take a look at issues with the ["good first issue"
|
|
||||||
label](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22),
|
|
||||||
as they are especially accessible to new contributors. However, you will
|
|
||||||
likely find issues without this label that are accessible as well.
|
|
||||||
- All issues are partitioned into areas like
|
- All issues are partitioned into areas like
|
||||||
admin, compose, emoji, hotkeys, i18n, onboarding, search, etc. Look
|
admin, compose, emoji, hotkeys, i18n, onboarding, search, etc. Look
|
||||||
through our [list of labels](https://github.com/zulip/zulip/labels), and
|
through our [list of labels](https://github.com/zulip/zulip/labels), and
|
||||||
@@ -194,21 +148,15 @@ repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3
|
|||||||
|
|
||||||
### Claiming an issue
|
### Claiming an issue
|
||||||
|
|
||||||
#### In the main server/web app repository and Zulip Terminal repository
|
#### In the main server and web app repository
|
||||||
|
|
||||||
The Zulip server/web app repository
|
After making sure the issue is tagged with a [help
|
||||||
([`zulip/zulip`](https://github.com/zulip/zulip/)) and the Zulip Terminal
|
|
||||||
repository ([`zulip/zulip-terminal`](https://github.com/zulip/zulip-terminal/))
|
|
||||||
are set up with a GitHub workflow bot called
|
|
||||||
[Zulipbot](https://github.com/zulip/zulipbot), which manages issues and pull
|
|
||||||
requests in order to create a better workflow for Zulip contributors.
|
|
||||||
|
|
||||||
To claim an issue in these repositories, simply post a comment that says
|
|
||||||
`@zulipbot claim` to the issue thread. If the issue is tagged with a [help
|
|
||||||
wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||||
label, Zulipbot will immediately assign the issue to you.
|
label, post a comment with `@zulipbot claim` to the issue thread.
|
||||||
|
[Zulipbot](https://github.com/zulip/zulipbot) is a GitHub workflow bot; it will
|
||||||
|
assign you to the issue and label the issue as "in progress".
|
||||||
|
|
||||||
Note that new contributors can only claim one issue until their first pull request is
|
New contributors can only claim one issue until their first pull request is
|
||||||
merged. This is to encourage folks to finish ongoing work before starting
|
merged. This is to encourage folks to finish ongoing work before starting
|
||||||
something new. If you would like to pick up a new issue while waiting for review
|
something new. If you would like to pick up a new issue while waiting for review
|
||||||
on an almost-ready pull request, you can post a comment to this effect on the
|
on an almost-ready pull request, you can post a comment to this effect on the
|
||||||
@@ -216,11 +164,8 @@ issue you're interested in.
|
|||||||
|
|
||||||
#### In other Zulip repositories
|
#### In other Zulip repositories
|
||||||
|
|
||||||
There is no bot for other Zulip repositories
|
There is no bot for other repositories, so you can simply post a comment saying
|
||||||
([`zulip/zulip-flutter`](https://github.com/zulip/zulip-flutter/), etc.). If
|
that you'd like to work on the issue.
|
||||||
you are interested in claiming an issue in one of these repositories, simply
|
|
||||||
post a comment on the issue thread saying that you'd like to work on it. There
|
|
||||||
is no need to @-mention the issue creator in your comment.
|
|
||||||
|
|
||||||
Please follow the same guidelines as described above: find an issue labeled
|
Please follow the same guidelines as described above: find an issue labeled
|
||||||
"help wanted", and only pick up one issue at a time to start with.
|
"help wanted", and only pick up one issue at a time to start with.
|
||||||
@@ -235,24 +180,99 @@ GitHub issue or pull request.
|
|||||||
|
|
||||||
To get early feedback on any UI changes, we encourage you to post screenshots of
|
To get early feedback on any UI changes, we encourage you to post screenshots of
|
||||||
your work in the [#design
|
your work in the [#design
|
||||||
stream](https://chat.zulip.org/#narrow/channel/101-design) in the [Zulip
|
stream](https://chat.zulip.org/#narrow/stream/101-design) in the [Zulip
|
||||||
development community](https://zulip.com/development-community/)
|
development community](https://zulip.com/development-community/)
|
||||||
|
|
||||||
For more advice, see [What makes a great Zulip
|
For more advice, see [What makes a great Zulip
|
||||||
contributor?](#what-makes-a-great-zulip-contributor) below. It's OK if your
|
contributor?](https://zulip.readthedocs.io/en/latest/overview/contributing.html#what-makes-a-great-zulip-contributor)
|
||||||
first issue takes you a while; that's normal! You'll be able to work a lot
|
below.
|
||||||
faster as you build experience.
|
|
||||||
|
|
||||||
### Submitting a pull request
|
### Submitting a pull request
|
||||||
|
|
||||||
See the [guide on submitting a pull
|
When you believe your code is ready, follow the [guide on how to review
|
||||||
request](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html)
|
code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code)
|
||||||
for detailed instructions on how to present your proposed changes to Zulip.
|
to review your own work. You can often find things you missed by taking a step
|
||||||
|
back to look over your work before asking others to do so. Catching mistakes
|
||||||
|
yourself will help your PRs be merged faster, and folks will appreciate the
|
||||||
|
quality and professionalism of your work.
|
||||||
|
|
||||||
The [pull request review process
|
Then, submit your changes. Carefully reading our [Git guide][git-guide], and in
|
||||||
guide](https://zulip.readthedocs.io/en/latest/contributing/review-process.html)
|
particular the section on [making a pull request][git-guide-make-pr],
|
||||||
explains the stages of review your PR will go through, and offers guidance on
|
will help avoid many common mistakes.
|
||||||
how to help the review process move forward.
|
|
||||||
|
Once you are satisfied with the quality of your PR, follow the
|
||||||
|
[guidelines on asking for a code
|
||||||
|
review](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#asking-for-a-code-review)
|
||||||
|
to request a review. If you are not sure what's best, simply post a
|
||||||
|
comment on the main GitHub thread for your PR clearly indicating that
|
||||||
|
it is ready for review, and the project maintainers will take a look
|
||||||
|
and follow up with next steps.
|
||||||
|
|
||||||
|
It's OK if your first issue takes you a while; that's normal! You'll be
|
||||||
|
able to work a lot faster as you build experience.
|
||||||
|
|
||||||
|
If it helps your workflow, you can submit a work-in-progress pull
|
||||||
|
request before your work is ready for review. Simply prefix the title
|
||||||
|
of work in progress pull requests with `[WIP]`, and then remove the
|
||||||
|
prefix when you think it's time for someone else to review your work.
|
||||||
|
|
||||||
|
[git-guide]: https://zulip.readthedocs.io/en/latest/git/
|
||||||
|
[git-guide-make-pr]: https://zulip.readthedocs.io/en/latest/git/pull-requests.html
|
||||||
|
|
||||||
|
### Stages of a pull request
|
||||||
|
|
||||||
|
Your pull request will likely go through several stages of review.
|
||||||
|
|
||||||
|
1. If your PR makes user-facing changes, the UI and user experience may be
|
||||||
|
reviewed early on, without reference to the code. You will get feedback on
|
||||||
|
any user-facing bugs in the implementation. To minimize the number of review
|
||||||
|
round-trips, make sure to [thoroughly
|
||||||
|
test](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#manual-testing)
|
||||||
|
your own PR prior to asking for review.
|
||||||
|
2. There may be choices made in the implementation that the reviewer
|
||||||
|
will ask you to revisit. This process will go more smoothly if you
|
||||||
|
specifically call attention to the decisions you made while
|
||||||
|
drafting the PR and any points about which you are uncertain. The
|
||||||
|
PR description and comments on your own PR are good ways to do this.
|
||||||
|
3. Oftentimes, seeing an initial implementation will make it clear that the
|
||||||
|
product design for a feature needs to be revised, or that additional changes
|
||||||
|
are needed. The reviewer may therefore ask you to amend or change the
|
||||||
|
implementation. Some changes may be blockers for getting the PR merged, while
|
||||||
|
others may be improvements that can happen afterwards. Feel free to ask if
|
||||||
|
it's unclear which type of feedback you're getting. (Follow-ups can be a
|
||||||
|
great next issue to work on!)
|
||||||
|
4. In addition to any UI/user experience review, all PRs will go through one or
|
||||||
|
more rounds of code review. Your code may initially be [reviewed by other
|
||||||
|
contributors](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html).
|
||||||
|
This helps us make good use of project maintainers' time, and helps you make
|
||||||
|
progress on the PR by getting more frequent feedback. A project maintainer
|
||||||
|
may leave a comment asking someone with expertise in the area you're working
|
||||||
|
on to review your work.
|
||||||
|
5. Final code review and integration for server and webapp PRs is generally done
|
||||||
|
by `@timabbott`.
|
||||||
|
|
||||||
|
#### How to help move the review process forward
|
||||||
|
|
||||||
|
The key to keeping your review moving through the review process is to:
|
||||||
|
|
||||||
|
- Address _all_ the feedback to the best of your ability.
|
||||||
|
- Make it clear when the requested changes have been made
|
||||||
|
and you believe it's time for another look.
|
||||||
|
- Make it as easy as possible to review the changes you made.
|
||||||
|
|
||||||
|
In order to do this, when you believe you have addressed the previous round of
|
||||||
|
feedback on your PR as best you can, post a comment asking reviewers to take
|
||||||
|
another look. Your comment should make it easy to understand what has been done
|
||||||
|
and what remains by:
|
||||||
|
|
||||||
|
- Summarizing the changes made since the last review you received.
|
||||||
|
- Highlighting remaining questions or decisions, with links to any relevant
|
||||||
|
chat.zulip.org threads.
|
||||||
|
- Providing updated screenshots and information on manual testing if
|
||||||
|
appropriate.
|
||||||
|
|
||||||
|
The easier it is to review your work, the more likely you are to receive quick
|
||||||
|
feedback.
|
||||||
|
|
||||||
### Beyond the first issue
|
### Beyond the first issue
|
||||||
|
|
||||||
@@ -278,34 +298,17 @@ labels.
|
|||||||
use the existing pull request (PR) as a starting point for your contribution. If
|
use the existing pull request (PR) as a starting point for your contribution. If
|
||||||
you think a different approach is needed, you can post a new PR, with a comment that clearly
|
you think a different approach is needed, you can post a new PR, with a comment that clearly
|
||||||
explains _why_ you decided to start from scratch.
|
explains _why_ you decided to start from scratch.
|
||||||
- **What if I ask if someone is still working on an issue, and they don't
|
|
||||||
respond?** If you don't get a reply within 2-3 days, go ahead and post a comment
|
|
||||||
that you are working on the issue, and submit a pull request. If the original
|
|
||||||
assignee ends up submitting a pull request first, no worries! You can help by
|
|
||||||
providing feedback on their work, or submit your own PR if you think a
|
|
||||||
different approach is needed (as described above).
|
|
||||||
- **Can I come up with my own feature idea and work on it?** We welcome
|
- **Can I come up with my own feature idea and work on it?** We welcome
|
||||||
suggestions of features or other improvements that you feel would be valuable. If you
|
suggestions of features or other improvements that you feel would be valuable. If you
|
||||||
have a new feature you'd like to add, you can start a conversation [in our
|
have a new feature you'd like to add, you can start a conversation [in our
|
||||||
development community](https://zulip.com/development-community/#where-do-i-send-my-message)
|
development community](https://zulip.com/development-community/#where-do-i-send-my-message)
|
||||||
explaining the feature idea and the problem that you're hoping to solve.
|
explaining the feature idea and the problem that you're hoping to solve.
|
||||||
- **I'm waiting for the next round of review on my PR. Can I pick up
|
|
||||||
another issue in the meantime?** Someone's first Zulip PR often
|
|
||||||
requires quite a bit of iteration, so please [make sure your pull
|
|
||||||
request is reviewable][reviewable-pull-requests] and go through at
|
|
||||||
least one round of feedback from others before picking up a second
|
|
||||||
issue. After that, sure! If
|
|
||||||
[Zulipbot](https://github.com/zulip/zulipbot) does not allow you to
|
|
||||||
claim an issue, you can post a comment describing the status of your
|
|
||||||
other work on the issue you're interested in, and asking for the
|
|
||||||
issue to be assigned to you. Note that addressing feedback on
|
|
||||||
in-progress PRs should always take priority over starting a new PR.
|
|
||||||
- **I think my PR is done, but it hasn't been merged yet. What's going on?**
|
- **I think my PR is done, but it hasn't been merged yet. What's going on?**
|
||||||
1. **Double-check that you have addressed all the feedback**, including any comments
|
1. **Double-check that you have addressed all the feedback**, including any comments
|
||||||
on [Git commit
|
on [Git commit
|
||||||
discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html).
|
discipline](https://zulip.readthedocs.io/en/latest/contributing/version-control.html#commit-discipline).
|
||||||
2. If all the feedback has been addressed, did you [leave a
|
2. If all the feedback has been addressed, did you [leave a
|
||||||
comment](https://zulip.readthedocs.io/en/latest/contributing/review-process.html#how-to-help-move-the-review-process-forward)
|
comment](https://zulip.readthedocs.io/en/latest/overview/contributing.html#how-to-help-move-the-review-process-forward)
|
||||||
explaining that you have done so and **requesting another review**? If not,
|
explaining that you have done so and **requesting another review**? If not,
|
||||||
it may not be clear to project maintainers or reviewers that your PR is
|
it may not be clear to project maintainers or reviewers that your PR is
|
||||||
ready for another look.
|
ready for another look.
|
||||||
@@ -321,28 +324,26 @@ labels.
|
|||||||
occasionally take a few weeks for a PR in the final stages of the review
|
occasionally take a few weeks for a PR in the final stages of the review
|
||||||
process to be merged.
|
process to be merged.
|
||||||
|
|
||||||
[reviewable-pull-requests]: https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html
|
|
||||||
|
|
||||||
## What makes a great Zulip contributor?
|
## What makes a great Zulip contributor?
|
||||||
|
|
||||||
Zulip has a lot of experience working with new contributors. In our
|
Zulip has a lot of experience working with new contributors. In our
|
||||||
experience, these are the best predictors of success:
|
experience, these are the best predictors of success:
|
||||||
|
|
||||||
- [Asking great questions][great-questions]. It's very hard to answer a general
|
- Posting good questions. It's very hard to answer a general question like, "How
|
||||||
question like, "How do I do this issue?" When asking for help, explain your
|
do I do this issue?" When asking for help, explain
|
||||||
current understanding, including what you've done or tried so far and where
|
your current understanding, including what you've done or tried so far and where
|
||||||
you got stuck. Post tracebacks or other error messages if appropriate. For
|
you got stuck. Post tracebacks or other error messages if appropriate. For
|
||||||
more advice, check out [our guide][great-questions]!
|
more information, check out the ["Getting help" section of our community
|
||||||
|
guidelines](https://zulip.com/development-community/#getting-help) and
|
||||||
|
[this essay][good-questions-blog] for some good advice.
|
||||||
- Learning and practicing
|
- Learning and practicing
|
||||||
[Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html).
|
[Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/version-control.html#commit-discipline).
|
||||||
- Submitting carefully tested code. See our [detailed guide on how to review
|
- Submitting carefully tested code. See our [detailed guide on how to review
|
||||||
code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code)
|
code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code)
|
||||||
(yours or someone else's).
|
(yours or someone else's).
|
||||||
- Posting
|
- Posting
|
||||||
[screenshots or GIFs](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html)
|
[screenshots or GIFs](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html)
|
||||||
for frontend changes.
|
for frontend changes.
|
||||||
- Working to [make your pull requests easy to
|
|
||||||
review](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html).
|
|
||||||
- Clearly describing what you have implemented and why. For example, if your
|
- Clearly describing what you have implemented and why. For example, if your
|
||||||
implementation differs from the issue description in some way or is a partial
|
implementation differs from the issue description in some way or is a partial
|
||||||
step towards the requirements described in the issue, be sure to call
|
step towards the requirements described in the issue, be sure to call
|
||||||
@@ -353,7 +354,33 @@ experience, these are the best predictors of success:
|
|||||||
- Being helpful and friendly on the [Zulip community
|
- Being helpful and friendly on the [Zulip community
|
||||||
server](https://zulip.com/development-community/).
|
server](https://zulip.com/development-community/).
|
||||||
|
|
||||||
[great-questions]: https://zulip.readthedocs.io/en/latest/contributing/asking-great-questions.html
|
[good-questions-blog]: https://jvns.ca/blog/good-questions/
|
||||||
|
|
||||||
|
These are also the main criteria we use to select candidates for all
|
||||||
|
of our outreach programs.
|
||||||
|
|
||||||
|
## Reporting issues
|
||||||
|
|
||||||
|
If you find an easily reproducible bug and/or are experienced in reporting
|
||||||
|
bugs, feel free to just open an issue on the relevant project on GitHub.
|
||||||
|
|
||||||
|
If you have a feature request or are not yet sure what the underlying bug
|
||||||
|
is, the best place to post issues is
|
||||||
|
[#issues](https://chat.zulip.org/#narrow/stream/9-issues) (or
|
||||||
|
[#mobile](https://chat.zulip.org/#narrow/stream/48-mobile) or
|
||||||
|
[#desktop](https://chat.zulip.org/#narrow/stream/16-desktop)) on the
|
||||||
|
[Zulip community server](https://zulip.com/development-community/).
|
||||||
|
This allows us to interactively figure out what is going on, let you know if
|
||||||
|
a similar issue has already been opened, and collect any other information
|
||||||
|
we need. Choose a 2-4 word topic that describes the issue, explain the issue
|
||||||
|
and how to reproduce it if known, your browser/OS if relevant, and a
|
||||||
|
[screenshot or screenGIF](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html)
|
||||||
|
if appropriate.
|
||||||
|
|
||||||
|
**Reporting security issues**. Please do not report security issues
|
||||||
|
publicly, including on public streams on chat.zulip.org. You can
|
||||||
|
email [security@zulip.com](mailto:security@zulip.com). We create a CVE for every
|
||||||
|
security issue in our released software.
|
||||||
|
|
||||||
## User feedback
|
## User feedback
|
||||||
|
|
||||||
@@ -377,25 +404,72 @@ to:
|
|||||||
A link to your organization's website?
|
A link to your organization's website?
|
||||||
|
|
||||||
You can contact us in the [#feedback stream of the Zulip development
|
You can contact us in the [#feedback stream of the Zulip development
|
||||||
community](https://chat.zulip.org/#narrow/channel/137-feedback) or
|
community](https://chat.zulip.org/#narrow/stream/137-feedback) or
|
||||||
by emailing [support@zulip.com](mailto:support@zulip.com).
|
by emailing [support@zulip.com](mailto:support@zulip.com).
|
||||||
|
|
||||||
## Outreach programs
|
## Outreach programs
|
||||||
|
|
||||||
Zulip regularly participates in [Google Summer of Code
|
Zulip participates in [Google Summer of Code
|
||||||
(GSoC)](https://developers.google.com/open-source/gsoc/) and
|
(GSoC)](https://developers.google.com/open-source/gsoc/) every year.
|
||||||
[Outreachy](https://www.outreachy.org/). We have been a GSoC mentoring
|
In the past, we've also participated in
|
||||||
organization since 2016, and we accept 15-20 GSoC participants each summer. In
|
[Outreachy](https://www.outreachy.org/), [Google
|
||||||
the past, we’ve also participated in [Google
|
Code-In](https://developers.google.com/open-source/gci/), and hosted
|
||||||
Code-In](https://developers.google.com/open-source/gci/), and hosted summer
|
summer interns from Harvard, MIT, and Stanford.
|
||||||
interns from Harvard, MIT, and Stanford.
|
|
||||||
|
|
||||||
Check out our [outreach programs
|
While each third-party program has its own rules and requirements, the
|
||||||
overview](https://zulip.readthedocs.io/en/latest/outreach/overview.html) to learn
|
Zulip community's approaches all of these programs with these ideas in
|
||||||
more about participating in an outreach program with Zulip. Most of our program
|
mind:
|
||||||
participants end up sticking around the project long-term, and many have become
|
|
||||||
core team members, maintaining important parts of the project. We hope you
|
- We try to make the application process as valuable for the applicant as
|
||||||
apply!
|
possible. Expect high-quality code reviews, a supportive community, and
|
||||||
|
publicly viewable patches you can link to from your resume, regardless of
|
||||||
|
whether you are selected.
|
||||||
|
- To apply, you'll have to submit at least one pull request to a Zulip
|
||||||
|
repository. Most students accepted to one of our programs have
|
||||||
|
several merged pull requests (including at least one larger PR) by
|
||||||
|
the time of the application deadline.
|
||||||
|
- The main criteria we use is quality of your best contributions, and
|
||||||
|
the bullets listed at
|
||||||
|
[What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor).
|
||||||
|
Because we focus on evaluating your best work, it doesn't hurt your
|
||||||
|
application to makes mistakes in your first few PRs as long as your
|
||||||
|
work improves.
|
||||||
|
|
||||||
|
Most of our outreach program participants end up sticking around the
|
||||||
|
project long-term, and many have become core team members, maintaining
|
||||||
|
important parts of the project. We hope you apply!
|
||||||
|
|
||||||
|
### Google Summer of Code
|
||||||
|
|
||||||
|
The largest outreach program Zulip participates in is GSoC (14
|
||||||
|
students in 2017; 11 in 2018; 17 in 2019; 18 in 2020; 18 in 2021). While we
|
||||||
|
don't control how
|
||||||
|
many slots Google allocates to Zulip, we hope to mentor a similar
|
||||||
|
number of students in future summers. Check out our [blog
|
||||||
|
post](https://blog.zulip.com/2021/09/30/google-summer-of-code-2021/) to learn
|
||||||
|
about the GSoC 2021 experience and our participants' accomplishments.
|
||||||
|
|
||||||
|
If you're reading this well before the application deadline and want
|
||||||
|
to make your application strong, we recommend getting involved in the
|
||||||
|
community and fixing issues in Zulip now. Having good contributions
|
||||||
|
and building a reputation for doing good work is the best way to have
|
||||||
|
a strong application.
|
||||||
|
|
||||||
|
Our [GSoC program page][gsoc-guide] has lots more details on how
|
||||||
|
Zulip does GSoC, as well as project ideas. Note, however, that the project idea
|
||||||
|
list is maintained only during the GSoC application period, so if
|
||||||
|
you're looking at some other time of year, the project list is likely
|
||||||
|
out-of-date.
|
||||||
|
|
||||||
|
In some years, we have also run a Zulip Summer of Code (ZSoC)
|
||||||
|
program for students who we wanted to accept into GSoC but did not have an
|
||||||
|
official slot for. Student expectations are the
|
||||||
|
same as with GSoC, and ZSoC has no separate application process; your
|
||||||
|
GSoC application is your ZSoC application. If we'd like to select you
|
||||||
|
for ZSoC, we'll contact you when the GSoC results are announced.
|
||||||
|
|
||||||
|
[gsoc-guide]: https://zulip.readthedocs.io/en/latest/contributing/gsoc.html
|
||||||
|
[gsoc-faq]: https://developers.google.com/open-source/gsoc/faq
|
||||||
|
|
||||||
## Stay connected
|
## Stay connected
|
||||||
|
|
||||||
@@ -412,7 +486,7 @@ Here are some ways you can help others find Zulip:
|
|||||||
|
|
||||||
- Star us on GitHub. There are four main repositories:
|
- Star us on GitHub. There are four main repositories:
|
||||||
[server/web](https://github.com/zulip/zulip),
|
[server/web](https://github.com/zulip/zulip),
|
||||||
[Flutter mobile](https://github.com/zulip/zulip-flutter),
|
[mobile](https://github.com/zulip/zulip-mobile),
|
||||||
[desktop](https://github.com/zulip/zulip-desktop), and
|
[desktop](https://github.com/zulip/zulip-desktop), and
|
||||||
[Python API](https://github.com/zulip/python-zulip-api).
|
[Python API](https://github.com/zulip/python-zulip-api).
|
||||||
|
|
||||||
|
|||||||
@@ -1,25 +1,15 @@
|
|||||||
# This is a multiarch Dockerfile. See https://docs.docker.com/desktop/multi-arch/
|
# To build run `docker build -f Dockerfile-postgresql .` from the root of the
|
||||||
#
|
# zulip repo.
|
||||||
# To set up the first time:
|
|
||||||
# docker buildx create --name multiarch --use
|
|
||||||
#
|
|
||||||
# To build:
|
|
||||||
# docker buildx build --platform linux/amd64,linux/arm64 \
|
|
||||||
# -f ./Dockerfile-postgresql -t zulip/zulip-postgresql:14 --push .
|
|
||||||
|
|
||||||
# Currently the PostgreSQL images do not support automatic upgrading of
|
# Currently the PostgreSQL images do not support automatic upgrading of
|
||||||
# the on-disk data in volumes. So the base image cannot currently be upgraded
|
# the on-disk data in volumes. So the base image can not currently be upgraded
|
||||||
# without users needing a manual pgdump and restore.
|
# without users needing a manual pgdump and restore.
|
||||||
|
|
||||||
# https://hub.docker.com/r/groonga/pgroonga/tags
|
|
||||||
ARG PGROONGA_VERSION=latest
|
|
||||||
ARG POSTGRESQL_VERSION=14
|
|
||||||
FROM groonga/pgroonga:$PGROONGA_VERSION-alpine-$POSTGRESQL_VERSION-slim
|
|
||||||
|
|
||||||
# Install hunspell, Zulip stop words, and run Zulip database
|
# Install hunspell, Zulip stop words, and run Zulip database
|
||||||
# init.
|
# init.
|
||||||
|
FROM groonga/pgroonga:latest-alpine-10-slim
|
||||||
RUN apk add -U --no-cache hunspell-en
|
RUN apk add -U --no-cache hunspell-en
|
||||||
RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix
|
RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix
|
||||||
COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/local/share/postgresql/tsearch_data/zulip_english.stop
|
COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/local/share/postgresql/tsearch_data/zulip_english.stop
|
||||||
COPY scripts/setup/create-db.sql /docker-entrypoint-initdb.d/zulip-create-db.sql
|
COPY scripts/setup/create-db.sql /docker-entrypoint-initdb.d/zulip-create-db.sql
|
||||||
COPY scripts/setup/create-pgroonga.sql /docker-entrypoint-initdb.d/zulip-create-pgroonga.sql
|
COPY scripts/setup/create-pgroonga.sql /docker-entrypoint-initdb.d/zulip-create-pgroonga.sql
|
||||||
|
|||||||
21
README.md
21
README.md
@@ -17,7 +17,7 @@ Come find us on the [development community chat](https://zulip.com/development-c
|
|||||||
[](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amain)
|
[](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amain)
|
||||||
[](https://codecov.io/gh/zulip/zulip)
|
[](https://codecov.io/gh/zulip/zulip)
|
||||||
[][mypy-coverage]
|
[][mypy-coverage]
|
||||||
[](https://github.com/astral-sh/ruff)
|
[](https://github.com/psf/black)
|
||||||
[](https://github.com/prettier/prettier)
|
[](https://github.com/prettier/prettier)
|
||||||
[](https://github.com/zulip/zulip/releases/latest)
|
[](https://github.com/zulip/zulip/releases/latest)
|
||||||
[](https://zulip.readthedocs.io/en/latest/)
|
[](https://zulip.readthedocs.io/en/latest/)
|
||||||
@@ -33,17 +33,16 @@ Come find us on the [development community chat](https://zulip.com/development-c
|
|||||||
## Getting started
|
## Getting started
|
||||||
|
|
||||||
- **Contributing code**. Check out our [guide for new
|
- **Contributing code**. Check out our [guide for new
|
||||||
contributors](https://zulip.readthedocs.io/en/latest/contributing/contributing.html)
|
contributors](https://zulip.readthedocs.io/en/latest/overview/contributing.html)
|
||||||
to get started. We have invested in making Zulip’s code highly
|
to get started. We have invested into making Zulip’s code uniquely readable,
|
||||||
readable, thoughtfully tested, and easy to modify. Beyond that, we
|
well tested, and easy to modify. Beyond that, we have written an extraordinary
|
||||||
have written an extraordinary 150K words of documentation for Zulip
|
150K words of documentation on how to contribute to Zulip.
|
||||||
contributors.
|
|
||||||
|
|
||||||
- **Contributing non-code**. [Report an
|
- **Contributing non-code**. [Report an
|
||||||
issue](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#reporting-issues),
|
issue](https://zulip.readthedocs.io/en/latest/overview/contributing.html#reporting-issues),
|
||||||
[translate](https://zulip.readthedocs.io/en/latest/translating/translating.html)
|
[translate](https://zulip.readthedocs.io/en/latest/translating/translating.html)
|
||||||
Zulip into your language, or [give us
|
Zulip into your language, or [give us
|
||||||
feedback](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#user-feedback).
|
feedback](https://zulip.readthedocs.io/en/latest/overview/contributing.html#user-feedback).
|
||||||
We'd love to hear from you, whether you've been using Zulip for years, or are just
|
We'd love to hear from you, whether you've been using Zulip for years, or are just
|
||||||
trying it out for the first time.
|
trying it out for the first time.
|
||||||
|
|
||||||
@@ -52,7 +51,7 @@ Come find us on the [development community chat](https://zulip.com/development-c
|
|||||||
recommend reading about Zulip's [unique
|
recommend reading about Zulip's [unique
|
||||||
approach](https://zulip.com/why-zulip/) to organizing conversations.
|
approach](https://zulip.com/why-zulip/) to organizing conversations.
|
||||||
|
|
||||||
- **Running a Zulip server**. Self-host Zulip directly on Ubuntu or Debian
|
- **Running a Zulip server**. Self host Zulip directly on Ubuntu or Debian
|
||||||
Linux, in [Docker](https://github.com/zulip/docker-zulip), or with prebuilt
|
Linux, in [Docker](https://github.com/zulip/docker-zulip), or with prebuilt
|
||||||
images for [Digital Ocean](https://marketplace.digitalocean.com/apps/zulip) and
|
images for [Digital Ocean](https://marketplace.digitalocean.com/apps/zulip) and
|
||||||
[Render](https://render.com/docs/deploy-zulip).
|
[Render](https://render.com/docs/deploy-zulip).
|
||||||
@@ -65,14 +64,14 @@ Come find us on the [development community chat](https://zulip.com/development-c
|
|||||||
projects](https://zulip.com/for/open-source/).
|
projects](https://zulip.com/for/open-source/).
|
||||||
|
|
||||||
- **Participating in [outreach
|
- **Participating in [outreach
|
||||||
programs](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#outreach-programs)**
|
programs](https://zulip.readthedocs.io/en/latest/overview/contributing.html#outreach-programs)**
|
||||||
like [Google Summer of Code](https://developers.google.com/open-source/gsoc/)
|
like [Google Summer of Code](https://developers.google.com/open-source/gsoc/)
|
||||||
and [Outreachy](https://www.outreachy.org/).
|
and [Outreachy](https://www.outreachy.org/).
|
||||||
|
|
||||||
- **Supporting Zulip**. Advocate for your organization to use Zulip, become a
|
- **Supporting Zulip**. Advocate for your organization to use Zulip, become a
|
||||||
[sponsor](https://github.com/sponsors/zulip), write a review in the mobile app
|
[sponsor](https://github.com/sponsors/zulip), write a review in the mobile app
|
||||||
stores, or [help others find
|
stores, or [help others find
|
||||||
Zulip](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#help-others-find-zulip).
|
Zulip](https://zulip.readthedocs.io/en/latest/overview/contributing.html#help-others-find-zulip).
|
||||||
|
|
||||||
You may also be interested in reading our [blog](https://blog.zulip.org/), and
|
You may also be interested in reading our [blog](https://blog.zulip.org/), and
|
||||||
following us on [Twitter](https://twitter.com/zulip) and
|
following us on [Twitter](https://twitter.com/zulip) and
|
||||||
|
|||||||
@@ -33,5 +33,5 @@ See also our documentation on the [Zulip release
|
|||||||
lifecycle][release-lifecycle].
|
lifecycle][release-lifecycle].
|
||||||
|
|
||||||
[security-model]: https://zulip.readthedocs.io/en/latest/production/security-model.html
|
[security-model]: https://zulip.readthedocs.io/en/latest/production/security-model.html
|
||||||
[upgrades]: https://zulip.readthedocs.io/en/stable/production/upgrade.html#upgrading-to-a-release
|
[upgrades]: https://zulip.readthedocs.io/en/latest/production/upgrade-or-modify.html#upgrading-to-a-release
|
||||||
[release-lifecycle]: https://zulip.readthedocs.io/en/latest/overview/release-lifecycle.html
|
[release-lifecycle]: https://zulip.readthedocs.io/en/latest/overview/release-lifecycle.html
|
||||||
|
|||||||
20
Vagrantfile
vendored
20
Vagrantfile
vendored
@@ -12,13 +12,11 @@ Vagrant.configure("2") do |config|
|
|||||||
vm_num_cpus = "2"
|
vm_num_cpus = "2"
|
||||||
vm_memory = "2048"
|
vm_memory = "2048"
|
||||||
|
|
||||||
ubuntu_mirror = ""
|
debian_mirror = ""
|
||||||
vboxadd_version = nil
|
vboxadd_version = nil
|
||||||
|
|
||||||
config.vm.box = "bento/ubuntu-22.04"
|
|
||||||
|
|
||||||
config.vm.synced_folder ".", "/vagrant", disabled: true
|
config.vm.synced_folder ".", "/vagrant", disabled: true
|
||||||
config.vm.synced_folder ".", "/srv/zulip", docker_consistency: "z"
|
config.vm.synced_folder ".", "/srv/zulip"
|
||||||
|
|
||||||
vagrant_config_file = ENV["HOME"] + "/.zulip-vagrant-config"
|
vagrant_config_file = ENV["HOME"] + "/.zulip-vagrant-config"
|
||||||
if File.file?(vagrant_config_file)
|
if File.file?(vagrant_config_file)
|
||||||
@@ -34,7 +32,7 @@ Vagrant.configure("2") do |config|
|
|||||||
when "HOST_IP_ADDR"; host_ip_addr = value
|
when "HOST_IP_ADDR"; host_ip_addr = value
|
||||||
when "GUEST_CPUS"; vm_num_cpus = value
|
when "GUEST_CPUS"; vm_num_cpus = value
|
||||||
when "GUEST_MEMORY_MB"; vm_memory = value
|
when "GUEST_MEMORY_MB"; vm_memory = value
|
||||||
when "UBUNTU_MIRROR"; ubuntu_mirror = value
|
when "DEBIAN_MIRROR"; debian_mirror = value
|
||||||
when "VBOXADD_VERSION"; vboxadd_version = value
|
when "VBOXADD_VERSION"; vboxadd_version = value
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
@@ -63,23 +61,23 @@ Vagrant.configure("2") do |config|
|
|||||||
config.vm.network "forwarded_port", guest: 9994, host: host_port + 3, host_ip: host_ip_addr
|
config.vm.network "forwarded_port", guest: 9994, host: host_port + 3, host_ip: host_ip_addr
|
||||||
# Specify Docker provider before VirtualBox provider so it's preferred.
|
# Specify Docker provider before VirtualBox provider so it's preferred.
|
||||||
config.vm.provider "docker" do |d, override|
|
config.vm.provider "docker" do |d, override|
|
||||||
override.vm.box = nil
|
|
||||||
d.build_dir = File.join(__dir__, "tools", "setup", "dev-vagrant-docker")
|
d.build_dir = File.join(__dir__, "tools", "setup", "dev-vagrant-docker")
|
||||||
d.build_args = ["--build-arg", "VAGRANT_UID=#{Process.uid}"]
|
d.build_args = ["--build-arg", "VAGRANT_UID=#{Process.uid}"]
|
||||||
if !ubuntu_mirror.empty?
|
if !debian_mirror.empty?
|
||||||
d.build_args += ["--build-arg", "UBUNTU_MIRROR=#{ubuntu_mirror}"]
|
d.build_args += ["--build-arg", "DEBIAN_MIRROR=#{debian_mirror}"]
|
||||||
end
|
end
|
||||||
d.has_ssh = true
|
d.has_ssh = true
|
||||||
d.create_args = ["--ulimit", "nofile=1024:65536"]
|
d.create_args = ["--ulimit", "nofile=1024:65536"]
|
||||||
end
|
end
|
||||||
|
|
||||||
config.vm.provider "virtualbox" do |vb, override|
|
config.vm.provider "virtualbox" do |vb, override|
|
||||||
|
override.vm.box = "bento/debian-10"
|
||||||
# It's possible we can get away with just 1.5GB; more testing needed
|
# It's possible we can get away with just 1.5GB; more testing needed
|
||||||
vb.memory = vm_memory
|
vb.memory = vm_memory
|
||||||
vb.cpus = vm_num_cpus
|
vb.cpus = vm_num_cpus
|
||||||
|
|
||||||
if !vboxadd_version.nil?
|
if !vboxadd_version.nil?
|
||||||
override.vbguest.installer = Class.new(VagrantVbguest::Installers::Ubuntu) do
|
override.vbguest.installer = Class.new(VagrantVbguest::Installers::Debian) do
|
||||||
define_method(:host_version) do |reload = false|
|
define_method(:host_version) do |reload = false|
|
||||||
VagrantVbguest::Version(vboxadd_version)
|
VagrantVbguest::Version(vboxadd_version)
|
||||||
end
|
end
|
||||||
@@ -90,12 +88,14 @@ Vagrant.configure("2") do |config|
|
|||||||
end
|
end
|
||||||
|
|
||||||
config.vm.provider "hyperv" do |h, override|
|
config.vm.provider "hyperv" do |h, override|
|
||||||
|
override.vm.box = "bento/debian-10"
|
||||||
h.memory = vm_memory
|
h.memory = vm_memory
|
||||||
h.maxmemory = vm_memory
|
h.maxmemory = vm_memory
|
||||||
h.cpus = vm_num_cpus
|
h.cpus = vm_num_cpus
|
||||||
end
|
end
|
||||||
|
|
||||||
config.vm.provider "parallels" do |prl, override|
|
config.vm.provider "parallels" do |prl, override|
|
||||||
|
override.vm.box = "bento/debian-10"
|
||||||
prl.memory = vm_memory
|
prl.memory = vm_memory
|
||||||
prl.cpus = vm_num_cpus
|
prl.cpus = vm_num_cpus
|
||||||
end
|
end
|
||||||
@@ -104,5 +104,5 @@ Vagrant.configure("2") do |config|
|
|||||||
# We want provision to be run with the permissions of the vagrant user.
|
# We want provision to be run with the permissions of the vagrant user.
|
||||||
privileged: false,
|
privileged: false,
|
||||||
path: "tools/setup/vagrant-provision",
|
path: "tools/setup/vagrant-provision",
|
||||||
env: { "UBUNTU_MIRROR" => ubuntu_mirror }
|
env: { "DEBIAN_MIRROR" => debian_mirror }
|
||||||
end
|
end
|
||||||
|
|||||||
@@ -1,14 +1,13 @@
|
|||||||
import logging
|
import logging
|
||||||
import time
|
import time
|
||||||
from collections import OrderedDict, defaultdict
|
from collections import OrderedDict, defaultdict
|
||||||
from collections.abc import Callable, Sequence
|
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
from typing import TypeAlias, Union
|
from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.db import connection, models
|
from django.db import connection, models
|
||||||
|
from django.db.models import F
|
||||||
from psycopg2.sql import SQL, Composable, Identifier, Literal
|
from psycopg2.sql import SQL, Composable, Identifier, Literal
|
||||||
from typing_extensions import override
|
|
||||||
|
|
||||||
from analytics.models import (
|
from analytics.models import (
|
||||||
BaseCount,
|
BaseCount,
|
||||||
@@ -19,20 +18,14 @@ from analytics.models import (
|
|||||||
UserCount,
|
UserCount,
|
||||||
installation_epoch,
|
installation_epoch,
|
||||||
)
|
)
|
||||||
|
from zerver.lib.logging_util import log_to_file
|
||||||
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, floor_to_hour, verify_UTC
|
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, floor_to_hour, verify_UTC
|
||||||
from zerver.models import Message, Realm, RealmAuditLog, Stream, UserActivityInterval, UserProfile
|
from zerver.models import Message, Realm, RealmAuditLog, Stream, UserActivityInterval, UserProfile
|
||||||
|
|
||||||
if settings.ZILENCER_ENABLED:
|
## Logging setup ##
|
||||||
from zilencer.models import (
|
|
||||||
RemoteInstallationCount,
|
|
||||||
RemoteRealm,
|
|
||||||
RemoteRealmCount,
|
|
||||||
RemoteZulipServer,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger("zulip.analytics")
|
|
||||||
|
|
||||||
|
logger = logging.getLogger("zulip.management")
|
||||||
|
log_to_file(logger, settings.ANALYTICS_LOG_PATH)
|
||||||
|
|
||||||
# You can't subtract timedelta.max from a datetime, so use this instead
|
# You can't subtract timedelta.max from a datetime, so use this instead
|
||||||
TIMEDELTA_MAX = timedelta(days=365 * 1000)
|
TIMEDELTA_MAX = timedelta(days=365 * 1000)
|
||||||
@@ -56,7 +49,7 @@ class CountStat:
|
|||||||
property: str,
|
property: str,
|
||||||
data_collector: "DataCollector",
|
data_collector: "DataCollector",
|
||||||
frequency: str,
|
frequency: str,
|
||||||
interval: timedelta | None = None,
|
interval: Optional[timedelta] = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
self.property = property
|
self.property = property
|
||||||
self.data_collector = data_collector
|
self.data_collector = data_collector
|
||||||
@@ -69,11 +62,10 @@ class CountStat:
|
|||||||
else:
|
else:
|
||||||
self.interval = self.time_increment
|
self.interval = self.time_increment
|
||||||
|
|
||||||
@override
|
def __str__(self) -> str:
|
||||||
def __repr__(self) -> str:
|
|
||||||
return f"<CountStat: {self.property}>"
|
return f"<CountStat: {self.property}>"
|
||||||
|
|
||||||
def last_successful_fill(self) -> datetime | None:
|
def last_successful_fill(self) -> Optional[datetime]:
|
||||||
fillstate = FillState.objects.filter(property=self.property).first()
|
fillstate = FillState.objects.filter(property=self.property).first()
|
||||||
if fillstate is None:
|
if fillstate is None:
|
||||||
return None
|
return None
|
||||||
@@ -83,7 +75,7 @@ class CountStat:
|
|||||||
|
|
||||||
|
|
||||||
class LoggingCountStat(CountStat):
|
class LoggingCountStat(CountStat):
|
||||||
def __init__(self, property: str, output_table: type[BaseCount], frequency: str) -> None:
|
def __init__(self, property: str, output_table: Type[BaseCount], frequency: str) -> None:
|
||||||
CountStat.__init__(self, property, DataCollector(output_table, None), frequency)
|
CountStat.__init__(self, property, DataCollector(output_table, None), frequency)
|
||||||
|
|
||||||
|
|
||||||
@@ -93,7 +85,7 @@ class DependentCountStat(CountStat):
|
|||||||
property: str,
|
property: str,
|
||||||
data_collector: "DataCollector",
|
data_collector: "DataCollector",
|
||||||
frequency: str,
|
frequency: str,
|
||||||
interval: timedelta | None = None,
|
interval: Optional[timedelta] = None,
|
||||||
dependencies: Sequence[str] = [],
|
dependencies: Sequence[str] = [],
|
||||||
) -> None:
|
) -> None:
|
||||||
CountStat.__init__(self, property, data_collector, frequency, interval=interval)
|
CountStat.__init__(self, property, data_collector, frequency, interval=interval)
|
||||||
@@ -103,20 +95,19 @@ class DependentCountStat(CountStat):
|
|||||||
class DataCollector:
|
class DataCollector:
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
output_table: type[BaseCount],
|
output_table: Type[BaseCount],
|
||||||
pull_function: Callable[[str, datetime, datetime, Realm | None], int] | None,
|
pull_function: Optional[Callable[[str, datetime, datetime, Optional[Realm]], int]],
|
||||||
) -> None:
|
) -> None:
|
||||||
self.output_table = output_table
|
self.output_table = output_table
|
||||||
self.pull_function = pull_function
|
self.pull_function = pull_function
|
||||||
|
|
||||||
def depends_on_realm(self) -> bool:
|
|
||||||
return self.output_table in (UserCount, StreamCount)
|
|
||||||
|
|
||||||
|
|
||||||
## CountStat-level operations ##
|
## CountStat-level operations ##
|
||||||
|
|
||||||
|
|
||||||
def process_count_stat(stat: CountStat, fill_to_time: datetime, realm: Realm | None = None) -> None:
|
def process_count_stat(
|
||||||
|
stat: CountStat, fill_to_time: datetime, realm: Optional[Realm] = None
|
||||||
|
) -> None:
|
||||||
# TODO: The realm argument is not yet supported, in that we don't
|
# TODO: The realm argument is not yet supported, in that we don't
|
||||||
# have a solution for how to update FillState if it is passed. It
|
# have a solution for how to update FillState if it is passed. It
|
||||||
# exists solely as partial plumbing for when we do fully implement
|
# exists solely as partial plumbing for when we do fully implement
|
||||||
@@ -158,7 +149,7 @@ def process_count_stat(stat: CountStat, fill_to_time: datetime, realm: Realm | N
|
|||||||
return
|
return
|
||||||
fill_to_time = min(fill_to_time, dependency_fill_time)
|
fill_to_time = min(fill_to_time, dependency_fill_time)
|
||||||
|
|
||||||
currently_filled += stat.time_increment
|
currently_filled = currently_filled + stat.time_increment
|
||||||
while currently_filled <= fill_to_time:
|
while currently_filled <= fill_to_time:
|
||||||
logger.info("START %s %s", stat.property, currently_filled)
|
logger.info("START %s %s", stat.property, currently_filled)
|
||||||
start = time.time()
|
start = time.time()
|
||||||
@@ -166,7 +157,7 @@ def process_count_stat(stat: CountStat, fill_to_time: datetime, realm: Realm | N
|
|||||||
do_fill_count_stat_at_hour(stat, currently_filled, realm)
|
do_fill_count_stat_at_hour(stat, currently_filled, realm)
|
||||||
do_update_fill_state(fill_state, currently_filled, FillState.DONE)
|
do_update_fill_state(fill_state, currently_filled, FillState.DONE)
|
||||||
end = time.time()
|
end = time.time()
|
||||||
currently_filled += stat.time_increment
|
currently_filled = currently_filled + stat.time_increment
|
||||||
logger.info("DONE %s (%dms)", stat.property, (end - start) * 1000)
|
logger.info("DONE %s (%dms)", stat.property, (end - start) * 1000)
|
||||||
|
|
||||||
|
|
||||||
@@ -179,7 +170,7 @@ def do_update_fill_state(fill_state: FillState, end_time: datetime, state: int)
|
|||||||
# We assume end_time is valid (e.g. is on a day or hour boundary as appropriate)
|
# We assume end_time is valid (e.g. is on a day or hour boundary as appropriate)
|
||||||
# and is time-zone-aware. It is the caller's responsibility to enforce this!
|
# and is time-zone-aware. It is the caller's responsibility to enforce this!
|
||||||
def do_fill_count_stat_at_hour(
|
def do_fill_count_stat_at_hour(
|
||||||
stat: CountStat, end_time: datetime, realm: Realm | None = None
|
stat: CountStat, end_time: datetime, realm: Optional[Realm] = None
|
||||||
) -> None:
|
) -> None:
|
||||||
start_time = end_time - stat.interval
|
start_time = end_time - stat.interval
|
||||||
if not isinstance(stat, LoggingCountStat):
|
if not isinstance(stat, LoggingCountStat):
|
||||||
@@ -198,7 +189,7 @@ def do_fill_count_stat_at_hour(
|
|||||||
def do_delete_counts_at_hour(stat: CountStat, end_time: datetime) -> None:
|
def do_delete_counts_at_hour(stat: CountStat, end_time: datetime) -> None:
|
||||||
if isinstance(stat, LoggingCountStat):
|
if isinstance(stat, LoggingCountStat):
|
||||||
InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()
|
InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()
|
||||||
if stat.data_collector.depends_on_realm():
|
if stat.data_collector.output_table in [UserCount, StreamCount]:
|
||||||
RealmCount.objects.filter(property=stat.property, end_time=end_time).delete()
|
RealmCount.objects.filter(property=stat.property, end_time=end_time).delete()
|
||||||
else:
|
else:
|
||||||
UserCount.objects.filter(property=stat.property, end_time=end_time).delete()
|
UserCount.objects.filter(property=stat.property, end_time=end_time).delete()
|
||||||
@@ -208,18 +199,18 @@ def do_delete_counts_at_hour(stat: CountStat, end_time: datetime) -> None:
|
|||||||
|
|
||||||
|
|
||||||
def do_aggregate_to_summary_table(
|
def do_aggregate_to_summary_table(
|
||||||
stat: CountStat, end_time: datetime, realm: Realm | None = None
|
stat: CountStat, end_time: datetime, realm: Optional[Realm] = None
|
||||||
) -> None:
|
) -> None:
|
||||||
cursor = connection.cursor()
|
cursor = connection.cursor()
|
||||||
|
|
||||||
# Aggregate into RealmCount
|
# Aggregate into RealmCount
|
||||||
output_table = stat.data_collector.output_table
|
output_table = stat.data_collector.output_table
|
||||||
if realm is not None:
|
if realm is not None:
|
||||||
realm_clause: Composable = SQL("AND zerver_realm.id = {}").format(Literal(realm.id))
|
realm_clause = SQL("AND zerver_realm.id = {}").format(Literal(realm.id))
|
||||||
else:
|
else:
|
||||||
realm_clause = SQL("")
|
realm_clause = SQL("")
|
||||||
|
|
||||||
if stat.data_collector.depends_on_realm():
|
if output_table in (UserCount, StreamCount):
|
||||||
realmcount_query = SQL(
|
realmcount_query = SQL(
|
||||||
"""
|
"""
|
||||||
INSERT INTO analytics_realmcount
|
INSERT INTO analytics_realmcount
|
||||||
@@ -297,12 +288,11 @@ def do_aggregate_to_summary_table(
|
|||||||
|
|
||||||
## Utility functions called from outside counts.py ##
|
## Utility functions called from outside counts.py ##
|
||||||
|
|
||||||
|
|
||||||
# called from zerver.actions; should not throw any errors
|
# called from zerver.actions; should not throw any errors
|
||||||
def do_increment_logging_stat(
|
def do_increment_logging_stat(
|
||||||
model_object_for_bucket: Union[Realm, UserProfile, Stream, "RemoteRealm", "RemoteZulipServer"],
|
zerver_object: Union[Realm, UserProfile, Stream],
|
||||||
stat: CountStat,
|
stat: CountStat,
|
||||||
subgroup: str | int | bool | None,
|
subgroup: Optional[Union[str, int, bool]],
|
||||||
event_time: datetime,
|
event_time: datetime,
|
||||||
increment: int = 1,
|
increment: int = 1,
|
||||||
) -> None:
|
) -> None:
|
||||||
@@ -310,100 +300,31 @@ def do_increment_logging_stat(
|
|||||||
return
|
return
|
||||||
|
|
||||||
table = stat.data_collector.output_table
|
table = stat.data_collector.output_table
|
||||||
id_args: dict[str, int | None] = {}
|
|
||||||
conflict_args: list[str] = []
|
|
||||||
if table == RealmCount:
|
if table == RealmCount:
|
||||||
assert isinstance(model_object_for_bucket, Realm)
|
assert isinstance(zerver_object, Realm)
|
||||||
id_args = {"realm_id": model_object_for_bucket.id}
|
id_args: Dict[str, Union[Realm, UserProfile, Stream]] = {"realm": zerver_object}
|
||||||
conflict_args = ["realm_id"]
|
|
||||||
elif table == UserCount:
|
elif table == UserCount:
|
||||||
assert isinstance(model_object_for_bucket, UserProfile)
|
assert isinstance(zerver_object, UserProfile)
|
||||||
id_args = {
|
id_args = {"realm": zerver_object.realm, "user": zerver_object}
|
||||||
"realm_id": model_object_for_bucket.realm_id,
|
else: # StreamCount
|
||||||
"user_id": model_object_for_bucket.id,
|
assert isinstance(zerver_object, Stream)
|
||||||
}
|
id_args = {"realm": zerver_object.realm, "stream": zerver_object}
|
||||||
conflict_args = ["user_id"]
|
|
||||||
elif table == StreamCount:
|
|
||||||
assert isinstance(model_object_for_bucket, Stream)
|
|
||||||
id_args = {
|
|
||||||
"realm_id": model_object_for_bucket.realm_id,
|
|
||||||
"stream_id": model_object_for_bucket.id,
|
|
||||||
}
|
|
||||||
conflict_args = ["stream_id"]
|
|
||||||
elif table == RemoteInstallationCount:
|
|
||||||
assert isinstance(model_object_for_bucket, RemoteZulipServer)
|
|
||||||
id_args = {"server_id": model_object_for_bucket.id, "remote_id": None}
|
|
||||||
conflict_args = ["server_id"]
|
|
||||||
elif table == RemoteRealmCount:
|
|
||||||
assert isinstance(model_object_for_bucket, RemoteRealm)
|
|
||||||
# For RemoteRealmCount (e.g. `mobile_pushes_forwarded::day`),
|
|
||||||
# we have no `remote_id` nor `realm_id`, since they are not
|
|
||||||
# imported from the remote server, which is the source of
|
|
||||||
# truth of those two columns. Their "ON CONFLICT" is thus the
|
|
||||||
# only unique key we have, which is `remote_realm_id`, and not
|
|
||||||
# `server_id` / `realm_id`.
|
|
||||||
id_args = {
|
|
||||||
"server_id": model_object_for_bucket.server_id,
|
|
||||||
"remote_realm_id": model_object_for_bucket.id,
|
|
||||||
"remote_id": None,
|
|
||||||
"realm_id": None,
|
|
||||||
}
|
|
||||||
conflict_args = [
|
|
||||||
"remote_realm_id",
|
|
||||||
]
|
|
||||||
else:
|
|
||||||
raise AssertionError("Unsupported CountStat output_table")
|
|
||||||
|
|
||||||
if stat.frequency == CountStat.DAY:
|
if stat.frequency == CountStat.DAY:
|
||||||
end_time = ceiling_to_day(event_time)
|
end_time = ceiling_to_day(event_time)
|
||||||
elif stat.frequency == CountStat.HOUR:
|
else: # CountStat.HOUR:
|
||||||
end_time = ceiling_to_hour(event_time)
|
end_time = ceiling_to_hour(event_time)
|
||||||
else:
|
|
||||||
raise AssertionError("Unsupported CountStat frequency")
|
|
||||||
|
|
||||||
is_subgroup: SQL = SQL("NULL")
|
row, created = table.objects.get_or_create(
|
||||||
if subgroup is not None:
|
property=stat.property,
|
||||||
is_subgroup = SQL("NOT NULL")
|
subgroup=subgroup,
|
||||||
# For backwards consistency, we cast the subgroup to a string
|
end_time=end_time,
|
||||||
# in Python; this emulates the behaviour of `get_or_create`,
|
defaults={"value": increment},
|
||||||
# which was previously used in this function, and performed
|
**id_args,
|
||||||
# this cast because the `subgroup` column is defined as a
|
|
||||||
# `CharField`. Omitting this explicit cast causes a subgroup
|
|
||||||
# of the boolean False to be passed as the PostgreSQL false,
|
|
||||||
# which it stringifies as the lower-case `'false'`, not the
|
|
||||||
# initial-case `'False'` if Python stringifies it.
|
|
||||||
#
|
|
||||||
# Other parts of the system (e.g. count_message_by_user_query)
|
|
||||||
# already use PostgreSQL to cast bools to strings, resulting
|
|
||||||
# in `subgroup` values of lower-case `'false'` -- for example
|
|
||||||
# in `messages_sent:is_bot:hour`. Fixing this inconsistency
|
|
||||||
# via a migration is complicated by these records being
|
|
||||||
# exchanged over the wire from remote servers.
|
|
||||||
subgroup = str(subgroup)
|
|
||||||
conflict_args.append("subgroup")
|
|
||||||
|
|
||||||
id_column_names = SQL(", ").join(map(Identifier, id_args.keys()))
|
|
||||||
id_values = SQL(", ").join(map(Literal, id_args.values()))
|
|
||||||
conflict_columns = SQL(", ").join(map(Identifier, conflict_args))
|
|
||||||
|
|
||||||
sql_query = SQL(
|
|
||||||
"""
|
|
||||||
INSERT INTO {table_name}(property, subgroup, end_time, value, {id_column_names})
|
|
||||||
VALUES (%s, %s, %s, %s, {id_values})
|
|
||||||
ON CONFLICT (property, end_time, {conflict_columns})
|
|
||||||
WHERE subgroup IS {is_subgroup}
|
|
||||||
DO UPDATE SET
|
|
||||||
value = {table_name}.value + EXCLUDED.value
|
|
||||||
"""
|
|
||||||
).format(
|
|
||||||
table_name=Identifier(table._meta.db_table),
|
|
||||||
id_column_names=id_column_names,
|
|
||||||
id_values=id_values,
|
|
||||||
conflict_columns=conflict_columns,
|
|
||||||
is_subgroup=is_subgroup,
|
|
||||||
)
|
)
|
||||||
with connection.cursor() as cursor:
|
if not created:
|
||||||
cursor.execute(sql_query, [stat.property, subgroup, end_time, increment])
|
row.value = F("value") + increment
|
||||||
|
row.save(update_fields=["value"])
|
||||||
|
|
||||||
|
|
||||||
def do_drop_all_analytics_tables() -> None:
|
def do_drop_all_analytics_tables() -> None:
|
||||||
@@ -424,7 +345,7 @@ def do_drop_single_stat(property: str) -> None:
|
|||||||
|
|
||||||
## DataCollector-level operations ##
|
## DataCollector-level operations ##
|
||||||
|
|
||||||
QueryFn: TypeAlias = Callable[[dict[str, Composable]], Composable]
|
QueryFn = Callable[[Dict[str, Composable]], Composable]
|
||||||
|
|
||||||
|
|
||||||
def do_pull_by_sql_query(
|
def do_pull_by_sql_query(
|
||||||
@@ -432,11 +353,11 @@ def do_pull_by_sql_query(
|
|||||||
start_time: datetime,
|
start_time: datetime,
|
||||||
end_time: datetime,
|
end_time: datetime,
|
||||||
query: QueryFn,
|
query: QueryFn,
|
||||||
group_by: tuple[type[models.Model], str] | None,
|
group_by: Optional[Tuple[Type[models.Model], str]],
|
||||||
) -> int:
|
) -> int:
|
||||||
if group_by is None:
|
if group_by is None:
|
||||||
subgroup: Composable = SQL("NULL")
|
subgroup: Composable = SQL("NULL")
|
||||||
group_by_clause: Composable = SQL("")
|
group_by_clause = SQL("")
|
||||||
else:
|
else:
|
||||||
subgroup = Identifier(group_by[0]._meta.db_table, group_by[1])
|
subgroup = Identifier(group_by[0]._meta.db_table, group_by[1])
|
||||||
group_by_clause = SQL(", {}").format(subgroup)
|
group_by_clause = SQL(", {}").format(subgroup)
|
||||||
@@ -466,12 +387,12 @@ def do_pull_by_sql_query(
|
|||||||
|
|
||||||
|
|
||||||
def sql_data_collector(
|
def sql_data_collector(
|
||||||
output_table: type[BaseCount],
|
output_table: Type[BaseCount],
|
||||||
query: QueryFn,
|
query: QueryFn,
|
||||||
group_by: tuple[type[models.Model], str] | None,
|
group_by: Optional[Tuple[Type[models.Model], str]],
|
||||||
) -> DataCollector:
|
) -> DataCollector:
|
||||||
def pull_function(
|
def pull_function(
|
||||||
property: str, start_time: datetime, end_time: datetime, realm: Realm | None = None
|
property: str, start_time: datetime, end_time: datetime, realm: Optional[Realm] = None
|
||||||
) -> int:
|
) -> int:
|
||||||
# The pull function type needs to accept a Realm argument
|
# The pull function type needs to accept a Realm argument
|
||||||
# because the 'minutes_active::day' CountStat uses
|
# because the 'minutes_active::day' CountStat uses
|
||||||
@@ -484,42 +405,8 @@ def sql_data_collector(
|
|||||||
return DataCollector(output_table, pull_function)
|
return DataCollector(output_table, pull_function)
|
||||||
|
|
||||||
|
|
||||||
def count_upload_space_used_by_realm_query(realm: Realm | None) -> QueryFn:
|
|
||||||
if realm is None:
|
|
||||||
realm_clause: Composable = SQL("")
|
|
||||||
else:
|
|
||||||
realm_clause = SQL("zerver_attachment.realm_id = {} AND").format(Literal(realm.id))
|
|
||||||
|
|
||||||
# Note: This query currently has to go through the entire table,
|
|
||||||
# summing all the sizes of attachments for every realm. This can be improved
|
|
||||||
# by having a query which looks at the latest CountStat for each realm,
|
|
||||||
# and sums it with only the new attachments.
|
|
||||||
# There'd be additional complexity added by the fact that attachments can
|
|
||||||
# also be deleted. Partially this can be accounted for by subtracting
|
|
||||||
# ArchivedAttachment sizes, but there's still the issue of attachments
|
|
||||||
# which can be directly deleted via the API.
|
|
||||||
|
|
||||||
return lambda kwargs: SQL(
|
|
||||||
"""
|
|
||||||
INSERT INTO analytics_realmcount (realm_id, property, end_time, value)
|
|
||||||
SELECT
|
|
||||||
zerver_attachment.realm_id,
|
|
||||||
%(property)s,
|
|
||||||
%(time_end)s,
|
|
||||||
COALESCE(SUM(zerver_attachment.size), 0)
|
|
||||||
FROM
|
|
||||||
zerver_attachment
|
|
||||||
WHERE
|
|
||||||
{realm_clause}
|
|
||||||
zerver_attachment.create_time < %(time_end)s
|
|
||||||
GROUP BY
|
|
||||||
zerver_attachment.realm_id
|
|
||||||
"""
|
|
||||||
).format(**kwargs, realm_clause=realm_clause)
|
|
||||||
|
|
||||||
|
|
||||||
def do_pull_minutes_active(
|
def do_pull_minutes_active(
|
||||||
property: str, start_time: datetime, end_time: datetime, realm: Realm | None = None
|
property: str, start_time: datetime, end_time: datetime, realm: Optional[Realm] = None
|
||||||
) -> int:
|
) -> int:
|
||||||
user_activity_intervals = (
|
user_activity_intervals = (
|
||||||
UserActivityInterval.objects.filter(
|
UserActivityInterval.objects.filter(
|
||||||
@@ -532,7 +419,7 @@ def do_pull_minutes_active(
|
|||||||
.values_list("user_profile_id", "user_profile__realm_id", "start", "end")
|
.values_list("user_profile_id", "user_profile__realm_id", "start", "end")
|
||||||
)
|
)
|
||||||
|
|
||||||
seconds_active: dict[tuple[int, int], float] = defaultdict(float)
|
seconds_active: Dict[Tuple[int, int], float] = defaultdict(float)
|
||||||
for user_id, realm_id, interval_start, interval_end in user_activity_intervals:
|
for user_id, realm_id, interval_start, interval_end in user_activity_intervals:
|
||||||
if realm is None or realm.id == realm_id:
|
if realm is None or realm.id == realm_id:
|
||||||
start = max(start_time, interval_start)
|
start = max(start_time, interval_start)
|
||||||
@@ -554,17 +441,11 @@ def do_pull_minutes_active(
|
|||||||
return len(rows)
|
return len(rows)
|
||||||
|
|
||||||
|
|
||||||
def count_message_by_user_query(realm: Realm | None) -> QueryFn:
|
def count_message_by_user_query(realm: Optional[Realm]) -> QueryFn:
|
||||||
if realm is None:
|
if realm is None:
|
||||||
realm_clause: Composable = SQL("")
|
realm_clause = SQL("")
|
||||||
else:
|
else:
|
||||||
# We limit both userprofile and message so that we only see
|
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
|
||||||
# users from this realm, but also get the performance speedup
|
|
||||||
# of limiting messages by realm.
|
|
||||||
realm_clause = SQL(
|
|
||||||
"zerver_userprofile.realm_id = {} AND zerver_message.realm_id = {} AND"
|
|
||||||
).format(Literal(realm.id), Literal(realm.id))
|
|
||||||
# Uses index: zerver_message_realm_date_sent (or the only-date index)
|
|
||||||
return lambda kwargs: SQL(
|
return lambda kwargs: SQL(
|
||||||
"""
|
"""
|
||||||
INSERT INTO analytics_usercount
|
INSERT INTO analytics_usercount
|
||||||
@@ -587,17 +468,11 @@ def count_message_by_user_query(realm: Realm | None) -> QueryFn:
|
|||||||
|
|
||||||
|
|
||||||
# Note: ignores the group_by / group_by_clause.
|
# Note: ignores the group_by / group_by_clause.
|
||||||
def count_message_type_by_user_query(realm: Realm | None) -> QueryFn:
|
def count_message_type_by_user_query(realm: Optional[Realm]) -> QueryFn:
|
||||||
if realm is None:
|
if realm is None:
|
||||||
realm_clause: Composable = SQL("")
|
realm_clause = SQL("")
|
||||||
else:
|
else:
|
||||||
# We limit both userprofile and message so that we only see
|
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
|
||||||
# users from this realm, but also get the performance speedup
|
|
||||||
# of limiting messages by realm.
|
|
||||||
realm_clause = SQL(
|
|
||||||
"zerver_userprofile.realm_id = {} AND zerver_message.realm_id = {} AND"
|
|
||||||
).format(Literal(realm.id), Literal(realm.id))
|
|
||||||
# Uses index: zerver_message_realm_date_sent (or the only-date index)
|
|
||||||
return lambda kwargs: SQL(
|
return lambda kwargs: SQL(
|
||||||
"""
|
"""
|
||||||
INSERT INTO analytics_usercount
|
INSERT INTO analytics_usercount
|
||||||
@@ -642,14 +517,11 @@ def count_message_type_by_user_query(realm: Realm | None) -> QueryFn:
|
|||||||
# use this also subgroup on UserProfile.is_bot. If in the future there is a
|
# use this also subgroup on UserProfile.is_bot. If in the future there is a
|
||||||
# stat that counts messages by stream and doesn't need the UserProfile
|
# stat that counts messages by stream and doesn't need the UserProfile
|
||||||
# table, consider writing a new query for efficiency.
|
# table, consider writing a new query for efficiency.
|
||||||
def count_message_by_stream_query(realm: Realm | None) -> QueryFn:
|
def count_message_by_stream_query(realm: Optional[Realm]) -> QueryFn:
|
||||||
if realm is None:
|
if realm is None:
|
||||||
realm_clause: Composable = SQL("")
|
realm_clause = SQL("")
|
||||||
else:
|
else:
|
||||||
realm_clause = SQL(
|
realm_clause = SQL("zerver_stream.realm_id = {} AND").format(Literal(realm.id))
|
||||||
"zerver_stream.realm_id = {} AND zerver_message.realm_id = {} AND"
|
|
||||||
).format(Literal(realm.id), Literal(realm.id))
|
|
||||||
# Uses index: zerver_message_realm_date_sent (or the only-date index)
|
|
||||||
return lambda kwargs: SQL(
|
return lambda kwargs: SQL(
|
||||||
"""
|
"""
|
||||||
INSERT INTO analytics_streamcount
|
INSERT INTO analytics_streamcount
|
||||||
@@ -677,39 +549,67 @@ def count_message_by_stream_query(realm: Realm | None) -> QueryFn:
|
|||||||
).format(**kwargs, realm_clause=realm_clause)
|
).format(**kwargs, realm_clause=realm_clause)
|
||||||
|
|
||||||
|
|
||||||
# Hardcodes the query needed for active_users_audit:is_bot:day.
|
# Hardcodes the query needed by active_users:is_bot:day, since that is
|
||||||
# Assumes that a user cannot have two RealmAuditLog entries with the
|
# currently the only stat that uses this.
|
||||||
# same event_time and event_type in [RealmAuditLog.USER_CREATED,
|
def count_user_by_realm_query(realm: Optional[Realm]) -> QueryFn:
|
||||||
# USER_DEACTIVATED, etc]. In particular, it's important to ensure
|
|
||||||
# that migrations don't cause that to happen.
|
|
||||||
def check_realmauditlog_by_user_query(realm: Realm | None) -> QueryFn:
|
|
||||||
if realm is None:
|
if realm is None:
|
||||||
realm_clause: Composable = SQL("")
|
realm_clause = SQL("")
|
||||||
else:
|
else:
|
||||||
realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
|
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
|
||||||
return lambda kwargs: SQL(
|
return lambda kwargs: SQL(
|
||||||
"""
|
"""
|
||||||
INSERT INTO analytics_realmcount
|
INSERT INTO analytics_realmcount
|
||||||
(realm_id, value, property, subgroup, end_time)
|
(realm_id, value, property, subgroup, end_time)
|
||||||
SELECT
|
SELECT
|
||||||
zerver_userprofile.realm_id, count(*), %(property)s, {subgroup}, %(time_end)s
|
zerver_realm.id, count(*), %(property)s, {subgroup}, %(time_end)s
|
||||||
FROM zerver_userprofile
|
FROM zerver_realm
|
||||||
JOIN (
|
JOIN zerver_userprofile
|
||||||
SELECT DISTINCT ON (modified_user_id)
|
ON
|
||||||
modified_user_id, event_type
|
zerver_realm.id = zerver_userprofile.realm_id
|
||||||
FROM
|
|
||||||
zerver_realmauditlog
|
|
||||||
WHERE
|
|
||||||
event_type IN ({user_created}, {user_activated}, {user_deactivated}, {user_reactivated}) AND
|
|
||||||
{realm_clause}
|
|
||||||
event_time < %(time_end)s
|
|
||||||
ORDER BY
|
|
||||||
modified_user_id,
|
|
||||||
event_time DESC
|
|
||||||
) last_user_event ON last_user_event.modified_user_id = zerver_userprofile.id
|
|
||||||
WHERE
|
WHERE
|
||||||
last_user_event.event_type in ({user_created}, {user_activated}, {user_reactivated})
|
zerver_realm.date_created < %(time_end)s AND
|
||||||
GROUP BY zerver_userprofile.realm_id {group_by_clause}
|
zerver_userprofile.date_joined >= %(time_start)s AND
|
||||||
|
zerver_userprofile.date_joined < %(time_end)s AND
|
||||||
|
{realm_clause}
|
||||||
|
zerver_userprofile.is_active = TRUE
|
||||||
|
GROUP BY zerver_realm.id {group_by_clause}
|
||||||
|
"""
|
||||||
|
).format(**kwargs, realm_clause=realm_clause)
|
||||||
|
|
||||||
|
|
||||||
|
# Currently hardcodes the query needed for active_users_audit:is_bot:day.
|
||||||
|
# Assumes that a user cannot have two RealmAuditLog entries with the same event_time and
|
||||||
|
# event_type in [RealmAuditLog.USER_CREATED, USER_DEACTIVATED, etc].
|
||||||
|
# In particular, it's important to ensure that migrations don't cause that to happen.
|
||||||
|
def check_realmauditlog_by_user_query(realm: Optional[Realm]) -> QueryFn:
|
||||||
|
if realm is None:
|
||||||
|
realm_clause = SQL("")
|
||||||
|
else:
|
||||||
|
realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
|
||||||
|
return lambda kwargs: SQL(
|
||||||
|
"""
|
||||||
|
INSERT INTO analytics_usercount
|
||||||
|
(user_id, realm_id, value, property, subgroup, end_time)
|
||||||
|
SELECT
|
||||||
|
ral1.modified_user_id, ral1.realm_id, 1, %(property)s, {subgroup}, %(time_end)s
|
||||||
|
FROM zerver_realmauditlog ral1
|
||||||
|
JOIN (
|
||||||
|
SELECT modified_user_id, max(event_time) AS max_event_time
|
||||||
|
FROM zerver_realmauditlog
|
||||||
|
WHERE
|
||||||
|
event_type in ({user_created}, {user_activated}, {user_deactivated}, {user_reactivated}) AND
|
||||||
|
{realm_clause}
|
||||||
|
event_time < %(time_end)s
|
||||||
|
GROUP BY modified_user_id
|
||||||
|
) ral2
|
||||||
|
ON
|
||||||
|
ral1.event_time = max_event_time AND
|
||||||
|
ral1.modified_user_id = ral2.modified_user_id
|
||||||
|
JOIN zerver_userprofile
|
||||||
|
ON
|
||||||
|
ral1.modified_user_id = zerver_userprofile.id
|
||||||
|
WHERE
|
||||||
|
ral1.event_type in ({user_created}, {user_activated}, {user_reactivated})
|
||||||
"""
|
"""
|
||||||
).format(
|
).format(
|
||||||
**kwargs,
|
**kwargs,
|
||||||
@@ -721,9 +621,9 @@ def check_realmauditlog_by_user_query(realm: Realm | None) -> QueryFn:
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def check_useractivityinterval_by_user_query(realm: Realm | None) -> QueryFn:
|
def check_useractivityinterval_by_user_query(realm: Optional[Realm]) -> QueryFn:
|
||||||
if realm is None:
|
if realm is None:
|
||||||
realm_clause: Composable = SQL("")
|
realm_clause = SQL("")
|
||||||
else:
|
else:
|
||||||
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
|
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
|
||||||
return lambda kwargs: SQL(
|
return lambda kwargs: SQL(
|
||||||
@@ -745,9 +645,9 @@ def check_useractivityinterval_by_user_query(realm: Realm | None) -> QueryFn:
|
|||||||
).format(**kwargs, realm_clause=realm_clause)
|
).format(**kwargs, realm_clause=realm_clause)
|
||||||
|
|
||||||
|
|
||||||
def count_realm_active_humans_query(realm: Realm | None) -> QueryFn:
|
def count_realm_active_humans_query(realm: Optional[Realm]) -> QueryFn:
|
||||||
if realm is None:
|
if realm is None:
|
||||||
realm_clause: Composable = SQL("")
|
realm_clause = SQL("")
|
||||||
else:
|
else:
|
||||||
realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
|
realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
|
||||||
return lambda kwargs: SQL(
|
return lambda kwargs: SQL(
|
||||||
@@ -755,46 +655,29 @@ def count_realm_active_humans_query(realm: Realm | None) -> QueryFn:
|
|||||||
INSERT INTO analytics_realmcount
|
INSERT INTO analytics_realmcount
|
||||||
(realm_id, value, property, subgroup, end_time)
|
(realm_id, value, property, subgroup, end_time)
|
||||||
SELECT
|
SELECT
|
||||||
active_usercount.realm_id, count(*), %(property)s, NULL, %(time_end)s
|
usercount1.realm_id, count(*), %(property)s, NULL, %(time_end)s
|
||||||
FROM (
|
FROM (
|
||||||
SELECT
|
SELECT realm_id, user_id
|
||||||
realm_id,
|
FROM analytics_usercount
|
||||||
user_id
|
WHERE
|
||||||
FROM
|
property = 'active_users_audit:is_bot:day' AND
|
||||||
analytics_usercount
|
subgroup = 'false' AND
|
||||||
WHERE
|
{realm_clause}
|
||||||
property = '15day_actives::day'
|
end_time = %(time_end)s
|
||||||
{realm_clause}
|
) usercount1
|
||||||
AND end_time = %(time_end)s
|
|
||||||
) active_usercount
|
|
||||||
JOIN zerver_userprofile ON active_usercount.user_id = zerver_userprofile.id
|
|
||||||
AND active_usercount.realm_id = zerver_userprofile.realm_id
|
|
||||||
JOIN (
|
JOIN (
|
||||||
SELECT DISTINCT ON (modified_user_id)
|
SELECT realm_id, user_id
|
||||||
modified_user_id, event_type
|
FROM analytics_usercount
|
||||||
FROM
|
WHERE
|
||||||
zerver_realmauditlog
|
property = '15day_actives::day' AND
|
||||||
WHERE
|
{realm_clause}
|
||||||
event_type IN ({user_created}, {user_activated}, {user_deactivated}, {user_reactivated})
|
end_time = %(time_end)s
|
||||||
AND event_time < %(time_end)s
|
) usercount2
|
||||||
ORDER BY
|
ON
|
||||||
modified_user_id,
|
usercount1.user_id = usercount2.user_id
|
||||||
event_time DESC
|
GROUP BY usercount1.realm_id
|
||||||
) last_user_event ON last_user_event.modified_user_id = active_usercount.user_id
|
|
||||||
WHERE
|
|
||||||
NOT zerver_userprofile.is_bot
|
|
||||||
AND event_type IN ({user_created}, {user_activated}, {user_reactivated})
|
|
||||||
GROUP BY
|
|
||||||
active_usercount.realm_id
|
|
||||||
"""
|
"""
|
||||||
).format(
|
).format(**kwargs, realm_clause=realm_clause)
|
||||||
**kwargs,
|
|
||||||
user_created=Literal(RealmAuditLog.USER_CREATED),
|
|
||||||
user_activated=Literal(RealmAuditLog.USER_ACTIVATED),
|
|
||||||
user_deactivated=Literal(RealmAuditLog.USER_DEACTIVATED),
|
|
||||||
user_reactivated=Literal(RealmAuditLog.USER_REACTIVATED),
|
|
||||||
realm_clause=realm_clause,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# Currently unused and untested
|
# Currently unused and untested
|
||||||
@@ -817,7 +700,7 @@ count_stream_by_realm_query = lambda kwargs: SQL(
|
|||||||
).format(**kwargs)
|
).format(**kwargs)
|
||||||
|
|
||||||
|
|
||||||
def get_count_stats(realm: Realm | None = None) -> dict[str, CountStat]:
|
def get_count_stats(realm: Optional[Realm] = None) -> Dict[str, CountStat]:
|
||||||
## CountStat declarations ##
|
## CountStat declarations ##
|
||||||
|
|
||||||
count_stats_ = [
|
count_stats_ = [
|
||||||
@@ -850,19 +733,39 @@ def get_count_stats(realm: Realm | None = None) -> dict[str, CountStat]:
|
|||||||
),
|
),
|
||||||
CountStat.DAY,
|
CountStat.DAY,
|
||||||
),
|
),
|
||||||
# Counts the number of active users in the UserProfile.is_active sense.
|
# Number of users stats
|
||||||
|
# Stats that count the number of active users in the UserProfile.is_active sense.
|
||||||
|
# 'active_users_audit:is_bot:day' is the canonical record of which users were
|
||||||
|
# active on which days (in the UserProfile.is_active sense).
|
||||||
# Important that this stay a daily stat, so that 'realm_active_humans::day' works as expected.
|
# Important that this stay a daily stat, so that 'realm_active_humans::day' works as expected.
|
||||||
CountStat(
|
CountStat(
|
||||||
"active_users_audit:is_bot:day",
|
"active_users_audit:is_bot:day",
|
||||||
sql_data_collector(
|
sql_data_collector(
|
||||||
RealmCount, check_realmauditlog_by_user_query(realm), (UserProfile, "is_bot")
|
UserCount, check_realmauditlog_by_user_query(realm), (UserProfile, "is_bot")
|
||||||
),
|
),
|
||||||
CountStat.DAY,
|
CountStat.DAY,
|
||||||
),
|
),
|
||||||
|
# Important note: LoggingCountStat objects aren't passed the
|
||||||
|
# Realm argument, because by nature they have a logging
|
||||||
|
# structure, not a pull-from-database structure, so there's no
|
||||||
|
# way to compute them for a single realm after the fact (the
|
||||||
|
# use case for passing a Realm argument).
|
||||||
|
# Sanity check on 'active_users_audit:is_bot:day', and a archetype for future LoggingCountStats.
|
||||||
|
# In RealmCount, 'active_users_audit:is_bot:day' should be the partial
|
||||||
|
# sum sequence of 'active_users_log:is_bot:day', for any realm that
|
||||||
|
# started after the latter stat was introduced.
|
||||||
|
LoggingCountStat("active_users_log:is_bot:day", RealmCount, CountStat.DAY),
|
||||||
|
# Another sanity check on 'active_users_audit:is_bot:day'. Is only an
|
||||||
|
# approximation, e.g. if a user is deactivated between the end of the
|
||||||
|
# day and when this stat is run, they won't be counted. However, is the
|
||||||
|
# simplest of the three to inspect by hand.
|
||||||
CountStat(
|
CountStat(
|
||||||
"upload_quota_used_bytes::day",
|
"active_users:is_bot:day",
|
||||||
sql_data_collector(RealmCount, count_upload_space_used_by_realm_query(realm), None),
|
sql_data_collector(
|
||||||
|
RealmCount, count_user_by_realm_query(realm), (UserProfile, "is_bot")
|
||||||
|
),
|
||||||
CountStat.DAY,
|
CountStat.DAY,
|
||||||
|
interval=TIMEDELTA_MAX,
|
||||||
),
|
),
|
||||||
# Messages read stats. messages_read::hour is the total
|
# Messages read stats. messages_read::hour is the total
|
||||||
# number of messages read, whereas
|
# number of messages read, whereas
|
||||||
@@ -896,16 +799,8 @@ def get_count_stats(realm: Realm | None = None) -> dict[str, CountStat]:
|
|||||||
CountStat(
|
CountStat(
|
||||||
"minutes_active::day", DataCollector(UserCount, do_pull_minutes_active), CountStat.DAY
|
"minutes_active::day", DataCollector(UserCount, do_pull_minutes_active), CountStat.DAY
|
||||||
),
|
),
|
||||||
# Tracks the number of push notifications requested by the server.
|
|
||||||
# Included in LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER.
|
|
||||||
LoggingCountStat(
|
|
||||||
"mobile_pushes_sent::day",
|
|
||||||
RealmCount,
|
|
||||||
CountStat.DAY,
|
|
||||||
),
|
|
||||||
# Rate limiting stats
|
# Rate limiting stats
|
||||||
# Used to limit the number of invitation emails sent by a realm.
|
# Used to limit the number of invitation emails sent by a realm
|
||||||
# Included in LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER.
|
|
||||||
LoggingCountStat("invites_sent::day", RealmCount, CountStat.DAY),
|
LoggingCountStat("invites_sent::day", RealmCount, CountStat.DAY),
|
||||||
# Dependent stats
|
# Dependent stats
|
||||||
# Must come after their dependencies.
|
# Must come after their dependencies.
|
||||||
@@ -914,83 +809,12 @@ def get_count_stats(realm: Realm | None = None) -> dict[str, CountStat]:
|
|||||||
"realm_active_humans::day",
|
"realm_active_humans::day",
|
||||||
sql_data_collector(RealmCount, count_realm_active_humans_query(realm), None),
|
sql_data_collector(RealmCount, count_realm_active_humans_query(realm), None),
|
||||||
CountStat.DAY,
|
CountStat.DAY,
|
||||||
dependencies=["15day_actives::day"],
|
dependencies=["active_users_audit:is_bot:day", "15day_actives::day"],
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
if settings.ZILENCER_ENABLED:
|
|
||||||
# See also the remote_installation versions of these in REMOTE_INSTALLATION_COUNT_STATS.
|
|
||||||
count_stats_.append(
|
|
||||||
LoggingCountStat(
|
|
||||||
"mobile_pushes_received::day",
|
|
||||||
RemoteRealmCount,
|
|
||||||
CountStat.DAY,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
count_stats_.append(
|
|
||||||
LoggingCountStat(
|
|
||||||
"mobile_pushes_forwarded::day",
|
|
||||||
RemoteRealmCount,
|
|
||||||
CountStat.DAY,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
return OrderedDict((stat.property, stat) for stat in count_stats_)
|
return OrderedDict((stat.property, stat) for stat in count_stats_)
|
||||||
|
|
||||||
|
|
||||||
# These properties are tracked by the bouncer itself and therefore syncing them
|
|
||||||
# from a remote server should not be allowed - or the server would be able to interfere
|
|
||||||
# with our data.
|
|
||||||
BOUNCER_ONLY_REMOTE_COUNT_STAT_PROPERTIES = [
|
|
||||||
"mobile_pushes_received::day",
|
|
||||||
"mobile_pushes_forwarded::day",
|
|
||||||
]
|
|
||||||
|
|
||||||
# LoggingCountStats with a daily duration and that are directly stored on
|
|
||||||
# the RealmCount table (instead of via aggregation in process_count_stat),
|
|
||||||
# can be in a state, after the hourly cron job to update analytics counts,
|
|
||||||
# where the logged value will be live-updated later (as the end time for
|
|
||||||
# the stat is still in the future). As these logging counts are designed
|
|
||||||
# to be used on the self-hosted installation for either debugging or rate
|
|
||||||
# limiting, sending these incomplete counts to the bouncer has low value.
|
|
||||||
LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER = {
|
|
||||||
"invites_sent::day",
|
|
||||||
"mobile_pushes_sent::day",
|
|
||||||
"active_users_log:is_bot:day",
|
|
||||||
"active_users:is_bot:day",
|
|
||||||
}
|
|
||||||
|
|
||||||
# To avoid refactoring for now COUNT_STATS can be used as before
|
# To avoid refactoring for now COUNT_STATS can be used as before
|
||||||
COUNT_STATS = get_count_stats()
|
COUNT_STATS = get_count_stats()
|
||||||
|
|
||||||
REMOTE_INSTALLATION_COUNT_STATS = OrderedDict()
|
|
||||||
|
|
||||||
if settings.ZILENCER_ENABLED:
|
|
||||||
# REMOTE_INSTALLATION_COUNT_STATS contains duplicates of the
|
|
||||||
# RemoteRealmCount stats declared above; it is necessary because
|
|
||||||
# pre-8.0 servers do not send the fields required to identify a
|
|
||||||
# RemoteRealm.
|
|
||||||
|
|
||||||
# Tracks the number of push notifications requested to be sent
|
|
||||||
# by a remote server.
|
|
||||||
REMOTE_INSTALLATION_COUNT_STATS["mobile_pushes_received::day"] = LoggingCountStat(
|
|
||||||
"mobile_pushes_received::day",
|
|
||||||
RemoteInstallationCount,
|
|
||||||
CountStat.DAY,
|
|
||||||
)
|
|
||||||
# Tracks the number of push notifications successfully sent to
|
|
||||||
# mobile devices, as requested by the remote server. Therefore
|
|
||||||
# this should be less than or equal to mobile_pushes_received -
|
|
||||||
# with potential tiny offsets resulting from a request being
|
|
||||||
# *received* by the bouncer right before midnight, but *sent* to
|
|
||||||
# the mobile device right after midnight. This would cause the
|
|
||||||
# increments to happen to CountStat records for different days.
|
|
||||||
REMOTE_INSTALLATION_COUNT_STATS["mobile_pushes_forwarded::day"] = LoggingCountStat(
|
|
||||||
"mobile_pushes_forwarded::day",
|
|
||||||
RemoteInstallationCount,
|
|
||||||
CountStat.DAY,
|
|
||||||
)
|
|
||||||
|
|
||||||
ALL_COUNT_STATS = OrderedDict(
|
|
||||||
list(COUNT_STATS.items()) + list(REMOTE_INSTALLATION_COUNT_STATS.items())
|
|
||||||
)
|
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
from math import sqrt
|
from math import sqrt
|
||||||
from random import Random
|
from random import gauss, random, seed
|
||||||
|
from typing import List
|
||||||
|
|
||||||
from analytics.lib.counts import CountStat
|
from analytics.lib.counts import CountStat
|
||||||
|
|
||||||
@@ -15,7 +16,7 @@ def generate_time_series_data(
|
|||||||
frequency: str = CountStat.DAY,
|
frequency: str = CountStat.DAY,
|
||||||
partial_sum: bool = False,
|
partial_sum: bool = False,
|
||||||
random_seed: int = 26,
|
random_seed: int = 26,
|
||||||
) -> list[int]:
|
) -> List[int]:
|
||||||
"""
|
"""
|
||||||
Generate semi-realistic looking time series data for testing analytics graphs.
|
Generate semi-realistic looking time series data for testing analytics graphs.
|
||||||
|
|
||||||
@@ -35,8 +36,6 @@ def generate_time_series_data(
|
|||||||
partial_sum -- If True, return partial sum of the series.
|
partial_sum -- If True, return partial sum of the series.
|
||||||
random_seed -- Seed for random number generator.
|
random_seed -- Seed for random number generator.
|
||||||
"""
|
"""
|
||||||
rng = Random(random_seed)
|
|
||||||
|
|
||||||
if frequency == CountStat.HOUR:
|
if frequency == CountStat.HOUR:
|
||||||
length = days * 24
|
length = days * 24
|
||||||
seasonality = [non_business_hours_base] * 24 * 7
|
seasonality = [non_business_hours_base] * 24 * 7
|
||||||
@@ -45,13 +44,13 @@ def generate_time_series_data(
|
|||||||
seasonality[24 * day + hour] = business_hours_base
|
seasonality[24 * day + hour] = business_hours_base
|
||||||
holidays = []
|
holidays = []
|
||||||
for i in range(days):
|
for i in range(days):
|
||||||
holidays.extend([rng.random() < holiday_rate] * 24)
|
holidays.extend([random() < holiday_rate] * 24)
|
||||||
elif frequency == CountStat.DAY:
|
elif frequency == CountStat.DAY:
|
||||||
length = days
|
length = days
|
||||||
seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [
|
seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [
|
||||||
24 * non_business_hours_base
|
24 * non_business_hours_base
|
||||||
] * 2
|
] * 2
|
||||||
holidays = [rng.random() < holiday_rate for i in range(days)]
|
holidays = [random() < holiday_rate for i in range(days)]
|
||||||
else:
|
else:
|
||||||
raise AssertionError(f"Unknown frequency: {frequency}")
|
raise AssertionError(f"Unknown frequency: {frequency}")
|
||||||
if length < 2:
|
if length < 2:
|
||||||
@@ -59,17 +58,20 @@ def generate_time_series_data(
|
|||||||
f"Must be generating at least 2 data points. Currently generating {length}"
|
f"Must be generating at least 2 data points. Currently generating {length}"
|
||||||
)
|
)
|
||||||
growth_base = growth ** (1.0 / (length - 1))
|
growth_base = growth ** (1.0 / (length - 1))
|
||||||
values_no_noise = [seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)]
|
values_no_noise = [
|
||||||
|
seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)
|
||||||
|
]
|
||||||
|
|
||||||
noise_scalars = [rng.gauss(0, 1)]
|
seed(random_seed)
|
||||||
|
noise_scalars = [gauss(0, 1)]
|
||||||
for i in range(1, length):
|
for i in range(1, length):
|
||||||
noise_scalars.append(
|
noise_scalars.append(
|
||||||
noise_scalars[-1] * autocorrelation + rng.gauss(0, 1) * (1 - autocorrelation)
|
noise_scalars[-1] * autocorrelation + gauss(0, 1) * (1 - autocorrelation)
|
||||||
)
|
)
|
||||||
|
|
||||||
values = [
|
values = [
|
||||||
0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
|
0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
|
||||||
for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays, strict=False)
|
for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)
|
||||||
]
|
]
|
||||||
if partial_sum:
|
if partial_sum:
|
||||||
for i in range(1, length):
|
for i in range(1, length):
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
from analytics.lib.counts import CountStat
|
from analytics.lib.counts import CountStat
|
||||||
from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC
|
from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC
|
||||||
@@ -9,8 +10,8 @@ from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC
|
|||||||
# So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22],
|
# So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22],
|
||||||
# and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
|
# and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
|
||||||
def time_range(
|
def time_range(
|
||||||
start: datetime, end: datetime, frequency: str, min_length: int | None
|
start: datetime, end: datetime, frequency: str, min_length: Optional[int]
|
||||||
) -> list[datetime]:
|
) -> List[datetime]:
|
||||||
verify_UTC(start)
|
verify_UTC(start)
|
||||||
verify_UTC(end)
|
verify_UTC(end)
|
||||||
if frequency == CountStat.HOUR:
|
if frequency == CountStat.HOUR:
|
||||||
@@ -29,5 +30,4 @@ def time_range(
|
|||||||
while current >= start:
|
while current >= start:
|
||||||
times.append(current)
|
times.append(current)
|
||||||
current -= step
|
current -= step
|
||||||
times.reverse()
|
return list(reversed(times))
|
||||||
return times
|
|
||||||
|
|||||||
@@ -1,15 +1,14 @@
|
|||||||
from dataclasses import dataclass
|
import os
|
||||||
|
import time
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from typing import Any, Literal
|
from typing import Any, Dict
|
||||||
|
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
from django.utils.timezone import now as timezone_now
|
from django.utils.timezone import now as timezone_now
|
||||||
from typing_extensions import override
|
|
||||||
|
|
||||||
from analytics.lib.counts import ALL_COUNT_STATS, CountStat
|
from analytics.lib.counts import COUNT_STATS, CountStat
|
||||||
from analytics.models import installation_epoch
|
from analytics.models import installation_epoch
|
||||||
from scripts.lib.zulip_tools import atomic_nagios_write
|
from zerver.lib.timestamp import TimeZoneNotUTCException, floor_to_day, floor_to_hour, verify_UTC
|
||||||
from zerver.lib.management import ZulipBaseCommand
|
|
||||||
from zerver.lib.timestamp import TimeZoneNotUTCError, floor_to_day, floor_to_hour, verify_UTC
|
|
||||||
from zerver.models import Realm
|
from zerver.models import Realm
|
||||||
|
|
||||||
states = {
|
states = {
|
||||||
@@ -20,38 +19,37 @@ states = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
class Command(BaseCommand):
|
||||||
class NagiosResult:
|
|
||||||
status: Literal["ok", "warning", "critical", "unknown"]
|
|
||||||
message: str
|
|
||||||
|
|
||||||
|
|
||||||
class Command(ZulipBaseCommand):
|
|
||||||
help = """Checks FillState table.
|
help = """Checks FillState table.
|
||||||
|
|
||||||
Run as a cron job that runs every hour."""
|
Run as a cron job that runs every hour."""
|
||||||
|
|
||||||
@override
|
|
||||||
def handle(self, *args: Any, **options: Any) -> None:
|
def handle(self, *args: Any, **options: Any) -> None:
|
||||||
fill_state = self.get_fill_state()
|
fill_state = self.get_fill_state()
|
||||||
atomic_nagios_write("check-analytics-state", fill_state.status, fill_state.message)
|
status = fill_state["status"]
|
||||||
|
message = fill_state["message"]
|
||||||
|
|
||||||
def get_fill_state(self) -> NagiosResult:
|
state_file_path = "/var/lib/nagios_state/check-analytics-state"
|
||||||
|
state_file_tmp = state_file_path + "-tmp"
|
||||||
|
|
||||||
|
with open(state_file_tmp, "w") as f:
|
||||||
|
f.write(f"{int(time.time())}|{status}|{states[status]}|{message}\n")
|
||||||
|
os.rename(state_file_tmp, state_file_path)
|
||||||
|
|
||||||
|
def get_fill_state(self) -> Dict[str, Any]:
|
||||||
if not Realm.objects.exists():
|
if not Realm.objects.exists():
|
||||||
return NagiosResult(status="ok", message="No realms exist, so not checking FillState.")
|
return {"status": 0, "message": "No realms exist, so not checking FillState."}
|
||||||
|
|
||||||
warning_unfilled_properties = []
|
warning_unfilled_properties = []
|
||||||
critical_unfilled_properties = []
|
critical_unfilled_properties = []
|
||||||
for property, stat in ALL_COUNT_STATS.items():
|
for property, stat in COUNT_STATS.items():
|
||||||
last_fill = stat.last_successful_fill()
|
last_fill = stat.last_successful_fill()
|
||||||
if last_fill is None:
|
if last_fill is None:
|
||||||
last_fill = installation_epoch()
|
last_fill = installation_epoch()
|
||||||
try:
|
try:
|
||||||
verify_UTC(last_fill)
|
verify_UTC(last_fill)
|
||||||
except TimeZoneNotUTCError:
|
except TimeZoneNotUTCException:
|
||||||
return NagiosResult(
|
return {"status": 2, "message": f"FillState not in UTC for {property}"}
|
||||||
status="critical", message=f"FillState not in UTC for {property}"
|
|
||||||
)
|
|
||||||
|
|
||||||
if stat.frequency == CountStat.DAY:
|
if stat.frequency == CountStat.DAY:
|
||||||
floor_function = floor_to_day
|
floor_function = floor_to_day
|
||||||
@@ -63,10 +61,10 @@ class Command(ZulipBaseCommand):
|
|||||||
critical_threshold = timedelta(minutes=150)
|
critical_threshold = timedelta(minutes=150)
|
||||||
|
|
||||||
if floor_function(last_fill) != last_fill:
|
if floor_function(last_fill) != last_fill:
|
||||||
return NagiosResult(
|
return {
|
||||||
status="critical",
|
"status": 2,
|
||||||
message=f"FillState not on {stat.frequency} boundary for {property}",
|
"message": f"FillState not on {stat.frequency} boundary for {property}",
|
||||||
)
|
}
|
||||||
|
|
||||||
time_to_last_fill = timezone_now() - last_fill
|
time_to_last_fill = timezone_now() - last_fill
|
||||||
if time_to_last_fill > critical_threshold:
|
if time_to_last_fill > critical_threshold:
|
||||||
@@ -75,18 +73,18 @@ class Command(ZulipBaseCommand):
|
|||||||
warning_unfilled_properties.append(property)
|
warning_unfilled_properties.append(property)
|
||||||
|
|
||||||
if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0:
|
if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0:
|
||||||
return NagiosResult(status="ok", message="FillState looks fine.")
|
return {"status": 0, "message": "FillState looks fine."}
|
||||||
if len(critical_unfilled_properties) == 0:
|
if len(critical_unfilled_properties) == 0:
|
||||||
return NagiosResult(
|
return {
|
||||||
status="warning",
|
"status": 1,
|
||||||
message="Missed filling {} once.".format(
|
"message": "Missed filling {} once.".format(
|
||||||
", ".join(warning_unfilled_properties),
|
", ".join(warning_unfilled_properties),
|
||||||
),
|
),
|
||||||
)
|
}
|
||||||
return NagiosResult(
|
return {
|
||||||
status="critical",
|
"status": 2,
|
||||||
message="Missed filling {} once. Missed filling {} at least twice.".format(
|
"message": "Missed filling {} once. Missed filling {} at least twice.".format(
|
||||||
", ".join(warning_unfilled_properties),
|
", ".join(warning_unfilled_properties),
|
||||||
", ".join(critical_unfilled_properties),
|
", ".join(critical_unfilled_properties),
|
||||||
),
|
),
|
||||||
)
|
}
|
||||||
|
|||||||
@@ -1,21 +1,17 @@
|
|||||||
from argparse import ArgumentParser
|
from argparse import ArgumentParser
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from django.core.management.base import CommandError
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
from typing_extensions import override
|
|
||||||
|
|
||||||
from analytics.lib.counts import do_drop_all_analytics_tables
|
from analytics.lib.counts import do_drop_all_analytics_tables
|
||||||
from zerver.lib.management import ZulipBaseCommand
|
|
||||||
|
|
||||||
|
|
||||||
class Command(ZulipBaseCommand):
|
class Command(BaseCommand):
|
||||||
help = """Clear analytics tables."""
|
help = """Clear analytics tables."""
|
||||||
|
|
||||||
@override
|
|
||||||
def add_arguments(self, parser: ArgumentParser) -> None:
|
def add_arguments(self, parser: ArgumentParser) -> None:
|
||||||
parser.add_argument("--force", action="store_true", help="Clear analytics tables.")
|
parser.add_argument("--force", action="store_true", help="Clear analytics tables.")
|
||||||
|
|
||||||
@override
|
|
||||||
def handle(self, *args: Any, **options: Any) -> None:
|
def handle(self, *args: Any, **options: Any) -> None:
|
||||||
if options["force"]:
|
if options["force"]:
|
||||||
do_drop_all_analytics_tables()
|
do_drop_all_analytics_tables()
|
||||||
|
|||||||
@@ -1,25 +1,21 @@
|
|||||||
from argparse import ArgumentParser
|
from argparse import ArgumentParser
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from django.core.management.base import CommandError
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
from typing_extensions import override
|
|
||||||
|
|
||||||
from analytics.lib.counts import ALL_COUNT_STATS, do_drop_single_stat
|
from analytics.lib.counts import COUNT_STATS, do_drop_single_stat
|
||||||
from zerver.lib.management import ZulipBaseCommand
|
|
||||||
|
|
||||||
|
|
||||||
class Command(ZulipBaseCommand):
|
class Command(BaseCommand):
|
||||||
help = """Clear analytics tables."""
|
help = """Clear analytics tables."""
|
||||||
|
|
||||||
@override
|
|
||||||
def add_arguments(self, parser: ArgumentParser) -> None:
|
def add_arguments(self, parser: ArgumentParser) -> None:
|
||||||
parser.add_argument("--force", action="store_true", help="Actually do it.")
|
parser.add_argument("--force", action="store_true", help="Actually do it.")
|
||||||
parser.add_argument("--property", help="The property of the stat to be cleared.")
|
parser.add_argument("--property", help="The property of the stat to be cleared.")
|
||||||
|
|
||||||
@override
|
|
||||||
def handle(self, *args: Any, **options: Any) -> None:
|
def handle(self, *args: Any, **options: Any) -> None:
|
||||||
property = options["property"]
|
property = options["property"]
|
||||||
if property not in ALL_COUNT_STATS:
|
if property not in COUNT_STATS:
|
||||||
raise CommandError(f"Invalid property: {property}")
|
raise CommandError(f"Invalid property: {property}")
|
||||||
if not options["force"]:
|
if not options["force"]:
|
||||||
raise CommandError("No action taken. Use --force.")
|
raise CommandError("No action taken. Use --force.")
|
||||||
|
|||||||
@@ -1,10 +1,9 @@
|
|||||||
from collections.abc import Mapping
|
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from typing import Any, TypeAlias
|
from typing import Any, Dict, List, Mapping, Type, Union
|
||||||
|
from unittest import mock
|
||||||
|
|
||||||
from django.core.files.uploadedfile import UploadedFile
|
from django.core.management.base import BaseCommand
|
||||||
from django.utils.timezone import now as timezone_now
|
from django.utils.timezone import now as timezone_now
|
||||||
from typing_extensions import override
|
|
||||||
|
|
||||||
from analytics.lib.counts import COUNT_STATS, CountStat, do_drop_all_analytics_tables
|
from analytics.lib.counts import COUNT_STATS, CountStat, do_drop_all_analytics_tables
|
||||||
from analytics.lib.fixtures import generate_time_series_data
|
from analytics.lib.fixtures import generate_time_series_data
|
||||||
@@ -20,25 +19,12 @@ from analytics.models import (
|
|||||||
from zerver.actions.create_realm import do_create_realm
|
from zerver.actions.create_realm import do_create_realm
|
||||||
from zerver.actions.users import do_change_user_role
|
from zerver.actions.users import do_change_user_role
|
||||||
from zerver.lib.create_user import create_user
|
from zerver.lib.create_user import create_user
|
||||||
from zerver.lib.management import ZulipBaseCommand
|
|
||||||
from zerver.lib.storage import static_path
|
|
||||||
from zerver.lib.stream_color import STREAM_ASSIGNMENT_COLORS
|
from zerver.lib.stream_color import STREAM_ASSIGNMENT_COLORS
|
||||||
from zerver.lib.timestamp import floor_to_day
|
from zerver.lib.timestamp import floor_to_day
|
||||||
from zerver.lib.upload import upload_message_attachment_from_request
|
from zerver.models import Client, Realm, Recipient, Stream, Subscription, UserProfile
|
||||||
from zerver.models import (
|
|
||||||
Client,
|
|
||||||
NamedUserGroup,
|
|
||||||
Realm,
|
|
||||||
RealmAuditLog,
|
|
||||||
Recipient,
|
|
||||||
Stream,
|
|
||||||
Subscription,
|
|
||||||
UserProfile,
|
|
||||||
)
|
|
||||||
from zerver.models.groups import SystemGroups
|
|
||||||
|
|
||||||
|
|
||||||
class Command(ZulipBaseCommand):
|
class Command(BaseCommand):
|
||||||
help = """Populates analytics tables with randomly generated data."""
|
help = """Populates analytics tables with randomly generated data."""
|
||||||
|
|
||||||
DAYS_OF_DATA = 100
|
DAYS_OF_DATA = 100
|
||||||
@@ -54,7 +40,7 @@ class Command(ZulipBaseCommand):
|
|||||||
spikiness: float,
|
spikiness: float,
|
||||||
holiday_rate: float = 0,
|
holiday_rate: float = 0,
|
||||||
partial_sum: bool = False,
|
partial_sum: bool = False,
|
||||||
) -> list[int]:
|
) -> List[int]:
|
||||||
self.random_seed += 1
|
self.random_seed += 1
|
||||||
return generate_time_series_data(
|
return generate_time_series_data(
|
||||||
days=self.DAYS_OF_DATA,
|
days=self.DAYS_OF_DATA,
|
||||||
@@ -69,7 +55,6 @@ class Command(ZulipBaseCommand):
|
|||||||
random_seed=self.random_seed,
|
random_seed=self.random_seed,
|
||||||
)
|
)
|
||||||
|
|
||||||
@override
|
|
||||||
def handle(self, *args: Any, **options: Any) -> None:
|
def handle(self, *args: Any, **options: Any) -> None:
|
||||||
# TODO: This should arguably only delete the objects
|
# TODO: This should arguably only delete the objects
|
||||||
# associated with the "analytics" realm.
|
# associated with the "analytics" realm.
|
||||||
@@ -94,72 +79,44 @@ class Command(ZulipBaseCommand):
|
|||||||
string_id="analytics", name="Analytics", date_created=installation_time
|
string_id="analytics", name="Analytics", date_created=installation_time
|
||||||
)
|
)
|
||||||
|
|
||||||
shylock = create_user(
|
with mock.patch("zerver.lib.create_user.timezone_now", return_value=installation_time):
|
||||||
"shylock@analytics.ds",
|
shylock = create_user(
|
||||||
"Shylock",
|
"shylock@analytics.ds",
|
||||||
realm,
|
"Shylock",
|
||||||
full_name="Shylock",
|
realm,
|
||||||
role=UserProfile.ROLE_REALM_OWNER,
|
full_name="Shylock",
|
||||||
force_date_joined=installation_time,
|
role=UserProfile.ROLE_REALM_OWNER,
|
||||||
)
|
)
|
||||||
do_change_user_role(shylock, UserProfile.ROLE_REALM_OWNER, acting_user=None)
|
do_change_user_role(shylock, UserProfile.ROLE_REALM_OWNER, acting_user=None)
|
||||||
|
stream = Stream.objects.create(name="all", realm=realm, date_created=installation_time)
|
||||||
# Create guest user for set_guest_users_statistic.
|
|
||||||
create_user(
|
|
||||||
"bassanio@analytics.ds",
|
|
||||||
"Bassanio",
|
|
||||||
realm,
|
|
||||||
full_name="Bassanio",
|
|
||||||
role=UserProfile.ROLE_GUEST,
|
|
||||||
force_date_joined=installation_time,
|
|
||||||
)
|
|
||||||
|
|
||||||
administrators_user_group = NamedUserGroup.objects.get(
|
|
||||||
name=SystemGroups.ADMINISTRATORS, realm=realm, is_system_group=True
|
|
||||||
)
|
|
||||||
stream = Stream.objects.create(
|
|
||||||
name="all",
|
|
||||||
realm=realm,
|
|
||||||
date_created=installation_time,
|
|
||||||
can_remove_subscribers_group=administrators_user_group,
|
|
||||||
)
|
|
||||||
recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
|
recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
|
||||||
stream.recipient = recipient
|
stream.recipient = recipient
|
||||||
stream.save(update_fields=["recipient"])
|
stream.save(update_fields=["recipient"])
|
||||||
|
|
||||||
# Subscribe shylock to the stream to avoid invariant failures.
|
# Subscribe shylock to the stream to avoid invariant failures.
|
||||||
Subscription.objects.create(
|
# TODO: This should use subscribe_users_to_streams from populate_db.
|
||||||
recipient=recipient,
|
subs = [
|
||||||
user_profile=shylock,
|
Subscription(
|
||||||
is_user_active=shylock.is_active,
|
recipient=recipient,
|
||||||
color=STREAM_ASSIGNMENT_COLORS[0],
|
user_profile=shylock,
|
||||||
)
|
is_user_active=shylock.is_active,
|
||||||
RealmAuditLog.objects.create(
|
color=STREAM_ASSIGNMENT_COLORS[0],
|
||||||
realm=realm,
|
),
|
||||||
modified_user=shylock,
|
]
|
||||||
modified_stream=stream,
|
Subscription.objects.bulk_create(subs)
|
||||||
event_last_message_id=0,
|
|
||||||
event_type=RealmAuditLog.SUBSCRIPTION_CREATED,
|
|
||||||
event_time=installation_time,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Create an attachment in the database for set_storage_space_used_statistic.
|
FixtureData = Mapping[Union[str, int, None], List[int]]
|
||||||
IMAGE_FILE_PATH = static_path("images/test-images/checkbox.png")
|
|
||||||
with open(IMAGE_FILE_PATH, "rb") as fp:
|
|
||||||
upload_message_attachment_from_request(UploadedFile(fp), shylock)
|
|
||||||
|
|
||||||
FixtureData: TypeAlias = Mapping[str | int | None, list[int]]
|
|
||||||
|
|
||||||
def insert_fixture_data(
|
def insert_fixture_data(
|
||||||
stat: CountStat,
|
stat: CountStat,
|
||||||
fixture_data: FixtureData,
|
fixture_data: FixtureData,
|
||||||
table: type[BaseCount],
|
table: Type[BaseCount],
|
||||||
) -> None:
|
) -> None:
|
||||||
end_times = time_range(
|
end_times = time_range(
|
||||||
last_end_time, last_end_time, stat.frequency, len(next(iter(fixture_data.values())))
|
last_end_time, last_end_time, stat.frequency, len(list(fixture_data.values())[0])
|
||||||
)
|
)
|
||||||
if table == InstallationCount:
|
if table == InstallationCount:
|
||||||
id_args: dict[str, Any] = {}
|
id_args: Dict[str, Any] = {}
|
||||||
if table == RealmCount:
|
if table == RealmCount:
|
||||||
id_args = {"realm": realm}
|
id_args = {"realm": realm}
|
||||||
if table == UserCount:
|
if table == UserCount:
|
||||||
@@ -168,7 +125,7 @@ class Command(ZulipBaseCommand):
|
|||||||
id_args = {"stream": stream, "realm": realm}
|
id_args = {"stream": stream, "realm": realm}
|
||||||
|
|
||||||
for subgroup, values in fixture_data.items():
|
for subgroup, values in fixture_data.items():
|
||||||
table._default_manager.bulk_create(
|
table.objects.bulk_create(
|
||||||
table(
|
table(
|
||||||
property=stat.property,
|
property=stat.property,
|
||||||
subgroup=subgroup,
|
subgroup=subgroup,
|
||||||
@@ -176,7 +133,7 @@ class Command(ZulipBaseCommand):
|
|||||||
value=value,
|
value=value,
|
||||||
**id_args,
|
**id_args,
|
||||||
)
|
)
|
||||||
for end_time, value in zip(end_times, values, strict=False)
|
for end_time, value in zip(end_times, values)
|
||||||
if value != 0
|
if value != 0
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -283,7 +240,6 @@ class Command(ZulipBaseCommand):
|
|||||||
android, created = Client.objects.get_or_create(name="ZulipAndroid")
|
android, created = Client.objects.get_or_create(name="ZulipAndroid")
|
||||||
iOS, created = Client.objects.get_or_create(name="ZulipiOS")
|
iOS, created = Client.objects.get_or_create(name="ZulipiOS")
|
||||||
react_native, created = Client.objects.get_or_create(name="ZulipMobile")
|
react_native, created = Client.objects.get_or_create(name="ZulipMobile")
|
||||||
flutter, created = Client.objects.get_or_create(name="ZulipFlutter")
|
|
||||||
API, created = Client.objects.get_or_create(name="API: Python")
|
API, created = Client.objects.get_or_create(name="API: Python")
|
||||||
zephyr_mirror, created = Client.objects.get_or_create(name="zephyr_mirror")
|
zephyr_mirror, created = Client.objects.get_or_create(name="zephyr_mirror")
|
||||||
unused, created = Client.objects.get_or_create(name="unused")
|
unused, created = Client.objects.get_or_create(name="unused")
|
||||||
@@ -301,7 +257,6 @@ class Command(ZulipBaseCommand):
|
|||||||
android.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
android.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
||||||
iOS.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
iOS.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
||||||
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||||
flutter.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
|
||||||
API.id: self.generate_fixture_data(stat, 5, 5, 5, 0.6, 3),
|
API.id: self.generate_fixture_data(stat, 5, 5, 5, 0.6, 3),
|
||||||
zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, 0.6, 3),
|
zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, 0.6, 3),
|
||||||
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
|
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
|
||||||
@@ -313,7 +268,6 @@ class Command(ZulipBaseCommand):
|
|||||||
old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, 0.6, 3),
|
old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, 0.6, 3),
|
||||||
android.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
android.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
||||||
iOS.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
iOS.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
||||||
flutter.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
|
||||||
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||||
API.id: self.generate_fixture_data(stat, 50, 50, 5, 0.6, 3),
|
API.id: self.generate_fixture_data(stat, 50, 50, 5, 0.6, 3),
|
||||||
zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, 0.6, 3),
|
zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, 0.6, 3),
|
||||||
@@ -331,7 +285,7 @@ class Command(ZulipBaseCommand):
|
|||||||
"true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3),
|
"true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3),
|
||||||
}
|
}
|
||||||
insert_fixture_data(stat, realm_data, RealmCount)
|
insert_fixture_data(stat, realm_data, RealmCount)
|
||||||
stream_data: Mapping[int | str | None, list[int]] = {
|
stream_data: Mapping[Union[int, str, None], List[int]] = {
|
||||||
"false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4),
|
"false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4),
|
||||||
"true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2),
|
"true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2),
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,27 +1,26 @@
|
|||||||
import hashlib
|
import os
|
||||||
import time
|
import time
|
||||||
from argparse import ArgumentParser
|
from argparse import ArgumentParser
|
||||||
from datetime import timezone
|
from datetime import timezone
|
||||||
from typing import Any
|
from typing import Any, Dict
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
from django.utils.dateparse import parse_datetime
|
from django.utils.dateparse import parse_datetime
|
||||||
from django.utils.timezone import now as timezone_now
|
from django.utils.timezone import now as timezone_now
|
||||||
from typing_extensions import override
|
|
||||||
|
|
||||||
from analytics.lib.counts import ALL_COUNT_STATS, logger, process_count_stat
|
from analytics.lib.counts import COUNT_STATS, logger, process_count_stat
|
||||||
from zerver.lib.management import ZulipBaseCommand, abort_unless_locked
|
from scripts.lib.zulip_tools import ENDC, WARNING
|
||||||
from zerver.lib.remote_server import send_server_data_to_push_bouncer, should_send_analytics_data
|
from zerver.lib.remote_server import send_analytics_to_remote_server
|
||||||
from zerver.lib.timestamp import floor_to_hour
|
from zerver.lib.timestamp import floor_to_hour
|
||||||
from zerver.models import Realm
|
from zerver.models import Realm
|
||||||
|
|
||||||
|
|
||||||
class Command(ZulipBaseCommand):
|
class Command(BaseCommand):
|
||||||
help = """Fills Analytics tables.
|
help = """Fills Analytics tables.
|
||||||
|
|
||||||
Run as a cron job that runs every hour."""
|
Run as a cron job that runs every hour."""
|
||||||
|
|
||||||
@override
|
|
||||||
def add_arguments(self, parser: ArgumentParser) -> None:
|
def add_arguments(self, parser: ArgumentParser) -> None:
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--time",
|
"--time",
|
||||||
@@ -38,12 +37,22 @@ class Command(ZulipBaseCommand):
|
|||||||
"--verbose", action="store_true", help="Print timing information to stdout."
|
"--verbose", action="store_true", help="Print timing information to stdout."
|
||||||
)
|
)
|
||||||
|
|
||||||
@override
|
|
||||||
@abort_unless_locked
|
|
||||||
def handle(self, *args: Any, **options: Any) -> None:
|
def handle(self, *args: Any, **options: Any) -> None:
|
||||||
self.run_update_analytics_counts(options)
|
try:
|
||||||
|
os.mkdir(settings.ANALYTICS_LOCK_DIR)
|
||||||
|
except OSError:
|
||||||
|
print(
|
||||||
|
f"{WARNING}Analytics lock {settings.ANALYTICS_LOCK_DIR} is unavailable;"
|
||||||
|
f" exiting.{ENDC}"
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
def run_update_analytics_counts(self, options: dict[str, Any]) -> None:
|
try:
|
||||||
|
self.run_update_analytics_counts(options)
|
||||||
|
finally:
|
||||||
|
os.rmdir(settings.ANALYTICS_LOCK_DIR)
|
||||||
|
|
||||||
|
def run_update_analytics_counts(self, options: Dict[str, Any]) -> None:
|
||||||
# installation_epoch relies on there being at least one realm; we
|
# installation_epoch relies on there being at least one realm; we
|
||||||
# shouldn't run the analytics code if that condition isn't satisfied
|
# shouldn't run the analytics code if that condition isn't satisfied
|
||||||
if not Realm.objects.exists():
|
if not Realm.objects.exists():
|
||||||
@@ -62,9 +71,9 @@ class Command(ZulipBaseCommand):
|
|||||||
fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc))
|
fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc))
|
||||||
|
|
||||||
if options["stat"] is not None:
|
if options["stat"] is not None:
|
||||||
stats = [ALL_COUNT_STATS[options["stat"]]]
|
stats = [COUNT_STATS[options["stat"]]]
|
||||||
else:
|
else:
|
||||||
stats = list(ALL_COUNT_STATS.values())
|
stats = list(COUNT_STATS.values())
|
||||||
|
|
||||||
logger.info("Starting updating analytics counts through %s", fill_to_time)
|
logger.info("Starting updating analytics counts through %s", fill_to_time)
|
||||||
if options["verbose"]:
|
if options["verbose"]:
|
||||||
@@ -83,17 +92,5 @@ class Command(ZulipBaseCommand):
|
|||||||
)
|
)
|
||||||
logger.info("Finished updating analytics counts through %s", fill_to_time)
|
logger.info("Finished updating analytics counts through %s", fill_to_time)
|
||||||
|
|
||||||
if should_send_analytics_data():
|
if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS:
|
||||||
# Based on the specific value of the setting, the exact details to send
|
send_analytics_to_remote_server()
|
||||||
# will be decided. However, we proceed just based on this not being falsey.
|
|
||||||
|
|
||||||
# Skew 0-10 minutes based on a hash of settings.ZULIP_ORG_ID, so
|
|
||||||
# that each server will report in at a somewhat consistent time.
|
|
||||||
assert settings.ZULIP_ORG_ID
|
|
||||||
delay = int.from_bytes(
|
|
||||||
hashlib.sha256(settings.ZULIP_ORG_ID.encode()).digest(), byteorder="big"
|
|
||||||
) % (60 * 10)
|
|
||||||
logger.info("Sleeping %d seconds before reporting...", delay)
|
|
||||||
time.sleep(delay)
|
|
||||||
|
|
||||||
send_server_data_to_push_bouncer(consider_usage_statistics=True)
|
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("zerver", "0030_realm_org_type"),
|
("zerver", "0030_realm_org_type"),
|
||||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ from django.db import migrations
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0001_initial"),
|
("analytics", "0001_initial"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0002_remove_huddlecount"),
|
("analytics", "0002_remove_huddlecount"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0003_fillstate"),
|
("analytics", "0003_fillstate"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0004_add_subgroup"),
|
("analytics", "0004_add_subgroup"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ from django.db import migrations
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0005_alter_field_size"),
|
("analytics", "0005_alter_field_size"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ from django.db import migrations
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0006_add_subgroup_to_unique_constraints"),
|
("analytics", "0006_add_subgroup_to_unique_constraints"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,33 +1,25 @@
|
|||||||
# Generated by Django 1.10.5 on 2017-02-01 22:28
|
# Generated by Django 1.10.5 on 2017-02-01 22:28
|
||||||
from django.db import migrations, models
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("zerver", "0050_userprofile_avatar_version"),
|
("zerver", "0050_userprofile_avatar_version"),
|
||||||
("analytics", "0007_remove_interval"),
|
("analytics", "0007_remove_interval"),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AddIndex(
|
migrations.AlterIndexTogether(
|
||||||
model_name="realmcount",
|
name="realmcount",
|
||||||
index=models.Index(
|
index_together={("property", "end_time")},
|
||||||
fields=["property", "end_time"],
|
|
||||||
name="analytics_realmcount_property_end_time_3b60396b_idx",
|
|
||||||
),
|
|
||||||
),
|
),
|
||||||
migrations.AddIndex(
|
migrations.AlterIndexTogether(
|
||||||
model_name="streamcount",
|
name="streamcount",
|
||||||
index=models.Index(
|
index_together={("property", "realm", "end_time")},
|
||||||
fields=["property", "realm", "end_time"],
|
|
||||||
name="analytics_streamcount_property_realm_id_end_time_155ae930_idx",
|
|
||||||
),
|
|
||||||
),
|
),
|
||||||
migrations.AddIndex(
|
migrations.AlterIndexTogether(
|
||||||
model_name="usercount",
|
name="usercount",
|
||||||
index=models.Index(
|
index_together={("property", "realm", "end_time")},
|
||||||
fields=["property", "realm", "end_time"],
|
|
||||||
name="analytics_usercount_property_realm_id_end_time_591dbec1_idx",
|
|
||||||
),
|
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
from django.db import migrations
|
from django.db import migrations
|
||||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
|
||||||
from django.db.migrations.state import StateApps
|
from django.db.migrations.state import StateApps
|
||||||
|
|
||||||
|
|
||||||
def delete_messages_sent_to_stream_stat(
|
def delete_messages_sent_to_stream_stat(
|
||||||
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
|
apps: StateApps, schema_editor: DatabaseSchemaEditor
|
||||||
) -> None:
|
) -> None:
|
||||||
UserCount = apps.get_model("analytics", "UserCount")
|
UserCount = apps.get_model("analytics", "UserCount")
|
||||||
StreamCount = apps.get_model("analytics", "StreamCount")
|
StreamCount = apps.get_model("analytics", "StreamCount")
|
||||||
@@ -21,6 +21,7 @@ def delete_messages_sent_to_stream_stat(
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0008_add_count_indexes"),
|
("analytics", "0008_add_count_indexes"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
from django.db import migrations
|
from django.db import migrations
|
||||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
|
||||||
from django.db.migrations.state import StateApps
|
from django.db.migrations.state import StateApps
|
||||||
|
|
||||||
|
|
||||||
def clear_message_sent_by_message_type_values(
|
def clear_message_sent_by_message_type_values(
|
||||||
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
|
apps: StateApps, schema_editor: DatabaseSchemaEditor
|
||||||
) -> None:
|
) -> None:
|
||||||
UserCount = apps.get_model("analytics", "UserCount")
|
UserCount = apps.get_model("analytics", "UserCount")
|
||||||
StreamCount = apps.get_model("analytics", "StreamCount")
|
StreamCount = apps.get_model("analytics", "StreamCount")
|
||||||
@@ -21,6 +21,7 @@ def clear_message_sent_by_message_type_values(
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [("analytics", "0009_remove_messages_to_stream_stat")]
|
dependencies = [("analytics", "0009_remove_messages_to_stream_stat")]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
|
|||||||
@@ -1,9 +1,9 @@
|
|||||||
from django.db import migrations
|
from django.db import migrations
|
||||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
|
||||||
from django.db.migrations.state import StateApps
|
from django.db.migrations.state import StateApps
|
||||||
|
|
||||||
|
|
||||||
def clear_analytics_tables(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
|
def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
|
||||||
UserCount = apps.get_model("analytics", "UserCount")
|
UserCount = apps.get_model("analytics", "UserCount")
|
||||||
StreamCount = apps.get_model("analytics", "StreamCount")
|
StreamCount = apps.get_model("analytics", "StreamCount")
|
||||||
RealmCount = apps.get_model("analytics", "RealmCount")
|
RealmCount = apps.get_model("analytics", "RealmCount")
|
||||||
@@ -18,6 +18,7 @@ def clear_analytics_tables(apps: StateApps, schema_editor: BaseDatabaseSchemaEdi
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0010_clear_messages_sent_values"),
|
("analytics", "0010_clear_messages_sent_values"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0011_clear_analytics_tables"),
|
("analytics", "0011_clear_analytics_tables"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from django.db import migrations
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0012_add_on_delete"),
|
("analytics", "0012_add_on_delete"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from django.db import migrations
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0013_remove_anomaly"),
|
("analytics", "0013_remove_anomaly"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
from django.db import migrations
|
from django.db import migrations
|
||||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
|
||||||
from django.db.migrations.state import StateApps
|
from django.db.migrations.state import StateApps
|
||||||
from django.db.models import Count, Sum
|
from django.db.models import Count, Sum
|
||||||
|
|
||||||
|
|
||||||
def clear_duplicate_counts(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
|
def clear_duplicate_counts(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
|
||||||
"""This is a preparatory migration for our Analytics tables.
|
"""This is a preparatory migration for our Analytics tables.
|
||||||
|
|
||||||
The backstory is that Django's unique_together indexes do not properly
|
The backstory is that Django's unique_together indexes do not properly
|
||||||
@@ -55,6 +55,7 @@ def clear_duplicate_counts(apps: StateApps, schema_editor: BaseDatabaseSchemaEdi
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0014_remove_fillstate_last_modified"),
|
("analytics", "0014_remove_fillstate_last_modified"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0015_clear_duplicate_counts"),
|
("analytics", "0015_clear_duplicate_counts"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,114 +0,0 @@
|
|||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
dependencies = [
|
|
||||||
("analytics", "0016_unique_constraint_when_subgroup_null"),
|
|
||||||
]
|
|
||||||
|
|
||||||
# If the server was installed between 7.0 and 7.4 (or main between
|
|
||||||
# 2c20028aa451 and 7807bff52635), it contains indexes which (when
|
|
||||||
# running 7.5 or 7807bff52635 or higher) are never used, because
|
|
||||||
# they contain an improper cast
|
|
||||||
# (https://code.djangoproject.com/ticket/34840).
|
|
||||||
#
|
|
||||||
# We regenerate the indexes here, by dropping and re-creating
|
|
||||||
# them, so that we know that they are properly formed.
|
|
||||||
operations = [
|
|
||||||
migrations.RemoveConstraint(
|
|
||||||
model_name="installationcount",
|
|
||||||
name="unique_installation_count",
|
|
||||||
),
|
|
||||||
migrations.AddConstraint(
|
|
||||||
model_name="installationcount",
|
|
||||||
constraint=models.UniqueConstraint(
|
|
||||||
condition=models.Q(subgroup__isnull=False),
|
|
||||||
fields=("property", "subgroup", "end_time"),
|
|
||||||
name="unique_installation_count",
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.RemoveConstraint(
|
|
||||||
model_name="installationcount",
|
|
||||||
name="unique_installation_count_null_subgroup",
|
|
||||||
),
|
|
||||||
migrations.AddConstraint(
|
|
||||||
model_name="installationcount",
|
|
||||||
constraint=models.UniqueConstraint(
|
|
||||||
condition=models.Q(subgroup__isnull=True),
|
|
||||||
fields=("property", "end_time"),
|
|
||||||
name="unique_installation_count_null_subgroup",
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.RemoveConstraint(
|
|
||||||
model_name="realmcount",
|
|
||||||
name="unique_realm_count",
|
|
||||||
),
|
|
||||||
migrations.AddConstraint(
|
|
||||||
model_name="realmcount",
|
|
||||||
constraint=models.UniqueConstraint(
|
|
||||||
condition=models.Q(subgroup__isnull=False),
|
|
||||||
fields=("realm", "property", "subgroup", "end_time"),
|
|
||||||
name="unique_realm_count",
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.RemoveConstraint(
|
|
||||||
model_name="realmcount",
|
|
||||||
name="unique_realm_count_null_subgroup",
|
|
||||||
),
|
|
||||||
migrations.AddConstraint(
|
|
||||||
model_name="realmcount",
|
|
||||||
constraint=models.UniqueConstraint(
|
|
||||||
condition=models.Q(subgroup__isnull=True),
|
|
||||||
fields=("realm", "property", "end_time"),
|
|
||||||
name="unique_realm_count_null_subgroup",
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.RemoveConstraint(
|
|
||||||
model_name="streamcount",
|
|
||||||
name="unique_stream_count",
|
|
||||||
),
|
|
||||||
migrations.AddConstraint(
|
|
||||||
model_name="streamcount",
|
|
||||||
constraint=models.UniqueConstraint(
|
|
||||||
condition=models.Q(subgroup__isnull=False),
|
|
||||||
fields=("stream", "property", "subgroup", "end_time"),
|
|
||||||
name="unique_stream_count",
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.RemoveConstraint(
|
|
||||||
model_name="streamcount",
|
|
||||||
name="unique_stream_count_null_subgroup",
|
|
||||||
),
|
|
||||||
migrations.AddConstraint(
|
|
||||||
model_name="streamcount",
|
|
||||||
constraint=models.UniqueConstraint(
|
|
||||||
condition=models.Q(subgroup__isnull=True),
|
|
||||||
fields=("stream", "property", "end_time"),
|
|
||||||
name="unique_stream_count_null_subgroup",
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.RemoveConstraint(
|
|
||||||
model_name="usercount",
|
|
||||||
name="unique_user_count",
|
|
||||||
),
|
|
||||||
migrations.AddConstraint(
|
|
||||||
model_name="usercount",
|
|
||||||
constraint=models.UniqueConstraint(
|
|
||||||
condition=models.Q(subgroup__isnull=False),
|
|
||||||
fields=("user", "property", "subgroup", "end_time"),
|
|
||||||
name="unique_user_count",
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.RemoveConstraint(
|
|
||||||
model_name="usercount",
|
|
||||||
name="unique_user_count_null_subgroup",
|
|
||||||
),
|
|
||||||
migrations.AddConstraint(
|
|
||||||
model_name="usercount",
|
|
||||||
constraint=models.UniqueConstraint(
|
|
||||||
condition=models.Q(subgroup__isnull=True),
|
|
||||||
fields=("user", "property", "end_time"),
|
|
||||||
name="unique_user_count_null_subgroup",
|
|
||||||
),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
from django.db import migrations
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
elidable = True
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
("analytics", "0017_regenerate_partial_indexes"),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.RunSQL(
|
|
||||||
"DELETE FROM analytics_usercount WHERE property = 'active_users_audit:is_bot:day'"
|
|
||||||
)
|
|
||||||
]
|
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
from django.db import migrations
|
|
||||||
|
|
||||||
REMOVED_COUNTS = (
|
|
||||||
"active_users_log:is_bot:day",
|
|
||||||
"active_users:is_bot:day",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
elidable = True
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
("analytics", "0018_remove_usercount_active_users_audit"),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.RunSQL(
|
|
||||||
[
|
|
||||||
("DELETE FROM analytics_realmcount WHERE property IN %s", (REMOVED_COUNTS,)),
|
|
||||||
(
|
|
||||||
"DELETE FROM analytics_installationcount WHERE property IN %s",
|
|
||||||
(REMOVED_COUNTS,),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
)
|
|
||||||
]
|
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
atomic = False
|
|
||||||
|
|
||||||
dependencies = [
|
|
||||||
("analytics", "0019_remove_unused_counts"),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name="installationcount",
|
|
||||||
name="id",
|
|
||||||
field=models.BigAutoField(
|
|
||||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name="realmcount",
|
|
||||||
name="id",
|
|
||||||
field=models.BigAutoField(
|
|
||||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name="streamcount",
|
|
||||||
name="id",
|
|
||||||
field=models.BigAutoField(
|
|
||||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
|
||||||
),
|
|
||||||
),
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name="usercount",
|
|
||||||
name="id",
|
|
||||||
field=models.BigAutoField(
|
|
||||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
|
||||||
),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
from django.db import migrations, models
|
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
|
||||||
dependencies = [
|
|
||||||
("analytics", "0020_alter_installationcount_id_alter_realmcount_id_and_more"),
|
|
||||||
]
|
|
||||||
|
|
||||||
operations = [
|
|
||||||
migrations.AlterField(
|
|
||||||
model_name="fillstate",
|
|
||||||
name="id",
|
|
||||||
field=models.BigAutoField(
|
|
||||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
|
||||||
),
|
|
||||||
),
|
|
||||||
]
|
|
||||||
@@ -1,33 +1,29 @@
|
|||||||
# https://github.com/typeddjango/django-stubs/issues/1698
|
import datetime
|
||||||
# mypy: disable-error-code="explicit-override"
|
from typing import Optional
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from django.db import models
|
from django.db import models
|
||||||
from django.db.models import Q, UniqueConstraint
|
from django.db.models import Q, UniqueConstraint
|
||||||
from typing_extensions import override
|
|
||||||
|
|
||||||
from zerver.lib.timestamp import floor_to_day
|
from zerver.lib.timestamp import floor_to_day
|
||||||
from zerver.models import Realm, Stream, UserProfile
|
from zerver.models import Realm, Stream, UserProfile
|
||||||
|
|
||||||
|
|
||||||
class FillState(models.Model):
|
class FillState(models.Model):
|
||||||
property = models.CharField(max_length=40, unique=True)
|
property: str = models.CharField(max_length=40, unique=True)
|
||||||
end_time = models.DateTimeField()
|
end_time: datetime.datetime = models.DateTimeField()
|
||||||
|
|
||||||
# Valid states are {DONE, STARTED}
|
# Valid states are {DONE, STARTED}
|
||||||
DONE = 1
|
DONE = 1
|
||||||
STARTED = 2
|
STARTED = 2
|
||||||
state = models.PositiveSmallIntegerField()
|
state: int = models.PositiveSmallIntegerField()
|
||||||
|
|
||||||
@override
|
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
return f"{self.property} {self.end_time} {self.state}"
|
return f"<FillState: {self.property} {self.end_time} {self.state}>"
|
||||||
|
|
||||||
|
|
||||||
# The earliest/starting end_time in FillState
|
# The earliest/starting end_time in FillState
|
||||||
# We assume there is at least one realm
|
# We assume there is at least one realm
|
||||||
def installation_epoch() -> datetime:
|
def installation_epoch() -> datetime.datetime:
|
||||||
earliest_realm_creation = Realm.objects.aggregate(models.Min("date_created"))[
|
earliest_realm_creation = Realm.objects.aggregate(models.Min("date_created"))[
|
||||||
"date_created__min"
|
"date_created__min"
|
||||||
]
|
]
|
||||||
@@ -38,10 +34,10 @@ class BaseCount(models.Model):
|
|||||||
# Note: When inheriting from BaseCount, you may want to rearrange
|
# Note: When inheriting from BaseCount, you may want to rearrange
|
||||||
# the order of the columns in the migration to make sure they
|
# the order of the columns in the migration to make sure they
|
||||||
# match how you'd like the table to be arranged.
|
# match how you'd like the table to be arranged.
|
||||||
property = models.CharField(max_length=32)
|
property: str = models.CharField(max_length=32)
|
||||||
subgroup = models.CharField(max_length=16, null=True)
|
subgroup: Optional[str] = models.CharField(max_length=16, null=True)
|
||||||
end_time = models.DateTimeField()
|
end_time: datetime.datetime = models.DateTimeField()
|
||||||
value = models.BigIntegerField()
|
value: int = models.BigIntegerField()
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
abstract = True
|
abstract = True
|
||||||
@@ -63,9 +59,8 @@ class InstallationCount(BaseCount):
|
|||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
@override
|
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
return f"{self.property} {self.subgroup} {self.value}"
|
return f"<InstallationCount: {self.property} {self.subgroup} {self.value}>"
|
||||||
|
|
||||||
|
|
||||||
class RealmCount(BaseCount):
|
class RealmCount(BaseCount):
|
||||||
@@ -85,16 +80,10 @@ class RealmCount(BaseCount):
|
|||||||
name="unique_realm_count_null_subgroup",
|
name="unique_realm_count_null_subgroup",
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
indexes = [
|
index_together = ["property", "end_time"]
|
||||||
models.Index(
|
|
||||||
fields=["property", "end_time"],
|
|
||||||
name="analytics_realmcount_property_end_time_3b60396b_idx",
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
@override
|
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
return f"{self.realm!r} {self.property} {self.subgroup} {self.value}"
|
return f"<RealmCount: {self.realm} {self.property} {self.subgroup} {self.value}>"
|
||||||
|
|
||||||
|
|
||||||
class UserCount(BaseCount):
|
class UserCount(BaseCount):
|
||||||
@@ -117,16 +106,10 @@ class UserCount(BaseCount):
|
|||||||
]
|
]
|
||||||
# This index dramatically improves the performance of
|
# This index dramatically improves the performance of
|
||||||
# aggregating from users to realms
|
# aggregating from users to realms
|
||||||
indexes = [
|
index_together = ["property", "realm", "end_time"]
|
||||||
models.Index(
|
|
||||||
fields=["property", "realm", "end_time"],
|
|
||||||
name="analytics_usercount_property_realm_id_end_time_591dbec1_idx",
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
@override
|
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
return f"{self.user!r} {self.property} {self.subgroup} {self.value}"
|
return f"<UserCount: {self.user} {self.property} {self.subgroup} {self.value}>"
|
||||||
|
|
||||||
|
|
||||||
class StreamCount(BaseCount):
|
class StreamCount(BaseCount):
|
||||||
@@ -149,13 +132,9 @@ class StreamCount(BaseCount):
|
|||||||
]
|
]
|
||||||
# This index dramatically improves the performance of
|
# This index dramatically improves the performance of
|
||||||
# aggregating from streams to realms
|
# aggregating from streams to realms
|
||||||
indexes = [
|
index_together = ["property", "realm", "end_time"]
|
||||||
models.Index(
|
|
||||||
fields=["property", "realm", "end_time"],
|
|
||||||
name="analytics_streamcount_property_realm_id_end_time_155ae930_idx",
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
@override
|
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
return f"{self.stream!r} {self.property} {self.subgroup} {self.value} {self.id}"
|
return (
|
||||||
|
f"<StreamCount: {self.stream} {self.property} {self.subgroup} {self.value} {self.id}>"
|
||||||
|
)
|
||||||
|
|||||||
55
analytics/tests/test_activity_views.py
Normal file
55
analytics/tests/test_activity_views.py
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
from unittest import mock
|
||||||
|
|
||||||
|
from django.utils.timezone import now as timezone_now
|
||||||
|
|
||||||
|
from zerver.lib.test_classes import ZulipTestCase
|
||||||
|
from zerver.lib.test_helpers import queries_captured
|
||||||
|
from zerver.models import Client, UserActivity, UserProfile, flush_per_request_caches
|
||||||
|
|
||||||
|
|
||||||
|
class ActivityTest(ZulipTestCase):
|
||||||
|
@mock.patch("stripe.Customer.list", return_value=[])
|
||||||
|
def test_activity(self, unused_mock: mock.Mock) -> None:
|
||||||
|
self.login("hamlet")
|
||||||
|
client, _ = Client.objects.get_or_create(name="website")
|
||||||
|
query = "/json/messages/flags"
|
||||||
|
last_visit = timezone_now()
|
||||||
|
count = 150
|
||||||
|
for activity_user_profile in UserProfile.objects.all():
|
||||||
|
UserActivity.objects.get_or_create(
|
||||||
|
user_profile=activity_user_profile,
|
||||||
|
client=client,
|
||||||
|
query=query,
|
||||||
|
count=count,
|
||||||
|
last_visit=last_visit,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Fails when not staff
|
||||||
|
result = self.client_get("/activity")
|
||||||
|
self.assertEqual(result.status_code, 302)
|
||||||
|
|
||||||
|
user_profile = self.example_user("hamlet")
|
||||||
|
user_profile.is_staff = True
|
||||||
|
user_profile.save(update_fields=["is_staff"])
|
||||||
|
|
||||||
|
flush_per_request_caches()
|
||||||
|
with queries_captured() as queries:
|
||||||
|
result = self.client_get("/activity")
|
||||||
|
self.assertEqual(result.status_code, 200)
|
||||||
|
|
||||||
|
self.assert_length(queries, 19)
|
||||||
|
|
||||||
|
flush_per_request_caches()
|
||||||
|
with queries_captured() as queries:
|
||||||
|
result = self.client_get("/realm_activity/zulip/")
|
||||||
|
self.assertEqual(result.status_code, 200)
|
||||||
|
|
||||||
|
self.assert_length(queries, 8)
|
||||||
|
|
||||||
|
iago = self.example_user("iago")
|
||||||
|
flush_per_request_caches()
|
||||||
|
with queries_captured() as queries:
|
||||||
|
result = self.client_get(f"/user_activity/{iago.id}/")
|
||||||
|
self.assertEqual(result.status_code, 200)
|
||||||
|
|
||||||
|
self.assert_length(queries, 5)
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,16 +1,15 @@
|
|||||||
from datetime import datetime, timedelta, timezone
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
from django.utils.timezone import now as timezone_now
|
from django.utils.timezone import now as timezone_now
|
||||||
from typing_extensions import override
|
|
||||||
|
|
||||||
from analytics.lib.counts import COUNT_STATS, CountStat
|
from analytics.lib.counts import COUNT_STATS, CountStat
|
||||||
from analytics.lib.time_utils import time_range
|
from analytics.lib.time_utils import time_range
|
||||||
from analytics.models import FillState, RealmCount, StreamCount, UserCount
|
from analytics.models import FillState, RealmCount, UserCount
|
||||||
from analytics.views.stats import rewrite_client_arrays, sort_by_totals, sort_client_labels
|
from analytics.views.stats import rewrite_client_arrays, sort_by_totals, sort_client_labels
|
||||||
from zerver.lib.test_classes import ZulipTestCase
|
from zerver.lib.test_classes import ZulipTestCase
|
||||||
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, datetime_to_timestamp
|
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, datetime_to_timestamp
|
||||||
from zerver.models import Client
|
from zerver.models import Client, get_realm
|
||||||
from zerver.models.realms import get_realm
|
|
||||||
|
|
||||||
|
|
||||||
class TestStatsEndpoint(ZulipTestCase):
|
class TestStatsEndpoint(ZulipTestCase):
|
||||||
@@ -69,12 +68,10 @@ class TestStatsEndpoint(ZulipTestCase):
|
|||||||
|
|
||||||
|
|
||||||
class TestGetChartData(ZulipTestCase):
|
class TestGetChartData(ZulipTestCase):
|
||||||
@override
|
|
||||||
def setUp(self) -> None:
|
def setUp(self) -> None:
|
||||||
super().setUp()
|
super().setUp()
|
||||||
self.realm = get_realm("zulip")
|
self.realm = get_realm("zulip")
|
||||||
self.user = self.example_user("hamlet")
|
self.user = self.example_user("hamlet")
|
||||||
self.stream_id = self.get_stream_id(self.get_streams(self.user)[0])
|
|
||||||
self.login_user(self.user)
|
self.login_user(self.user)
|
||||||
self.end_times_hour = [
|
self.end_times_hour = [
|
||||||
ceiling_to_hour(self.realm.date_created) + timedelta(hours=i) for i in range(4)
|
ceiling_to_hour(self.realm.date_created) + timedelta(hours=i) for i in range(4)
|
||||||
@@ -83,11 +80,11 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(4)
|
ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(4)
|
||||||
]
|
]
|
||||||
|
|
||||||
def data(self, i: int) -> list[int]:
|
def data(self, i: int) -> List[int]:
|
||||||
return [0, 0, i, 0]
|
return [0, 0, i, 0]
|
||||||
|
|
||||||
def insert_data(
|
def insert_data(
|
||||||
self, stat: CountStat, realm_subgroups: list[str | None], user_subgroups: list[str]
|
self, stat: CountStat, realm_subgroups: List[Optional[str]], user_subgroups: List[str]
|
||||||
) -> None:
|
) -> None:
|
||||||
if stat.frequency == CountStat.HOUR:
|
if stat.frequency == CountStat.HOUR:
|
||||||
insert_time = self.end_times_hour[2]
|
insert_time = self.end_times_hour[2]
|
||||||
@@ -117,17 +114,6 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
)
|
)
|
||||||
for i, subgroup in enumerate(user_subgroups)
|
for i, subgroup in enumerate(user_subgroups)
|
||||||
)
|
)
|
||||||
StreamCount.objects.bulk_create(
|
|
||||||
StreamCount(
|
|
||||||
property=stat.property,
|
|
||||||
subgroup=subgroup,
|
|
||||||
end_time=insert_time,
|
|
||||||
value=100 + i,
|
|
||||||
stream_id=self.stream_id,
|
|
||||||
realm=self.realm,
|
|
||||||
)
|
|
||||||
for i, subgroup in enumerate(realm_subgroups)
|
|
||||||
)
|
|
||||||
FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE)
|
FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE)
|
||||||
|
|
||||||
def test_number_of_humans(self) -> None:
|
def test_number_of_humans(self) -> None:
|
||||||
@@ -138,7 +124,8 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
stat = COUNT_STATS["active_users_audit:is_bot:day"]
|
stat = COUNT_STATS["active_users_audit:is_bot:day"]
|
||||||
self.insert_data(stat, ["false"], [])
|
self.insert_data(stat, ["false"], [])
|
||||||
result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
|
result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
|
||||||
data = self.assert_json_success(result)
|
self.assert_json_success(result)
|
||||||
|
data = result.json()
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
data,
|
data,
|
||||||
{
|
{
|
||||||
@@ -161,7 +148,8 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
result = self.client_get(
|
result = self.client_get(
|
||||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||||
)
|
)
|
||||||
data = self.assert_json_success(result)
|
self.assert_json_success(result)
|
||||||
|
data = result.json()
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
data,
|
data,
|
||||||
{
|
{
|
||||||
@@ -183,7 +171,8 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
result = self.client_get(
|
result = self.client_get(
|
||||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
|
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
|
||||||
)
|
)
|
||||||
data = self.assert_json_success(result)
|
self.assert_json_success(result)
|
||||||
|
data = result.json()
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
data,
|
data,
|
||||||
{
|
{
|
||||||
@@ -191,22 +180,22 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
||||||
"frequency": CountStat.DAY,
|
"frequency": CountStat.DAY,
|
||||||
"everyone": {
|
"everyone": {
|
||||||
"Public channels": self.data(100),
|
"Public streams": self.data(100),
|
||||||
"Private channels": self.data(0),
|
"Private streams": self.data(0),
|
||||||
"Direct messages": self.data(101),
|
"Private messages": self.data(101),
|
||||||
"Group direct messages": self.data(0),
|
"Group private messages": self.data(0),
|
||||||
},
|
},
|
||||||
"user": {
|
"user": {
|
||||||
"Public channels": self.data(200),
|
"Public streams": self.data(200),
|
||||||
"Private channels": self.data(201),
|
"Private streams": self.data(201),
|
||||||
"Direct messages": self.data(0),
|
"Private messages": self.data(0),
|
||||||
"Group direct messages": self.data(0),
|
"Group private messages": self.data(0),
|
||||||
},
|
},
|
||||||
"display_order": [
|
"display_order": [
|
||||||
"Direct messages",
|
"Private messages",
|
||||||
"Public channels",
|
"Public streams",
|
||||||
"Private channels",
|
"Private streams",
|
||||||
"Group direct messages",
|
"Group private messages",
|
||||||
],
|
],
|
||||||
"result": "success",
|
"result": "success",
|
||||||
},
|
},
|
||||||
@@ -226,7 +215,8 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
result = self.client_get(
|
result = self.client_get(
|
||||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"}
|
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"}
|
||||||
)
|
)
|
||||||
data = self.assert_json_success(result)
|
self.assert_json_success(result)
|
||||||
|
data = result.json()
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
data,
|
data,
|
||||||
{
|
{
|
||||||
@@ -250,7 +240,8 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
result = self.client_get(
|
result = self.client_get(
|
||||||
"/json/analytics/chart_data", {"chart_name": "messages_read_over_time"}
|
"/json/analytics/chart_data", {"chart_name": "messages_read_over_time"}
|
||||||
)
|
)
|
||||||
data = self.assert_json_success(result)
|
self.assert_json_success(result)
|
||||||
|
data = result.json()
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
data,
|
data,
|
||||||
{
|
{
|
||||||
@@ -264,49 +255,6 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_messages_sent_by_stream(self) -> None:
|
|
||||||
stat = COUNT_STATS["messages_in_stream:is_bot:day"]
|
|
||||||
self.insert_data(stat, ["true", "false"], [])
|
|
||||||
|
|
||||||
result = self.client_get(
|
|
||||||
f"/json/analytics/chart_data/stream/{self.stream_id}",
|
|
||||||
{
|
|
||||||
"chart_name": "messages_sent_by_stream",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
data = self.assert_json_success(result)
|
|
||||||
self.assertEqual(
|
|
||||||
data,
|
|
||||||
{
|
|
||||||
"msg": "",
|
|
||||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
|
||||||
"frequency": CountStat.DAY,
|
|
||||||
"everyone": {"bot": self.data(100), "human": self.data(101)},
|
|
||||||
"display_order": None,
|
|
||||||
"result": "success",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
result = self.api_get(
|
|
||||||
self.example_user("polonius"),
|
|
||||||
f"/api/v1/analytics/chart_data/stream/{self.stream_id}",
|
|
||||||
{
|
|
||||||
"chart_name": "messages_sent_by_stream",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
self.assert_json_error(result, "Not allowed for guest users")
|
|
||||||
|
|
||||||
# Verify we correctly forbid access to stats of streams in other realms.
|
|
||||||
result = self.api_get(
|
|
||||||
self.mit_user("sipbtest"),
|
|
||||||
f"/api/v1/analytics/chart_data/stream/{self.stream_id}",
|
|
||||||
{
|
|
||||||
"chart_name": "messages_sent_by_stream",
|
|
||||||
},
|
|
||||||
subdomain="zephyr",
|
|
||||||
)
|
|
||||||
self.assert_json_error(result, "Invalid channel ID")
|
|
||||||
|
|
||||||
def test_include_empty_subgroups(self) -> None:
|
def test_include_empty_subgroups(self) -> None:
|
||||||
FillState.objects.create(
|
FillState.objects.create(
|
||||||
property="realm_active_humans::day",
|
property="realm_active_humans::day",
|
||||||
@@ -314,7 +262,8 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
state=FillState.DONE,
|
state=FillState.DONE,
|
||||||
)
|
)
|
||||||
result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
|
result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
|
||||||
data = self.assert_json_success(result)
|
self.assert_json_success(result)
|
||||||
|
data = result.json()
|
||||||
self.assertEqual(data["everyone"], {"_1day": [0], "_15day": [0], "all_time": [0]})
|
self.assertEqual(data["everyone"], {"_1day": [0], "_15day": [0], "all_time": [0]})
|
||||||
self.assertFalse("user" in data)
|
self.assertFalse("user" in data)
|
||||||
|
|
||||||
@@ -326,7 +275,8 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
result = self.client_get(
|
result = self.client_get(
|
||||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||||
)
|
)
|
||||||
data = self.assert_json_success(result)
|
self.assert_json_success(result)
|
||||||
|
data = result.json()
|
||||||
self.assertEqual(data["everyone"], {"human": [0], "bot": [0]})
|
self.assertEqual(data["everyone"], {"human": [0], "bot": [0]})
|
||||||
self.assertEqual(data["user"], {"human": [0], "bot": [0]})
|
self.assertEqual(data["user"], {"human": [0], "bot": [0]})
|
||||||
|
|
||||||
@@ -338,23 +288,24 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
result = self.client_get(
|
result = self.client_get(
|
||||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
|
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
|
||||||
)
|
)
|
||||||
data = self.assert_json_success(result)
|
self.assert_json_success(result)
|
||||||
|
data = result.json()
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
data["everyone"],
|
data["everyone"],
|
||||||
{
|
{
|
||||||
"Public channels": [0],
|
"Public streams": [0],
|
||||||
"Private channels": [0],
|
"Private streams": [0],
|
||||||
"Direct messages": [0],
|
"Private messages": [0],
|
||||||
"Group direct messages": [0],
|
"Group private messages": [0],
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
data["user"],
|
data["user"],
|
||||||
{
|
{
|
||||||
"Public channels": [0],
|
"Public streams": [0],
|
||||||
"Private channels": [0],
|
"Private streams": [0],
|
||||||
"Direct messages": [0],
|
"Private messages": [0],
|
||||||
"Group direct messages": [0],
|
"Group private messages": [0],
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -366,7 +317,8 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
result = self.client_get(
|
result = self.client_get(
|
||||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"}
|
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"}
|
||||||
)
|
)
|
||||||
data = self.assert_json_success(result)
|
self.assert_json_success(result)
|
||||||
|
data = result.json()
|
||||||
self.assertEqual(data["everyone"], {})
|
self.assertEqual(data["everyone"], {})
|
||||||
self.assertEqual(data["user"], {})
|
self.assertEqual(data["user"], {})
|
||||||
|
|
||||||
@@ -388,7 +340,8 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
"end": end_time_timestamps[2],
|
"end": end_time_timestamps[2],
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
data = self.assert_json_success(result)
|
self.assert_json_success(result)
|
||||||
|
data = result.json()
|
||||||
self.assertEqual(data["end_times"], end_time_timestamps[1:3])
|
self.assertEqual(data["end_times"], end_time_timestamps[1:3])
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
data["everyone"], {"_1day": [0, 100], "_15day": [0, 100], "all_time": [0, 100]}
|
data["everyone"], {"_1day": [0, 100], "_15day": [0, 100], "all_time": [0, 100]}
|
||||||
@@ -416,7 +369,8 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
result = self.client_get(
|
result = self.client_get(
|
||||||
"/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 2}
|
"/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 2}
|
||||||
)
|
)
|
||||||
data = self.assert_json_success(result)
|
self.assert_json_success(result)
|
||||||
|
data = result.json()
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
data["end_times"], [datetime_to_timestamp(dt) for dt in self.end_times_day]
|
data["end_times"], [datetime_to_timestamp(dt) for dt in self.end_times_day]
|
||||||
)
|
)
|
||||||
@@ -428,7 +382,8 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
result = self.client_get(
|
result = self.client_get(
|
||||||
"/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 5}
|
"/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 5}
|
||||||
)
|
)
|
||||||
data = self.assert_json_success(result)
|
self.assert_json_success(result)
|
||||||
|
data = result.json()
|
||||||
end_times = [
|
end_times = [
|
||||||
ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)
|
ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)
|
||||||
]
|
]
|
||||||
@@ -604,7 +559,7 @@ class TestGetChartData(ZulipTestCase):
|
|||||||
|
|
||||||
class TestGetChartDataHelpers(ZulipTestCase):
|
class TestGetChartDataHelpers(ZulipTestCase):
|
||||||
def test_sort_by_totals(self) -> None:
|
def test_sort_by_totals(self) -> None:
|
||||||
empty: list[int] = []
|
empty: List[int] = []
|
||||||
value_arrays = {"c": [0, 1], "a": [9], "b": [1, 1, 1], "d": empty}
|
value_arrays = {"c": [0, 1], "a": [9], "b": [1, 1, 1], "d": empty}
|
||||||
self.assertEqual(sort_by_totals(value_arrays), ["a", "b", "c", "d"])
|
self.assertEqual(sort_by_totals(value_arrays), ["a", "b", "c", "d"])
|
||||||
|
|
||||||
@@ -660,15 +615,12 @@ class TestMapArrays(ZulipTestCase):
|
|||||||
"website": [1, 2, 3],
|
"website": [1, 2, 3],
|
||||||
"ZulipiOS": [1, 2, 3],
|
"ZulipiOS": [1, 2, 3],
|
||||||
"ZulipElectron": [2, 5, 7],
|
"ZulipElectron": [2, 5, 7],
|
||||||
"ZulipMobile": [1, 2, 3],
|
"ZulipMobile": [1, 5, 7],
|
||||||
"ZulipMobile/flutter": [1, 1, 1],
|
|
||||||
"ZulipFlutter": [1, 1, 1],
|
|
||||||
"ZulipPython": [1, 2, 3],
|
"ZulipPython": [1, 2, 3],
|
||||||
"API: Python": [1, 2, 3],
|
"API: Python": [1, 2, 3],
|
||||||
"SomethingRandom": [4, 5, 6],
|
"SomethingRandom": [4, 5, 6],
|
||||||
"ZulipGitHubWebhook": [7, 7, 9],
|
"ZulipGitHubWebhook": [7, 7, 9],
|
||||||
"ZulipAndroid": [64, 63, 65],
|
"ZulipAndroid": [64, 63, 65],
|
||||||
"ZulipTerminal": [9, 10, 11],
|
|
||||||
}
|
}
|
||||||
result = rewrite_client_arrays(a)
|
result = rewrite_client_arrays(a)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
@@ -677,13 +629,11 @@ class TestMapArrays(ZulipTestCase):
|
|||||||
"Old desktop app": [32, 36, 39],
|
"Old desktop app": [32, 36, 39],
|
||||||
"Old iOS app": [1, 2, 3],
|
"Old iOS app": [1, 2, 3],
|
||||||
"Desktop app": [2, 5, 7],
|
"Desktop app": [2, 5, 7],
|
||||||
"Mobile app (React Native)": [1, 2, 3],
|
"Mobile app": [1, 5, 7],
|
||||||
"Mobile app beta (Flutter)": [2, 2, 2],
|
"Website": [1, 2, 3],
|
||||||
"Web app": [1, 2, 3],
|
|
||||||
"Python API": [2, 4, 6],
|
"Python API": [2, 4, 6],
|
||||||
"SomethingRandom": [4, 5, 6],
|
"SomethingRandom": [4, 5, 6],
|
||||||
"GitHub webhook": [7, 7, 9],
|
"GitHub webhook": [7, 7, 9],
|
||||||
"Old Android app": [64, 63, 65],
|
"Old Android app": [64, 63, 65],
|
||||||
"Terminal app": [9, 10, 11],
|
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|||||||
629
analytics/tests/test_support_views.py
Normal file
629
analytics/tests/test_support_views.py
Normal file
@@ -0,0 +1,629 @@
|
|||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from unittest import mock
|
||||||
|
|
||||||
|
import orjson
|
||||||
|
from django.http import HttpResponse
|
||||||
|
from django.utils.timezone import now as timezone_now
|
||||||
|
|
||||||
|
from corporate.lib.stripe import add_months, update_sponsorship_status
|
||||||
|
from corporate.models import Customer, CustomerPlan, LicenseLedger, get_customer_by_realm
|
||||||
|
from zerver.actions.invites import do_create_multiuse_invite_link
|
||||||
|
from zerver.actions.realm_settings import do_send_realm_reactivation_email, do_set_realm_property
|
||||||
|
from zerver.lib.test_classes import ZulipTestCase
|
||||||
|
from zerver.lib.test_helpers import reset_emails_in_zulip_realm
|
||||||
|
from zerver.models import (
|
||||||
|
MultiuseInvite,
|
||||||
|
PreregistrationUser,
|
||||||
|
Realm,
|
||||||
|
UserMessage,
|
||||||
|
UserProfile,
|
||||||
|
get_org_type_display_name,
|
||||||
|
get_realm,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestSupportEndpoint(ZulipTestCase):
|
||||||
|
def test_search(self) -> None:
|
||||||
|
reset_emails_in_zulip_realm()
|
||||||
|
|
||||||
|
def assert_user_details_in_html_response(
|
||||||
|
html_response: HttpResponse, full_name: str, email: str, role: str
|
||||||
|
) -> None:
|
||||||
|
self.assert_in_success_response(
|
||||||
|
[
|
||||||
|
'<span class="label">user</span>\n',
|
||||||
|
f"<h3>{full_name}</h3>",
|
||||||
|
f"<b>Email</b>: {email}",
|
||||||
|
"<b>Is active</b>: True<br />",
|
||||||
|
f"<b>Role</b>: {role}<br />",
|
||||||
|
],
|
||||||
|
html_response,
|
||||||
|
)
|
||||||
|
|
||||||
|
def check_hamlet_user_query_result(result: HttpResponse) -> None:
|
||||||
|
assert_user_details_in_html_response(
|
||||||
|
result, "King Hamlet", self.example_email("hamlet"), "Member"
|
||||||
|
)
|
||||||
|
self.assert_in_success_response(
|
||||||
|
[
|
||||||
|
f"<b>Admins</b>: {self.example_email('iago')}\n",
|
||||||
|
f"<b>Owners</b>: {self.example_email('desdemona')}\n",
|
||||||
|
'class="copy-button" data-copytext="{}">'.format(self.example_email("iago")),
|
||||||
|
'class="copy-button" data-copytext="{}">'.format(
|
||||||
|
self.example_email("desdemona")
|
||||||
|
),
|
||||||
|
],
|
||||||
|
result,
|
||||||
|
)
|
||||||
|
|
||||||
|
def check_othello_user_query_result(result: HttpResponse) -> None:
|
||||||
|
assert_user_details_in_html_response(
|
||||||
|
result, "Othello, the Moor of Venice", self.example_email("othello"), "Member"
|
||||||
|
)
|
||||||
|
|
||||||
|
def check_polonius_user_query_result(result: HttpResponse) -> None:
|
||||||
|
assert_user_details_in_html_response(
|
||||||
|
result, "Polonius", self.example_email("polonius"), "Guest"
|
||||||
|
)
|
||||||
|
|
||||||
|
def check_zulip_realm_query_result(result: HttpResponse) -> None:
|
||||||
|
zulip_realm = get_realm("zulip")
|
||||||
|
first_human_user = zulip_realm.get_first_human_user()
|
||||||
|
assert first_human_user is not None
|
||||||
|
self.assert_in_success_response(
|
||||||
|
[
|
||||||
|
f"<b>First human user</b>: {first_human_user.delivery_email}\n",
|
||||||
|
f'<input type="hidden" name="realm_id" value="{zulip_realm.id}"',
|
||||||
|
"Zulip Dev</h3>",
|
||||||
|
'<option value="1" selected>Self-hosted</option>',
|
||||||
|
'<option value="2" >Limited</option>',
|
||||||
|
'input type="number" name="discount" value="None"',
|
||||||
|
'<option value="active" selected>Active</option>',
|
||||||
|
'<option value="deactivated" >Deactivated</option>',
|
||||||
|
f'<option value="{zulip_realm.org_type}" selected>',
|
||||||
|
'scrub-realm-button">',
|
||||||
|
'data-string-id="zulip"',
|
||||||
|
],
|
||||||
|
result,
|
||||||
|
)
|
||||||
|
|
||||||
|
def check_lear_realm_query_result(result: HttpResponse) -> None:
|
||||||
|
lear_realm = get_realm("lear")
|
||||||
|
self.assert_in_success_response(
|
||||||
|
[
|
||||||
|
f'<input type="hidden" name="realm_id" value="{lear_realm.id}"',
|
||||||
|
"Lear & Co.</h3>",
|
||||||
|
'<option value="1" selected>Self-hosted</option>',
|
||||||
|
'<option value="2" >Limited</option>',
|
||||||
|
'input type="number" name="discount" value="None"',
|
||||||
|
'<option value="active" selected>Active</option>',
|
||||||
|
'<option value="deactivated" >Deactivated</option>',
|
||||||
|
'scrub-realm-button">',
|
||||||
|
'data-string-id="lear"',
|
||||||
|
"<b>Name</b>: Zulip Cloud Standard",
|
||||||
|
"<b>Status</b>: Active",
|
||||||
|
"<b>Billing schedule</b>: Annual",
|
||||||
|
"<b>Licenses</b>: 2/10 (Manual)",
|
||||||
|
"<b>Price per license</b>: $80.0",
|
||||||
|
"<b>Next invoice date</b>: 02 January 2017",
|
||||||
|
'<option value="send_invoice" selected>',
|
||||||
|
'<option value="charge_automatically" >',
|
||||||
|
],
|
||||||
|
result,
|
||||||
|
)
|
||||||
|
|
||||||
|
def check_preregistration_user_query_result(
|
||||||
|
result: HttpResponse, email: str, invite: bool = False
|
||||||
|
) -> None:
|
||||||
|
self.assert_in_success_response(
|
||||||
|
[
|
||||||
|
'<span class="label">preregistration user</span>\n',
|
||||||
|
f"<b>Email</b>: {email}",
|
||||||
|
],
|
||||||
|
result,
|
||||||
|
)
|
||||||
|
if invite:
|
||||||
|
self.assert_in_success_response(['<span class="label">invite</span>'], result)
|
||||||
|
self.assert_in_success_response(
|
||||||
|
[
|
||||||
|
"<b>Expires in</b>: 1\xa0week, 3\xa0days",
|
||||||
|
"<b>Status</b>: Link has never been clicked",
|
||||||
|
],
|
||||||
|
result,
|
||||||
|
)
|
||||||
|
self.assert_in_success_response([], result)
|
||||||
|
else:
|
||||||
|
self.assert_not_in_success_response(['<span class="label">invite</span>'], result)
|
||||||
|
self.assert_in_success_response(
|
||||||
|
[
|
||||||
|
"<b>Expires in</b>: 1\xa0day",
|
||||||
|
"<b>Status</b>: Link has never been clicked",
|
||||||
|
],
|
||||||
|
result,
|
||||||
|
)
|
||||||
|
|
||||||
|
def check_realm_creation_query_result(result: HttpResponse, email: str) -> None:
|
||||||
|
self.assert_in_success_response(
|
||||||
|
[
|
||||||
|
'<span class="label">preregistration user</span>\n',
|
||||||
|
'<span class="label">realm creation</span>\n',
|
||||||
|
"<b>Link</b>: http://testserver/accounts/do_confirm/",
|
||||||
|
"<b>Expires in</b>: 1\xa0day",
|
||||||
|
],
|
||||||
|
result,
|
||||||
|
)
|
||||||
|
|
||||||
|
def check_multiuse_invite_link_query_result(result: HttpResponse) -> None:
|
||||||
|
self.assert_in_success_response(
|
||||||
|
[
|
||||||
|
'<span class="label">multiuse invite</span>\n',
|
||||||
|
"<b>Link</b>: http://zulip.testserver/join/",
|
||||||
|
"<b>Expires in</b>: 1\xa0week, 3\xa0days",
|
||||||
|
],
|
||||||
|
result,
|
||||||
|
)
|
||||||
|
|
||||||
|
def check_realm_reactivation_link_query_result(result: HttpResponse) -> None:
|
||||||
|
self.assert_in_success_response(
|
||||||
|
[
|
||||||
|
'<span class="label">realm reactivation</span>\n',
|
||||||
|
"<b>Link</b>: http://zulip.testserver/reactivate/",
|
||||||
|
"<b>Expires in</b>: 1\xa0day",
|
||||||
|
],
|
||||||
|
result,
|
||||||
|
)
|
||||||
|
|
||||||
|
self.login("cordelia")
|
||||||
|
|
||||||
|
result = self.client_get("/activity/support")
|
||||||
|
self.assertEqual(result.status_code, 302)
|
||||||
|
self.assertEqual(result["Location"], "/login/")
|
||||||
|
|
||||||
|
self.login("iago")
|
||||||
|
|
||||||
|
do_set_realm_property(
|
||||||
|
get_realm("zulip"),
|
||||||
|
"email_address_visibility",
|
||||||
|
Realm.EMAIL_ADDRESS_VISIBILITY_NOBODY,
|
||||||
|
acting_user=None,
|
||||||
|
)
|
||||||
|
|
||||||
|
customer = Customer.objects.create(realm=get_realm("lear"), stripe_customer_id="cus_123")
|
||||||
|
now = datetime(2016, 1, 2, tzinfo=timezone.utc)
|
||||||
|
plan = CustomerPlan.objects.create(
|
||||||
|
customer=customer,
|
||||||
|
billing_cycle_anchor=now,
|
||||||
|
billing_schedule=CustomerPlan.ANNUAL,
|
||||||
|
tier=CustomerPlan.STANDARD,
|
||||||
|
price_per_license=8000,
|
||||||
|
next_invoice_date=add_months(now, 12),
|
||||||
|
)
|
||||||
|
LicenseLedger.objects.create(
|
||||||
|
licenses=10,
|
||||||
|
licenses_at_next_renewal=10,
|
||||||
|
event_time=timezone_now(),
|
||||||
|
is_renewal=True,
|
||||||
|
plan=plan,
|
||||||
|
)
|
||||||
|
|
||||||
|
result = self.client_get("/activity/support")
|
||||||
|
self.assert_in_success_response(
|
||||||
|
['<input type="text" name="q" class="input-xxlarge search-query"'], result
|
||||||
|
)
|
||||||
|
|
||||||
|
result = self.client_get("/activity/support", {"q": self.example_email("hamlet")})
|
||||||
|
check_hamlet_user_query_result(result)
|
||||||
|
check_zulip_realm_query_result(result)
|
||||||
|
|
||||||
|
result = self.client_get("/activity/support", {"q": self.example_email("polonius")})
|
||||||
|
check_polonius_user_query_result(result)
|
||||||
|
check_zulip_realm_query_result(result)
|
||||||
|
|
||||||
|
result = self.client_get("/activity/support", {"q": "lear"})
|
||||||
|
check_lear_realm_query_result(result)
|
||||||
|
|
||||||
|
result = self.client_get("/activity/support", {"q": "http://lear.testserver"})
|
||||||
|
check_lear_realm_query_result(result)
|
||||||
|
|
||||||
|
with self.settings(REALM_HOSTS={"zulip": "localhost"}):
|
||||||
|
result = self.client_get("/activity/support", {"q": "http://localhost"})
|
||||||
|
check_zulip_realm_query_result(result)
|
||||||
|
|
||||||
|
result = self.client_get("/activity/support", {"q": "hamlet@zulip.com, lear"})
|
||||||
|
check_hamlet_user_query_result(result)
|
||||||
|
check_zulip_realm_query_result(result)
|
||||||
|
check_lear_realm_query_result(result)
|
||||||
|
|
||||||
|
result = self.client_get("/activity/support", {"q": "King hamlet,lear"})
|
||||||
|
check_hamlet_user_query_result(result)
|
||||||
|
check_zulip_realm_query_result(result)
|
||||||
|
check_lear_realm_query_result(result)
|
||||||
|
|
||||||
|
result = self.client_get("/activity/support", {"q": "Othello, the Moor of Venice"})
|
||||||
|
check_othello_user_query_result(result)
|
||||||
|
check_zulip_realm_query_result(result)
|
||||||
|
|
||||||
|
result = self.client_get("/activity/support", {"q": "lear, Hamlet <hamlet@zulip.com>"})
|
||||||
|
check_hamlet_user_query_result(result)
|
||||||
|
check_zulip_realm_query_result(result)
|
||||||
|
check_lear_realm_query_result(result)
|
||||||
|
|
||||||
|
with mock.patch(
|
||||||
|
"analytics.views.support.timezone_now",
|
||||||
|
return_value=timezone_now() - timedelta(minutes=50),
|
||||||
|
):
|
||||||
|
self.client_post("/accounts/home/", {"email": self.nonreg_email("test")})
|
||||||
|
self.login("iago")
|
||||||
|
result = self.client_get("/activity/support", {"q": self.nonreg_email("test")})
|
||||||
|
check_preregistration_user_query_result(result, self.nonreg_email("test"))
|
||||||
|
check_zulip_realm_query_result(result)
|
||||||
|
|
||||||
|
invite_expires_in_days = 10
|
||||||
|
stream_ids = [self.get_stream_id("Denmark")]
|
||||||
|
invitee_emails = [self.nonreg_email("test1")]
|
||||||
|
self.client_post(
|
||||||
|
"/json/invites",
|
||||||
|
{
|
||||||
|
"invitee_emails": invitee_emails,
|
||||||
|
"stream_ids": orjson.dumps(stream_ids).decode(),
|
||||||
|
"invite_expires_in_days": invite_expires_in_days,
|
||||||
|
"invite_as": PreregistrationUser.INVITE_AS["MEMBER"],
|
||||||
|
},
|
||||||
|
)
|
||||||
|
result = self.client_get("/activity/support", {"q": self.nonreg_email("test1")})
|
||||||
|
check_preregistration_user_query_result(result, self.nonreg_email("test1"), invite=True)
|
||||||
|
check_zulip_realm_query_result(result)
|
||||||
|
|
||||||
|
email = self.nonreg_email("alice")
|
||||||
|
self.client_post("/new/", {"email": email})
|
||||||
|
result = self.client_get("/activity/support", {"q": email})
|
||||||
|
check_realm_creation_query_result(result, email)
|
||||||
|
|
||||||
|
do_create_multiuse_invite_link(
|
||||||
|
self.example_user("hamlet"),
|
||||||
|
invited_as=1,
|
||||||
|
invite_expires_in_days=invite_expires_in_days,
|
||||||
|
)
|
||||||
|
result = self.client_get("/activity/support", {"q": "zulip"})
|
||||||
|
check_multiuse_invite_link_query_result(result)
|
||||||
|
check_zulip_realm_query_result(result)
|
||||||
|
MultiuseInvite.objects.all().delete()
|
||||||
|
|
||||||
|
do_send_realm_reactivation_email(get_realm("zulip"), acting_user=None)
|
||||||
|
result = self.client_get("/activity/support", {"q": "zulip"})
|
||||||
|
check_realm_reactivation_link_query_result(result)
|
||||||
|
check_zulip_realm_query_result(result)
|
||||||
|
|
||||||
|
def test_get_org_type_display_name(self) -> None:
|
||||||
|
self.assertEqual(get_org_type_display_name(Realm.ORG_TYPES["business"]["id"]), "Business")
|
||||||
|
self.assertEqual(get_org_type_display_name(883), "")
|
||||||
|
|
||||||
|
@mock.patch("analytics.views.support.update_billing_method_of_current_plan")
|
||||||
|
def test_change_billing_method(self, m: mock.Mock) -> None:
|
||||||
|
cordelia = self.example_user("cordelia")
|
||||||
|
self.login_user(cordelia)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"}
|
||||||
|
)
|
||||||
|
self.assertEqual(result.status_code, 302)
|
||||||
|
self.assertEqual(result["Location"], "/login/")
|
||||||
|
|
||||||
|
iago = self.example_user("iago")
|
||||||
|
self.login_user(iago)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support",
|
||||||
|
{"realm_id": f"{iago.realm_id}", "billing_method": "charge_automatically"},
|
||||||
|
)
|
||||||
|
m.assert_called_once_with(get_realm("zulip"), charge_automatically=True, acting_user=iago)
|
||||||
|
self.assert_in_success_response(
|
||||||
|
["Billing method of zulip updated to charge automatically"], result
|
||||||
|
)
|
||||||
|
|
||||||
|
m.reset_mock()
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{iago.realm_id}", "billing_method": "send_invoice"}
|
||||||
|
)
|
||||||
|
m.assert_called_once_with(get_realm("zulip"), charge_automatically=False, acting_user=iago)
|
||||||
|
self.assert_in_success_response(
|
||||||
|
["Billing method of zulip updated to pay by invoice"], result
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_change_realm_plan_type(self) -> None:
|
||||||
|
cordelia = self.example_user("cordelia")
|
||||||
|
self.login_user(cordelia)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"}
|
||||||
|
)
|
||||||
|
self.assertEqual(result.status_code, 302)
|
||||||
|
self.assertEqual(result["Location"], "/login/")
|
||||||
|
|
||||||
|
iago = self.example_user("iago")
|
||||||
|
self.login_user(iago)
|
||||||
|
|
||||||
|
with mock.patch("analytics.views.support.do_change_realm_plan_type") as m:
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "2"}
|
||||||
|
)
|
||||||
|
m.assert_called_once_with(get_realm("zulip"), 2, acting_user=iago)
|
||||||
|
self.assert_in_success_response(
|
||||||
|
["Plan type of zulip changed from self-hosted to limited"], result
|
||||||
|
)
|
||||||
|
|
||||||
|
with mock.patch("analytics.views.support.do_change_realm_plan_type") as m:
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "10"}
|
||||||
|
)
|
||||||
|
m.assert_called_once_with(get_realm("zulip"), 10, acting_user=iago)
|
||||||
|
self.assert_in_success_response(
|
||||||
|
["Plan type of zulip changed from self-hosted to plus"], result
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_change_org_type(self) -> None:
|
||||||
|
cordelia = self.example_user("cordelia")
|
||||||
|
self.login_user(cordelia)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{cordelia.realm_id}", "org_type": "70"}
|
||||||
|
)
|
||||||
|
self.assertEqual(result.status_code, 302)
|
||||||
|
self.assertEqual(result["Location"], "/login/")
|
||||||
|
|
||||||
|
iago = self.example_user("iago")
|
||||||
|
self.login_user(iago)
|
||||||
|
|
||||||
|
with mock.patch("analytics.views.support.do_change_realm_org_type") as m:
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{iago.realm_id}", "org_type": "70"}
|
||||||
|
)
|
||||||
|
m.assert_called_once_with(get_realm("zulip"), 70, acting_user=iago)
|
||||||
|
self.assert_in_success_response(
|
||||||
|
["Org type of zulip changed from Business to Government"], result
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_attach_discount(self) -> None:
|
||||||
|
cordelia = self.example_user("cordelia")
|
||||||
|
lear_realm = get_realm("lear")
|
||||||
|
self.login_user(cordelia)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}
|
||||||
|
)
|
||||||
|
self.assertEqual(result.status_code, 302)
|
||||||
|
self.assertEqual(result["Location"], "/login/")
|
||||||
|
|
||||||
|
iago = self.example_user("iago")
|
||||||
|
self.login("iago")
|
||||||
|
|
||||||
|
with mock.patch("analytics.views.support.attach_discount_to_realm") as m:
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}
|
||||||
|
)
|
||||||
|
m.assert_called_once_with(get_realm("lear"), 25, acting_user=iago)
|
||||||
|
self.assert_in_success_response(["Discount of lear changed to 25% from 0%"], result)
|
||||||
|
|
||||||
|
def test_change_sponsorship_status(self) -> None:
|
||||||
|
lear_realm = get_realm("lear")
|
||||||
|
self.assertIsNone(get_customer_by_realm(lear_realm))
|
||||||
|
|
||||||
|
cordelia = self.example_user("cordelia")
|
||||||
|
self.login_user(cordelia)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"}
|
||||||
|
)
|
||||||
|
self.assertEqual(result.status_code, 302)
|
||||||
|
self.assertEqual(result["Location"], "/login/")
|
||||||
|
|
||||||
|
iago = self.example_user("iago")
|
||||||
|
self.login_user(iago)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"}
|
||||||
|
)
|
||||||
|
self.assert_in_success_response(["lear marked as pending sponsorship."], result)
|
||||||
|
customer = get_customer_by_realm(lear_realm)
|
||||||
|
assert customer is not None
|
||||||
|
self.assertTrue(customer.sponsorship_pending)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "false"}
|
||||||
|
)
|
||||||
|
self.assert_in_success_response(["lear is no longer pending sponsorship."], result)
|
||||||
|
customer = get_customer_by_realm(lear_realm)
|
||||||
|
assert customer is not None
|
||||||
|
self.assertFalse(customer.sponsorship_pending)
|
||||||
|
|
||||||
|
def test_approve_sponsorship(self) -> None:
|
||||||
|
lear_realm = get_realm("lear")
|
||||||
|
update_sponsorship_status(lear_realm, True, acting_user=None)
|
||||||
|
king_user = self.lear_user("king")
|
||||||
|
king_user.role = UserProfile.ROLE_REALM_OWNER
|
||||||
|
king_user.save()
|
||||||
|
|
||||||
|
cordelia = self.example_user("cordelia")
|
||||||
|
self.login_user(cordelia)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support",
|
||||||
|
{"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"},
|
||||||
|
)
|
||||||
|
self.assertEqual(result.status_code, 302)
|
||||||
|
self.assertEqual(result["Location"], "/login/")
|
||||||
|
|
||||||
|
iago = self.example_user("iago")
|
||||||
|
self.login_user(iago)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support",
|
||||||
|
{"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"},
|
||||||
|
)
|
||||||
|
self.assert_in_success_response(["Sponsorship approved for lear"], result)
|
||||||
|
lear_realm.refresh_from_db()
|
||||||
|
self.assertEqual(lear_realm.plan_type, Realm.PLAN_TYPE_STANDARD_FREE)
|
||||||
|
customer = get_customer_by_realm(lear_realm)
|
||||||
|
assert customer is not None
|
||||||
|
self.assertFalse(customer.sponsorship_pending)
|
||||||
|
messages = UserMessage.objects.filter(user_profile=king_user)
|
||||||
|
self.assertIn(
|
||||||
|
"request for sponsored hosting has been approved", messages[0].message.content
|
||||||
|
)
|
||||||
|
self.assert_length(messages, 1)
|
||||||
|
|
||||||
|
def test_activate_or_deactivate_realm(self) -> None:
|
||||||
|
cordelia = self.example_user("cordelia")
|
||||||
|
lear_realm = get_realm("lear")
|
||||||
|
self.login_user(cordelia)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"}
|
||||||
|
)
|
||||||
|
self.assertEqual(result.status_code, 302)
|
||||||
|
self.assertEqual(result["Location"], "/login/")
|
||||||
|
|
||||||
|
self.login("iago")
|
||||||
|
|
||||||
|
with mock.patch("analytics.views.support.do_deactivate_realm") as m:
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"}
|
||||||
|
)
|
||||||
|
m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago"))
|
||||||
|
self.assert_in_success_response(["lear deactivated"], result)
|
||||||
|
|
||||||
|
with mock.patch("analytics.views.support.do_send_realm_reactivation_email") as m:
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "status": "active"}
|
||||||
|
)
|
||||||
|
m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago"))
|
||||||
|
self.assert_in_success_response(
|
||||||
|
["Realm reactivation email sent to admins of lear"], result
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_change_subdomain(self) -> None:
|
||||||
|
cordelia = self.example_user("cordelia")
|
||||||
|
lear_realm = get_realm("lear")
|
||||||
|
self.login_user(cordelia)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new_name"}
|
||||||
|
)
|
||||||
|
self.assertEqual(result.status_code, 302)
|
||||||
|
self.assertEqual(result["Location"], "/login/")
|
||||||
|
self.login("iago")
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"}
|
||||||
|
)
|
||||||
|
self.assertEqual(result.status_code, 302)
|
||||||
|
self.assertEqual(result["Location"], "/activity/support?q=new-name")
|
||||||
|
realm_id = lear_realm.id
|
||||||
|
lear_realm = get_realm("new-name")
|
||||||
|
self.assertEqual(lear_realm.id, realm_id)
|
||||||
|
self.assertTrue(Realm.objects.filter(string_id="lear").exists())
|
||||||
|
self.assertTrue(Realm.objects.filter(string_id="lear")[0].deactivated)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"}
|
||||||
|
)
|
||||||
|
self.assert_in_success_response(
|
||||||
|
["Subdomain unavailable. Please choose a different one."], result
|
||||||
|
)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "zulip"}
|
||||||
|
)
|
||||||
|
self.assert_in_success_response(
|
||||||
|
["Subdomain unavailable. Please choose a different one."], result
|
||||||
|
)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "lear"}
|
||||||
|
)
|
||||||
|
self.assert_in_success_response(
|
||||||
|
["Subdomain unavailable. Please choose a different one."], result
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_downgrade_realm(self) -> None:
|
||||||
|
cordelia = self.example_user("cordelia")
|
||||||
|
self.login_user(cordelia)
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"}
|
||||||
|
)
|
||||||
|
self.assertEqual(result.status_code, 302)
|
||||||
|
self.assertEqual(result["Location"], "/login/")
|
||||||
|
|
||||||
|
iago = self.example_user("iago")
|
||||||
|
self.login_user(iago)
|
||||||
|
|
||||||
|
with mock.patch("analytics.views.support.downgrade_at_the_end_of_billing_cycle") as m:
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support",
|
||||||
|
{
|
||||||
|
"realm_id": f"{iago.realm_id}",
|
||||||
|
"downgrade_method": "downgrade_at_billing_cycle_end",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
m.assert_called_once_with(get_realm("zulip"))
|
||||||
|
self.assert_in_success_response(
|
||||||
|
["zulip marked for downgrade at the end of billing cycle"], result
|
||||||
|
)
|
||||||
|
|
||||||
|
with mock.patch(
|
||||||
|
"analytics.views.support.downgrade_now_without_creating_additional_invoices"
|
||||||
|
) as m:
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support",
|
||||||
|
{
|
||||||
|
"realm_id": f"{iago.realm_id}",
|
||||||
|
"downgrade_method": "downgrade_now_without_additional_licenses",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
m.assert_called_once_with(get_realm("zulip"))
|
||||||
|
self.assert_in_success_response(
|
||||||
|
["zulip downgraded without creating additional invoices"], result
|
||||||
|
)
|
||||||
|
|
||||||
|
with mock.patch(
|
||||||
|
"analytics.views.support.downgrade_now_without_creating_additional_invoices"
|
||||||
|
) as m1:
|
||||||
|
with mock.patch("analytics.views.support.void_all_open_invoices", return_value=1) as m2:
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support",
|
||||||
|
{
|
||||||
|
"realm_id": f"{iago.realm_id}",
|
||||||
|
"downgrade_method": "downgrade_now_void_open_invoices",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
m1.assert_called_once_with(get_realm("zulip"))
|
||||||
|
m2.assert_called_once_with(get_realm("zulip"))
|
||||||
|
self.assert_in_success_response(
|
||||||
|
["zulip downgraded and voided 1 open invoices"], result
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_scrub_realm(self) -> None:
|
||||||
|
cordelia = self.example_user("cordelia")
|
||||||
|
lear_realm = get_realm("lear")
|
||||||
|
self.login_user(cordelia)
|
||||||
|
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"}
|
||||||
|
)
|
||||||
|
self.assertEqual(result.status_code, 302)
|
||||||
|
self.assertEqual(result["Location"], "/login/")
|
||||||
|
|
||||||
|
self.login("iago")
|
||||||
|
|
||||||
|
with mock.patch("analytics.views.support.do_scrub_realm") as m:
|
||||||
|
result = self.client_post(
|
||||||
|
"/activity/support", {"realm_id": f"{lear_realm.id}", "scrub_realm": "true"}
|
||||||
|
)
|
||||||
|
m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago"))
|
||||||
|
self.assert_in_success_response(["lear scrubbed"], result)
|
||||||
|
|
||||||
|
with mock.patch("analytics.views.support.do_scrub_realm") as m:
|
||||||
|
result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}"})
|
||||||
|
self.assert_json_error(result, "Invalid parameters")
|
||||||
|
m.assert_not_called()
|
||||||
@@ -1,38 +1,43 @@
|
|||||||
from django.conf import settings
|
from typing import List, Union
|
||||||
|
|
||||||
from django.conf.urls import include
|
from django.conf.urls import include
|
||||||
from django.urls import path
|
from django.urls import path
|
||||||
from django.urls.resolvers import URLPattern, URLResolver
|
from django.urls.resolvers import URLPattern, URLResolver
|
||||||
|
|
||||||
|
from analytics.views.installation_activity import get_installation_activity
|
||||||
|
from analytics.views.realm_activity import get_realm_activity
|
||||||
from analytics.views.stats import (
|
from analytics.views.stats import (
|
||||||
get_chart_data,
|
get_chart_data,
|
||||||
get_chart_data_for_installation,
|
get_chart_data_for_installation,
|
||||||
get_chart_data_for_realm,
|
get_chart_data_for_realm,
|
||||||
get_chart_data_for_stream,
|
get_chart_data_for_remote_installation,
|
||||||
|
get_chart_data_for_remote_realm,
|
||||||
stats,
|
stats,
|
||||||
stats_for_installation,
|
stats_for_installation,
|
||||||
stats_for_realm,
|
stats_for_realm,
|
||||||
|
stats_for_remote_installation,
|
||||||
|
stats_for_remote_realm,
|
||||||
)
|
)
|
||||||
|
from analytics.views.support import support
|
||||||
|
from analytics.views.user_activity import get_user_activity
|
||||||
from zerver.lib.rest import rest_path
|
from zerver.lib.rest import rest_path
|
||||||
|
|
||||||
i18n_urlpatterns: list[URLPattern | URLResolver] = [
|
i18n_urlpatterns: List[Union[URLPattern, URLResolver]] = [
|
||||||
# Server admin (user_profile.is_staff) visible stats pages
|
# Server admin (user_profile.is_staff) visible stats pages
|
||||||
|
path("activity", get_installation_activity),
|
||||||
|
path("activity/support", support, name="support"),
|
||||||
|
path("realm_activity/<realm_str>/", get_realm_activity),
|
||||||
|
path("user_activity/<user_profile_id>/", get_user_activity),
|
||||||
path("stats/realm/<realm_str>/", stats_for_realm),
|
path("stats/realm/<realm_str>/", stats_for_realm),
|
||||||
path("stats/installation", stats_for_installation),
|
path("stats/installation", stats_for_installation),
|
||||||
|
path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation),
|
||||||
|
path(
|
||||||
|
"stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/", stats_for_remote_realm
|
||||||
|
),
|
||||||
# User-visible stats page
|
# User-visible stats page
|
||||||
path("stats", stats, name="stats"),
|
path("stats", stats, name="stats"),
|
||||||
]
|
]
|
||||||
|
|
||||||
if settings.ZILENCER_ENABLED:
|
|
||||||
from analytics.views.stats import stats_for_remote_installation, stats_for_remote_realm
|
|
||||||
|
|
||||||
i18n_urlpatterns += [
|
|
||||||
path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation),
|
|
||||||
path(
|
|
||||||
"stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/",
|
|
||||||
stats_for_remote_realm,
|
|
||||||
),
|
|
||||||
]
|
|
||||||
|
|
||||||
# These endpoints are a part of the API (V1), which uses:
|
# These endpoints are a part of the API (V1), which uses:
|
||||||
# * REST verbs
|
# * REST verbs
|
||||||
# * Basic auth (username:password is email:apiKey)
|
# * Basic auth (username:password is email:apiKey)
|
||||||
@@ -44,28 +49,18 @@ if settings.ZILENCER_ENABLED:
|
|||||||
v1_api_and_json_patterns = [
|
v1_api_and_json_patterns = [
|
||||||
# get data for the graphs at /stats
|
# get data for the graphs at /stats
|
||||||
rest_path("analytics/chart_data", GET=get_chart_data),
|
rest_path("analytics/chart_data", GET=get_chart_data),
|
||||||
rest_path("analytics/chart_data/stream/<stream_id>", GET=get_chart_data_for_stream),
|
|
||||||
rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm),
|
rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm),
|
||||||
rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation),
|
rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation),
|
||||||
|
rest_path(
|
||||||
|
"analytics/chart_data/remote/<int:remote_server_id>/installation",
|
||||||
|
GET=get_chart_data_for_remote_installation,
|
||||||
|
),
|
||||||
|
rest_path(
|
||||||
|
"analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>",
|
||||||
|
GET=get_chart_data_for_remote_realm,
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
|
||||||
if settings.ZILENCER_ENABLED:
|
|
||||||
from analytics.views.stats import (
|
|
||||||
get_chart_data_for_remote_installation,
|
|
||||||
get_chart_data_for_remote_realm,
|
|
||||||
)
|
|
||||||
|
|
||||||
v1_api_and_json_patterns += [
|
|
||||||
rest_path(
|
|
||||||
"analytics/chart_data/remote/<int:remote_server_id>/installation",
|
|
||||||
GET=get_chart_data_for_remote_installation,
|
|
||||||
),
|
|
||||||
rest_path(
|
|
||||||
"analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>",
|
|
||||||
GET=get_chart_data_for_remote_realm,
|
|
||||||
),
|
|
||||||
]
|
|
||||||
|
|
||||||
i18n_urlpatterns += [
|
i18n_urlpatterns += [
|
||||||
path("api/v1/", include(v1_api_and_json_patterns)),
|
path("api/v1/", include(v1_api_and_json_patterns)),
|
||||||
path("json/", include(v1_api_and_json_patterns)),
|
path("json/", include(v1_api_and_json_patterns)),
|
||||||
|
|||||||
137
analytics/views/activity_common.py
Normal file
137
analytics/views/activity_common.py
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
import re
|
||||||
|
from datetime import datetime
|
||||||
|
from html import escape
|
||||||
|
from typing import Any, Dict, List, Optional, Sequence
|
||||||
|
|
||||||
|
import pytz
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db.backends.utils import CursorWrapper
|
||||||
|
from django.db.models.query import QuerySet
|
||||||
|
from django.template import loader
|
||||||
|
from django.urls import reverse
|
||||||
|
from markupsafe import Markup as mark_safe
|
||||||
|
|
||||||
|
eastern_tz = pytz.timezone("US/Eastern")
|
||||||
|
|
||||||
|
|
||||||
|
if settings.BILLING_ENABLED:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def make_table(
|
||||||
|
title: str, cols: Sequence[str], rows: Sequence[Any], has_row_class: bool = False
|
||||||
|
) -> str:
|
||||||
|
|
||||||
|
if not has_row_class:
|
||||||
|
|
||||||
|
def fix_row(row: Any) -> Dict[str, Any]:
|
||||||
|
return dict(cells=row, row_class=None)
|
||||||
|
|
||||||
|
rows = list(map(fix_row, rows))
|
||||||
|
|
||||||
|
data = dict(title=title, cols=cols, rows=rows)
|
||||||
|
|
||||||
|
content = loader.render_to_string(
|
||||||
|
"analytics/ad_hoc_query.html",
|
||||||
|
dict(data=data),
|
||||||
|
)
|
||||||
|
|
||||||
|
return content
|
||||||
|
|
||||||
|
|
||||||
|
def dictfetchall(cursor: CursorWrapper) -> List[Dict[str, Any]]:
|
||||||
|
"Returns all rows from a cursor as a dict"
|
||||||
|
desc = cursor.description
|
||||||
|
return [dict(zip((col[0] for col in desc), row)) for row in cursor.fetchall()]
|
||||||
|
|
||||||
|
|
||||||
|
def format_date_for_activity_reports(date: Optional[datetime]) -> str:
|
||||||
|
if date:
|
||||||
|
return date.astimezone(eastern_tz).strftime("%Y-%m-%d %H:%M")
|
||||||
|
else:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
def user_activity_link(email: str, user_profile_id: int) -> mark_safe:
|
||||||
|
from analytics.views.user_activity import get_user_activity
|
||||||
|
|
||||||
|
url = reverse(get_user_activity, kwargs=dict(user_profile_id=user_profile_id))
|
||||||
|
email_link = f'<a href="{escape(url)}">{escape(email)}</a>'
|
||||||
|
return mark_safe(email_link)
|
||||||
|
|
||||||
|
|
||||||
|
def realm_activity_link(realm_str: str) -> mark_safe:
|
||||||
|
from analytics.views.realm_activity import get_realm_activity
|
||||||
|
|
||||||
|
url = reverse(get_realm_activity, kwargs=dict(realm_str=realm_str))
|
||||||
|
realm_link = f'<a href="{escape(url)}">{escape(realm_str)}</a>'
|
||||||
|
return mark_safe(realm_link)
|
||||||
|
|
||||||
|
|
||||||
|
def realm_stats_link(realm_str: str) -> mark_safe:
|
||||||
|
from analytics.views.stats import stats_for_realm
|
||||||
|
|
||||||
|
url = reverse(stats_for_realm, kwargs=dict(realm_str=realm_str))
|
||||||
|
stats_link = f'<a href="{escape(url)}"><i class="fa fa-pie-chart"></i>{escape(realm_str)}</a>'
|
||||||
|
return mark_safe(stats_link)
|
||||||
|
|
||||||
|
|
||||||
|
def remote_installation_stats_link(server_id: int, hostname: str) -> mark_safe:
|
||||||
|
from analytics.views.stats import stats_for_remote_installation
|
||||||
|
|
||||||
|
url = reverse(stats_for_remote_installation, kwargs=dict(remote_server_id=server_id))
|
||||||
|
stats_link = f'<a href="{escape(url)}"><i class="fa fa-pie-chart"></i>{escape(hostname)}</a>'
|
||||||
|
return mark_safe(stats_link)
|
||||||
|
|
||||||
|
|
||||||
|
def get_user_activity_summary(records: List[QuerySet]) -> Dict[str, Any]:
|
||||||
|
#: The type annotation used above is clearly overly permissive.
|
||||||
|
#: We should perhaps use TypedDict to clearly lay out the schema
|
||||||
|
#: for the user activity summary.
|
||||||
|
summary: Dict[str, Any] = {}
|
||||||
|
|
||||||
|
def update(action: str, record: QuerySet) -> None:
|
||||||
|
if action not in summary:
|
||||||
|
summary[action] = dict(
|
||||||
|
count=record.count,
|
||||||
|
last_visit=record.last_visit,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
summary[action]["count"] += record.count
|
||||||
|
summary[action]["last_visit"] = max(
|
||||||
|
summary[action]["last_visit"],
|
||||||
|
record.last_visit,
|
||||||
|
)
|
||||||
|
|
||||||
|
if records:
|
||||||
|
summary["name"] = records[0].user_profile.full_name
|
||||||
|
summary["user_profile_id"] = records[0].user_profile.id
|
||||||
|
|
||||||
|
for record in records:
|
||||||
|
client = record.client.name
|
||||||
|
query = str(record.query)
|
||||||
|
|
||||||
|
update("use", record)
|
||||||
|
|
||||||
|
if client == "API":
|
||||||
|
m = re.match("/api/.*/external/(.*)", query)
|
||||||
|
if m:
|
||||||
|
client = m.group(1)
|
||||||
|
update(client, record)
|
||||||
|
|
||||||
|
if client.startswith("desktop"):
|
||||||
|
update("desktop", record)
|
||||||
|
if client == "website":
|
||||||
|
update("website", record)
|
||||||
|
if ("send_message" in query) or re.search("/api/.*/external/.*", query):
|
||||||
|
update("send", record)
|
||||||
|
if query in [
|
||||||
|
"/json/update_pointer",
|
||||||
|
"/json/users/me/pointer",
|
||||||
|
"/api/v1/update_pointer",
|
||||||
|
"update_pointer_backend",
|
||||||
|
]:
|
||||||
|
update("pointer", record)
|
||||||
|
update(client, record)
|
||||||
|
|
||||||
|
return summary
|
||||||
622
analytics/views/installation_activity.py
Normal file
622
analytics/views/installation_activity.py
Normal file
@@ -0,0 +1,622 @@
|
|||||||
|
import itertools
|
||||||
|
import time
|
||||||
|
from collections import defaultdict
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import connection
|
||||||
|
from django.http import HttpRequest, HttpResponse
|
||||||
|
from django.shortcuts import render
|
||||||
|
from django.template import loader
|
||||||
|
from django.utils.timezone import now as timezone_now
|
||||||
|
from markupsafe import Markup as mark_safe
|
||||||
|
from psycopg2.sql import SQL, Composable, Literal
|
||||||
|
|
||||||
|
from analytics.lib.counts import COUNT_STATS
|
||||||
|
from analytics.views.activity_common import (
|
||||||
|
dictfetchall,
|
||||||
|
format_date_for_activity_reports,
|
||||||
|
make_table,
|
||||||
|
realm_activity_link,
|
||||||
|
realm_stats_link,
|
||||||
|
remote_installation_stats_link,
|
||||||
|
)
|
||||||
|
from analytics.views.support import get_plan_name
|
||||||
|
from zerver.decorator import require_server_admin
|
||||||
|
from zerver.lib.request import has_request_variables
|
||||||
|
from zerver.lib.timestamp import timestamp_to_datetime
|
||||||
|
from zerver.models import Realm, UserActivityInterval, UserProfile, get_org_type_display_name
|
||||||
|
|
||||||
|
if settings.BILLING_ENABLED:
|
||||||
|
from corporate.lib.stripe import (
|
||||||
|
estimate_annual_recurring_revenue_by_realm,
|
||||||
|
get_realms_to_default_discount_dict,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_realm_day_counts() -> Dict[str, Dict[str, str]]:
|
||||||
|
query = SQL(
|
||||||
|
"""
|
||||||
|
select
|
||||||
|
r.string_id,
|
||||||
|
(now()::date - date_sent::date) age,
|
||||||
|
count(*) cnt
|
||||||
|
from zerver_message m
|
||||||
|
join zerver_userprofile up on up.id = m.sender_id
|
||||||
|
join zerver_realm r on r.id = up.realm_id
|
||||||
|
join zerver_client c on c.id = m.sending_client_id
|
||||||
|
where
|
||||||
|
(not up.is_bot)
|
||||||
|
and
|
||||||
|
date_sent > now()::date - interval '8 day'
|
||||||
|
and
|
||||||
|
c.name not in ('zephyr_mirror', 'ZulipMonitoring')
|
||||||
|
group by
|
||||||
|
r.string_id,
|
||||||
|
age
|
||||||
|
order by
|
||||||
|
r.string_id,
|
||||||
|
age
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
cursor = connection.cursor()
|
||||||
|
cursor.execute(query)
|
||||||
|
rows = dictfetchall(cursor)
|
||||||
|
cursor.close()
|
||||||
|
|
||||||
|
counts: Dict[str, Dict[int, int]] = defaultdict(dict)
|
||||||
|
for row in rows:
|
||||||
|
counts[row["string_id"]][row["age"]] = row["cnt"]
|
||||||
|
|
||||||
|
result = {}
|
||||||
|
for string_id in counts:
|
||||||
|
raw_cnts = [counts[string_id].get(age, 0) for age in range(8)]
|
||||||
|
min_cnt = min(raw_cnts[1:])
|
||||||
|
max_cnt = max(raw_cnts[1:])
|
||||||
|
|
||||||
|
def format_count(cnt: int, style: Optional[str] = None) -> str:
|
||||||
|
if style is not None:
|
||||||
|
good_bad = style
|
||||||
|
elif cnt == min_cnt:
|
||||||
|
good_bad = "bad"
|
||||||
|
elif cnt == max_cnt:
|
||||||
|
good_bad = "good"
|
||||||
|
else:
|
||||||
|
good_bad = "neutral"
|
||||||
|
|
||||||
|
return f'<td class="number {good_bad}">{cnt}</td>'
|
||||||
|
|
||||||
|
cnts = format_count(raw_cnts[0], "neutral") + "".join(map(format_count, raw_cnts[1:]))
|
||||||
|
result[string_id] = dict(cnts=cnts)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
|
||||||
|
now = timezone_now()
|
||||||
|
|
||||||
|
query = SQL(
|
||||||
|
"""
|
||||||
|
SELECT
|
||||||
|
realm.string_id,
|
||||||
|
realm.date_created,
|
||||||
|
realm.plan_type,
|
||||||
|
realm.org_type,
|
||||||
|
coalesce(wau_table.value, 0) wau_count,
|
||||||
|
coalesce(dau_table.value, 0) dau_count,
|
||||||
|
coalesce(user_count_table.value, 0) user_profile_count,
|
||||||
|
coalesce(bot_count_table.value, 0) bot_count
|
||||||
|
FROM
|
||||||
|
zerver_realm as realm
|
||||||
|
LEFT OUTER JOIN (
|
||||||
|
SELECT
|
||||||
|
value _14day_active_humans,
|
||||||
|
realm_id
|
||||||
|
from
|
||||||
|
analytics_realmcount
|
||||||
|
WHERE
|
||||||
|
property = 'realm_active_humans::day'
|
||||||
|
AND end_time = %(realm_active_humans_end_time)s
|
||||||
|
) as _14day_active_humans_table ON realm.id = _14day_active_humans_table.realm_id
|
||||||
|
LEFT OUTER JOIN (
|
||||||
|
SELECT
|
||||||
|
value,
|
||||||
|
realm_id
|
||||||
|
from
|
||||||
|
analytics_realmcount
|
||||||
|
WHERE
|
||||||
|
property = '7day_actives::day'
|
||||||
|
AND end_time = %(seven_day_actives_end_time)s
|
||||||
|
) as wau_table ON realm.id = wau_table.realm_id
|
||||||
|
LEFT OUTER JOIN (
|
||||||
|
SELECT
|
||||||
|
value,
|
||||||
|
realm_id
|
||||||
|
from
|
||||||
|
analytics_realmcount
|
||||||
|
WHERE
|
||||||
|
property = '1day_actives::day'
|
||||||
|
AND end_time = %(one_day_actives_end_time)s
|
||||||
|
) as dau_table ON realm.id = dau_table.realm_id
|
||||||
|
LEFT OUTER JOIN (
|
||||||
|
SELECT
|
||||||
|
value,
|
||||||
|
realm_id
|
||||||
|
from
|
||||||
|
analytics_realmcount
|
||||||
|
WHERE
|
||||||
|
property = 'active_users_audit:is_bot:day'
|
||||||
|
AND subgroup = 'false'
|
||||||
|
AND end_time = %(active_users_audit_end_time)s
|
||||||
|
) as user_count_table ON realm.id = user_count_table.realm_id
|
||||||
|
LEFT OUTER JOIN (
|
||||||
|
SELECT
|
||||||
|
value,
|
||||||
|
realm_id
|
||||||
|
from
|
||||||
|
analytics_realmcount
|
||||||
|
WHERE
|
||||||
|
property = 'active_users_audit:is_bot:day'
|
||||||
|
AND subgroup = 'true'
|
||||||
|
AND end_time = %(active_users_audit_end_time)s
|
||||||
|
) as bot_count_table ON realm.id = bot_count_table.realm_id
|
||||||
|
WHERE
|
||||||
|
_14day_active_humans IS NOT NULL
|
||||||
|
or realm.plan_type = 3
|
||||||
|
ORDER BY
|
||||||
|
dau_count DESC,
|
||||||
|
string_id ASC
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
cursor = connection.cursor()
|
||||||
|
cursor.execute(
|
||||||
|
query,
|
||||||
|
{
|
||||||
|
"realm_active_humans_end_time": COUNT_STATS[
|
||||||
|
"realm_active_humans::day"
|
||||||
|
].last_successful_fill(),
|
||||||
|
"seven_day_actives_end_time": COUNT_STATS["7day_actives::day"].last_successful_fill(),
|
||||||
|
"one_day_actives_end_time": COUNT_STATS["1day_actives::day"].last_successful_fill(),
|
||||||
|
"active_users_audit_end_time": COUNT_STATS[
|
||||||
|
"active_users_audit:is_bot:day"
|
||||||
|
].last_successful_fill(),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
rows = dictfetchall(cursor)
|
||||||
|
cursor.close()
|
||||||
|
|
||||||
|
# Fetch all the realm administrator users
|
||||||
|
realm_owners: Dict[str, List[str]] = defaultdict(list)
|
||||||
|
for up in UserProfile.objects.select_related("realm").filter(
|
||||||
|
role=UserProfile.ROLE_REALM_OWNER,
|
||||||
|
is_active=True,
|
||||||
|
):
|
||||||
|
realm_owners[up.realm.string_id].append(up.delivery_email)
|
||||||
|
|
||||||
|
for row in rows:
|
||||||
|
row["date_created_day"] = row["date_created"].strftime("%Y-%m-%d")
|
||||||
|
row["age_days"] = int((now - row["date_created"]).total_seconds() / 86400)
|
||||||
|
row["is_new"] = row["age_days"] < 12 * 7
|
||||||
|
row["realm_owner_emails"] = ", ".join(realm_owners[row["string_id"]])
|
||||||
|
|
||||||
|
# get messages sent per day
|
||||||
|
counts = get_realm_day_counts()
|
||||||
|
for row in rows:
|
||||||
|
try:
|
||||||
|
row["history"] = counts[row["string_id"]]["cnts"]
|
||||||
|
except Exception:
|
||||||
|
row["history"] = ""
|
||||||
|
|
||||||
|
# estimate annual subscription revenue
|
||||||
|
total_arr = 0
|
||||||
|
if settings.BILLING_ENABLED:
|
||||||
|
estimated_arrs = estimate_annual_recurring_revenue_by_realm()
|
||||||
|
realms_to_default_discount = get_realms_to_default_discount_dict()
|
||||||
|
|
||||||
|
for row in rows:
|
||||||
|
row["plan_type_string"] = get_plan_name(row["plan_type"])
|
||||||
|
|
||||||
|
string_id = row["string_id"]
|
||||||
|
|
||||||
|
if string_id in estimated_arrs:
|
||||||
|
row["arr"] = estimated_arrs[string_id]
|
||||||
|
|
||||||
|
if row["plan_type"] in [Realm.PLAN_TYPE_STANDARD, Realm.PLAN_TYPE_PLUS]:
|
||||||
|
row["effective_rate"] = 100 - int(realms_to_default_discount.get(string_id, 0))
|
||||||
|
elif row["plan_type"] == Realm.PLAN_TYPE_STANDARD_FREE:
|
||||||
|
row["effective_rate"] = 0
|
||||||
|
elif (
|
||||||
|
row["plan_type"] == Realm.PLAN_TYPE_LIMITED
|
||||||
|
and string_id in realms_to_default_discount
|
||||||
|
):
|
||||||
|
row["effective_rate"] = 100 - int(realms_to_default_discount[string_id])
|
||||||
|
else:
|
||||||
|
row["effective_rate"] = ""
|
||||||
|
|
||||||
|
total_arr += sum(estimated_arrs.values())
|
||||||
|
|
||||||
|
for row in rows:
|
||||||
|
row["org_type_string"] = get_org_type_display_name(row["org_type"])
|
||||||
|
|
||||||
|
# augment data with realm_minutes
|
||||||
|
total_hours = 0.0
|
||||||
|
for row in rows:
|
||||||
|
string_id = row["string_id"]
|
||||||
|
minutes = realm_minutes.get(string_id, 0.0)
|
||||||
|
hours = minutes / 60.0
|
||||||
|
total_hours += hours
|
||||||
|
row["hours"] = str(int(hours))
|
||||||
|
try:
|
||||||
|
row["hours_per_user"] = "{:.1f}".format(hours / row["dau_count"])
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# formatting
|
||||||
|
for row in rows:
|
||||||
|
row["stats_link"] = realm_stats_link(row["string_id"])
|
||||||
|
row["string_id"] = realm_activity_link(row["string_id"])
|
||||||
|
|
||||||
|
# Count active sites
|
||||||
|
def meets_goal(row: Dict[str, int]) -> bool:
|
||||||
|
return row["dau_count"] >= 5
|
||||||
|
|
||||||
|
num_active_sites = len(list(filter(meets_goal, rows)))
|
||||||
|
|
||||||
|
# create totals
|
||||||
|
total_dau_count = 0
|
||||||
|
total_user_profile_count = 0
|
||||||
|
total_bot_count = 0
|
||||||
|
total_wau_count = 0
|
||||||
|
for row in rows:
|
||||||
|
total_dau_count += int(row["dau_count"])
|
||||||
|
total_user_profile_count += int(row["user_profile_count"])
|
||||||
|
total_bot_count += int(row["bot_count"])
|
||||||
|
total_wau_count += int(row["wau_count"])
|
||||||
|
|
||||||
|
total_row = dict(
|
||||||
|
string_id="Total",
|
||||||
|
plan_type_string="",
|
||||||
|
org_type_string="",
|
||||||
|
effective_rate="",
|
||||||
|
arr=total_arr,
|
||||||
|
stats_link="",
|
||||||
|
date_created_day="",
|
||||||
|
realm_owner_emails="",
|
||||||
|
dau_count=total_dau_count,
|
||||||
|
user_profile_count=total_user_profile_count,
|
||||||
|
bot_count=total_bot_count,
|
||||||
|
hours=int(total_hours),
|
||||||
|
wau_count=total_wau_count,
|
||||||
|
)
|
||||||
|
|
||||||
|
rows.insert(0, total_row)
|
||||||
|
|
||||||
|
content = loader.render_to_string(
|
||||||
|
"analytics/realm_summary_table.html",
|
||||||
|
dict(
|
||||||
|
rows=rows,
|
||||||
|
num_active_sites=num_active_sites,
|
||||||
|
utctime=now.strftime("%Y-%m-%d %H:%MZ"),
|
||||||
|
billing_enabled=settings.BILLING_ENABLED,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
return content
|
||||||
|
|
||||||
|
|
||||||
|
def user_activity_intervals() -> Tuple[mark_safe, Dict[str, float]]:
|
||||||
|
day_end = timestamp_to_datetime(time.time())
|
||||||
|
day_start = day_end - timedelta(hours=24)
|
||||||
|
|
||||||
|
output = "Per-user online duration for the last 24 hours:\n"
|
||||||
|
total_duration = timedelta(0)
|
||||||
|
|
||||||
|
all_intervals = (
|
||||||
|
UserActivityInterval.objects.filter(
|
||||||
|
end__gte=day_start,
|
||||||
|
start__lte=day_end,
|
||||||
|
)
|
||||||
|
.select_related(
|
||||||
|
"user_profile",
|
||||||
|
"user_profile__realm",
|
||||||
|
)
|
||||||
|
.only(
|
||||||
|
"start",
|
||||||
|
"end",
|
||||||
|
"user_profile__delivery_email",
|
||||||
|
"user_profile__realm__string_id",
|
||||||
|
)
|
||||||
|
.order_by(
|
||||||
|
"user_profile__realm__string_id",
|
||||||
|
"user_profile__delivery_email",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
by_string_id = lambda row: row.user_profile.realm.string_id
|
||||||
|
by_email = lambda row: row.user_profile.delivery_email
|
||||||
|
|
||||||
|
realm_minutes = {}
|
||||||
|
|
||||||
|
for string_id, realm_intervals in itertools.groupby(all_intervals, by_string_id):
|
||||||
|
realm_duration = timedelta(0)
|
||||||
|
output += f"<hr>{string_id}\n"
|
||||||
|
for email, intervals in itertools.groupby(realm_intervals, by_email):
|
||||||
|
duration = timedelta(0)
|
||||||
|
for interval in intervals:
|
||||||
|
start = max(day_start, interval.start)
|
||||||
|
end = min(day_end, interval.end)
|
||||||
|
duration += end - start
|
||||||
|
|
||||||
|
total_duration += duration
|
||||||
|
realm_duration += duration
|
||||||
|
output += f" {email:<37}{duration}\n"
|
||||||
|
|
||||||
|
realm_minutes[string_id] = realm_duration.total_seconds() / 60
|
||||||
|
|
||||||
|
output += f"\nTotal duration: {total_duration}\n"
|
||||||
|
output += f"\nTotal duration in minutes: {total_duration.total_seconds() / 60.}\n"
|
||||||
|
output += f"Total duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}"
|
||||||
|
content = mark_safe("<pre>" + output + "</pre>")
|
||||||
|
return content, realm_minutes
|
||||||
|
|
||||||
|
|
||||||
|
def ad_hoc_queries() -> List[Dict[str, str]]:
|
||||||
|
def get_page(
|
||||||
|
query: Composable, cols: Sequence[str], title: str, totals_columns: Sequence[int] = []
|
||||||
|
) -> Dict[str, str]:
|
||||||
|
cursor = connection.cursor()
|
||||||
|
cursor.execute(query)
|
||||||
|
rows = cursor.fetchall()
|
||||||
|
rows = list(map(list, rows))
|
||||||
|
cursor.close()
|
||||||
|
|
||||||
|
def fix_rows(
|
||||||
|
i: int, fixup_func: Union[Callable[[str], mark_safe], Callable[[datetime], str]]
|
||||||
|
) -> None:
|
||||||
|
for row in rows:
|
||||||
|
row[i] = fixup_func(row[i])
|
||||||
|
|
||||||
|
total_row = []
|
||||||
|
for i, col in enumerate(cols):
|
||||||
|
if col == "Realm":
|
||||||
|
fix_rows(i, realm_activity_link)
|
||||||
|
elif col in ["Last time", "Last visit"]:
|
||||||
|
fix_rows(i, format_date_for_activity_reports)
|
||||||
|
elif col == "Hostname":
|
||||||
|
for row in rows:
|
||||||
|
row[i] = remote_installation_stats_link(row[0], row[i])
|
||||||
|
if len(totals_columns) > 0:
|
||||||
|
if i == 0:
|
||||||
|
total_row.append("Total")
|
||||||
|
elif i in totals_columns:
|
||||||
|
total_row.append(str(sum(row[i] for row in rows if row[i] is not None)))
|
||||||
|
else:
|
||||||
|
total_row.append("")
|
||||||
|
if len(totals_columns) > 0:
|
||||||
|
rows.insert(0, total_row)
|
||||||
|
|
||||||
|
content = make_table(title, cols, rows)
|
||||||
|
|
||||||
|
return dict(
|
||||||
|
content=content,
|
||||||
|
title=title,
|
||||||
|
)
|
||||||
|
|
||||||
|
pages = []
|
||||||
|
|
||||||
|
###
|
||||||
|
|
||||||
|
for mobile_type in ["Android", "ZulipiOS"]:
|
||||||
|
title = f"{mobile_type} usage"
|
||||||
|
|
||||||
|
query = SQL(
|
||||||
|
"""
|
||||||
|
select
|
||||||
|
realm.string_id,
|
||||||
|
up.id user_id,
|
||||||
|
client.name,
|
||||||
|
sum(count) as hits,
|
||||||
|
max(last_visit) as last_time
|
||||||
|
from zerver_useractivity ua
|
||||||
|
join zerver_client client on client.id = ua.client_id
|
||||||
|
join zerver_userprofile up on up.id = ua.user_profile_id
|
||||||
|
join zerver_realm realm on realm.id = up.realm_id
|
||||||
|
where
|
||||||
|
client.name like {mobile_type}
|
||||||
|
group by string_id, up.id, client.name
|
||||||
|
having max(last_visit) > now() - interval '2 week'
|
||||||
|
order by string_id, up.id, client.name
|
||||||
|
"""
|
||||||
|
).format(
|
||||||
|
mobile_type=Literal(mobile_type),
|
||||||
|
)
|
||||||
|
|
||||||
|
cols = [
|
||||||
|
"Realm",
|
||||||
|
"User id",
|
||||||
|
"Name",
|
||||||
|
"Hits",
|
||||||
|
"Last time",
|
||||||
|
]
|
||||||
|
|
||||||
|
pages.append(get_page(query, cols, title))
|
||||||
|
|
||||||
|
###
|
||||||
|
|
||||||
|
title = "Desktop users"
|
||||||
|
|
||||||
|
query = SQL(
|
||||||
|
"""
|
||||||
|
select
|
||||||
|
realm.string_id,
|
||||||
|
client.name,
|
||||||
|
sum(count) as hits,
|
||||||
|
max(last_visit) as last_time
|
||||||
|
from zerver_useractivity ua
|
||||||
|
join zerver_client client on client.id = ua.client_id
|
||||||
|
join zerver_userprofile up on up.id = ua.user_profile_id
|
||||||
|
join zerver_realm realm on realm.id = up.realm_id
|
||||||
|
where
|
||||||
|
client.name like 'desktop%%'
|
||||||
|
group by string_id, client.name
|
||||||
|
having max(last_visit) > now() - interval '2 week'
|
||||||
|
order by string_id, client.name
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
cols = [
|
||||||
|
"Realm",
|
||||||
|
"Client",
|
||||||
|
"Hits",
|
||||||
|
"Last time",
|
||||||
|
]
|
||||||
|
|
||||||
|
pages.append(get_page(query, cols, title))
|
||||||
|
|
||||||
|
###
|
||||||
|
|
||||||
|
title = "Integrations by realm"
|
||||||
|
|
||||||
|
query = SQL(
|
||||||
|
"""
|
||||||
|
select
|
||||||
|
realm.string_id,
|
||||||
|
case
|
||||||
|
when query like '%%external%%' then split_part(query, '/', 5)
|
||||||
|
else client.name
|
||||||
|
end client_name,
|
||||||
|
sum(count) as hits,
|
||||||
|
max(last_visit) as last_time
|
||||||
|
from zerver_useractivity ua
|
||||||
|
join zerver_client client on client.id = ua.client_id
|
||||||
|
join zerver_userprofile up on up.id = ua.user_profile_id
|
||||||
|
join zerver_realm realm on realm.id = up.realm_id
|
||||||
|
where
|
||||||
|
(query in ('send_message_backend', '/api/v1/send_message')
|
||||||
|
and client.name not in ('Android', 'ZulipiOS')
|
||||||
|
and client.name not like 'test: Zulip%%'
|
||||||
|
)
|
||||||
|
or
|
||||||
|
query like '%%external%%'
|
||||||
|
group by string_id, client_name
|
||||||
|
having max(last_visit) > now() - interval '2 week'
|
||||||
|
order by string_id, client_name
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
cols = [
|
||||||
|
"Realm",
|
||||||
|
"Client",
|
||||||
|
"Hits",
|
||||||
|
"Last time",
|
||||||
|
]
|
||||||
|
|
||||||
|
pages.append(get_page(query, cols, title))
|
||||||
|
|
||||||
|
###
|
||||||
|
|
||||||
|
title = "Integrations by client"
|
||||||
|
|
||||||
|
query = SQL(
|
||||||
|
"""
|
||||||
|
select
|
||||||
|
case
|
||||||
|
when query like '%%external%%' then split_part(query, '/', 5)
|
||||||
|
else client.name
|
||||||
|
end client_name,
|
||||||
|
realm.string_id,
|
||||||
|
sum(count) as hits,
|
||||||
|
max(last_visit) as last_time
|
||||||
|
from zerver_useractivity ua
|
||||||
|
join zerver_client client on client.id = ua.client_id
|
||||||
|
join zerver_userprofile up on up.id = ua.user_profile_id
|
||||||
|
join zerver_realm realm on realm.id = up.realm_id
|
||||||
|
where
|
||||||
|
(query in ('send_message_backend', '/api/v1/send_message')
|
||||||
|
and client.name not in ('Android', 'ZulipiOS')
|
||||||
|
and client.name not like 'test: Zulip%%'
|
||||||
|
)
|
||||||
|
or
|
||||||
|
query like '%%external%%'
|
||||||
|
group by client_name, string_id
|
||||||
|
having max(last_visit) > now() - interval '2 week'
|
||||||
|
order by client_name, string_id
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
cols = [
|
||||||
|
"Client",
|
||||||
|
"Realm",
|
||||||
|
"Hits",
|
||||||
|
"Last time",
|
||||||
|
]
|
||||||
|
|
||||||
|
pages.append(get_page(query, cols, title))
|
||||||
|
|
||||||
|
title = "Remote Zulip servers"
|
||||||
|
|
||||||
|
query = SQL(
|
||||||
|
"""
|
||||||
|
with icount as (
|
||||||
|
select
|
||||||
|
server_id,
|
||||||
|
max(value) as max_value,
|
||||||
|
max(end_time) as max_end_time
|
||||||
|
from zilencer_remoteinstallationcount
|
||||||
|
where
|
||||||
|
property='active_users:is_bot:day'
|
||||||
|
and subgroup='false'
|
||||||
|
group by server_id
|
||||||
|
),
|
||||||
|
remote_push_devices as (
|
||||||
|
select server_id, count(distinct(user_id)) as push_user_count from zilencer_remotepushdevicetoken
|
||||||
|
group by server_id
|
||||||
|
)
|
||||||
|
select
|
||||||
|
rserver.id,
|
||||||
|
rserver.hostname,
|
||||||
|
rserver.contact_email,
|
||||||
|
max_value,
|
||||||
|
push_user_count,
|
||||||
|
max_end_time
|
||||||
|
from zilencer_remotezulipserver rserver
|
||||||
|
left join icount on icount.server_id = rserver.id
|
||||||
|
left join remote_push_devices on remote_push_devices.server_id = rserver.id
|
||||||
|
order by max_value DESC NULLS LAST, push_user_count DESC NULLS LAST
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
cols = [
|
||||||
|
"ID",
|
||||||
|
"Hostname",
|
||||||
|
"Contact email",
|
||||||
|
"Analytics users",
|
||||||
|
"Mobile users",
|
||||||
|
"Last update time",
|
||||||
|
]
|
||||||
|
|
||||||
|
pages.append(get_page(query, cols, title, totals_columns=[3, 4]))
|
||||||
|
|
||||||
|
return pages
|
||||||
|
|
||||||
|
|
||||||
|
@require_server_admin
|
||||||
|
@has_request_variables
|
||||||
|
def get_installation_activity(request: HttpRequest) -> HttpResponse:
|
||||||
|
duration_content, realm_minutes = user_activity_intervals()
|
||||||
|
counts_content: str = realm_summary_table(realm_minutes)
|
||||||
|
data = [
|
||||||
|
("Counts", counts_content),
|
||||||
|
("Durations", duration_content),
|
||||||
|
]
|
||||||
|
for page in ad_hoc_queries():
|
||||||
|
data.append((page["title"], page["content"]))
|
||||||
|
|
||||||
|
title = "Activity"
|
||||||
|
|
||||||
|
return render(
|
||||||
|
request,
|
||||||
|
"analytics/activity.html",
|
||||||
|
context=dict(data=data, title=title, is_home=True),
|
||||||
|
)
|
||||||
259
analytics/views/realm_activity.py
Normal file
259
analytics/views/realm_activity.py
Normal file
@@ -0,0 +1,259 @@
|
|||||||
|
import itertools
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Any, Dict, List, Optional, Set, Tuple
|
||||||
|
|
||||||
|
from django.db import connection
|
||||||
|
from django.db.models.query import QuerySet
|
||||||
|
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
|
||||||
|
from django.shortcuts import render
|
||||||
|
from django.utils.timezone import now as timezone_now
|
||||||
|
from psycopg2.sql import SQL
|
||||||
|
|
||||||
|
from analytics.views.activity_common import (
|
||||||
|
format_date_for_activity_reports,
|
||||||
|
get_user_activity_summary,
|
||||||
|
make_table,
|
||||||
|
user_activity_link,
|
||||||
|
)
|
||||||
|
from zerver.decorator import require_server_admin
|
||||||
|
from zerver.models import Realm, UserActivity
|
||||||
|
|
||||||
|
|
||||||
|
def get_user_activity_records_for_realm(realm: str, is_bot: bool) -> QuerySet:
|
||||||
|
fields = [
|
||||||
|
"user_profile__full_name",
|
||||||
|
"user_profile__delivery_email",
|
||||||
|
"query",
|
||||||
|
"client__name",
|
||||||
|
"count",
|
||||||
|
"last_visit",
|
||||||
|
]
|
||||||
|
|
||||||
|
records = UserActivity.objects.filter(
|
||||||
|
user_profile__realm__string_id=realm,
|
||||||
|
user_profile__is_active=True,
|
||||||
|
user_profile__is_bot=is_bot,
|
||||||
|
)
|
||||||
|
records = records.order_by("user_profile__delivery_email", "-last_visit")
|
||||||
|
records = records.select_related("user_profile", "client").only(*fields)
|
||||||
|
return records
|
||||||
|
|
||||||
|
|
||||||
|
def realm_user_summary_table(
|
||||||
|
all_records: List[QuerySet], admin_emails: Set[str]
|
||||||
|
) -> Tuple[Dict[str, Any], str]:
|
||||||
|
user_records = {}
|
||||||
|
|
||||||
|
def by_email(record: QuerySet) -> str:
|
||||||
|
return record.user_profile.delivery_email
|
||||||
|
|
||||||
|
for email, records in itertools.groupby(all_records, by_email):
|
||||||
|
user_records[email] = get_user_activity_summary(list(records))
|
||||||
|
|
||||||
|
def get_last_visit(user_summary: Dict[str, Dict[str, datetime]], k: str) -> Optional[datetime]:
|
||||||
|
if k in user_summary:
|
||||||
|
return user_summary[k]["last_visit"]
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_count(user_summary: Dict[str, Dict[str, str]], k: str) -> str:
|
||||||
|
if k in user_summary:
|
||||||
|
return user_summary[k]["count"]
|
||||||
|
else:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
def is_recent(val: datetime) -> bool:
|
||||||
|
age = timezone_now() - val
|
||||||
|
return age.total_seconds() < 5 * 60
|
||||||
|
|
||||||
|
rows = []
|
||||||
|
for email, user_summary in user_records.items():
|
||||||
|
email_link = user_activity_link(email, user_summary["user_profile_id"])
|
||||||
|
sent_count = get_count(user_summary, "send")
|
||||||
|
cells = [user_summary["name"], email_link, sent_count]
|
||||||
|
row_class = ""
|
||||||
|
for field in ["use", "send", "pointer", "desktop", "ZulipiOS", "Android"]:
|
||||||
|
visit = get_last_visit(user_summary, field)
|
||||||
|
if field == "use":
|
||||||
|
if visit and is_recent(visit):
|
||||||
|
row_class += " recently_active"
|
||||||
|
if email in admin_emails:
|
||||||
|
row_class += " admin"
|
||||||
|
val = format_date_for_activity_reports(visit)
|
||||||
|
cells.append(val)
|
||||||
|
row = dict(cells=cells, row_class=row_class)
|
||||||
|
rows.append(row)
|
||||||
|
|
||||||
|
def by_used_time(row: Dict[str, Any]) -> str:
|
||||||
|
return row["cells"][3]
|
||||||
|
|
||||||
|
rows = sorted(rows, key=by_used_time, reverse=True)
|
||||||
|
|
||||||
|
cols = [
|
||||||
|
"Name",
|
||||||
|
"Email",
|
||||||
|
"Total sent",
|
||||||
|
"Heard from",
|
||||||
|
"Message sent",
|
||||||
|
"Pointer motion",
|
||||||
|
"Desktop",
|
||||||
|
"ZulipiOS",
|
||||||
|
"Android",
|
||||||
|
]
|
||||||
|
|
||||||
|
title = "Summary"
|
||||||
|
|
||||||
|
content = make_table(title, cols, rows, has_row_class=True)
|
||||||
|
return user_records, content
|
||||||
|
|
||||||
|
|
||||||
|
def realm_client_table(user_summaries: Dict[str, Dict[str, Any]]) -> str:
|
||||||
|
exclude_keys = [
|
||||||
|
"internal",
|
||||||
|
"name",
|
||||||
|
"user_profile_id",
|
||||||
|
"use",
|
||||||
|
"send",
|
||||||
|
"pointer",
|
||||||
|
"website",
|
||||||
|
"desktop",
|
||||||
|
]
|
||||||
|
|
||||||
|
rows = []
|
||||||
|
for email, user_summary in user_summaries.items():
|
||||||
|
email_link = user_activity_link(email, user_summary["user_profile_id"])
|
||||||
|
name = user_summary["name"]
|
||||||
|
for k, v in user_summary.items():
|
||||||
|
if k in exclude_keys:
|
||||||
|
continue
|
||||||
|
client = k
|
||||||
|
count = v["count"]
|
||||||
|
last_visit = v["last_visit"]
|
||||||
|
row = [
|
||||||
|
format_date_for_activity_reports(last_visit),
|
||||||
|
client,
|
||||||
|
name,
|
||||||
|
email_link,
|
||||||
|
count,
|
||||||
|
]
|
||||||
|
rows.append(row)
|
||||||
|
|
||||||
|
rows = sorted(rows, key=lambda r: r[0], reverse=True)
|
||||||
|
|
||||||
|
cols = [
|
||||||
|
"Last visit",
|
||||||
|
"Client",
|
||||||
|
"Name",
|
||||||
|
"Email",
|
||||||
|
"Count",
|
||||||
|
]
|
||||||
|
|
||||||
|
title = "Clients"
|
||||||
|
|
||||||
|
return make_table(title, cols, rows)
|
||||||
|
|
||||||
|
|
||||||
|
def sent_messages_report(realm: str) -> str:
|
||||||
|
title = "Recently sent messages for " + realm
|
||||||
|
|
||||||
|
cols = [
|
||||||
|
"Date",
|
||||||
|
"Humans",
|
||||||
|
"Bots",
|
||||||
|
]
|
||||||
|
|
||||||
|
query = SQL(
|
||||||
|
"""
|
||||||
|
select
|
||||||
|
series.day::date,
|
||||||
|
humans.cnt,
|
||||||
|
bots.cnt
|
||||||
|
from (
|
||||||
|
select generate_series(
|
||||||
|
(now()::date - interval '2 week'),
|
||||||
|
now()::date,
|
||||||
|
interval '1 day'
|
||||||
|
) as day
|
||||||
|
) as series
|
||||||
|
left join (
|
||||||
|
select
|
||||||
|
date_sent::date date_sent,
|
||||||
|
count(*) cnt
|
||||||
|
from zerver_message m
|
||||||
|
join zerver_userprofile up on up.id = m.sender_id
|
||||||
|
join zerver_realm r on r.id = up.realm_id
|
||||||
|
where
|
||||||
|
r.string_id = %s
|
||||||
|
and
|
||||||
|
(not up.is_bot)
|
||||||
|
and
|
||||||
|
date_sent > now() - interval '2 week'
|
||||||
|
group by
|
||||||
|
date_sent::date
|
||||||
|
order by
|
||||||
|
date_sent::date
|
||||||
|
) humans on
|
||||||
|
series.day = humans.date_sent
|
||||||
|
left join (
|
||||||
|
select
|
||||||
|
date_sent::date date_sent,
|
||||||
|
count(*) cnt
|
||||||
|
from zerver_message m
|
||||||
|
join zerver_userprofile up on up.id = m.sender_id
|
||||||
|
join zerver_realm r on r.id = up.realm_id
|
||||||
|
where
|
||||||
|
r.string_id = %s
|
||||||
|
and
|
||||||
|
up.is_bot
|
||||||
|
and
|
||||||
|
date_sent > now() - interval '2 week'
|
||||||
|
group by
|
||||||
|
date_sent::date
|
||||||
|
order by
|
||||||
|
date_sent::date
|
||||||
|
) bots on
|
||||||
|
series.day = bots.date_sent
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
cursor = connection.cursor()
|
||||||
|
cursor.execute(query, [realm, realm])
|
||||||
|
rows = cursor.fetchall()
|
||||||
|
cursor.close()
|
||||||
|
|
||||||
|
return make_table(title, cols, rows)
|
||||||
|
|
||||||
|
|
||||||
|
@require_server_admin
|
||||||
|
def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse:
|
||||||
|
data: List[Tuple[str, str]] = []
|
||||||
|
all_user_records: Dict[str, Any] = {}
|
||||||
|
|
||||||
|
try:
|
||||||
|
admins = Realm.objects.get(string_id=realm_str).get_human_admin_users()
|
||||||
|
except Realm.DoesNotExist:
|
||||||
|
return HttpResponseNotFound()
|
||||||
|
|
||||||
|
admin_emails = {admin.delivery_email for admin in admins}
|
||||||
|
|
||||||
|
for is_bot, page_title in [(False, "Humans"), (True, "Bots")]:
|
||||||
|
all_records = list(get_user_activity_records_for_realm(realm_str, is_bot))
|
||||||
|
|
||||||
|
user_records, content = realm_user_summary_table(all_records, admin_emails)
|
||||||
|
all_user_records.update(user_records)
|
||||||
|
|
||||||
|
data += [(page_title, content)]
|
||||||
|
|
||||||
|
page_title = "Clients"
|
||||||
|
content = realm_client_table(all_user_records)
|
||||||
|
data += [(page_title, content)]
|
||||||
|
|
||||||
|
page_title = "History"
|
||||||
|
content = sent_messages_report(realm_str)
|
||||||
|
data += [(page_title, content)]
|
||||||
|
|
||||||
|
title = realm_str
|
||||||
|
return render(
|
||||||
|
request,
|
||||||
|
"analytics/activity.html",
|
||||||
|
context=dict(data=data, realm_link=None, title=title),
|
||||||
|
)
|
||||||
@@ -1,10 +1,10 @@
|
|||||||
import logging
|
import logging
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
from datetime import datetime, timedelta, timezone
|
from datetime import datetime, timedelta, timezone
|
||||||
from typing import Any, Optional, TypeAlias, TypeVar, cast
|
from typing import Any, Dict, List, Optional, Tuple, Type, Union, cast
|
||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.db.models import QuerySet
|
from django.db.models.query import QuerySet
|
||||||
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
|
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
|
||||||
from django.shortcuts import render
|
from django.shortcuts import render
|
||||||
from django.utils import translation
|
from django.utils import translation
|
||||||
@@ -32,11 +32,9 @@ from zerver.lib.exceptions import JsonableError
|
|||||||
from zerver.lib.i18n import get_and_set_request_language, get_language_translation_data
|
from zerver.lib.i18n import get_and_set_request_language, get_language_translation_data
|
||||||
from zerver.lib.request import REQ, has_request_variables
|
from zerver.lib.request import REQ, has_request_variables
|
||||||
from zerver.lib.response import json_success
|
from zerver.lib.response import json_success
|
||||||
from zerver.lib.streams import access_stream_by_id
|
|
||||||
from zerver.lib.timestamp import convert_to_UTC
|
from zerver.lib.timestamp import convert_to_UTC
|
||||||
from zerver.lib.validator import to_non_negative_int
|
from zerver.lib.validator import to_non_negative_int
|
||||||
from zerver.models import Client, Realm, Stream, UserProfile
|
from zerver.models import Client, Realm, UserProfile, get_realm
|
||||||
from zerver.models.realms import get_realm
|
|
||||||
|
|
||||||
if settings.ZILENCER_ENABLED:
|
if settings.ZILENCER_ENABLED:
|
||||||
from zilencer.models import RemoteInstallationCount, RemoteRealmCount, RemoteZulipServer
|
from zilencer.models import RemoteInstallationCount, RemoteRealmCount, RemoteZulipServer
|
||||||
@@ -51,27 +49,17 @@ def is_analytics_ready(realm: Realm) -> bool:
|
|||||||
def render_stats(
|
def render_stats(
|
||||||
request: HttpRequest,
|
request: HttpRequest,
|
||||||
data_url_suffix: str,
|
data_url_suffix: str,
|
||||||
realm: Realm | None,
|
target_name: str,
|
||||||
*,
|
for_installation: bool = False,
|
||||||
title: str | None = None,
|
remote: bool = False,
|
||||||
analytics_ready: bool = True,
|
analytics_ready: bool = True,
|
||||||
) -> HttpResponse:
|
) -> HttpResponse:
|
||||||
assert request.user.is_authenticated
|
assert request.user.is_authenticated
|
||||||
|
page_params = dict(
|
||||||
if realm is not None:
|
data_url_suffix=data_url_suffix,
|
||||||
# Same query to get guest user count as in get_seat_count in corporate/lib/stripe.py.
|
for_installation=for_installation,
|
||||||
guest_users = UserProfile.objects.filter(
|
remote=remote,
|
||||||
realm=realm, is_active=True, is_bot=False, role=UserProfile.ROLE_GUEST
|
)
|
||||||
).count()
|
|
||||||
space_used = realm.currently_used_upload_space_bytes()
|
|
||||||
if title:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
title = realm.name or realm.string_id
|
|
||||||
else:
|
|
||||||
assert title
|
|
||||||
guest_users = None
|
|
||||||
space_used = None
|
|
||||||
|
|
||||||
request_language = get_and_set_request_language(
|
request_language = get_and_set_request_language(
|
||||||
request,
|
request,
|
||||||
@@ -79,22 +67,13 @@ def render_stats(
|
|||||||
translation.get_language_from_path(request.path_info),
|
translation.get_language_from_path(request.path_info),
|
||||||
)
|
)
|
||||||
|
|
||||||
# Sync this with stats_params_schema in base_page_params.ts.
|
page_params["translation_data"] = get_language_translation_data(request_language)
|
||||||
page_params = dict(
|
|
||||||
page_type="stats",
|
|
||||||
data_url_suffix=data_url_suffix,
|
|
||||||
upload_space_used=space_used,
|
|
||||||
guest_users=guest_users,
|
|
||||||
translation_data=get_language_translation_data(request_language),
|
|
||||||
)
|
|
||||||
|
|
||||||
return render(
|
return render(
|
||||||
request,
|
request,
|
||||||
"analytics/stats.html",
|
"analytics/stats.html",
|
||||||
context=dict(
|
context=dict(
|
||||||
target_name=title,
|
target_name=target_name, page_params=page_params, analytics_ready=analytics_ready
|
||||||
page_params=page_params,
|
|
||||||
analytics_ready=analytics_ready,
|
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -107,7 +86,9 @@ def stats(request: HttpRequest) -> HttpResponse:
|
|||||||
# TODO: Make @zulip_login_required pass the UserProfile so we
|
# TODO: Make @zulip_login_required pass the UserProfile so we
|
||||||
# can use @require_member_or_admin
|
# can use @require_member_or_admin
|
||||||
raise JsonableError(_("Not allowed for guest users"))
|
raise JsonableError(_("Not allowed for guest users"))
|
||||||
return render_stats(request, "", realm, analytics_ready=is_analytics_ready(realm))
|
return render_stats(
|
||||||
|
request, "", realm.name or realm.string_id, analytics_ready=is_analytics_ready(realm)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@require_server_admin
|
@require_server_admin
|
||||||
@@ -121,7 +102,7 @@ def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse:
|
|||||||
return render_stats(
|
return render_stats(
|
||||||
request,
|
request,
|
||||||
f"/realm/{realm_str}",
|
f"/realm/{realm_str}",
|
||||||
realm,
|
realm.name or realm.string_id,
|
||||||
analytics_ready=is_analytics_ready(realm),
|
analytics_ready=is_analytics_ready(realm),
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -136,44 +117,27 @@ def stats_for_remote_realm(
|
|||||||
return render_stats(
|
return render_stats(
|
||||||
request,
|
request,
|
||||||
f"/remote/{server.id}/realm/{remote_realm_id}",
|
f"/remote/{server.id}/realm/{remote_realm_id}",
|
||||||
None,
|
f"Realm {remote_realm_id} on server {server.hostname}",
|
||||||
title=f"Realm {remote_realm_id} on server {server.hostname}",
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@require_server_admin_api
|
@require_server_admin_api
|
||||||
@has_request_variables
|
@has_request_variables
|
||||||
def get_chart_data_for_realm(
|
def get_chart_data_for_realm(
|
||||||
request: HttpRequest, /, user_profile: UserProfile, realm_str: str, **kwargs: Any
|
request: HttpRequest, user_profile: UserProfile, realm_str: str, **kwargs: Any
|
||||||
) -> HttpResponse:
|
) -> HttpResponse:
|
||||||
try:
|
try:
|
||||||
realm = get_realm(realm_str)
|
realm = get_realm(realm_str)
|
||||||
except Realm.DoesNotExist:
|
except Realm.DoesNotExist:
|
||||||
raise JsonableError(_("Invalid organization"))
|
raise JsonableError(_("Invalid organization"))
|
||||||
|
|
||||||
return get_chart_data(request, user_profile, realm=realm, **kwargs)
|
return get_chart_data(request=request, user_profile=user_profile, realm=realm, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
@require_non_guest_user
|
|
||||||
@has_request_variables
|
|
||||||
def get_chart_data_for_stream(
|
|
||||||
request: HttpRequest, /, user_profile: UserProfile, stream_id: int
|
|
||||||
) -> HttpResponse:
|
|
||||||
stream, ignored_sub = access_stream_by_id(
|
|
||||||
user_profile,
|
|
||||||
stream_id,
|
|
||||||
require_active=True,
|
|
||||||
allow_realm_admin=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
return get_chart_data(request, user_profile, stream=stream)
|
|
||||||
|
|
||||||
|
|
||||||
@require_server_admin_api
|
@require_server_admin_api
|
||||||
@has_request_variables
|
@has_request_variables
|
||||||
def get_chart_data_for_remote_realm(
|
def get_chart_data_for_remote_realm(
|
||||||
request: HttpRequest,
|
request: HttpRequest,
|
||||||
/,
|
|
||||||
user_profile: UserProfile,
|
user_profile: UserProfile,
|
||||||
remote_server_id: int,
|
remote_server_id: int,
|
||||||
remote_realm_id: int,
|
remote_realm_id: int,
|
||||||
@@ -182,8 +146,8 @@ def get_chart_data_for_remote_realm(
|
|||||||
assert settings.ZILENCER_ENABLED
|
assert settings.ZILENCER_ENABLED
|
||||||
server = RemoteZulipServer.objects.get(id=remote_server_id)
|
server = RemoteZulipServer.objects.get(id=remote_server_id)
|
||||||
return get_chart_data(
|
return get_chart_data(
|
||||||
request,
|
request=request,
|
||||||
user_profile,
|
user_profile=user_profile,
|
||||||
server=server,
|
server=server,
|
||||||
remote=True,
|
remote=True,
|
||||||
remote_realm_id=int(remote_realm_id),
|
remote_realm_id=int(remote_realm_id),
|
||||||
@@ -193,8 +157,7 @@ def get_chart_data_for_remote_realm(
|
|||||||
|
|
||||||
@require_server_admin
|
@require_server_admin
|
||||||
def stats_for_installation(request: HttpRequest) -> HttpResponse:
|
def stats_for_installation(request: HttpRequest) -> HttpResponse:
|
||||||
assert request.user.is_authenticated
|
return render_stats(request, "/installation", "installation", True)
|
||||||
return render_stats(request, "/installation", None, title="installation")
|
|
||||||
|
|
||||||
|
|
||||||
@require_server_admin
|
@require_server_admin
|
||||||
@@ -204,24 +167,26 @@ def stats_for_remote_installation(request: HttpRequest, remote_server_id: int) -
|
|||||||
return render_stats(
|
return render_stats(
|
||||||
request,
|
request,
|
||||||
f"/remote/{server.id}/installation",
|
f"/remote/{server.id}/installation",
|
||||||
None,
|
f"remote installation {server.hostname}",
|
||||||
title=f"remote installation {server.hostname}",
|
True,
|
||||||
|
True,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@require_server_admin_api
|
@require_server_admin_api
|
||||||
@has_request_variables
|
@has_request_variables
|
||||||
def get_chart_data_for_installation(
|
def get_chart_data_for_installation(
|
||||||
request: HttpRequest, /, user_profile: UserProfile, chart_name: str = REQ(), **kwargs: Any
|
request: HttpRequest, user_profile: UserProfile, chart_name: str = REQ(), **kwargs: Any
|
||||||
) -> HttpResponse:
|
) -> HttpResponse:
|
||||||
return get_chart_data(request, user_profile, for_installation=True, **kwargs)
|
return get_chart_data(
|
||||||
|
request=request, user_profile=user_profile, for_installation=True, **kwargs
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@require_server_admin_api
|
@require_server_admin_api
|
||||||
@has_request_variables
|
@has_request_variables
|
||||||
def get_chart_data_for_remote_installation(
|
def get_chart_data_for_remote_installation(
|
||||||
request: HttpRequest,
|
request: HttpRequest,
|
||||||
/,
|
|
||||||
user_profile: UserProfile,
|
user_profile: UserProfile,
|
||||||
remote_server_id: int,
|
remote_server_id: int,
|
||||||
chart_name: str = REQ(),
|
chart_name: str = REQ(),
|
||||||
@@ -230,8 +195,8 @@ def get_chart_data_for_remote_installation(
|
|||||||
assert settings.ZILENCER_ENABLED
|
assert settings.ZILENCER_ENABLED
|
||||||
server = RemoteZulipServer.objects.get(id=remote_server_id)
|
server = RemoteZulipServer.objects.get(id=remote_server_id)
|
||||||
return get_chart_data(
|
return get_chart_data(
|
||||||
request,
|
request=request,
|
||||||
user_profile,
|
user_profile=user_profile,
|
||||||
for_installation=True,
|
for_installation=True,
|
||||||
remote=True,
|
remote=True,
|
||||||
server=server,
|
server=server,
|
||||||
@@ -245,25 +210,21 @@ def get_chart_data(
|
|||||||
request: HttpRequest,
|
request: HttpRequest,
|
||||||
user_profile: UserProfile,
|
user_profile: UserProfile,
|
||||||
chart_name: str = REQ(),
|
chart_name: str = REQ(),
|
||||||
min_length: int | None = REQ(converter=to_non_negative_int, default=None),
|
min_length: Optional[int] = REQ(converter=to_non_negative_int, default=None),
|
||||||
start: datetime | None = REQ(converter=to_utc_datetime, default=None),
|
start: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
|
||||||
end: datetime | None = REQ(converter=to_utc_datetime, default=None),
|
end: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
|
||||||
# These last several parameters are only used by functions
|
realm: Optional[Realm] = None,
|
||||||
# wrapping get_chart_data; the callers are responsible for
|
|
||||||
# parsing/validation/authorization for them.
|
|
||||||
realm: Realm | None = None,
|
|
||||||
for_installation: bool = False,
|
for_installation: bool = False,
|
||||||
remote: bool = False,
|
remote: bool = False,
|
||||||
remote_realm_id: int | None = None,
|
remote_realm_id: Optional[int] = None,
|
||||||
server: Optional["RemoteZulipServer"] = None,
|
server: Optional["RemoteZulipServer"] = None,
|
||||||
stream: Stream | None = None,
|
|
||||||
) -> HttpResponse:
|
) -> HttpResponse:
|
||||||
TableType: TypeAlias = (
|
TableType = Union[
|
||||||
type["RemoteInstallationCount"]
|
Type["RemoteInstallationCount"],
|
||||||
| type[InstallationCount]
|
Type[InstallationCount],
|
||||||
| type["RemoteRealmCount"]
|
Type["RemoteRealmCount"],
|
||||||
| type[RealmCount]
|
Type[RealmCount],
|
||||||
)
|
]
|
||||||
if for_installation:
|
if for_installation:
|
||||||
if remote:
|
if remote:
|
||||||
assert settings.ZILENCER_ENABLED
|
assert settings.ZILENCER_ENABLED
|
||||||
@@ -280,9 +241,7 @@ def get_chart_data(
|
|||||||
else:
|
else:
|
||||||
aggregate_table = RealmCount
|
aggregate_table = RealmCount
|
||||||
|
|
||||||
tables: (
|
tables: Union[Tuple[TableType], Tuple[TableType, Type[UserCount]]]
|
||||||
tuple[TableType] | tuple[TableType, type[UserCount]] | tuple[TableType, type[StreamCount]]
|
|
||||||
)
|
|
||||||
|
|
||||||
if chart_name == "number_of_humans":
|
if chart_name == "number_of_humans":
|
||||||
stats = [
|
stats = [
|
||||||
@@ -291,7 +250,7 @@ def get_chart_data(
|
|||||||
COUNT_STATS["active_users_audit:is_bot:day"],
|
COUNT_STATS["active_users_audit:is_bot:day"],
|
||||||
]
|
]
|
||||||
tables = (aggregate_table,)
|
tables = (aggregate_table,)
|
||||||
subgroup_to_label: dict[CountStat, dict[str | None, str]] = {
|
subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = {
|
||||||
stats[0]: {None: "_1day"},
|
stats[0]: {None: "_1day"},
|
||||||
stats[1]: {None: "_15day"},
|
stats[1]: {None: "_15day"},
|
||||||
stats[2]: {"false": "all_time"},
|
stats[2]: {"false": "all_time"},
|
||||||
@@ -309,10 +268,10 @@ def get_chart_data(
|
|||||||
tables = (aggregate_table, UserCount)
|
tables = (aggregate_table, UserCount)
|
||||||
subgroup_to_label = {
|
subgroup_to_label = {
|
||||||
stats[0]: {
|
stats[0]: {
|
||||||
"public_stream": _("Public channels"),
|
"public_stream": _("Public streams"),
|
||||||
"private_stream": _("Private channels"),
|
"private_stream": _("Private streams"),
|
||||||
"private_message": _("Direct messages"),
|
"private_message": _("Private messages"),
|
||||||
"huddle_message": _("Group direct messages"),
|
"huddle_message": _("Group private messages"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
labels_sort_function = lambda data: sort_by_totals(data["everyone"])
|
labels_sort_function = lambda data: sort_by_totals(data["everyone"])
|
||||||
@@ -332,18 +291,8 @@ def get_chart_data(
|
|||||||
subgroup_to_label = {stats[0]: {None: "read"}}
|
subgroup_to_label = {stats[0]: {None: "read"}}
|
||||||
labels_sort_function = None
|
labels_sort_function = None
|
||||||
include_empty_subgroups = True
|
include_empty_subgroups = True
|
||||||
elif chart_name == "messages_sent_by_stream":
|
|
||||||
if stream is None:
|
|
||||||
raise JsonableError(
|
|
||||||
_("Missing channel for chart: {chart_name}").format(chart_name=chart_name)
|
|
||||||
)
|
|
||||||
stats = [COUNT_STATS["messages_in_stream:is_bot:day"]]
|
|
||||||
tables = (aggregate_table, StreamCount)
|
|
||||||
subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
|
|
||||||
labels_sort_function = None
|
|
||||||
include_empty_subgroups = True
|
|
||||||
else:
|
else:
|
||||||
raise JsonableError(_("Unknown chart name: {chart_name}").format(chart_name=chart_name))
|
raise JsonableError(_("Unknown chart name: {}").format(chart_name))
|
||||||
|
|
||||||
# Most likely someone using our API endpoint. The /stats page does not
|
# Most likely someone using our API endpoint. The /stats page does not
|
||||||
# pass a start or end in its requests.
|
# pass a start or end in its requests.
|
||||||
@@ -371,20 +320,18 @@ def get_chart_data(
|
|||||||
assert server is not None
|
assert server is not None
|
||||||
assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
|
assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
|
||||||
aggregate_table_remote = cast(
|
aggregate_table_remote = cast(
|
||||||
type[RemoteInstallationCount] | type[RemoteRealmCount], aggregate_table
|
Union[Type[RemoteInstallationCount], Type[RemoteRealmCount]], aggregate_table
|
||||||
) # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
|
) # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
|
||||||
if not aggregate_table_remote.objects.filter(server=server).exists():
|
if not aggregate_table_remote.objects.filter(server=server).exists():
|
||||||
raise JsonableError(
|
raise JsonableError(
|
||||||
_("No analytics data available. Please contact your server administrator.")
|
_("No analytics data available. Please contact your server administrator.")
|
||||||
)
|
)
|
||||||
if start is None:
|
if start is None:
|
||||||
first = (
|
first = aggregate_table_remote.objects.filter(server=server).first()
|
||||||
aggregate_table_remote.objects.filter(server=server).order_by("remote_id").first()
|
|
||||||
)
|
|
||||||
assert first is not None
|
assert first is not None
|
||||||
start = first.end_time
|
start = first.end_time
|
||||||
if end is None:
|
if end is None:
|
||||||
last = aggregate_table_remote.objects.filter(server=server).order_by("remote_id").last()
|
last = aggregate_table_remote.objects.filter(server=server).last()
|
||||||
assert last is not None
|
assert last is not None
|
||||||
end = last.end_time
|
end = last.end_time
|
||||||
else:
|
else:
|
||||||
@@ -417,7 +364,7 @@ def get_chart_data(
|
|||||||
|
|
||||||
assert len({stat.frequency for stat in stats}) == 1
|
assert len({stat.frequency for stat in stats}) == 1
|
||||||
end_times = time_range(start, end, stats[0].frequency, min_length)
|
end_times = time_range(start, end, stats[0].frequency, min_length)
|
||||||
data: dict[str, Any] = {
|
data: Dict[str, Any] = {
|
||||||
"end_times": [int(end_time.timestamp()) for end_time in end_times],
|
"end_times": [int(end_time.timestamp()) for end_time in end_times],
|
||||||
"frequency": stats[0].frequency,
|
"frequency": stats[0].frequency,
|
||||||
}
|
}
|
||||||
@@ -426,7 +373,6 @@ def get_chart_data(
|
|||||||
InstallationCount: "everyone",
|
InstallationCount: "everyone",
|
||||||
RealmCount: "everyone",
|
RealmCount: "everyone",
|
||||||
UserCount: "user",
|
UserCount: "user",
|
||||||
StreamCount: "everyone",
|
|
||||||
}
|
}
|
||||||
if settings.ZILENCER_ENABLED:
|
if settings.ZILENCER_ENABLED:
|
||||||
aggregation_level[RemoteInstallationCount] = "everyone"
|
aggregation_level[RemoteInstallationCount] = "everyone"
|
||||||
@@ -438,9 +384,6 @@ def get_chart_data(
|
|||||||
RealmCount: realm.id,
|
RealmCount: realm.id,
|
||||||
UserCount: user_profile.id,
|
UserCount: user_profile.id,
|
||||||
}
|
}
|
||||||
if stream is not None:
|
|
||||||
id_value[StreamCount] = stream.id
|
|
||||||
|
|
||||||
if settings.ZILENCER_ENABLED:
|
if settings.ZILENCER_ENABLED:
|
||||||
if server is not None:
|
if server is not None:
|
||||||
id_value[RemoteInstallationCount] = server.id
|
id_value[RemoteInstallationCount] = server.id
|
||||||
@@ -470,8 +413,9 @@ def get_chart_data(
|
|||||||
return json_success(request, data=data)
|
return json_success(request, data=data)
|
||||||
|
|
||||||
|
|
||||||
def sort_by_totals(value_arrays: dict[str, list[int]]) -> list[str]:
|
def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
|
||||||
totals = sorted(((sum(values), label) for label, values in value_arrays.items()), reverse=True)
|
totals = [(sum(values), label) for label, values in value_arrays.items()]
|
||||||
|
totals.sort(reverse=True)
|
||||||
return [label for total, label in totals]
|
return [label for total, label in totals]
|
||||||
|
|
||||||
|
|
||||||
@@ -481,52 +425,47 @@ def sort_by_totals(value_arrays: dict[str, list[int]]) -> list[str]:
|
|||||||
# understanding the realm's traffic and the user's traffic. This function
|
# understanding the realm's traffic and the user's traffic. This function
|
||||||
# tries to rank the clients so that taking the first N elements of the
|
# tries to rank the clients so that taking the first N elements of the
|
||||||
# sorted list has a reasonable chance of doing so.
|
# sorted list has a reasonable chance of doing so.
|
||||||
def sort_client_labels(data: dict[str, dict[str, list[int]]]) -> list[str]:
|
def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
|
||||||
realm_order = sort_by_totals(data["everyone"])
|
realm_order = sort_by_totals(data["everyone"])
|
||||||
user_order = sort_by_totals(data["user"])
|
user_order = sort_by_totals(data["user"])
|
||||||
label_sort_values: dict[str, float] = {label: i for i, label in enumerate(realm_order)}
|
label_sort_values: Dict[str, float] = {}
|
||||||
|
for i, label in enumerate(realm_order):
|
||||||
|
label_sort_values[label] = i
|
||||||
for i, label in enumerate(user_order):
|
for i, label in enumerate(user_order):
|
||||||
label_sort_values[label] = min(i - 0.1, label_sort_values.get(label, i))
|
label_sort_values[label] = min(i - 0.1, label_sort_values.get(label, i))
|
||||||
return [label for label, sort_value in sorted(label_sort_values.items(), key=lambda x: x[1])]
|
return [label for label, sort_value in sorted(label_sort_values.items(), key=lambda x: x[1])]
|
||||||
|
|
||||||
|
|
||||||
CountT = TypeVar("CountT", bound=BaseCount)
|
def table_filtered_to_id(table: Type[BaseCount], key_id: int) -> QuerySet:
|
||||||
|
|
||||||
|
|
||||||
def table_filtered_to_id(table: type[CountT], key_id: int) -> QuerySet[CountT]:
|
|
||||||
if table == RealmCount:
|
if table == RealmCount:
|
||||||
return table._default_manager.filter(realm_id=key_id)
|
return RealmCount.objects.filter(realm_id=key_id)
|
||||||
elif table == UserCount:
|
elif table == UserCount:
|
||||||
return table._default_manager.filter(user_id=key_id)
|
return UserCount.objects.filter(user_id=key_id)
|
||||||
elif table == StreamCount:
|
elif table == StreamCount:
|
||||||
return table._default_manager.filter(stream_id=key_id)
|
return StreamCount.objects.filter(stream_id=key_id)
|
||||||
elif table == InstallationCount:
|
elif table == InstallationCount:
|
||||||
return table._default_manager.all()
|
return InstallationCount.objects.all()
|
||||||
elif settings.ZILENCER_ENABLED and table == RemoteInstallationCount:
|
elif settings.ZILENCER_ENABLED and table == RemoteInstallationCount:
|
||||||
return table._default_manager.filter(server_id=key_id)
|
return RemoteInstallationCount.objects.filter(server_id=key_id)
|
||||||
elif settings.ZILENCER_ENABLED and table == RemoteRealmCount:
|
elif settings.ZILENCER_ENABLED and table == RemoteRealmCount:
|
||||||
return table._default_manager.filter(realm_id=key_id)
|
return RemoteRealmCount.objects.filter(realm_id=key_id)
|
||||||
else:
|
else:
|
||||||
raise AssertionError(f"Unknown table: {table}")
|
raise AssertionError(f"Unknown table: {table}")
|
||||||
|
|
||||||
|
|
||||||
def client_label_map(name: str) -> str:
|
def client_label_map(name: str) -> str:
|
||||||
if name == "website":
|
if name == "website":
|
||||||
return "Web app"
|
return "Website"
|
||||||
if name.startswith("desktop app"):
|
if name.startswith("desktop app"):
|
||||||
return "Old desktop app"
|
return "Old desktop app"
|
||||||
if name == "ZulipElectron":
|
if name == "ZulipElectron":
|
||||||
return "Desktop app"
|
return "Desktop app"
|
||||||
if name == "ZulipTerminal":
|
|
||||||
return "Terminal app"
|
|
||||||
if name == "ZulipAndroid":
|
if name == "ZulipAndroid":
|
||||||
return "Old Android app"
|
return "Old Android app"
|
||||||
if name == "ZulipiOS":
|
if name == "ZulipiOS":
|
||||||
return "Old iOS app"
|
return "Old iOS app"
|
||||||
if name == "ZulipMobile":
|
if name == "ZulipMobile":
|
||||||
return "Mobile app (React Native)"
|
return "Mobile app"
|
||||||
if name in ["ZulipFlutter", "ZulipMobile/flutter"]:
|
|
||||||
return "Mobile app beta (Flutter)"
|
|
||||||
if name in ["ZulipPython", "API: Python"]:
|
if name in ["ZulipPython", "API: Python"]:
|
||||||
return "Python API"
|
return "Python API"
|
||||||
if name.startswith("Zulip") and name.endswith("Webhook"):
|
if name.startswith("Zulip") and name.endswith("Webhook"):
|
||||||
@@ -534,32 +473,32 @@ def client_label_map(name: str) -> str:
|
|||||||
return name
|
return name
|
||||||
|
|
||||||
|
|
||||||
def rewrite_client_arrays(value_arrays: dict[str, list[int]]) -> dict[str, list[int]]:
|
def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[int]]:
|
||||||
mapped_arrays: dict[str, list[int]] = {}
|
mapped_arrays: Dict[str, List[int]] = {}
|
||||||
for label, array in value_arrays.items():
|
for label, array in value_arrays.items():
|
||||||
mapped_label = client_label_map(label)
|
mapped_label = client_label_map(label)
|
||||||
if mapped_label in mapped_arrays:
|
if mapped_label in mapped_arrays:
|
||||||
for i in range(len(array)):
|
for i in range(0, len(array)):
|
||||||
mapped_arrays[mapped_label][i] += value_arrays[label][i]
|
mapped_arrays[mapped_label][i] += value_arrays[label][i]
|
||||||
else:
|
else:
|
||||||
mapped_arrays[mapped_label] = [value_arrays[label][i] for i in range(len(array))]
|
mapped_arrays[mapped_label] = [value_arrays[label][i] for i in range(0, len(array))]
|
||||||
return mapped_arrays
|
return mapped_arrays
|
||||||
|
|
||||||
|
|
||||||
def get_time_series_by_subgroup(
|
def get_time_series_by_subgroup(
|
||||||
stat: CountStat,
|
stat: CountStat,
|
||||||
table: type[BaseCount],
|
table: Type[BaseCount],
|
||||||
key_id: int,
|
key_id: int,
|
||||||
end_times: list[datetime],
|
end_times: List[datetime],
|
||||||
subgroup_to_label: dict[str | None, str],
|
subgroup_to_label: Dict[Optional[str], str],
|
||||||
include_empty_subgroups: bool,
|
include_empty_subgroups: bool,
|
||||||
) -> dict[str, list[int]]:
|
) -> Dict[str, List[int]]:
|
||||||
queryset = (
|
queryset = (
|
||||||
table_filtered_to_id(table, key_id)
|
table_filtered_to_id(table, key_id)
|
||||||
.filter(property=stat.property)
|
.filter(property=stat.property)
|
||||||
.values_list("subgroup", "end_time", "value")
|
.values_list("subgroup", "end_time", "value")
|
||||||
)
|
)
|
||||||
value_dicts: dict[str | None, dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
|
value_dicts: Dict[Optional[str], Dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
|
||||||
for subgroup, end_time, value in queryset:
|
for subgroup, end_time, value in queryset:
|
||||||
value_dicts[subgroup][end_time] = value
|
value_dicts[subgroup][end_time] = value
|
||||||
value_arrays = {}
|
value_arrays = {}
|
||||||
|
|||||||
343
analytics/views/support.py
Normal file
343
analytics/views/support.py
Normal file
@@ -0,0 +1,343 @@
|
|||||||
|
import urllib
|
||||||
|
from datetime import timedelta
|
||||||
|
from decimal import Decimal
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.core.exceptions import ValidationError
|
||||||
|
from django.core.validators import URLValidator
|
||||||
|
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
|
||||||
|
from django.shortcuts import render
|
||||||
|
from django.urls import reverse
|
||||||
|
from django.utils.timesince import timesince
|
||||||
|
from django.utils.timezone import now as timezone_now
|
||||||
|
from django.utils.translation import gettext as _
|
||||||
|
|
||||||
|
from confirmation.models import Confirmation, confirmation_url
|
||||||
|
from confirmation.settings import STATUS_ACTIVE
|
||||||
|
from zerver.actions.create_realm import do_change_realm_subdomain
|
||||||
|
from zerver.actions.realm_settings import (
|
||||||
|
do_change_realm_org_type,
|
||||||
|
do_change_realm_plan_type,
|
||||||
|
do_deactivate_realm,
|
||||||
|
do_scrub_realm,
|
||||||
|
do_send_realm_reactivation_email,
|
||||||
|
)
|
||||||
|
from zerver.decorator import require_server_admin
|
||||||
|
from zerver.forms import check_subdomain_available
|
||||||
|
from zerver.lib.exceptions import JsonableError
|
||||||
|
from zerver.lib.realm_icon import realm_icon_url
|
||||||
|
from zerver.lib.request import REQ, has_request_variables
|
||||||
|
from zerver.lib.subdomains import get_subdomain_from_hostname
|
||||||
|
from zerver.lib.validator import check_bool, check_string_in, to_decimal, to_non_negative_int
|
||||||
|
from zerver.models import (
|
||||||
|
MultiuseInvite,
|
||||||
|
PreregistrationUser,
|
||||||
|
Realm,
|
||||||
|
UserProfile,
|
||||||
|
get_org_type_display_name,
|
||||||
|
get_realm,
|
||||||
|
)
|
||||||
|
from zerver.views.invite import get_invitee_emails_set
|
||||||
|
|
||||||
|
if settings.BILLING_ENABLED:
|
||||||
|
from corporate.lib.stripe import approve_sponsorship as do_approve_sponsorship
|
||||||
|
from corporate.lib.stripe import (
|
||||||
|
attach_discount_to_realm,
|
||||||
|
downgrade_at_the_end_of_billing_cycle,
|
||||||
|
downgrade_now_without_creating_additional_invoices,
|
||||||
|
get_discount_for_realm,
|
||||||
|
get_latest_seat_count,
|
||||||
|
make_end_of_cycle_updates_if_needed,
|
||||||
|
update_billing_method_of_current_plan,
|
||||||
|
update_sponsorship_status,
|
||||||
|
void_all_open_invoices,
|
||||||
|
)
|
||||||
|
from corporate.models import get_current_plan_by_realm, get_customer_by_realm
|
||||||
|
|
||||||
|
|
||||||
|
def get_plan_name(plan_type: int) -> str:
|
||||||
|
return {
|
||||||
|
Realm.PLAN_TYPE_SELF_HOSTED: "self-hosted",
|
||||||
|
Realm.PLAN_TYPE_LIMITED: "limited",
|
||||||
|
Realm.PLAN_TYPE_STANDARD: "standard",
|
||||||
|
Realm.PLAN_TYPE_STANDARD_FREE: "open source",
|
||||||
|
Realm.PLAN_TYPE_PLUS: "plus",
|
||||||
|
}[plan_type]
|
||||||
|
|
||||||
|
|
||||||
|
def get_confirmations(
|
||||||
|
types: List[int], object_ids: List[int], hostname: Optional[str] = None
|
||||||
|
) -> List[Dict[str, Any]]:
|
||||||
|
lowest_datetime = timezone_now() - timedelta(days=30)
|
||||||
|
confirmations = Confirmation.objects.filter(
|
||||||
|
type__in=types, object_id__in=object_ids, date_sent__gte=lowest_datetime
|
||||||
|
)
|
||||||
|
confirmation_dicts = []
|
||||||
|
for confirmation in confirmations:
|
||||||
|
realm = confirmation.realm
|
||||||
|
content_object = confirmation.content_object
|
||||||
|
|
||||||
|
type = confirmation.type
|
||||||
|
expiry_date = confirmation.expiry_date
|
||||||
|
|
||||||
|
assert content_object is not None
|
||||||
|
if hasattr(content_object, "status"):
|
||||||
|
if content_object.status == STATUS_ACTIVE:
|
||||||
|
link_status = "Link has been clicked"
|
||||||
|
else:
|
||||||
|
link_status = "Link has never been clicked"
|
||||||
|
else:
|
||||||
|
link_status = ""
|
||||||
|
|
||||||
|
now = timezone_now()
|
||||||
|
if expiry_date is None:
|
||||||
|
expires_in = "Never"
|
||||||
|
elif now < expiry_date:
|
||||||
|
expires_in = timesince(now, expiry_date)
|
||||||
|
else:
|
||||||
|
expires_in = "Expired"
|
||||||
|
|
||||||
|
url = confirmation_url(confirmation.confirmation_key, realm, type)
|
||||||
|
confirmation_dicts.append(
|
||||||
|
{
|
||||||
|
"object": confirmation.content_object,
|
||||||
|
"url": url,
|
||||||
|
"type": type,
|
||||||
|
"link_status": link_status,
|
||||||
|
"expires_in": expires_in,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return confirmation_dicts
|
||||||
|
|
||||||
|
|
||||||
|
VALID_DOWNGRADE_METHODS = [
|
||||||
|
"downgrade_at_billing_cycle_end",
|
||||||
|
"downgrade_now_without_additional_licenses",
|
||||||
|
"downgrade_now_void_open_invoices",
|
||||||
|
]
|
||||||
|
|
||||||
|
VALID_STATUS_VALUES = [
|
||||||
|
"active",
|
||||||
|
"deactivated",
|
||||||
|
]
|
||||||
|
|
||||||
|
VALID_BILLING_METHODS = [
|
||||||
|
"send_invoice",
|
||||||
|
"charge_automatically",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
@require_server_admin
|
||||||
|
@has_request_variables
|
||||||
|
def support(
|
||||||
|
request: HttpRequest,
|
||||||
|
realm_id: Optional[int] = REQ(default=None, converter=to_non_negative_int),
|
||||||
|
plan_type: Optional[int] = REQ(default=None, converter=to_non_negative_int),
|
||||||
|
discount: Optional[Decimal] = REQ(default=None, converter=to_decimal),
|
||||||
|
new_subdomain: Optional[str] = REQ(default=None),
|
||||||
|
status: Optional[str] = REQ(default=None, str_validator=check_string_in(VALID_STATUS_VALUES)),
|
||||||
|
billing_method: Optional[str] = REQ(
|
||||||
|
default=None, str_validator=check_string_in(VALID_BILLING_METHODS)
|
||||||
|
),
|
||||||
|
sponsorship_pending: Optional[bool] = REQ(default=None, json_validator=check_bool),
|
||||||
|
approve_sponsorship: Optional[bool] = REQ(default=None, json_validator=check_bool),
|
||||||
|
downgrade_method: Optional[str] = REQ(
|
||||||
|
default=None, str_validator=check_string_in(VALID_DOWNGRADE_METHODS)
|
||||||
|
),
|
||||||
|
scrub_realm: Optional[bool] = REQ(default=None, json_validator=check_bool),
|
||||||
|
query: Optional[str] = REQ("q", default=None),
|
||||||
|
org_type: Optional[int] = REQ(default=None, converter=to_non_negative_int),
|
||||||
|
) -> HttpResponse:
|
||||||
|
context: Dict[str, Any] = {}
|
||||||
|
|
||||||
|
if "success_message" in request.session:
|
||||||
|
context["success_message"] = request.session["success_message"]
|
||||||
|
del request.session["success_message"]
|
||||||
|
|
||||||
|
if settings.BILLING_ENABLED and request.method == "POST":
|
||||||
|
# We check that request.POST only has two keys in it: The
|
||||||
|
# realm_id and a field to change.
|
||||||
|
keys = set(request.POST.keys())
|
||||||
|
if "csrfmiddlewaretoken" in keys:
|
||||||
|
keys.remove("csrfmiddlewaretoken")
|
||||||
|
if len(keys) != 2:
|
||||||
|
raise JsonableError(_("Invalid parameters"))
|
||||||
|
|
||||||
|
realm = Realm.objects.get(id=realm_id)
|
||||||
|
|
||||||
|
acting_user = request.user
|
||||||
|
assert isinstance(acting_user, UserProfile)
|
||||||
|
if plan_type is not None:
|
||||||
|
current_plan_type = realm.plan_type
|
||||||
|
do_change_realm_plan_type(realm, plan_type, acting_user=acting_user)
|
||||||
|
msg = f"Plan type of {realm.string_id} changed from {get_plan_name(current_plan_type)} to {get_plan_name(plan_type)} "
|
||||||
|
context["success_message"] = msg
|
||||||
|
elif org_type is not None:
|
||||||
|
current_realm_type = realm.org_type
|
||||||
|
do_change_realm_org_type(realm, org_type, acting_user=acting_user)
|
||||||
|
msg = f"Org type of {realm.string_id} changed from {get_org_type_display_name(current_realm_type)} to {get_org_type_display_name(org_type)} "
|
||||||
|
context["success_message"] = msg
|
||||||
|
elif discount is not None:
|
||||||
|
current_discount = get_discount_for_realm(realm) or 0
|
||||||
|
attach_discount_to_realm(realm, discount, acting_user=acting_user)
|
||||||
|
context[
|
||||||
|
"success_message"
|
||||||
|
] = f"Discount of {realm.string_id} changed to {discount}% from {current_discount}%."
|
||||||
|
elif new_subdomain is not None:
|
||||||
|
old_subdomain = realm.string_id
|
||||||
|
try:
|
||||||
|
check_subdomain_available(new_subdomain)
|
||||||
|
except ValidationError as error:
|
||||||
|
context["error_message"] = error.message
|
||||||
|
else:
|
||||||
|
do_change_realm_subdomain(realm, new_subdomain, acting_user=acting_user)
|
||||||
|
request.session[
|
||||||
|
"success_message"
|
||||||
|
] = f"Subdomain changed from {old_subdomain} to {new_subdomain}"
|
||||||
|
return HttpResponseRedirect(
|
||||||
|
reverse("support") + "?" + urlencode({"q": new_subdomain})
|
||||||
|
)
|
||||||
|
elif status is not None:
|
||||||
|
if status == "active":
|
||||||
|
do_send_realm_reactivation_email(realm, acting_user=acting_user)
|
||||||
|
context[
|
||||||
|
"success_message"
|
||||||
|
] = f"Realm reactivation email sent to admins of {realm.string_id}."
|
||||||
|
elif status == "deactivated":
|
||||||
|
do_deactivate_realm(realm, acting_user=acting_user)
|
||||||
|
context["success_message"] = f"{realm.string_id} deactivated."
|
||||||
|
elif billing_method is not None:
|
||||||
|
if billing_method == "send_invoice":
|
||||||
|
update_billing_method_of_current_plan(
|
||||||
|
realm, charge_automatically=False, acting_user=acting_user
|
||||||
|
)
|
||||||
|
context[
|
||||||
|
"success_message"
|
||||||
|
] = f"Billing method of {realm.string_id} updated to pay by invoice."
|
||||||
|
elif billing_method == "charge_automatically":
|
||||||
|
update_billing_method_of_current_plan(
|
||||||
|
realm, charge_automatically=True, acting_user=acting_user
|
||||||
|
)
|
||||||
|
context[
|
||||||
|
"success_message"
|
||||||
|
] = f"Billing method of {realm.string_id} updated to charge automatically."
|
||||||
|
elif sponsorship_pending is not None:
|
||||||
|
if sponsorship_pending:
|
||||||
|
update_sponsorship_status(realm, True, acting_user=acting_user)
|
||||||
|
context["success_message"] = f"{realm.string_id} marked as pending sponsorship."
|
||||||
|
else:
|
||||||
|
update_sponsorship_status(realm, False, acting_user=acting_user)
|
||||||
|
context["success_message"] = f"{realm.string_id} is no longer pending sponsorship."
|
||||||
|
elif approve_sponsorship:
|
||||||
|
do_approve_sponsorship(realm, acting_user=acting_user)
|
||||||
|
context["success_message"] = f"Sponsorship approved for {realm.string_id}"
|
||||||
|
elif downgrade_method is not None:
|
||||||
|
if downgrade_method == "downgrade_at_billing_cycle_end":
|
||||||
|
downgrade_at_the_end_of_billing_cycle(realm)
|
||||||
|
context[
|
||||||
|
"success_message"
|
||||||
|
] = f"{realm.string_id} marked for downgrade at the end of billing cycle"
|
||||||
|
elif downgrade_method == "downgrade_now_without_additional_licenses":
|
||||||
|
downgrade_now_without_creating_additional_invoices(realm)
|
||||||
|
context[
|
||||||
|
"success_message"
|
||||||
|
] = f"{realm.string_id} downgraded without creating additional invoices"
|
||||||
|
elif downgrade_method == "downgrade_now_void_open_invoices":
|
||||||
|
downgrade_now_without_creating_additional_invoices(realm)
|
||||||
|
voided_invoices_count = void_all_open_invoices(realm)
|
||||||
|
context[
|
||||||
|
"success_message"
|
||||||
|
] = f"{realm.string_id} downgraded and voided {voided_invoices_count} open invoices"
|
||||||
|
elif scrub_realm:
|
||||||
|
do_scrub_realm(realm, acting_user=acting_user)
|
||||||
|
context["success_message"] = f"{realm.string_id} scrubbed."
|
||||||
|
|
||||||
|
if query:
|
||||||
|
key_words = get_invitee_emails_set(query)
|
||||||
|
|
||||||
|
users = set(UserProfile.objects.filter(delivery_email__in=key_words))
|
||||||
|
realms = set(Realm.objects.filter(string_id__in=key_words))
|
||||||
|
|
||||||
|
for key_word in key_words:
|
||||||
|
try:
|
||||||
|
URLValidator()(key_word)
|
||||||
|
parse_result = urllib.parse.urlparse(key_word)
|
||||||
|
hostname = parse_result.hostname
|
||||||
|
assert hostname is not None
|
||||||
|
if parse_result.port:
|
||||||
|
hostname = f"{hostname}:{parse_result.port}"
|
||||||
|
subdomain = get_subdomain_from_hostname(hostname)
|
||||||
|
try:
|
||||||
|
realms.add(get_realm(subdomain))
|
||||||
|
except Realm.DoesNotExist:
|
||||||
|
pass
|
||||||
|
except ValidationError:
|
||||||
|
users.update(UserProfile.objects.filter(full_name__iexact=key_word))
|
||||||
|
|
||||||
|
for realm in realms:
|
||||||
|
realm.customer = get_customer_by_realm(realm)
|
||||||
|
|
||||||
|
current_plan = get_current_plan_by_realm(realm)
|
||||||
|
if current_plan is not None:
|
||||||
|
new_plan, last_ledger_entry = make_end_of_cycle_updates_if_needed(
|
||||||
|
current_plan, timezone_now()
|
||||||
|
)
|
||||||
|
if last_ledger_entry is not None:
|
||||||
|
if new_plan is not None:
|
||||||
|
realm.current_plan = new_plan
|
||||||
|
else:
|
||||||
|
realm.current_plan = current_plan
|
||||||
|
realm.current_plan.licenses = last_ledger_entry.licenses
|
||||||
|
realm.current_plan.licenses_used = get_latest_seat_count(realm)
|
||||||
|
|
||||||
|
# full_names can have , in them
|
||||||
|
users.update(UserProfile.objects.filter(full_name__iexact=query))
|
||||||
|
|
||||||
|
context["users"] = users
|
||||||
|
context["realms"] = realms
|
||||||
|
|
||||||
|
confirmations: List[Dict[str, Any]] = []
|
||||||
|
|
||||||
|
preregistration_users = PreregistrationUser.objects.filter(email__in=key_words)
|
||||||
|
confirmations += get_confirmations(
|
||||||
|
[Confirmation.USER_REGISTRATION, Confirmation.INVITATION, Confirmation.REALM_CREATION],
|
||||||
|
preregistration_users,
|
||||||
|
hostname=request.get_host(),
|
||||||
|
)
|
||||||
|
|
||||||
|
multiuse_invites = MultiuseInvite.objects.filter(realm__in=realms)
|
||||||
|
confirmations += get_confirmations([Confirmation.MULTIUSE_INVITE], multiuse_invites)
|
||||||
|
|
||||||
|
confirmations += get_confirmations(
|
||||||
|
[Confirmation.REALM_REACTIVATION], [realm.id for realm in realms]
|
||||||
|
)
|
||||||
|
|
||||||
|
context["confirmations"] = confirmations
|
||||||
|
|
||||||
|
def get_realm_owner_emails_as_string(realm: Realm) -> str:
|
||||||
|
return ", ".join(
|
||||||
|
realm.get_human_owner_users()
|
||||||
|
.order_by("delivery_email")
|
||||||
|
.values_list("delivery_email", flat=True)
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_realm_admin_emails_as_string(realm: Realm) -> str:
|
||||||
|
return ", ".join(
|
||||||
|
realm.get_human_admin_users(include_realm_owners=False)
|
||||||
|
.order_by("delivery_email")
|
||||||
|
.values_list("delivery_email", flat=True)
|
||||||
|
)
|
||||||
|
|
||||||
|
context["get_realm_owner_emails_as_string"] = get_realm_owner_emails_as_string
|
||||||
|
context["get_realm_admin_emails_as_string"] = get_realm_admin_emails_as_string
|
||||||
|
context["get_discount_for_realm"] = get_discount_for_realm
|
||||||
|
context["get_org_type_display_name"] = get_org_type_display_name
|
||||||
|
context["realm_icon_url"] = realm_icon_url
|
||||||
|
context["Confirmation"] = Confirmation
|
||||||
|
context["sorted_realm_types"] = sorted(
|
||||||
|
Realm.ORG_TYPES.values(), key=lambda d: d["display_order"]
|
||||||
|
)
|
||||||
|
|
||||||
|
return render(request, "analytics/support.html", context=context)
|
||||||
104
analytics/views/user_activity.py
Normal file
104
analytics/views/user_activity.py
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
from typing import Any, Dict, List, Tuple
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db.models.query import QuerySet
|
||||||
|
from django.http import HttpRequest, HttpResponse
|
||||||
|
from django.shortcuts import render
|
||||||
|
|
||||||
|
from analytics.views.activity_common import (
|
||||||
|
format_date_for_activity_reports,
|
||||||
|
get_user_activity_summary,
|
||||||
|
make_table,
|
||||||
|
)
|
||||||
|
from zerver.decorator import require_server_admin
|
||||||
|
from zerver.models import UserActivity, UserProfile, get_user_profile_by_id
|
||||||
|
|
||||||
|
if settings.BILLING_ENABLED:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def get_user_activity_records(user_profile: UserProfile) -> List[QuerySet]:
|
||||||
|
fields = [
|
||||||
|
"user_profile__full_name",
|
||||||
|
"query",
|
||||||
|
"client__name",
|
||||||
|
"count",
|
||||||
|
"last_visit",
|
||||||
|
]
|
||||||
|
|
||||||
|
records = UserActivity.objects.filter(
|
||||||
|
user_profile=user_profile,
|
||||||
|
)
|
||||||
|
records = records.order_by("-last_visit")
|
||||||
|
records = records.select_related("user_profile", "client").only(*fields)
|
||||||
|
return records
|
||||||
|
|
||||||
|
|
||||||
|
def raw_user_activity_table(records: List[QuerySet]) -> str:
|
||||||
|
cols = [
|
||||||
|
"query",
|
||||||
|
"client",
|
||||||
|
"count",
|
||||||
|
"last_visit",
|
||||||
|
]
|
||||||
|
|
||||||
|
def row(record: QuerySet) -> List[Any]:
|
||||||
|
return [
|
||||||
|
record.query,
|
||||||
|
record.client.name,
|
||||||
|
record.count,
|
||||||
|
format_date_for_activity_reports(record.last_visit),
|
||||||
|
]
|
||||||
|
|
||||||
|
rows = list(map(row, records))
|
||||||
|
title = "Raw data"
|
||||||
|
return make_table(title, cols, rows)
|
||||||
|
|
||||||
|
|
||||||
|
def user_activity_summary_table(user_summary: Dict[str, Dict[str, Any]]) -> str:
|
||||||
|
rows = []
|
||||||
|
for k, v in user_summary.items():
|
||||||
|
if k == "name" or k == "user_profile_id":
|
||||||
|
continue
|
||||||
|
client = k
|
||||||
|
count = v["count"]
|
||||||
|
last_visit = v["last_visit"]
|
||||||
|
row = [
|
||||||
|
format_date_for_activity_reports(last_visit),
|
||||||
|
client,
|
||||||
|
count,
|
||||||
|
]
|
||||||
|
rows.append(row)
|
||||||
|
|
||||||
|
rows = sorted(rows, key=lambda r: r[0], reverse=True)
|
||||||
|
|
||||||
|
cols = [
|
||||||
|
"last_visit",
|
||||||
|
"client",
|
||||||
|
"count",
|
||||||
|
]
|
||||||
|
|
||||||
|
title = "User activity"
|
||||||
|
return make_table(title, cols, rows)
|
||||||
|
|
||||||
|
|
||||||
|
@require_server_admin
|
||||||
|
def get_user_activity(request: HttpRequest, user_profile_id: int) -> HttpResponse:
|
||||||
|
user_profile = get_user_profile_by_id(user_profile_id)
|
||||||
|
records = get_user_activity_records(user_profile)
|
||||||
|
|
||||||
|
data: List[Tuple[str, str]] = []
|
||||||
|
user_summary = get_user_activity_summary(records)
|
||||||
|
content = user_activity_summary_table(user_summary)
|
||||||
|
|
||||||
|
data += [("Summary", content)]
|
||||||
|
|
||||||
|
content = raw_user_activity_table(records)
|
||||||
|
data += [("Info", content)]
|
||||||
|
|
||||||
|
title = user_profile.delivery_email
|
||||||
|
return render(
|
||||||
|
request,
|
||||||
|
"analytics/activity.html",
|
||||||
|
context=dict(data=data, title=title),
|
||||||
|
)
|
||||||
@@ -1,89 +0,0 @@
|
|||||||
# API keys
|
|
||||||
|
|
||||||
An **API key** is how a bot identifies itself to Zulip. For the official
|
|
||||||
clients, such as the Python bindings, we recommend [downloading a `zuliprc`
|
|
||||||
file](/api/configuring-python-bindings#download-a-zuliprc-file). This file
|
|
||||||
contains an API key and other necessary configuration values for using the
|
|
||||||
Zulip API with a specific account on a Zulip server.
|
|
||||||
|
|
||||||
## Get a bot's API key
|
|
||||||
|
|
||||||
{start_tabs}
|
|
||||||
|
|
||||||
{tab|desktop-web}
|
|
||||||
|
|
||||||
{settings_tab|your-bots}
|
|
||||||
|
|
||||||
1. Click **Active bots**.
|
|
||||||
|
|
||||||
1. Find your bot. The bot's API key is under **API KEY**.
|
|
||||||
|
|
||||||
{end_tabs}
|
|
||||||
|
|
||||||
!!! warn ""
|
|
||||||
|
|
||||||
Anyone with a bot's API key can impersonate the bot, so be careful with it!
|
|
||||||
|
|
||||||
## Get your API key
|
|
||||||
|
|
||||||
{start_tabs}
|
|
||||||
|
|
||||||
{tab|desktop-web}
|
|
||||||
|
|
||||||
{settings_tab|account-and-privacy}
|
|
||||||
|
|
||||||
1. Under **API key**, click **Manage your API key**.
|
|
||||||
|
|
||||||
1. Enter your password, and click **Get API key**. If you don't know your
|
|
||||||
password, click **reset it** and follow the instructions from there.
|
|
||||||
|
|
||||||
1. Copy your API key.
|
|
||||||
|
|
||||||
{end_tabs}
|
|
||||||
|
|
||||||
!!! warn ""
|
|
||||||
|
|
||||||
Anyone with your API key can impersonate you, so be doubly careful with it.
|
|
||||||
|
|
||||||
|
|
||||||
## Invalidate an API key
|
|
||||||
|
|
||||||
To invalidate an existing API key, you have to generate a new key.
|
|
||||||
|
|
||||||
### Invalidate a bot's API key
|
|
||||||
|
|
||||||
{start_tabs}
|
|
||||||
|
|
||||||
{tab|desktop-web}
|
|
||||||
|
|
||||||
{settings_tab|your-bots}
|
|
||||||
|
|
||||||
1. Click **Active bots**.
|
|
||||||
|
|
||||||
1. Find your bot.
|
|
||||||
|
|
||||||
1. Under **API KEY**, click the **refresh** (<i class="fa fa-refresh"></i>) icon
|
|
||||||
to the right of the bot's API key.
|
|
||||||
|
|
||||||
{end_tabs}
|
|
||||||
|
|
||||||
### Invalidate your API key
|
|
||||||
|
|
||||||
{start_tabs}
|
|
||||||
|
|
||||||
{tab|desktop-web}
|
|
||||||
|
|
||||||
{settings_tab|account-and-privacy}
|
|
||||||
|
|
||||||
1. Under **API key**, click **Manage your API key**.
|
|
||||||
|
|
||||||
1. Enter your password, and click **Get API key**. If you don't know your
|
|
||||||
password, click **reset it** and follow the instructions from there.
|
|
||||||
|
|
||||||
1. Click **Generate new API key**
|
|
||||||
|
|
||||||
{end_tabs}
|
|
||||||
|
|
||||||
## Related articles
|
|
||||||
|
|
||||||
* [Configuring the Python bindings](/api/configuring-python-bindings)
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,161 +0,0 @@
|
|||||||
# Configuring the Python bindings
|
|
||||||
|
|
||||||
Zulip provides a set of tools that allows interacting with its API more
|
|
||||||
easily, called the [Python bindings](https://pypi.python.org/pypi/zulip/).
|
|
||||||
One of the most notable use cases for these bindings are bots developed
|
|
||||||
using Zulip's [bot framework](/api/writing-bots).
|
|
||||||
|
|
||||||
In order to use them, you need to configure them with your identity
|
|
||||||
(account, API key, and Zulip server URL). There are a few ways to
|
|
||||||
achieve that:
|
|
||||||
|
|
||||||
- Using a `zuliprc` file, referenced via the `--config-file` option or
|
|
||||||
the `config_file` option to the `zulip.Client` constructor
|
|
||||||
(recommended for bots).
|
|
||||||
- Using a `zuliprc` file in your home directory at `~/.zuliprc`
|
|
||||||
(recommended for your own API key).
|
|
||||||
- Using the [environment
|
|
||||||
variables](https://en.wikipedia.org/wiki/Environment_variable)
|
|
||||||
documented below.
|
|
||||||
- Using the `--api-key`, `--email`, and `--site` variables as command
|
|
||||||
line parameters.
|
|
||||||
- Using the `api_key`, `email`, and `site` parameters to the
|
|
||||||
`zulip.Client` constructor.
|
|
||||||
|
|
||||||
## Download a `zuliprc` file
|
|
||||||
|
|
||||||
{start_tabs}
|
|
||||||
|
|
||||||
{tab|for-a-bot}
|
|
||||||
|
|
||||||
{settings_tab|your-bots}
|
|
||||||
|
|
||||||
1. Click the **download** (<i class="fa fa-download"></i>) icon on the profile
|
|
||||||
card of the desired bot to download the bot's `zuliprc` file.
|
|
||||||
|
|
||||||
!!! warn ""
|
|
||||||
|
|
||||||
Anyone with a bot's API key can impersonate the bot, so be careful with it!
|
|
||||||
|
|
||||||
{tab|for-yourself}
|
|
||||||
|
|
||||||
{settings_tab|account-and-privacy}
|
|
||||||
|
|
||||||
1. Under **API key**, click **Manage your API key**.
|
|
||||||
|
|
||||||
1. Enter your password, and click **Get API key**. If you don't know your
|
|
||||||
password, click **reset it** and follow the
|
|
||||||
instructions from there.
|
|
||||||
|
|
||||||
1. Click **Download zuliprc** to download your `zuliprc` file.
|
|
||||||
|
|
||||||
1. (optional) If you'd like your credentials to be used by default
|
|
||||||
when using the Zulip API on your computer, move the `zuliprc` file
|
|
||||||
to `~/.zuliprc` in your home directory.
|
|
||||||
|
|
||||||
!!! warn ""
|
|
||||||
|
|
||||||
Anyone with your API key can impersonate you, so be doubly careful with it.
|
|
||||||
|
|
||||||
{end_tabs}
|
|
||||||
|
|
||||||
## Configuration keys and environment variables
|
|
||||||
|
|
||||||
`zuliprc` is a configuration file written in the
|
|
||||||
[INI file format](https://en.wikipedia.org/wiki/INI_file),
|
|
||||||
which contains key-value pairs as shown in the following example:
|
|
||||||
|
|
||||||
```
|
|
||||||
[api]
|
|
||||||
key=<API key from the web interface>
|
|
||||||
email=<your email address>
|
|
||||||
site=<your Zulip server's URI>
|
|
||||||
...
|
|
||||||
```
|
|
||||||
|
|
||||||
The keys you can use in this file (and their equivalent environment variables)
|
|
||||||
can be found in the following table:
|
|
||||||
|
|
||||||
<table class="table">
|
|
||||||
<thead>
|
|
||||||
<tr>
|
|
||||||
<th><code>zuliprc</code> key</th>
|
|
||||||
<th>Environment variable</th>
|
|
||||||
<th>Required</th>
|
|
||||||
<th>Description</th>
|
|
||||||
</tr>
|
|
||||||
</thead>
|
|
||||||
<tr>
|
|
||||||
<td><code>key</code></td>
|
|
||||||
<td><code>ZULIP_API_KEY</code></td>
|
|
||||||
<td>Yes</td>
|
|
||||||
<td>
|
|
||||||
<a href="/api/api-keys">API key</a>, which you can get through
|
|
||||||
Zulip's web interface.
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td><code>email</code></td>
|
|
||||||
<td><code>ZULIP_EMAIL</code></td>
|
|
||||||
<td>Yes</td>
|
|
||||||
<td>
|
|
||||||
The email address of the user who owns the API key mentioned
|
|
||||||
above.
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td><code>site</code></td>
|
|
||||||
<td><code>ZULIP_SITE</code></td>
|
|
||||||
<td>No</td>
|
|
||||||
<td>
|
|
||||||
URL where your Zulip server is located.
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td><code>client_cert_key</code></td>
|
|
||||||
<td><code>ZULIP_CERT_KEY</code></td>
|
|
||||||
<td>No</td>
|
|
||||||
<td>
|
|
||||||
Path to the SSL/TLS private key that the binding should use to
|
|
||||||
connect to the server.
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td><code>client_cert</code></td>
|
|
||||||
<td><code>ZULIP_CERT</code></td>
|
|
||||||
<td>No*</td>
|
|
||||||
<td>
|
|
||||||
The public counterpart of <code>client_cert_key</code>/
|
|
||||||
<code>ZULIP_CERT_KEY</code>. <i>This setting is required if a cert
|
|
||||||
key has been set.</i>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td><code>client_bundle</code></td>
|
|
||||||
<td><code>ZULIP_CERT_BUNDLE</code></td>
|
|
||||||
<td>No</td>
|
|
||||||
<td>
|
|
||||||
Path where the server's PEM-encoded certificate is located. CA
|
|
||||||
certificates are also accepted, in case those CA's have issued the
|
|
||||||
server's certificate. Defaults to the built-in CA bundle trusted
|
|
||||||
by Python.
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td><code>insecure</code></td>
|
|
||||||
<td><code>ZULIP_ALLOW_INSECURE</code></td>
|
|
||||||
<td>No</td>
|
|
||||||
<td>
|
|
||||||
Allows connecting to Zulip servers with an invalid SSL/TLS
|
|
||||||
certificate. Please note that enabling this will make the HTTPS
|
|
||||||
connection insecure. Defaults to <code>false</code>.
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
|
|
||||||
## Related articles
|
|
||||||
|
|
||||||
* [Installation instructions](/api/installation-instructions)
|
|
||||||
* [API keys](/api/api-keys)
|
|
||||||
* [Running bots](/api/running-bots)
|
|
||||||
* [Deploying bots](/api/deploying-bots)
|
|
||||||
@@ -1,173 +0,0 @@
|
|||||||
# Construct a narrow
|
|
||||||
|
|
||||||
A **narrow** is a set of filters for Zulip messages, that can be based
|
|
||||||
on many different factors (like sender, channel, topic, search
|
|
||||||
keywords, etc.). Narrows are used in various places in the Zulip
|
|
||||||
API (most importantly, in the API for fetching messages).
|
|
||||||
|
|
||||||
It is simplest to explain the algorithm for encoding a search as a
|
|
||||||
narrow using a single example. Consider the following search query
|
|
||||||
(written as it would be entered in the Zulip web app's search box).
|
|
||||||
It filters for messages sent to channel `announce`, not sent by
|
|
||||||
`iago@zulip.com`, and containing the words `cool` and `sunglasses`:
|
|
||||||
|
|
||||||
```
|
|
||||||
channel:announce -sender:iago@zulip.com cool sunglasses
|
|
||||||
```
|
|
||||||
|
|
||||||
This query would be JSON-encoded for use in the Zulip API using JSON
|
|
||||||
as a list of simple objects, as follows:
|
|
||||||
|
|
||||||
```json
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"operator": "channel",
|
|
||||||
"operand": "announce"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"operator": "sender",
|
|
||||||
"operand": "iago@zulip.com",
|
|
||||||
"negated": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"operator": "search",
|
|
||||||
"operand": "cool sunglasses"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
```
|
|
||||||
|
|
||||||
The Zulip help center article on [searching for messages](/help/search-for-messages)
|
|
||||||
documents the majority of the search/narrow options supported by the
|
|
||||||
Zulip API.
|
|
||||||
|
|
||||||
Note that many narrows, including all that lack a `channel` or `channels`
|
|
||||||
operator, search the current user's personal message history. See
|
|
||||||
[searching shared history](/help/search-for-messages#searching-shared-history)
|
|
||||||
for details.
|
|
||||||
|
|
||||||
Clients should note that the `is:unread` filter takes advantage of the
|
|
||||||
fact that there is a database index for unread messages, which can be an
|
|
||||||
important optimization when fetching messages in certain cases (e.g.,
|
|
||||||
when [adding the `read` flag to a user's personal
|
|
||||||
messages](/api/update-message-flags-for-narrow)).
|
|
||||||
|
|
||||||
## Changes
|
|
||||||
|
|
||||||
* In Zulip 9.0 (feature level 271), support was added for a new filter
|
|
||||||
operator, `with`, which uses a [message ID](#message-ids) for its
|
|
||||||
operand, and is designed for creating permanent links to topics.
|
|
||||||
|
|
||||||
* In Zulip 9.0 (feature level 265), support was added for a new
|
|
||||||
`is:followed` filter, matching messages in topics that the current
|
|
||||||
user is [following](/help/follow-a-topic).
|
|
||||||
|
|
||||||
* In Zulip 9.0 (feature level 250), support was added for two filters
|
|
||||||
related to stream messages: `channel` and `channels`. The `channel`
|
|
||||||
operator is an alias for the `stream` operator. The `channels`
|
|
||||||
operator is an alias for the `streams` operator. Both `channel` and
|
|
||||||
`channels` return the same exact results as `stream` and `streams`
|
|
||||||
respectively.
|
|
||||||
|
|
||||||
* In Zulip 9.0 (feature level 249), support was added for a new filter,
|
|
||||||
`has:reaction`, which returns messages that have at least one [emoji
|
|
||||||
reaction](/help/emoji-reactions).
|
|
||||||
|
|
||||||
* In Zulip 7.0 (feature level 177), support was added for three filters
|
|
||||||
related to direct messages: `is:dm`, `dm` and `dm-including`. The
|
|
||||||
`dm` operator replaced and deprecated the `pm-with` operator. The
|
|
||||||
`is:dm` filter replaced and deprecated the `is:private` filter. The
|
|
||||||
`dm-including` operator replaced and deprecated the `group-pm-with`
|
|
||||||
operator.
|
|
||||||
|
|
||||||
* The `dm-including` and `group-pm-with` operators return slightly
|
|
||||||
different results. For example, `dm-including:1234` returns all
|
|
||||||
direct messages (1-on-1 and group) that include the current user
|
|
||||||
and the user with the unique user ID of `1234`. On the other hand,
|
|
||||||
`group-pm-with:1234` returned only group direct messages that
|
|
||||||
included the current user and the user with the unique user ID of
|
|
||||||
`1234`.
|
|
||||||
|
|
||||||
* Both `dm` and `is:dm` are aliases of `pm-with` and `is:private`
|
|
||||||
respectively, and return the same exact results that the
|
|
||||||
deprecated filters did.
|
|
||||||
|
|
||||||
## Narrows that use IDs
|
|
||||||
|
|
||||||
### Message IDs
|
|
||||||
|
|
||||||
The `near`, `id` and `with` operators use message IDs for their
|
|
||||||
operands. The `near` and `id` operators are documented in the help
|
|
||||||
center [here](/help/search-for-messages#search-by-message-id).
|
|
||||||
|
|
||||||
The `with` operator is designed to be used for permanent links to topics,
|
|
||||||
which means they should continue to work when the topic is
|
|
||||||
[moved](/help/move-content-to-another-topic) or
|
|
||||||
[resolved](/help/resolve-a-topic). If the message with the specified ID
|
|
||||||
exists, and can be accessed by the user, then it will return messages
|
|
||||||
with the `channel`/`topic`/`dm` operators corresponding to the current
|
|
||||||
conversation containing that message, and replacing any such filters
|
|
||||||
included in the narrow.
|
|
||||||
|
|
||||||
* `with:12345`: Search for the conversation that contains the message
|
|
||||||
with ID `12345`.
|
|
||||||
* `near:12345`: Search messages around the message with ID `12345`.
|
|
||||||
* `id:12345`: Search for only the message with ID `12345`.
|
|
||||||
|
|
||||||
The message ID operand for the `with` and `id` operators may be encoded
|
|
||||||
as either a number or a string. The message ID operand for the `near`
|
|
||||||
operator must be encoded as a string.
|
|
||||||
|
|
||||||
**Changes**: Prior to Zulip 8.0 (feature level 194), the message ID
|
|
||||||
operand for the `id` operator needed to be encoded as a string.
|
|
||||||
|
|
||||||
|
|
||||||
```json
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"operator": "id",
|
|
||||||
"operand": 12345
|
|
||||||
}
|
|
||||||
]
|
|
||||||
```
|
|
||||||
|
|
||||||
### Channel and user IDs
|
|
||||||
|
|
||||||
There are a few additional narrow/search options (new in Zulip 2.1)
|
|
||||||
that use either channel IDs or user IDs that are not documented in the
|
|
||||||
help center because they are primarily useful to API clients:
|
|
||||||
|
|
||||||
* `channel:1234`: Search messages sent to the channel with ID `1234`.
|
|
||||||
* `sender:1234`: Search messages sent by user ID `1234`.
|
|
||||||
* `dm:1234`: Search the direct message conversation between
|
|
||||||
you and user ID `1234`.
|
|
||||||
* `dm:1234,5678`: Search the direct message conversation between
|
|
||||||
you, user ID `1234`, and user ID `5678`.
|
|
||||||
* `dm-including:1234`: Search all direct messages (1-on-1 and group)
|
|
||||||
that include you and user ID `1234`.
|
|
||||||
|
|
||||||
!!! tip ""
|
|
||||||
|
|
||||||
A user ID can be found by [viewing a user's profile][view-profile]
|
|
||||||
in the web or desktop apps. A channel ID can be found when [browsing
|
|
||||||
channels][browse-channels] in the web or desktop apps.
|
|
||||||
|
|
||||||
The operands for these search options must be encoded either as an
|
|
||||||
integer ID or a JSON list of integer IDs. For example, to query
|
|
||||||
messages sent by a user 1234 to a direct message thread with yourself,
|
|
||||||
user 1234, and user 5678, the correct JSON-encoded query is:
|
|
||||||
|
|
||||||
```json
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"operator": "dm",
|
|
||||||
"operand": [1234, 5678]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"operator": "sender",
|
|
||||||
"operand": 1234
|
|
||||||
}
|
|
||||||
]
|
|
||||||
```
|
|
||||||
|
|
||||||
[view-profile]: /help/view-someones-profile
|
|
||||||
[browse-channels]: /help/introduction-to-channels#browse-and-subscribe-to-channels
|
|
||||||
@@ -1,49 +0,0 @@
|
|||||||
{generate_api_header(/scheduled_messages:post)}
|
|
||||||
|
|
||||||
## Usage examples
|
|
||||||
|
|
||||||
{start_tabs}
|
|
||||||
|
|
||||||
{generate_code_example(python)|/scheduled_messages:post|example}
|
|
||||||
|
|
||||||
{generate_code_example(javascript)|/scheduled_messages:post|example}
|
|
||||||
|
|
||||||
{tab|curl}
|
|
||||||
|
|
||||||
``` curl
|
|
||||||
# Create a scheduled channel message
|
|
||||||
curl -X POST {{ api_url }}/v1/scheduled_messages \
|
|
||||||
-u BOT_EMAIL_ADDRESS:BOT_API_KEY \
|
|
||||||
--data-urlencode type=stream \
|
|
||||||
--data-urlencode to=9 \
|
|
||||||
--data-urlencode topic=Hello \
|
|
||||||
--data-urlencode 'content=Nice to meet everyone!' \
|
|
||||||
--data-urlencode scheduled_delivery_timestamp=3165826990
|
|
||||||
|
|
||||||
# Create a scheduled direct message
|
|
||||||
curl -X POST {{ api_url }}/v1/messages \
|
|
||||||
-u BOT_EMAIL_ADDRESS:BOT_API_KEY \
|
|
||||||
--data-urlencode type=direct \
|
|
||||||
--data-urlencode 'to=[9, 10]' \
|
|
||||||
--data-urlencode 'content=Can we meet on Monday?' \
|
|
||||||
--data-urlencode scheduled_delivery_timestamp=3165826990
|
|
||||||
|
|
||||||
```
|
|
||||||
|
|
||||||
{end_tabs}
|
|
||||||
|
|
||||||
## Parameters
|
|
||||||
|
|
||||||
{generate_api_arguments_table|zulip.yaml|/scheduled_messages:post}
|
|
||||||
|
|
||||||
{generate_parameter_description(/scheduled_messages:post)}
|
|
||||||
|
|
||||||
## Response
|
|
||||||
|
|
||||||
{generate_return_values_table|zulip.yaml|/scheduled_messages:post}
|
|
||||||
|
|
||||||
{generate_response_description(/scheduled_messages:post)}
|
|
||||||
|
|
||||||
#### Example response(s)
|
|
||||||
|
|
||||||
{generate_code_example|/scheduled_messages:post|fixture}
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
# Create a channel
|
|
||||||
|
|
||||||
You can create a channel using Zulip's REST API by submitting a
|
|
||||||
[subscribe](/api/subscribe) request with a channel name that
|
|
||||||
doesn't yet exist and passing appropriate parameters to define
|
|
||||||
the initial configuration of the new channel.
|
|
||||||
@@ -1,90 +0,0 @@
|
|||||||
# Group-setting values
|
|
||||||
|
|
||||||
Settings defining permissions in Zulip are increasingly represented
|
|
||||||
using [user groups](/help/user-groups), which offer much more flexible
|
|
||||||
configuration than the older [roles](/api/roles-and-permissions) system.
|
|
||||||
|
|
||||||
!!! warn ""
|
|
||||||
|
|
||||||
This API feature is under development, and currently only values that
|
|
||||||
correspond to a single named user group are permitted in
|
|
||||||
production environments, pending the web application UI supporting
|
|
||||||
displaying more complex values correctly.
|
|
||||||
|
|
||||||
In the API, these settings are represented using a **group-setting
|
|
||||||
value**, which can take two forms:
|
|
||||||
|
|
||||||
- An integer user group ID, which can be either a named user group
|
|
||||||
visible in the UI or a [role-based system group](#system-groups).
|
|
||||||
- An object with fields `direct_member_ids` containing a list of
|
|
||||||
integer user IDs and `direct_subgroup_ids` containing a list of
|
|
||||||
integer group IDs. The setting's value is the union of the
|
|
||||||
identified collection of users and groups.
|
|
||||||
|
|
||||||
Group-setting values in the object form function very much like a
|
|
||||||
formal user group object, without requiring the naming and UI clutter
|
|
||||||
overhead involved with creating a visible user group just to store the
|
|
||||||
value of a single setting.
|
|
||||||
|
|
||||||
The server will canonicalize an object with empty `direct_member_ids`
|
|
||||||
and with `direct_subgroup_ids` containing just the given group ID to
|
|
||||||
the integer format.
|
|
||||||
|
|
||||||
## System groups
|
|
||||||
|
|
||||||
The Zulip server maintains a collection of system groups that
|
|
||||||
correspond to the users with a given role; this makes it convenient to
|
|
||||||
store concepts like "all administrators" in a group-setting
|
|
||||||
value. These use a special naming convention and can be recognized by
|
|
||||||
the `is_system_group` property on their group object.
|
|
||||||
|
|
||||||
The following system groups are maintained by the Zulip server:
|
|
||||||
|
|
||||||
- `role:internet`: Everyone on the Internet has this permission; this
|
|
||||||
is used to configure the [public access
|
|
||||||
option](/help/public-access-option).
|
|
||||||
- `role:everyone`: All users, including guests.
|
|
||||||
- `role:members`: All users, excluding guests.
|
|
||||||
- `role:fullmembers`: All [full
|
|
||||||
members](https://zulip.com/api/roles-and-permissions#determining-if-a-user-is-a-full-member)
|
|
||||||
of the organization.
|
|
||||||
- `role:moderators`: All users with at least the moderator role.
|
|
||||||
- `role:administrators`: All users with at least the administrator
|
|
||||||
role.
|
|
||||||
- `role:owners`: All users with the owner role.
|
|
||||||
- `role:nobody`: The formal empty group. Used in the API to represent
|
|
||||||
disabling a feature.
|
|
||||||
|
|
||||||
Client UI for setting a permission is encouraged to display system
|
|
||||||
groups using their description, rather than using their names, which
|
|
||||||
are chosen to be unique and clear in the API.
|
|
||||||
|
|
||||||
System groups should generally not be displayed in UI for
|
|
||||||
administering an organization's user groups, since they are not
|
|
||||||
directly mutable.
|
|
||||||
|
|
||||||
## Updating group-setting values
|
|
||||||
|
|
||||||
The Zulip API uses a special format for modifying an existing setting
|
|
||||||
using a group-setting value.
|
|
||||||
|
|
||||||
A **group-setting update** is an object with a `new` field and an
|
|
||||||
optional `old` field, each containing a group-setting value. The
|
|
||||||
setting's value will be set to the membership expressed by the `new`
|
|
||||||
field.
|
|
||||||
|
|
||||||
The `old` field expresses the client's understanding of the current
|
|
||||||
value of the setting. If the `old` field is present and does not match
|
|
||||||
the actual current value of the setting, then the request will fail
|
|
||||||
with error code `EXPECTATION_MISMATCH` and no changes will be applied.
|
|
||||||
|
|
||||||
When a user edits the setting in a UI, the resulting API request
|
|
||||||
should generally always include the `old` field, giving the value
|
|
||||||
the list had when the user started editing. This accurately expresses
|
|
||||||
the user's intent, and if two users edit the same list around the
|
|
||||||
same time, it prevents a situation where the second change
|
|
||||||
accidentally reverts the first one without either user noticing.
|
|
||||||
|
|
||||||
Omitting `old` is appropriate where the intent really is a new complete
|
|
||||||
list rather than an edit, for example in an integration that syncs the
|
|
||||||
list from an external source of truth.
|
|
||||||
@@ -1,80 +0,0 @@
|
|||||||
# HTTP headers
|
|
||||||
|
|
||||||
This page documents the HTTP headers used by the Zulip API.
|
|
||||||
|
|
||||||
Most important is that API clients authenticate to the server using
|
|
||||||
HTTP Basic authentication. If you're using the official [Python or
|
|
||||||
JavaScript bindings](/api/installation-instructions), this is taken
|
|
||||||
care of when you configure said bindings.
|
|
||||||
|
|
||||||
Otherwise, see the `curl` example on each endpoint's documentation
|
|
||||||
page, which details the request format.
|
|
||||||
|
|
||||||
Documented below are additional HTTP headers and header conventions
|
|
||||||
generally used by Zulip:
|
|
||||||
|
|
||||||
## The `User-Agent` header
|
|
||||||
|
|
||||||
Clients are not required to pass a `User-Agent` HTTP header, but we
|
|
||||||
highly recommend doing so when writing an integration. It's easy to do
|
|
||||||
and it can help save time when debugging issues related to an API
|
|
||||||
client.
|
|
||||||
|
|
||||||
If provided, the Zulip server will parse the `User-Agent` HTTP header
|
|
||||||
in order to identify specific clients and integrations. This
|
|
||||||
information is used by the server for logging, [usage
|
|
||||||
statistics](/help/analytics), and on rare occasions, for
|
|
||||||
backwards-compatibility logic to preserve support for older versions
|
|
||||||
of official clients.
|
|
||||||
|
|
||||||
Official Zulip clients and integrations use a `User-Agent` that starts
|
|
||||||
with something like `ZulipMobile/20.0.103 `, encoding the name of the
|
|
||||||
application and it's version.
|
|
||||||
|
|
||||||
Zulip's official API bindings have reasonable defaults for
|
|
||||||
`User-Agent`. For example, the official Zulip Python bindings have a
|
|
||||||
default `User-Agent` starting with `ZulipPython/{version}`, where
|
|
||||||
`version` is the version of the library.
|
|
||||||
|
|
||||||
You can give your bot/integration its own name by passing the `client`
|
|
||||||
parameter when initializing the Python bindings. For example, the
|
|
||||||
official Zulip Nagios integration is initialized like this:
|
|
||||||
|
|
||||||
``` python
|
|
||||||
client = zulip.Client(
|
|
||||||
config_file=opts.config, client=f"ZulipNagios/{VERSION}"
|
|
||||||
)
|
|
||||||
```
|
|
||||||
|
|
||||||
If you are working on an integration that you plan to share outside
|
|
||||||
your organization, you can get help picking a good name in
|
|
||||||
`#integrations` in the [Zulip development
|
|
||||||
community](https://zulip.com/development-community/).
|
|
||||||
|
|
||||||
## Rate-limiting response headers
|
|
||||||
|
|
||||||
To help clients avoid exceeding rate limits, Zulip sets the following
|
|
||||||
HTTP headers in all API responses:
|
|
||||||
|
|
||||||
* `X-RateLimit-Remaining`: The number of additional requests of this
|
|
||||||
type that the client can send before exceeding its limit.
|
|
||||||
* `X-RateLimit-Limit`: The limit that would be applicable to a client
|
|
||||||
that had not made any recent requests of this type. This is useful
|
|
||||||
for designing a client's burst behavior so as to avoid ever reaching
|
|
||||||
a rate limit.
|
|
||||||
* `X-RateLimit-Reset`: The time at which the client will no longer
|
|
||||||
have any rate limits applied to it (and thus could do a burst of
|
|
||||||
`X-RateLimit-Limit` requests).
|
|
||||||
|
|
||||||
[Zulip's rate limiting rules are configurable][rate-limiting-rules],
|
|
||||||
and can vary by server and over time. The default configuration
|
|
||||||
currently limits:
|
|
||||||
|
|
||||||
* Every user is limited to 200 total API requests per minute.
|
|
||||||
* Separate, much lower limits for authentication/login attempts.
|
|
||||||
|
|
||||||
When the Zulip server has configured multiple rate limits that apply
|
|
||||||
to a given request, the values returned will be for the strictest
|
|
||||||
limit.
|
|
||||||
|
|
||||||
[rate-limiting-rules]: https://zulip.readthedocs.io/en/latest/production/security-model.html#rate-limiting
|
|
||||||
@@ -1,136 +0,0 @@
|
|||||||
#### Messages
|
|
||||||
|
|
||||||
* [Send a message](/api/send-message)
|
|
||||||
* [Upload a file](/api/upload-file)
|
|
||||||
* [Edit a message](/api/update-message)
|
|
||||||
* [Delete a message](/api/delete-message)
|
|
||||||
* [Get messages](/api/get-messages)
|
|
||||||
* [Construct a narrow](/api/construct-narrow)
|
|
||||||
* [Add an emoji reaction](/api/add-reaction)
|
|
||||||
* [Remove an emoji reaction](/api/remove-reaction)
|
|
||||||
* [Render a message](/api/render-message)
|
|
||||||
* [Fetch a single message](/api/get-message)
|
|
||||||
* [Check if messages match a narrow](/api/check-messages-match-narrow)
|
|
||||||
* [Get a message's edit history](/api/get-message-history)
|
|
||||||
* [Update personal message flags](/api/update-message-flags)
|
|
||||||
* [Update personal message flags for narrow](/api/update-message-flags-for-narrow)
|
|
||||||
* [Mark all messages as read](/api/mark-all-as-read)
|
|
||||||
* [Mark messages in a channel as read](/api/mark-stream-as-read)
|
|
||||||
* [Mark messages in a topic as read](/api/mark-topic-as-read)
|
|
||||||
* [Get a message's read receipts](/api/get-read-receipts)
|
|
||||||
|
|
||||||
#### Scheduled messages
|
|
||||||
|
|
||||||
* [Get scheduled messages](/api/get-scheduled-messages)
|
|
||||||
* [Create a scheduled message](/api/create-scheduled-message)
|
|
||||||
* [Edit a scheduled message](/api/update-scheduled-message)
|
|
||||||
* [Delete a scheduled message](/api/delete-scheduled-message)
|
|
||||||
|
|
||||||
#### Drafts
|
|
||||||
|
|
||||||
* [Get drafts](/api/get-drafts)
|
|
||||||
* [Create drafts](/api/create-drafts)
|
|
||||||
* [Edit a draft](/api/edit-draft)
|
|
||||||
* [Delete a draft](/api/delete-draft)
|
|
||||||
|
|
||||||
#### Channels
|
|
||||||
|
|
||||||
* [Get subscribed channels](/api/get-subscriptions)
|
|
||||||
* [Subscribe to a channel](/api/subscribe)
|
|
||||||
* [Unsubscribe from a channel](/api/unsubscribe)
|
|
||||||
* [Get subscription status](/api/get-subscription-status)
|
|
||||||
* [Get channel subscribers](/api/get-subscribers)
|
|
||||||
* [Update subscription settings](/api/update-subscription-settings)
|
|
||||||
* [Get all channels](/api/get-streams)
|
|
||||||
* [Get a channel by ID](/api/get-stream-by-id)
|
|
||||||
* [Get channel ID](/api/get-stream-id)
|
|
||||||
* [Create a channel](/api/create-stream)
|
|
||||||
* [Update a channel](/api/update-stream)
|
|
||||||
* [Archive a channel](/api/archive-stream)
|
|
||||||
* [Get channel's email address](/api/get-stream-email-address)
|
|
||||||
* [Get topics in a channel](/api/get-stream-topics)
|
|
||||||
* [Topic muting](/api/mute-topic)
|
|
||||||
* [Update personal preferences for a topic](/api/update-user-topic)
|
|
||||||
* [Delete a topic](/api/delete-topic)
|
|
||||||
* [Add a default channel](/api/add-default-stream)
|
|
||||||
* [Remove a default channel](/api/remove-default-stream)
|
|
||||||
|
|
||||||
#### Users
|
|
||||||
|
|
||||||
* [Get a user](/api/get-user)
|
|
||||||
* [Get a user by email](/api/get-user-by-email)
|
|
||||||
* [Get own user](/api/get-own-user)
|
|
||||||
* [Get all users](/api/get-users)
|
|
||||||
* [Create a user](/api/create-user)
|
|
||||||
* [Update a user](/api/update-user)
|
|
||||||
* [Deactivate a user](/api/deactivate-user)
|
|
||||||
* [Deactivate own user](/api/deactivate-own-user)
|
|
||||||
* [Reactivate a user](/api/reactivate-user)
|
|
||||||
* [Get a user's status](/api/get-user-status)
|
|
||||||
* [Update your status](/api/update-status)
|
|
||||||
* [Set "typing" status](/api/set-typing-status)
|
|
||||||
* [Get a user's presence](/api/get-user-presence)
|
|
||||||
* [Get presence of all users](/api/get-presence)
|
|
||||||
* [Update your presence](/api/update-presence)
|
|
||||||
* [Get attachments](/api/get-attachments)
|
|
||||||
* [Delete an attachment](/api/remove-attachment)
|
|
||||||
* [Update settings](/api/update-settings)
|
|
||||||
* [Get user groups](/api/get-user-groups)
|
|
||||||
* [Create a user group](/api/create-user-group)
|
|
||||||
* [Update a user group](/api/update-user-group)
|
|
||||||
* [Delete a user group](/api/remove-user-group)
|
|
||||||
* [Update user group members](/api/update-user-group-members)
|
|
||||||
* [Update subgroups of a user group](/api/update-user-group-subgroups)
|
|
||||||
* [Get user group membership status](/api/get-is-user-group-member)
|
|
||||||
* [Get user group members](/api/get-user-group-members)
|
|
||||||
* [Get subgroups of a user group](/api/get-user-group-subgroups)
|
|
||||||
* [Mute a user](/api/mute-user)
|
|
||||||
* [Unmute a user](/api/unmute-user)
|
|
||||||
* [Get all alert words](/api/get-alert-words)
|
|
||||||
* [Add alert words](/api/add-alert-words)
|
|
||||||
* [Remove alert words](/api/remove-alert-words)
|
|
||||||
|
|
||||||
#### Invitations
|
|
||||||
|
|
||||||
* [Get all invitations](/api/get-invites)
|
|
||||||
* [Send invitations](/api/send-invites)
|
|
||||||
* [Create a reusable invitation link](/api/create-invite-link)
|
|
||||||
* [Resend an email invitation](/api/resend-email-invite)
|
|
||||||
* [Revoke an email invitation](/api/revoke-email-invite)
|
|
||||||
* [Revoke a reusable invitation link](/api/revoke-invite-link)
|
|
||||||
|
|
||||||
#### Server & organizations
|
|
||||||
|
|
||||||
* [Get server settings](/api/get-server-settings)
|
|
||||||
* [Get linkifiers](/api/get-linkifiers)
|
|
||||||
* [Add a linkifier](/api/add-linkifier)
|
|
||||||
* [Update a linkifier](/api/update-linkifier)
|
|
||||||
* [Remove a linkifier](/api/remove-linkifier)
|
|
||||||
* [Reorder linkifiers](/api/reorder-linkifiers)
|
|
||||||
* [Add a code playground](/api/add-code-playground)
|
|
||||||
* [Remove a code playground](/api/remove-code-playground)
|
|
||||||
* [Get all custom emoji](/api/get-custom-emoji)
|
|
||||||
* [Upload custom emoji](/api/upload-custom-emoji)
|
|
||||||
* [Deactivate custom emoji](/api/deactivate-custom-emoji)
|
|
||||||
* [Get all custom profile fields](/api/get-custom-profile-fields)
|
|
||||||
* [Reorder custom profile fields](/api/reorder-custom-profile-fields)
|
|
||||||
* [Create a custom profile field](/api/create-custom-profile-field)
|
|
||||||
* [Update realm-level defaults of user settings](/api/update-realm-user-settings-defaults)
|
|
||||||
|
|
||||||
#### Real-time events
|
|
||||||
|
|
||||||
* [Real time events API](/api/real-time-events)
|
|
||||||
* [Register an event queue](/api/register-queue)
|
|
||||||
* [Get events from an event queue](/api/get-events)
|
|
||||||
* [Delete an event queue](/api/delete-queue)
|
|
||||||
|
|
||||||
#### Specialty endpoints
|
|
||||||
|
|
||||||
* [Fetch an API key (production)](/api/fetch-api-key)
|
|
||||||
* [Fetch an API key (development only)](/api/dev-fetch-api-key)
|
|
||||||
* [Send a test notification to mobile device(s)](/api/test-notify)
|
|
||||||
* [Add an APNs device token](/api/add-apns-token)
|
|
||||||
* [Remove an APNs device token](/api/remove-apns-token)
|
|
||||||
* [Add an FCM registration token](/api/add-fcm-token)
|
|
||||||
* [Remove an FCM registration token](/api/remove-fcm-token)
|
|
||||||
* [Create BigBlueButton video call](/api/create-big-blue-button-video-call)
|
|
||||||
@@ -1,223 +0,0 @@
|
|||||||
# Incoming webhook integrations
|
|
||||||
|
|
||||||
An incoming webhook allows a third-party service to push data to Zulip when
|
|
||||||
something happens. There are several ways to set up an incoming webhook in
|
|
||||||
Zulip:
|
|
||||||
|
|
||||||
* Use our [REST API](/api/rest) endpoint for [sending
|
|
||||||
messages](/api/send-message). This works great for internal tools
|
|
||||||
or cases where the third-party tool wants to control the formatting
|
|
||||||
of the messages in Zulip.
|
|
||||||
* Use one of our supported [integration
|
|
||||||
frameworks](/integrations/meta-integration), such as the
|
|
||||||
[Slack-compatible incoming webhook](/integrations/doc/slack_incoming),
|
|
||||||
[Zapier integration](/integrations/doc/zapier), or
|
|
||||||
[IFTTT integration](/integrations/doc/ifttt).
|
|
||||||
* Implementing an incoming webhook integration (detailed on this page),
|
|
||||||
where all the logic for formatting the Zulip messages lives in the
|
|
||||||
Zulip server. This is how most of [Zulip's official
|
|
||||||
integrations](/integrations/) work, because they enable Zulip to
|
|
||||||
support third-party services that just have an "outgoing webhook"
|
|
||||||
feature (without the third party needing to do any work specific to
|
|
||||||
Zulip).
|
|
||||||
|
|
||||||
In an incoming webhook integration, the third-party service's
|
|
||||||
"outgoing webhook" feature sends an `HTTP POST` to a special URL when
|
|
||||||
it has something for you, and then the Zulip "incoming webhook"
|
|
||||||
integration handles that incoming data to format and send a message in
|
|
||||||
Zulip.
|
|
||||||
|
|
||||||
New official Zulip webhook integrations can take just a few hours to
|
|
||||||
write, including tests and documentation, if you use the right
|
|
||||||
process.
|
|
||||||
|
|
||||||
## Quick guide
|
|
||||||
|
|
||||||
* Set up the
|
|
||||||
[Zulip development environment](https://zulip.readthedocs.io/en/latest/development/overview.html).
|
|
||||||
|
|
||||||
* Use [Zulip's JSON integration](/integrations/doc/json),
|
|
||||||
<https://webhook.site/>, or a similar site to capture an example
|
|
||||||
webhook payload from the third-party service. Create a
|
|
||||||
`zerver/webhooks/<mywebhook>/fixtures/` directory, and add the
|
|
||||||
captured JSON payload as a test fixture.
|
|
||||||
|
|
||||||
* Create an `Integration` object, and add it to the `WEBHOOK_INTEGRATIONS`
|
|
||||||
list in `zerver/lib/integrations.py`. Search for `WebhookIntegration` in that
|
|
||||||
file to find an existing one to copy.
|
|
||||||
|
|
||||||
* Write a draft webhook handler in `zerver/webhooks/<mywebhook>/view.py`. There
|
|
||||||
are a lot of examples in the `zerver/webhooks/` directory that you can copy.
|
|
||||||
We recommend templating from a short one, like `zendesk`.
|
|
||||||
|
|
||||||
* Write a test for your fixture in `zerver/webhooks/<mywebhook>/tests.py`.
|
|
||||||
Run the test for your integration like this:
|
|
||||||
|
|
||||||
```
|
|
||||||
tools/test-backend zerver/webhooks/<mywebhook>/
|
|
||||||
```
|
|
||||||
|
|
||||||
Iterate on debugging the test and webhooks handler until it all
|
|
||||||
works.
|
|
||||||
|
|
||||||
* Capture payloads for the other common types of `POST`s the third-party
|
|
||||||
service will make, and add tests for them; usually this part of the
|
|
||||||
process is pretty fast.
|
|
||||||
|
|
||||||
* Document the integration in `zerver/webhooks/<mywebhook>/doc.md` (required for
|
|
||||||
getting it merged into Zulip). You can use existing documentation, like
|
|
||||||
[this one](https://raw.githubusercontent.com/zulip/zulip/main/zerver/webhooks/github/doc.md),
|
|
||||||
as a template. This should not take more than 15 minutes, even if you don't speak English
|
|
||||||
as a first language (we'll clean up the text before merging).
|
|
||||||
|
|
||||||
## Hello world walkthrough
|
|
||||||
|
|
||||||
Check out the [detailed walkthrough](incoming-webhooks-walkthrough) for step-by-step
|
|
||||||
instructions.
|
|
||||||
|
|
||||||
## Checklist
|
|
||||||
|
|
||||||
### Files that need to be created
|
|
||||||
|
|
||||||
Select a name for your incoming webhook and use it consistently. The examples
|
|
||||||
below are for a webhook named `MyWebHook`.
|
|
||||||
|
|
||||||
* `zerver/webhooks/mywebhook/__init__.py`: Empty file that is an obligatory
|
|
||||||
part of every Python package. Remember to `git add` it.
|
|
||||||
* `zerver/webhooks/mywebhook/view.py`: The main webhook integration function,
|
|
||||||
called `api_mywebhook_webhook`, along with any necessary helper functions.
|
|
||||||
* `zerver/webhooks/mywebhook/fixtures/message_type.json`: Sample JSON payload data
|
|
||||||
used by tests. Add one fixture file per type of message supported by your
|
|
||||||
integration.
|
|
||||||
* `zerver/webhooks/mywebhook/tests.py`: Tests for your webhook.
|
|
||||||
* `zerver/webhooks/mywebhook/doc.md`: End-user documentation explaining
|
|
||||||
how to add the integration.
|
|
||||||
* `static/images/integrations/logos/mywebhook.svg`: A square logo for the
|
|
||||||
platform/server/product you are integrating. Used on the documentation
|
|
||||||
pages as well as the sender's avatar for messages sent by the integration.
|
|
||||||
* `static/images/integrations/mywebhook/001.png`: A screenshot of a message
|
|
||||||
sent by the integration, used on the documentation page. This can be
|
|
||||||
generated by running `tools/screenshots/generate-integration-docs-screenshot --integration mywebhook`.
|
|
||||||
* `static/images/integrations/bot_avatars/mywebhook.png`: A square logo for the
|
|
||||||
platform/server/product you are integrating which is used to create the avatar
|
|
||||||
for generating screenshots with. This can be generated automatically from
|
|
||||||
`static/images/integrations/logos/mywebhook.svg` by running
|
|
||||||
`tools/setup/generate_integration_bots_avatars.py`.
|
|
||||||
|
|
||||||
### Files that need to be updated
|
|
||||||
|
|
||||||
* `zerver/lib/integrations.py`: Add your integration to
|
|
||||||
`WEBHOOK_INTEGRATIONS`. This will automatically register a
|
|
||||||
URL for the incoming webhook of the form `api/v1/external/mywebhook` and
|
|
||||||
associate it with the function called `api_mywebhook_webhook` in
|
|
||||||
`zerver/webhooks/mywebhook/view.py`. Also add your integration to
|
|
||||||
`DOC_SCREENSHOT_CONFIG`. This will allow you to automatically generate
|
|
||||||
a screenshot for the documentation by running
|
|
||||||
`tools/screenshots/generate-integration-docs-screenshot --integration mywebhook`.
|
|
||||||
|
|
||||||
## Common Helpers
|
|
||||||
|
|
||||||
* If your integration will receive a test webhook payload, you can use
|
|
||||||
`get_setup_webhook_message` to create our standard message for test payloads.
|
|
||||||
You can import this from `zerver/lib/webhooks/common.py`, and it will generate
|
|
||||||
a message like this: "GitHub webhook is successfully configured! 🎉"
|
|
||||||
|
|
||||||
## General advice
|
|
||||||
|
|
||||||
* Consider using our Zulip markup to make the output from your
|
|
||||||
integration especially attractive or useful (e.g., emoji, Markdown
|
|
||||||
emphasis, or @-mentions).
|
|
||||||
|
|
||||||
* Use topics effectively to ensure sequential messages about the same
|
|
||||||
thing are threaded together; this makes for much better consumption
|
|
||||||
by users. E.g., for a bug tracker integration, put the bug number in
|
|
||||||
the topic for all messages; for an integration like Nagios, put the
|
|
||||||
service in the topic.
|
|
||||||
|
|
||||||
* Integrations that don't match a team's workflow can often be
|
|
||||||
uselessly spammy. Give careful thought to providing options for
|
|
||||||
triggering Zulip messages only for certain message types, certain
|
|
||||||
projects, or sending different messages to different channels/topics,
|
|
||||||
to make it easy for teams to configure the integration to support
|
|
||||||
their workflow.
|
|
||||||
|
|
||||||
* Consistently capitalize the name of the integration in the
|
|
||||||
documentation and the Client name the way the vendor does. It's OK
|
|
||||||
to use all-lower-case in the implementation.
|
|
||||||
|
|
||||||
* Sometimes it can be helpful to contact the vendor if it appears they
|
|
||||||
don't have an API or webhook we can use; sometimes the right API
|
|
||||||
is just not properly documented.
|
|
||||||
|
|
||||||
* A helpful tool for testing your integration is
|
|
||||||
[UltraHook](http://www.ultrahook.com/), which allows you to receive webhook
|
|
||||||
calls via your local Zulip development environment. This enables you to do end-to-end
|
|
||||||
testing with live data from the service you're integrating and can help you
|
|
||||||
spot why something isn't working or if the service is using custom HTTP
|
|
||||||
headers.
|
|
||||||
|
|
||||||
## URL specification
|
|
||||||
|
|
||||||
The base URL for an incoming webhook integration bot, where
|
|
||||||
`INTEGRATION_NAME` is the name of the specific webhook integration and
|
|
||||||
`API_KEY` is the API key of the bot created by the user for the
|
|
||||||
integration, is:
|
|
||||||
|
|
||||||
```
|
|
||||||
{{ api_url }}/v1/external/INTEGRATION_NAME?api_key=API_KEY
|
|
||||||
```
|
|
||||||
|
|
||||||
The list of existing webhook integrations can be found by browsing the
|
|
||||||
[Integrations documentation](/integrations/) or in
|
|
||||||
`zerver/lib/integrations.py` at `WEBHOOK_INTEGRATIONS`.
|
|
||||||
|
|
||||||
Parameters accepted in the URL include:
|
|
||||||
|
|
||||||
### api_key *(required)*
|
|
||||||
|
|
||||||
The API key of the bot created by the user for the integration. To get a
|
|
||||||
bot's API key, see the [API keys](/api/api-keys) documentation.
|
|
||||||
|
|
||||||
### stream
|
|
||||||
|
|
||||||
The channel for the integration to send notifications to. Can be either
|
|
||||||
the channel ID or the [URL-encoded][url-encoder] channel name. By default
|
|
||||||
the integration will send direct messages to the bot's owner.
|
|
||||||
|
|
||||||
!!! tip ""
|
|
||||||
|
|
||||||
A channel ID can be found when [browsing channels][browse-channels]
|
|
||||||
in the web or desktop apps.
|
|
||||||
|
|
||||||
### topic
|
|
||||||
|
|
||||||
The topic in the specified channel for the integration to send
|
|
||||||
notifications to. The topic should also be [URL-encoded][url-encoder].
|
|
||||||
By default the integration will have a topic configured for channel
|
|
||||||
messages.
|
|
||||||
|
|
||||||
### only_events, exclude_events
|
|
||||||
|
|
||||||
Some incoming webhook integrations support these parameters to filter
|
|
||||||
which events will trigger a notification. You can append either
|
|
||||||
`&only_events=["event_a","event_b"]` or
|
|
||||||
`&exclude_events=["event_a","event_b"]` (or both, with different events)
|
|
||||||
to the URL, with an arbitrary number of supported events.
|
|
||||||
|
|
||||||
You can use UNIX-style wildcards like `*` to include multiple events.
|
|
||||||
For example, `test*` matches every event that starts with `test`.
|
|
||||||
|
|
||||||
!!! tip ""
|
|
||||||
|
|
||||||
For a list of supported events, see a specific [integration's
|
|
||||||
documentation](/integrations) page.
|
|
||||||
|
|
||||||
[browse-channels]: /help/introduction-to-channels#browse-and-subscribe-to-channels
|
|
||||||
[add-bot]: /help/add-a-bot-or-integration
|
|
||||||
[url-encoder]: https://www.urlencoder.org/
|
|
||||||
|
|
||||||
## Related articles
|
|
||||||
|
|
||||||
* [Integrations overview](/api/integrations-overview)
|
|
||||||
* [Incoming webhook walkthrough](/api/incoming-webhooks-walkthrough)
|
|
||||||
* [Non-webhook integrations](/api/non-webhook-integrations)
|
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
# The Zulip API
|
|
||||||
|
|
||||||
Zulip's APIs allow you to integrate other services with Zulip. This
|
|
||||||
guide should help you find the API you need:
|
|
||||||
|
|
||||||
* First, check if the tool you'd like to integrate with Zulip
|
|
||||||
[already has a native integration](/integrations/).
|
|
||||||
* Next, check if [Zapier](https://zapier.com/apps) or
|
|
||||||
[IFTTT](https://ifttt.com/search) has an integration.
|
|
||||||
[Zulip's Zapier integration](/integrations/doc/zapier) and
|
|
||||||
[Zulip's IFTTT integration](/integrations/doc/ifttt) often allow
|
|
||||||
integrating a new service with Zulip without writing any code.
|
|
||||||
* If you'd like to send content into Zulip, you can
|
|
||||||
[write a native incoming webhook integration](/api/incoming-webhooks-overview)
|
|
||||||
or use [Zulip's API for sending messages](/api/send-message).
|
|
||||||
* If you're building an interactive bot that reacts to activity inside
|
|
||||||
Zulip, you'll want to look at Zulip's
|
|
||||||
[Python framework for interactive bots](/api/running-bots) or
|
|
||||||
[Zulip's real-time events API](/api/get-events).
|
|
||||||
|
|
||||||
And if you still need to build your own integration with Zulip, check out
|
|
||||||
the full [REST API](/api/rest), generally starting with
|
|
||||||
[installing the API client bindings](/api/installation-instructions).
|
|
||||||
|
|
||||||
In case you already know how you want to build your integration and you're
|
|
||||||
just looking for an API key, we've got you covered [here](/api/api-keys).
|
|
||||||
@@ -1,77 +0,0 @@
|
|||||||
# Integrations overview
|
|
||||||
|
|
||||||
Integrations allow you to send data from other products into or out of
|
|
||||||
Zulip. Zulip natively integrates with dozens of products, and with hundreds
|
|
||||||
more through Zapier and IFTTT.
|
|
||||||
|
|
||||||
Zulip also makes it very easy to write your own integration, and (if you'd
|
|
||||||
like) to get it merged into the main Zulip repository.
|
|
||||||
|
|
||||||
Integrations are one of the most important parts of a group chat tool like
|
|
||||||
Zulip, and we are committed to making integrating with Zulip as easy as
|
|
||||||
possible.
|
|
||||||
|
|
||||||
## Set up an existing integration
|
|
||||||
|
|
||||||
Most existing integrations send content from a third-party product into
|
|
||||||
Zulip.
|
|
||||||
|
|
||||||
* Search Zulip's [list of native integrations](/integrations/) for the
|
|
||||||
third-party product. Each integration has a page describing how to set it
|
|
||||||
up.
|
|
||||||
|
|
||||||
* Check if [Zapier](https://zapier.com/apps) has an integration with the
|
|
||||||
product. If it does, follow [these instructions](/integrations/doc/zapier)
|
|
||||||
to set it up.
|
|
||||||
|
|
||||||
* Check if [IFTTT](https://ifttt.com/search) has an integration with the
|
|
||||||
product. If it does, follow [these instructions](/integrations/doc/ifttt)
|
|
||||||
to set it up.
|
|
||||||
|
|
||||||
* Use a third-party webhook integration designed to work with
|
|
||||||
[Slack's webhook API](https://api.slack.com/messaging/webhooks)
|
|
||||||
pointed at Zulip's
|
|
||||||
[Slack-compatible webhook API](/integrations/slack/slack_incoming).
|
|
||||||
|
|
||||||
* If the product can send email notifications, you can
|
|
||||||
[send those emails to a channel](/help/message-a-channel-by-email).
|
|
||||||
|
|
||||||
## Write your own integration
|
|
||||||
|
|
||||||
We've put a lot of effort into making this as easy as possible, but
|
|
||||||
all of the options below do require some comfort writing code. If you
|
|
||||||
need an integration and don't have an engineer on staff, [contact
|
|
||||||
us](/help/contact-support) and we'll see what we can do.
|
|
||||||
|
|
||||||
### Sending content into Zulip
|
|
||||||
|
|
||||||
* If the third-party service supports outgoing webhooks, you likely want to
|
|
||||||
build an [incoming webhook integration](/api/incoming-webhooks-overview).
|
|
||||||
|
|
||||||
* If it doesn't, you may want to write a
|
|
||||||
[script or plugin integration](/api/non-webhook-integrations).
|
|
||||||
|
|
||||||
* Finally, you can
|
|
||||||
[send messages using Zulip's API](/api/send-message).
|
|
||||||
|
|
||||||
### Sending and receiving content
|
|
||||||
|
|
||||||
* To react to activity inside Zulip, look at Zulip's
|
|
||||||
[Python framework for interactive bots](/api/running-bots) or
|
|
||||||
[Zulip's real-time events API](/api/get-events).
|
|
||||||
|
|
||||||
* If what you want isn't covered by the above, check out the full
|
|
||||||
[REST API](/api/rest). The web, mobile, desktop, and terminal apps are
|
|
||||||
built on top of this API, so it can do anything a human user can do. Most
|
|
||||||
but not all of the endpoints are documented on this site; if you need
|
|
||||||
something that isn't there check out Zulip's
|
|
||||||
[REST endpoints](https://github.com/zulip/zulip/blob/main/zproject/urls.py)
|
|
||||||
or [contact us](/help/contact-support) and we'll help you out.
|
|
||||||
|
|
||||||
## Related articles
|
|
||||||
|
|
||||||
* [Bots overview](/help/bots-overview)
|
|
||||||
* [Set up integrations](/help/set-up-integrations)
|
|
||||||
* [Add a bot or integration](/help/add-a-bot-or-integration)
|
|
||||||
* [Generate integration URL](/help/generate-integration-url)
|
|
||||||
* [Request an integration](/help/request-an-integration)
|
|
||||||
@@ -1,161 +0,0 @@
|
|||||||
# Message formatting
|
|
||||||
|
|
||||||
Zulip supports an extended version of Markdown for messages, as well as
|
|
||||||
some HTML level special behavior. The Zulip help center article on [message
|
|
||||||
formatting](/help/format-your-message-using-markdown) is the primary
|
|
||||||
documentation for Zulip's markup features. This article is currently a
|
|
||||||
changelog for updates to these features.
|
|
||||||
|
|
||||||
The [render a message](/api/render-message) endpoint can be used to get
|
|
||||||
the current HTML version of any Markdown syntax for message content.
|
|
||||||
|
|
||||||
## Code blocks
|
|
||||||
|
|
||||||
**Changes**: As of Zulip 4.0 (feature level 33), [code blocks][help-code]
|
|
||||||
can have a `data-code-language` attribute attached to the outer HTML
|
|
||||||
`div` element, which records the programming language that was selected
|
|
||||||
for syntax highlighting. This field is used in the
|
|
||||||
[playgrounds][help-playgrounds] feature for code blocks.
|
|
||||||
|
|
||||||
## Global times
|
|
||||||
|
|
||||||
**Changes**: In Zulip 3.0 (feature level 8), added [global time
|
|
||||||
mentions][help-global-time] to supported Markdown message formatting
|
|
||||||
features.
|
|
||||||
|
|
||||||
## Image previews
|
|
||||||
|
|
||||||
When a Zulip message is sent linking to an uploaded image, Zulip will
|
|
||||||
generate an image preview element with the following format.
|
|
||||||
|
|
||||||
``` html
|
|
||||||
<div class="message_inline_image">
|
|
||||||
<a href="/user_uploads/path/to/image.png" title="image.png">
|
|
||||||
<img data-original-dimensions="1920x1080"
|
|
||||||
src="/user_uploads/thumbnail/path/to/image.png/840x560.webp">
|
|
||||||
</a>
|
|
||||||
</div>
|
|
||||||
```
|
|
||||||
|
|
||||||
If the server has not yet generated thumbnails for the image at
|
|
||||||
the time the message is sent, the `img` element will be a temporary
|
|
||||||
loading indicator image and have the `image-loading-placeholder`
|
|
||||||
class, which clients can use to identify loading indicators and
|
|
||||||
replace them with a more native loading indicator element if
|
|
||||||
desired. For example:
|
|
||||||
|
|
||||||
``` html
|
|
||||||
<div class="message_inline_image">
|
|
||||||
<a href="/user_uploads/path/to/image.png" title="image.png">
|
|
||||||
<img class="image-loading-placeholder" data-original-dimensions="1920x1080" src="/path/to/spinner.png">
|
|
||||||
</a>
|
|
||||||
</div>
|
|
||||||
```
|
|
||||||
|
|
||||||
Once the server has a working thumbnail, such messages will be updated
|
|
||||||
via an `update_message` event, with the `rendering_only: true` flag
|
|
||||||
(telling clients not to adjust message edit history), with appropriate
|
|
||||||
adjusted `rendered_content`. A client should process those events by
|
|
||||||
just using the updated rendering. If thumbnailing failed, the same
|
|
||||||
type of event will edit the message's rendered form to remove the
|
|
||||||
image preview element, so no special client-side logic should be
|
|
||||||
required to process such errors.
|
|
||||||
|
|
||||||
Note that in the uncommon situation that the thumbnailing system is
|
|
||||||
backlogged, an individual message containing multiple image previews
|
|
||||||
may be re-rendered multiple times as each image finishes thumbnailing
|
|
||||||
and triggers a message update.
|
|
||||||
|
|
||||||
Clients are recommended to do the following when processing image
|
|
||||||
previews:
|
|
||||||
|
|
||||||
- Clients that would like to use the image's aspect ratio to lay out
|
|
||||||
one or more images in the message feed may use the
|
|
||||||
`data-original-dimensions` attribute, which is present even if the
|
|
||||||
image is a placeholder spinner. This attribute encodes the
|
|
||||||
dimensions of the original image as `{width}x{height}`. These
|
|
||||||
dimensions are for the image as rendered, _after_ any EXIF rotation
|
|
||||||
and mirroring has been applied.
|
|
||||||
- If the client would like to control the thumbnail resolution used,
|
|
||||||
it can replace the final section of the URL (`840x560.webp` in the
|
|
||||||
example above) with the `name` of its preferred format from the set
|
|
||||||
of supported formats provided by the server in the
|
|
||||||
`server_thumbnail_formats` portion of the `register`
|
|
||||||
response. Clients should not make any assumptions about what format
|
|
||||||
the server will use as the "default" thumbnail resolution, as it may
|
|
||||||
change over time.
|
|
||||||
- Download button type elements should provide the original image
|
|
||||||
(encoded via the `href` of the containing `a` tag).
|
|
||||||
- Lightbox elements for viewing an image should be designed to
|
|
||||||
immediately display any already-downloaded thumbnail while fetching
|
|
||||||
the original-quality image or an appropriate higher-quality
|
|
||||||
thumbnail from the server, to be transparently swapped in once it is
|
|
||||||
available. Clients that would like to size the lightbox based on the
|
|
||||||
size of the original image can use the `data-original-dimensions`
|
|
||||||
attribute, as described above.
|
|
||||||
- Animated images will have a `data-animated` attribute on the `img`
|
|
||||||
tag. As detailed in `server_thumbnail_formats`, both animated and
|
|
||||||
still images are available for clients to use, depending on their
|
|
||||||
preference. See, for example, the [web
|
|
||||||
setting](/help/allow-image-link-previews#configure-how-animated-images-are-played)
|
|
||||||
to control whether animated images are autoplayed in the message
|
|
||||||
feed.
|
|
||||||
- Clients should not assume that the requested format is the format
|
|
||||||
that they will receive; in rare cases where the client has an
|
|
||||||
out-of-date list of `server_thumbnail_formats`, the server will
|
|
||||||
provide an approximation of the client's requested format. Because
|
|
||||||
of this, clients should not assume that the pixel dimensions or file
|
|
||||||
format match what they requested.
|
|
||||||
- No other processing of the URLs is recommended.
|
|
||||||
|
|
||||||
**Changes**: In Zulip 9.2 (feature levels 278-279, and 287+), added
|
|
||||||
`data-original-dimensions` to the `image-loading-placeholder` spinner
|
|
||||||
images, containing the dimensions of the original image.
|
|
||||||
|
|
||||||
In Zulip 9.0 (feature level 276), added `data-original-dimensions`
|
|
||||||
attribute to images that have been thumbnailed, containing the
|
|
||||||
dimensions of the full-size version of the image. Thumbnailing itself
|
|
||||||
was reintroduced at feature level 275.
|
|
||||||
|
|
||||||
Previously, with the exception of Zulip servers that used the beta
|
|
||||||
Thumbor-based implementation years ago, all image previews in Zulip
|
|
||||||
messages were not thumbnailed; the `a` tag and the `img` tag would both
|
|
||||||
point to the original image.
|
|
||||||
|
|
||||||
Clients that correctly implement the current API should handle
|
|
||||||
Thumbor-based older thumbnails correctly, as long as they do not
|
|
||||||
assume that `data-original-dimensions` is present. Clients should not
|
|
||||||
assume that messages sent prior to the introduction of thumbnailing
|
|
||||||
have been re-rendered to use the new format or have thumbnails
|
|
||||||
available.
|
|
||||||
|
|
||||||
## Mentions
|
|
||||||
|
|
||||||
**Changes**: In Zulip 9.0 (feature level 247), `channel` was added
|
|
||||||
to the supported [wildcard][help-mention-all] options used in the
|
|
||||||
[mentions][help-mentions] Markdown message formatting feature.
|
|
||||||
|
|
||||||
## Spoilers
|
|
||||||
|
|
||||||
**Changes**: In Zulip 3.0 (feature level 15), added
|
|
||||||
[spoilers][help-spoilers] to supported Markdown message formatting
|
|
||||||
features.
|
|
||||||
|
|
||||||
## Removed features
|
|
||||||
|
|
||||||
**Changes**: In Zulip 4.0 (feature level 24), the rarely used `!avatar()`
|
|
||||||
and `!gravatar()` markup syntax, which was never documented and had an
|
|
||||||
inconsistent syntax, were removed.
|
|
||||||
|
|
||||||
## Related articles
|
|
||||||
|
|
||||||
* [Markdown formatting](/help/format-your-message-using-markdown)
|
|
||||||
* [Send a message](/api/send-message)
|
|
||||||
* [Render a message](/api/render-message)
|
|
||||||
|
|
||||||
[help-code]: /help/code-blocks
|
|
||||||
[help-playgrounds]: /help/code-blocks#code-playgrounds
|
|
||||||
[help-spoilers]: /help/spoilers
|
|
||||||
[help-global-time]: /help/global-times
|
|
||||||
[help-mentions]: /help/mention-a-user-or-group
|
|
||||||
[help-mention-all]: /help/mention-a-user-or-group#mention-everyone-on-a-channel
|
|
||||||
@@ -1,64 +0,0 @@
|
|||||||
# Error handling
|
|
||||||
|
|
||||||
Zulip's API will always return a JSON format response.
|
|
||||||
The HTTP status code indicates whether the request was successful
|
|
||||||
(200 = success, 4xx = user error, 5xx = server error).
|
|
||||||
|
|
||||||
Every response, both success and error responses, will contain at least
|
|
||||||
two keys:
|
|
||||||
|
|
||||||
- `msg`: an internationalized, human-readable error message string.
|
|
||||||
|
|
||||||
- `result`: either `"error"` or `"success"`, which is redundant with the
|
|
||||||
HTTP status code, but is convenient when print debugging.
|
|
||||||
|
|
||||||
Every error response will also contain an additional key:
|
|
||||||
|
|
||||||
- `code`: a machine-readable error string, with a default value of
|
|
||||||
`"BAD_REQUEST"` for general errors.
|
|
||||||
|
|
||||||
Clients should always check `code`, rather than `msg`, when looking for
|
|
||||||
specific error conditions. The string values for `msg` are
|
|
||||||
internationalized (e.g., the server will send the error message
|
|
||||||
translated into French if the user has a French locale), so checking
|
|
||||||
those strings will result in buggy code.
|
|
||||||
|
|
||||||
!!! tip ""
|
|
||||||
|
|
||||||
If a client needs information that is only present in the string value
|
|
||||||
of `msg` for a particular error response, then the developers
|
|
||||||
implementing the client should [start a conversation here][api-design]
|
|
||||||
in order to discuss getting a specific error `code` and/or relevant
|
|
||||||
additional key/value pairs for that error response.
|
|
||||||
|
|
||||||
In addition to the keys described above, some error responses will
|
|
||||||
contain other keys with further details that are useful for clients. The
|
|
||||||
specific keys present depend on the error `code`, and are documented at
|
|
||||||
the API endpoints where these particular errors appear.
|
|
||||||
|
|
||||||
**Changes**: Before Zulip 5.0 (feature level 76), all error responses
|
|
||||||
did not contain a `code` key, and its absence indicated that no specific
|
|
||||||
error `code` had been allocated for that error.
|
|
||||||
|
|
||||||
## Common error responses
|
|
||||||
|
|
||||||
Documented below are some error responses that are common to many
|
|
||||||
endpoints:
|
|
||||||
|
|
||||||
{generate_code_example|/rest-error-handling:post|fixture}
|
|
||||||
|
|
||||||
## Ignored Parameters
|
|
||||||
|
|
||||||
In JSON success responses, all Zulip REST API endpoints may return
|
|
||||||
an array of parameters sent in the request that are not supported
|
|
||||||
by that specific endpoint.
|
|
||||||
|
|
||||||
While this can be expected, e.g., when sending both current and legacy
|
|
||||||
names for a parameter to a Zulip server of unknown version, this often
|
|
||||||
indicates either a bug in the client implementation or an attempt to
|
|
||||||
configure a new feature while connected to an older Zulip server that
|
|
||||||
does not support said feature.
|
|
||||||
|
|
||||||
{generate_code_example|/settings:patch|fixture}
|
|
||||||
|
|
||||||
[api-design]: https://chat.zulip.org/#narrow/channel/378-api-design
|
|
||||||
@@ -1,125 +0,0 @@
|
|||||||
# Roles and permissions
|
|
||||||
|
|
||||||
Zulip offers several levels of permissions based on a
|
|
||||||
[user's role](/help/roles-and-permissions) in a Zulip organization.
|
|
||||||
|
|
||||||
Here are some important details to note when working with these
|
|
||||||
roles and permissions in Zulip's API:
|
|
||||||
|
|
||||||
## A user's role
|
|
||||||
|
|
||||||
A user's account data include a `role` property, which contains the
|
|
||||||
user's role in the Zulip organization. These roles are encoded as:
|
|
||||||
|
|
||||||
* Organization owner: 100
|
|
||||||
|
|
||||||
* Organization administrator: 200
|
|
||||||
|
|
||||||
* Organization moderator: 300
|
|
||||||
|
|
||||||
* Member: 400
|
|
||||||
|
|
||||||
* Guest: 600
|
|
||||||
|
|
||||||
User account data also include these boolean properties that duplicate
|
|
||||||
the related roles above:
|
|
||||||
|
|
||||||
* `is_owner` specifying whether the user is an organization owner.
|
|
||||||
|
|
||||||
* `is_admin` specifying whether the user is an organization administrator.
|
|
||||||
|
|
||||||
* `is_guest` specifying whether the user is a guest user.
|
|
||||||
|
|
||||||
These are intended as conveniences for simple clients, and clients
|
|
||||||
should prefer using the `role` field, since only that one is updated
|
|
||||||
by the [events API](/api/get-events).
|
|
||||||
|
|
||||||
Note that [`POST /register`](/api/register-queue) also returns an
|
|
||||||
`is_moderator` boolean property specifying whether the current user is
|
|
||||||
an organization moderator.
|
|
||||||
|
|
||||||
Additionally, user account data include an `is_billing_admin` property
|
|
||||||
specifying whether the user is a billing administrator for the Zulip
|
|
||||||
organization, which is not related to one of the roles listed above,
|
|
||||||
but rather allows for specific permissions related to billing
|
|
||||||
administration in [paid Zulip Cloud plans](https://zulip.com/plans/).
|
|
||||||
|
|
||||||
### User account data in the API
|
|
||||||
|
|
||||||
Endpoints that return the user account data / properties mentioned
|
|
||||||
above are:
|
|
||||||
|
|
||||||
* [`GET /users`](/api/get-users)
|
|
||||||
|
|
||||||
* [`GET /users/{user_id}`](/api/get-user)
|
|
||||||
|
|
||||||
* [`GET /users/{email}`](/api/get-user-by-email)
|
|
||||||
|
|
||||||
* [`GET /users/me`](/api/get-own-user)
|
|
||||||
|
|
||||||
* [`GET /events`](/api/get-events)
|
|
||||||
|
|
||||||
* [`POST /register`](/api/register-queue)
|
|
||||||
|
|
||||||
Note that the [`POST /register` endpoint](/api/register-queue) returns
|
|
||||||
the above boolean properties to describe the role of the current user,
|
|
||||||
when `realm_user` is present in `fetch_event_types`.
|
|
||||||
|
|
||||||
Additionally, the specific events returned by the
|
|
||||||
[`GET /events` endpoint](/api/get-events) containing data related
|
|
||||||
to user accounts and roles are the [`realm_user` add
|
|
||||||
event](/api/get-events#realm_user-add), and the
|
|
||||||
[`realm_user` update event](/api/get-events#realm_user-update).
|
|
||||||
|
|
||||||
## Permission levels
|
|
||||||
|
|
||||||
Many areas of Zulip are customizable by the roles
|
|
||||||
above, such as (but not limited to) [restricting message editing and
|
|
||||||
deletion](/help/restrict-message-editing-and-deletion) and
|
|
||||||
[channels permissions](/help/channel-permissions). The potential
|
|
||||||
permission levels are:
|
|
||||||
|
|
||||||
* Everyone / Any user including Guests (least restrictive)
|
|
||||||
|
|
||||||
* Members
|
|
||||||
|
|
||||||
* Full members
|
|
||||||
|
|
||||||
* Moderators
|
|
||||||
|
|
||||||
* Administrators
|
|
||||||
|
|
||||||
* Owners
|
|
||||||
|
|
||||||
* Nobody (most restrictive)
|
|
||||||
|
|
||||||
These permission levels and policies in the API are designed to be
|
|
||||||
cutoffs in that users with the specified role and above have the
|
|
||||||
specified ability or access. For example, a permission level documented
|
|
||||||
as 'moderators only' includes organization moderators, administrators,
|
|
||||||
and owners.
|
|
||||||
|
|
||||||
Note that specific settings and policies in the Zulip API that use these
|
|
||||||
permission levels will likely support a subset of those listed above.
|
|
||||||
|
|
||||||
## Group-based permissions
|
|
||||||
|
|
||||||
Some settings have been migrated to a more flexible system based on
|
|
||||||
[user groups](/api/group-setting-values).
|
|
||||||
|
|
||||||
## Determining if a user is a full member
|
|
||||||
|
|
||||||
When a Zulip organization has set up a [waiting period before new members
|
|
||||||
turn into full members](/help/restrict-permissions-of-new-members),
|
|
||||||
clients will need to determine if a user's account has aged past the
|
|
||||||
organization's waiting period threshold.
|
|
||||||
|
|
||||||
The `realm_waiting_period_threshold`, which is the number of days until
|
|
||||||
a user's account is treated as a full member, is returned by the
|
|
||||||
[`POST /register` endpoint](/api/register-queue) when `realm` is present
|
|
||||||
in `fetch_event_types`.
|
|
||||||
|
|
||||||
Clients can compare the `realm_waiting_period_threshold` to a user
|
|
||||||
account's `date_joined` property, which is the time the user account
|
|
||||||
was created, to determine if a user has the permissions of a full
|
|
||||||
member or a new member.
|
|
||||||
@@ -1,74 +0,0 @@
|
|||||||
# Interactive bots
|
|
||||||
|
|
||||||
Zulip's API has a powerful framework for interactive bots that react
|
|
||||||
to messages in Zulip.
|
|
||||||
|
|
||||||
## Running a bot
|
|
||||||
|
|
||||||
This guide will show you how to run an existing Zulip bot
|
|
||||||
found in [zulip_bots/bots](
|
|
||||||
https://github.com/zulip/python-zulip-api/tree/main/zulip_bots/zulip_bots/bots).
|
|
||||||
|
|
||||||
You'll need:
|
|
||||||
|
|
||||||
* An account in a Zulip organization
|
|
||||||
(e.g., [the Zulip development community](https://zulip.com/development-community/),
|
|
||||||
`<yourSubdomain>.zulipchat.com`, or a Zulip organization on your own
|
|
||||||
[development](https://zulip.readthedocs.io/en/latest/development/overview.html) or
|
|
||||||
[production](https://zulip.readthedocs.io/en/latest/production/install.html) server).
|
|
||||||
* A computer where you're running the bot from.
|
|
||||||
|
|
||||||
**Note: Please be considerate when testing experimental bots on public servers such as chat.zulip.org.**
|
|
||||||
|
|
||||||
{start_tabs}
|
|
||||||
|
|
||||||
1. [Create a bot](/help/add-a-bot-or-integration), making sure to select
|
|
||||||
**Generic bot** as the **Bot type**.
|
|
||||||
|
|
||||||
1. [Download the bot's `zuliprc` file](/api/configuring-python-bindings#download-a-zuliprc-file).
|
|
||||||
|
|
||||||
1. Use the following command to install the
|
|
||||||
[`zulip_bots` Python package](https://pypi.org/project/zulip-bots/):
|
|
||||||
|
|
||||||
pip3 install zulip_bots
|
|
||||||
|
|
||||||
1. Use the following command to start the bot process *(replacing
|
|
||||||
`~/path/to/zuliprc` with the path to the `zuliprc` file you downloaded above)*:
|
|
||||||
|
|
||||||
zulip-run-bot <bot-name> --config-file ~/path/to/zuliprc
|
|
||||||
|
|
||||||
1. Check the output of the command above to make sure your bot is running.
|
|
||||||
It should include the following line:
|
|
||||||
|
|
||||||
INFO:root:starting message handling...
|
|
||||||
|
|
||||||
1. Test your setup by [starting a new direct message](/help/starting-a-new-direct-message)
|
|
||||||
with the bot or [mentioning](/help/mention-a-user-or-group) the bot on a channel.
|
|
||||||
|
|
||||||
!!! tip ""
|
|
||||||
|
|
||||||
To use the latest development version of the `zulip_bots` package, follow
|
|
||||||
[these steps](writing-bots#installing-a-development-version-of-the-zulip-bots-package).
|
|
||||||
|
|
||||||
{end_tabs}
|
|
||||||
|
|
||||||
You can now play around with the bot and get it configured the way you
|
|
||||||
like. Eventually, you'll probably want to run it in a production
|
|
||||||
environment where it'll stay up, by [deploying](/api/deploying-bots) it on a
|
|
||||||
server using the Zulip Botserver.
|
|
||||||
|
|
||||||
## Common problems
|
|
||||||
|
|
||||||
* My bot won't start
|
|
||||||
* Ensure that your API config file is correct (download the config file from the server).
|
|
||||||
* Ensure that your bot script is located in `zulip_bots/bots/<my-bot>/`
|
|
||||||
* Are you using your own Zulip development server? Ensure that you run your bot outside
|
|
||||||
the Vagrant environment.
|
|
||||||
* Some bots require Python 3. Try switching to a Python 3 environment before running
|
|
||||||
your bot.
|
|
||||||
|
|
||||||
## Related articles
|
|
||||||
|
|
||||||
* [Non-webhook integrations](/api/non-webhook-integrations)
|
|
||||||
* [Deploying bots](/api/deploying-bots)
|
|
||||||
* [Writing bots](/api/writing-bots)
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
## Integrations
|
|
||||||
|
|
||||||
* [Overview](/api/integrations-overview)
|
|
||||||
* [Incoming webhook integrations](/api/incoming-webhooks-overview)
|
|
||||||
* [Hello world walkthrough](/api/incoming-webhooks-walkthrough)
|
|
||||||
* [Non-webhook integrations](/api/non-webhook-integrations)
|
|
||||||
|
|
||||||
## Interactive bots (beta)
|
|
||||||
|
|
||||||
* [Running bots](/api/running-bots)
|
|
||||||
* [Deploying bots](/api/deploying-bots)
|
|
||||||
* [Writing bots](/api/writing-bots)
|
|
||||||
* [Outgoing webhooks](/api/outgoing-webhooks)
|
|
||||||
|
|
||||||
## REST API
|
|
||||||
|
|
||||||
* [Overview](/api/rest)
|
|
||||||
* [Installation instructions](/api/installation-instructions)
|
|
||||||
* [API keys](/api/api-keys)
|
|
||||||
* [Configuring the Python bindings](/api/configuring-python-bindings)
|
|
||||||
* [HTTP headers](/api/http-headers)
|
|
||||||
* [Error handling](/api/rest-error-handling)
|
|
||||||
* [Roles and permissions](/api/roles-and-permissions)
|
|
||||||
* [Group-setting values](/api/group-setting-values)
|
|
||||||
* [Message formatting](/api/message-formatting)
|
|
||||||
* [Client libraries](/api/client-libraries)
|
|
||||||
* [API changelog](/api/changelog)
|
|
||||||
|
|
||||||
{!rest-endpoints.md!}
|
|
||||||
25
babel.config.js
Normal file
25
babel.config.js
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
"use strict";
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
plugins: [
|
||||||
|
[
|
||||||
|
"formatjs",
|
||||||
|
{
|
||||||
|
additionalFunctionNames: ["$t", "$t_html"],
|
||||||
|
overrideIdFn: (id, defaultMessage) => defaultMessage,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
],
|
||||||
|
presets: [
|
||||||
|
[
|
||||||
|
"@babel/preset-env",
|
||||||
|
{
|
||||||
|
corejs: "3.20",
|
||||||
|
shippedProposals: true,
|
||||||
|
useBuiltIns: "usage",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
"@babel/typescript",
|
||||||
|
],
|
||||||
|
sourceType: "unambiguous",
|
||||||
|
};
|
||||||
@@ -3,6 +3,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("contenttypes", "0001_initial"),
|
("contenttypes", "0001_initial"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("confirmation", "0001_initial"),
|
("confirmation", "0001_initial"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ from django.db import migrations
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("confirmation", "0002_realmcreationkey"),
|
("confirmation", "0002_realmcreationkey"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("confirmation", "0003_emailchangeconfirmation"),
|
("confirmation", "0003_emailchangeconfirmation"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("zerver", "0124_stream_enable_notifications"),
|
("zerver", "0124_stream_enable_notifications"),
|
||||||
("confirmation", "0004_remove_confirmationmanager"),
|
("confirmation", "0004_remove_confirmationmanager"),
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("confirmation", "0005_confirmation_realm"),
|
("confirmation", "0005_confirmation_realm"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("confirmation", "0006_realmcreationkey_presume_email_valid"),
|
("confirmation", "0006_realmcreationkey_presume_email_valid"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("confirmation", "0007_add_indexes"),
|
("confirmation", "0007_add_indexes"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -5,12 +5,12 @@ from datetime import timedelta
|
|||||||
|
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.db import migrations, transaction
|
from django.db import migrations, transaction
|
||||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
|
||||||
from django.db.migrations.state import StateApps
|
from django.db.migrations.state import StateApps
|
||||||
|
|
||||||
|
|
||||||
def set_expiry_date_for_existing_confirmations(
|
def set_expiry_date_for_existing_confirmations(
|
||||||
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
|
apps: StateApps, schema_editor: DatabaseSchemaEditor
|
||||||
) -> None:
|
) -> None:
|
||||||
Confirmation = apps.get_model("confirmation", "Confirmation")
|
Confirmation = apps.get_model("confirmation", "Confirmation")
|
||||||
if not Confirmation.objects.exists():
|
if not Confirmation.objects.exists():
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("confirmation", "0009_confirmation_expiry_date_backfill"),
|
("confirmation", "0009_confirmation_expiry_date_backfill"),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from django.db import migrations, models
|
|||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("confirmation", "0010_alter_confirmation_expiry_date"),
|
("confirmation", "0010_alter_confirmation_expiry_date"),
|
||||||
]
|
]
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user