Mirror of https://github.com/zulip/zulip.git (synced 2025-10-30 19:43:47 +00:00)

Compare commits

141 Commits

	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 2b95f54593 | ||
|  | d41f06e8a9 | ||
|  | d119e97755 | ||
|  | 5ea0d1d1e8 | ||
|  | fd66cfd93c | ||
|  | e76bab19a7 | ||
|  | 13532917ca | ||
|  | b5c9a006f0 | ||
|  | a2edd58b82 | ||
|  | d22cb7d01f | ||
|  | 76ce370181 | ||
|  | 64856d858e | ||
|  | c9796ba7f7 | ||
|  | b21117954d | ||
|  | 59f5ca713f | ||
|  | 67da8e8431 | ||
|  | b79fbf9239 | ||
|  | f1f937e4ea | ||
|  | 68628149db | ||
|  | f247721a2d | ||
|  | e3d6b4f210 | ||
|  | ea8e6149da | ||
|  | 376cd88a83 | ||
|  | bfd92260fd | ||
|  | 217431d0c4 | ||
|  | 30cc6798b3 | ||
|  | 677ad69555 | ||
|  | 95118d860d | ||
|  | b8888c801b | ||
|  | 7a9251a3e1 | ||
|  | 64ec413940 | ||
|  | 147c3998de | ||
|  | 79fc9c3281 | ||
|  | a33d7f0400 | ||
|  | 2471f6ad83 | ||
|  | 19d1ca3a1d | ||
|  | 9fcbc3a49b | ||
|  | 1413fda773 | ||
|  | 494e596be8 | ||
|  | 4cc25f8e84 | ||
|  | 19ab295172 | ||
|  | 31f02cd926 | ||
|  | 266c7c83e0 | ||
|  | dd198fd06e | ||
|  | 10e8928b0f | ||
|  | bc81275d3c | ||
|  | 6c8c3cd3dc | ||
|  | 1783515794 | ||
|  | 21026d984b | ||
|  | 66fe724c8a | ||
|  | 282d6edf2e | ||
|  | 785a7ec9e7 | ||
|  | c44d9f9b1b | ||
|  | 0d5d3c4912 | ||
|  | ef793590c1 | ||
|  | 3032ba15cf | ||
|  | 96a2ddffe7 | ||
|  | 2794362214 | ||
|  | 9b3e1e2c97 | ||
|  | ae44fdd7cc | ||
|  | b45cce61e7 | ||
|  | 2e923a0eb5 | ||
|  | f538f34d95 | ||
|  | 5d2befdc54 | ||
|  | cc8b83b261 | ||
|  | ac8f4aaa93 | ||
|  | 843c148c59 | ||
|  | d39bcf2264 | ||
|  | ce64a6b163 | ||
|  | 7875196783 | ||
|  | 56c1ad1a3d | ||
|  | d9aa4161f8 | ||
|  | 728155afee | ||
|  | 660501c782 | ||
|  | ad974c3ae3 | ||
|  | bc4029deae | ||
|  | 218ca61dd0 | ||
|  | 3419908f39 | ||
|  | af67990f14 | ||
|  | e6cf30fc22 | ||
|  | e2ccbe7c80 | ||
|  | 8b31387670 | ||
|  | 501eb09716 | ||
|  | 280d9db26d | ||
|  | cee6227f53 | ||
|  | cae803e8a9 | ||
|  | ba598366e9 | ||
|  | d452ad31e0 | ||
|  | aed813f44c | ||
|  | 71dae1b92a | ||
|  | 629ec1aa8b | ||
|  | 87d60a1fff | ||
|  | 98eef54e4f | ||
|  | 235ba339d0 | ||
|  | e5320cc1f6 | ||
|  | 1d72ea2fd5 | ||
|  | c7948a7960 | ||
|  | 04bb26be3a | ||
|  | 7f45ca9b22 | ||
|  | 1bedb965e9 | ||
|  | bc752188e7 | ||
|  | b0ea81fe16 | ||
|  | 358ab821c4 | ||
|  | 97322dd195 | ||
|  | 1ba48a04da | ||
|  | e8377b605f | ||
|  | 830f1e9f3f | ||
|  | 037b87b580 | ||
|  | 82a6e77301 | ||
|  | 9efb90510c | ||
|  | b255c8b8a6 | ||
|  | 03e8e8be9d | ||
|  | 2932d9cd28 | ||
|  | 0baa205ad3 | ||
|  | a8d8500c46 | ||
|  | aa19f43f0b | ||
|  | 0974b0130d | ||
|  | 8a1d2bb5b6 | ||
|  | a38976f25d | ||
|  | fccfc02981 | ||
|  | 929847ae2d | ||
|  | a3338f3735 | ||
|  | f377ef6dd7 | ||
|  | 4c9997a523 | ||
|  | 2470fba95c | ||
|  | 2a6145f7fb | ||
|  | 7036fea97b | ||
|  | 05a42fb8df | ||
|  | cd0b14ce2f | ||
|  | a1fc8fb079 | ||
|  | e147ee2087 | ||
|  | 61180020c1 | ||
|  | 2a473c57f4 | ||
|  | c0980e3e9e | ||
|  | 035d4c57be | ||
|  | fcbd24e72c | ||
|  | 29babba85a | ||
|  | 49ff894d6a | ||
|  | f3e75b6b5f | ||
|  | 6b9f37dc8f | ||
|  | cd926b8aae | ||

.browserslistrc: 6 lines changed (Normal file)
							| @@ -0,0 +1,6 @@ | ||||
| > 0.2% | ||||
| > 0.2% in US | ||||
| last 2 versions | ||||
| Firefox ESR | ||||
| not dead | ||||
| Chrome 26  # similar to PhantomJS | ||||
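
As an aside, not part of the diff above: a query list like the one in this new `.browserslistrc` can be previewed locally with the browserslist CLI, which prints the concrete browser versions the queries resolve to. A minimal sketch, assuming Node.js is available and the command is run from the directory containing the `.browserslistrc`:

```sh
# Print the browsers matched by the nearest browserslist config
# (a .browserslistrc file or a "browserslist" field in package.json).
npx browserslist
```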
							
								
								
									
.circleci/config.yml: 151 lines changed (Normal file)
							| @@ -0,0 +1,151 @@ | ||||
| # See https://zulip.readthedocs.io/en/latest/testing/continuous-integration.html for | ||||
| #   high-level documentation on our CircleCI setup. | ||||
| # See CircleCI upstream's docs on this config format: | ||||
| #   https://circleci.com/docs/2.0/language-python/ | ||||
| # | ||||
| version: 2.0 | ||||
| aliases: | ||||
|   - &create_cache_directories | ||||
|     run: | ||||
|       name: create cache directories | ||||
|       command: | | ||||
|           dirs=(/srv/zulip-{npm,venv}-cache) | ||||
|           sudo mkdir -p "${dirs[@]}" | ||||
|           sudo chown -R circleci "${dirs[@]}" | ||||
|  | ||||
|   - &restore_cache_package_json | ||||
|     restore_cache: | ||||
|       keys: | ||||
|       - v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }} | ||||
|  | ||||
|   - &restore_cache_requirements | ||||
|     restore_cache: | ||||
|       keys: | ||||
|       - v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor.txt" }}-{{ checksum "requirements/dev.txt" }} | ||||
|  | ||||
|   - &install_dependencies | ||||
|     run: | ||||
|       name: install dependencies | ||||
|       command: | | ||||
|         sudo apt-get update | ||||
|         # Install moreutils so we can use `ts` and `mispipe` in the following. | ||||
|         sudo apt-get install -y moreutils | ||||
|  | ||||
|         # CircleCI sets the following in Git config at clone time: | ||||
|         #   url.ssh://git@github.com.insteadOf https://github.com | ||||
|         # This breaks the Git clones in the NVM `install.sh` we run | ||||
|         # in `install-node`. | ||||
|         # TODO: figure out why that breaks, and whether we want it. | ||||
|         #   (Is it an optimization?) | ||||
|         rm -f /home/circleci/.gitconfig | ||||
|  | ||||
|         # This is the main setup job for the test suite | ||||
|         mispipe "tools/ci/setup-backend" ts | ||||
|  | ||||
|         # Cleaning caches is mostly unnecessary in Circle, because | ||||
|         # most builds don't get to write to the cache. | ||||
|         # mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0" ts | ||||
|  | ||||
|   - &save_cache_package_json | ||||
|     save_cache: | ||||
|       paths: | ||||
|         - /srv/zulip-npm-cache | ||||
|       key: v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }} | ||||
|  | ||||
|   - &save_cache_requirements | ||||
|     save_cache: | ||||
|       paths: | ||||
|         - /srv/zulip-venv-cache | ||||
|       key: v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor.txt" }}-{{ checksum "requirements/dev.txt" }} | ||||
|     # TODO: in Travis we also cache ~/zulip-emoji-cache, ~/node, ~/misc | ||||
|  | ||||
|   - &run_backend_tests | ||||
|     run: | ||||
|       name: run backend tests | ||||
|       command: | | ||||
|         . /srv/zulip-py3-venv/bin/activate | ||||
|         mispipe ./tools/ci/backend ts | ||||
|  | ||||
|   - &run_frontend_tests | ||||
|     run: | ||||
|       name: run frontend tests | ||||
|       command: | | ||||
|         . /srv/zulip-py3-venv/bin/activate | ||||
|         mispipe ./tools/ci/frontend ts | ||||
|  | ||||
|   - &upload_coverage_report | ||||
|     run: | ||||
|      name: upload coverage report | ||||
|      command: | | ||||
|        . /srv/zulip-py3-venv/bin/activate | ||||
|        pip install codecov && codecov \ | ||||
|          || echo "Error in uploading coverage reports to codecov.io." | ||||
|  | ||||
| jobs: | ||||
|   "xenial-backend-frontend-python3.5": | ||||
|     docker: | ||||
|       # This is built from tools/circleci/images/xenial/Dockerfile . | ||||
|       # Xenial ships with Python 3.5. | ||||
|       - image: gregprice/circleci:xenial-python-4.test | ||||
|  | ||||
|     working_directory: ~/zulip | ||||
|  | ||||
|     steps: | ||||
|       - checkout | ||||
|  | ||||
|       - *create_cache_directories | ||||
|       - *restore_cache_package_json | ||||
|       - *restore_cache_requirements | ||||
|       - *install_dependencies | ||||
|       - *save_cache_package_json | ||||
|       - *save_cache_requirements | ||||
|       - *run_backend_tests | ||||
|       - *run_frontend_tests | ||||
|       # We only need to upload coverage reports on whichever platform | ||||
|       # runs the frontend tests. | ||||
|       - *upload_coverage_report | ||||
|  | ||||
|       - store_artifacts: | ||||
|           path: ./var/casper/ | ||||
|           destination: casper | ||||
|  | ||||
|       - store_artifacts:     | ||||
|           path: ../../../tmp/zulip-test-event-log/ | ||||
|           destination: test-reports | ||||
|  | ||||
|       - store_test_results: | ||||
|             path: ./var/xunit-test-results/casper/ | ||||
|  | ||||
|   "bionic-backend-python3.6": | ||||
|     docker: | ||||
|       # This is built from tools/circleci/images/bionic/Dockerfile . | ||||
|       # Bionic ships with Python 3.6. | ||||
|       - image: gregprice/circleci:bionic-python-1.test | ||||
|  | ||||
|     working_directory: ~/zulip | ||||
|  | ||||
|     steps: | ||||
|       - checkout | ||||
|  | ||||
|       - *create_cache_directories | ||||
|  | ||||
|       - run: | ||||
|           name: do Bionic hack | ||||
|           command: | | ||||
|               # Temporary hack till `sudo service redis-server start` gets fixed in Bionic. See | ||||
|               # https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI | ||||
|               sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf | ||||
|  | ||||
|       - *restore_cache_package_json | ||||
|       - *restore_cache_requirements | ||||
|       - *install_dependencies | ||||
|       - *save_cache_package_json | ||||
|       - *save_cache_requirements | ||||
|       - *run_backend_tests | ||||
|  | ||||
| workflows: | ||||
|   version: 2 | ||||
|   build: | ||||
|     jobs: | ||||
|       - "xenial-backend-frontend-python3.5" | ||||
|       - "bionic-backend-python3.6" | ||||
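
A side note rather than part of the commit range: a CircleCI 2.0 config like the one added above can be checked for syntax and schema problems before pushing, using the CircleCI command-line tool. A minimal sketch, assuming the `circleci` CLI is installed and the command is run from the repository root so it finds `.circleci/config.yml`:

```sh
# Validate .circleci/config.yml locally before pushing.
circleci config validate
```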
| @@ -1,30 +0,0 @@ | ||||
| te | ||||
| ans | ||||
| pullrequest | ||||
| ist | ||||
| cros | ||||
| wit | ||||
| nwe | ||||
| circularly | ||||
| ned | ||||
| ba | ||||
| ressemble | ||||
| ser | ||||
| sur | ||||
| hel | ||||
| fpr | ||||
| alls | ||||
| nd | ||||
| ot | ||||
| womens | ||||
| vise | ||||
| falsy | ||||
| ro | ||||
| derails | ||||
| forin | ||||
| uper | ||||
| slac | ||||
| couldn | ||||
| ges | ||||
| assertIn | ||||
| thirdparty | ||||
| @@ -3,23 +3,23 @@ root = true | ||||
| [*] | ||||
| end_of_line = lf | ||||
| charset = utf-8 | ||||
| indent_size = 4 | ||||
| indent_style = space | ||||
| insert_final_newline = true | ||||
| trim_trailing_whitespace = true | ||||
| insert_final_newline = true | ||||
|  | ||||
| [[shell]] | ||||
| binary_next_line = true | ||||
| switch_case_indent = true | ||||
| [*.{sh,py,pyi,js,ts,json,yml,xml,css,md,markdown,handlebars,html}] | ||||
| indent_style = space | ||||
| indent_size = 4 | ||||
|  | ||||
| [{*.{js,json,ts},check-openapi}] | ||||
| max_line_length = 100 | ||||
|  | ||||
| [*.{py,pyi}] | ||||
| [*.py] | ||||
| max_line_length = 110 | ||||
|  | ||||
| [*.{md,svg,rb,pp,yaml,yml}] | ||||
| [*.{js,ts}] | ||||
| max_line_length = 100 | ||||
|  | ||||
| [*.{svg,rb,pp,pl}] | ||||
| indent_style = space | ||||
| indent_size = 2 | ||||
|  | ||||
| [package.json] | ||||
| indent_size = 2 | ||||
| [*.cfg] | ||||
| indent_style = space | ||||
| indent_size = 8 | ||||
|   | ||||
| @@ -4,12 +4,7 @@ | ||||
|  | ||||
| /docs/_build | ||||
| /static/generated | ||||
| /static/third | ||||
| /static/webpack-bundles | ||||
| /var/* | ||||
| !/var/puppeteer | ||||
| /var/puppeteer/* | ||||
| !/var/puppeteer/test_credentials.d.ts | ||||
| /web/generated | ||||
| /web/third | ||||
| /zulip-current-venv | ||||
| /var | ||||
| /zulip-py3-venv | ||||
|   | ||||
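
Not part of the diff: when adjusting ignore rules like the ones above, `git check-ignore -v` reports which pattern (and which file and line) causes a given path to be ignored. A minimal sketch; the path used here is only an illustrative placeholder:

```sh
# Show which ignore rule, if any, matches the given path.
git check-ignore -v var/some-generated-file
```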
							
								
								
									
.eslintrc.js: 293 lines changed
							| @@ -1,293 +0,0 @@ | ||||
| "use strict"; | ||||
|  | ||||
| const confusingBrowserGlobals = require("confusing-browser-globals"); | ||||
|  | ||||
| module.exports = { | ||||
|     root: true, | ||||
|     env: { | ||||
|         es2020: true, | ||||
|         node: true, | ||||
|     }, | ||||
|     extends: [ | ||||
|         "eslint:recommended", | ||||
|         "plugin:import/errors", | ||||
|         "plugin:import/warnings", | ||||
|         "plugin:no-jquery/recommended", | ||||
|         "plugin:no-jquery/deprecated", | ||||
|         "plugin:unicorn/recommended", | ||||
|         "prettier", | ||||
|     ], | ||||
|     parser: "@babel/eslint-parser", | ||||
|     parserOptions: { | ||||
|         requireConfigFile: false, | ||||
|         warnOnUnsupportedTypeScriptVersion: false, | ||||
|         sourceType: "unambiguous", | ||||
|     }, | ||||
|     plugins: ["formatjs", "no-jquery"], | ||||
|     settings: { | ||||
|         formatjs: { | ||||
|             additionalFunctionNames: ["$t", "$t_html"], | ||||
|         }, | ||||
|         "no-jquery": { | ||||
|             collectionReturningPlugins: { | ||||
|                 expectOne: "always", | ||||
|             }, | ||||
|             variablePattern: "^\\$(?!t$|t_html$).", | ||||
|         }, | ||||
|     }, | ||||
|     reportUnusedDisableDirectives: true, | ||||
|     rules: { | ||||
|         "array-callback-return": "error", | ||||
|         "arrow-body-style": "error", | ||||
|         "block-scoped-var": "error", | ||||
|         "consistent-return": "error", | ||||
|         curly: "error", | ||||
|         "dot-notation": "error", | ||||
|         eqeqeq: "error", | ||||
|         "formatjs/enforce-default-message": ["error", "literal"], | ||||
|         "formatjs/enforce-placeholders": [ | ||||
|             "error", | ||||
|             {ignoreList: ["b", "code", "em", "i", "kbd", "p", "strong"]}, | ||||
|         ], | ||||
|         "formatjs/no-id": "error", | ||||
|         "guard-for-in": "error", | ||||
|         "import/extensions": "error", | ||||
|         "import/first": "error", | ||||
|         "import/newline-after-import": "error", | ||||
|         "import/no-cycle": ["error", {ignoreExternal: true}], | ||||
|         "import/no-duplicates": "error", | ||||
|         "import/no-self-import": "error", | ||||
|         "import/no-unresolved": "off", | ||||
|         "import/no-useless-path-segments": "error", | ||||
|         "import/order": ["error", {alphabetize: {order: "asc"}, "newlines-between": "always"}], | ||||
|         "import/unambiguous": "error", | ||||
|         "lines-around-directive": "error", | ||||
|         "new-cap": "error", | ||||
|         "no-alert": "error", | ||||
|         "no-array-constructor": "error", | ||||
|         "no-bitwise": "error", | ||||
|         "no-caller": "error", | ||||
|         "no-catch-shadow": "error", | ||||
|         "no-constant-condition": ["error", {checkLoops: false}], | ||||
|         "no-div-regex": "error", | ||||
|         "no-else-return": "error", | ||||
|         "no-eq-null": "error", | ||||
|         "no-eval": "error", | ||||
|         "no-implicit-coercion": "error", | ||||
|         "no-implied-eval": "error", | ||||
|         "no-inner-declarations": "off", | ||||
|         "no-iterator": "error", | ||||
|         "no-jquery/no-append-html": "error", | ||||
|         "no-jquery/no-constructor-attributes": "error", | ||||
|         "no-jquery/no-parse-html-literal": "error", | ||||
|         "no-label-var": "error", | ||||
|         "no-labels": "error", | ||||
|         "no-loop-func": "error", | ||||
|         "no-multi-str": "error", | ||||
|         "no-native-reassign": "error", | ||||
|         "no-new-func": "error", | ||||
|         "no-new-object": "error", | ||||
|         "no-new-wrappers": "error", | ||||
|         "no-octal-escape": "error", | ||||
|         "no-plusplus": "error", | ||||
|         "no-proto": "error", | ||||
|         "no-restricted-globals": ["error", ...confusingBrowserGlobals], | ||||
|         "no-return-assign": "error", | ||||
|         "no-script-url": "error", | ||||
|         "no-self-compare": "error", | ||||
|         "no-sync": "error", | ||||
|         "no-throw-literal": "error", | ||||
|         "no-undef-init": "error", | ||||
|         "no-unneeded-ternary": ["error", {defaultAssignment: false}], | ||||
|         "no-unused-expressions": "error", | ||||
|         "no-unused-vars": [ | ||||
|             "error", | ||||
|             {args: "all", argsIgnorePattern: "^_", ignoreRestSiblings: true}, | ||||
|         ], | ||||
|         "no-use-before-define": ["error", {functions: false}], | ||||
|         "no-useless-concat": "error", | ||||
|         "no-useless-constructor": "error", | ||||
|         "no-var": "error", | ||||
|         "object-shorthand": ["error", "always", {avoidExplicitReturnArrows: true}], | ||||
|         "one-var": ["error", "never"], | ||||
|         "prefer-arrow-callback": "error", | ||||
|         "prefer-const": ["error", {ignoreReadBeforeAssign: true}], | ||||
|         radix: "error", | ||||
|         "sort-imports": ["error", {ignoreDeclarationSort: true}], | ||||
|         "spaced-comment": ["error", "always", {markers: ["/"]}], | ||||
|         strict: "error", | ||||
|         "unicorn/consistent-function-scoping": "off", | ||||
|         "unicorn/explicit-length-check": "off", | ||||
|         "unicorn/filename-case": "off", | ||||
|         "unicorn/no-await-expression-member": "off", | ||||
|         "unicorn/no-negated-condition": "off", | ||||
|         "unicorn/no-null": "off", | ||||
|         "unicorn/no-process-exit": "off", | ||||
|         "unicorn/no-useless-undefined": "off", | ||||
|         "unicorn/numeric-separators-style": "off", | ||||
|         "unicorn/prefer-module": "off", | ||||
|         "unicorn/prefer-node-protocol": "off", | ||||
|         "unicorn/prefer-string-raw": "off", | ||||
|         "unicorn/prefer-ternary": "off", | ||||
|         "unicorn/prefer-top-level-await": "off", | ||||
|         "unicorn/prevent-abbreviations": "off", | ||||
|         "unicorn/switch-case-braces": "off", | ||||
|         "valid-typeof": ["error", {requireStringLiterals: true}], | ||||
|         yoda: "error", | ||||
|     }, | ||||
|     overrides: [ | ||||
|         { | ||||
|             files: ["web/tests/**"], | ||||
|             rules: { | ||||
|                 "no-jquery/no-selector-prop": "off", | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             files: ["web/e2e-tests/**"], | ||||
|             globals: { | ||||
|                 zulip_test: false, | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             files: ["web/src/**"], | ||||
|             globals: { | ||||
|                 StripeCheckout: false, | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             files: ["**/*.ts"], | ||||
|             extends: [ | ||||
|                 "plugin:@typescript-eslint/strict-type-checked", | ||||
|                 "plugin:@typescript-eslint/stylistic-type-checked", | ||||
|                 "plugin:import/typescript", | ||||
|             ], | ||||
|             parserOptions: { | ||||
|                 project: "tsconfig.json", | ||||
|             }, | ||||
|             settings: { | ||||
|                 "import/resolver": { | ||||
|                     node: { | ||||
|                         extensions: [".ts", ".d.ts", ".js"], // https://github.com/import-js/eslint-plugin-import/issues/2267 | ||||
|                     }, | ||||
|                 }, | ||||
|             }, | ||||
|             globals: { | ||||
|                 JQuery: false, | ||||
|             }, | ||||
|             rules: { | ||||
|                 // Disable base rule to avoid conflict | ||||
|                 "no-use-before-define": "off", | ||||
|  | ||||
|                 "@typescript-eslint/consistent-type-assertions": [ | ||||
|                     "error", | ||||
|                     {assertionStyle: "never"}, | ||||
|                 ], | ||||
|                 "@typescript-eslint/consistent-type-definitions": ["error", "type"], | ||||
|                 "@typescript-eslint/consistent-type-imports": "error", | ||||
|                 "@typescript-eslint/explicit-function-return-type": [ | ||||
|                     "error", | ||||
|                     {allowExpressions: true}, | ||||
|                 ], | ||||
|                 "@typescript-eslint/member-ordering": "error", | ||||
|                 "@typescript-eslint/method-signature-style": "error", | ||||
|                 "@typescript-eslint/no-non-null-assertion": "off", | ||||
|                 "@typescript-eslint/no-unnecessary-condition": "off", | ||||
|                 "@typescript-eslint/no-unnecessary-qualifier": "error", | ||||
|                 "@typescript-eslint/no-unused-vars": [ | ||||
|                     "error", | ||||
|                     {args: "all", argsIgnorePattern: "^_", ignoreRestSiblings: true}, | ||||
|                 ], | ||||
|                 "@typescript-eslint/no-use-before-define": ["error", {functions: false}], | ||||
|                 "@typescript-eslint/parameter-properties": "error", | ||||
|                 "@typescript-eslint/promise-function-async": "error", | ||||
|                 "@typescript-eslint/restrict-plus-operands": ["error", {}], | ||||
|                 "@typescript-eslint/restrict-template-expressions": ["error", {}], | ||||
|                 "no-undef": "error", | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             files: ["**/*.d.ts"], | ||||
|             rules: { | ||||
|                 "import/unambiguous": "off", | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             files: ["web/e2e-tests/**", "web/tests/**"], | ||||
|             globals: { | ||||
|                 CSS: false, | ||||
|                 document: false, | ||||
|                 navigator: false, | ||||
|                 window: false, | ||||
|             }, | ||||
|             rules: { | ||||
|                 "formatjs/no-id": "off", | ||||
|                 "new-cap": "off", | ||||
|                 "no-sync": "off", | ||||
|                 "unicorn/prefer-prototype-methods": "off", | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             files: ["web/debug-require.js"], | ||||
|             env: { | ||||
|                 browser: true, | ||||
|                 es2020: false, | ||||
|             }, | ||||
|             rules: { | ||||
|                 // Don’t require ES features that PhantomJS doesn’t support | ||||
|                 // TODO: Toggle these settings now that we don't use PhantomJS | ||||
|                 "no-var": "off", | ||||
|                 "object-shorthand": "off", | ||||
|                 "prefer-arrow-callback": "off", | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             files: ["web/shared/**", "web/src/**", "web/third/**"], | ||||
|             env: { | ||||
|                 browser: true, | ||||
|                 node: false, | ||||
|             }, | ||||
|             globals: { | ||||
|                 DEVELOPMENT: false, | ||||
|                 ZULIP_VERSION: false, | ||||
|             }, | ||||
|             rules: { | ||||
|                 "no-console": "error", | ||||
|             }, | ||||
|             settings: { | ||||
|                 "import/resolver": { | ||||
|                     webpack: { | ||||
|                         config: "./web/webpack.config.ts", | ||||
|                     }, | ||||
|                 }, | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             files: ["web/shared/**"], | ||||
|             env: { | ||||
|                 browser: false, | ||||
|                 "shared-node-browser": true, | ||||
|             }, | ||||
|             rules: { | ||||
|                 "import/no-restricted-paths": [ | ||||
|                     "error", | ||||
|                     { | ||||
|                         zones: [ | ||||
|                             { | ||||
|                                 target: "./web/shared", | ||||
|                                 from: ".", | ||||
|                                 except: ["./node_modules", "./web/shared"], | ||||
|                             }, | ||||
|                         ], | ||||
|                     }, | ||||
|                 ], | ||||
|                 "unicorn/prefer-string-replace-all": "off", | ||||
|             }, | ||||
|         }, | ||||
|         { | ||||
|             files: ["web/server/**"], | ||||
|             env: { | ||||
|                 node: true, | ||||
|             }, | ||||
|         }, | ||||
|     ], | ||||
| }; | ||||
							
								
								
									
.eslintrc.json: 496 lines changed (Normal file)
							| @@ -0,0 +1,496 @@ | ||||
| { | ||||
|     "env": { | ||||
|         "node": true, | ||||
|         "es6": true | ||||
|     }, | ||||
|     "parserOptions": { | ||||
|         "ecmaVersion": 2019, | ||||
|         "warnOnUnsupportedTypeScriptVersion": false, | ||||
|         "sourceType": "module" | ||||
|     }, | ||||
|     "globals": { | ||||
|         "$": false, | ||||
|         "ClipboardJS": false, | ||||
|         "Dict": false, | ||||
|         "FetchStatus": false, | ||||
|         "Filter": false, | ||||
|         "Handlebars": false, | ||||
|         "LightboxCanvas": false, | ||||
|         "MessageListData": false, | ||||
|         "MessageListView": false, | ||||
|         "Plotly": false, | ||||
|         "SockJS": false, | ||||
|         "Socket": false, | ||||
|         "Sortable": false, | ||||
|         "WinChan": false, | ||||
|         "XDate": false, | ||||
|         "_": false, | ||||
|         "activity": false, | ||||
|         "admin": false, | ||||
|         "alert_words": false, | ||||
|         "alert_words_ui": false, | ||||
|         "attachments_ui": false, | ||||
|         "avatar": false, | ||||
|         "billing": false, | ||||
|         "blueslip": false, | ||||
|         "bot_data": false, | ||||
|         "bridge": false, | ||||
|         "buddy_data": false, | ||||
|         "buddy_list": false, | ||||
|         "channel": false, | ||||
|         "click_handlers": false, | ||||
|         "color_data": false, | ||||
|         "colorspace": false, | ||||
|         "common": false, | ||||
|         "components": false, | ||||
|         "compose": false, | ||||
|         "compose_actions": false, | ||||
|         "compose_fade": false, | ||||
|         "compose_pm_pill": false, | ||||
|         "compose_state": false, | ||||
|         "compose_ui": false, | ||||
|         "composebox_typeahead": false, | ||||
|         "condense": false, | ||||
|         "confirm_dialog": false, | ||||
|         "copy_and_paste": false, | ||||
|         "csrf_token": false, | ||||
|         "current_msg_list": true, | ||||
|         "drafts": false, | ||||
|         "echo": false, | ||||
|         "emoji": false, | ||||
|         "emoji_codes": false, | ||||
|         "emoji_picker": false, | ||||
|         "favicon": false, | ||||
|         "feature_flags": false, | ||||
|         "feedback_widget": false, | ||||
|         "fenced_code": false, | ||||
|         "flatpickr": false, | ||||
|         "floating_recipient_bar": false, | ||||
|         "gear_menu": false, | ||||
|         "hash_util": false, | ||||
|         "hashchange": false, | ||||
|         "helpers": false, | ||||
|         "history": false, | ||||
|         "home_msg_list": false, | ||||
|         "hotspots": false, | ||||
|         "i18n": false, | ||||
|         "info_overlay": false, | ||||
|         "input_pill": false, | ||||
|         "invite": false, | ||||
|         "jQuery": false, | ||||
|         "katex": false, | ||||
|         "keydown_util": false, | ||||
|         "lightbox": false, | ||||
|         "list_cursor": false, | ||||
|         "list_render": false, | ||||
|         "list_util": false, | ||||
|         "loading": false, | ||||
|         "localStorage": false, | ||||
|         "local_message": false, | ||||
|         "localstorage": false, | ||||
|         "location": false, | ||||
|         "markdown": false, | ||||
|         "marked": false, | ||||
|         "md5": false, | ||||
|         "message_edit": false, | ||||
|         "message_events": false, | ||||
|         "message_fetch": false, | ||||
|         "message_flags": false, | ||||
|         "message_list": false, | ||||
|         "message_live_update": false, | ||||
|         "message_scroll": false, | ||||
|         "message_store": false, | ||||
|         "message_util": false, | ||||
|         "message_viewport": false, | ||||
|         "moment": false, | ||||
|         "muting": false, | ||||
|         "muting_ui": false, | ||||
|         "narrow": false, | ||||
|         "narrow_state": false, | ||||
|         "navigate": false, | ||||
|         "night_mode": false, | ||||
|         "notifications": false, | ||||
|         "overlays": false, | ||||
|         "padded_widget": false, | ||||
|         "page_params": false, | ||||
|         "panels": false, | ||||
|         "people": false, | ||||
|         "pm_conversations": false, | ||||
|         "pm_list": false, | ||||
|         "pointer": false, | ||||
|         "popovers": false, | ||||
|         "presence": false, | ||||
|         "pygments_data": false, | ||||
|         "reactions": false, | ||||
|         "realm_icon": false, | ||||
|         "realm_logo": false, | ||||
|         "realm_night_logo": false, | ||||
|         "recent_senders": false, | ||||
|         "reload": false, | ||||
|         "reload_state": false, | ||||
|         "reminder": false, | ||||
|         "resize": false, | ||||
|         "rows": false, | ||||
|         "rtl": false, | ||||
|         "run_test": false, | ||||
|         "schema": false, | ||||
|         "scroll_bar": false, | ||||
|         "scroll_util": false, | ||||
|         "search": false, | ||||
|         "search_pill": false, | ||||
|         "search_pill_widget": false, | ||||
|         "search_suggestion": false, | ||||
|         "search_util": false, | ||||
|         "sent_messages": false, | ||||
|         "server_events": false, | ||||
|         "server_events_dispatch": false, | ||||
|         "settings": false, | ||||
|         "settings_account": false, | ||||
|         "settings_bots": false, | ||||
|         "settings_display": false, | ||||
|         "settings_emoji": false, | ||||
|         "settings_exports": false, | ||||
|         "settings_linkifiers": false, | ||||
|         "settings_invites": false, | ||||
|         "settings_muting": false, | ||||
|         "settings_notifications": false, | ||||
|         "settings_org": false, | ||||
|         "settings_panel_menu": false, | ||||
|         "settings_profile_fields": false, | ||||
|         "settings_sections": false, | ||||
|         "settings_streams": false, | ||||
|         "settings_toggle": false, | ||||
|         "settings_ui": false, | ||||
|         "settings_user_groups": false, | ||||
|         "settings_users": false, | ||||
|         "starred_messages": false, | ||||
|         "stream_color": false, | ||||
|         "stream_create": false, | ||||
|         "stream_data": false, | ||||
|         "stream_edit": false, | ||||
|         "stream_events": false, | ||||
|         "stream_list": false, | ||||
|         "stream_muting": false, | ||||
|         "stream_popover": false, | ||||
|         "stream_sort": false, | ||||
|         "stream_ui_updates": false, | ||||
|         "StripeCheckout": false, | ||||
|         "submessage": false, | ||||
|         "subs": false, | ||||
|         "tab_bar": false, | ||||
|         "templates": false, | ||||
|         "tictactoe_widget": false, | ||||
|         "timerender": false, | ||||
|         "todo_widget": false, | ||||
|         "top_left_corner": false, | ||||
|         "topic_data": false, | ||||
|         "topic_generator": false, | ||||
|         "topic_list": false, | ||||
|         "topic_zoom": false, | ||||
|         "transmit": false, | ||||
|         "tutorial": false, | ||||
|         "typeahead_helper": false, | ||||
|         "typing": false, | ||||
|         "typing_data": false, | ||||
|         "typing_events": false, | ||||
|         "ui": false, | ||||
|         "ui_init": false, | ||||
|         "ui_report": false, | ||||
|         "ui_util": false, | ||||
|         "unread": false, | ||||
|         "unread_ops": false, | ||||
|         "unread_ui": false, | ||||
|         "upgrade": false, | ||||
|         "upload": false, | ||||
|         "upload_widget": false, | ||||
|         "user_events": false, | ||||
|         "user_groups": false, | ||||
|         "user_pill": false, | ||||
|         "user_search": false, | ||||
|         "user_status": false, | ||||
|         "user_status_ui": false, | ||||
|         "util": false, | ||||
|         "poll_widget": false, | ||||
|         "widgetize": false, | ||||
|         "zcommand": false, | ||||
|         "zform": false, | ||||
|         "zxcvbn": false | ||||
|     }, | ||||
|     "plugins": [ | ||||
|         "eslint-plugin-empty-returns" | ||||
|     ], | ||||
|     "rules": { | ||||
|         "array-callback-return": "error", | ||||
|         "array-bracket-spacing": "error", | ||||
|         "arrow-spacing": [ "error", { "before": true, "after": true } ], | ||||
|         "block-scoped-var": "error", | ||||
|         "brace-style": [ "error", "1tbs", { "allowSingleLine": true } ], | ||||
|         "camelcase": "off", | ||||
|         "comma-dangle": [ "error", | ||||
|             { | ||||
|                 "arrays": "always-multiline", | ||||
|                 "objects": "always-multiline", | ||||
|                 "imports": "always-multiline", | ||||
|                 "exports": "always-multiline", | ||||
|                 "functions": "never" | ||||
|             } | ||||
|         ], | ||||
|         "comma-spacing": [ "error", | ||||
|             { | ||||
|                 "before": false, | ||||
|                 "after": true | ||||
|             } | ||||
|         ], | ||||
|         "complexity": [ "off", 4 ], | ||||
|         "curly": "error", | ||||
|         "dot-notation": [ "error", { "allowKeywords": true } ], | ||||
|         "empty-returns/main": "error", | ||||
|         "eol-last": [ "error", "always" ], | ||||
|         "eqeqeq": "error", | ||||
|         "func-style": [ "off", "expression" ], | ||||
|         "guard-for-in": "error", | ||||
|         "indent": ["error", 4, { | ||||
|             "ArrayExpression": "first", | ||||
|             "ObjectExpression": "first", | ||||
|             "SwitchCase": 0, | ||||
|             "CallExpression": {"arguments": "first"}, | ||||
|             "FunctionExpression": {"parameters": "first"}, | ||||
|             "FunctionDeclaration": {"parameters": "first"} | ||||
|         }], | ||||
|         "key-spacing": [ "error", | ||||
|             { | ||||
|                 "beforeColon": false, | ||||
|                 "afterColon": true | ||||
|             } | ||||
|         ], | ||||
|         "keyword-spacing": [ "error", | ||||
|             { | ||||
|                 "before": true, | ||||
|                 "after": true, | ||||
|                 "overrides": { | ||||
|                     "return": { "after": true }, | ||||
|                     "throw": { "after": true }, | ||||
|                     "case": { "after": true } | ||||
|                 } | ||||
|             } | ||||
|         ], | ||||
|         "max-depth": [ "off", 4 ], | ||||
|         "max-len": [ "error", 100, 2, | ||||
|             { | ||||
|                 "ignoreUrls": true, | ||||
|                 "ignoreComments": false, | ||||
|                 "ignoreRegExpLiterals": true, | ||||
|                 "ignoreStrings": true, | ||||
|                 "ignoreTemplateLiterals": true | ||||
|             } | ||||
|         ], | ||||
|         "max-params": [ "off", 3 ], | ||||
|         "max-statements": [ "off", 10 ], | ||||
|         "new-cap": [ "error", | ||||
|             { | ||||
|                 "newIsCap": true, | ||||
|                 "capIsNew": false | ||||
|             } | ||||
|         ], | ||||
|         "new-parens": "error", | ||||
|         "newline-per-chained-call": "off", | ||||
|         "no-alert": "error", | ||||
|         "no-array-constructor": "error", | ||||
|         "no-bitwise": "error", | ||||
|         "no-caller": "error", | ||||
|         "no-case-declarations": "error", | ||||
|         "no-catch-shadow": "error", | ||||
|         "no-console": "off", | ||||
|         "no-const-assign": "error", | ||||
|         "no-control-regex": "error", | ||||
|         "no-debugger": "error", | ||||
|         "no-delete-var": "error", | ||||
|         "no-div-regex": "error", | ||||
|         "no-dupe-class-members": "error", | ||||
|         "no-dupe-keys": "error", | ||||
|         "no-duplicate-imports": "error", | ||||
|         "no-else-return": "error", | ||||
|         "no-empty": "error", | ||||
|         "no-empty-character-class": "error", | ||||
|         "no-eq-null": "error", | ||||
|         "no-eval": "error", | ||||
|         "no-ex-assign": "error", | ||||
|         "no-extra-parens": ["error", "all"], | ||||
|         "no-extra-semi": "error", | ||||
|         "no-fallthrough": "error", | ||||
|         "no-floating-decimal": "error", | ||||
|         "no-func-assign": "error", | ||||
|         "no-implied-eval": "error", | ||||
|         "no-iterator": "error", | ||||
|         "no-label-var": "error", | ||||
|         "no-labels": "error", | ||||
|         "no-loop-func": "error", | ||||
|         "no-mixed-requires": [ "off", false ], | ||||
|         "no-multi-str": "error", | ||||
|         "no-native-reassign": "error", | ||||
|         "no-nested-ternary": "off", | ||||
|         "no-new-func": "error", | ||||
|         "no-new-object": "error", | ||||
|         "no-new-wrappers": "error", | ||||
|         "no-obj-calls": "error", | ||||
|         "no-octal": "error", | ||||
|         "no-octal-escape": "error", | ||||
|         "no-param-reassign": "off", | ||||
|         "no-plusplus": "error", | ||||
|         "no-proto": "error", | ||||
|         "no-redeclare": "error", | ||||
|         "no-regex-spaces": "error", | ||||
|         "no-restricted-syntax": "off", | ||||
|         "no-return-assign": "error", | ||||
|         "no-script-url": "error", | ||||
|         "no-self-compare": "error", | ||||
|         "no-shadow": "off", | ||||
|         "no-sync": "error", | ||||
|         "no-ternary": "off", | ||||
|         "no-trailing-spaces": "error", | ||||
|         "no-undef": "error", | ||||
|         "no-undef-init": "error", | ||||
|         "no-underscore-dangle": "off", | ||||
|         "no-unneeded-ternary": [ "error", { "defaultAssignment": false } ], | ||||
|         "no-unreachable": "error", | ||||
|         "no-unused-expressions": "error", | ||||
|         "no-unused-vars": [ "error", | ||||
|             { | ||||
|                 "vars": "local", | ||||
|                 "args": "after-used", | ||||
|                 "varsIgnorePattern": "print_elapsed_time|check_duplicate_ids" | ||||
|             } | ||||
|         ], | ||||
|         "no-use-before-define": "error", | ||||
|         "no-useless-constructor": "error", | ||||
|         // The Zulip codebase complies only partially with the "no-useless-escape" | ||||
|         // rule; regular expressions have not been updated yet. | ||||
|         // Updated regular expressions are currently being tested in the Casper | ||||
|         // files; the results will determine whether this rule is enforced in the future. | ||||
|         "no-useless-escape": "off", | ||||
|         "no-var": "error", | ||||
|         "space-unary-ops": "error", | ||||
|         "no-whitespace-before-property": "error", | ||||
|         "no-with": "error", | ||||
|         "one-var": [ "error", "never" ], | ||||
|         "padded-blocks": "off", | ||||
|         "prefer-const": [ "error", | ||||
|             { | ||||
|                 "destructuring": "any", | ||||
|                 "ignoreReadBeforeAssign": true | ||||
|             } | ||||
|         ], | ||||
|         "quote-props": [ "error", "as-needed", | ||||
|             { | ||||
|                 "keywords": false, | ||||
|                 "unnecessary": true, | ||||
|                 "numbers": false | ||||
|             } | ||||
|         ], | ||||
|         "quotes": [ "off", "single" ], | ||||
|         "radix": "error", | ||||
|         "semi": "error", | ||||
|         "semi-spacing": ["error", {"before": false, "after": true}], | ||||
|         "sort-imports": "error", | ||||
|         "space-before-blocks": "error", | ||||
|         "space-before-function-paren": [ "error", | ||||
|             { | ||||
|                 "anonymous": "always", | ||||
|                 "named": "never", | ||||
|                 "asyncArrow": "always" | ||||
|             } | ||||
|         ], | ||||
|         "space-in-parens": "error", | ||||
|         "space-infix-ops": "error", | ||||
|         "spaced-comment": "off", | ||||
|         "strict": "off", | ||||
|         "template-curly-spacing": "error", | ||||
|         "unnecessary-strict": "off", | ||||
|         "use-isnan": "error", | ||||
|         "valid-typeof": [ "error", { "requireStringLiterals": true } ], | ||||
|         "wrap-iife": [ "error", "outside", { "functionPrototypeMethods": false } ], | ||||
|         "wrap-regex": "off", | ||||
|         "yoda": "error" | ||||
|     }, | ||||
|     "overrides": [ | ||||
|         { | ||||
|             "files": [ | ||||
|                 "frontend_tests/casper_tests/*.js", | ||||
|                 "frontend_tests/casper_lib/*.js" | ||||
|             ], | ||||
|             "rules": { | ||||
|                 "no-var": "off" // PhantomJS doesn’t support let, const | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["**/*.ts"], | ||||
|             "parser": "@typescript-eslint/parser", | ||||
|             "parserOptions": { | ||||
|                 "project": "tsconfig.json" | ||||
|             }, | ||||
|             "plugins": ["@typescript-eslint"], | ||||
|             "rules": { | ||||
|                 // Disable base rule to avoid conflict | ||||
|                 "empty-returns/main": "off", | ||||
|                 "indent": "off", | ||||
|                 "func-call-spacing": "off", | ||||
|                 "no-magic-numbers": "off", | ||||
|                 "semi": "off", | ||||
|                 "no-unused-vars": "off", | ||||
|                 "no-useless-constructor": "off", | ||||
|  | ||||
|                 "@typescript-eslint/adjacent-overload-signatures": "error", | ||||
|                 "@typescript-eslint/array-type": "error", | ||||
|                 "@typescript-eslint/await-thenable": "error", | ||||
|                 "@typescript-eslint/ban-types": "error", | ||||
|                 "@typescript-eslint/ban-ts-ignore": "off", | ||||
|                 "@typescript-eslint/camelcase": "off", | ||||
|                 "@typescript-eslint/class-name-casing": "error", | ||||
|                 "@typescript-eslint/consistent-type-assertions": "error", | ||||
|                 "@typescript-eslint/explicit-function-return-type": ["error", { "allowExpressions": true }], | ||||
|                 "@typescript-eslint/explicit-member-accessibility": "off", | ||||
|                 "@typescript-eslint/func-call-spacing": "error", | ||||
|                 "@typescript-eslint/generic-type-naming": "off", | ||||
|                 "@typescript-eslint/indent": "error", | ||||
|                 "@typescript-eslint/interface-name-prefix": "off", | ||||
|                 "@typescript-eslint/member-delimiter-style": "error", | ||||
|                 "@typescript-eslint/member-naming": ["error", { "private": "^_" } ], | ||||
|                 "@typescript-eslint/member-ordering": "error", | ||||
|                 "@typescript-eslint/no-array-constructor": "error", | ||||
|                 "@typescript-eslint/no-empty-interface": "error", | ||||
|                 "@typescript-eslint/no-explicit-any": "off", | ||||
|                 "@typescript-eslint/no-extraneous-class": "error", | ||||
|                 "@typescript-eslint/no-for-in-array": "off", | ||||
|                 "@typescript-eslint/no-inferrable-types": "error", | ||||
|                 "@typescript-eslint/no-magic-numbers": "off", | ||||
|                 "@typescript-eslint/no-misused-new": "error", | ||||
|                 "@typescript-eslint/no-namespace": "error", | ||||
|                 "@typescript-eslint/no-non-null-assertion": "off", | ||||
|                 "@typescript-eslint/no-parameter-properties": "error", | ||||
|                 "@typescript-eslint/no-require-imports": "off", | ||||
|                 "@typescript-eslint/no-this-alias": "off", | ||||
|                 "@typescript-eslint/no-type-alias": "off", | ||||
|                 "@typescript-eslint/no-unnecessary-qualifier": "error", | ||||
|                 "@typescript-eslint/no-unnecessary-type-assertion": "error", | ||||
|                 "@typescript-eslint/no-unused-vars": ["error", { "varsIgnorePattern": "^_" } ], | ||||
|                 "@typescript-eslint/no-use-before-define": "error", | ||||
|                 "@typescript-eslint/no-useless-constructor": "error", | ||||
|                 "@typescript-eslint/no-var-requires": "off", | ||||
|                 "@typescript-eslint/prefer-for-of": "off", | ||||
|                 "@typescript-eslint/prefer-function-type": "off", | ||||
|                 "@typescript-eslint/prefer-includes": "error", | ||||
|                 "@typescript-eslint/prefer-interface": "off", | ||||
|                 "@typescript-eslint/prefer-namespace-keyword": "error", | ||||
|                 "@typescript-eslint/prefer-regexp-exec": "error", | ||||
|                 "@typescript-eslint/prefer-string-starts-ends-with": "error", | ||||
|                 "@typescript-eslint/promise-function-async": "error", | ||||
|                 "@typescript-eslint/restrict-plus-operands": "off", | ||||
|                 "@typescript-eslint/semi": "error", | ||||
|                 "@typescript-eslint/triple-slash-reference": "error", | ||||
|                 "@typescript-eslint/type-annotation-spacing": "error", | ||||
|                 "@typescript-eslint/unbound-method": "off", | ||||
|                 "@typescript-eslint/unified-signatures": "error" | ||||
|             } | ||||
|         } | ||||
|     ] | ||||
| } | ||||
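
As a hedged aside, not something prescribed by this commit range: an `.eslintrc.json` at the repository root like the one above is picked up automatically by ESLint, so individual files can be linted against it from the command line. A minimal sketch, assuming ESLint is installed among the project's dev dependencies; the file path is only a placeholder:

```sh
# Lint one file against the repository's .eslintrc.json.
npx eslint static/js/example.js
```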
							
								
								
									
.gitattributes: 19 lines changed (vendored)
							| @@ -1,19 +1,4 @@ | ||||
| # DIFFS: Noise suppression. | ||||
| # | ||||
| # Suppress noisy generated files in diffs. | ||||
| # (When you actually want to see these diffs, use `git diff -a`.) | ||||
|  | ||||
| # Large test fixtures: | ||||
| corporate/tests/stripe_fixtures/*.json -diff | ||||
|  | ||||
|  | ||||
| # FORMATTING | ||||
|  | ||||
| # Maintain LF (Unix-style) newlines in text files. | ||||
| *   text=auto eol=lf | ||||
|  | ||||
| # Make sure various media files never get somehow auto-detected as text | ||||
| # and then newline-converted. | ||||
| *.gif binary | ||||
| *.jpg binary | ||||
| *.jpeg binary | ||||
| @@ -26,7 +11,3 @@ corporate/tests/stripe_fixtures/*.json -diff | ||||
| *.otf binary | ||||
| *.tif binary | ||||
| *.ogg binary | ||||
| *.bson binary | ||||
| *.bmp binary | ||||
| *.mp3 binary | ||||
| *.pdf binary | ||||
|   | ||||
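
Also not part of the diff: the effect of `.gitattributes` rules like those above can be inspected per path with `git check-attr`, which shows the attributes Git will apply. A minimal sketch; the example path is a placeholder:

```sh
# List all attributes Git applies to the given path.
git check-attr --all -- static/images/logo.png
```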
							
								
								
									
.github/FUNDING.yml: 3 lines changed (vendored)
							| @@ -1,3 +0,0 @@ | ||||
| github: zulip | ||||
| patreon: zulip | ||||
| open_collective: zulip | ||||
							
								
								
									
.github/ISSUE_TEMPLATE/1_discussed_on_czo.md: 10 lines changed (vendored)
							| @@ -1,10 +0,0 @@ | ||||
| --- | ||||
| name: Issue discussed in the Zulip development community | ||||
| about: Bug report, feature or improvement already discussed on chat.zulip.org. | ||||
| --- | ||||
|  | ||||
| <!-- Issue description --> | ||||
|  | ||||
| <!-- Link to a message in the chat.zulip.org discussion. Message links will still work even if the topic is renamed or resolved. Link back to this issue from the chat.zulip.org thread. --> | ||||
|  | ||||
| CZO thread | ||||
							
								
								
									
.github/ISSUE_TEMPLATE/2_bug_report.md: 18 lines changed (vendored)
							| @@ -1,18 +0,0 @@ | ||||
| --- | ||||
| name: Bug report | ||||
| about: A concrete bug report with steps to reproduce the behavior. (See also "Possible bug" below.) | ||||
| labels: ["bug"] | ||||
| --- | ||||
|  | ||||
| <!-- Describe what you were expecting to see, what you saw instead, and steps to take in order to reproduce the buggy behavior. Screenshots can be helpful. --> | ||||
|  | ||||
| <!-- Check the box for the version of Zulip you are using (see https://zulip.com/help/view-zulip-version).--> | ||||
|  | ||||
| **Zulip Server and web app version:** | ||||
|  | ||||
| - [ ] Zulip Cloud (`*.zulipchat.com`) | ||||
| - [ ] Zulip Server 8.0+ | ||||
| - [ ] Zulip Server 7.0+ | ||||
| - [ ] Zulip Server 6.0+ | ||||
| - [ ] Zulip Server 5.0 or older | ||||
| - [ ] Other or not sure | ||||
							
								
								
									
.github/ISSUE_TEMPLATE/3_feature_request.md: 6 lines changed (vendored)
							| @@ -1,6 +0,0 @@ | ||||
| --- | ||||
| name: Feature or improvement request | ||||
| about: A specific proposal for a new feature or improvement. (See also "Feature suggestion or feedback" below.) | ||||
| --- | ||||
|  | ||||
| <!-- Describe the proposal, including how it would help you or your organization. --> | ||||
							
								
								
									
.github/ISSUE_TEMPLATE/config.yml: 14 lines changed (vendored)
							| @@ -1,14 +0,0 @@ | ||||
| blank_issues_enabled: true | ||||
| contact_links: | ||||
|   - name: Possible bug | ||||
|     url: https://zulip.readthedocs.io/en/latest/contributing/reporting-bugs.html | ||||
|     about: Report unexpected behavior that may be a bug. | ||||
|   - name: Feature suggestion or feedback | ||||
|     url: https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html | ||||
|     about: Start a discussion about your idea for improving Zulip. | ||||
|   - name: Issue with running or upgrading a Zulip server | ||||
|     url: https://zulip.readthedocs.io/en/latest/production/troubleshooting.html | ||||
|     about: We provide free, interactive support for the vast majority of questions about running a Zulip server. | ||||
|   - name: Other support requests and sales questions | ||||
|     url: https://zulip.com/help/contact-support | ||||
|     about: Contact us — we're happy to help! | ||||
							
								
								
									
.github/pull_request_template.md: 45 lines changed (vendored)
							| @@ -1,43 +1,14 @@ | ||||
| <!-- Describe your pull request here.--> | ||||
| <!-- What's this PR for?  (Just a link to an issue is fine.) --> | ||||
|  | ||||
| Fixes: <!-- Issue link, or clear description.--> | ||||
|  | ||||
| <!-- If the PR makes UI changes, always include one or more still screenshots to demonstrate your changes. If it seems helpful, add a screen capture of the new functionality as well. | ||||
| **Testing Plan:** <!-- How have you tested? --> | ||||
|  | ||||
| Tooling tips: https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html | ||||
| --> | ||||
|  | ||||
| **Screenshots and screen captures:** | ||||
| **GIFs or Screenshots:** <!-- If a UI change.  See: | ||||
|   https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html | ||||
|   --> | ||||
|  | ||||
| <details> | ||||
| <summary>Self-review checklist</summary> | ||||
|  | ||||
| <!-- Prior to submitting a PR, follow our step-by-step guide to review your own code: | ||||
| https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code --> | ||||
|  | ||||
| <!-- Once you create the PR, check off all the steps below that you have completed. | ||||
| If any of these steps are not relevant or you have not completed them, leave them unchecked.--> | ||||
|  | ||||
| - [ ] [Self-reviewed](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code) the changes for clarity and maintainability | ||||
|       (variable names, code reuse, readability, etc.). | ||||
|  | ||||
| Communicate decisions, questions, and potential concerns. | ||||
|  | ||||
| - [ ] Explains differences from previous plans (e.g., issue description). | ||||
| - [ ] Highlights technical choices and bugs encountered. | ||||
| - [ ] Calls out remaining decisions and concerns. | ||||
| - [ ] Automated tests verify logic where appropriate. | ||||
|  | ||||
| Individual commits are ready for review (see [commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html)). | ||||
|  | ||||
| - [ ] Each commit is a coherent idea. | ||||
| - [ ] Commit message(s) explain reasoning and motivation for changes. | ||||
|  | ||||
| Completed manual review and testing of the following: | ||||
|  | ||||
| - [ ] Visual appearance of the changes. | ||||
| - [ ] Responsiveness and internationalization. | ||||
| - [ ] Strings and tooltips. | ||||
| - [ ] End-to-end functionality of buttons, interactions and flows. | ||||
| - [ ] Corner cases, error conditions, and easily imagined bugs. | ||||
| </details> | ||||
| <!-- Also be sure to make clear, coherent commits: | ||||
|   https://zulip.readthedocs.io/en/latest/contributing/version-control.html | ||||
|   --> | ||||
|   | ||||
							
								
								
									
.github/workflows/codeql-analysis.yml: 40 lines changed (vendored)
							| @@ -1,40 +0,0 @@ | ||||
| name: "Code scanning" | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: ["*.x", chat.zulip.org, main] | ||||
|     tags: ["*"] | ||||
|   pull_request: | ||||
|     branches: ["*.x", chat.zulip.org, main] | ||||
|   workflow_dispatch: | ||||
|  | ||||
| concurrency: | ||||
|   group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}" | ||||
|   cancel-in-progress: true | ||||
|  | ||||
| permissions: | ||||
|   contents: read | ||||
|  | ||||
| jobs: | ||||
|   CodeQL: | ||||
|     permissions: | ||||
|       actions: read # for github/codeql-action/init to get workflow details | ||||
|       contents: read # for actions/checkout to fetch code | ||||
|       security-events: write # for github/codeql-action/analyze to upload SARIF results | ||||
|     if: ${{!github.event.repository.private}} | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
|     steps: | ||||
|       - name: Check out repository | ||||
|         uses: actions/checkout@v4 | ||||
|  | ||||
|       # Initializes the CodeQL tools for scanning. | ||||
|       - name: Initialize CodeQL | ||||
|         uses: github/codeql-action/init@v3 | ||||
|  | ||||
|         # Override language selection by uncommenting this and choosing your languages | ||||
|         # with: | ||||
|         #   languages: go, javascript, csharp, python, cpp, java | ||||
|  | ||||
|       - name: Perform CodeQL Analysis | ||||
|         uses: github/codeql-action/analyze@v3 | ||||
							
								
								
									
.github/workflows/production-suite.yml: 307 changed lines (vendored)
							| @@ -1,307 +0,0 @@ | ||||
| name: Zulip production suite | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: ["*.x", chat.zulip.org, main] | ||||
|     tags: ["*"] | ||||
|   pull_request: | ||||
|     paths: | ||||
|       - .github/workflows/production-suite.yml | ||||
|       - "**/migrations/**" | ||||
|       - manage.py | ||||
|       - pnpm-lock.yaml | ||||
|       - puppet/** | ||||
|       - requirements/** | ||||
|       - scripts/** | ||||
|       - tools/** | ||||
|       - web/babel.config.js | ||||
|       - web/postcss.config.js | ||||
|       - web/third/** | ||||
|       - web/webpack.config.ts | ||||
|       - zerver/worker/queue_processors.py | ||||
|       - zerver/lib/push_notifications.py | ||||
|       - zerver/decorator.py | ||||
|       - zproject/** | ||||
|   workflow_dispatch: | ||||
|  | ||||
| concurrency: | ||||
|   group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}" | ||||
|   cancel-in-progress: true | ||||
|  | ||||
| defaults: | ||||
|   run: | ||||
|     shell: bash | ||||
|  | ||||
| permissions: | ||||
|   contents: read | ||||
|  | ||||
| jobs: | ||||
|   production_build: | ||||
|     # This job builds a release tarball from the current commit, which | ||||
|     # will be used for all of the following install/upgrade tests. | ||||
|     name: Ubuntu 22.04 production build | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
|     # Docker images are built from 'tools/ci/Dockerfile'; the comments at | ||||
|     # the top explain how to build and upload these images. | ||||
|     # Ubuntu 22.04 ships with Python 3.10.12. | ||||
|     container: zulip/ci:jammy | ||||
|  | ||||
|     steps: | ||||
|       - name: Add required permissions | ||||
|         run: | | ||||
|           # The checkout action doesn't clone to ~/zulip or allow
|           # us to use the path option to clone outside the current
|           # /__w/zulip/zulip directory. Since this directory is owned
|           # by root, we need to change its ownership to allow the
|           # github user to clone the code here.
|           # Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE | ||||
|           # which is /home/runner/work/. | ||||
|           sudo chown -R github . | ||||
|  | ||||
|           # This is the GitHub Actions specific cache directory that
|           # the current github user must be able to access for the
|           # cache action to work. It is owned by root currently.
|           sudo chmod -R 0777 /__w/_temp/ | ||||
|  | ||||
|       - uses: actions/checkout@v4 | ||||
|  | ||||
|       - name: Create cache directories | ||||
|         run: | | ||||
|           dirs=(/srv/zulip-{venv,emoji}-cache) | ||||
|           sudo mkdir -p "${dirs[@]}" | ||||
|           sudo chown -R github "${dirs[@]}" | ||||
|  | ||||
|       - name: Restore pnpm store | ||||
|         uses: actions/cache@v4 | ||||
|         with: | ||||
|           path: /__w/.pnpm-store | ||||
|           key: v1-pnpm-store-jammy-${{ hashFiles('pnpm-lock.yaml') }} | ||||
|  | ||||
|       - name: Restore python cache | ||||
|         uses: actions/cache@v4 | ||||
|         with: | ||||
|           path: /srv/zulip-venv-cache | ||||
|           key: v1-venv-jammy-${{ hashFiles('requirements/dev.txt') }} | ||||
|           restore-keys: v1-venv-jammy | ||||
|  | ||||
|       - name: Restore emoji cache | ||||
|         uses: actions/cache@v4 | ||||
|         with: | ||||
|           path: /srv/zulip-emoji-cache | ||||
|           key: v1-emoji-jammy-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }} | ||||
|           restore-keys: v1-emoji-jammy | ||||
|  | ||||
|       - name: Build production tarball | ||||
|         run: ./tools/ci/production-build | ||||
|  | ||||
|       - name: Upload production build artifacts for install jobs | ||||
|         uses: actions/upload-artifact@v4 | ||||
|         with: | ||||
|           name: production-tarball | ||||
|           path: /tmp/production-build | ||||
|           retention-days: 1 | ||||
|  | ||||
|       - name: Verify pnpm store path | ||||
|         run: | | ||||
|           set -x | ||||
|           path="$(pnpm store path)" | ||||
|           [[ "$path" == /__w/.pnpm-store/* ]] | ||||
|  | ||||
|       - name: Generate failure report string | ||||
|         id: failure_report_string | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT | ||||
|  | ||||
|       - name: Report status to CZO | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         uses: zulip/github-actions-zulip/send-message@v1 | ||||
|         with: | ||||
|           api-key: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|           email: "github-actions-bot@chat.zulip.org" | ||||
|           organization-url: "https://chat.zulip.org" | ||||
|           to: "automated testing" | ||||
|           topic: ${{ steps.failure_report_string.outputs.topic }} | ||||
|           type: "stream" | ||||
|           content: ${{ steps.failure_report_string.outputs.content }} | ||||
|  | ||||
|   production_install: | ||||
|     # This job installs the server release tarball built above on a | ||||
|     # range of platforms, and does some basic health checks on the | ||||
|     # resulting installed Zulip server.
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         include: | ||||
|           # Docker images are built from 'tools/ci/Dockerfile'; the comments at | ||||
|           # the top explain how to build and upload these images. | ||||
|           - docker_image: zulip/ci:jammy | ||||
|             name: Ubuntu 22.04 production install and PostgreSQL upgrade with pgroonga | ||||
|             os: jammy | ||||
|             extra-args: "" | ||||
|  | ||||
|           - docker_image: zulip/ci:noble | ||||
|             name: Ubuntu 24.04 production install | ||||
|             os: noble | ||||
|             extra-args: "" | ||||
|  | ||||
|           - docker_image: zulip/ci:bookworm | ||||
|             name: Debian 12 production install with custom db name and user | ||||
|             os: bookworm | ||||
|             extra-args: --test-custom-db | ||||
|  | ||||
|     name: ${{ matrix.name  }} | ||||
|     container: | ||||
|       image: ${{ matrix.docker_image }} | ||||
|       options: --init | ||||
|     runs-on: ubuntu-latest | ||||
|     needs: production_build | ||||
|  | ||||
|     steps: | ||||
|       - name: Download built production tarball | ||||
|         uses: actions/download-artifact@v4 | ||||
|         with: | ||||
|           name: production-tarball | ||||
|           path: /tmp | ||||
|  | ||||
|       - name: Add required permissions and setup | ||||
|         run: | | ||||
|           # This is the GitHub Actions specific cache directory that
|           # the current github user must be able to access for the
|           # cache action to work. It is owned by root currently.
|           sudo chmod -R 0777 /__w/_temp/ | ||||
|  | ||||
|           # Since actions/download-artifact@v4 loses all the permissions
|           # of the tarball uploaded by the upload-artifact step, fix those.
|           chmod +x /tmp/production-upgrade-pg | ||||
|           chmod +x /tmp/production-pgroonga | ||||
|           chmod +x /tmp/production-install | ||||
|           chmod +x /tmp/production-verify | ||||
|           chmod +x /tmp/generate-failure-message | ||||
|  | ||||
|       - name: Create cache directories | ||||
|         run: | | ||||
|           dirs=(/srv/zulip-{venv,emoji}-cache) | ||||
|           sudo mkdir -p "${dirs[@]}" | ||||
|           sudo chown -R github "${dirs[@]}" | ||||
|  | ||||
|       - name: Install production | ||||
|         run: sudo /tmp/production-install ${{ matrix.extra-args }} | ||||
|  | ||||
|       - name: Verify install | ||||
|         run: sudo /tmp/production-verify ${{ matrix.extra-args }} | ||||
|  | ||||
|       - name: Install pgroonga | ||||
|         if: ${{ matrix.os == 'jammy' }} | ||||
|         run: sudo /tmp/production-pgroonga | ||||
|  | ||||
|       - name: Verify install after installing pgroonga | ||||
|         if: ${{ matrix.os == 'jammy' }} | ||||
|         run: sudo /tmp/production-verify ${{ matrix.extra-args }} | ||||
|  | ||||
|       - name: Upgrade postgresql | ||||
|         if: ${{ matrix.os == 'jammy' }} | ||||
|         run: sudo /tmp/production-upgrade-pg | ||||
|  | ||||
|       - name: Verify install after upgrading postgresql | ||||
|         if: ${{ matrix.os == 'jammy' }} | ||||
|         run: sudo /tmp/production-verify ${{ matrix.extra-args }} | ||||
|  | ||||
|       - name: Generate failure report string | ||||
|         id: failure_report_string | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         run: /tmp/generate-failure-message >> $GITHUB_OUTPUT | ||||
|  | ||||
|       - name: Report status to CZO | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         uses: zulip/github-actions-zulip/send-message@v1 | ||||
|         with: | ||||
|           api-key: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|           email: "github-actions-bot@chat.zulip.org" | ||||
|           organization-url: "https://chat.zulip.org" | ||||
|           to: "automated testing" | ||||
|           topic: ${{ steps.failure_report_string.outputs.topic }} | ||||
|           type: "stream" | ||||
|           content: ${{ steps.failure_report_string.outputs.content }} | ||||
|  | ||||
|   production_upgrade: | ||||
|     # The production upgrade job starts with a container with a | ||||
|     # previous Zulip release installed, and attempts to upgrade it to | ||||
|     # the release tarball built for the current commit being tested. | ||||
|     # | ||||
|     # This is intended to catch bugs that result in the upgrade | ||||
|     # process failing. | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         include: | ||||
|           # Docker images are built from 'tools/ci/Dockerfile.prod'; the comments at | ||||
|           # the top explain how to build and upload these images. | ||||
|           - docker_image: zulip/ci:jammy-6.0 | ||||
|             name: 6.0 Version Upgrade | ||||
|             os: jammy | ||||
|           - docker_image: zulip/ci:bookworm-7.0 | ||||
|             name: 7.0 Version Upgrade | ||||
|             os: bookworm | ||||
|           - docker_image: zulip/ci:bookworm-8.0 | ||||
|             name: 8.0 Version Upgrade | ||||
|             os: bookworm | ||||
|  | ||||
|     name: ${{ matrix.name  }} | ||||
|     container: | ||||
|       image: ${{ matrix.docker_image }} | ||||
|       options: --init | ||||
|     runs-on: ubuntu-latest | ||||
|     needs: production_build | ||||
|  | ||||
|     steps: | ||||
|       - name: Download built production tarball | ||||
|         uses: actions/download-artifact@v4 | ||||
|         with: | ||||
|           name: production-tarball | ||||
|           path: /tmp | ||||
|  | ||||
|       - name: Add required permissions and setup | ||||
|         run: | | ||||
|           # This is the GitHub Actions specific cache directory that
|           # the current github user must be able to access for the
|           # cache action to work. It is owned by root currently.
|           sudo chmod -R 0777 /__w/_temp/ | ||||
|  | ||||
|           # Since actions/download-artifact@v4 loses all the permissions
|           # of the tarball uploaded by the upload-artifact step, fix those.
|           chmod +x /tmp/production-upgrade | ||||
|           chmod +x /tmp/production-verify | ||||
|           chmod +x /tmp/generate-failure-message | ||||
|  | ||||
|       - name: Create cache directories | ||||
|         run: | | ||||
|           dirs=(/srv/zulip-{venv,emoji}-cache) | ||||
|           sudo mkdir -p "${dirs[@]}" | ||||
|           sudo chown -R github "${dirs[@]}" | ||||
|  | ||||
|       - name: Upgrade production | ||||
|         run: sudo /tmp/production-upgrade | ||||
|  | ||||
|         # TODO: We should be running production-verify here, but it | ||||
|         # doesn't pass yet. | ||||
|         # | ||||
|         # - name: Verify install | ||||
|         #   run: sudo /tmp/production-verify | ||||
|  | ||||
|       - name: Generate failure report string | ||||
|         id: failure_report_string | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         run: /tmp/generate-failure-message >> $GITHUB_OUTPUT | ||||
|  | ||||
|       - name: Report status to CZO | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         uses: zulip/github-actions-zulip/send-message@v1 | ||||
|         with: | ||||
|           api-key: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|           email: "github-actions-bot@chat.zulip.org" | ||||
|           organization-url: "https://chat.zulip.org" | ||||
|           to: "automated testing" | ||||
|           topic: ${{ steps.failure_report_string.outputs.topic }} | ||||
|           type: "stream" | ||||
|           content: ${{ steps.failure_report_string.outputs.content }} | ||||
							
								
								
									
.github/workflows/update-oneclick-apps.yml: 27 changed lines (vendored)
							| @@ -1,27 +0,0 @@ | ||||
| name: Update one click apps | ||||
| on: | ||||
|   release: | ||||
|     types: [published] | ||||
| permissions: | ||||
|   contents: read | ||||
|  | ||||
| jobs: | ||||
|   update-digitalocean-oneclick-app: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|       - name: Update DigitalOcean one click app | ||||
|         env: | ||||
|           DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }} | ||||
|           ZULIP_API_KEY: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_API_KEY }} | ||||
|           ZULIP_EMAIL: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_EMAIL }} | ||||
|           ZULIP_SITE: https://chat.zulip.org | ||||
|           ONE_CLICK_ACTION_STREAM: kandra ops | ||||
|           PYTHON_DIGITALOCEAN_REQUEST_TIMEOUT_SEC: 30 | ||||
|           RELEASE_VERSION: ${{ github.event.release.tag_name }} | ||||
|         run: | | ||||
|           export PATH="$HOME/.local/bin:$PATH" | ||||
|           git clone https://github.com/zulip/marketplace-partners | ||||
|           pip3 install python-digitalocean zulip fab-classic PyNaCl | ||||
|           echo $PATH | ||||
|           python3 tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py | ||||
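
The release automation itself lives in tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py and is driven entirely by the environment variables above. Purely as a hedged sketch (not the real script), a helper that reads that configuration and announces the release in the configured Zulip stream could look roughly like this; the actual DigitalOcean image build uses python-digitalocean and fab-classic, which this sketch deliberately omits.

```python
# Hypothetical sketch; the real logic lives in
# tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py.
import os

import zulip  # installed by the workflow via `pip3 install ... zulip ...`

release_version = os.environ["RELEASE_VERSION"]
stream = os.environ.get("ONE_CLICK_ACTION_STREAM", "kandra ops")

# Authenticate as the bot whose credentials the workflow exposes.
client = zulip.Client(
    email=os.environ["ZULIP_EMAIL"],
    api_key=os.environ["ZULIP_API_KEY"],
    site=os.environ["ZULIP_SITE"],
)

# Post a status message to the configured stream/topic.
client.send_message(
    {
        "type": "stream",
        "to": stream,
        "topic": "DigitalOcean one-click app",
        "content": f"Preparing the one-click app image for Zulip {release_version}.",
    }
)
```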
							
								
								
									
.github/workflows/zulip-ci.yml: 266 changed lines (vendored)
							| @@ -1,266 +0,0 @@ | ||||
| # NOTE: Every test in this file should be in `tools/test-all`.  If there's a
| # reason not to run it there, there should be a comment
| # explaining why. | ||||
|  | ||||
| name: Zulip CI | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: ["*.x", chat.zulip.org, main] | ||||
|     tags: ["*"] | ||||
|   pull_request: | ||||
|   workflow_dispatch: | ||||
|  | ||||
| concurrency: | ||||
|   group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}" | ||||
|   cancel-in-progress: true | ||||
|  | ||||
| defaults: | ||||
|   run: | ||||
|     shell: bash | ||||
|  | ||||
| permissions: | ||||
|   contents: read | ||||
|  | ||||
| jobs: | ||||
|   tests: | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         include: | ||||
|           # Base images are built using `tools/ci/Dockerfile.prod.template`. | ||||
|           # The comments at the top explain how to build and upload these images. | ||||
|           # Ubuntu 22.04 ships with Python 3.10.12. | ||||
|           - docker_image: zulip/ci:jammy | ||||
|             name: Ubuntu 22.04 (Python 3.10, backend + frontend) | ||||
|             os: jammy | ||||
|             include_documentation_tests: false | ||||
|             include_frontend_tests: true | ||||
|           # Debian 12 ships with Python 3.11.2. | ||||
|           - docker_image: zulip/ci:bookworm | ||||
|             name: Debian 12 (Python 3.11, backend + documentation) | ||||
|             os: bookworm | ||||
|             include_documentation_tests: true | ||||
|             include_frontend_tests: false | ||||
|           # Ubuntu 24.04 ships with Python 3.12.2. | ||||
|           - docker_image: zulip/ci:noble | ||||
|             name: Ubuntu 24.04 (Python 3.12, backend) | ||||
|             os: noble | ||||
|             include_documentation_tests: false | ||||
|             include_frontend_tests: false | ||||
|  | ||||
|     runs-on: ubuntu-latest | ||||
|     name: ${{ matrix.name }} | ||||
|     container: ${{ matrix.docker_image }} | ||||
|     env: | ||||
|       # GitHub Actions sets HOME to /github/home, which causes
|       # problems later in provision and the frontend tests that run
|       # tools/setup/postgresql-init-dev-db because of the .pgpass
|       # location. PostgreSQL (psql) expects .pgpass to be at
|       # /home/github/.pgpass, and setting HOME to `/home/github/`
|       # ensures it is written there because we write it to ~/.pgpass.
|       HOME: /home/github/ | ||||
|  | ||||
|     steps: | ||||
|       - uses: actions/checkout@v4 | ||||
|  | ||||
|       - name: Create cache directories | ||||
|         run: | | ||||
|           dirs=(/srv/zulip-{venv,emoji}-cache) | ||||
|           sudo mkdir -p "${dirs[@]}" | ||||
|           sudo chown -R github "${dirs[@]}" | ||||
|  | ||||
|       - name: Restore pnpm store | ||||
|         uses: actions/cache@v4 | ||||
|         with: | ||||
|           path: /__w/.pnpm-store | ||||
|           key: v1-pnpm-store-${{ matrix.os }}-${{ hashFiles('pnpm-lock.yaml') }} | ||||
|  | ||||
|       - name: Restore python cache | ||||
|         uses: actions/cache@v4 | ||||
|         with: | ||||
|           path: /srv/zulip-venv-cache | ||||
|           key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/dev.txt') }} | ||||
|           restore-keys: v1-venv-${{ matrix.os }} | ||||
|  | ||||
|       - name: Restore emoji cache | ||||
|         uses: actions/cache@v4 | ||||
|         with: | ||||
|           path: /srv/zulip-emoji-cache | ||||
|           key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json', 'tools/setup/emoji/build_emoji', 'tools/setup/emoji/emoji_setup_utils.py', 'tools/setup/emoji/emoji_names.py', 'package.json') }} | ||||
|           restore-keys: v1-emoji-${{ matrix.os }} | ||||
|  | ||||
|       - name: Install dependencies | ||||
|         run: | | ||||
|           # This is the main setup job for the test suite | ||||
|           ./tools/ci/setup-backend --skip-dev-db-build | ||||
|           scripts/lib/clean_unused_caches.py --verbose --threshold=0 | ||||
|  | ||||
|       - name: Run tools test | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/test-tools | ||||
|  | ||||
|       - name: Run Codespell lint | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/run-codespell | ||||
|  | ||||
|       # We run the tests that are only run in a specific job early, so | ||||
|       # that we get feedback to the developer about likely failures as | ||||
|       # quickly as possible. Backend/mypy failures that aren't | ||||
|       # identical across different versions are much more rare than | ||||
|       # frontend linter or node test failures. | ||||
|       - name: Run documentation and api tests | ||||
|         if: ${{ matrix.include_documentation_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # In CI, we only test links we control in test-documentation to avoid flakes | ||||
|           ./tools/test-documentation --skip-external-links | ||||
|           ./tools/test-help-documentation --skip-external-links | ||||
|           ./tools/test-api | ||||
|  | ||||
|       - name: Run node tests | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # Run the node tests first, since they're fast and deterministic | ||||
|           ./tools/test-js-with-node --coverage --parallel=1 | ||||
|  | ||||
|       - name: Run frontend lint | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky | ||||
|  | ||||
|       - name: Check schemas | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # Check that various schemas are consistent. (This is fast.)
|           ./tools/check-schemas | ||||
|  | ||||
|       - name: Check capitalization of strings | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./manage.py makemessages --locale en | ||||
|           PYTHONWARNINGS=ignore ./tools/check-capitalization --no-generate | ||||
|           PYTHONWARNINGS=ignore ./tools/check-frontend-i18n --no-generate | ||||
|  | ||||
|       - name: Run puppeteer tests | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/test-js-with-puppeteer | ||||
|  | ||||
|       - name: Check pnpm dedupe | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: pnpm dedupe --check | ||||
|  | ||||
|       - name: Run backend lint | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           echo "Test suite is running under $(python --version)." | ||||
|           ./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky | ||||
|  | ||||
|       - name: Run backend tests | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/test-backend ${{ matrix.os != 'bookworm' && '--coverage' || '' }} --xml-report --no-html-report --include-webhooks --include-transaction-tests --no-cov-cleanup --ban-console-output | ||||
|  | ||||
|       - name: Run mypy | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # We run mypy after the backend tests so we get output from the | ||||
|           # backend tests, which tend to uncover more serious problems, first. | ||||
|           ./tools/run-mypy --version | ||||
|           ./tools/run-mypy | ||||
|  | ||||
|       - name: Run miscellaneous tests | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|  | ||||
|           # Currently our compiled requirements files will differ for different | ||||
|           # Python versions, so we will run test-locked-requirements only on the | ||||
|           # platform with the oldest one. | ||||
|           # ./tools/test-locked-requirements | ||||
|           # ./tools/test-run-dev  # https://github.com/zulip/zulip/pull/14233 | ||||
|           # | ||||
|           # This test has been persistently flaky at like 1% frequency, is slow, | ||||
|           # and is for a very specific single feature, so we don't run it by default: | ||||
|           # ./tools/test-queue-worker-reload | ||||
|  | ||||
|           ./tools/test-migrations | ||||
|           ./tools/setup/optimize-svg --check | ||||
|           ./tools/setup/generate_integration_bots_avatars.py --check-missing | ||||
|           ./tools/ci/check-executables | ||||
|  | ||||
|           # Ban check-database-compatibility from transitively | ||||
|           # relying on static/generated, because it might not be | ||||
|           # up-to-date at that point in upgrade-zulip-stage-2. | ||||
|           chmod 000 static/generated web/generated | ||||
|           ./scripts/lib/check-database-compatibility | ||||
|           chmod 755 static/generated web/generated | ||||
|  | ||||
|       - name: Check for untracked files | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # This final check looks for untracked files that may have been | ||||
|           # created by test-backend or provision. | ||||
|           untracked="$(git ls-files --exclude-standard --others)" | ||||
|           if [ -n "$untracked" ]; then | ||||
|               printf >&2 "Error: untracked files:\n%s\n" "$untracked" | ||||
|               exit 1 | ||||
|           fi | ||||
|  | ||||
|       - name: Test locked requirements | ||||
|         if: ${{ matrix.os == 'jammy' }} | ||||
|         run: | | ||||
|           . /srv/zulip-py3-venv/bin/activate && \ | ||||
|           ./tools/test-locked-requirements | ||||
|  | ||||
|       - name: Upload coverage reports | ||||
|  | ||||
|         # Only upload coverage when both frontend and backend | ||||
|         # tests are run. | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         uses: codecov/codecov-action@v4 | ||||
|         with: | ||||
|           files: var/coverage.xml,var/node-coverage/lcov.info | ||||
|           token: ${{ secrets.CODECOV_TOKEN }} | ||||
|  | ||||
|       - name: Store Puppeteer artifacts | ||||
|         # Upload these on failure, as well | ||||
|         if: ${{ always() && matrix.include_frontend_tests }} | ||||
|         uses: actions/upload-artifact@v4 | ||||
|         with: | ||||
|           name: puppeteer | ||||
|           path: ./var/puppeteer | ||||
|           retention-days: 60 | ||||
|  | ||||
|       - name: Check development database build | ||||
|         run: ./tools/ci/setup-backend | ||||
|  | ||||
|       - name: Verify pnpm store path | ||||
|         run: | | ||||
|           set -x | ||||
|           path="$(pnpm store path)" | ||||
|           [[ "$path" == /__w/.pnpm-store/* ]] | ||||
|  | ||||
|       - name: Generate failure report string | ||||
|         id: failure_report_string | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT | ||||
|  | ||||
|       - name: Report status to CZO | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         uses: zulip/github-actions-zulip/send-message@v1 | ||||
|         with: | ||||
|           api-key: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|           email: "github-actions-bot@chat.zulip.org" | ||||
|           organization-url: "https://chat.zulip.org" | ||||
|           to: "automated testing" | ||||
|           topic: ${{ steps.failure_report_string.outputs.topic }} | ||||
|           type: "stream" | ||||
|           content: ${{ steps.failure_report_string.outputs.content }} | ||||
							
								
								
									
.gitignore: 29 changed lines (vendored)
							| @@ -17,27 +17,22 @@ | ||||
| # See `git help ignore` for details on the format. | ||||
|  | ||||
| ## Config files for the dev environment | ||||
| /zproject/apns-dev.pem | ||||
| /zproject/apns-dev-key.p8 | ||||
| /zproject/dev-secrets.conf | ||||
| /zproject/custom_dev_settings.py | ||||
| /tools/conf.ini | ||||
| /tools/custom_provision | ||||
| /tools/droplets/conf.ini | ||||
|  | ||||
| ## Byproducts of setting up and using the dev environment | ||||
| *.pyc | ||||
| *.tsbuildinfo | ||||
| package-lock.json | ||||
|  | ||||
| /.vagrant | ||||
| /var/* | ||||
| !/var/puppeteer | ||||
| /var/puppeteer/* | ||||
| !/var/puppeteer/test_credentials.d.ts | ||||
| /var | ||||
|  | ||||
| /.dmypy.json | ||||
| /.ruff_cache | ||||
|  | ||||
| # Dockerfiles generated for CircleCI | ||||
| /tools/circleci/images | ||||
|  | ||||
| # Generated i18n data | ||||
| /locale/en | ||||
| @@ -48,11 +43,11 @@ package-lock.json | ||||
| # Static build | ||||
| *.mo | ||||
| npm-debug.log | ||||
| /.pnpm-store | ||||
| /node_modules | ||||
| /prod-static | ||||
| /staticfiles.json | ||||
| /webpack-stats-production.json | ||||
| /yarn-error.log | ||||
| zulip-git-version | ||||
|  | ||||
| # Test / analysis tools | ||||
| @@ -75,21 +70,13 @@ zulip.kdev4 | ||||
| *.kate-swp | ||||
| *.sublime-project | ||||
| *.sublime-workspace | ||||
| .vscode/ | ||||
| *.DS_Store | ||||
| # VS Code. Avoid checking in .vscode in general, while still specifying | ||||
| # recommended extensions for working with this repository. | ||||
| /.vscode/**/* | ||||
| !/.vscode/extensions.json | ||||
| # .cache/ is generated by VS Code test runner | ||||
| # .cache/ is generated by VSCode's test runner | ||||
| .cache/ | ||||
| .eslintcache | ||||
|  | ||||
| # Core dump files | ||||
| core | ||||
|  | ||||
| # Static generated files for landing page. | ||||
| /static/images/landing-page/hello/generated | ||||
|  | ||||
| ## Miscellaneous | ||||
| # (Ideally this section is empty.) | ||||
| zthumbor/thumbor_local_settings.py | ||||
| .transifexrc | ||||
|   | ||||
							
								
								
									
.gitlint: 8 changed lines
							| @@ -1,13 +1,13 @@ | ||||
| [general] | ||||
| ignore=title-trailing-punctuation, body-min-length, body-is-missing | ||||
| ignore=title-trailing-punctuation, body-min-length, body-is-missing, title-imperative-mood | ||||
|  | ||||
| extra-path=tools/lib/gitlint_rules.py | ||||
| extra-path=tools/lib/gitlint-rules.py | ||||
|  | ||||
| [title-match-regex] | ||||
| [title-match-regex-allow-exception] | ||||
| regex=^(.+:\ )?[A-Z].+\.$ | ||||
|  | ||||
| [title-max-length] | ||||
| line-length=72 | ||||
| line-length=76 | ||||
|  | ||||
| [body-max-line-length] | ||||
| line-length=76 | ||||
|   | ||||
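
The extra-path setting above loads project-specific rules from a Python module. Purely as an illustration of the shape such a rule takes (this is not the contents of tools/lib/gitlint-rules.py), a user-defined gitlint commit rule re-checking the same title pattern configured in the title-match-regex section might look like:

```python
# Illustrative custom gitlint rule; not the actual tools/lib/gitlint-rules.py.
import re

from gitlint.rules import CommitRule, RuleViolation


class TitleFormat(CommitRule):
    """Require titles like 'area: Capitalized summary ending with a period.'"""

    # Every gitlint rule needs a unique name and id; user-defined rules
    # conventionally use the "UC" id prefix.
    name = "zulip-title-format"
    id = "UC1"

    def validate(self, commit):
        title = commit.message.title
        # Same pattern as the title-match-regex setting in .gitlint above.
        if re.match(r"^(.+:\ )?[A-Z].+\.$", title):
            return []
        msg = "Title should look like 'area: Capitalized summary.'"
        return [RuleViolation(self.id, msg, title)]
```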
							
								
								
									
.isort.cfg: 10 changed lines (Normal file)
							| @@ -0,0 +1,10 @@ | ||||
| [settings] | ||||
| line_length = 79 | ||||
| multi_line_output = 2 | ||||
| balanced_wrapping = true | ||||
| known_third_party = django, ujson, sqlalchemy | ||||
| known_first_party = zerver, zproject, version, confirmation, zilencer, analytics, frontend_tests, scripts, corporate | ||||
| sections = FUTURE, STDLIB, THIRDPARTY, FIRSTPARTY, LOCALFOLDER | ||||
| lines_after_imports = 1 | ||||
| # See the comment related to ioloop_logging for why this is skipped. | ||||
| skip = zerver/management/commands/runtornado.py | ||||
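
To make these settings concrete, here is a rough sketch (with hypothetical zerver module and function names) of how isort would arrange an import block under this configuration: one group per section in FUTURE/STDLIB/THIRDPARTY/FIRSTPARTY order, hanging-indent wrapping for lines over 79 characters (multi_line_output = 2 with balanced_wrapping), and a single blank line after the imports (lines_after_imports = 1).

```python
# Hypothetical example; the zerver import path and function names are illustrative.
from __future__ import annotations  # FUTURE section

import os  # STDLIB section
import sys

import django  # THIRDPARTY section (declared via known_third_party)
import ujson

from zerver.lib.example import do_first_example_thing, \
    do_second_example_thing, do_third_example_thing  # FIRSTPARTY, hanging indent

print(os.getcwd(), django.get_version(), ujson.dumps({"py": sys.version}))
```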
							
								
								
									
.mailmap: 164 changed lines
							| @@ -1,164 +0,0 @@ | ||||
| # This file teaches `git log` and friends the canonical names | ||||
| # and email addresses to use for our contributors. | ||||
| # | ||||
| # For details on the format, see: | ||||
| #   https://git.github.io/htmldocs/gitmailmap.html | ||||
| # | ||||
| # Handy commands for examining or adding to this file: | ||||
| # | ||||
| #     # shows all names/emails after mapping, sorted: | ||||
| #   $ git shortlog -es | sort -k2 | ||||
| # | ||||
| #     # shows raw names/emails, filtered by mapped name: | ||||
| #   $ git log --format='%an %ae' --author=$NAME | uniq -c | ||||
|  | ||||
| acrefoot <acrefoot@zulip.com> <acrefoot@alum.mit.edu> | ||||
| acrefoot <acrefoot@zulip.com> <acrefoot@dropbox.com> | ||||
| acrefoot <acrefoot@zulip.com> <acrefoot@humbughq.com> | ||||
| Adam Benesh <Adam.Benesh@gmail.com> | ||||
| Adam Benesh <Adam.Benesh@gmail.com> <Adam-Daniel.Benesh@t-systems.com> | ||||
| Adarsh Tiwari <xoldyckk@gmail.com> | ||||
| Aditya Chaudhary <aditya.chaudhary1558@gmail.com> | ||||
| Adnan Shabbir Husain <generaladnan139@gmail.com> | ||||
| Adnan Shabbir Husain <generaladnan139@gmail.com> <78212328+adnan-td@users.noreply.github.com> | ||||
| Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net> | ||||
| Alex Vandiver <alexmv@zulip.com> <github@chmrr.net> | ||||
| Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com> | ||||
| Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@zulip.com> | ||||
| Alya Abbott <alya@zulip.com> <2090066+alya@users.noreply.github.com> | ||||
| Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com> | ||||
| Aman Agrawal <amanagr@zulip.com> | ||||
| Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in> | ||||
| Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com> | ||||
| Anders Kaseorg <anders@zulip.com> <andersk@mit.edu> | ||||
| aparna-bhatt <aparnabhatt2001@gmail.com> <86338542+aparna-bhatt@users.noreply.github.com> | ||||
| Aryan Bhokare <aryan1bhokare@gmail.com> | ||||
| Aryan Bhokare <aryan1bhokare@gmail.com> <92683836+aryan-bhokare@users.noreply.github.com> | ||||
| Aryan Shridhar <aryanshridhar7@gmail.com> | ||||
| Aryan Shridhar <aryanshridhar7@gmail.com> <53977614+aryanshridhar@users.noreply.github.com> | ||||
| Ashwat Kumar Singh <ashwat.kumarsingh.met20@itbhu.ac.in> | ||||
| Austin Riba <austin@zulip.com> <austin@m51.io> | ||||
| Bedo Khaled <bedokhaled66@gmail.com> | ||||
| Bedo Khaled <bedokhaled66@gmail.com> <64221784+abdelrahman725@users.noreply.github.com> | ||||
| BIKI DAS <bikid475@gmail.com> | ||||
| Brijmohan Siyag <brijsiyag@gmail.com> | ||||
| Brock Whittaker <whittakerbrock@gmail.com> <bjwhitta@asu.edu> | ||||
| Brock Whittaker <whittakerbrock@gmail.com> <brock@zulip.com> | ||||
| Brock Whittaker <whittakerbrock@gmail.com> <brock@zulip.org> | ||||
| Brock Whittaker <whittakerbrock@gmail.com> <brock@zulipchat.org> | ||||
| Brock Whittaker <whittakerbrock@gmail.com> <brockwhittaker@Brocks-MacBook.local> | ||||
| Chris Bobbe <cbobbe@zulip.com> <cbobbe@zulipchat.com> | ||||
| Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com> | ||||
| codewithnick <nikhilsingh526452@gmail.com> | ||||
| Danny Su <contact@dannysu.com> <opensource@emailengine.org> | ||||
| Dhruv Goyal <dhruvgoyal.dev@gmail.com> | ||||
| Dinesh <chdinesh1089@gmail.com> | ||||
| Dinesh <chdinesh1089@gmail.com> <chdinesh1089> | ||||
| Eeshan Garg <eeshan@zulip.com> <jerryguitarist@gmail.com> | ||||
| Eric Smith <erwsmith@gmail.com> <99841919+erwsmith@users.noreply.github.com> | ||||
| Evy Kassirer <evy@zulip.com> | ||||
| Evy Kassirer <evy@zulip.com> <evy.kassirer@gmail.com> | ||||
| Evy Kassirer <evy@zulip.com> <evykassirer@users.noreply.github.com> | ||||
| Ganesh Pawar <pawarg256@gmail.com> <58626718+ganpa3@users.noreply.github.com> | ||||
| Greg Price <greg@zulip.com> <gnprice@gmail.com> | ||||
| Greg Price <greg@zulip.com> <greg@zulipchat.com> | ||||
| Greg Price <greg@zulip.com> <price@mit.edu> | ||||
| Hardik Dharmani <Ddharmani99@gmail.com> <ddharmani99@gmail.com> | ||||
| Hemant Umre <hemantumre12@gmail.com> <87542880+HemantUmre12@users.noreply.github.com> | ||||
| Jai soni <jai_s@me.iitr.ac.in> | ||||
| Jai soni <jai_s@me.iitr.ac.in> <76561593+jai2201@users.noreply.github.com> | ||||
| Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com> | ||||
| Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com> | ||||
| Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com> | ||||
| Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com> | ||||
| John Lu <JohnLu10212004@gmail.com> | ||||
| John Lu <JohnLu10212004@gmail.com> <87673068+JohnLu2004@users.noreply.github.com> | ||||
| Joseph Ho <josephho678@gmail.com> | ||||
| Joseph Ho <josephho678@gmail.com> <62449508+Joelute@users.noreply.github.com> | ||||
| Julia Bichler <julia.bichler@tum.de> <74348920+juliaBichler01@users.noreply.github.com> | ||||
| Karl Stolley <karl@zulip.com> <karl@stolley.dev> | ||||
| Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com> | ||||
| Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com> | ||||
| Kevin Scott <kevin.scott.98@gmail.com> | ||||
| Kislay Verma <kislayuv27@gmail.com> | ||||
| Kunal Sharma <v.shm.kunal@gmail.com> | ||||
| Lalit Kumar Singh <lalitkumarsingh3716@gmail.com> | ||||
| Lalit Kumar Singh <lalitkumarsingh3716@gmail.com> <lalits01@smartek21.com> | ||||
| Lauryn Menard <lauryn@zulip.com> <63245456+laurynmm@users.noreply.github.com> | ||||
| Lauryn Menard <lauryn@zulip.com> <lauryn.menard@gmail.com> | ||||
| m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in> | ||||
| m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in> <pururshottam.tiwari.cd.cse19@itbhu.ac.in> | ||||
| Mateusz Mandera <mateusz.mandera@zulip.com> <mateusz.mandera@protonmail.com> | ||||
| Matt Keller <matt@zulip.com> | ||||
| Matt Keller <matt@zulip.com> <m@cognusion.com> | ||||
| Nehal Sharma <bablinaneh@gmail.com> | ||||
| Nehal Sharma <bablinaneh@gmail.com> <68962290+N-Shar-ma@users.noreply.github.com> | ||||
| Nimish Medatwal <medatwalnimish@gmail.com> | ||||
| Noble Mittal <noblemittal@outlook.com> <62551163+beingnoble03@users.noreply.github.com> | ||||
| nzai <nzaih18@gmail.com> <70953556+nzaih1999@users.noreply.github.com> | ||||
| Palash Baderia <palash.baderia@outlook.com> | ||||
| Palash Baderia <palash.baderia@outlook.com> <66828942+palashb01@users.noreply.github.com> | ||||
| Palash Raghuwanshi <singhpalash0@gmail.com> | ||||
| Parth <mittalparth22@gmail.com> | ||||
| Pratik Chanda <pratikchanda2000@gmail.com> | ||||
| Pratik Solanki <pratiksolanki2021@gmail.com> | ||||
| Priyam Seth <sethpriyam1@gmail.com> <b19188@students.iitmandi.ac.in> | ||||
| Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com> | ||||
| Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com> | ||||
| Rein Zustand (rht) <rhtbot@protonmail.com> | ||||
| Rishabh Maheshwari <b20063@students.iitmandi.ac.in> | ||||
| Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu> | ||||
| Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com> | ||||
| Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com> | ||||
| Rixant Rokaha <rixantrokaha@gmail.com> | ||||
| Rixant Rokaha <rixantrokaha@gmail.com> <rishantrokaha@gmail.com> | ||||
| Rixant Rokaha <rixantrokaha@gmail.com> <rrokaha@caldwell.edu> | ||||
| Rohan Gudimetla <rohan.gudimetla07@gmail.com> | ||||
| Sahil Batra <sahil@zulip.com> <35494118+sahil839@users.noreply.github.com> | ||||
| Sahil Batra <sahil@zulip.com> <sahilbatra839@gmail.com> | ||||
| Sanchit Sharma <ssharmas10662@gmail.com> | ||||
| Satyam Bansal <sbansal1999@gmail.com> | ||||
| Sayam Samal <samal.sayam@gmail.com> | ||||
| Scott Feeney <scott@oceanbase.org> <scott@humbughq.com> | ||||
| Scott Feeney <scott@oceanbase.org> <scott@zulip.com> | ||||
| Shashank Singh <21bec103@iiitdmj.ac.in> | ||||
| Shlok Patel <shlokcpatel2001@gmail.com> | ||||
| Shu Chen <shu@zulip.com> | ||||
| Shubham Padia <shubham@zulip.com> | ||||
| Shubham Padia <shubham@zulip.com> <shubham@glints.com> | ||||
| Somesh Ranjan <somesh.ranjan.met20@itbhu.ac.in> <77766761+somesh202@users.noreply.github.com> | ||||
| Steve Howell <showell@zulip.com> <showell30@yahoo.com> | ||||
| Steve Howell <showell@zulip.com> <showell@yahoo.com> | ||||
| Steve Howell <showell@zulip.com> <showell@zulipchat.com> | ||||
| Steve Howell <showell@zulip.com> <steve@humbughq.com> | ||||
| Steve Howell <showell@zulip.com> <steve@zulip.com> | ||||
| strifel <info@strifel.de> | ||||
| Sujal Shah <sujalshah28092004@gmail.com> | ||||
| Tanmay Kumar <tnmdotkr@gmail.com> | ||||
| Tanmay Kumar <tnmdotkr@gmail.com> <133781250+tnmkr@users.noreply.github.com> | ||||
| Tim Abbott <tabbott@zulip.com> | ||||
| Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com> | ||||
| Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com> | ||||
| Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu> | ||||
| Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com> | ||||
| Tomasz Kolek <tomasz-kolek@o2.pl> <tomasz-kolek@go2.pl> | ||||
| Ujjawal Modi <umodi2003@gmail.com> <99073049+Ujjawal3@users.noreply.github.com> | ||||
| umkay <ukhan@zulipchat.com> <umaimah.k@gmail.com> | ||||
| umkay <ukhan@zulipchat.com> <umkay@users.noreply.github.com> | ||||
| Viktor Illmer <1476338+v-ji@users.noreply.github.com> | ||||
| Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com> | ||||
| Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com> | ||||
| Waseem Daher <wdaher@zulip.com> <wdaher@dropbox.com> | ||||
| Waseem Daher <wdaher@zulip.com> <wdaher@humbughq.com> | ||||
| Yash RE <33805964+YashRE42@users.noreply.github.com> | ||||
| Yash RE <33805964+YashRE42@users.noreply.github.com> <YashRE42@github.com> | ||||
| Yogesh Sirsat <yogeshsirsat56@gmail.com> | ||||
| Yogesh Sirsat <yogeshsirsat56@gmail.com> <41695888+yogesh-sirsat@users.noreply.github.com> | ||||
| Zeeshan Equbal <equbalzeeshan@gmail.com> | ||||
| Zeeshan Equbal <equbalzeeshan@gmail.com> <54993043+zee-bit@users.noreply.github.com> | ||||
| Zev Benjamin <zev@zulip.com> <zev@dropbox.com> | ||||
| Zev Benjamin <zev@zulip.com> <zev@humbughq.com> | ||||
| Zev Benjamin <zev@zulip.com> <zev@mit.edu> | ||||
| Zixuan James Li <p359101898@gmail.com> | ||||
| Zixuan James Li <p359101898@gmail.com> <359101898@qq.com> | ||||
| Zixuan James Li <p359101898@gmail.com> <39874143+PIG208@users.noreply.github.com> | ||||
| @@ -1,17 +0,0 @@ | ||||
| pnpm-lock.yaml | ||||
| /api_docs/**/*.md | ||||
| /corporate/tests/stripe_fixtures | ||||
| /help/**/*.md | ||||
| /locale | ||||
| /templates/**/*.md | ||||
| /tools/setup/emoji/emoji_map.json | ||||
| /web/third/* | ||||
| !/web/third/marked | ||||
| /web/third/marked/* | ||||
| !/web/third/marked/lib | ||||
| /web/third/marked/lib/* | ||||
| !/web/third/marked/lib/marked.d.ts | ||||
| /zerver/tests/fixtures | ||||
| /zerver/webhooks/*/doc.md | ||||
| /zerver/webhooks/github/githubsponsors.md | ||||
| /zerver/webhooks/*/fixtures | ||||
| @@ -1,15 +0,0 @@ | ||||
| { | ||||
|   "source_directories": ["."], | ||||
|   "taint_models_path": [ | ||||
|       "stubs/taint", | ||||
|       "zulip-py3-venv/lib/pyre_check/taint/" | ||||
|   ], | ||||
|   "search_path": [ | ||||
|       "stubs/", | ||||
|       "zulip-py3-venv/lib/pyre_check/stubs/" | ||||
|   ], | ||||
|   "typeshed": "zulip-py3-venv/lib/pyre_check/typeshed/", | ||||
|   "exclude": [ | ||||
|       "/srv/zulip/zulip-py3-venv/.*" | ||||
|   ] | ||||
| } | ||||
| @@ -1,15 +0,0 @@ | ||||
| # https://docs.readthedocs.io/en/stable/config-file/v2.html | ||||
| version: 2 | ||||
|  | ||||
| build: | ||||
|   os: ubuntu-22.04 | ||||
|   tools: | ||||
|     python: "3.10" | ||||
|  | ||||
| sphinx: | ||||
|   configuration: docs/conf.py | ||||
|   fail_on_warning: true | ||||
|  | ||||
| python: | ||||
|   install: | ||||
|     - requirements: requirements/docs.txt | ||||
| @@ -1 +0,0 @@ | ||||
| sonar.inclusions=**/*.py,**/*.html | ||||
							
								
								
									
.stylelintrc: 67 changed lines (Normal file)
							| @@ -0,0 +1,67 @@ | ||||
| { | ||||
|     "rules": { | ||||
|         # Stylistic rules for CSS. | ||||
|         "function-comma-space-after": "always", | ||||
|         "function-comma-space-before": "never", | ||||
|         "function-max-empty-lines": 0, | ||||
|         "function-whitespace-after": "always", | ||||
|  | ||||
|         "value-keyword-case": "lower", | ||||
|         "value-list-comma-newline-after": "always-multi-line", | ||||
|         "value-list-comma-space-after": "always-single-line", | ||||
|         "value-list-comma-space-before": "never", | ||||
|         "value-list-max-empty-lines": 0, | ||||
|  | ||||
|         "unit-case": "lower", | ||||
|         "property-case": "lower", | ||||
|         "color-hex-case": "lower", | ||||
|  | ||||
|         "declaration-bang-space-before": "always", | ||||
|         "declaration-colon-newline-after": "always-multi-line", | ||||
|         "declaration-colon-space-after": "always-single-line", | ||||
|         "declaration-colon-space-before": "never", | ||||
|         "declaration-block-semicolon-newline-after": "always", | ||||
|         "declaration-block-semicolon-space-before": "never", | ||||
|         "declaration-block-trailing-semicolon": "always", | ||||
|  | ||||
|         "block-closing-brace-empty-line-before": "never", | ||||
|         "block-closing-brace-newline-after": "always", | ||||
|         "block-closing-brace-newline-before": "always", | ||||
|         "block-opening-brace-newline-after": "always", | ||||
|         "block-opening-brace-space-before": "always", | ||||
|  | ||||
|         "selector-attribute-brackets-space-inside": "never", | ||||
|         "selector-attribute-operator-space-after": "never", | ||||
|         "selector-attribute-operator-space-before": "never", | ||||
|         "selector-combinator-space-after": "always", | ||||
|         "selector-combinator-space-before": "always", | ||||
|         "selector-descendant-combinator-no-non-space": true, | ||||
|         "selector-pseudo-class-parentheses-space-inside": "never", | ||||
|         "selector-pseudo-element-case": "lower", | ||||
|         "selector-pseudo-element-colon-notation": "double", | ||||
|         "selector-type-case": "lower", | ||||
|         "selector-list-comma-newline-after": "always", | ||||
|         "selector-list-comma-space-before": "never", | ||||
|  | ||||
|         "media-feature-colon-space-after": "always", | ||||
|         "media-feature-colon-space-before": "never", | ||||
|         "media-feature-name-case": "lower", | ||||
|         "media-feature-parentheses-space-inside": "never", | ||||
|         "media-feature-range-operator-space-after": "always", | ||||
|         "media-feature-range-operator-space-before": "always", | ||||
|         "media-query-list-comma-newline-after": "always", | ||||
|         "media-query-list-comma-space-before": "never", | ||||
|  | ||||
|         "at-rule-name-case": "lower", | ||||
|         "at-rule-name-space-after": "always", | ||||
|         "at-rule-semicolon-newline-after": "always", | ||||
|         "at-rule-semicolon-space-before": "never", | ||||
|  | ||||
|         "comment-whitespace-inside": "always", | ||||
|         "indentation": 4, | ||||
|          | ||||
|         # Limit language features | ||||
|         "color-no-hex": true, | ||||
|         "color-named": "never", | ||||
|     } | ||||
| } | ||||
							
								
								
									
.travis.yml: 66 changed lines (Normal file)
							| @@ -0,0 +1,66 @@ | ||||
| # See https://zulip.readthedocs.io/en/latest/testing/continuous-integration.html for | ||||
| # high-level documentation on our Travis CI setup. | ||||
| dist: xenial | ||||
| install: | ||||
|   # Disable sometimes-broken sources.list in Travis base images | ||||
|   - sudo rm -vf /etc/apt/sources.list.d/* | ||||
|   - sudo apt-get update | ||||
|  | ||||
|   # Disable Travis CI's built-in NVM installation | ||||
|   - mispipe "mv ~/.nvm ~/.travis-nvm-disabled" ts | ||||
|  | ||||
|   # Install codecov, the library for the code coverage reporting tool we use | ||||
|   # With a retry to minimize impact of transient networking errors. | ||||
|   - mispipe "pip install codecov" ts || mispipe "pip install codecov" ts | ||||
|  | ||||
|   # This is the main setup job for the test suite | ||||
|   - mispipe "tools/ci/setup-$TEST_SUITE" ts | ||||
|  | ||||
|   # Clean any caches that are not in use to avoid our cache | ||||
|   # becoming huge. | ||||
|   - mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0" ts | ||||
|  | ||||
| script: | ||||
|   # We unset GEM_PATH here as a hack to work around Travis CI breaking
|   # the ability to run their system puppet with Ruby.  See
|   # https://travis-ci.org/zulip/zulip/jobs/240120991 for an example traceback.
|   - unset GEM_PATH | ||||
|   - mispipe "./tools/ci/$TEST_SUITE" ts | ||||
| cache: | ||||
|   yarn: true | ||||
|   apt: false | ||||
|   directories: | ||||
|     - $HOME/zulip-venv-cache | ||||
|     - $HOME/zulip-npm-cache | ||||
|     - $HOME/zulip-emoji-cache | ||||
|     - $HOME/node | ||||
|     - $HOME/misc | ||||
| env: | ||||
|   global: | ||||
|     - BOTO_CONFIG=/nonexistent | ||||
| language: python | ||||
| # Our test suites generally run on Python 3.5, the version in | ||||
| # Ubuntu 16.04 xenial, which is the oldest OS release we support. | ||||
| matrix: | ||||
|   include: | ||||
|     # Travis will actually run the jobs in the order they're listed here; | ||||
|     # that doesn't seem to be documented, but it's what we see empirically. | ||||
|     # We only get 4 jobs running at a time, so we try to make the first few | ||||
|     # the most likely to break. | ||||
|     - python: "3.5" | ||||
|       env: TEST_SUITE=production | ||||
|     # Other suites moved to CircleCI -- see .circleci/. | ||||
| sudo: required | ||||
| addons: | ||||
|   artifacts: | ||||
|     paths: | ||||
|       # Casper debugging data (screenshots, etc.) is super useful for | ||||
|       # debugging test flakes. | ||||
|       - $(ls var/casper/* | tr "\n" ":") | ||||
|       - $(ls /tmp/zulip-test-event-log/* | tr "\n" ":") | ||||
|   postgresql: "9.5" | ||||
|   apt: | ||||
|     packages: | ||||
|       - moreutils | ||||
| after_success: | ||||
|   - codecov | ||||
							
								
								
									
.tx/config: 27 changed lines
							| @@ -1,39 +1,32 @@ | ||||
| # Migrated from transifex-client format with `tx migrate` | ||||
| # | ||||
| # See https://developers.transifex.com/docs/using-the-client which hints at | ||||
| # this format, but in general, the headings are in the format of: | ||||
| # | ||||
| # [o:<org>:p:<project>:r:<resource>] | ||||
|  | ||||
| [main] | ||||
| host = https://www.transifex.com | ||||
| lang_map = zh-Hans: zh_Hans, zh-Hant: zh_Hant | ||||
|  | ||||
| [o:zulip:p:zulip:r:djangopo] | ||||
| [zulip.djangopo] | ||||
| file_filter = locale/<lang>/LC_MESSAGES/django.po | ||||
| source_file = locale/en/LC_MESSAGES/django.po | ||||
| source_lang = en | ||||
| type = PO | ||||
|  | ||||
| [o:zulip:p:zulip:r:mobile] | ||||
| [zulip.translationsjson] | ||||
| file_filter = locale/<lang>/translations.json | ||||
| source_file = locale/en/translations.json | ||||
| source_lang = en | ||||
| type = KEYVALUEJSON | ||||
|  | ||||
| [zulip.mobile] | ||||
| file_filter = locale/<lang>/mobile.json | ||||
| source_file = locale/en/mobile.json | ||||
| source_lang = en | ||||
| type = KEYVALUEJSON | ||||
|  | ||||
| [o:zulip:p:zulip:r:translationsjson] | ||||
| file_filter = locale/<lang>/translations.json | ||||
| source_file = locale/en/translations.json | ||||
| source_lang = en | ||||
| type = KEYVALUEJSON | ||||
|  | ||||
| [o:zulip:p:zulip-test:r:djangopo] | ||||
| [zulip-test.djangopo] | ||||
| file_filter = locale/<lang>/LC_MESSAGES/django.po | ||||
| source_file = locale/en/LC_MESSAGES/django.po | ||||
| source_lang = en | ||||
| type = PO | ||||
|  | ||||
| [o:zulip:p:zulip-test:r:translationsjson] | ||||
| [zulip-test.translationsjson] | ||||
| file_filter = locale/<lang>/translations.json | ||||
| source_file = locale/en/translations.json | ||||
| source_lang = en | ||||
|   | ||||
							
								
								
									
.vscode/extensions.json: 23 changed lines (vendored)
							| @@ -1,23 +0,0 @@ | ||||
| { | ||||
|     // Recommended VS Code extensions for zulip/zulip. | ||||
|     // | ||||
|     // VS Code prompts a user to install the recommended extensions | ||||
|     // when a workspace is opened for the first time.  The user can | ||||
|     // also review the list with the 'Extensions: Show Recommended | ||||
|     // Extensions' command.  See | ||||
|     // https://code.visualstudio.com/docs/editor/extension-marketplace#_workspace-recommended-extensions | ||||
|     // for more information. | ||||
|     // | ||||
|     // Extension identifier format: ${publisher}.${name}. | ||||
|     // Example: vscode.csharp | ||||
|  | ||||
|     "recommendations": [ | ||||
|         "42crunch.vscode-openapi", | ||||
|         "dbaeumer.vscode-eslint", | ||||
|         "esbenp.prettier-vscode", | ||||
|         "ms-vscode-remote.vscode-remote-extensionpack" | ||||
|     ], | ||||
|  | ||||
|     // Extensions recommended by VS Code which are not recommended for users of zulip/zulip. | ||||
|     "unwantedRecommendations": [] | ||||
| } | ||||
| @@ -14,46 +14,46 @@ This isn't an exhaustive list of things that you can't do. Rather, take it | ||||
| in the spirit in which it's intended --- a guide to make it easier to enrich | ||||
| all of us and the technical communities in which we participate. | ||||
|  | ||||
| ## Expected behavior | ||||
| ## Expected Behavior | ||||
|  | ||||
| The following behaviors are expected and requested of all community members: | ||||
|  | ||||
| - Participate. In doing so, you contribute to the health and longevity of | ||||
| * Participate. In doing so, you contribute to the health and longevity of | ||||
|   the community. | ||||
| - Exercise consideration and respect in your speech and actions. | ||||
| - Attempt collaboration before conflict. Assume good faith. | ||||
| - Refrain from demeaning, discriminatory, or harassing behavior and speech. | ||||
| - Take action or alert community leaders if you notice a dangerous | ||||
| * Exercise consideration and respect in your speech and actions. | ||||
| * Attempt collaboration before conflict. Assume good faith. | ||||
| * Refrain from demeaning, discriminatory, or harassing behavior and speech. | ||||
| * Take action or alert community leaders if you notice a dangerous | ||||
|   situation, someone in distress, or violations of this code, even if they | ||||
|   seem inconsequential. | ||||
| - Community event venues may be shared with members of the public; be | ||||
| * Community event venues may be shared with members of the public; be | ||||
|   respectful to all patrons of these locations. | ||||
|  | ||||
| ## Unacceptable behavior | ||||
| ## Unacceptable Behavior | ||||
|  | ||||
| The following behaviors are considered harassment and are unacceptable | ||||
| within the Zulip community: | ||||
|  | ||||
| - Jokes or derogatory language that singles out members of any race, | ||||
| * Jokes or derogatory language that singles out members of any race, | ||||
|   ethnicity, culture, national origin, color, immigration status, social and | ||||
|   economic class, educational level, language proficiency, sex, sexual | ||||
|   orientation, gender identity and expression, age, size, family status, | ||||
|   political belief, religion, and mental and physical ability. | ||||
| - Violence, threats of violence, or violent language directed against | ||||
| * Violence, threats of violence, or violent language directed against | ||||
|   another person. | ||||
| - Disseminating or threatening to disseminate another person's personal | ||||
| * Disseminating or threatening to disseminate another person's personal | ||||
|   information. | ||||
| - Personal insults of any sort. | ||||
| - Posting or displaying sexually explicit or violent material. | ||||
| - Inappropriate photography or recording. | ||||
| - Deliberate intimidation, stalking, or following (online or in person). | ||||
| - Unwelcome sexual attention. This includes sexualized comments or jokes, | ||||
| * Personal insults of any sort. | ||||
| * Posting or displaying sexually explicit or violent material. | ||||
| * Inappropriate photography or recording. | ||||
| * Deliberate intimidation, stalking, or following (online or in person). | ||||
| * Unwelcome sexual attention. This includes sexualized comments or jokes, | ||||
|   inappropriate touching or groping, and unwelcomed sexual advances. | ||||
| - Sustained disruption of community events, including talks and | ||||
| * Sustained disruption of community events, including talks and | ||||
|   presentations. | ||||
| - Advocating for, or encouraging, any of the behaviors above. | ||||
| * Advocating for, or encouraging, any of the behaviors above. | ||||
|  | ||||
| ## Reporting and enforcement | ||||
| ## Reporting and Enforcement | ||||
|  | ||||
| Harassment and other code of conduct violations reduce the value of the | ||||
| community for everyone. If someone makes you or anyone else feel unsafe or | ||||
| @@ -66,7 +66,7 @@ organizers may take any action they deem appropriate, up to and including a | ||||
| temporary ban or permanent expulsion from the community without warning (and | ||||
| without refund in the case of a paid event). | ||||
|  | ||||
| If someone outside the development community (e.g., a user of the Zulip | ||||
| If someone outside the development community (e.g. a user of the Zulip | ||||
| software) engages in unacceptable behavior that affects someone in the | ||||
| community, we still want to know. Even if we don't have direct control over | ||||
| the violator, the community organizers can still support the people | ||||
| @@ -95,79 +95,11 @@ behavior occurring outside the scope of community activities when such | ||||
| behavior has the potential to adversely affect the safety and well-being of | ||||
| community members. | ||||
|  | ||||
| ## License and attribution | ||||
| ## License and Attribution | ||||
|  | ||||
| This Code of Conduct is adapted from the | ||||
| [Citizen Code of Conduct](http://citizencodeofconduct.org/) and the | ||||
| [Django Code of Conduct](https://www.djangoproject.com/conduct/), and is | ||||
| under a | ||||
| [Creative Commons BY-SA](https://creativecommons.org/licenses/by-sa/4.0/) | ||||
| [Creative Commons BY-SA](http://creativecommons.org/licenses/by-sa/4.0/) | ||||
| license. | ||||
|  | ||||
| ## Moderating the Zulip community | ||||
|  | ||||
| Anyone can help moderate the Zulip community by helping make sure that folks are | ||||
| aware of the [community guidelines](https://zulip.com/development-community/) | ||||
| and this Code of Conduct, and that we maintain a positive and respectful | ||||
| atmosphere. | ||||
|  | ||||
| Here are some guidelines for how you can help: | ||||
|  | ||||
| - Be friendly! Welcoming folks, thanking them for their feedback, ideas and effort, | ||||
|   and just trying to keep the atmosphere warm make the whole community function | ||||
|   more smoothly. New participants who feel accepted, listened to and respected | ||||
|   are likely to treat others the same way. | ||||
|  | ||||
| - Be familiar with the [community | ||||
|   guidelines](https://zulip.com/development-community/), and cite them liberally | ||||
|   when a user violates them. Be polite but firm. Some examples: | ||||
|  | ||||
|   - @user please note that there is no need to @-mention @\_**Tim Abbott** when | ||||
|     you ask a question. As noted in the [guidelines for this | ||||
|     community](https://zulip.com/development-community/): | ||||
|  | ||||
|     > Use @-mentions sparingly… there is generally no need to @-mention a | ||||
|     > core contributor unless you need their timely attention. | ||||
|  | ||||
|   - @user, please keep in mind the following [community | ||||
|     guideline](https://zulip.com/development-community/): | ||||
|  | ||||
|     > Don’t ask the same question in multiple places. Moderators read every | ||||
|     > public stream, and make sure every question gets a reply. | ||||
|  | ||||
|     I’ve gone ahead and moved the other copy of this message to this thread. | ||||
|  | ||||
|   - If asked a question in a direct message that is better discussed in a public | ||||
|     stream: | ||||
|     > Hi @user! Please start by reviewing | ||||
|     > https://zulip.com/development-community/#community-norms to learn how to | ||||
|     > get help in this community. | ||||
|  | ||||
| - Users sometimes think chat.zulip.org is a testing instance. When this happens, | ||||
|   kindly direct them to use the **#test here** stream. | ||||
|  | ||||
| - If you see a message that’s posted in the wrong place, go ahead and move it if | ||||
|   you have permissions to do so, even if you don’t plan to respond to it. | ||||
|   Leaving the “Send automated notice to new topic” option enabled helps make it | ||||
|   clear what happened to the person who sent the message. | ||||
|  | ||||
|   If you are responding to a message that's been moved, mention the user in your | ||||
|   reply, so that the mention serves as a notification of the new location for | ||||
|   their conversation. | ||||
|  | ||||
| - If a user is posting spam, please report it to an administrator. They will: | ||||
|  | ||||
|   - Change the user's name to `<name> (spammer)` and deactivate them. | ||||
|   - Delete any spam messages they posted in public streams. | ||||
|  | ||||
| - We care very much about maintaining a respectful tone in our community. If you | ||||
|   see someone being mean or rude, point out that their tone is inappropriate, | ||||
|   and ask them to communicate their perspective in a respectful way in the | ||||
|   future. If you don’t feel comfortable doing so yourself, feel free to ask a | ||||
|   member of Zulip's core team to take care of the situation. | ||||
|  | ||||
| - Try to assume the best intentions from others (given the range of | ||||
|   possibilities presented by their visible behavior), and stick with a friendly | ||||
|   and positive tone even when someone’s behavior is poor or disrespectful. | ||||
|   Everyone has bad days and stressful situations that can result in them | ||||
|   behaving not their best, and while we should be firm about our community | ||||
|   rules, we should also enforce them with kindness. | ||||
|   | ||||
							
								
								
									
CONTRIBUTING.md (610 lines changed)
| @@ -1,36 +1,23 @@ | ||||
| # Contributing guide | ||||
| # Contributing to Zulip | ||||
|  | ||||
| Welcome to the Zulip community! | ||||
|  | ||||
| ## Zulip development community | ||||
| ## Community | ||||
|  | ||||
| The primary communication forum for the Zulip community is the Zulip | ||||
| server hosted at [chat.zulip.org](https://chat.zulip.org/): | ||||
| The | ||||
| [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html) | ||||
| is the primary communication forum for the Zulip community. It is a good | ||||
| place to start whether you have a question, are a new contributor, are a new | ||||
| user, or anything else. Make sure to read the | ||||
| [community norms](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html#community-norms) | ||||
| before posting. The Zulip community is also governed by a | ||||
| [code of conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html). | ||||
|  | ||||
| - **Users** and **administrators** of Zulip organizations stop by to | ||||
|   ask questions, offer feedback, and participate in product design | ||||
|   discussions. | ||||
| - **Contributors to the project**, including the **core Zulip | ||||
|   development team**, discuss ongoing and future projects, brainstorm | ||||
|   ideas, and generally help each other out. | ||||
|  | ||||
| Everyone is welcome to [sign up](https://chat.zulip.org/) and | ||||
| participate — we love hearing from our users! Public streams in the | ||||
| community receive thousands of messages a week. We recommend signing | ||||
| up using the special invite links for | ||||
| [users](https://chat.zulip.org/join/t5crtoe62bpcxyisiyglmtvb/), | ||||
| [self-hosters](https://chat.zulip.org/join/wnhv3jzm6afa4raenedanfno/) | ||||
| and | ||||
| [contributors](https://chat.zulip.org/join/npzwak7vpmaknrhxthna3c7p/) | ||||
| to get a curated list of initial stream subscriptions. | ||||
|  | ||||
| To learn how to get started participating in the community, including [community | ||||
| norms](https://zulip.com/development-community/#community-norms) and [where to | ||||
| post](https://zulip.com/development-community/#where-do-i-send-my-message), | ||||
| check out our [Zulip development community | ||||
| guide](https://zulip.com/development-community/). The Zulip community is | ||||
| governed by a [code of | ||||
| conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html). | ||||
| You can subscribe to zulip-devel-announce@googlegroups.com or our | ||||
| [Twitter](https://twitter.com/zulip) account for a lower traffic (~1 | ||||
| email/month) way to hear about things like mentorship opportunities with Google | ||||
| Code-in, in-person sprints at conferences, and other opportunities to | ||||
| contribute. | ||||
|  | ||||
| ## Ways to contribute | ||||
|  | ||||
| @@ -38,322 +25,199 @@ To make a code or documentation contribution, read our | ||||
| [step-by-step guide](#your-first-codebase-contribution) to getting | ||||
| started with the Zulip codebase. A small sample of the type of work that | ||||
| needs doing: | ||||
|  | ||||
| - Bug squashing and feature development on our Python/Django | ||||
| * Bug squashing and feature development on our Python/Django | ||||
|   [backend](https://github.com/zulip/zulip), web | ||||
|   [frontend](https://github.com/zulip/zulip), | ||||
|   Flutter [mobile app](https://github.com/zulip/zulip-flutter) in beta, | ||||
|   or Electron [desktop app](https://github.com/zulip/zulip-desktop). | ||||
| - Building out our | ||||
|   [frontend](https://github.com/zulip/zulip), React Native | ||||
|   [mobile app](https://github.com/zulip/zulip-mobile), or Electron | ||||
|   [desktop app](https://github.com/zulip/zulip-desktop). | ||||
| * Building out our | ||||
|   [Python API and bots](https://github.com/zulip/python-zulip-api) framework. | ||||
| - [Writing an integration](https://zulip.com/api/integrations-overview). | ||||
| - Improving our [user](https://zulip.com/help/) or | ||||
| * [Writing an integration](https://zulipchat.com/api/integrations-overview). | ||||
| * Improving our [user](https://zulipchat.com/help/) or | ||||
|   [developer](https://zulip.readthedocs.io/en/latest/) documentation. | ||||
| - [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html) | ||||
| * [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html) | ||||
|   and manually testing pull requests. | ||||
|  | ||||
| **Non-code contributions**: Some of the most valuable ways to contribute | ||||
| don't require touching the codebase at all. For example, you can: | ||||
| don't require touching the codebase at all. We list a few of them below: | ||||
|  | ||||
| - Report issues, including both [feature | ||||
|   requests](https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html) | ||||
|   and [bug | ||||
|   reports](https://zulip.readthedocs.io/en/latest/contributing/reporting-bugs.html). | ||||
| - [Give feedback](#user-feedback) if you are evaluating or using Zulip. | ||||
| - [Participate | ||||
|   thoughtfully](https://zulip.readthedocs.io/en/latest/contributing/design-discussions.html) | ||||
|   in design discussions. | ||||
| - [Sponsor Zulip](https://github.com/sponsors/zulip) through the GitHub sponsors program. | ||||
| - [Translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) | ||||
|   Zulip into your language. | ||||
| - [Stay connected](#stay-connected) with Zulip, and [help others | ||||
|   find us](#help-others-find-zulip). | ||||
| * [Reporting issues](#reporting-issues), including both feature requests and | ||||
|   bug reports. | ||||
| * [Giving feedback](#user-feedback) if you are evaluating or using Zulip. | ||||
| * [Translating](https://zulip.readthedocs.io/en/latest/translating/translating.html) | ||||
|   Zulip. | ||||
| * [Outreach](#zulip-outreach): Star us on GitHub, upvote us | ||||
|   on product comparison sites, or write for [the Zulip blog](http://blog.zulip.org/). | ||||
|  | ||||
| ## Your first codebase contribution | ||||
| ## Your first (codebase) contribution | ||||
|  | ||||
| This section has a step by step guide to starting as a Zulip codebase | ||||
| contributor. It's long, but don't worry about doing all the steps perfectly; | ||||
| no one gets it right the first time, and there are a lot of people available | ||||
| to help. | ||||
|  | ||||
| - First, make an account on the | ||||
|   [Zulip community server](https://zulip.com/development-community/), | ||||
|   paying special attention to the | ||||
|   [community norms](https://zulip.com/development-community/#community-norms). | ||||
|   If you'd like, introduce yourself in | ||||
|   [#new members](https://chat.zulip.org/#narrow/channel/95-new-members), using | ||||
| * First, make an account on the | ||||
|   [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html), | ||||
|   paying special attention to the community norms. If you'd like, introduce | ||||
|   yourself in | ||||
|   [#new members](https://chat.zulip.org/#narrow/stream/95-new-members), using | ||||
|   your name as the topic. Bonus: tell us about your first impressions of | ||||
|   Zulip, and anything that felt confusing/broken or interesting/helpful as you | ||||
|   started using the product. | ||||
|   Zulip, and anything that felt confusing/broken as you started using the | ||||
|   product. | ||||
| * Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor). | ||||
| * [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html), | ||||
|   getting help in | ||||
|   [#development help](https://chat.zulip.org/#narrow/stream/49-development-help) | ||||
|   if you run into any troubles. | ||||
| * Read the | ||||
|   [Zulip guide to Git](https://zulip.readthedocs.io/en/latest/git/index.html) | ||||
|   and do the Git tutorial (coming soon) if you are unfamiliar with | ||||
|   Git, getting help in | ||||
|   [#git help](https://chat.zulip.org/#narrow/stream/44-git-help) if | ||||
|   you run into any troubles.  Be sure to check out the | ||||
|   [extremely useful Zulip-specific tools page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html). | ||||
| * Sign the | ||||
|   [Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/). | ||||
|  | ||||
| - Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor). | ||||
| ### Picking an issue | ||||
|  | ||||
| - Set up the development environment for the Zulip codebase you want | ||||
|   to work on, and start getting familiar with the code. | ||||
| Now, you're ready to pick your first issue! There are hundreds of open issues | ||||
| in the main codebase alone. This section will help you find an issue to work | ||||
| on. | ||||
|  | ||||
|   - For the server and web app: | ||||
|  | ||||
|     - [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html), | ||||
|       getting help in | ||||
|       [#provision help](https://chat.zulip.org/#narrow/channel/21-provision-help) | ||||
|       if you run into any troubles. | ||||
|     - Familiarize yourself with [using the development environment](https://zulip.readthedocs.io/en/latest/development/using.html). | ||||
|     - Go through the [new application feature | ||||
|       tutorial](https://zulip.readthedocs.io/en/latest/tutorials/new-feature-tutorial.html) to get familiar with | ||||
|       how the Zulip codebase is organized and how to find code in it. | ||||
|  | ||||
|   - For the upcoming Flutter-based mobile app: | ||||
|     - Set up a development environment following the instructions in | ||||
|       [the project README](https://github.com/zulip/zulip-flutter). | ||||
|     - Start reading recent commits to see the code we're writing. | ||||
|       Use either a [graphical Git viewer][] like `gitk`, or `git log -p` | ||||
|       with [the "secret" to reading its output][git-log-secret]. | ||||
|     - Pick some of the code that appears in those Git commits and | ||||
|       that looks interesting. Use your IDE to visit that code | ||||
|       and to navigate to related code, reading to see how it works | ||||
|       and how the codebase is organized. | ||||
|  | ||||
| - Read the [Zulip guide to | ||||
|   Git](https://zulip.readthedocs.io/en/latest/git/index.html) if you | ||||
|   are unfamiliar with Git or Zulip's rebase-based Git workflow, | ||||
|   getting help in [#git | ||||
|   help](https://chat.zulip.org/#narrow/channel/44-git-help) if you run | ||||
|   into any troubles. Even Git experts should read the [Zulip-specific | ||||
|   Git tools | ||||
|   page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html). | ||||
|  | ||||
| [graphical Git viewer]: https://zulip.readthedocs.io/en/latest/git/setup.html#get-a-graphical-client | ||||
| [git-log-secret]: https://github.com/zulip/zulip-mobile/blob/main/docs/howto/git.md#git-log-secret | ||||
|  | ||||
| ### Where to look for an issue | ||||
|  | ||||
| Now you're ready to pick your first issue! Zulip has several repositories you | ||||
| can check out, depending on your interests. There are hundreds of open issues in | ||||
| the [main Zulip server and web app | ||||
| repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| alone. | ||||
|  | ||||
| You can look through issues tagged with the "help wanted" label, which is used | ||||
| to indicate the issues that are ready for contributions. Some repositories also | ||||
| use the "good first issue" label to tag issues that are especially approachable | ||||
| for new contributors. | ||||
|  | ||||
| - [Server and web app](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| - Mobile apps: no "help wanted" label, but see the | ||||
|   [project board](https://github.com/orgs/zulip/projects/5/views/4) | ||||
|   for the upcoming Flutter-based app. Look for issues up through the | ||||
|   "Launch" milestone, and that aren't already assigned. | ||||
| - [Desktop app](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| - [Terminal app](https://github.com/zulip/zulip-terminal/issues?q=is%3Aopen+is%3Aissue+label%3A"help+wanted") | ||||
| - [Python API bindings and bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
|  | ||||
| ### Picking an issue to work on | ||||
|  | ||||
| There's a lot to learn while making your first pull request, so start small! | ||||
| Many first contributions have fewer than 10 lines of changes (not counting | ||||
| changes to tests). | ||||
|  | ||||
| We recommend the following process for finding an issue to work on: | ||||
|  | ||||
| 1. Read the description of an issue tagged with the "help wanted" label and make | ||||
|    sure you understand it. | ||||
| 2. If it seems promising, poke around the product | ||||
|    (on [chat.zulip.org](https://chat.zulip.org) or in the development | ||||
|    environment) until you know how the piece being | ||||
|    described fits into the bigger picture. If after some exploration the | ||||
|    description seems confusing or ambiguous, post a question on the GitHub | ||||
|    issue, as others may benefit from the clarification as well. | ||||
| 3. When you find an issue you like, try to get started working on it. See if you | ||||
|    can find the part of the code you'll need to modify (`git grep` is your | ||||
|    friend!) and get some idea of how you'll approach the problem. | ||||
| 4. If you feel lost, that's OK! Go through these steps again with another issue. | ||||
|    There's plenty to work on, and the exploration you do will help you learn | ||||
|    more about the project. | ||||
|  | ||||
| Note that you are _not_ claiming an issue while you are iterating through steps | ||||
| 1-4. _Before you claim an issue_, you should be confident that you will be able to | ||||
| tackle it effectively. | ||||
|  | ||||
| Additional tips for the [main server and web app | ||||
| repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22): | ||||
|  | ||||
| - We especially recommend browsing recently opened issues, as there are more | ||||
|   likely to be easy ones for you to find. | ||||
| - Take a look at issues with the ["good first issue" | ||||
|   label](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22), | ||||
|   as they are especially accessible to new contributors. However, you will | ||||
|   likely find issues without this label that are accessible as well. | ||||
| - All issues are partitioned into areas like | ||||
| * If you're interested in | ||||
|   [mobile](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue), | ||||
|   [desktop](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue), | ||||
|   or | ||||
|   [bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue) | ||||
|   development, check the respective links for open issues, or post in | ||||
|   [#mobile](https://chat.zulip.org/#narrow/stream/48-mobile), | ||||
|   [#desktop](https://chat.zulip.org/#narrow/stream/16-desktop), or | ||||
|   [#integration](https://chat.zulip.org/#narrow/stream/127-integrations). | ||||
| * For the main server and web repository, we recommend browsing | ||||
|   recently opened issues to look for issues you are confident you can | ||||
|   fix correctly in a way that clearly communicates why your changes | ||||
|   are the correct fix.  Our GitHub workflow bot, zulipbot, limits | ||||
|   users who have 0 commits merged to claiming a single issue labeled | ||||
|   with "good first issue" or "help wanted". | ||||
| * We also partition all of our issues in the main repo into areas like | ||||
|   admin, compose, emoji, hotkeys, i18n, onboarding, search, etc. Look | ||||
|   through our [list of labels](https://github.com/zulip/zulip/labels), and | ||||
|   click on some of the `area:` labels to see all the issues related to your | ||||
|   areas of interest. | ||||
| - Avoid issues with the "difficult" label unless you | ||||
|   understand why it is difficult and are highly confident you can resolve the | ||||
|   issue correctly and completely. | ||||
| * If the lists of issues are overwhelming, post in | ||||
|   [#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with a | ||||
|   bit about your background and interests, and we'll help you out. The most | ||||
|   important thing to say is whether you're looking for a backend (Python), | ||||
|   frontend (JavaScript and TypeScript), mobile (React Native), desktop (Electron), | ||||
|   documentation (English) or visual design (JavaScript/TypeScript + CSS) issue, and a | ||||
|   bit about your programming experience and available time. | ||||
|  | ||||
| ### Claiming an issue | ||||
| We also welcome suggestions of features that you feel would be valuable or | ||||
| changes that you feel would make Zulip a better open source project. If you | ||||
| have a new feature you'd like to add, we recommend you start by posting in | ||||
| [#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with the | ||||
| feature idea and the problem that you're hoping to solve. | ||||
|  | ||||
| #### In the main server/web app repository and Zulip Terminal repository | ||||
|  | ||||
| The Zulip server/web app repository | ||||
| ([`zulip/zulip`](https://github.com/zulip/zulip/)) and the Zulip Terminal | ||||
| repository ([`zulip/zulip-terminal`](https://github.com/zulip/zulip-terminal/)) | ||||
| are set up with a GitHub workflow bot called | ||||
| [Zulipbot](https://github.com/zulip/zulipbot), which manages issues and pull | ||||
| requests in order to create a better workflow for Zulip contributors. | ||||
|  | ||||
| To claim an issue in these repositories, simply post a comment that says | ||||
| `@zulipbot claim` to the issue thread. If the issue is tagged with a [help | ||||
| wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| label, Zulipbot will immediately assign the issue to you. | ||||
|  | ||||
| Note that new contributors can only claim one issue until their first pull request is | ||||
| merged. This is to encourage folks to finish ongoing work before starting | ||||
| something new. If you would like to pick up a new issue while waiting for review | ||||
| on an almost-ready pull request, you can post a comment to this effect on the | ||||
| issue you're interested in. | ||||
|  | ||||
| #### In other Zulip repositories | ||||
|  | ||||
| There is no bot for other Zulip repositories | ||||
| ([`zulip/zulip-flutter`](https://github.com/zulip/zulip-flutter/), etc.). If | ||||
| you are interested in claiming an issue in one of these repositories, simply | ||||
| post a comment on the issue thread saying that you'd like to work on it. There | ||||
| is no need to @-mention the issue creator in your comment. | ||||
|  | ||||
| Please follow the same guidelines as described above: find an issue labeled | ||||
| "help wanted", and only pick up one issue at a time to start with. | ||||
| Other notes: | ||||
| * For a first pull request, it's better to aim for a smaller contribution | ||||
|   than a bigger one. Many first contributions have fewer than 10 lines of | ||||
|   changes (not counting changes to tests). | ||||
| * The full list of issues explicitly looking for a contributor can be | ||||
|   found with the | ||||
|   [good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) | ||||
|   and | ||||
|   [help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
|   labels.  Avoid issues with the "difficult" label unless you | ||||
|   understand why it is difficult and are confident you can resolve the | ||||
|   issue correctly and completely.  Issues without one of these labels | ||||
|   are fair game if Tim has written a clear technical design proposal | ||||
|   in the issue, or it is a bug that you can reproduce and you are | ||||
|   confident you can fix the issue correctly. | ||||
| * For most new contributors, there's a lot to learn while making your first | ||||
|   pull request. It's OK if it takes you a while; that's normal! You'll be | ||||
|   able to work a lot faster as you build experience. | ||||
|  | ||||
| ### Working on an issue | ||||
|  | ||||
| You're encouraged to ask questions on how to best implement or debug your | ||||
| changes -- the Zulip maintainers are excited to answer questions to help you | ||||
| stay unblocked and working efficiently. You can ask questions in the [Zulip | ||||
| development community](https://zulip.com/development-community/), or on the | ||||
| GitHub issue or pull request. | ||||
| To work on an issue, claim it by adding a comment with `@zulipbot claim` to | ||||
| the issue thread. [Zulipbot](https://github.com/zulip/zulipbot) is a GitHub | ||||
| workflow bot; it will assign you to the issue and label the issue as "in | ||||
| progress". Some additional notes: | ||||
|  | ||||
| To get early feedback on any UI changes, we encourage you to post screenshots of | ||||
| your work in the [#design | ||||
| stream](https://chat.zulip.org/#narrow/channel/101-design) in the [Zulip | ||||
| development community](https://zulip.com/development-community/) | ||||
| * You can only claim issues with the | ||||
|   [good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) | ||||
|   or | ||||
|   [help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
|   labels. Zulipbot will give you an error if you try to claim an issue | ||||
|   without one of those labels. | ||||
| * You're encouraged to ask questions on how to best implement or debug your | ||||
|   changes -- the Zulip maintainers are excited to answer questions to help | ||||
|   you stay unblocked and working efficiently. You can ask questions on | ||||
|   chat.zulip.org, or on the GitHub issue or pull request. | ||||
| * We encourage early pull requests for work in progress. Prefix the title of | ||||
|   work in progress pull requests with `[WIP]`, and remove the prefix when | ||||
|   you think it might be mergeable and want it to be reviewed. | ||||
| * After updating a PR, add a comment to the GitHub thread mentioning that it | ||||
|   is ready for another review. GitHub only notifies maintainers of the | ||||
|   changes when you post a comment, so if you don't, your PR will likely be | ||||
|   neglected by accident! | ||||
|  | ||||
| For more advice, see [What makes a great Zulip | ||||
| contributor?](#what-makes-a-great-zulip-contributor) below. It's OK if your | ||||
| first issue takes you a while; that's normal! You'll be able to work a lot | ||||
| faster as you build experience. | ||||
| ### And beyond | ||||
|  | ||||
| ### Submitting a pull request | ||||
|  | ||||
| See the [guide on submitting a pull | ||||
| request](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html) | ||||
| for detailed instructions on how to present your proposed changes to Zulip. | ||||
|  | ||||
| The [pull request review process | ||||
| guide](https://zulip.readthedocs.io/en/latest/contributing/review-process.html) | ||||
| explains the stages of review your PR will go through, and offers guidance on | ||||
| how to help the review process move forward. | ||||
|  | ||||
| ### Beyond the first issue | ||||
|  | ||||
| To find a second issue to work on, we recommend looking through issues with the same | ||||
| A great place to look for a second issue is to look for issues with the same | ||||
| `area:` label as the last issue you resolved. You'll be able to reuse the | ||||
| work you did learning how that part of the codebase works. Also, the path to | ||||
| becoming a core developer often involves taking ownership of one of these area | ||||
| labels. | ||||
|  | ||||
| ### Common questions | ||||
|  | ||||
| - **What if somebody is already working on the issue I want to claim?** There | ||||
|   are lots of issues to work on! If somebody else is actively working on the | ||||
|   issue, you can find a different one, or help with | ||||
|   reviewing their work. | ||||
| - **What if somebody else claims an issue while I'm figuring out whether or not to | ||||
|   work on it?** No worries! You can contribute by providing feedback on | ||||
|   their pull request. If you've made good progress in understanding part of the | ||||
|   codebase, you can also find another "help wanted" issue in the same area to | ||||
|   work on. | ||||
| - **What if there is already a pull request for the issue I want to work on?** | ||||
|   Start by reviewing the existing work. If you agree with the approach, you can | ||||
|   use the existing pull request (PR) as a starting point for your contribution. If | ||||
|   you think a different approach is needed, you can post a new PR, with a comment that clearly | ||||
|   explains _why_ you decided to start from scratch. | ||||
| - **What if I ask if someone is still working on an issue, and they don't | ||||
|   respond?** If you don't get a reply within 2-3 days, go ahead and post a comment | ||||
|   that you are working on the issue, and submit a pull request. If the original | ||||
|   assignee ends up submitting a pull request first, no worries! You can help by | ||||
|   providing feedback on their work, or submit your own PR if you think a | ||||
|   different approach is needed (as described above). | ||||
| - **Can I come up with my own feature idea and work on it?** We welcome | ||||
|   suggestions of features or other improvements that you feel would be valuable. If you | ||||
|   have a new feature you'd like to add, you can start a conversation [in our | ||||
|   development community](https://zulip.com/development-community/#where-do-i-send-my-message) | ||||
|   explaining the feature idea and the problem that you're hoping to solve. | ||||
| - **I'm waiting for the next round of review on my PR. Can I pick up | ||||
|   another issue in the meantime?** Someone's first Zulip PR often | ||||
|   requires quite a bit of iteration, so please [make sure your pull | ||||
|   request is reviewable][reviewable-pull-requests] and go through at | ||||
|   least one round of feedback from others before picking up a second | ||||
|   issue. After that, sure! If | ||||
|   [Zulipbot](https://github.com/zulip/zulipbot) does not allow you to | ||||
|   claim an issue, you can post a comment describing the status of your | ||||
|   other work on the issue you're interested in, and asking for the | ||||
|   issue to be assigned to you. Note that addressing feedback on | ||||
|   in-progress PRs should always take priority over starting a new PR. | ||||
| - **I think my PR is done, but it hasn't been merged yet. What's going on?** | ||||
|   1. **Double-check that you have addressed all the feedback**, including any comments | ||||
|      on [Git commit | ||||
|      discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html). | ||||
|   2. If all the feedback has been addressed, did you [leave a | ||||
|      comment](https://zulip.readthedocs.io/en/latest/contributing/review-process.html#how-to-help-move-the-review-process-forward) | ||||
|      explaining that you have done so and **requesting another review**? If not, | ||||
|      it may not be clear to project maintainers or reviewers that your PR is | ||||
|      ready for another look. | ||||
|   3. There may be a pause between initial rounds of review for your PR and final | ||||
|      review by project maintainers. This is normal, and we encourage you to **work | ||||
|      on other issues** while you wait. | ||||
|   4. If you think the PR is ready and haven't seen any updates for a couple | ||||
|      of weeks, it can be helpful to **leave another comment**. Summarize the | ||||
|      overall state of the review process and your work, and indicate that you | ||||
|      are waiting for a review. | ||||
|   5. Finally, **Zulip project maintainers are people too**! They may be busy | ||||
|      with other work, and sometimes they might even take a vacation. ;) It can | ||||
|      occasionally take a few weeks for a PR in the final stages of the review | ||||
|      process to be merged. | ||||
|  | ||||
| [reviewable-pull-requests]: https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html | ||||
|  | ||||
| ## What makes a great Zulip contributor? | ||||
|  | ||||
| Zulip has a lot of experience working with new contributors. In our | ||||
| experience, these are the best predictors of success: | ||||
| Zulip runs a lot of [internship programs](#internship-programs), so we have | ||||
| a lot of experience with new contributors. In our experience, these are the | ||||
| best predictors of success: | ||||
|  | ||||
| - [Asking great questions][great-questions]. It's very hard to answer a general | ||||
|   question like, "How do I do this issue?" When asking for help, explain your | ||||
|   current understanding, including what you've done or tried so far and where | ||||
|   you got stuck. Post tracebacks or other error messages if appropriate. For | ||||
|   more advice, check out [our guide][great-questions]! | ||||
| - Learning and practicing | ||||
|   [Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html). | ||||
| - Submitting carefully tested code. See our [detailed guide on how to review | ||||
|   code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code) | ||||
|   (yours or someone else's). | ||||
| - Posting | ||||
| * Posting good questions. This generally means explaining your current | ||||
|   understanding, saying what you've done or tried so far, and including | ||||
|   tracebacks or other error messages if appropriate. | ||||
| * Learning and practicing | ||||
|   [Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/version-control.html#commit-discipline). | ||||
| * Submitting carefully tested code. This generally means checking your work | ||||
|   through a combination of automated tests and manually clicking around the | ||||
|   UI trying to find bugs in your work. See | ||||
|   [things to look for](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#things-to-look-for) | ||||
|   for additional ideas. | ||||
| * Posting | ||||
|   [screenshots or GIFs](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html) | ||||
|   for frontend changes. | ||||
| - Working to [make your pull requests easy to | ||||
|   review](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html). | ||||
| - Clearly describing what you have implemented and why. For example, if your | ||||
|   implementation differs from the issue description in some way or is a partial | ||||
|   step towards the requirements described in the issue, be sure to call | ||||
|   out those differences. | ||||
| - Being responsive to feedback on pull requests. This means incorporating or | ||||
| * Being responsive to feedback on pull requests. This means incorporating or | ||||
|   responding to all suggested changes, and leaving a note if you won't be | ||||
|   able to address things within a few days. | ||||
| - Being helpful and friendly on the [Zulip community | ||||
|   server](https://zulip.com/development-community/). | ||||
| * Being helpful and friendly on chat.zulip.org. | ||||
|  | ||||
| [great-questions]: https://zulip.readthedocs.io/en/latest/contributing/asking-great-questions.html | ||||
| These are also the main criteria we use to select interns for all of our | ||||
| internship programs. | ||||
|  | ||||
| ## Reporting issues | ||||
|  | ||||
| If you find an easily reproducible bug and/or are experienced in reporting | ||||
| bugs, feel free to just open an issue on the relevant project on GitHub. | ||||
|  | ||||
| If you have a feature request or are not yet sure what the underlying bug | ||||
| is, the best place to post issues is | ||||
| [#issues](https://chat.zulip.org/#narrow/stream/9-issues) (or | ||||
| [#mobile](https://chat.zulip.org/#narrow/stream/48-mobile) or | ||||
| [#desktop](https://chat.zulip.org/#narrow/stream/16-desktop)) on the | ||||
| [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html). | ||||
| This allows us to interactively figure out what is going on, let you know if | ||||
| a similar issue has already been opened, and collect any other information | ||||
| we need. Choose a 2-4 word topic that describes the issue, explain the issue | ||||
| and how to reproduce it if known, your browser/OS if relevant, and a | ||||
| [screenshot or screenGIF](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html) | ||||
| if appropriate. | ||||
|  | ||||
| **Reporting security issues**. Please do not report security issues | ||||
|   publicly, including on public streams on chat.zulip.org. You can email | ||||
|   zulip-security@googlegroups.com. We create a CVE for every security issue. | ||||
|  | ||||
| ## User feedback | ||||
|  | ||||
| @@ -363,67 +227,125 @@ hear about your experience with the product. If you're not sure what to | ||||
| write, here are some questions we're always very curious to know the answer | ||||
| to: | ||||
|  | ||||
| - Evaluation: What is the process by which your organization chose or will | ||||
| * Evaluation: What is the process by which your organization chose or will | ||||
|   choose a group chat product? | ||||
| - Pros and cons: What are the pros and cons of Zulip for your organization, | ||||
| * Pros and cons: What are the pros and cons of Zulip for your organization, | ||||
|   and the pros and cons of other products you are evaluating? | ||||
| - Features: What are the features that are most important for your | ||||
|   organization? In the best-case scenario, what would your chat solution do | ||||
| * Features: What are the features that are most important for your | ||||
|   organization? In the best case scenario, what would your chat solution do | ||||
|   for you? | ||||
| - Onboarding: If you remember it, what was your impression during your first | ||||
| * Onboarding: If you remember it, what was your impression during your first | ||||
|   few minutes of using Zulip? What did you notice, and how did you feel? Was | ||||
|   there anything that stood out to you as confusing, or broken, or great? | ||||
| - Organization: What does your organization do? How big is the organization? | ||||
| * Organization: What does your organization do? How big is the organization? | ||||
|   A link to your organization's website? | ||||
|  | ||||
| You can contact us in the [#feedback stream of the Zulip development | ||||
| community](https://chat.zulip.org/#narrow/channel/137-feedback) or | ||||
| by emailing [support@zulip.com](mailto:support@zulip.com). | ||||
| ## Internship programs | ||||
|  | ||||
| ## Outreach programs | ||||
| Zulip runs internship programs with | ||||
| [Outreachy](https://www.outreachy.org/), | ||||
| [Google Summer of Code (GSoC)](https://developers.google.com/open-source/gsoc/) | ||||
| [1], and the | ||||
| [MIT Externship program](https://alum.mit.edu/students/NetworkwithAlumni/ExternshipProgram), | ||||
| and has in the past taken summer interns from Harvard, MIT, and | ||||
| Stanford. | ||||
|  | ||||
| Zulip regularly participates in [Google Summer of Code | ||||
| (GSoC)](https://developers.google.com/open-source/gsoc/) and | ||||
| [Outreachy](https://www.outreachy.org/). We have been a GSoC mentoring | ||||
| organization since 2016, and we accept 15-20 GSoC participants each summer. In | ||||
| the past, we’ve also participated in [Google | ||||
| Code-In](https://developers.google.com/open-source/gci/), and hosted summer | ||||
| interns from Harvard, MIT, and Stanford. | ||||
| While each third-party program has its own rules and requirements, the | ||||
| Zulip community approaches all of these programs with these ideas in | ||||
| mind: | ||||
| * We try to make the application process as valuable for the applicant as | ||||
|   possible. Expect high quality code reviews, a supportive community, and | ||||
|   publicly viewable patches you can link to from your resume, regardless of | ||||
|   whether you are selected. | ||||
| * To apply, you'll have to submit at least one pull request to a Zulip | ||||
|   repository.  Most students accepted to one of our programs have | ||||
|   several merged pull requests (including at least one larger PR) by | ||||
|   the time of the application deadline. | ||||
| * The main criteria we use are the quality of your best contributions, and | ||||
|   the bullets listed at | ||||
|   [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor). | ||||
|   Because we focus on evaluating your best work, it doesn't hurt your | ||||
|   application to make mistakes in your first few PRs as long as your | ||||
|   work improves. | ||||
|  | ||||
| Check out our [outreach programs | ||||
| overview](https://zulip.readthedocs.io/en/latest/outreach/overview.html) to learn | ||||
| more about participating in an outreach program with Zulip. Most of our program | ||||
| participants end up sticking around the project long-term, and many have become | ||||
| core team members, maintaining important parts of the project. We hope you | ||||
| apply! | ||||
| Zulip also participates in | ||||
| [Google Code-In](https://developers.google.com/open-source/gci/). Our | ||||
| selection criteria for Finalists and Grand Prize Winners is the same as our | ||||
| selection criteria for interns above. | ||||
|  | ||||
| ## Stay connected | ||||
| Most of our interns end up sticking around the project long-term, and many | ||||
| quickly become core team members. We hope you apply! | ||||
|  | ||||
| Even if you are not logging into the development community on a regular basis, | ||||
| you can still stay connected with the project. | ||||
| ### Google Summer of Code | ||||
|  | ||||
| - Follow us [on Twitter](https://twitter.com/zulip). | ||||
| - Subscribe to [our blog](https://blog.zulip.org/). | ||||
| - Join or follow the project [on LinkedIn](https://www.linkedin.com/company/zulip-project/). | ||||
| GSoC is by far the largest of our internship programs (14 students in | ||||
| 2017; 11 in 2018; 17 in 2019).  While we don't control how many slots | ||||
| Google allocates to Zulip, we hope to mentor a similar number of | ||||
| students in future summers. | ||||
|  | ||||
| ## Help others find Zulip | ||||
| If you're reading this well before the application deadline and want | ||||
| to make your application strong, we recommend getting involved in the | ||||
| community and fixing issues in Zulip now. Having good contributions | ||||
|   and building a reputation for doing good work is the best way to have a | ||||
| strong application.  About half of Zulip's GSoC students for Summer | ||||
| 2017 had made significant contributions to the project by February | ||||
| 2017, and about half had not.  Our | ||||
| [GSoC project ideas page][gsoc-guide] has lots more details on how | ||||
| Zulip does GSoC, as well as project ideas (though the project idea | ||||
| list is maintained only during the GSoC application period, so if | ||||
| you're looking at some other time of year, the project list is likely | ||||
| out-of-date). | ||||
|  | ||||
| Here are some ways you can help others find Zulip: | ||||
| We also have in some past years run a Zulip Summer of Code (ZSoC) | ||||
| program for students who we didn't have enough slots to accept for | ||||
| GSoC but were able to find funding for.  Student expectations are the | ||||
| same as with GSoC, and it has no separate application process; your | ||||
| GSoC application is your ZSoC application.  If we'd like to select you | ||||
| for ZSoC, we'll contact you when the GSoC results are announced. | ||||
|  | ||||
| - Star us on GitHub. There are four main repositories: | ||||
| [gsoc-guide]: https://zulip.readthedocs.io/en/latest/overview/gsoc-ideas.html | ||||
| [gsoc-faq]: https://developers.google.com/open-source/gsoc/faq | ||||
|  | ||||
| [1] Formally, [GSoC isn't an internship][gsoc-faq], but it is similar | ||||
| enough that we're treating it as such for the purposes of this | ||||
| documentation. | ||||
|  | ||||
| ## Zulip Outreach | ||||
|  | ||||
| **Upvoting Zulip**. Upvotes and reviews make a big difference in the public | ||||
| perception of projects like Zulip. We've collected a few sites below | ||||
| where we know Zulip has been discussed. Doing everything in the following | ||||
| list typically takes about 15 minutes. | ||||
| * Star us on GitHub. There are four main repositories: | ||||
|   [server/web](https://github.com/zulip/zulip), | ||||
|   [Flutter mobile](https://github.com/zulip/zulip-flutter), | ||||
|   [mobile](https://github.com/zulip/zulip-mobile), | ||||
|   [desktop](https://github.com/zulip/zulip-desktop), and | ||||
|   [Python API](https://github.com/zulip/python-zulip-api). | ||||
| * [Follow us](https://twitter.com/zulip) on Twitter. | ||||
|  | ||||
| - "Like" and retweet [our tweets](https://twitter.com/zulip). | ||||
| For both of the following, you'll need to make an account on the site if you | ||||
| don't already have one. | ||||
|  | ||||
| - Upvote and post feedback on Zulip on comparison websites. A couple specific | ||||
|   ones to highlight: | ||||
| * [Like Zulip](https://alternativeto.net/software/zulip-chat-server/) on | ||||
|   AlternativeTo. We recommend upvoting a couple of other products you like | ||||
|   as well, both to give back to their community, and since single-upvote | ||||
|   accounts are generally given less weight. You can also | ||||
|   [upvote Zulip](https://alternativeto.net/software/slack/) on their page | ||||
|   for Slack. | ||||
| * [Add Zulip to your stack](https://stackshare.io/zulip) on StackShare, star | ||||
|   it, and upvote the reasons why people like Zulip that you find most | ||||
|   compelling. Again, we recommend adding a few other products that you like | ||||
|   as well. | ||||
|  | ||||
|   - [AlternativeTo](https://alternativeto.net/software/zulip-chat-server/). You can also | ||||
|     [upvote Zulip](https://alternativeto.net/software/slack/) on their page | ||||
|     for Slack. | ||||
|   - [Add Zulip to your stack](https://stackshare.io/zulip) on StackShare, star | ||||
|     it, and upvote the reasons why people like Zulip that you find most | ||||
|     compelling. | ||||
| We have a doc with more detailed instructions and a few other sites, if you | ||||
| have been using Zulip for a while and want to contribute more. | ||||
|  | ||||
| **Blog posts**. Writing a blog post about your experiences with Zulip, or | ||||
| about a technical aspect of Zulip can be a great way to spread the word | ||||
| about Zulip. | ||||
|  | ||||
| We also occasionally [publish](http://blog.zulip.org/) longer form | ||||
| articles related to Zulip. Our posts typically get tens of thousands | ||||
| of views, and we always have good ideas for blog posts that we can | ||||
| outline but don't have time to write. If you are an experienced writer | ||||
| or copyeditor, send us a portfolio; we'd love to talk! | ||||
|   | ||||
| @@ -1,25 +1,23 @@ | ||||
| # This is a multiarch Dockerfile.  See https://docs.docker.com/desktop/multi-arch/ | ||||
| # | ||||
| # To set up the first time: | ||||
| #     docker buildx create --name multiarch --use | ||||
| # | ||||
| # To build: | ||||
| #     docker buildx build --platform linux/amd64,linux/arm64 \ | ||||
| #       -f ./Dockerfile-postgresql -t zulip/zulip-postgresql:14 --push . | ||||
| # To build run `docker build -f Dockerfile-postgresql .` from the root of the | ||||
| # zulip repo. | ||||
|  | ||||
| # Currently the PostgreSQL images do not support automatic upgrading of | ||||
| # the on-disk data in volumes. So the base image cannot currently be upgraded | ||||
| # Currently the postgres images do not support automatic upgrading of | ||||
| # the on-disk data in volumes. So the base image can not currently be upgraded | ||||
| # without users needing a manual pgdump and restore. | ||||
|  | ||||
| # https://hub.docker.com/r/groonga/pgroonga/tags | ||||
| ARG PGROONGA_VERSION=latest | ||||
| ARG POSTGRESQL_VERSION=14 | ||||
| FROM groonga/pgroonga:$PGROONGA_VERSION-alpine-$POSTGRESQL_VERSION-slim | ||||
|  | ||||
| # Install hunspell, Zulip stop words, and run Zulip database | ||||
| # Install hunspell, zulip stop words, and run zulip database | ||||
| # init. | ||||
| RUN apk add -U --no-cache hunspell-en | ||||
| RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix | ||||
| COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/local/share/postgresql/tsearch_data/zulip_english.stop | ||||
| FROM postgres:10 | ||||
| COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/share/postgresql/$PG_MAJOR/tsearch_data/zulip_english.stop | ||||
| COPY scripts/setup/create-db.sql /docker-entrypoint-initdb.d/zulip-create-db.sql | ||||
| COPY scripts/setup/create-pgroonga.sql /docker-entrypoint-initdb.d/zulip-create-pgroonga.sql | ||||
| COPY scripts/setup/pgroonga-debian.asc /tmp | ||||
| RUN apt-key add /tmp/pgroonga-debian.asc \ | ||||
|     && echo "deb http://packages.groonga.org/debian/ stretch main" > /etc/apt/sources.list.d/zulip.list \ | ||||
|     && apt-get update \ | ||||
|     && DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \ | ||||
|        hunspell-en-us \ | ||||
|        postgresql-${PG_MAJOR}-pgroonga \ | ||||
|     && ln -sf /var/cache/postgresql/dicts/en_us.dict "/usr/share/postgresql/$PG_MAJOR/tsearch_data/en_us.dict" \ | ||||
|     && ln -sf /var/cache/postgresql/dicts/en_us.affix "/usr/share/postgresql/$PG_MAJOR/tsearch_data/en_us.affix" \ | ||||
|     && rm -rf /var/lib/apt/lists/* | ||||
|   | ||||
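As a usage sketch (not part of the diff): the resulting image behaves like the upstream `postgres` image with PGroonga and the Zulip dictionaries baked in. A hypothetical local test run, with a made-up tag and password, might look like:

```sh
# Sketch only; the tag and password below are illustrative, not official.
docker build -f Dockerfile-postgresql -t zulip/zulip-postgresql:dev .

# Run it like the upstream postgres image; POSTGRES_PASSWORD is required
# by the official postgres entrypoint this image is based on.
docker run --rm -e POSTGRES_PASSWORD=secret -p 5432:5432 \
    zulip/zulip-postgresql:dev
```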
							
								
								
									
LICENSE (1 line changed)
| @@ -1,3 +1,4 @@ | ||||
| Copyright 2011-2018 Dropbox, Inc., Kandra Labs, Inc., and contributors | ||||
|  | ||||
|                                  Apache License | ||||
|                            Version 2.0, January 2004 | ||||
|   | ||||
							
								
								
									
NOTICE (2 changed lines)
							| @@ -1,5 +1,3 @@ | ||||
| Copyright 2012–2015 Dropbox, Inc., 2015–2021 Kandra Labs, Inc., and contributors | ||||
|  | ||||
| Licensed under the Apache License, Version 2.0 (the "License"); | ||||
| you may not use this project except in compliance with the License. | ||||
| You may obtain a copy of the License at | ||||
|   | ||||
							
								
								
									
README.md (120 changed lines)
							| @@ -1,82 +1,82 @@ | ||||
| # Zulip overview | ||||
|  | ||||
| [Zulip](https://zulip.com) is an open-source team collaboration tool with unique | ||||
| [topic-based threading][why-zulip] that combines the best of email and chat to | ||||
| make remote work productive and delightful. Fortune 500 companies, [leading open | ||||
| source projects][rust-case-study], and thousands of other organizations use | ||||
| Zulip every day. Zulip is the only [modern team chat app][features] that is | ||||
| designed for both live and asynchronous conversations. | ||||
| Zulip is a powerful, open source group chat application that combines the | ||||
| immediacy of real-time chat with the productivity benefits of threaded | ||||
| conversations. Zulip is used by open source projects, Fortune 500 companies, | ||||
| large standards bodies, and others who need a real-time chat system that | ||||
| allows users to easily process hundreds or thousands of messages a day. With | ||||
| over 500 contributors merging over 500 commits a month, Zulip is also the | ||||
| largest and fastest growing open source group chat project. | ||||
|  | ||||
| Zulip is built by a distributed community of developers from all around the | ||||
| world, with 74+ people who have each contributed 100+ commits. With | ||||
| over 1000 contributors merging over 500 commits a month, Zulip is the | ||||
| largest and fastest growing open source team chat project. | ||||
|  | ||||
| Come find us on the [development community chat](https://zulip.com/development-community/)! | ||||
|  | ||||
| [](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amain) | ||||
| [](https://codecov.io/gh/zulip/zulip) | ||||
| [](https://circleci.com/gh/zulip/zulip/tree/master) | ||||
| [](https://codecov.io/gh/zulip/zulip/branch/master) | ||||
| [][mypy-coverage] | ||||
| [](https://github.com/astral-sh/ruff) | ||||
| [](https://github.com/prettier/prettier) | ||||
| [](https://github.com/zulip/zulip/releases/latest) | ||||
| [](https://zulip.readthedocs.io/en/latest/) | ||||
| [](https://chat.zulip.org) | ||||
| [](https://twitter.com/zulip) | ||||
| [](https://github.com/sponsors/zulip) | ||||
|  | ||||
| [mypy-coverage]: https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/ | ||||
| [why-zulip]: https://zulip.com/why-zulip/ | ||||
| [rust-case-study]: https://zulip.com/case-studies/rust/ | ||||
| [features]: https://zulip.com/features/ | ||||
|  | ||||
| ## Getting started | ||||
|  | ||||
| - **Contributing code**. Check out our [guide for new | ||||
|   contributors](https://zulip.readthedocs.io/en/latest/contributing/contributing.html) | ||||
|   to get started. We have invested in making Zulip’s code highly | ||||
|   readable, thoughtfully tested, and easy to modify. Beyond that, we | ||||
|   have written an extraordinary 150K words of documentation for Zulip | ||||
|   contributors. | ||||
| Click on the appropriate link below. If nothing seems to apply, | ||||
| join us on the | ||||
| [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html) | ||||
| and tell us what's up! | ||||
|  | ||||
| - **Contributing non-code**. [Report an | ||||
|   issue](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#reporting-issues), | ||||
|   [translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) | ||||
|   Zulip into your language, or [give us | ||||
|   feedback](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#user-feedback). | ||||
|   We'd love to hear from you, whether you've been using Zulip for years, or are just | ||||
|   trying it out for the first time. | ||||
| You might be interested in: | ||||
|  | ||||
| - **Checking Zulip out**. The best way to see Zulip in action is to drop by the | ||||
|   [Zulip community server](https://zulip.com/development-community/). We also | ||||
|   recommend reading about Zulip's [unique | ||||
|   approach](https://zulip.com/why-zulip/) to organizing conversations. | ||||
| * **Contributing code**. Check out our | ||||
|   [guide for new contributors](https://zulip.readthedocs.io/en/latest/overview/contributing.html) | ||||
|   to get started. Zulip prides itself on maintaining a clean and | ||||
|   well-tested codebase, and a stock of hundreds of | ||||
|   [beginner-friendly issues][beginner-friendly]. | ||||
|  | ||||
| - **Running a Zulip server**. Self-host Zulip directly on Ubuntu or Debian | ||||
|   Linux, in [Docker](https://github.com/zulip/docker-zulip), or with prebuilt | ||||
|   images for [Digital Ocean](https://marketplace.digitalocean.com/apps/zulip) and | ||||
|   [Render](https://render.com/docs/deploy-zulip). | ||||
|   Learn more about [self-hosting Zulip](https://zulip.com/self-hosting/). | ||||
| * **Contributing non-code**. | ||||
|   [Report an issue](https://zulip.readthedocs.io/en/latest/overview/contributing.html#reporting-issue), | ||||
|   [translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) Zulip | ||||
|   into your language, | ||||
|   [write](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach) | ||||
|   for the Zulip blog, or | ||||
|   [give us feedback](https://zulip.readthedocs.io/en/latest/overview/contributing.html#user-feedback). We | ||||
|   would love to hear from you, even if you're just trying the product out. | ||||
|  | ||||
| - **Using Zulip without setting up a server**. Learn about [Zulip | ||||
|   Cloud](https://zulip.com/plans/) hosting options. Zulip sponsors free [Zulip | ||||
|   Cloud Standard](https://zulip.com/plans/) for hundreds of worthy | ||||
|   organizations, including [fellow open-source | ||||
|   projects](https://zulip.com/for/open-source/). | ||||
| * **Supporting Zulip**. Advocate for your organization to use Zulip, write a | ||||
|   review in the mobile app stores, or | ||||
|   [upvote Zulip](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach) on | ||||
|   product comparison sites. | ||||
|  | ||||
| - **Participating in [outreach | ||||
|   programs](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#outreach-programs)** | ||||
|   like [Google Summer of Code](https://developers.google.com/open-source/gsoc/) | ||||
|   and [Outreachy](https://www.outreachy.org/). | ||||
| * **Checking Zulip out**. The best way to see Zulip in action is to drop by | ||||
|   the | ||||
|   [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html). We | ||||
|   also recommend reading Zulip for | ||||
|   [open source](https://zulipchat.com/for/open-source/), Zulip for | ||||
|   [companies](https://zulipchat.com/for/companies/), or Zulip for | ||||
|   [working groups and part time communities](https://zulipchat.com/for/working-groups-and-communities/). | ||||
|  | ||||
| - **Supporting Zulip**. Advocate for your organization to use Zulip, become a | ||||
|   [sponsor](https://github.com/sponsors/zulip), write a review in the mobile app | ||||
|   stores, or [help others find | ||||
|   Zulip](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#help-others-find-zulip). | ||||
| * **Running a Zulip server**. Use a preconfigured [Digital Ocean droplet](https://marketplace.digitalocean.com/apps/zulip), | ||||
|   [install Zulip](https://zulip.readthedocs.io/en/stable/production/install.html) | ||||
|   directly, or use Zulip's | ||||
|   experimental [Docker image](https://zulip.readthedocs.io/en/latest/production/deployment.html#zulip-in-docker). | ||||
|   Commercial support is available; see <https://zulipchat.com/plans> for details. | ||||
|  | ||||
| You may also be interested in reading our [blog](https://blog.zulip.org/), and | ||||
| following us on [Twitter](https://twitter.com/zulip) and | ||||
| [LinkedIn](https://www.linkedin.com/company/zulip-project/). | ||||
| * **Using Zulip without setting up a server**. <https://zulipchat.com> offers | ||||
|   free and commercial hosting. | ||||
|  | ||||
| * **Applying for a Zulip internship**. Zulip runs internship programs with | ||||
|   [Outreachy](https://www.outreachy.org/), | ||||
|   [Google Summer of Code](https://developers.google.com/open-source/gsoc/), | ||||
|   and the | ||||
|   [MIT Externship program](https://alum.mit.edu/students/NetworkwithAlumni/ExternshipProgram). Zulip | ||||
|   also participates in | ||||
|   [Google Code-In](https://developers.google.com/open-source/gci/). More | ||||
|   information is available | ||||
|   [here](https://zulip.readthedocs.io/en/latest/overview/contributing.html#internship-programs). | ||||
|  | ||||
| You may also be interested in reading our [blog](http://blog.zulip.org/) or | ||||
| following us on [twitter](https://twitter.com/zulip). | ||||
| Zulip is distributed under the | ||||
| [Apache 2.0](https://github.com/zulip/zulip/blob/main/LICENSE) license. | ||||
| [Apache 2.0](https://github.com/zulip/zulip/blob/master/LICENSE) license. | ||||
|  | ||||
| [beginner-friendly]: https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22 | ||||
|   | ||||
							
								
								
									
SECURITY.md (37 changed lines)
							| @@ -1,37 +0,0 @@ | ||||
| # Security policy | ||||
|  | ||||
| ## Reporting a vulnerability | ||||
|  | ||||
| We love responsible reports of (potential) security issues in Zulip, | ||||
| whether in the latest release or our development branch. | ||||
|  | ||||
| Our security contact is security@zulip.com. Reporters should expect a | ||||
| response within 24 hours. | ||||
|  | ||||
| Please include details on the issue and how you'd like to be credited | ||||
| in our release notes when we publish the fix. | ||||
|  | ||||
| Our [security model][security-model] document may be a helpful | ||||
| resource. | ||||
|  | ||||
| ## Security announcements | ||||
|  | ||||
| We send security announcements to our [announcement mailing | ||||
| list](https://groups.google.com/g/zulip-announce). If you are running | ||||
| Zulip in production, you should subscribe, by clicking "Join group" at | ||||
| the top of that page. | ||||
|  | ||||
| ## Supported versions | ||||
|  | ||||
| Zulip provides security support for the latest major release, in the | ||||
| form of minor security/maintenance releases. | ||||
|  | ||||
| We work hard to make [upgrades][upgrades] reliable, so that there's no | ||||
| reason to run older major releases. | ||||
|  | ||||
| See also our documentation on the [Zulip release | ||||
| lifecycle][release-lifecycle]. | ||||
|  | ||||
| [security-model]: https://zulip.readthedocs.io/en/latest/production/security-model.html | ||||
| [upgrades]: https://zulip.readthedocs.io/en/stable/production/upgrade.html#upgrading-to-a-release | ||||
| [release-lifecycle]: https://zulip.readthedocs.io/en/latest/overview/release-lifecycle.html | ||||
							
								
								
									
Vagrantfile (vendored, 155 changed lines)
							| @@ -1,8 +1,53 @@ | ||||
| # -*- mode: ruby -*- | ||||
|  | ||||
| Vagrant.require_version ">= 2.2.6" | ||||
| VAGRANTFILE_API_VERSION = "2" | ||||
|  | ||||
| def command?(name) | ||||
|   `which #{name} > /dev/null 2>&1` | ||||
|   $?.success? | ||||
| end | ||||
|  | ||||
| if Vagrant::VERSION == "1.8.7" then | ||||
|     path = `which curl` | ||||
|     if path.include?('/opt/vagrant/embedded/bin/curl') then | ||||
|         puts "In Vagrant 1.8.7, curl is broken. Please use Vagrant 2.0.2 "\ | ||||
|              "or run 'sudo rm -f /opt/vagrant/embedded/bin/curl' to fix the "\ | ||||
|              "issue before provisioning. See "\ | ||||
|              "https://github.com/mitchellh/vagrant/issues/7997 "\ | ||||
|              "for reference." | ||||
|         exit | ||||
|     end | ||||
| end | ||||
|  | ||||
| # Workaround: Vagrant removed the atlas.hashicorp.com to | ||||
| # vagrantcloud.com redirect in February 2018. The value of | ||||
| # DEFAULT_SERVER_URL in Vagrant versions less than 1.9.3 is | ||||
| # atlas.hashicorp.com, which means that removal broke the fetching and | ||||
| # updating of boxes (since the old URL doesn't work).  See | ||||
| # https://github.com/hashicorp/vagrant/issues/9442 | ||||
| if Vagrant::DEFAULT_SERVER_URL == "atlas.hashicorp.com" | ||||
|   Vagrant::DEFAULT_SERVER_URL.replace('https://vagrantcloud.com') | ||||
| end | ||||
|  | ||||
| # Monkey patch https://github.com/hashicorp/vagrant/pull/10879 so we | ||||
| # can fall back to another provider if docker is not installed. | ||||
| begin | ||||
|   require Vagrant.source_root.join("plugins", "providers", "docker", "provider") | ||||
| rescue LoadError | ||||
| else | ||||
|   VagrantPlugins::DockerProvider::Provider.class_eval do | ||||
|     method(:usable?).owner == singleton_class or def self.usable?(raise_error=false) | ||||
|       VagrantPlugins::DockerProvider::Driver.new.execute("docker", "version") | ||||
|       true | ||||
|     rescue Vagrant::Errors::CommandUnavailable, VagrantPlugins::DockerProvider::Errors::ExecuteError | ||||
|       raise if raise_error | ||||
|       return false | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | ||||
| Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| | ||||
|  | ||||
| Vagrant.configure("2") do |config| | ||||
|   # The Zulip development environment runs on 9991 on the guest. | ||||
|   host_port = 9991 | ||||
|   http_proxy = https_proxy = no_proxy = nil | ||||
| @@ -13,20 +58,17 @@ Vagrant.configure("2") do |config| | ||||
|   vm_memory = "2048" | ||||
|  | ||||
|   ubuntu_mirror = "" | ||||
|   vboxadd_version = nil | ||||
|  | ||||
|   config.vm.box = "bento/ubuntu-22.04" | ||||
|  | ||||
|   config.vm.synced_folder ".", "/vagrant", disabled: true | ||||
|   config.vm.synced_folder ".", "/srv/zulip", docker_consistency: "z" | ||||
|   config.vm.synced_folder ".", "/srv/zulip" | ||||
|  | ||||
|   vagrant_config_file = ENV["HOME"] + "/.zulip-vagrant-config" | ||||
|   vagrant_config_file = ENV['HOME'] + "/.zulip-vagrant-config" | ||||
|   if File.file?(vagrant_config_file) | ||||
|     IO.foreach(vagrant_config_file) do |line| | ||||
|       line.chomp! | ||||
|       key, value = line.split(nil, 2) | ||||
|       case key | ||||
|       when /^([#;]|$)/ # ignore comments | ||||
|       when /^([#;]|$)/; # ignore comments | ||||
|       when "HTTP_PROXY"; http_proxy = value | ||||
|       when "HTTPS_PROXY"; https_proxy = value | ||||
|       when "NO_PROXY"; no_proxy = value | ||||
| @@ -35,7 +77,6 @@ Vagrant.configure("2") do |config| | ||||
|       when "GUEST_CPUS"; vm_num_cpus = value | ||||
|       when "GUEST_MEMORY_MB"; vm_memory = value | ||||
|       when "UBUNTU_MIRROR"; ubuntu_mirror = value | ||||
|       when "VBOXADD_VERSION"; vboxadd_version = value | ||||
|       end | ||||
|     end | ||||
|   end | ||||
| @@ -53,9 +94,9 @@ Vagrant.configure("2") do |config| | ||||
|   elsif !http_proxy.nil? or !https_proxy.nil? | ||||
|     # This prints twice due to https://github.com/hashicorp/vagrant/issues/7504 | ||||
|     # We haven't figured out a workaround. | ||||
|     puts "You have specified value for proxy in ~/.zulip-vagrant-config file but did not " \ | ||||
|          "install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install " \ | ||||
|          "vagrant-proxyconf` in a terminal.  This error will appear twice." | ||||
|     puts 'You have specified value for proxy in ~/.zulip-vagrant-config file but did not ' \ | ||||
|          'install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install ' \ | ||||
|          'vagrant-proxyconf` in a terminal.  This error will appear twice.' | ||||
|     exit | ||||
|   end | ||||
|  | ||||
| @@ -63,7 +104,6 @@ Vagrant.configure("2") do |config| | ||||
|   config.vm.network "forwarded_port", guest: 9994, host: host_port + 3, host_ip: host_ip_addr | ||||
|   # Specify Docker provider before VirtualBox provider so it's preferred. | ||||
|   config.vm.provider "docker" do |d, override| | ||||
|     override.vm.box = nil | ||||
|     d.build_dir = File.join(__dir__, "tools", "setup", "dev-vagrant-docker") | ||||
|     d.build_args = ["--build-arg", "VAGRANT_UID=#{Process.uid}"] | ||||
|     if !ubuntu_mirror.empty? | ||||
| @@ -74,35 +114,82 @@ Vagrant.configure("2") do |config| | ||||
|   end | ||||
|  | ||||
|   config.vm.provider "virtualbox" do |vb, override| | ||||
|     override.vm.box = "ubuntu/bionic64" | ||||
|     # An unnecessary log file gets generated when running vagrant up for the | ||||
|     # first time with the Ubuntu Bionic box. This looks like it is being | ||||
|     # caused upstream by the base box containing a Vagrantfile with a similar | ||||
|     # line to the one below. | ||||
|     # see https://github.com/hashicorp/vagrant/issues/9425 | ||||
|     vb.customize [ "modifyvm", :id, "--uartmode1", "disconnected" ] | ||||
|     # It's possible we can get away with just 1.5GB; more testing needed | ||||
|     vb.memory = vm_memory | ||||
|     vb.cpus = vm_num_cpus | ||||
|  | ||||
|     if !vboxadd_version.nil? | ||||
|       override.vbguest.installer = Class.new(VagrantVbguest::Installers::Ubuntu) do | ||||
|         define_method(:host_version) do |reload = false| | ||||
|           VagrantVbguest::Version(vboxadd_version) | ||||
|         end | ||||
|       end | ||||
|       override.vbguest.allow_downgrade = true | ||||
|       override.vbguest.iso_path = "https://download.virtualbox.org/virtualbox/#{vboxadd_version}/VBoxGuestAdditions_#{vboxadd_version}.iso" | ||||
|     end | ||||
|   end | ||||
|  | ||||
|   config.vm.provider "hyperv" do |h, override| | ||||
|     h.memory = vm_memory | ||||
|     h.maxmemory = vm_memory | ||||
|     h.cpus = vm_num_cpus | ||||
|   end | ||||
| $provision_script = <<SCRIPT | ||||
| set -x | ||||
| set -e | ||||
| set -o pipefail | ||||
|  | ||||
|   config.vm.provider "parallels" do |prl, override| | ||||
|     prl.memory = vm_memory | ||||
|     prl.cpus = vm_num_cpus | ||||
|   end | ||||
| # Code should go here, rather than tools/provision, only if it is | ||||
| # something that we don't want to happen when running provision in a | ||||
| # development environment not using Vagrant. | ||||
|  | ||||
| # Set the Ubuntu mirror | ||||
| [ ! '#{ubuntu_mirror}' ] || sudo sed -i 's|http://\\(\\w*\\.\\)*archive\\.ubuntu\\.com/ubuntu/\\? |#{ubuntu_mirror} |' /etc/apt/sources.list | ||||
|  | ||||
| # Set the MOTD on the system to have Zulip instructions | ||||
| sudo ln -nsf /srv/zulip/tools/setup/dev-motd /etc/update-motd.d/99-zulip-dev | ||||
| sudo rm -f /etc/update-motd.d/10-help-text | ||||
| sudo dpkg --purge landscape-client landscape-common ubuntu-release-upgrader-core update-manager-core update-notifier-common ubuntu-server | ||||
| sudo dpkg-divert --add --rename /etc/default/motd-news | ||||
| sudo sh -c 'echo ENABLED=0 > /etc/default/motd-news' | ||||
|  | ||||
| # If the host is running SELinux remount the /sys/fs/selinux directory as read only, | ||||
| # needed for apt-get to work. | ||||
| if [ -d "/sys/fs/selinux" ]; then | ||||
|     sudo mount -o remount,ro /sys/fs/selinux | ||||
| fi | ||||
|  | ||||
| # Set default locale, this prevents errors if the user has another locale set. | ||||
| if ! grep -q 'LC_ALL=en_US.UTF-8' /etc/default/locale; then | ||||
|     echo "LC_ALL=en_US.UTF-8" | sudo tee -a /etc/default/locale | ||||
| fi | ||||
|  | ||||
| # Set an environment variable, so that we won't print the virtualenv | ||||
| # shell warning (it'll be wrong, since the shell is dying anyway) | ||||
| export SKIP_VENV_SHELL_WARNING=1 | ||||
|  | ||||
| # End `set -x`, so that the end of provision doesn't look like an error | ||||
| # message after a successful run. | ||||
| set +x | ||||
|  | ||||
| # Check if the zulip directory is writable | ||||
| if [ ! -w /srv/zulip ]; then | ||||
|     echo "The vagrant user is unable to write to the zulip directory." | ||||
|     echo "To fix this, run the following commands on the host machine:" | ||||
|     # sudo is required since our uid is not 1000 | ||||
|     echo '    vagrant halt -f' | ||||
|     echo '    rm -rf /PATH/TO/ZULIP/CLONE/.vagrant' | ||||
|     echo '    sudo chown -R 1000:$(id -g) /PATH/TO/ZULIP/CLONE' | ||||
|     echo "Replace /PATH/TO/ZULIP/CLONE with the path to where zulip code is cloned." | ||||
|     echo "You can resume setting up your vagrant environment by running:" | ||||
|     echo "    vagrant up" | ||||
|     exit 1 | ||||
| fi | ||||
| # Provision the development environment | ||||
| ln -nsf /srv/zulip ~/zulip | ||||
| /srv/zulip/tools/provision | ||||
|  | ||||
| # Run any custom provision hooks the user has configured | ||||
| if [ -f /srv/zulip/tools/custom_provision ]; then | ||||
|     chmod +x /srv/zulip/tools/custom_provision | ||||
|     /srv/zulip/tools/custom_provision | ||||
| fi | ||||
| SCRIPT | ||||
|  | ||||
|   config.vm.provision "shell", | ||||
|     # We want provision to be run with the permissions of the vagrant user. | ||||
|     privileged: false, | ||||
|     path: "tools/setup/vagrant-provision", | ||||
|     env: { "UBUNTU_MIRROR" => ubuntu_mirror } | ||||
|     inline: $provision_script | ||||
| end | ||||
|   | ||||
										
											
(File diff suppressed because it is too large.)
							| @@ -1,21 +1,14 @@ | ||||
| from math import sqrt | ||||
| from random import Random | ||||
| from random import gauss, random, seed | ||||
| from typing import List | ||||
|  | ||||
| from analytics.lib.counts import CountStat | ||||
|  | ||||
|  | ||||
| def generate_time_series_data( | ||||
|     days: int = 100, | ||||
|     business_hours_base: float = 10, | ||||
|     non_business_hours_base: float = 10, | ||||
|     growth: float = 1, | ||||
|     autocorrelation: float = 0, | ||||
|     spikiness: float = 1, | ||||
|     holiday_rate: float = 0, | ||||
|     frequency: str = CountStat.DAY, | ||||
|     partial_sum: bool = False, | ||||
|     random_seed: int = 26, | ||||
| ) -> list[int]: | ||||
| def generate_time_series_data(days: int=100, business_hours_base: float=10, | ||||
|                               non_business_hours_base: float=10, growth: float=1, | ||||
|                               autocorrelation: float=0, spikiness: float=1, | ||||
|                               holiday_rate: float=0, frequency: str=CountStat.DAY, | ||||
|                               partial_sum: bool=False, random_seed: int=26) -> List[int]: | ||||
|     """ | ||||
|     Generate semi-realistic looking time series data for testing analytics graphs. | ||||
|  | ||||
| @@ -35,43 +28,36 @@ def generate_time_series_data( | ||||
|     partial_sum -- If True, return partial sum of the series. | ||||
|     random_seed -- Seed for random number generator. | ||||
|     """ | ||||
|     rng = Random(random_seed) | ||||
|  | ||||
|     if frequency == CountStat.HOUR: | ||||
|         length = days * 24 | ||||
|         length = days*24 | ||||
|         seasonality = [non_business_hours_base] * 24 * 7 | ||||
|         for day in range(5): | ||||
|             for hour in range(8): | ||||
|                 seasonality[24 * day + hour] = business_hours_base | ||||
|         holidays = [] | ||||
|                 seasonality[24*day + hour] = business_hours_base | ||||
|         holidays  = [] | ||||
|         for i in range(days): | ||||
|             holidays.extend([rng.random() < holiday_rate] * 24) | ||||
|             holidays.extend([random() < holiday_rate] * 24) | ||||
|     elif frequency == CountStat.DAY: | ||||
|         length = days | ||||
|         seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [ | ||||
|             24 * non_business_hours_base | ||||
|         ] * 2 | ||||
|         holidays = [rng.random() < holiday_rate for i in range(days)] | ||||
|         seasonality = [8*business_hours_base + 16*non_business_hours_base] * 5 + \ | ||||
|                       [24*non_business_hours_base] * 2 | ||||
|         holidays = [random() < holiday_rate for i in range(days)] | ||||
|     else: | ||||
|         raise AssertionError(f"Unknown frequency: {frequency}") | ||||
|         raise AssertionError("Unknown frequency: %s" % (frequency,)) | ||||
|     if length < 2: | ||||
|         raise AssertionError( | ||||
|             f"Must be generating at least 2 data points. Currently generating {length}" | ||||
|         ) | ||||
|     growth_base = growth ** (1.0 / (length - 1)) | ||||
|         raise AssertionError("Must be generating at least 2 data points. " | ||||
|                              "Currently generating %s" % (length,)) | ||||
|     growth_base = growth ** (1. / (length-1)) | ||||
|     values_no_noise = [seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)] | ||||
|  | ||||
|     noise_scalars = [rng.gauss(0, 1)] | ||||
|     seed(random_seed) | ||||
|     noise_scalars = [gauss(0, 1)] | ||||
|     for i in range(1, length): | ||||
|         noise_scalars.append( | ||||
|             noise_scalars[-1] * autocorrelation + rng.gauss(0, 1) * (1 - autocorrelation) | ||||
|         ) | ||||
|         noise_scalars.append(noise_scalars[-1]*autocorrelation + gauss(0, 1)*(1-autocorrelation)) | ||||
|  | ||||
|     values = [ | ||||
|         0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness) | ||||
|         for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays, strict=False) | ||||
|     ] | ||||
|     values = [0 if holiday else int(v + sqrt(v)*noise_scalar*spikiness) | ||||
|               for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)] | ||||
|     if partial_sum: | ||||
|         for i in range(1, length): | ||||
|             values[i] = values[i - 1] + values[i] | ||||
|             values[i] = values[i-1] + values[i] | ||||
|     return [max(v, 0) for v in values] | ||||
|   | ||||
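As a reading aid (not part of the commits above), here is a minimal usage sketch of the reformatted `generate_time_series_data`, assuming the function lives in `analytics/lib/fixtures.py`; the argument values below are illustrative, not taken from the repository:

```python
# Hypothetical call site for generate_time_series_data; values are illustrative.
from analytics.lib.counts import CountStat
from analytics.lib.fixtures import generate_time_series_data  # assumed module path

# 100 days of daily data: busier on business days, mild overall growth,
# correlated noise, and occasional zeroed-out "holiday" days.
values = generate_time_series_data(
    days=100,
    business_hours_base=20,
    non_business_hours_base=5,
    growth=2,             # series roughly doubles over the full period
    autocorrelation=0.5,  # noise carries over between adjacent points
    spikiness=2,          # larger random spikes
    holiday_rate=0.05,    # ~5% of days are zeroed out as holidays
    frequency=CountStat.DAY,
    random_seed=26,
)
assert len(values) == 100 and all(v >= 0 for v in values)
```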
| @@ -1,16 +1,15 @@ | ||||
| from datetime import datetime, timedelta | ||||
| from typing import List, Optional | ||||
|  | ||||
| from analytics.lib.counts import CountStat | ||||
| from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC | ||||
|  | ||||
|  | ||||
| # If min_length is None, returns end_times from ceiling(start) to floor(end), inclusive. | ||||
| # If min_length is greater than 0, pads the list to the left. | ||||
| # So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22], | ||||
| # and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22] | ||||
| def time_range( | ||||
|     start: datetime, end: datetime, frequency: str, min_length: int | None | ||||
| ) -> list[datetime]: | ||||
| def time_range(start: datetime, end: datetime, frequency: str, | ||||
|                min_length: Optional[int]) -> List[datetime]: | ||||
|     verify_UTC(start) | ||||
|     verify_UTC(end) | ||||
|     if frequency == CountStat.HOUR: | ||||
| @@ -20,14 +19,13 @@ def time_range( | ||||
|         end = floor_to_day(end) | ||||
|         step = timedelta(days=1) | ||||
|     else: | ||||
|         raise AssertionError(f"Unknown frequency: {frequency}") | ||||
|         raise AssertionError("Unknown frequency: %s" % (frequency,)) | ||||
|  | ||||
|     times = [] | ||||
|     if min_length is not None: | ||||
|         start = min(start, end - (min_length - 1) * step) | ||||
|         start = min(start, end - (min_length-1)*step) | ||||
|     current = end | ||||
|     while current >= start: | ||||
|         times.append(current) | ||||
|         current -= step | ||||
|     times.reverse() | ||||
|     return times | ||||
|     return list(reversed(times)) | ||||
|   | ||||
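A small sketch (again not from the commits) of the left-padding behavior described in the comment above `time_range`, assuming the function lives in `analytics/lib/time_utils.py`; the dates mirror the informal Sep 20 / Sep 22 example:

```python
# Illustrative check of time_range's min_length padding; dates are arbitrary UTC days.
from datetime import datetime, timezone

from analytics.lib.counts import CountStat
from analytics.lib.time_utils import time_range  # assumed module path

start = datetime(2013, 9, 20, tzinfo=timezone.utc)
end = datetime(2013, 9, 22, tzinfo=timezone.utc)

# No minimum length: the days from start to end, inclusive.
print(time_range(start, end, CountStat.DAY, None))  # Sep 20, Sep 21, Sep 22

# min_length=5: the result is padded on the left to reach five entries.
print(time_range(start, end, CountStat.DAY, 5))     # Sep 18 through Sep 22
```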
							
								
								
									
analytics/management/commands/analyze_mit.py (new file, 81 lines)
							| @@ -0,0 +1,81 @@ | ||||
| import datetime | ||||
| import logging | ||||
| import time | ||||
| from typing import Any, Dict | ||||
|  | ||||
| from django.core.management.base import BaseCommand, CommandParser | ||||
|  | ||||
| from zerver.lib.timestamp import timestamp_to_datetime | ||||
| from zerver.models import Message, Recipient | ||||
|  | ||||
| def compute_stats(log_level: int) -> None: | ||||
|     logger = logging.getLogger() | ||||
|     logger.setLevel(log_level) | ||||
|  | ||||
|     one_week_ago = timestamp_to_datetime(time.time()) - datetime.timedelta(weeks=1) | ||||
|     mit_query = Message.objects.filter(sender__realm__string_id="zephyr", | ||||
|                                        recipient__type=Recipient.STREAM, | ||||
|                                        date_sent__gt=one_week_ago) | ||||
|     for bot_sender_start in ["imap.", "rcmd.", "sys."]: | ||||
|         mit_query = mit_query.exclude(sender__email__startswith=(bot_sender_start)) | ||||
|     # Filtering for "/" covers tabbott/extra@ and all the daemon/foo bots. | ||||
|     mit_query = mit_query.exclude(sender__email__contains=("/")) | ||||
|     mit_query = mit_query.exclude(sender__email__contains=("aim.com")) | ||||
|     mit_query = mit_query.exclude( | ||||
|         sender__email__in=["rss@mit.edu", "bash@mit.edu", "apache@mit.edu", | ||||
|                            "bitcoin@mit.edu", "lp@mit.edu", "clocks@mit.edu", | ||||
|                            "root@mit.edu", "nagios@mit.edu", | ||||
|                            "www-data|local-realm@mit.edu"]) | ||||
|     user_counts = {}  # type: Dict[str, Dict[str, int]] | ||||
|     for m in mit_query.select_related("sending_client", "sender"): | ||||
|         email = m.sender.email | ||||
|         user_counts.setdefault(email, {}) | ||||
|         user_counts[email].setdefault(m.sending_client.name, 0) | ||||
|         user_counts[email][m.sending_client.name] += 1 | ||||
|  | ||||
|     total_counts = {}  # type: Dict[str, int] | ||||
|     total_user_counts = {}  # type: Dict[str, int] | ||||
|     for email, counts in user_counts.items(): | ||||
|         total_user_counts.setdefault(email, 0) | ||||
|         for client_name, count in counts.items(): | ||||
|             total_counts.setdefault(client_name, 0) | ||||
|             total_counts[client_name] += count | ||||
|             total_user_counts[email] += count | ||||
|  | ||||
|     logging.debug("%40s | %10s | %s" % ("User", "Messages", "Percentage Zulip")) | ||||
|     top_percents = {}  # type: Dict[int, float] | ||||
|     for size in [10, 25, 50, 100, 200, len(total_user_counts.keys())]: | ||||
|         top_percents[size] = 0.0 | ||||
|     for i, email in enumerate(sorted(total_user_counts.keys(), | ||||
|                                      key=lambda x: -total_user_counts[x])): | ||||
|         percent_zulip = round(100 - (user_counts[email].get("zephyr_mirror", 0)) * 100. / | ||||
|                               total_user_counts[email], 1) | ||||
|         for size in top_percents.keys(): | ||||
|             top_percents.setdefault(size, 0) | ||||
|             if i < size: | ||||
|                 top_percents[size] += (percent_zulip * 1.0 / size) | ||||
|  | ||||
|         logging.debug("%40s | %10s | %s%%" % (email, total_user_counts[email], | ||||
|                                               percent_zulip)) | ||||
|  | ||||
|     logging.info("") | ||||
|     for size in sorted(top_percents.keys()): | ||||
|         logging.info("Top %6s | %s%%" % (size, round(top_percents[size], 1))) | ||||
|  | ||||
|     grand_total = sum(total_counts.values()) | ||||
|     print(grand_total) | ||||
|     logging.info("%15s | %s" % ("Client", "Percentage")) | ||||
|     for client in total_counts.keys(): | ||||
|         logging.info("%15s | %s%%" % (client, round(100. * total_counts[client] / grand_total, 1))) | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Compute statistics on MIT Zephyr usage." | ||||
|  | ||||
|     def add_arguments(self, parser: CommandParser) -> None: | ||||
|         parser.add_argument('--verbose', default=False, action='store_true') | ||||
|  | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         level = logging.INFO | ||||
|         if options["verbose"]: | ||||
|             level = logging.DEBUG | ||||
|         compute_stats(level) | ||||
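To make the `percent_zulip` expression in `compute_stats` concrete, a tiny worked example with made-up per-client counts (not data from any realm):

```python
# Made-up numbers illustrating the percent_zulip calculation above: the share of a
# user's messages sent from anything other than the zephyr_mirror bridge.
user_counts = {"zephyr_mirror": 40, "website": 10}  # hypothetical per-client counts
total = sum(user_counts.values())                   # 50 messages in the week
percent_zulip = round(100 - user_counts.get("zephyr_mirror", 0) * 100.0 / total, 1)
print(percent_zulip)  # 20.0 -> 20% of this user's traffic came through Zulip clients
```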
							
								
								
									
analytics/management/commands/analyze_user_activity.py (new file, 56 lines)
							| @@ -0,0 +1,56 @@ | ||||
| import datetime | ||||
| from typing import Any, Dict | ||||
|  | ||||
| from django.core.management.base import BaseCommand, CommandParser | ||||
| from django.utils.timezone import utc | ||||
|  | ||||
| from zerver.lib.statistics import seconds_usage_between | ||||
| from zerver.models import UserProfile | ||||
|  | ||||
| def analyze_activity(options: Dict[str, Any]) -> None: | ||||
|     day_start = datetime.datetime.strptime(options["date"], "%Y-%m-%d").replace(tzinfo=utc) | ||||
|     day_end = day_start + datetime.timedelta(days=options["duration"]) | ||||
|  | ||||
|     user_profile_query = UserProfile.objects.all() | ||||
|     if options["realm"]: | ||||
|         user_profile_query = user_profile_query.filter(realm__string_id=options["realm"]) | ||||
|  | ||||
|     print("Per-user online duration:\n") | ||||
|     total_duration = datetime.timedelta(0) | ||||
|     for user_profile in user_profile_query: | ||||
|         duration = seconds_usage_between(user_profile, day_start, day_end) | ||||
|  | ||||
|         if duration == datetime.timedelta(0): | ||||
|             continue | ||||
|  | ||||
|         total_duration += duration | ||||
|         print("%-*s%s" % (37, user_profile.email, duration,)) | ||||
|  | ||||
|     print("\nTotal Duration:                      %s" % (total_duration,)) | ||||
|     print("\nTotal Duration in minutes:           %s" % (total_duration.total_seconds() / 60.,)) | ||||
|     print("Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,)) | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = """Report analytics of user activity on a per-user and realm basis. | ||||
|  | ||||
| This command aggregates user activity data collected as each user uses Zulip. It attempts | ||||
| to approximate how much each user has been using Zulip per day, measured by recording each 15-minute | ||||
| period in which some activity has occurred (mouse movement or keyboard activity). | ||||
|  | ||||
| It will correctly not count server-initiated reloads in the activity statistics. | ||||
|  | ||||
| The --duration flag controls how many days of usage duration to show. | ||||
|  | ||||
| Usage: ./manage.py analyze_user_activity [--realm=zulip] [--date=2013-09-10] [--duration=1] | ||||
|  | ||||
| By default, if no date is selected, 2013-09-06 is used. If no realm is provided, information | ||||
| is shown for all realms.""" | ||||
|  | ||||
|     def add_arguments(self, parser: CommandParser) -> None: | ||||
|         parser.add_argument('--realm', action='store') | ||||
|         parser.add_argument('--date', action='store', default="2013-09-06") | ||||
|         parser.add_argument('--duration', action='store', default=1, type=int, | ||||
|                             help="How many days to show usage information for") | ||||
|  | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         analyze_activity(options) | ||||
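For the summary lines this command prints, a short sketch of the arithmetic (made-up duration, assuming the default `--duration=1`, i.e. one day of data):

```python
# Illustrative arithmetic for the "Total Duration" summary lines above.
import datetime

total_duration = datetime.timedelta(hours=2, minutes=30)  # hypothetical daily total
minutes_per_day = total_duration.total_seconds() / 60.0             # 150.0 minutes
amortized_per_month = total_duration.total_seconds() * 30.0 / 60.0  # 4500.0 minutes
print(minutes_per_day, amortized_per_month)
```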
| @@ -1,57 +1,58 @@ | ||||
| from dataclasses import dataclass | ||||
| from datetime import timedelta | ||||
| from typing import Any, Literal | ||||
|  | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from typing_extensions import override | ||||
|  | ||||
| from analytics.lib.counts import ALL_COUNT_STATS, CountStat | ||||
| from analytics.models import installation_epoch | ||||
| from scripts.lib.zulip_tools import atomic_nagios_write | ||||
| from zerver.lib.management import ZulipBaseCommand | ||||
| from zerver.lib.timestamp import TimeZoneNotUTCError, floor_to_day, floor_to_hour, verify_UTC | ||||
| from analytics.models import installation_epoch, \ | ||||
|     last_successful_fill | ||||
| from analytics.lib.counts import COUNT_STATS, CountStat | ||||
| from zerver.lib.timestamp import floor_to_hour, floor_to_day, verify_UTC, \ | ||||
|     TimezoneNotUTCException | ||||
| from zerver.models import Realm | ||||
|  | ||||
| import os | ||||
| import time | ||||
| from typing import Any, Dict | ||||
|  | ||||
| states = { | ||||
|     0: "OK", | ||||
|     1: "WARNING", | ||||
|     2: "CRITICAL", | ||||
|     3: "UNKNOWN", | ||||
|     3: "UNKNOWN" | ||||
| } | ||||
|  | ||||
|  | ||||
| @dataclass | ||||
| class NagiosResult: | ||||
|     status: Literal["ok", "warning", "critical", "unknown"] | ||||
|     message: str | ||||
|  | ||||
|  | ||||
| class Command(ZulipBaseCommand): | ||||
| class Command(BaseCommand): | ||||
|     help = """Checks FillState table. | ||||
|  | ||||
|     Run as a cron job that runs every hour.""" | ||||
|  | ||||
|     @override | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         fill_state = self.get_fill_state() | ||||
|         atomic_nagios_write("check-analytics-state", fill_state.status, fill_state.message) | ||||
|         status = fill_state['status'] | ||||
|         message = fill_state['message'] | ||||
|  | ||||
|     def get_fill_state(self) -> NagiosResult: | ||||
|         state_file_path = "/var/lib/nagios_state/check-analytics-state" | ||||
|         state_file_tmp = state_file_path + "-tmp" | ||||
|  | ||||
|         with open(state_file_tmp, "w") as f: | ||||
|             f.write("%s|%s|%s|%s\n" % ( | ||||
|                 int(time.time()), status, states[status], message)) | ||||
|         os.rename(state_file_tmp, state_file_path) | ||||
|  | ||||
|     def get_fill_state(self) -> Dict[str, Any]: | ||||
|         if not Realm.objects.exists(): | ||||
|             return NagiosResult(status="ok", message="No realms exist, so not checking FillState.") | ||||
|             return {'status': 0, 'message': 'No realms exist, so not checking FillState.'} | ||||
|  | ||||
|         warning_unfilled_properties = [] | ||||
|         critical_unfilled_properties = [] | ||||
|         for property, stat in ALL_COUNT_STATS.items(): | ||||
|             last_fill = stat.last_successful_fill() | ||||
|         for property, stat in COUNT_STATS.items(): | ||||
|             last_fill = last_successful_fill(property) | ||||
|             if last_fill is None: | ||||
|                 last_fill = installation_epoch() | ||||
|             try: | ||||
|                 verify_UTC(last_fill) | ||||
|             except TimeZoneNotUTCError: | ||||
|                 return NagiosResult( | ||||
|                     status="critical", message=f"FillState not in UTC for {property}" | ||||
|                 ) | ||||
|             except TimezoneNotUTCException: | ||||
|                 return {'status': 2, 'message': 'FillState not in UTC for %s' % (property,)} | ||||
|  | ||||
|             if stat.frequency == CountStat.DAY: | ||||
|                 floor_function = floor_to_day | ||||
| @@ -63,10 +64,8 @@ class Command(ZulipBaseCommand): | ||||
|                 critical_threshold = timedelta(minutes=150) | ||||
|  | ||||
|             if floor_function(last_fill) != last_fill: | ||||
|                 return NagiosResult( | ||||
|                     status="critical", | ||||
|                     message=f"FillState not on {stat.frequency} boundary for {property}", | ||||
|                 ) | ||||
|                 return {'status': 2, 'message': 'FillState not on %s boundary for %s' % | ||||
|                         (stat.frequency, property)} | ||||
|  | ||||
|             time_to_last_fill = timezone_now() - last_fill | ||||
|             if time_to_last_fill > critical_threshold: | ||||
| @@ -75,18 +74,9 @@ class Command(ZulipBaseCommand): | ||||
|                 warning_unfilled_properties.append(property) | ||||
|  | ||||
|         if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0: | ||||
|             return NagiosResult(status="ok", message="FillState looks fine.") | ||||
|             return {'status': 0, 'message': 'FillState looks fine.'} | ||||
|         if len(critical_unfilled_properties) == 0: | ||||
|             return NagiosResult( | ||||
|                 status="warning", | ||||
|                 message="Missed filling {} once.".format( | ||||
|                     ", ".join(warning_unfilled_properties), | ||||
|                 ), | ||||
|             ) | ||||
|         return NagiosResult( | ||||
|             status="critical", | ||||
|             message="Missed filling {} once. Missed filling {} at least twice.".format( | ||||
|                 ", ".join(warning_unfilled_properties), | ||||
|                 ", ".join(critical_unfilled_properties), | ||||
|             ), | ||||
|         ) | ||||
|             return {'status': 1, 'message': 'Missed filling %s once.' % | ||||
|                     (', '.join(warning_unfilled_properties),)} | ||||
|         return {'status': 2, 'message': 'Missed filling %s once. Missed filling %s at least twice.' % | ||||
|                 (', '.join(warning_unfilled_properties), ', '.join(critical_unfilled_properties))} | ||||
|   | ||||
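For reference, a minimal sketch of the write-to-temp-file-then-rename pattern that the older code above used for the Nagios state file (this is not the repository's `atomic_nagios_write`; the path and values are illustrative):

```python
# Sketch of the tmp-file + os.rename pattern for the Nagios state file. The rename
# is atomic on POSIX, so the monitoring check never reads a half-written file.
import os
import time

STATES = {0: "OK", 1: "WARNING", 2: "CRITICAL", 3: "UNKNOWN"}

def write_nagios_state(path: str, status: int, message: str) -> None:
    tmp_path = path + "-tmp"
    with open(tmp_path, "w") as f:
        # e.g. "1380000000|0|OK|FillState looks fine."
        f.write("%s|%s|%s|%s\n" % (int(time.time()), status, STATES[status], message))
    os.rename(tmp_path, path)

write_nagios_state("/tmp/check-analytics-state", 0, "FillState looks fine.")
```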
| @@ -1,25 +1,20 @@ | ||||
| from argparse import ArgumentParser | ||||
| from typing import Any | ||||
|  | ||||
| from django.core.management.base import CommandError | ||||
| from typing_extensions import override | ||||
| from django.core.management.base import BaseCommand, CommandError | ||||
|  | ||||
| from analytics.lib.counts import do_drop_all_analytics_tables | ||||
| from zerver.lib.management import ZulipBaseCommand | ||||
|  | ||||
|  | ||||
| class Command(ZulipBaseCommand): | ||||
| class Command(BaseCommand): | ||||
|     help = """Clear analytics tables.""" | ||||
|  | ||||
|     @override | ||||
|     def add_arguments(self, parser: ArgumentParser) -> None: | ||||
|         parser.add_argument("--force", action="store_true", help="Clear analytics tables.") | ||||
|         parser.add_argument('--force', | ||||
|                             action='store_true', | ||||
|                             help="Clear analytics tables.") | ||||
|  | ||||
|     @override | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         if options["force"]: | ||||
|         if options['force']: | ||||
|             do_drop_all_analytics_tables() | ||||
|         else: | ||||
|             raise CommandError( | ||||
|                 "Would delete all data from analytics tables (!); use --force to do so." | ||||
|             ) | ||||
|             raise CommandError("Would delete all data from analytics tables (!); use --force to do so.") | ||||
|   | ||||
| @@ -1,27 +1,26 @@ | ||||
| from argparse import ArgumentParser | ||||
| from typing import Any | ||||
|  | ||||
| from django.core.management.base import CommandError | ||||
| from typing_extensions import override | ||||
| from django.core.management.base import BaseCommand, CommandError | ||||
|  | ||||
| from analytics.lib.counts import ALL_COUNT_STATS, do_drop_single_stat | ||||
| from zerver.lib.management import ZulipBaseCommand | ||||
| from analytics.lib.counts import COUNT_STATS, do_drop_single_stat | ||||
|  | ||||
|  | ||||
| class Command(ZulipBaseCommand): | ||||
| class Command(BaseCommand): | ||||
|     help = """Clear analytics tables.""" | ||||
|  | ||||
|     @override | ||||
|     def add_arguments(self, parser: ArgumentParser) -> None: | ||||
|         parser.add_argument("--force", action="store_true", help="Actually do it.") | ||||
|         parser.add_argument("--property", help="The property of the stat to be cleared.") | ||||
|         parser.add_argument('--force', | ||||
|                             action='store_true', | ||||
|                             help="Actually do it.") | ||||
|         parser.add_argument('--property', | ||||
|                             type=str, | ||||
|                             help="The property of the stat to be cleared.") | ||||
|  | ||||
|     @override | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         property = options["property"] | ||||
|         if property not in ALL_COUNT_STATS: | ||||
|             raise CommandError(f"Invalid property: {property}") | ||||
|         if not options["force"]: | ||||
|         property = options['property'] | ||||
|         if property not in COUNT_STATS: | ||||
|             raise CommandError("Invalid property: %s" % (property,)) | ||||
|         if not options['force']: | ||||
|             raise CommandError("No action taken. Use --force.") | ||||
|  | ||||
|         do_drop_single_stat(property) | ||||
|   | ||||
							
								
								
									
analytics/management/commands/client_activity.py (new file, 73 lines)
							| @@ -0,0 +1,73 @@ | ||||
| import datetime | ||||
| from argparse import ArgumentParser | ||||
| from typing import Any, Optional | ||||
|  | ||||
| from django.db.models import Count, QuerySet | ||||
| from django.utils.timezone import now as timezone_now | ||||
|  | ||||
| from zerver.lib.management import ZulipBaseCommand | ||||
| from zerver.models import UserActivity | ||||
|  | ||||
| class Command(ZulipBaseCommand): | ||||
|     help = """Report rough client activity globally, for a realm, or for a user | ||||
|  | ||||
| Usage examples: | ||||
|  | ||||
| ./manage.py client_activity --target server | ||||
| ./manage.py client_activity --target realm --realm zulip | ||||
| ./manage.py client_activity --target user --user hamlet@zulip.com --realm zulip""" | ||||
|  | ||||
|     def add_arguments(self, parser: ArgumentParser) -> None: | ||||
|         parser.add_argument('--target', dest='target', required=True, type=str, | ||||
|                             help="'server' will calculate client activity of the entire server. " | ||||
|                                  "'realm' will calculate client activity of realm. " | ||||
|                                  "'realm' will calculate client activity of the realm. " | ||||
|         parser.add_argument('--user', dest='user', type=str, | ||||
|                             help="The email address of the user whose activity you want to calculate.") | ||||
|         self.add_realm_args(parser) | ||||
|  | ||||
|     def compute_activity(self, user_activity_objects: QuerySet) -> None: | ||||
|         # Report data from the past week. | ||||
|         # | ||||
|         # This is a rough report of client activity because we inconsistently | ||||
|         # register activity from various clients; think of it as telling you | ||||
|         # approximately how many people from a group have used a particular | ||||
|         # client recently. For example, this might be useful to get a sense of | ||||
|         # how popular different versions of a desktop client are. | ||||
|         # | ||||
|         # Importantly, this does NOT tell you anything about the relative | ||||
|         # volumes of requests from clients. | ||||
|         threshold = timezone_now() - datetime.timedelta(days=7) | ||||
|         client_counts = user_activity_objects.filter( | ||||
|             last_visit__gt=threshold).values("client__name").annotate( | ||||
|             count=Count('client__name')) | ||||
|  | ||||
|         total = 0 | ||||
|         counts = [] | ||||
|         for client_type in client_counts: | ||||
|             count = client_type["count"] | ||||
|             client = client_type["client__name"] | ||||
|             total += count | ||||
|             counts.append((count, client)) | ||||
|  | ||||
|         counts.sort() | ||||
|  | ||||
|         for count in counts: | ||||
|             print("%25s %15d" % (count[1], count[0])) | ||||
|         print("Total:", total) | ||||
|  | ||||
|     def handle(self, *args: Any, **options: Optional[str]) -> None: | ||||
|         realm = self.get_realm(options) | ||||
|         if options["user"] is None: | ||||
|             if options["target"] == "server" and realm is None: | ||||
|                 # Report global activity. | ||||
|                 self.compute_activity(UserActivity.objects.all()) | ||||
|             elif options["target"] == "realm" and realm is not None: | ||||
|                 self.compute_activity(UserActivity.objects.filter(user_profile__realm=realm)) | ||||
|             else: | ||||
|                 self.print_help("./manage.py", "client_activity") | ||||
|         elif options["target"] == "user": | ||||
|             user_profile = self.get_user(options["user"], realm) | ||||
|             self.compute_activity(UserActivity.objects.filter(user_profile=user_profile)) | ||||
|         else: | ||||
|             self.print_help("./manage.py", "client_activity") | ||||
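As a plain-Python illustration (made-up rows, not the Django ORM) of the aggregation `compute_activity` performs with `values("client__name").annotate(count=Count(...))`:

```python
# Count recent UserActivity rows per client name, then print smallest counts first,
# mirroring compute_activity's output format.
from collections import Counter

recent_rows = [
    {"client__name": "website"},
    {"client__name": "ZulipMobile"},
    {"client__name": "website"},
]  # hypothetical rows from the past week

counts = Counter(row["client__name"] for row in recent_rows)
for client, count in sorted(counts.items(), key=lambda kv: kv[1]):
    print("%25s %15d" % (client, count))
print("Total:", sum(counts.values()))
```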
| @@ -1,82 +1,46 @@ | ||||
| from collections.abc import Mapping | ||||
| from datetime import timedelta | ||||
| from typing import Any, TypeAlias | ||||
| from typing import Any, Dict, List, Mapping, Optional, Type | ||||
| import mock | ||||
|  | ||||
| from django.core.files.uploadedfile import UploadedFile | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from typing_extensions import override | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS, CountStat, do_drop_all_analytics_tables | ||||
| from analytics.lib.counts import COUNT_STATS, \ | ||||
|     CountStat, do_drop_all_analytics_tables | ||||
| from analytics.lib.fixtures import generate_time_series_data | ||||
| from analytics.lib.time_utils import time_range | ||||
| from analytics.models import ( | ||||
|     BaseCount, | ||||
|     FillState, | ||||
|     InstallationCount, | ||||
|     RealmCount, | ||||
|     StreamCount, | ||||
|     UserCount, | ||||
| ) | ||||
| from zerver.actions.create_realm import do_create_realm | ||||
| from zerver.actions.users import do_change_user_role | ||||
| from analytics.models import BaseCount, FillState, RealmCount, UserCount, \ | ||||
|     StreamCount, InstallationCount | ||||
| from zerver.lib.actions import do_change_is_admin, STREAM_ASSIGNMENT_COLORS | ||||
| from zerver.lib.create_user import create_user | ||||
| from zerver.lib.management import ZulipBaseCommand | ||||
| from zerver.lib.storage import static_path | ||||
| from zerver.lib.stream_color import STREAM_ASSIGNMENT_COLORS | ||||
| from zerver.lib.timestamp import floor_to_day | ||||
| from zerver.lib.upload import upload_message_attachment_from_request | ||||
| from zerver.models import ( | ||||
|     Client, | ||||
|     NamedUserGroup, | ||||
|     Realm, | ||||
|     RealmAuditLog, | ||||
|     Recipient, | ||||
|     Stream, | ||||
|     Subscription, | ||||
|     UserProfile, | ||||
| ) | ||||
| from zerver.models.groups import SystemGroups | ||||
| from zerver.models import Realm, Stream, Client, \ | ||||
|     Recipient, Subscription | ||||
|  | ||||
|  | ||||
| class Command(ZulipBaseCommand): | ||||
| class Command(BaseCommand): | ||||
|     help = """Populates analytics tables with randomly generated data.""" | ||||
|  | ||||
|     DAYS_OF_DATA = 100 | ||||
|     random_seed = 26 | ||||
|  | ||||
|     def generate_fixture_data( | ||||
|         self, | ||||
|         stat: CountStat, | ||||
|         business_hours_base: float, | ||||
|         non_business_hours_base: float, | ||||
|         growth: float, | ||||
|         autocorrelation: float, | ||||
|         spikiness: float, | ||||
|         holiday_rate: float = 0, | ||||
|         partial_sum: bool = False, | ||||
|     ) -> list[int]: | ||||
|     def generate_fixture_data(self, stat: CountStat, business_hours_base: float, | ||||
|                               non_business_hours_base: float, growth: float, | ||||
|                               autocorrelation: float, spikiness: float, | ||||
|                               holiday_rate: float=0, partial_sum: bool=False) -> List[int]: | ||||
|         self.random_seed += 1 | ||||
|         return generate_time_series_data( | ||||
|             days=self.DAYS_OF_DATA, | ||||
|             business_hours_base=business_hours_base, | ||||
|             non_business_hours_base=non_business_hours_base, | ||||
|             growth=growth, | ||||
|             autocorrelation=autocorrelation, | ||||
|             spikiness=spikiness, | ||||
|             holiday_rate=holiday_rate, | ||||
|             frequency=stat.frequency, | ||||
|             partial_sum=partial_sum, | ||||
|             random_seed=self.random_seed, | ||||
|         ) | ||||
|             days=self.DAYS_OF_DATA, business_hours_base=business_hours_base, | ||||
|             non_business_hours_base=non_business_hours_base, growth=growth, | ||||
|             autocorrelation=autocorrelation, spikiness=spikiness, holiday_rate=holiday_rate, | ||||
|             frequency=stat.frequency, partial_sum=partial_sum, random_seed=self.random_seed) | ||||
|  | ||||
|     @override | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         # TODO: This should arguably only delete the objects | ||||
|         # associated with the "analytics" realm. | ||||
|         do_drop_all_analytics_tables() | ||||
|  | ||||
|         # This also deletes any objects with this realm as a foreign key | ||||
|         Realm.objects.filter(string_id="analytics").delete() | ||||
|         Realm.objects.filter(string_id='analytics').delete() | ||||
|  | ||||
|         # Because we just deleted a bunch of objects in the database | ||||
|         # directly (rather than deleting individual objects in Django, | ||||
| @@ -85,268 +49,168 @@ class Command(ZulipBaseCommand): | ||||
|         # memcached in order to ensure deleted objects aren't still | ||||
|         # present in the memcached cache. | ||||
|         from zerver.apps import flush_cache | ||||
|  | ||||
|         flush_cache(None) | ||||
|  | ||||
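|         # Backdate the realm's creation so there are DAYS_OF_DATA days of | ||||
|         # history to fill with fixture data. | ||||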
|         installation_time = timezone_now() - timedelta(days=self.DAYS_OF_DATA) | ||||
|         last_end_time = floor_to_day(timezone_now()) | ||||
|         realm = do_create_realm( | ||||
|             string_id="analytics", name="Analytics", date_created=installation_time | ||||
|         ) | ||||
|  | ||||
|         shylock = create_user( | ||||
|             "shylock@analytics.ds", | ||||
|             "Shylock", | ||||
|             realm, | ||||
|             full_name="Shylock", | ||||
|             role=UserProfile.ROLE_REALM_OWNER, | ||||
|             force_date_joined=installation_time, | ||||
|         ) | ||||
|         do_change_user_role(shylock, UserProfile.ROLE_REALM_OWNER, acting_user=None) | ||||
|  | ||||
|         # Create guest user for set_guest_users_statistic. | ||||
|         create_user( | ||||
|             "bassanio@analytics.ds", | ||||
|             "Bassanio", | ||||
|             realm, | ||||
|             full_name="Bassanio", | ||||
|             role=UserProfile.ROLE_GUEST, | ||||
|             force_date_joined=installation_time, | ||||
|         ) | ||||
|  | ||||
|         administrators_user_group = NamedUserGroup.objects.get( | ||||
|             name=SystemGroups.ADMINISTRATORS, realm=realm, is_system_group=True | ||||
|         ) | ||||
|         stream = Stream.objects.create( | ||||
|             name="all", | ||||
|             realm=realm, | ||||
|             date_created=installation_time, | ||||
|             can_remove_subscribers_group=administrators_user_group, | ||||
|         ) | ||||
|         recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM) | ||||
|         stream.recipient = recipient | ||||
|         stream.save(update_fields=["recipient"]) | ||||
|  | ||||
|         # Subscribe shylock to the stream to avoid invariant failures. | ||||
|         Subscription.objects.create( | ||||
|             recipient=recipient, | ||||
|             user_profile=shylock, | ||||
|             is_user_active=shylock.is_active, | ||||
|             color=STREAM_ASSIGNMENT_COLORS[0], | ||||
|         ) | ||||
|         RealmAuditLog.objects.create( | ||||
|             realm=realm, | ||||
|             modified_user=shylock, | ||||
|             modified_stream=stream, | ||||
|             event_last_message_id=0, | ||||
|             event_type=RealmAuditLog.SUBSCRIPTION_CREATED, | ||||
|             event_time=installation_time, | ||||
|         ) | ||||
|  | ||||
|         # Create an attachment in the database for set_storage_space_used_statistic. | ||||
|         IMAGE_FILE_PATH = static_path("images/test-images/checkbox.png") | ||||
|         with open(IMAGE_FILE_PATH, "rb") as fp: | ||||
|             upload_message_attachment_from_request(UploadedFile(fp), shylock) | ||||
|  | ||||
|         FixtureData: TypeAlias = Mapping[str | int | None, list[int]] | ||||
|  | ||||
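|         # Bulk-insert one row per (subgroup, end_time) pair into the given | ||||
|         # *Count table, skipping zero values to keep the fixtures sparse. | ||||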
|         def insert_fixture_data( | ||||
|             stat: CountStat, | ||||
|             fixture_data: FixtureData, | ||||
|             table: type[BaseCount], | ||||
|         ) -> None: | ||||
|             end_times = time_range( | ||||
|                 last_end_time, last_end_time, stat.frequency, len(next(iter(fixture_data.values()))) | ||||
|             ) | ||||
|             if table == InstallationCount: | ||||
|                 id_args: dict[str, Any] = {} | ||||
|             if table == RealmCount: | ||||
|                 id_args = {"realm": realm} | ||||
|             if table == UserCount: | ||||
|                 id_args = {"realm": realm, "user": shylock} | ||||
|             if table == StreamCount: | ||||
|                 id_args = {"stream": stream, "realm": realm} | ||||
|  | ||||
|             for subgroup, values in fixture_data.items(): | ||||
|                 table._default_manager.bulk_create( | ||||
|                     table( | ||||
|                         property=stat.property, | ||||
|                         subgroup=subgroup, | ||||
|                         end_time=end_time, | ||||
|                         value=value, | ||||
|                         **id_args, | ||||
|                     ) | ||||
|                     for end_time, value in zip(end_times, values, strict=False) | ||||
|                     if value != 0 | ||||
|                 ) | ||||
|  | ||||
|         stat = COUNT_STATS["1day_actives::day"] | ||||
|         realm_data: FixtureData = { | ||||
|             None: self.generate_fixture_data(stat, 0.08, 0.02, 3, 0.3, 6, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data: FixtureData = { | ||||
|             None: self.generate_fixture_data(stat, 0.8, 0.2, 4, 0.3, 6, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["7day_actives::day"] | ||||
|         realm_data = { | ||||
|             None: self.generate_fixture_data(stat, 0.2, 0.07, 3, 0.3, 6, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data = { | ||||
|             None: self.generate_fixture_data(stat, 2, 0.7, 4, 0.3, 6, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         realm_data = { | ||||
|             None: self.generate_fixture_data(stat, 0.8, 0.08, 3, 0.5, 3, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data = { | ||||
|             None: self.generate_fixture_data(stat, 1, 0.3, 4, 0.5, 3, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] | ||||
|         realm_data = { | ||||
|             "false": self.generate_fixture_data(stat, 1, 0.2, 3.5, 0.8, 2, partial_sum=True), | ||||
|             "true": self.generate_fixture_data(stat, 0.3, 0.05, 3, 0.3, 2, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data = { | ||||
|             "false": self.generate_fixture_data(stat, 3, 1, 4, 0.8, 2, partial_sum=True), | ||||
|             "true": self.generate_fixture_data(stat, 1, 0.4, 4, 0.8, 2, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["messages_sent:is_bot:hour"] | ||||
|         user_data: FixtureData = { | ||||
|             "false": self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8, holiday_rate=0.1), | ||||
|         } | ||||
|         insert_fixture_data(stat, user_data, UserCount) | ||||
|         realm_data = { | ||||
|             "false": self.generate_fixture_data(stat, 35, 15, 6, 0.6, 4), | ||||
|             "true": self.generate_fixture_data(stat, 15, 15, 3, 0.4, 2), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data = { | ||||
|             "false": self.generate_fixture_data(stat, 350, 150, 6, 0.6, 4), | ||||
|             "true": self.generate_fixture_data(stat, 150, 150, 3, 0.4, 2), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["messages_sent:message_type:day"] | ||||
|         user_data = { | ||||
|             "public_stream": self.generate_fixture_data(stat, 1.5, 1, 3, 0.6, 8), | ||||
|             "private_message": self.generate_fixture_data(stat, 0.5, 0.3, 1, 0.6, 8), | ||||
|             "huddle_message": self.generate_fixture_data(stat, 0.2, 0.2, 2, 0.6, 8), | ||||
|         } | ||||
|         insert_fixture_data(stat, user_data, UserCount) | ||||
|         realm_data = { | ||||
|             "public_stream": self.generate_fixture_data(stat, 30, 8, 5, 0.6, 4), | ||||
|             "private_stream": self.generate_fixture_data(stat, 7, 7, 5, 0.6, 4), | ||||
|             "private_message": self.generate_fixture_data(stat, 13, 5, 5, 0.6, 4), | ||||
|             "huddle_message": self.generate_fixture_data(stat, 6, 3, 3, 0.6, 4), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data = { | ||||
|             "public_stream": self.generate_fixture_data(stat, 300, 80, 5, 0.6, 4), | ||||
|             "private_stream": self.generate_fixture_data(stat, 70, 70, 5, 0.6, 4), | ||||
|             "private_message": self.generate_fixture_data(stat, 130, 50, 5, 0.6, 4), | ||||
|             "huddle_message": self.generate_fixture_data(stat, 60, 30, 3, 0.6, 4), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         website, created = Client.objects.get_or_create(name="website") | ||||
|         old_desktop, created = Client.objects.get_or_create(name="desktop app Linux 0.3.7") | ||||
|         android, created = Client.objects.get_or_create(name="ZulipAndroid") | ||||
|         iOS, created = Client.objects.get_or_create(name="ZulipiOS") | ||||
|         react_native, created = Client.objects.get_or_create(name="ZulipMobile") | ||||
|         flutter, created = Client.objects.get_or_create(name="ZulipFlutter") | ||||
|         API, created = Client.objects.get_or_create(name="API: Python") | ||||
|         zephyr_mirror, created = Client.objects.get_or_create(name="zephyr_mirror") | ||||
|         unused, created = Client.objects.get_or_create(name="unused") | ||||
|         long_webhook, created = Client.objects.get_or_create(name="ZulipLooooooooooongNameWebhook") | ||||
|  | ||||
|         stat = COUNT_STATS["messages_sent:client:day"] | ||||
|         user_data = { | ||||
|             website.id: self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8), | ||||
|             zephyr_mirror.id: self.generate_fixture_data(stat, 0, 0.3, 1.5, 0.6, 8), | ||||
|         } | ||||
|         insert_fixture_data(stat, user_data, UserCount) | ||||
|         realm_data = { | ||||
|             website.id: self.generate_fixture_data(stat, 30, 20, 5, 0.6, 3), | ||||
|             old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, 0.6, 3), | ||||
|             android.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3), | ||||
|             iOS.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3), | ||||
|             react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3), | ||||
|             flutter.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3), | ||||
|             API.id: self.generate_fixture_data(stat, 5, 5, 5, 0.6, 3), | ||||
|             zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, 0.6, 3), | ||||
|             unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0), | ||||
|             long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data = { | ||||
|             website.id: self.generate_fixture_data(stat, 300, 200, 5, 0.6, 3), | ||||
|             old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, 0.6, 3), | ||||
|             android.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3), | ||||
|             iOS.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3), | ||||
|             flutter.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3), | ||||
|             react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3), | ||||
|             API.id: self.generate_fixture_data(stat, 50, 50, 5, 0.6, 3), | ||||
|             zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, 0.6, 3), | ||||
|             unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0), | ||||
|             long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["messages_in_stream:is_bot:day"] | ||||
|         realm_data = { | ||||
|             "false": self.generate_fixture_data(stat, 30, 5, 6, 0.6, 4), | ||||
|             "true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         stream_data: Mapping[int | str | None, list[int]] = { | ||||
|             "false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4), | ||||
|             "true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2), | ||||
|         } | ||||
|         insert_fixture_data(stat, stream_data, StreamCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["messages_read::hour"] | ||||
|         user_data = { | ||||
|             None: self.generate_fixture_data(stat, 7, 3, 2, 0.6, 8, holiday_rate=0.1), | ||||
|         } | ||||
|         insert_fixture_data(stat, user_data, UserCount) | ||||
|         realm_data = {None: self.generate_fixture_data(stat, 50, 35, 6, 0.6, 4)} | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|   | ||||
							
								
								
									
analytics/management/commands/realm_stats.py (new file, 151 lines)
							| @@ -0,0 +1,151 @@ | ||||
| import datetime | ||||
| from argparse import ArgumentParser | ||||
| from typing import Any, List | ||||
|  | ||||
| from django.core.management.base import BaseCommand, CommandError | ||||
| from django.db.models import Count | ||||
| from django.utils.timezone import now as timezone_now | ||||
|  | ||||
| from zerver.models import Message, Realm, Recipient, Stream, \ | ||||
|     Subscription, UserActivity, UserMessage, UserProfile, get_realm | ||||
|  | ||||
| MOBILE_CLIENT_LIST = ["Android", "ios"] | ||||
| HUMAN_CLIENT_LIST = MOBILE_CLIENT_LIST + ["website"] | ||||
|  | ||||
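| # Messages sent through an interactive client (web or mobile), used below as | ||||
| # a proxy for human (non-API) traffic. | ||||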
| human_messages = Message.objects.filter(sending_client__name__in=HUMAN_CLIENT_LIST) | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Generate statistics on realm activity." | ||||
|  | ||||
|     def add_arguments(self, parser: ArgumentParser) -> None: | ||||
|         parser.add_argument('realms', metavar='<realm>', type=str, nargs='*', | ||||
|                             help="realm to generate statistics for") | ||||
|  | ||||
|     def active_users(self, realm: Realm) -> List[UserProfile]: | ||||
|         # Has been active (on the website, for now) in the last 7 days. | ||||
|         activity_cutoff = timezone_now() - datetime.timedelta(days=7) | ||||
|         return [activity.user_profile for activity in ( | ||||
|             UserActivity.objects.filter(user_profile__realm=realm, | ||||
|                                         user_profile__is_active=True, | ||||
|                                         last_visit__gt=activity_cutoff, | ||||
|                                         query="/json/users/me/pointer", | ||||
|                                         client__name="website"))] | ||||
|  | ||||
|     def messages_sent_by(self, user: UserProfile, days_ago: int) -> int: | ||||
|         sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago) | ||||
|         return human_messages.filter(sender=user, date_sent__gt=sent_time_cutoff).count() | ||||
|  | ||||
|     def total_messages(self, realm: Realm, days_ago: int) -> int: | ||||
|         sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago) | ||||
|         return Message.objects.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).count() | ||||
|  | ||||
|     def human_messages(self, realm: Realm, days_ago: int) -> int: | ||||
|         sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago) | ||||
|         return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).count() | ||||
|  | ||||
|     def api_messages(self, realm: Realm, days_ago: int) -> int: | ||||
|         return (self.total_messages(realm, days_ago) - self.human_messages(realm, days_ago)) | ||||
|  | ||||
|     def stream_messages(self, realm: Realm, days_ago: int) -> int: | ||||
|         sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago) | ||||
|         return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff, | ||||
|                                      recipient__type=Recipient.STREAM).count() | ||||
|  | ||||
|     def private_messages(self, realm: Realm, days_ago: int) -> int: | ||||
|         sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago) | ||||
|         return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).exclude( | ||||
|             recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.HUDDLE).count() | ||||
|  | ||||
|     def group_private_messages(self, realm: Realm, days_ago: int) -> int: | ||||
|         sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago) | ||||
|         return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).exclude( | ||||
|             recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.PERSONAL).count() | ||||
|  | ||||
|     def report_percentage(self, numerator: float, denominator: float, text: str) -> None: | ||||
|         if not denominator: | ||||
|             fraction = 0.0 | ||||
|         else: | ||||
|             fraction = numerator / float(denominator) | ||||
|         print("%.2f%% of" % (fraction * 100,), text) | ||||
|  | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         if options['realms']: | ||||
|             try: | ||||
|                 realms = [get_realm(string_id) for string_id in options['realms']] | ||||
|             except Realm.DoesNotExist as e: | ||||
|                 raise CommandError(e) | ||||
|         else: | ||||
|             realms = Realm.objects.all() | ||||
|  | ||||
|         for realm in realms: | ||||
|             print(realm.string_id) | ||||
|  | ||||
|             user_profiles = UserProfile.objects.filter(realm=realm, is_active=True) | ||||
|             active_users = self.active_users(realm) | ||||
|             num_active = len(active_users) | ||||
|  | ||||
|             print("%d active users (%d total)" % (num_active, len(user_profiles))) | ||||
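|             # Raw SQL join against the subscription/recipient tables so the | ||||
|             # count only includes streams with at least one active subscriber | ||||
|             # (Recipient type 2 is a stream). | ||||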
|             streams = Stream.objects.filter(realm=realm).extra( | ||||
|                 tables=['zerver_subscription', 'zerver_recipient'], | ||||
|                 where=['zerver_subscription.recipient_id = zerver_recipient.id', | ||||
|                        'zerver_recipient.type = 2', | ||||
|                        'zerver_recipient.type_id = zerver_stream.id', | ||||
|                        'zerver_subscription.active = true']).annotate(count=Count("name")) | ||||
|             print("%d streams" % (streams.count(),)) | ||||
|  | ||||
|             for days_ago in (1, 7, 30): | ||||
|                 print("In last %d days, users sent:" % (days_ago,)) | ||||
|                 sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles] | ||||
|                 for quantity in sorted(sender_quantities, reverse=True): | ||||
|                     print(quantity, end=' ') | ||||
|                 print("") | ||||
|  | ||||
|                 print("%d stream messages" % (self.stream_messages(realm, days_ago),)) | ||||
|                 print("%d one-on-one private messages" % (self.private_messages(realm, days_ago),)) | ||||
|                 print("%d messages sent via the API" % (self.api_messages(realm, days_ago),)) | ||||
|                 print("%d group private messages" % (self.group_private_messages(realm, days_ago),)) | ||||
|  | ||||
|             num_notifications_enabled = len([x for x in active_users if x.enable_desktop_notifications]) | ||||
|             self.report_percentage(num_notifications_enabled, num_active, | ||||
|                                    "active users have desktop notifications enabled") | ||||
|  | ||||
|             num_enter_sends = len([x for x in active_users if x.enter_sends]) | ||||
|             self.report_percentage(num_enter_sends, num_active, | ||||
|                                    "active users have enter-sends") | ||||
|  | ||||
|             all_message_count = human_messages.filter(sender__realm=realm).count() | ||||
|             multi_paragraph_message_count = human_messages.filter( | ||||
|                 sender__realm=realm, content__contains="\n\n").count() | ||||
|             self.report_percentage(multi_paragraph_message_count, all_message_count, | ||||
|                                    "all messages are multi-paragraph") | ||||
|  | ||||
|             # Starred messages | ||||
|             starrers = UserMessage.objects.filter(user_profile__in=user_profiles, | ||||
|                                                   flags=UserMessage.flags.starred).values( | ||||
|                 "user_profile").annotate(count=Count("user_profile")) | ||||
|             print("%d users have starred %d messages" % ( | ||||
|                 len(starrers), sum([elt["count"] for elt in starrers]))) | ||||
|  | ||||
|             active_user_subs = Subscription.objects.filter( | ||||
|                 user_profile__in=user_profiles, active=True) | ||||
|  | ||||
|             # Streams not in home view | ||||
|             non_home_view = active_user_subs.filter(is_muted=True).values( | ||||
|                 "user_profile").annotate(count=Count("user_profile")) | ||||
|             print("%d users have %d streams not in home view" % ( | ||||
|                 len(non_home_view), sum([elt["count"] for elt in non_home_view]))) | ||||
|  | ||||
|             # Code block markup | ||||
|             markup_messages = human_messages.filter( | ||||
|                 sender__realm=realm, content__contains="~~~").values( | ||||
|                 "sender").annotate(count=Count("sender")) | ||||
|             print("%d users have used code block markup on %s messages" % ( | ||||
|                 len(markup_messages), sum([elt["count"] for elt in markup_messages]))) | ||||
|  | ||||
|             # Notifications for stream messages | ||||
|             notifications = active_user_subs.filter(desktop_notifications=True).values( | ||||
|                 "user_profile").annotate(count=Count("user_profile")) | ||||
|             print("%d users receive desktop notifications for %d streams" % ( | ||||
|                 len(notifications), sum([elt["count"] for elt in notifications]))) | ||||
|  | ||||
|             print("") | ||||
							
								
								
									
analytics/management/commands/stream_stats.py (new file, 56 lines)
							| @@ -0,0 +1,56 @@ | ||||
| from argparse import ArgumentParser | ||||
| from typing import Any | ||||
|  | ||||
| from django.core.management.base import BaseCommand, CommandError | ||||
| from django.db.models import Q | ||||
|  | ||||
| from zerver.models import Message, Realm, \ | ||||
|     Recipient, Stream, Subscription, get_realm | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Generate statistics on the streams for a realm." | ||||
|  | ||||
|     def add_arguments(self, parser: ArgumentParser) -> None: | ||||
|         parser.add_argument('realms', metavar='<realm>', type=str, nargs='*', | ||||
|                             help="realm to generate statistics for") | ||||
|  | ||||
|     def handle(self, *args: Any, **options: str) -> None: | ||||
|         if options['realms']: | ||||
|             try: | ||||
|                 realms = [get_realm(string_id) for string_id in options['realms']] | ||||
|             except Realm.DoesNotExist as e: | ||||
|                 raise CommandError(e) | ||||
|         else: | ||||
|             realms = Realm.objects.all() | ||||
|  | ||||
|         for realm in realms: | ||||
|             streams = Stream.objects.filter(realm=realm).exclude(Q(name__istartswith="tutorial-")) | ||||
|             # private stream count | ||||
|             private_count = 0 | ||||
|             # public stream count | ||||
|             public_count = 0 | ||||
|             for stream in streams: | ||||
|                 if stream.invite_only: | ||||
|                     private_count += 1 | ||||
|                 else: | ||||
|                     public_count += 1 | ||||
|             print("------------") | ||||
|             print(realm.string_id, end=' ') | ||||
|             print("%10s %d public streams and" % ("(", public_count), end=' ') | ||||
|             print("%d private streams )" % (private_count,)) | ||||
|             print("------------") | ||||
|             print("%25s %15s %10s %12s" % ("stream", "subscribers", "messages", "type")) | ||||
|  | ||||
|             for stream in streams: | ||||
|                 if stream.invite_only: | ||||
|                     stream_type = 'private' | ||||
|                 else: | ||||
|                     stream_type = 'public' | ||||
|                 print("%25s" % (stream.name,), end=' ') | ||||
|                 recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id) | ||||
|                 print("%10d" % (len(Subscription.objects.filter(recipient=recipient, | ||||
|                                                                 active=True)),), end=' ') | ||||
|                 num_messages = len(Message.objects.filter(recipient=recipient)) | ||||
|                 print("%12d" % (num_messages,), end=' ') | ||||
|                 print("%15s" % (stream_type,)) | ||||
|             print("") | ||||
| @@ -1,99 +1,90 @@ | ||||
| import hashlib | ||||
| import time | ||||
| from argparse import ArgumentParser | ||||
| from datetime import timezone | ||||
| from typing import Any | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.utils.dateparse import parse_datetime | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from typing_extensions import override | ||||
|  | ||||
| from analytics.lib.counts import ALL_COUNT_STATS, logger, process_count_stat | ||||
| from zerver.lib.management import ZulipBaseCommand, abort_unless_locked | ||||
| from zerver.lib.remote_server import send_server_data_to_push_bouncer, should_send_analytics_data | ||||
| from zerver.lib.timestamp import floor_to_hour | ||||
| from zerver.models import Realm | ||||
|  | ||||
|  | ||||
| class Command(ZulipBaseCommand): | ||||
|     help = """Fills Analytics tables. | ||||
|  | ||||
|     Run as a cron job that runs every hour.""" | ||||
|  | ||||
|     @override | ||||
|     def add_arguments(self, parser: ArgumentParser) -> None: | ||||
|         parser.add_argument( | ||||
|             "--time", | ||||
|             "-t", | ||||
|             help="Update stat tables from current state to " | ||||
|             "--time. Defaults to the current time.", | ||||
|             default=timezone_now().isoformat(), | ||||
|         ) | ||||
|         parser.add_argument("--utc", action="store_true", help="Interpret --time in UTC.") | ||||
|         parser.add_argument( | ||||
|             "--stat", "-s", help="CountStat to process. If omitted, all stats are processed." | ||||
|         ) | ||||
|         parser.add_argument( | ||||
|             "--verbose", action="store_true", help="Print timing information to stdout." | ||||
|         ) | ||||
|  | ||||
|     @override | ||||
|     @abort_unless_locked | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         self.run_update_analytics_counts(options) | ||||
|  | ||||
|     def run_update_analytics_counts(self, options: dict[str, Any]) -> None: | ||||
|         # installation_epoch relies on there being at least one realm; we | ||||
|         # shouldn't run the analytics code if that condition isn't satisfied | ||||
|         if not Realm.objects.exists(): | ||||
|             logger.info("No realms, stopping update_analytics_counts") | ||||
|             return | ||||
|  | ||||
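|         # Normalize --time: it must be timezone-aware, and is floored to the | ||||
|         # hour before any stats are processed. | ||||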
|         fill_to_time = parse_datetime(options["time"]) | ||||
|         assert fill_to_time is not None | ||||
|         if options["utc"]: | ||||
|             fill_to_time = fill_to_time.replace(tzinfo=timezone.utc) | ||||
|         if fill_to_time.tzinfo is None: | ||||
|             raise ValueError( | ||||
|                 "--time must be time-zone-aware. Maybe you meant to use the --utc option?" | ||||
|             ) | ||||
|  | ||||
|         fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc)) | ||||
|  | ||||
|         if options["stat"] is not None: | ||||
|             stats = [ALL_COUNT_STATS[options["stat"]]] | ||||
|         else: | ||||
|             stats = list(ALL_COUNT_STATS.values()) | ||||
|  | ||||
|         logger.info("Starting updating analytics counts through %s", fill_to_time) | ||||
|         if options["verbose"]: | ||||
|             start = time.time() | ||||
|             last = start | ||||
|  | ||||
|         for stat in stats: | ||||
|             process_count_stat(stat, fill_to_time) | ||||
|             if options["verbose"]: | ||||
|                 print(f"Updated {stat.property} in {time.time() - last:.3f}s") | ||||
|                 last = time.time() | ||||
|  | ||||
|         if options["verbose"]: | ||||
|             print( | ||||
|                 f"Finished updating analytics counts through {fill_to_time} in {time.time() - start:.3f}s" | ||||
|             ) | ||||
|         logger.info("Finished updating analytics counts through %s", fill_to_time) | ||||
|  | ||||
|         if should_send_analytics_data(): | ||||
|             # Based on the specific value of the setting, the exact details to send | ||||
|             # will be decided. However, we proceed just based on this not being falsey. | ||||
|  | ||||
|             # Skew 0-10 minutes based on a hash of settings.ZULIP_ORG_ID, so | ||||
|             # that each server will report in at a somewhat consistent time. | ||||
|             assert settings.ZULIP_ORG_ID | ||||
|             delay = int.from_bytes( | ||||
|                 hashlib.sha256(settings.ZULIP_ORG_ID.encode()).digest(), byteorder="big" | ||||
|             ) % (60 * 10) | ||||
|             logger.info("Sleeping %d seconds before reporting...", delay) | ||||
|             time.sleep(delay) | ||||
|  | ||||
|             send_server_data_to_push_bouncer(consider_usage_statistics=True) | ||||
|   | ||||
							
								
								
									
analytics/management/commands/user_stats.py (new file, 41 lines)
							| @@ -0,0 +1,41 @@ | ||||
| import datetime | ||||
| from argparse import ArgumentParser | ||||
| from typing import Any | ||||
|  | ||||
| from django.core.management.base import BaseCommand, CommandError | ||||
| from django.utils.timezone import now as timezone_now | ||||
|  | ||||
| from zerver.models import Message, Realm, Stream, UserProfile, get_realm | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Generate statistics on user activity." | ||||
|  | ||||
|     def add_arguments(self, parser: ArgumentParser) -> None: | ||||
|         parser.add_argument('realms', metavar='<realm>', type=str, nargs='*', | ||||
|                             help="realm to generate statistics for") | ||||
|  | ||||
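|     # Count the messages this user sent in the 7-day window ending `week` weeks ago. | ||||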
|     def messages_sent_by(self, user: UserProfile, week: int) -> int: | ||||
|         start = timezone_now() - datetime.timedelta(days=(week + 1)*7) | ||||
|         end = timezone_now() - datetime.timedelta(days=week*7) | ||||
|         return Message.objects.filter(sender=user, date_sent__gt=start, date_sent__lte=end).count() | ||||
|  | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         if options['realms']: | ||||
|             try: | ||||
|                 realms = [get_realm(string_id) for string_id in options['realms']] | ||||
|             except Realm.DoesNotExist as e: | ||||
|                 raise CommandError(e) | ||||
|         else: | ||||
|             realms = Realm.objects.all() | ||||
|  | ||||
|         for realm in realms: | ||||
|             print(realm.string_id) | ||||
|             user_profiles = UserProfile.objects.filter(realm=realm, is_active=True) | ||||
|             print("%d users" % (len(user_profiles),)) | ||||
|             print("%d streams" % (len(Stream.objects.filter(realm=realm)),)) | ||||
|  | ||||
|             for user_profile in user_profiles: | ||||
|                 print("%35s" % (user_profile.email,), end=' ') | ||||
|                 for week in range(10): | ||||
|                     print("%5d" % (self.messages_sent_by(user_profile, week),), end=' ') | ||||
|                 print("") | ||||
| @@ -1,208 +1,110 @@ | ||||
| import django.db.models.deletion | ||||
| from django.conf import settings | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("zerver", "0030_realm_org_type"), | ||||
|         migrations.swappable_dependency(settings.AUTH_USER_MODEL), | ||||
|     ] | ||||
|  | ||||
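|     # Initial analytics schema: per-huddle/user/realm/stream/installation | ||||
|     # count tables, plus the Anomaly model they reference. | ||||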
|     operations = [ | ||||
|         migrations.CreateModel( | ||||
|             name="Anomaly", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("info", models.CharField(max_length=1000)), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="HuddleCount", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "huddle", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "user", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("property", models.CharField(max_length=40)), | ||||
|                 ("end_time", models.DateTimeField()), | ||||
|                 ("interval", models.CharField(max_length=20)), | ||||
|                 ("value", models.BigIntegerField()), | ||||
|                 ( | ||||
|                     "anomaly", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="analytics.Anomaly", | ||||
|                         null=True, | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="InstallationCount", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("property", models.CharField(max_length=40)), | ||||
|                 ("end_time", models.DateTimeField()), | ||||
|                 ("interval", models.CharField(max_length=20)), | ||||
|                 ("value", models.BigIntegerField()), | ||||
|                 ( | ||||
|                     "anomaly", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="analytics.Anomaly", | ||||
|                         null=True, | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="RealmCount", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "realm", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("property", models.CharField(max_length=40)), | ||||
|                 ("end_time", models.DateTimeField()), | ||||
|                 ("interval", models.CharField(max_length=20)), | ||||
|                 ("value", models.BigIntegerField()), | ||||
|                 ( | ||||
|                     "anomaly", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="analytics.Anomaly", | ||||
|                         null=True, | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="StreamCount", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "realm", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "stream", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Stream" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("property", models.CharField(max_length=40)), | ||||
|                 ("end_time", models.DateTimeField()), | ||||
|                 ("interval", models.CharField(max_length=20)), | ||||
|                 ("value", models.BigIntegerField()), | ||||
|                 ( | ||||
|                     "anomaly", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="analytics.Anomaly", | ||||
|                         null=True, | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="UserCount", | ||||
|             name='UserCount', | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "realm", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "user", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("property", models.CharField(max_length=40)), | ||||
|                 ("end_time", models.DateTimeField()), | ||||
|                 ("interval", models.CharField(max_length=20)), | ||||
|                 ("value", models.BigIntegerField()), | ||||
|                 ( | ||||
|                     "anomaly", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="analytics.Anomaly", | ||||
|                         null=True, | ||||
|                     ), | ||||
|                 ), | ||||
|                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), | ||||
|                 ('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')), | ||||
|                 ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), | ||||
|                 ('property', models.CharField(max_length=40)), | ||||
|                 ('end_time', models.DateTimeField()), | ||||
|                 ('interval', models.CharField(max_length=20)), | ||||
|                 ('value', models.BigIntegerField()), | ||||
|                 ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="usercount", | ||||
|             unique_together={("user", "property", "end_time", "interval")}, | ||||
|             name='usercount', | ||||
|             unique_together=set([('user', 'property', 'end_time', 'interval')]), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="streamcount", | ||||
|             unique_together={("stream", "property", "end_time", "interval")}, | ||||
|             name='streamcount', | ||||
|             unique_together=set([('stream', 'property', 'end_time', 'interval')]), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="realmcount", | ||||
|             unique_together={("realm", "property", "end_time", "interval")}, | ||||
|             name='realmcount', | ||||
|             unique_together=set([('realm', 'property', 'end_time', 'interval')]), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="installationcount", | ||||
|             unique_together={("property", "end_time", "interval")}, | ||||
|             name='installationcount', | ||||
|             unique_together=set([('property', 'end_time', 'interval')]), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="huddlecount", | ||||
|             unique_together={("huddle", "property", "end_time", "interval")}, | ||||
|             name='huddlecount', | ||||
|             unique_together=set([('huddle', 'property', 'end_time', 'interval')]), | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -1,29 +1,30 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0001_initial"), | ||||
|         ('analytics', '0001_initial'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="huddlecount", | ||||
|             unique_together=set(), | ||||
|             name='huddlecount', | ||||
|             unique_together=set([]), | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="huddlecount", | ||||
|             name="anomaly", | ||||
|             model_name='huddlecount', | ||||
|             name='anomaly', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="huddlecount", | ||||
|             name="huddle", | ||||
|             model_name='huddlecount', | ||||
|             name='huddle', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="huddlecount", | ||||
|             name="user", | ||||
|             model_name='huddlecount', | ||||
|             name='user', | ||||
|         ), | ||||
|         migrations.DeleteModel( | ||||
|             name="HuddleCount", | ||||
|             name='HuddleCount', | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -1,25 +1,21 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0002_remove_huddlecount"), | ||||
|         ('analytics', '0002_remove_huddlecount'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.CreateModel( | ||||
|             name="FillState", | ||||
|             name='FillState', | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("property", models.CharField(unique=True, max_length=40)), | ||||
|                 ("end_time", models.DateTimeField()), | ||||
|                 ("state", models.PositiveSmallIntegerField()), | ||||
|                 ("last_modified", models.DateTimeField(auto_now=True)), | ||||
|                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), | ||||
|                 ('property', models.CharField(unique=True, max_length=40)), | ||||
|                 ('end_time', models.DateTimeField()), | ||||
|                 ('state', models.PositiveSmallIntegerField()), | ||||
|                 ('last_modified', models.DateTimeField(auto_now=True)), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|   | ||||
| @@ -1,30 +1,31 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0003_fillstate"), | ||||
|         ('analytics', '0003_fillstate'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="installationcount", | ||||
|             name="subgroup", | ||||
|             model_name='installationcount', | ||||
|             name='subgroup', | ||||
|             field=models.CharField(max_length=16, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="realmcount", | ||||
|             name="subgroup", | ||||
|             model_name='realmcount', | ||||
|             name='subgroup', | ||||
|             field=models.CharField(max_length=16, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="streamcount", | ||||
|             name="subgroup", | ||||
|             model_name='streamcount', | ||||
|             name='subgroup', | ||||
|             field=models.CharField(max_length=16, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="usercount", | ||||
|             name="subgroup", | ||||
|             model_name='usercount', | ||||
|             name='subgroup', | ||||
|             field=models.CharField(max_length=16, null=True), | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -1,50 +1,51 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0004_add_subgroup"), | ||||
|         ('analytics', '0004_add_subgroup'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name="installationcount", | ||||
|             name="interval", | ||||
|             model_name='installationcount', | ||||
|             name='interval', | ||||
|             field=models.CharField(max_length=8), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="installationcount", | ||||
|             name="property", | ||||
|             model_name='installationcount', | ||||
|             name='property', | ||||
|             field=models.CharField(max_length=32), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="realmcount", | ||||
|             name="interval", | ||||
|             model_name='realmcount', | ||||
|             name='interval', | ||||
|             field=models.CharField(max_length=8), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="realmcount", | ||||
|             name="property", | ||||
|             model_name='realmcount', | ||||
|             name='property', | ||||
|             field=models.CharField(max_length=32), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="streamcount", | ||||
|             name="interval", | ||||
|             model_name='streamcount', | ||||
|             name='interval', | ||||
|             field=models.CharField(max_length=8), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="streamcount", | ||||
|             name="property", | ||||
|             model_name='streamcount', | ||||
|             name='property', | ||||
|             field=models.CharField(max_length=32), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="usercount", | ||||
|             name="interval", | ||||
|             model_name='usercount', | ||||
|             name='interval', | ||||
|             field=models.CharField(max_length=8), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="usercount", | ||||
|             name="property", | ||||
|             model_name='usercount', | ||||
|             name='property', | ||||
|             field=models.CharField(max_length=32), | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -1,26 +1,27 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0005_alter_field_size"), | ||||
|         ('analytics', '0005_alter_field_size'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="installationcount", | ||||
|             unique_together={("property", "subgroup", "end_time", "interval")}, | ||||
|             name='installationcount', | ||||
|             unique_together=set([('property', 'subgroup', 'end_time', 'interval')]), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="realmcount", | ||||
|             unique_together={("realm", "property", "subgroup", "end_time", "interval")}, | ||||
|             name='realmcount', | ||||
|             unique_together=set([('realm', 'property', 'subgroup', 'end_time', 'interval')]), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="streamcount", | ||||
|             unique_together={("stream", "property", "subgroup", "end_time", "interval")}, | ||||
|             name='streamcount', | ||||
|             unique_together=set([('stream', 'property', 'subgroup', 'end_time', 'interval')]), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="usercount", | ||||
|             unique_together={("user", "property", "subgroup", "end_time", "interval")}, | ||||
|             name='usercount', | ||||
|             unique_together=set([('user', 'property', 'subgroup', 'end_time', 'interval')]), | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -1,43 +1,44 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| # Generated by Django 1.10.4 on 2017-01-16 20:50 | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0006_add_subgroup_to_unique_constraints"), | ||||
|         ('analytics', '0006_add_subgroup_to_unique_constraints'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="installationcount", | ||||
|             unique_together={("property", "subgroup", "end_time")}, | ||||
|             name='installationcount', | ||||
|             unique_together=set([('property', 'subgroup', 'end_time')]), | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="installationcount", | ||||
|             name="interval", | ||||
|             model_name='installationcount', | ||||
|             name='interval', | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="realmcount", | ||||
|             unique_together={("realm", "property", "subgroup", "end_time")}, | ||||
|             name='realmcount', | ||||
|             unique_together=set([('realm', 'property', 'subgroup', 'end_time')]), | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="realmcount", | ||||
|             name="interval", | ||||
|             model_name='realmcount', | ||||
|             name='interval', | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="streamcount", | ||||
|             unique_together={("stream", "property", "subgroup", "end_time")}, | ||||
|             name='streamcount', | ||||
|             unique_together=set([('stream', 'property', 'subgroup', 'end_time')]), | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="streamcount", | ||||
|             name="interval", | ||||
|             model_name='streamcount', | ||||
|             name='interval', | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="usercount", | ||||
|             unique_together={("user", "property", "subgroup", "end_time")}, | ||||
|             name='usercount', | ||||
|             unique_together=set([('user', 'property', 'subgroup', 'end_time')]), | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="usercount", | ||||
|             name="interval", | ||||
|             model_name='usercount', | ||||
|             name='interval', | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -1,33 +1,25 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| # Generated by Django 1.10.5 on 2017-02-01 22:28 | ||||
| from django.db import migrations, models | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("zerver", "0050_userprofile_avatar_version"), | ||||
|         ("analytics", "0007_remove_interval"), | ||||
|         ('zerver', '0050_userprofile_avatar_version'), | ||||
|         ('analytics', '0007_remove_interval'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddIndex( | ||||
|             model_name="realmcount", | ||||
|             index=models.Index( | ||||
|                 fields=["property", "end_time"], | ||||
|                 name="analytics_realmcount_property_end_time_3b60396b_idx", | ||||
|             ), | ||||
|         migrations.AlterIndexTogether( | ||||
|             name='realmcount', | ||||
|             index_together=set([('property', 'end_time')]), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="streamcount", | ||||
|             index=models.Index( | ||||
|                 fields=["property", "realm", "end_time"], | ||||
|                 name="analytics_streamcount_property_realm_id_end_time_155ae930_idx", | ||||
|             ), | ||||
|         migrations.AlterIndexTogether( | ||||
|             name='streamcount', | ||||
|             index_together=set([('property', 'realm', 'end_time')]), | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="usercount", | ||||
|             index=models.Index( | ||||
|                 fields=["property", "realm", "end_time"], | ||||
|                 name="analytics_usercount_property_realm_id_end_time_591dbec1_idx", | ||||
|             ), | ||||
|         migrations.AlterIndexTogether( | ||||
|             name='usercount', | ||||
|             index_together=set([('property', 'realm', 'end_time')]), | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -1,28 +1,26 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| from django.db import migrations | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
| from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor | ||||
| from django.db.migrations.state import StateApps | ||||
|  | ||||
| def delete_messages_sent_to_stream_stat(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: | ||||
|     UserCount = apps.get_model('analytics', 'UserCount') | ||||
|     StreamCount = apps.get_model('analytics', 'StreamCount') | ||||
|     RealmCount = apps.get_model('analytics', 'RealmCount') | ||||
|     InstallationCount = apps.get_model('analytics', 'InstallationCount') | ||||
|     FillState = apps.get_model('analytics', 'FillState') | ||||
|  | ||||
| def delete_messages_sent_to_stream_stat( | ||||
|     apps: StateApps, schema_editor: BaseDatabaseSchemaEditor | ||||
| ) -> None: | ||||
|     UserCount = apps.get_model("analytics", "UserCount") | ||||
|     StreamCount = apps.get_model("analytics", "StreamCount") | ||||
|     RealmCount = apps.get_model("analytics", "RealmCount") | ||||
|     InstallationCount = apps.get_model("analytics", "InstallationCount") | ||||
|     FillState = apps.get_model("analytics", "FillState") | ||||
|  | ||||
|     property = "messages_sent_to_stream:is_bot" | ||||
|     property = 'messages_sent_to_stream:is_bot' | ||||
|     UserCount.objects.filter(property=property).delete() | ||||
|     StreamCount.objects.filter(property=property).delete() | ||||
|     RealmCount.objects.filter(property=property).delete() | ||||
|     InstallationCount.objects.filter(property=property).delete() | ||||
|     FillState.objects.filter(property=property).delete() | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0008_add_count_indexes"), | ||||
|         ('analytics', '0008_add_count_indexes'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|   | ||||
| @@ -1,27 +1,25 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| from django.db import migrations | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
| from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor | ||||
| from django.db.migrations.state import StateApps | ||||
|  | ||||
| def clear_message_sent_by_message_type_values(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: | ||||
|     UserCount = apps.get_model('analytics', 'UserCount') | ||||
|     StreamCount = apps.get_model('analytics', 'StreamCount') | ||||
|     RealmCount = apps.get_model('analytics', 'RealmCount') | ||||
|     InstallationCount = apps.get_model('analytics', 'InstallationCount') | ||||
|     FillState = apps.get_model('analytics', 'FillState') | ||||
|  | ||||
| def clear_message_sent_by_message_type_values( | ||||
|     apps: StateApps, schema_editor: BaseDatabaseSchemaEditor | ||||
| ) -> None: | ||||
|     UserCount = apps.get_model("analytics", "UserCount") | ||||
|     StreamCount = apps.get_model("analytics", "StreamCount") | ||||
|     RealmCount = apps.get_model("analytics", "RealmCount") | ||||
|     InstallationCount = apps.get_model("analytics", "InstallationCount") | ||||
|     FillState = apps.get_model("analytics", "FillState") | ||||
|  | ||||
|     property = "messages_sent:message_type:day" | ||||
|     property = 'messages_sent:message_type:day' | ||||
|     UserCount.objects.filter(property=property).delete() | ||||
|     StreamCount.objects.filter(property=property).delete() | ||||
|     RealmCount.objects.filter(property=property).delete() | ||||
|     InstallationCount.objects.filter(property=property).delete() | ||||
|     FillState.objects.filter(property=property).delete() | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     dependencies = [("analytics", "0009_remove_messages_to_stream_stat")] | ||||
|  | ||||
|     dependencies = [('analytics', '0009_remove_messages_to_stream_stat')] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunPython(clear_message_sent_by_message_type_values), | ||||
|   | ||||
| @@ -1,14 +1,14 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| from django.db import migrations | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
| from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor | ||||
| from django.db.migrations.state import StateApps | ||||
|  | ||||
|  | ||||
| def clear_analytics_tables(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None: | ||||
|     UserCount = apps.get_model("analytics", "UserCount") | ||||
|     StreamCount = apps.get_model("analytics", "StreamCount") | ||||
|     RealmCount = apps.get_model("analytics", "RealmCount") | ||||
|     InstallationCount = apps.get_model("analytics", "InstallationCount") | ||||
|     FillState = apps.get_model("analytics", "FillState") | ||||
| def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: | ||||
|     UserCount = apps.get_model('analytics', 'UserCount') | ||||
|     StreamCount = apps.get_model('analytics', 'StreamCount') | ||||
|     RealmCount = apps.get_model('analytics', 'RealmCount') | ||||
|     InstallationCount = apps.get_model('analytics', 'InstallationCount') | ||||
|     FillState = apps.get_model('analytics', 'FillState') | ||||
|  | ||||
|     UserCount.objects.all().delete() | ||||
|     StreamCount.objects.all().delete() | ||||
| @@ -16,10 +16,10 @@ def clear_analytics_tables(apps: StateApps, schema_editor: BaseDatabaseSchemaEdi | ||||
|     InstallationCount.objects.all().delete() | ||||
|     FillState.objects.all().delete() | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0010_clear_messages_sent_values"), | ||||
|         ('analytics', '0010_clear_messages_sent_values'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|   | ||||
| @@ -1,41 +1,36 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| # Generated by Django 1.11.6 on 2018-01-29 08:14 | ||||
| from __future__ import unicode_literals | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
| import django.db.models.deletion | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0011_clear_analytics_tables"), | ||||
|         ('analytics', '0011_clear_analytics_tables'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name="installationcount", | ||||
|             name="anomaly", | ||||
|             field=models.ForeignKey( | ||||
|                 null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly" | ||||
|             ), | ||||
|             model_name='installationcount', | ||||
|             name='anomaly', | ||||
|             field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="realmcount", | ||||
|             name="anomaly", | ||||
|             field=models.ForeignKey( | ||||
|                 null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly" | ||||
|             ), | ||||
|             model_name='realmcount', | ||||
|             name='anomaly', | ||||
|             field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="streamcount", | ||||
|             name="anomaly", | ||||
|             field=models.ForeignKey( | ||||
|                 null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly" | ||||
|             ), | ||||
|             model_name='streamcount', | ||||
|             name='anomaly', | ||||
|             field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="usercount", | ||||
|             name="anomaly", | ||||
|             field=models.ForeignKey( | ||||
|                 null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly" | ||||
|             ), | ||||
|             model_name='usercount', | ||||
|             name='anomaly', | ||||
|             field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'), | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -1,31 +1,34 @@ | ||||
| # -*- coding: utf-8 -*- | ||||
| # Generated by Django 1.11.18 on 2019-02-02 02:47 | ||||
| from __future__ import unicode_literals | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0012_add_on_delete"), | ||||
|         ('analytics', '0012_add_on_delete'), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name="installationcount", | ||||
|             name="anomaly", | ||||
|             model_name='installationcount', | ||||
|             name='anomaly', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="realmcount", | ||||
|             name="anomaly", | ||||
|             model_name='realmcount', | ||||
|             name='anomaly', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="streamcount", | ||||
|             name="anomaly", | ||||
|             model_name='streamcount', | ||||
|             name='anomaly', | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="usercount", | ||||
|             name="anomaly", | ||||
|             model_name='usercount', | ||||
|             name='anomaly', | ||||
|         ), | ||||
|         migrations.DeleteModel( | ||||
|             name="Anomaly", | ||||
|             name='Anomaly', | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -1,16 +0,0 @@ | ||||
| # Generated by Django 1.11.26 on 2020-01-27 04:32 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     dependencies = [ | ||||
|         ("analytics", "0013_remove_anomaly"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name="fillstate", | ||||
|             name="last_modified", | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,64 +0,0 @@ | ||||
| from django.db import migrations | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
| from django.db.migrations.state import StateApps | ||||
| from django.db.models import Count, Sum | ||||
|  | ||||
|  | ||||
| def clear_duplicate_counts(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None: | ||||
|     """This is a preparatory migration for our Analytics tables. | ||||
|  | ||||
|     The backstory is that Django's unique_together indexes do not properly | ||||
|     handle the subgroup=None corner case (allowing duplicate rows that have a | ||||
|     subgroup of None), which meant that in race conditions, rather than updating | ||||
|     an existing row for the property/(realm, stream, user)/time with subgroup=None, Django would | ||||
|     create a duplicate row. | ||||
|  | ||||
|     In the next migration, we'll add a proper constraint to fix this bug, but | ||||
|     we need to fix any existing problematic rows before we can add that constraint. | ||||
|  | ||||
|     We fix this in an appropriate fashion for each type of CountStat object; mainly | ||||
|     this means deleting the extra rows, but for LoggingCountStat objects, we need to | ||||
|     additionally combine the sums. | ||||
|     """ | ||||
|     count_tables = dict( | ||||
|         realm=apps.get_model("analytics", "RealmCount"), | ||||
|         user=apps.get_model("analytics", "UserCount"), | ||||
|         stream=apps.get_model("analytics", "StreamCount"), | ||||
|         installation=apps.get_model("analytics", "InstallationCount"), | ||||
|     ) | ||||
|  | ||||
|     for name, count_table in count_tables.items(): | ||||
|         value = [name, "property", "end_time"] | ||||
|         if name == "installation": | ||||
|             value = ["property", "end_time"] | ||||
|         counts = ( | ||||
|             count_table.objects.filter(subgroup=None) | ||||
|             .values(*value) | ||||
|             .annotate(Count("id"), Sum("value")) | ||||
|             .filter(id__count__gt=1) | ||||
|         ) | ||||
|  | ||||
|         for count in counts: | ||||
|             count.pop("id__count") | ||||
|             total_value = count.pop("value__sum") | ||||
|             duplicate_counts = list(count_table.objects.filter(**count)) | ||||
|             first_count = duplicate_counts[0] | ||||
|             if count["property"] in ["invites_sent::day", "active_users_log:is_bot:day"]: | ||||
|                 # For LoggingCountStat objects, the right fix is to combine the totals; | ||||
|                 # for other CountStat objects, we expect the duplicates to have the same value. | ||||
|                 # And so all we need to do is delete them. | ||||
|                 first_count.value = total_value | ||||
|                 first_count.save() | ||||
|             to_cleanup = duplicate_counts[1:] | ||||
|             for duplicate_count in to_cleanup: | ||||
|                 duplicate_count.delete() | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     dependencies = [ | ||||
|         ("analytics", "0014_remove_fillstate_last_modified"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunPython(clear_duplicate_counts, reverse_code=migrations.RunPython.noop), | ||||
|     ] | ||||
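The docstring in clear_duplicate_counts above explains the corner case being repaired: with only unique_together indexes, PostgreSQL treats NULL subgroup values as distinct, so in a race Django could insert a second otherwise-identical row with subgroup=None instead of updating the existing one. A minimal sketch of that scenario, assuming a configured Django project running the pre-0016 analytics schema (the realm and end_time values here are illustrative, not taken from the diff):

    from datetime import datetime, timezone

    from analytics.models import RealmCount
    from zerver.models import Realm

    def sketch_subgroup_none_duplicates(realm: Realm) -> None:
        end_time = datetime(2020, 1, 1, tzinfo=timezone.utc)
        # Both inserts succeed: the plain unique index on
        # (realm, property, subgroup, end_time) does not treat two NULL
        # subgroups as equal, so no conflict is raised.
        RealmCount.objects.create(
            realm=realm, property="invites_sent::day", subgroup=None,
            end_time=end_time, value=3,
        )
        RealmCount.objects.create(
            realm=realm, property="invites_sent::day", subgroup=None,
            end_time=end_time, value=4,
        )
        # clear_duplicate_counts() would then collapse this pair into a single
        # row with value=7, since invites_sent::day is one of the
        # LoggingCountStat properties whose values are summed.

The 0016_unique_constraint_when_subgroup_null migration shown next replaces unique_together with pairs of conditional UniqueConstraints, one for subgroup IS NOT NULL and one for subgroup IS NULL, so the database itself rejects such duplicates.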
| @@ -1,92 +0,0 @@ | ||||
| # Generated by Django 2.2.10 on 2020-02-29 19:40 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     dependencies = [ | ||||
|         ("analytics", "0015_clear_duplicate_counts"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="installationcount", | ||||
|             unique_together=set(), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="realmcount", | ||||
|             unique_together=set(), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="streamcount", | ||||
|             unique_together=set(), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="usercount", | ||||
|             unique_together=set(), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="installationcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("property", "subgroup", "end_time"), | ||||
|                 name="unique_installation_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="installationcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("property", "end_time"), | ||||
|                 name="unique_installation_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="realmcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("realm", "property", "subgroup", "end_time"), | ||||
|                 name="unique_realm_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="realmcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("realm", "property", "end_time"), | ||||
|                 name="unique_realm_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="streamcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("stream", "property", "subgroup", "end_time"), | ||||
|                 name="unique_stream_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="streamcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("stream", "property", "end_time"), | ||||
|                 name="unique_stream_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="usercount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("user", "property", "subgroup", "end_time"), | ||||
|                 name="unique_user_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="usercount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("user", "property", "end_time"), | ||||
|                 name="unique_user_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,114 +0,0 @@ | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     dependencies = [ | ||||
|         ("analytics", "0016_unique_constraint_when_subgroup_null"), | ||||
|     ] | ||||
|  | ||||
|     # If the server was installed between 7.0 and 7.4 (or main between | ||||
|     # 2c20028aa451 and 7807bff52635), it contains indexes which (when | ||||
|     # running 7.5 or 7807bff52635 or higher) are never used, because | ||||
|     # they contain an improper cast | ||||
|     # (https://code.djangoproject.com/ticket/34840). | ||||
|     # | ||||
|     # We regenerate the indexes here, by dropping and re-creating | ||||
|     # them, so that we know that they are properly formed. | ||||
|     operations = [ | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="installationcount", | ||||
|             name="unique_installation_count", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="installationcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("property", "subgroup", "end_time"), | ||||
|                 name="unique_installation_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="installationcount", | ||||
|             name="unique_installation_count_null_subgroup", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="installationcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("property", "end_time"), | ||||
|                 name="unique_installation_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="realmcount", | ||||
|             name="unique_realm_count", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="realmcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("realm", "property", "subgroup", "end_time"), | ||||
|                 name="unique_realm_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="realmcount", | ||||
|             name="unique_realm_count_null_subgroup", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="realmcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("realm", "property", "end_time"), | ||||
|                 name="unique_realm_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="streamcount", | ||||
|             name="unique_stream_count", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="streamcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("stream", "property", "subgroup", "end_time"), | ||||
|                 name="unique_stream_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="streamcount", | ||||
|             name="unique_stream_count_null_subgroup", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="streamcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("stream", "property", "end_time"), | ||||
|                 name="unique_stream_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="usercount", | ||||
|             name="unique_user_count", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="usercount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("user", "property", "subgroup", "end_time"), | ||||
|                 name="unique_user_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="usercount", | ||||
|             name="unique_user_count_null_subgroup", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="usercount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("user", "property", "end_time"), | ||||
|                 name="unique_user_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,15 +0,0 @@ | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     elidable = True | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0017_regenerate_partial_indexes"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunSQL( | ||||
|             "DELETE FROM analytics_usercount WHERE property = 'active_users_audit:is_bot:day'" | ||||
|         ) | ||||
|     ] | ||||
| @@ -1,26 +0,0 @@ | ||||
| from django.db import migrations | ||||
|  | ||||
| REMOVED_COUNTS = ( | ||||
|     "active_users_log:is_bot:day", | ||||
|     "active_users:is_bot:day", | ||||
| ) | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     elidable = True | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0018_remove_usercount_active_users_audit"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunSQL( | ||||
|             [ | ||||
|                 ("DELETE FROM analytics_realmcount WHERE property IN %s", (REMOVED_COUNTS,)), | ||||
|                 ( | ||||
|                     "DELETE FROM analytics_installationcount WHERE property IN %s", | ||||
|                     (REMOVED_COUNTS,), | ||||
|                 ), | ||||
|             ] | ||||
|         ) | ||||
|     ] | ||||
| @@ -1,40 +0,0 @@ | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     atomic = False | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0019_remove_unused_counts"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name="installationcount", | ||||
|             name="id", | ||||
|             field=models.BigAutoField( | ||||
|                 auto_created=True, primary_key=True, serialize=False, verbose_name="ID" | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="realmcount", | ||||
|             name="id", | ||||
|             field=models.BigAutoField( | ||||
|                 auto_created=True, primary_key=True, serialize=False, verbose_name="ID" | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="streamcount", | ||||
|             name="id", | ||||
|             field=models.BigAutoField( | ||||
|                 auto_created=True, primary_key=True, serialize=False, verbose_name="ID" | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="usercount", | ||||
|             name="id", | ||||
|             field=models.BigAutoField( | ||||
|                 auto_created=True, primary_key=True, serialize=False, verbose_name="ID" | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,17 +0,0 @@ | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     dependencies = [ | ||||
|         ("analytics", "0020_alter_installationcount_id_alter_realmcount_id_and_more"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name="fillstate", | ||||
|             name="id", | ||||
|             field=models.BigAutoField( | ||||
|                 auto_created=True, primary_key=True, serialize=False, verbose_name="ID" | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,161 +1,92 @@ | ||||
| # https://github.com/typeddjango/django-stubs/issues/1698 | ||||
| # mypy: disable-error-code="explicit-override" | ||||
|  | ||||
| from datetime import datetime | ||||
| import datetime | ||||
| from typing import Optional | ||||
|  | ||||
| from django.db import models | ||||
| from django.db.models import Q, UniqueConstraint | ||||
| from typing_extensions import override | ||||
|  | ||||
| from zerver.lib.timestamp import floor_to_day | ||||
| from zerver.models import Realm, Stream, UserProfile | ||||
|  | ||||
|  | ||||
| class FillState(models.Model): | ||||
|     property = models.CharField(max_length=40, unique=True) | ||||
|     end_time = models.DateTimeField() | ||||
|     property = models.CharField(max_length=40, unique=True)  # type: str | ||||
|     end_time = models.DateTimeField()  # type: datetime.datetime | ||||
|  | ||||
|     # Valid states are {DONE, STARTED} | ||||
|     DONE = 1 | ||||
|     STARTED = 2 | ||||
|     state = models.PositiveSmallIntegerField() | ||||
|     state = models.PositiveSmallIntegerField()  # type: int | ||||
|  | ||||
|     last_modified = models.DateTimeField(auto_now=True)  # type: datetime.datetime | ||||
|  | ||||
|     @override | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.property} {self.end_time} {self.state}" | ||||
|  | ||||
|         return "<FillState: %s %s %s>" % (self.property, self.end_time, self.state) | ||||
|  | ||||
| # The earliest/starting end_time in FillState | ||||
| # We assume there is at least one realm | ||||
| def installation_epoch() -> datetime: | ||||
|     earliest_realm_creation = Realm.objects.aggregate(models.Min("date_created"))[ | ||||
|         "date_created__min" | ||||
|     ] | ||||
| def installation_epoch() -> datetime.datetime: | ||||
|     earliest_realm_creation = Realm.objects.aggregate(models.Min('date_created'))['date_created__min'] | ||||
|     return floor_to_day(earliest_realm_creation) | ||||
|  | ||||
| def last_successful_fill(property: str) -> Optional[datetime.datetime]: | ||||
|     fillstate = FillState.objects.filter(property=property).first() | ||||
|     if fillstate is None: | ||||
|         return None | ||||
|     if fillstate.state == FillState.DONE: | ||||
|         return fillstate.end_time | ||||
|     return fillstate.end_time - datetime.timedelta(hours=1) | ||||
|  | ||||
| class BaseCount(models.Model): | ||||
|     # Note: When inheriting from BaseCount, you may want to rearrange | ||||
|     # the order of the columns in the migration to make sure they | ||||
|     # match how you'd like the table to be arranged. | ||||
|     property = models.CharField(max_length=32) | ||||
|     subgroup = models.CharField(max_length=16, null=True) | ||||
|     end_time = models.DateTimeField() | ||||
|     value = models.BigIntegerField() | ||||
|     property = models.CharField(max_length=32)  # type: str | ||||
|     subgroup = models.CharField(max_length=16, null=True)  # type: Optional[str] | ||||
|     end_time = models.DateTimeField()  # type: datetime.datetime | ||||
|     value = models.BigIntegerField()  # type: int | ||||
|  | ||||
|     class Meta: | ||||
|         abstract = True | ||||
|  | ||||
|  | ||||
| class InstallationCount(BaseCount): | ||||
|  | ||||
|     class Meta: | ||||
|         # Handles invalid duplicate InstallationCount data | ||||
|         constraints = [ | ||||
|             UniqueConstraint( | ||||
|                 fields=["property", "subgroup", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=False), | ||||
|                 name="unique_installation_count", | ||||
|             ), | ||||
|             UniqueConstraint( | ||||
|                 fields=["property", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=True), | ||||
|                 name="unique_installation_count_null_subgroup", | ||||
|             ), | ||||
|         ] | ||||
|         unique_together = ("property", "subgroup", "end_time") | ||||
|  | ||||
|     @override | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.property} {self.subgroup} {self.value}" | ||||
|  | ||||
|         return "<InstallationCount: %s %s %s>" % (self.property, self.subgroup, self.value) | ||||
|  | ||||
| class RealmCount(BaseCount): | ||||
|     realm = models.ForeignKey(Realm, on_delete=models.CASCADE) | ||||
|  | ||||
|     class Meta: | ||||
|         # Handles invalid duplicate RealmCount data | ||||
|         constraints = [ | ||||
|             UniqueConstraint( | ||||
|                 fields=["realm", "property", "subgroup", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=False), | ||||
|                 name="unique_realm_count", | ||||
|             ), | ||||
|             UniqueConstraint( | ||||
|                 fields=["realm", "property", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=True), | ||||
|                 name="unique_realm_count_null_subgroup", | ||||
|             ), | ||||
|         ] | ||||
|         indexes = [ | ||||
|             models.Index( | ||||
|                 fields=["property", "end_time"], | ||||
|                 name="analytics_realmcount_property_end_time_3b60396b_idx", | ||||
|             ) | ||||
|         ] | ||||
|         unique_together = ("realm", "property", "subgroup", "end_time") | ||||
|         index_together = ["property", "end_time"] | ||||
|  | ||||
|     @override | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.realm!r} {self.property} {self.subgroup} {self.value}" | ||||
|  | ||||
|         return "<RealmCount: %s %s %s %s>" % (self.realm, self.property, self.subgroup, self.value) | ||||
|  | ||||
| class UserCount(BaseCount): | ||||
|     user = models.ForeignKey(UserProfile, on_delete=models.CASCADE) | ||||
|     realm = models.ForeignKey(Realm, on_delete=models.CASCADE) | ||||
|  | ||||
|     class Meta: | ||||
|         # Handles invalid duplicate UserCount data | ||||
|         constraints = [ | ||||
|             UniqueConstraint( | ||||
|                 fields=["user", "property", "subgroup", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=False), | ||||
|                 name="unique_user_count", | ||||
|             ), | ||||
|             UniqueConstraint( | ||||
|                 fields=["user", "property", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=True), | ||||
|                 name="unique_user_count_null_subgroup", | ||||
|             ), | ||||
|         ] | ||||
|         unique_together = ("user", "property", "subgroup", "end_time") | ||||
|         # This index dramatically improves the performance of | ||||
|         # aggregating from users to realms | ||||
|         indexes = [ | ||||
|             models.Index( | ||||
|                 fields=["property", "realm", "end_time"], | ||||
|                 name="analytics_usercount_property_realm_id_end_time_591dbec1_idx", | ||||
|             ) | ||||
|         ] | ||||
|         index_together = ["property", "realm", "end_time"] | ||||
|  | ||||
|     @override | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.user!r} {self.property} {self.subgroup} {self.value}" | ||||
|  | ||||
|         return "<UserCount: %s %s %s %s>" % (self.user, self.property, self.subgroup, self.value) | ||||
|  | ||||
| class StreamCount(BaseCount): | ||||
|     stream = models.ForeignKey(Stream, on_delete=models.CASCADE) | ||||
|     realm = models.ForeignKey(Realm, on_delete=models.CASCADE) | ||||
|  | ||||
|     class Meta: | ||||
|         # Handles invalid duplicate StreamCount data | ||||
|         constraints = [ | ||||
|             UniqueConstraint( | ||||
|                 fields=["stream", "property", "subgroup", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=False), | ||||
|                 name="unique_stream_count", | ||||
|             ), | ||||
|             UniqueConstraint( | ||||
|                 fields=["stream", "property", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=True), | ||||
|                 name="unique_stream_count_null_subgroup", | ||||
|             ), | ||||
|         ] | ||||
|         unique_together = ("stream", "property", "subgroup", "end_time") | ||||
|         # This index dramatically improves the performance of | ||||
|         # aggregating from streams to realms | ||||
|         indexes = [ | ||||
|             models.Index( | ||||
|                 fields=["property", "realm", "end_time"], | ||||
|                 name="analytics_streamcount_property_realm_id_end_time_155ae930_idx", | ||||
|             ) | ||||
|         ] | ||||
|         index_together = ["property", "realm", "end_time"] | ||||
|  | ||||
|     @override | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.stream!r} {self.property} {self.subgroup} {self.value} {self.id}" | ||||
|         return "<StreamCount: %s %s %s %s %s>" % ( | ||||
|             self.stream, self.property, self.subgroup, self.value, self.id) | ||||
|   | ||||
										
											
File diff suppressed because it is too large
							| @@ -2,39 +2,28 @@ from analytics.lib.counts import CountStat | ||||
| from analytics.lib.fixtures import generate_time_series_data | ||||
| from zerver.lib.test_classes import ZulipTestCase | ||||
|  | ||||
|  | ||||
| # A very light test suite; the code being tested is not run in production. | ||||
| class TestFixtures(ZulipTestCase): | ||||
|     def test_deterministic_settings(self) -> None: | ||||
|         # test basic business_hour / non_business_hour calculation | ||||
|         # test we get an array of the right length with frequency=CountStat.DAY | ||||
|         data = generate_time_series_data( | ||||
|             days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0 | ||||
|         ) | ||||
|             days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0) | ||||
|         self.assertEqual(data, [400, 400, 400, 400, 400, 360, 360]) | ||||
|  | ||||
|         data = generate_time_series_data( | ||||
|             days=1, | ||||
|             business_hours_base=2000, | ||||
|             non_business_hours_base=1500, | ||||
|             growth=2, | ||||
|             spikiness=0, | ||||
|             frequency=CountStat.HOUR, | ||||
|         ) | ||||
|             days=1, business_hours_base=2000, non_business_hours_base=1500, | ||||
|             growth=2, spikiness=0, frequency=CountStat.HOUR) | ||||
|         # test we get an array of the right length with frequency=CountStat.HOUR | ||||
|         self.assert_length(data, 24) | ||||
|         self.assertEqual(len(data), 24) | ||||
|         # test that growth doesn't affect the first data point | ||||
|         self.assertEqual(data[0], 2000) | ||||
|         # test that the last data point is growth times what it otherwise would be | ||||
|         self.assertEqual(data[-1], 1500 * 2) | ||||
|         self.assertEqual(data[-1], 1500*2) | ||||
|  | ||||
|         # test autocorrelation == 1, since that's the easiest value to test | ||||
|         data = generate_time_series_data( | ||||
|             days=1, | ||||
|             business_hours_base=2000, | ||||
|             non_business_hours_base=2000, | ||||
|             autocorrelation=1, | ||||
|             frequency=CountStat.HOUR, | ||||
|         ) | ||||
|             days=1, business_hours_base=2000, non_business_hours_base=2000, | ||||
|             autocorrelation=1, frequency=CountStat.HOUR) | ||||
|         self.assertEqual(data[0], data[1]) | ||||
|         self.assertEqual(data[0], data[-1]) | ||||
|   | ||||
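For reference, the asserted daily totals follow from the fixture's split of each day into business and non-business hours (assuming the usual convention of 8 business hours per weekday): 8 * 20 + 16 * 15 = 400 for each of the five weekdays and 24 * 15 = 360 for each of the two weekend days, which matches [400, 400, 400, 400, 400, 360, 360].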
| @@ -1,689 +0,0 @@ | ||||
| from datetime import datetime, timedelta, timezone | ||||
|  | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from typing_extensions import override | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS, CountStat | ||||
| from analytics.lib.time_utils import time_range | ||||
| from analytics.models import FillState, RealmCount, StreamCount, UserCount | ||||
| from analytics.views.stats import rewrite_client_arrays, sort_by_totals, sort_client_labels | ||||
| from zerver.lib.test_classes import ZulipTestCase | ||||
| from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, datetime_to_timestamp | ||||
| from zerver.models import Client | ||||
| from zerver.models.realms import get_realm | ||||
|  | ||||
|  | ||||
| class TestStatsEndpoint(ZulipTestCase): | ||||
|     def test_stats(self) -> None: | ||||
|         self.user = self.example_user("hamlet") | ||||
|         self.login_user(self.user) | ||||
|         result = self.client_get("/stats") | ||||
|         self.assertEqual(result.status_code, 200) | ||||
|         # Check that we get something back | ||||
|         self.assert_in_response("Zulip analytics for", result) | ||||
|  | ||||
|     def test_guest_user_cant_access_stats(self) -> None: | ||||
|         self.user = self.example_user("polonius") | ||||
|         self.login_user(self.user) | ||||
|         result = self.client_get("/stats") | ||||
|         self.assert_json_error(result, "Not allowed for guest users", 400) | ||||
|  | ||||
|         result = self.client_get("/json/analytics/chart_data") | ||||
|         self.assert_json_error(result, "Not allowed for guest users", 400) | ||||
|  | ||||
|     def test_stats_for_realm(self) -> None: | ||||
|         user = self.example_user("hamlet") | ||||
|         self.login_user(user) | ||||
|  | ||||
|         result = self.client_get("/stats/realm/zulip/") | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         result = self.client_get("/stats/realm/not_existing_realm/") | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         user = self.example_user("hamlet") | ||||
|         user.is_staff = True | ||||
|         user.save(update_fields=["is_staff"]) | ||||
|  | ||||
|         result = self.client_get("/stats/realm/not_existing_realm/") | ||||
|         self.assertEqual(result.status_code, 404) | ||||
|  | ||||
|         result = self.client_get("/stats/realm/zulip/") | ||||
|         self.assertEqual(result.status_code, 200) | ||||
|         self.assert_in_response("Zulip analytics for", result) | ||||
|  | ||||
|     def test_stats_for_installation(self) -> None: | ||||
|         user = self.example_user("hamlet") | ||||
|         self.login_user(user) | ||||
|  | ||||
|         result = self.client_get("/stats/installation") | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         user = self.example_user("hamlet") | ||||
|         user.is_staff = True | ||||
|         user.save(update_fields=["is_staff"]) | ||||
|  | ||||
|         result = self.client_get("/stats/installation") | ||||
|         self.assertEqual(result.status_code, 200) | ||||
|         self.assert_in_response("Zulip analytics for", result) | ||||
|  | ||||
|  | ||||
| class TestGetChartData(ZulipTestCase): | ||||
|     @override | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.realm = get_realm("zulip") | ||||
|         self.user = self.example_user("hamlet") | ||||
|         self.stream_id = self.get_stream_id(self.get_streams(self.user)[0]) | ||||
|         self.login_user(self.user) | ||||
|         self.end_times_hour = [ | ||||
|             ceiling_to_hour(self.realm.date_created) + timedelta(hours=i) for i in range(4) | ||||
|         ] | ||||
|         self.end_times_day = [ | ||||
|             ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(4) | ||||
|         ] | ||||
|  | ||||
|     def data(self, i: int) -> list[int]: | ||||
|         return [0, 0, i, 0] | ||||
|  | ||||
|     def insert_data( | ||||
|         self, stat: CountStat, realm_subgroups: list[str | None], user_subgroups: list[str] | ||||
|     ) -> None: | ||||
|         if stat.frequency == CountStat.HOUR: | ||||
|             insert_time = self.end_times_hour[2] | ||||
|             fill_time = self.end_times_hour[-1] | ||||
|         if stat.frequency == CountStat.DAY: | ||||
|             insert_time = self.end_times_day[2] | ||||
|             fill_time = self.end_times_day[-1] | ||||
|  | ||||
|         RealmCount.objects.bulk_create( | ||||
|             RealmCount( | ||||
|                 property=stat.property, | ||||
|                 subgroup=subgroup, | ||||
|                 end_time=insert_time, | ||||
|                 value=100 + i, | ||||
|                 realm=self.realm, | ||||
|             ) | ||||
|             for i, subgroup in enumerate(realm_subgroups) | ||||
|         ) | ||||
|         UserCount.objects.bulk_create( | ||||
|             UserCount( | ||||
|                 property=stat.property, | ||||
|                 subgroup=subgroup, | ||||
|                 end_time=insert_time, | ||||
|                 value=200 + i, | ||||
|                 realm=self.realm, | ||||
|                 user=self.user, | ||||
|             ) | ||||
|             for i, subgroup in enumerate(user_subgroups) | ||||
|         ) | ||||
|         StreamCount.objects.bulk_create( | ||||
|             StreamCount( | ||||
|                 property=stat.property, | ||||
|                 subgroup=subgroup, | ||||
|                 end_time=insert_time, | ||||
|                 value=100 + i, | ||||
|                 stream_id=self.stream_id, | ||||
|                 realm=self.realm, | ||||
|             ) | ||||
|             for i, subgroup in enumerate(realm_subgroups) | ||||
|         ) | ||||
|         FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE) | ||||
|  | ||||
|     def test_number_of_humans(self) -> None: | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["1day_actives::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] | ||||
|         self.insert_data(stat, ["false"], []) | ||||
|         result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"}) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|                 "frequency": CountStat.DAY, | ||||
|                 "everyone": { | ||||
|                     "_1day": self.data(100), | ||||
|                     "_15day": self.data(100), | ||||
|                     "all_time": self.data(100), | ||||
|                 }, | ||||
|                 "display_order": None, | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_sent_over_time(self) -> None: | ||||
|         stat = COUNT_STATS["messages_sent:is_bot:hour"] | ||||
|         self.insert_data(stat, ["true", "false"], ["false"]) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour], | ||||
|                 "frequency": CountStat.HOUR, | ||||
|                 "everyone": {"bot": self.data(100), "human": self.data(101)}, | ||||
|                 "user": {"bot": self.data(0), "human": self.data(200)}, | ||||
|                 "display_order": None, | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_sent_by_message_type(self) -> None: | ||||
|         stat = COUNT_STATS["messages_sent:message_type:day"] | ||||
|         self.insert_data( | ||||
|             stat, ["public_stream", "private_message"], ["public_stream", "private_stream"] | ||||
|         ) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|                 "frequency": CountStat.DAY, | ||||
|                 "everyone": { | ||||
|                     "Public channels": self.data(100), | ||||
|                     "Private channels": self.data(0), | ||||
|                     "Direct messages": self.data(101), | ||||
|                     "Group direct messages": self.data(0), | ||||
|                 }, | ||||
|                 "user": { | ||||
|                     "Public channels": self.data(200), | ||||
|                     "Private channels": self.data(201), | ||||
|                     "Direct messages": self.data(0), | ||||
|                     "Group direct messages": self.data(0), | ||||
|                 }, | ||||
|                 "display_order": [ | ||||
|                     "Direct messages", | ||||
|                     "Public channels", | ||||
|                     "Private channels", | ||||
|                     "Group direct messages", | ||||
|                 ], | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_sent_by_client(self) -> None: | ||||
|         stat = COUNT_STATS["messages_sent:client:day"] | ||||
|         client1 = Client.objects.create(name="client 1") | ||||
|         client2 = Client.objects.create(name="client 2") | ||||
|         client3 = Client.objects.create(name="client 3") | ||||
|         client4 = Client.objects.create(name="client 4") | ||||
|         self.insert_data( | ||||
|             stat, | ||||
|             [str(client4.id), str(client3.id), str(client2.id)], | ||||
|             [str(client3.id), str(client1.id)], | ||||
|         ) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|                 "frequency": CountStat.DAY, | ||||
|                 "everyone": { | ||||
|                     "client 4": self.data(100), | ||||
|                     "client 3": self.data(101), | ||||
|                     "client 2": self.data(102), | ||||
|                 }, | ||||
|                 "user": {"client 3": self.data(200), "client 1": self.data(201)}, | ||||
|                 "display_order": ["client 1", "client 2", "client 3", "client 4"], | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_read_over_time(self) -> None: | ||||
|         stat = COUNT_STATS["messages_read::hour"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_read_over_time"} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour], | ||||
|                 "frequency": CountStat.HOUR, | ||||
|                 "everyone": {"read": self.data(100)}, | ||||
|                 "user": {"read": self.data(0)}, | ||||
|                 "display_order": None, | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_sent_by_stream(self) -> None: | ||||
|         stat = COUNT_STATS["messages_in_stream:is_bot:day"] | ||||
|         self.insert_data(stat, ["true", "false"], []) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             f"/json/analytics/chart_data/stream/{self.stream_id}", | ||||
|             { | ||||
|                 "chart_name": "messages_sent_by_stream", | ||||
|             }, | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|                 "frequency": CountStat.DAY, | ||||
|                 "everyone": {"bot": self.data(100), "human": self.data(101)}, | ||||
|                 "display_order": None, | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|         result = self.api_get( | ||||
|             self.example_user("polonius"), | ||||
|             f"/api/v1/analytics/chart_data/stream/{self.stream_id}", | ||||
|             { | ||||
|                 "chart_name": "messages_sent_by_stream", | ||||
|             }, | ||||
|         ) | ||||
|         self.assert_json_error(result, "Not allowed for guest users") | ||||
|  | ||||
|         # Verify we correctly forbid access to stats of streams in other realms. | ||||
|         result = self.api_get( | ||||
|             self.mit_user("sipbtest"), | ||||
|             f"/api/v1/analytics/chart_data/stream/{self.stream_id}", | ||||
|             { | ||||
|                 "chart_name": "messages_sent_by_stream", | ||||
|             }, | ||||
|             subdomain="zephyr", | ||||
|         ) | ||||
|         self.assert_json_error(result, "Invalid channel ID") | ||||
|  | ||||
|     def test_include_empty_subgroups(self) -> None: | ||||
|         FillState.objects.create( | ||||
|             property="realm_active_humans::day", | ||||
|             end_time=self.end_times_day[0], | ||||
|             state=FillState.DONE, | ||||
|         ) | ||||
|         result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"}) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual(data["everyone"], {"_1day": [0], "_15day": [0], "all_time": [0]}) | ||||
|         self.assertFalse("user" in data) | ||||
|  | ||||
|         FillState.objects.create( | ||||
|             property="messages_sent:is_bot:hour", | ||||
|             end_time=self.end_times_hour[0], | ||||
|             state=FillState.DONE, | ||||
|         ) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual(data["everyone"], {"human": [0], "bot": [0]}) | ||||
|         self.assertEqual(data["user"], {"human": [0], "bot": [0]}) | ||||
|  | ||||
|         FillState.objects.create( | ||||
|             property="messages_sent:message_type:day", | ||||
|             end_time=self.end_times_day[0], | ||||
|             state=FillState.DONE, | ||||
|         ) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data["everyone"], | ||||
|             { | ||||
|                 "Public channels": [0], | ||||
|                 "Private channels": [0], | ||||
|                 "Direct messages": [0], | ||||
|                 "Group direct messages": [0], | ||||
|             }, | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             data["user"], | ||||
|             { | ||||
|                 "Public channels": [0], | ||||
|                 "Private channels": [0], | ||||
|                 "Direct messages": [0], | ||||
|                 "Group direct messages": [0], | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|         FillState.objects.create( | ||||
|             property="messages_sent:client:day", | ||||
|             end_time=self.end_times_day[0], | ||||
|             state=FillState.DONE, | ||||
|         ) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual(data["everyone"], {}) | ||||
|         self.assertEqual(data["user"], {}) | ||||
|  | ||||
|     def test_start_and_end(self) -> None: | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["1day_actives::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] | ||||
|         self.insert_data(stat, ["false"], []) | ||||
|         end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day] | ||||
|  | ||||
|         # valid start and end | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", | ||||
|             { | ||||
|                 "chart_name": "number_of_humans", | ||||
|                 "start": end_time_timestamps[1], | ||||
|                 "end": end_time_timestamps[2], | ||||
|             }, | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual(data["end_times"], end_time_timestamps[1:3]) | ||||
|         self.assertEqual( | ||||
|             data["everyone"], {"_1day": [0, 100], "_15day": [0, 100], "all_time": [0, 100]} | ||||
|         ) | ||||
|  | ||||
|         # start later than end | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", | ||||
|             { | ||||
|                 "chart_name": "number_of_humans", | ||||
|                 "start": end_time_timestamps[2], | ||||
|                 "end": end_time_timestamps[1], | ||||
|             }, | ||||
|         ) | ||||
|         self.assert_json_error_contains(result, "Start time is later than") | ||||
|  | ||||
|     def test_min_length(self) -> None: | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["1day_actives::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] | ||||
|         self.insert_data(stat, ["false"], []) | ||||
|         # test min_length is too short to change anything | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 2} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data["end_times"], [datetime_to_timestamp(dt) for dt in self.end_times_day] | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             data["everyone"], | ||||
|             {"_1day": self.data(100), "_15day": self.data(100), "all_time": self.data(100)}, | ||||
|         ) | ||||
|         # test min_length larger than filled data | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 5} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         end_times = [ | ||||
|             ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4) | ||||
|         ] | ||||
|         self.assertEqual(data["end_times"], [datetime_to_timestamp(dt) for dt in end_times]) | ||||
|         self.assertEqual( | ||||
|             data["everyone"], | ||||
|             { | ||||
|                 "_1day": [0, *self.data(100)], | ||||
|                 "_15day": [0, *self.data(100)], | ||||
|                 "all_time": [0, *self.data(100)], | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_non_existent_chart(self) -> None: | ||||
|         result = self.client_get("/json/analytics/chart_data", {"chart_name": "does_not_exist"}) | ||||
|         self.assert_json_error_contains(result, "Unknown chart name") | ||||
|  | ||||
|     def test_analytics_not_running(self) -> None: | ||||
|         realm = get_realm("zulip") | ||||
|  | ||||
|         self.assertEqual(FillState.objects.count(), 0) | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=3) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         with self.assertLogs(level="WARNING") as m: | ||||
|             result = self.client_get( | ||||
|                 "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|             ) | ||||
|             self.assertEqual( | ||||
|                 m.output, | ||||
|                 [ | ||||
|                     f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?" | ||||
|                 ], | ||||
|             ) | ||||
|  | ||||
|         self.assert_json_error_contains(result, "No analytics data available") | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=1, hours=2) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         with self.assertLogs(level="WARNING") as m: | ||||
|             result = self.client_get( | ||||
|                 "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|             ) | ||||
|             self.assertEqual( | ||||
|                 m.output, | ||||
|                 [ | ||||
|                     f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?" | ||||
|                 ], | ||||
|             ) | ||||
|  | ||||
|         self.assert_json_error_contains(result, "No analytics data available") | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=1, minutes=10) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(hours=10) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|         end_time = timezone_now() - timedelta(days=5) | ||||
|         fill_state = FillState.objects.create( | ||||
|             property="messages_sent:is_bot:hour", end_time=end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=3) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         with self.assertLogs(level="WARNING") as m: | ||||
|             result = self.client_get( | ||||
|                 "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|             ) | ||||
|             self.assertEqual( | ||||
|                 m.output, | ||||
|                 [ | ||||
|                     f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?" | ||||
|                 ], | ||||
|             ) | ||||
|  | ||||
|         self.assert_json_error_contains(result, "No analytics data available") | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=1, minutes=10) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|         end_time = timezone_now() - timedelta(days=2) | ||||
|         fill_state.end_time = end_time | ||||
|         fill_state.save(update_fields=["end_time"]) | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=3) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=1, hours=2) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         with self.assertLogs(level="WARNING") as m: | ||||
|             result = self.client_get( | ||||
|                 "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|             ) | ||||
|             self.assertEqual( | ||||
|                 m.output, | ||||
|                 [ | ||||
|                     f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?" | ||||
|                 ], | ||||
|             ) | ||||
|  | ||||
|         self.assert_json_error_contains(result, "No analytics data available") | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=1, minutes=10) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|     def test_get_chart_data_for_realm(self) -> None: | ||||
|         user = self.example_user("hamlet") | ||||
|         self.login_user(user) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"} | ||||
|         ) | ||||
|         self.assert_json_error(result, "Must be an server administrator", 400) | ||||
|  | ||||
|         user = self.example_user("hamlet") | ||||
|         user.is_staff = True | ||||
|         user.save(update_fields=["is_staff"]) | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/realm/not_existing_realm", | ||||
|             {"chart_name": "number_of_humans"}, | ||||
|         ) | ||||
|         self.assert_json_error(result, "Invalid organization", 400) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|     def test_get_chart_data_for_installation(self) -> None: | ||||
|         user = self.example_user("hamlet") | ||||
|         self.login_user(user) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"} | ||||
|         ) | ||||
|         self.assert_json_error(result, "Must be an server administrator", 400) | ||||
|  | ||||
|         user = self.example_user("hamlet") | ||||
|         user.is_staff = True | ||||
|         user.save(update_fields=["is_staff"]) | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|  | ||||
| class TestGetChartDataHelpers(ZulipTestCase): | ||||
|     def test_sort_by_totals(self) -> None: | ||||
|         empty: list[int] = [] | ||||
|         value_arrays = {"c": [0, 1], "a": [9], "b": [1, 1, 1], "d": empty} | ||||
|         self.assertEqual(sort_by_totals(value_arrays), ["a", "b", "c", "d"]) | ||||
|  | ||||
|     def test_sort_client_labels(self) -> None: | ||||
|         data = { | ||||
|             "everyone": {"a": [16], "c": [15], "b": [14], "e": [13], "d": [12], "h": [11]}, | ||||
|             "user": {"a": [6], "b": [5], "d": [4], "e": [3], "f": [2], "g": [1]}, | ||||
|         } | ||||
|         self.assertEqual(sort_client_labels(data), ["a", "b", "c", "d", "e", "f", "g", "h"]) | ||||
|  | ||||
|  | ||||
| class TestTimeRange(ZulipTestCase): | ||||
|     def test_time_range(self) -> None: | ||||
|         HOUR = timedelta(hours=1) | ||||
|         DAY = timedelta(days=1) | ||||
|  | ||||
|         a_time = datetime(2016, 3, 14, 22, 59, tzinfo=timezone.utc) | ||||
|         floor_hour = datetime(2016, 3, 14, 22, tzinfo=timezone.utc) | ||||
|         floor_day = datetime(2016, 3, 14, tzinfo=timezone.utc) | ||||
|  | ||||
|         # test start == end | ||||
|         self.assertEqual(time_range(a_time, a_time, CountStat.HOUR, None), []) | ||||
|         self.assertEqual(time_range(a_time, a_time, CountStat.DAY, None), []) | ||||
|         # test start == end == boundary, and min_length == 0 | ||||
|         self.assertEqual(time_range(floor_hour, floor_hour, CountStat.HOUR, 0), [floor_hour]) | ||||
|         self.assertEqual(time_range(floor_day, floor_day, CountStat.DAY, 0), [floor_day]) | ||||
|         # test start and end on different boundaries | ||||
|         self.assertEqual( | ||||
|             time_range(floor_hour, floor_hour + HOUR, CountStat.HOUR, None), | ||||
|             [floor_hour, floor_hour + HOUR], | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             time_range(floor_day, floor_day + DAY, CountStat.DAY, None), | ||||
|             [floor_day, floor_day + DAY], | ||||
|         ) | ||||
|         # test min_length | ||||
|         self.assertEqual( | ||||
|             time_range(floor_hour, floor_hour + HOUR, CountStat.HOUR, 4), | ||||
|             [floor_hour - 2 * HOUR, floor_hour - HOUR, floor_hour, floor_hour + HOUR], | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             time_range(floor_day, floor_day + DAY, CountStat.DAY, 4), | ||||
|             [floor_day - 2 * DAY, floor_day - DAY, floor_day, floor_day + DAY], | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class TestMapArrays(ZulipTestCase): | ||||
|     def test_map_arrays(self) -> None: | ||||
|         a = { | ||||
|             "desktop app 1.0": [1, 2, 3], | ||||
|             "desktop app 2.0": [10, 12, 13], | ||||
|             "desktop app 3.0": [21, 22, 23], | ||||
|             "website": [1, 2, 3], | ||||
|             "ZulipiOS": [1, 2, 3], | ||||
|             "ZulipElectron": [2, 5, 7], | ||||
|             "ZulipMobile": [1, 2, 3], | ||||
|             "ZulipMobile/flutter": [1, 1, 1], | ||||
|             "ZulipFlutter": [1, 1, 1], | ||||
|             "ZulipPython": [1, 2, 3], | ||||
|             "API: Python": [1, 2, 3], | ||||
|             "SomethingRandom": [4, 5, 6], | ||||
|             "ZulipGitHubWebhook": [7, 7, 9], | ||||
|             "ZulipAndroid": [64, 63, 65], | ||||
|             "ZulipTerminal": [9, 10, 11], | ||||
|         } | ||||
|         result = rewrite_client_arrays(a) | ||||
|         self.assertEqual( | ||||
|             result, | ||||
|             { | ||||
|                 "Old desktop app": [32, 36, 39], | ||||
|                 "Old iOS app": [1, 2, 3], | ||||
|                 "Desktop app": [2, 5, 7], | ||||
|                 "Mobile app (React Native)": [1, 2, 3], | ||||
|                 "Mobile app beta (Flutter)": [2, 2, 2], | ||||
|                 "Web app": [1, 2, 3], | ||||
|                 "Python API": [2, 4, 6], | ||||
|                 "SomethingRandom": [4, 5, 6], | ||||
|                 "GitHub webhook": [7, 7, 9], | ||||
|                 "Old Android app": [64, 63, 65], | ||||
|                 "Terminal app": [9, 10, 11], | ||||
|             }, | ||||
|         ) | ||||
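As the expected output above shows, rewrite_client_arrays merges legacy client names element-wise into display labels: the three "desktop app x.0" series add up to "Old desktop app" ([1, 2, 3] + [10, 12, 13] + [21, 22, 23] = [32, 36, 39]), "ZulipMobile/flutter" and "ZulipFlutter" add up to "Mobile app beta (Flutter)" ([1, 1, 1] + [1, 1, 1] = [2, 2, 2]), and unrecognized names such as "SomethingRandom" pass through unchanged.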
							
								
								
									
analytics/tests/test_views.py (new file, 618 lines)
									
								
							| @@ -0,0 +1,618 @@ | ||||
| from datetime import datetime, timedelta | ||||
| from typing import List, Optional | ||||
|  | ||||
| import mock | ||||
| from django.utils.timezone import utc | ||||
| from django.http import HttpResponse | ||||
| import ujson | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS, CountStat | ||||
| from analytics.lib.time_utils import time_range | ||||
| from analytics.models import FillState, \ | ||||
|     RealmCount, UserCount, last_successful_fill | ||||
| from analytics.views import rewrite_client_arrays, \ | ||||
|     sort_by_totals, sort_client_labels | ||||
| from zerver.lib.test_classes import ZulipTestCase | ||||
| from zerver.lib.timestamp import ceiling_to_day, \ | ||||
|     ceiling_to_hour, datetime_to_timestamp | ||||
| from zerver.lib.actions import do_create_multiuse_invite_link, \ | ||||
|     do_send_realm_reactivation_email | ||||
| from zerver.models import Client, get_realm, MultiuseInvite | ||||
|  | ||||
| class TestStatsEndpoint(ZulipTestCase): | ||||
|     def test_stats(self) -> None: | ||||
|         self.user = self.example_user('hamlet') | ||||
|         self.login(self.user.email) | ||||
|         result = self.client_get('/stats') | ||||
|         self.assertEqual(result.status_code, 200) | ||||
|         # Check that we get something back | ||||
|         self.assert_in_response("Zulip analytics for", result) | ||||
|  | ||||
|     def test_guest_user_cant_access_stats(self) -> None: | ||||
|         self.user = self.example_user('polonius') | ||||
|         self.login(self.user.email) | ||||
|         result = self.client_get('/stats') | ||||
|         self.assert_json_error(result, "Not allowed for guest users", 400) | ||||
|  | ||||
|         result = self.client_get('/json/analytics/chart_data') | ||||
|         self.assert_json_error(result, "Not allowed for guest users", 400) | ||||
|  | ||||
|     def test_stats_for_realm(self) -> None: | ||||
|         user_profile = self.example_user('hamlet') | ||||
|         self.login(user_profile.email) | ||||
|  | ||||
|         result = self.client_get('/stats/realm/zulip/') | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         user_profile = self.example_user('hamlet') | ||||
|         user_profile.is_staff = True | ||||
|         user_profile.save(update_fields=['is_staff']) | ||||
|  | ||||
|         result = self.client_get('/stats/realm/not_existing_realm/') | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         result = self.client_get('/stats/realm/zulip/') | ||||
|         self.assertEqual(result.status_code, 200) | ||||
|         self.assert_in_response("Zulip analytics for", result) | ||||
|  | ||||
|     def test_stats_for_installation(self) -> None: | ||||
|         user_profile = self.example_user('hamlet') | ||||
|         self.login(user_profile.email) | ||||
|  | ||||
|         result = self.client_get('/stats/installation') | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         user_profile = self.example_user('hamlet') | ||||
|         user_profile.is_staff = True | ||||
|         user_profile.save(update_fields=['is_staff']) | ||||
|  | ||||
|         result = self.client_get('/stats/installation') | ||||
|         self.assertEqual(result.status_code, 200) | ||||
|         self.assert_in_response("Zulip analytics for", result) | ||||
|  | ||||
| class TestGetChartData(ZulipTestCase): | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.realm = get_realm('zulip') | ||||
|         self.user = self.example_user('hamlet') | ||||
|         self.login(self.user.email) | ||||
|         self.end_times_hour = [ceiling_to_hour(self.realm.date_created) + timedelta(hours=i) | ||||
|                                for i in range(4)] | ||||
|         self.end_times_day = [ceiling_to_day(self.realm.date_created) + timedelta(days=i) | ||||
|                               for i in range(4)] | ||||
|  | ||||
|     def data(self, i: int) -> List[int]: | ||||
|         return [0, 0, i, 0] | ||||
|  | ||||
|     def insert_data(self, stat: CountStat, realm_subgroups: List[Optional[str]], | ||||
|                     user_subgroups: List[str]) -> None: | ||||
|         if stat.frequency == CountStat.HOUR: | ||||
|             insert_time = self.end_times_hour[2] | ||||
|             fill_time = self.end_times_hour[-1] | ||||
|         if stat.frequency == CountStat.DAY: | ||||
|             insert_time = self.end_times_day[2] | ||||
|             fill_time = self.end_times_day[-1] | ||||
|  | ||||
|         RealmCount.objects.bulk_create([ | ||||
|             RealmCount(property=stat.property, subgroup=subgroup, end_time=insert_time, | ||||
|                        value=100+i, realm=self.realm) | ||||
|             for i, subgroup in enumerate(realm_subgroups)]) | ||||
|         UserCount.objects.bulk_create([ | ||||
|             UserCount(property=stat.property, subgroup=subgroup, end_time=insert_time, | ||||
|                       value=200+i, realm=self.realm, user=self.user) | ||||
|             for i, subgroup in enumerate(user_subgroups)]) | ||||
|         FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE) | ||||
|  | ||||
|     def test_number_of_humans(self) -> None: | ||||
|         stat = COUNT_STATS['realm_active_humans::day'] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS['1day_actives::day'] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS['active_users_audit:is_bot:day'] | ||||
|         self.insert_data(stat, ['false'], []) | ||||
|         result = self.client_get('/json/analytics/chart_data', | ||||
|                                  {'chart_name': 'number_of_humans'}) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data, { | ||||
|             'msg': '', | ||||
|             'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|             'frequency': CountStat.DAY, | ||||
|             'everyone': {'_1day': self.data(100), '_15day': self.data(100), 'all_time': self.data(100)}, | ||||
|             'display_order': None, | ||||
|             'result': 'success', | ||||
|         }) | ||||
|  | ||||
|     def test_messages_sent_over_time(self) -> None: | ||||
|         stat = COUNT_STATS['messages_sent:is_bot:hour'] | ||||
|         self.insert_data(stat, ['true', 'false'], ['false']) | ||||
|         result = self.client_get('/json/analytics/chart_data', | ||||
|                                  {'chart_name': 'messages_sent_over_time'}) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data, { | ||||
|             'msg': '', | ||||
|             'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_hour], | ||||
|             'frequency': CountStat.HOUR, | ||||
|             'everyone': {'bot': self.data(100), 'human': self.data(101)}, | ||||
|             'user': {'bot': self.data(0), 'human': self.data(200)}, | ||||
|             'display_order': None, | ||||
|             'result': 'success', | ||||
|         }) | ||||
|  | ||||
|     def test_messages_sent_by_message_type(self) -> None: | ||||
|         stat = COUNT_STATS['messages_sent:message_type:day'] | ||||
|         self.insert_data(stat, ['public_stream', 'private_message'], | ||||
|                          ['public_stream', 'private_stream']) | ||||
|         result = self.client_get('/json/analytics/chart_data', | ||||
|                                  {'chart_name': 'messages_sent_by_message_type'}) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data, { | ||||
|             'msg': '', | ||||
|             'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|             'frequency': CountStat.DAY, | ||||
|             'everyone': {'Public streams': self.data(100), 'Private streams': self.data(0), | ||||
|                          'Private messages': self.data(101), 'Group private messages': self.data(0)}, | ||||
|             'user': {'Public streams': self.data(200), 'Private streams': self.data(201), | ||||
|                      'Private messages': self.data(0), 'Group private messages': self.data(0)}, | ||||
|             'display_order': ['Private messages', 'Public streams', 'Private streams', 'Group private messages'], | ||||
|             'result': 'success', | ||||
|         }) | ||||
|  | ||||
|     def test_messages_sent_by_client(self) -> None: | ||||
|         stat = COUNT_STATS['messages_sent:client:day'] | ||||
|         client1 = Client.objects.create(name='client 1') | ||||
|         client2 = Client.objects.create(name='client 2') | ||||
|         client3 = Client.objects.create(name='client 3') | ||||
|         client4 = Client.objects.create(name='client 4') | ||||
|         self.insert_data(stat, [client4.id, client3.id, client2.id], | ||||
|                          [client3.id, client1.id]) | ||||
|         result = self.client_get('/json/analytics/chart_data', | ||||
|                                  {'chart_name': 'messages_sent_by_client'}) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data, { | ||||
|             'msg': '', | ||||
|             'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|             'frequency': CountStat.DAY, | ||||
|             'everyone': {'client 4': self.data(100), 'client 3': self.data(101), | ||||
|                          'client 2': self.data(102)}, | ||||
|             'user': {'client 3': self.data(200), 'client 1': self.data(201)}, | ||||
|             'display_order': ['client 1', 'client 2', 'client 3', 'client 4'], | ||||
|             'result': 'success', | ||||
|         }) | ||||
|  | ||||
|     def test_include_empty_subgroups(self) -> None: | ||||
|         FillState.objects.create( | ||||
|             property='realm_active_humans::day', end_time=self.end_times_day[0], | ||||
|             state=FillState.DONE) | ||||
|         result = self.client_get('/json/analytics/chart_data', | ||||
|                                  {'chart_name': 'number_of_humans'}) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data['everyone'], {"_1day": [0], "_15day": [0], "all_time": [0]}) | ||||
|         self.assertFalse('user' in data) | ||||
|  | ||||
|         FillState.objects.create( | ||||
|             property='messages_sent:is_bot:hour', end_time=self.end_times_hour[0], | ||||
|             state=FillState.DONE) | ||||
|         result = self.client_get('/json/analytics/chart_data', | ||||
|                                  {'chart_name': 'messages_sent_over_time'}) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data['everyone'], {'human': [0], 'bot': [0]}) | ||||
|         self.assertEqual(data['user'], {'human': [0], 'bot': [0]}) | ||||
|  | ||||
|         FillState.objects.create( | ||||
|             property='messages_sent:message_type:day', end_time=self.end_times_day[0], | ||||
|             state=FillState.DONE) | ||||
|         result = self.client_get('/json/analytics/chart_data', | ||||
|                                  {'chart_name': 'messages_sent_by_message_type'}) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data['everyone'], { | ||||
|             'Public streams': [0], 'Private streams': [0], | ||||
|             'Private messages': [0], 'Group private messages': [0]}) | ||||
|         self.assertEqual(data['user'], { | ||||
|             'Public streams': [0], 'Private streams': [0], | ||||
|             'Private messages': [0], 'Group private messages': [0]}) | ||||
|  | ||||
|         FillState.objects.create( | ||||
|             property='messages_sent:client:day', end_time=self.end_times_day[0], | ||||
|             state=FillState.DONE) | ||||
|         result = self.client_get('/json/analytics/chart_data', | ||||
|                                  {'chart_name': 'messages_sent_by_client'}) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data['everyone'], {}) | ||||
|         self.assertEqual(data['user'], {}) | ||||
|  | ||||
|     def test_start_and_end(self) -> None: | ||||
|         stat = COUNT_STATS['realm_active_humans::day'] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS['1day_actives::day'] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS['active_users_audit:is_bot:day'] | ||||
|         self.insert_data(stat, ['false'], []) | ||||
|         end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day] | ||||
|  | ||||
|         # valid start and end | ||||
|         result = self.client_get('/json/analytics/chart_data', | ||||
|                                  {'chart_name': 'number_of_humans', | ||||
|                                   'start': end_time_timestamps[1], | ||||
|                                   'end': end_time_timestamps[2]}) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data['end_times'], end_time_timestamps[1:3]) | ||||
|         self.assertEqual(data['everyone'], {'_1day': [0, 100], '_15day': [0, 100], 'all_time': [0, 100]}) | ||||
|  | ||||
|         # start later than end | ||||
|         result = self.client_get('/json/analytics/chart_data', | ||||
|                                  {'chart_name': 'number_of_humans', | ||||
|                                   'start': end_time_timestamps[2], | ||||
|                                   'end': end_time_timestamps[1]}) | ||||
|         self.assert_json_error_contains(result, 'Start time is later than') | ||||
|  | ||||
|     def test_min_length(self) -> None: | ||||
|         stat = COUNT_STATS['realm_active_humans::day'] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS['1day_actives::day'] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS['active_users_audit:is_bot:day'] | ||||
|         self.insert_data(stat, ['false'], []) | ||||
|         # test min_length is too short to change anything | ||||
|         result = self.client_get('/json/analytics/chart_data', | ||||
|                                  {'chart_name': 'number_of_humans', | ||||
|                                   'min_length': 2}) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in self.end_times_day]) | ||||
|         self.assertEqual(data['everyone'], {'_1day': self.data(100), '_15day': self.data(100), 'all_time': self.data(100)}) | ||||
|         # test min_length larger than filled data | ||||
|         result = self.client_get('/json/analytics/chart_data', | ||||
|                                  {'chart_name': 'number_of_humans', | ||||
|                                   'min_length': 5}) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         end_times = [ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)] | ||||
|         self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in end_times]) | ||||
|         self.assertEqual(data['everyone'], {'_1day': [0]+self.data(100), '_15day': [0]+self.data(100), 'all_time': [0]+self.data(100)}) | ||||
|  | ||||
|     def test_non_existent_chart(self) -> None: | ||||
|         result = self.client_get('/json/analytics/chart_data', | ||||
|                                  {'chart_name': 'does_not_exist'}) | ||||
|         self.assert_json_error_contains(result, 'Unknown chart name') | ||||
|  | ||||
|     def test_analytics_not_running(self) -> None: | ||||
|         # try to get data for a valid chart, but before we've put anything in the database | ||||
|         # (e.g. before update_analytics_counts has been run) | ||||
|         with mock.patch('logging.warning'): | ||||
|             result = self.client_get('/json/analytics/chart_data', | ||||
|                                      {'chart_name': 'number_of_humans'}) | ||||
|         self.assert_json_error_contains(result, 'No analytics data available') | ||||
|  | ||||
|     def test_get_chart_data_for_realm(self) -> None: | ||||
|         user_profile = self.example_user('hamlet') | ||||
|         self.login(user_profile.email) | ||||
|  | ||||
|         result = self.client_get('/json/analytics/chart_data/realm/zulip/', | ||||
|                                  {'chart_name': 'number_of_humans'}) | ||||
|         self.assert_json_error(result, "Must be an server administrator", 400) | ||||
|  | ||||
|         user_profile = self.example_user('hamlet') | ||||
|         user_profile.is_staff = True | ||||
|         user_profile.save(update_fields=['is_staff']) | ||||
|         stat = COUNT_STATS['realm_active_humans::day'] | ||||
|         self.insert_data(stat, [None], []) | ||||
|  | ||||
|         result = self.client_get('/json/analytics/chart_data/realm/not_existing_realm', | ||||
|                                  {'chart_name': 'number_of_humans'}) | ||||
|         self.assert_json_error(result, 'Invalid organization', 400) | ||||
|  | ||||
|         result = self.client_get('/json/analytics/chart_data/realm/zulip', | ||||
|                                  {'chart_name': 'number_of_humans'}) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|     def test_get_chart_data_for_installation(self) -> None: | ||||
|         user_profile = self.example_user('hamlet') | ||||
|         self.login(user_profile.email) | ||||
|  | ||||
|         result = self.client_get('/json/analytics/chart_data/installation', | ||||
|                                  {'chart_name': 'number_of_humans'}) | ||||
|         self.assert_json_error(result, "Must be an server administrator", 400) | ||||
|  | ||||
|         user_profile = self.example_user('hamlet') | ||||
|         user_profile.is_staff = True | ||||
|         user_profile.save(update_fields=['is_staff']) | ||||
|         stat = COUNT_STATS['realm_active_humans::day'] | ||||
|         self.insert_data(stat, [None], []) | ||||
|  | ||||
|         result = self.client_get('/json/analytics/chart_data/installation', | ||||
|                                  {'chart_name': 'number_of_humans'}) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
| class TestSupportEndpoint(ZulipTestCase): | ||||
|     def test_search(self) -> None: | ||||
|         def check_hamlet_user_query_result(result: HttpResponse) -> None: | ||||
|             self.assert_in_success_response(['<span class="label">user</span>\n', '<h3>King Hamlet</h3>', | ||||
|                                              '<b>Email</b>: hamlet@zulip.com', '<b>Is active</b>: True<br>', | ||||
|                                              '<b>Admins</b>: iago@zulip.com\n', | ||||
|                                              'class="copy-button" data-copytext="iago@zulip.com"' | ||||
|                                              ], result) | ||||
|  | ||||
|         def check_zulip_realm_query_result(result: HttpResponse) -> None: | ||||
|             zulip_realm = get_realm("zulip") | ||||
|             self.assert_in_success_response(['<input type="hidden" name="realm_id" value="%s"' % (zulip_realm.id,), | ||||
|                                              'Zulip Dev</h3>', | ||||
|                                              '<option value="1" selected>Self Hosted</option>', | ||||
|                                              '<option value="2" >Limited</option>', | ||||
|                                              'input type="number" name="discount" value="None"', | ||||
|                                              '<option value="active" selected>Active</option>', | ||||
|                                              '<option value="deactivated" >Deactivated</option>', | ||||
|                                              'scrub-realm-button">', | ||||
|                                              'data-string-id="zulip"'], result) | ||||
|  | ||||
|         def check_lear_realm_query_result(result: HttpResponse) -> None: | ||||
|             lear_realm = get_realm("lear") | ||||
|             self.assert_in_success_response(['<input type="hidden" name="realm_id" value="%s"' % (lear_realm.id,), | ||||
|                                              'Lear & Co.</h3>', | ||||
|                                              '<option value="1" selected>Self Hosted</option>', | ||||
|                                              '<option value="2" >Limited</option>', | ||||
|                                              'input type="number" name="discount" value="None"', | ||||
|                                              '<option value="active" selected>Active</option>', | ||||
|                                              '<option value="deactivated" >Deactivated</option>', | ||||
|                                              'scrub-realm-button">', | ||||
|                                              'data-string-id="lear"'], result) | ||||
|  | ||||
|         def check_preregistration_user_query_result(result: HttpResponse, email: str, invite: Optional[bool]=False) -> None: | ||||
|             self.assert_in_success_response(['<span class="label">preregistration user</span>\n', | ||||
|                                              '<b>Email</b>: {}'.format(email), | ||||
|                                              ], result) | ||||
|             if invite: | ||||
|                 self.assert_in_success_response(['<span class="label">invite</span>'], result) | ||||
|                 self.assert_in_success_response(['<b>Expires in</b>: 1\xa0week, 3', | ||||
|                                                  '<b>Status</b>: Link has never been clicked'], result) | ||||
|                 self.assert_in_success_response([], result) | ||||
|             else: | ||||
|                 self.assert_not_in_success_response(['<span class="label">invite</span>'], result) | ||||
|                 self.assert_in_success_response(['<b>Expires in</b>: 1\xa0day', | ||||
|                                                  '<b>Status</b>: Link has never been clicked'], result) | ||||
|  | ||||
|         def check_realm_creation_query_result(result: HttpResponse, email: str) -> None: | ||||
|             self.assert_in_success_response(['<span class="label">preregistration user</span>\n', | ||||
|                                              '<span class="label">realm creation</span>\n', | ||||
|                                              '<b>Link</b>: http://zulip.testserver/accounts/do_confirm/', | ||||
|                                              '<b>Expires in</b>: 1\xa0day<br>\n' | ||||
|                                              ], result) | ||||
|  | ||||
|         def check_multiuse_invite_link_query_result(result: HttpResponse) -> None: | ||||
|             self.assert_in_success_response(['<span class="label">multiuse invite</span>\n', | ||||
|                                              '<b>Link</b>: http://zulip.testserver/join/', | ||||
|                                              '<b>Expires in</b>: 1\xa0week, 3' | ||||
|                                              ], result) | ||||
|  | ||||
|         def check_realm_reactivation_link_query_result(result: HttpResponse) -> None: | ||||
|             self.assert_in_success_response(['<span class="label">realm reactivation</span>\n', | ||||
|                                              '<b>Link</b>: http://zulip.testserver/reactivate/', | ||||
|                                              '<b>Expires in</b>: 1\xa0day' | ||||
|                                              ], result) | ||||
|  | ||||
|         cordelia_email = self.example_email("cordelia") | ||||
|         self.login(cordelia_email) | ||||
|  | ||||
|         result = self.client_get("/activity/support") | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago_email = self.example_email("iago") | ||||
|         self.login(iago_email) | ||||
|  | ||||
|         result = self.client_get("/activity/support") | ||||
|         self.assert_in_success_response(['<input type="text" name="q" class="input-xxlarge search-query"'], result) | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": "hamlet@zulip.com"}) | ||||
|         check_hamlet_user_query_result(result) | ||||
|         check_zulip_realm_query_result(result) | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": "lear"}) | ||||
|         check_lear_realm_query_result(result) | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": "http://lear.testserver"}) | ||||
|         check_lear_realm_query_result(result) | ||||
|  | ||||
|         with self.settings(REALM_HOSTS={'zulip': 'localhost'}): | ||||
|             result = self.client_get("/activity/support", {"q": "http://localhost"}) | ||||
|             check_zulip_realm_query_result(result) | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": "hamlet@zulip.com, lear"}) | ||||
|         check_hamlet_user_query_result(result) | ||||
|         check_zulip_realm_query_result(result) | ||||
|         check_lear_realm_query_result(result) | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": "lear, Hamlet <hamlet@zulip.com>"}) | ||||
|         check_hamlet_user_query_result(result) | ||||
|         check_zulip_realm_query_result(result) | ||||
|         check_lear_realm_query_result(result) | ||||
|  | ||||
|         self.client_post('/accounts/home/', {'email': self.nonreg_email("test")}) | ||||
|         self.login(iago_email) | ||||
|         result = self.client_get("/activity/support", {"q": self.nonreg_email("test")}) | ||||
|         check_preregistration_user_query_result(result, self.nonreg_email("test")) | ||||
|         check_zulip_realm_query_result(result) | ||||
|  | ||||
|         stream_ids = [self.get_stream_id("Denmark")] | ||||
|         invitee_emails = [self.nonreg_email("test1")] | ||||
|         self.client_post("/json/invites", {"invitee_emails": invitee_emails, | ||||
|                          "stream_ids": ujson.dumps(stream_ids), "invite_as": 1}) | ||||
|         result = self.client_get("/activity/support", {"q": self.nonreg_email("test1")}) | ||||
|         check_preregistration_user_query_result(result, self.nonreg_email("test1"), invite=True) | ||||
|         check_zulip_realm_query_result(result) | ||||
|  | ||||
|         email = self.nonreg_email('alice') | ||||
|         self.client_post('/new/', {'email': email}) | ||||
|         result = self.client_get("/activity/support", {"q": email}) | ||||
|         check_realm_creation_query_result(result, email) | ||||
|  | ||||
|         do_create_multiuse_invite_link(self.example_user("hamlet"), invited_as=1) | ||||
|         result = self.client_get("/activity/support", {"q": "zulip"}) | ||||
|         check_multiuse_invite_link_query_result(result) | ||||
|         check_zulip_realm_query_result(result) | ||||
|         MultiuseInvite.objects.all().delete() | ||||
|  | ||||
|         do_send_realm_reactivation_email(get_realm("zulip")) | ||||
|         result = self.client_get("/activity/support", {"q": "zulip"}) | ||||
|         check_realm_reactivation_link_query_result(result) | ||||
|         check_zulip_realm_query_result(result) | ||||
|  | ||||
|     def test_change_plan_type(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         self.login(cordelia.email) | ||||
|  | ||||
|         result = self.client_post("/activity/support", {"realm_id": "%s" % (cordelia.realm_id,), "plan_type": "2"}) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login(iago.email) | ||||
|  | ||||
|         with mock.patch("analytics.views.do_change_plan_type") as m: | ||||
|             result = self.client_post("/activity/support", {"realm_id": "%s" % (iago.realm_id,), "plan_type": "2"}) | ||||
|             m.assert_called_once_with(get_realm("zulip"), 2) | ||||
|             self.assert_in_success_response(["Plan type of Zulip Dev changed from self hosted to limited"], result) | ||||
|  | ||||
|     def test_attach_discount(self) -> None: | ||||
|         lear_realm = get_realm("lear") | ||||
|         cordelia_email = self.example_email("cordelia") | ||||
|         self.login(cordelia_email) | ||||
|  | ||||
|         result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "discount": "25"}) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago_email = self.example_email("iago") | ||||
|         self.login(iago_email) | ||||
|  | ||||
|         with mock.patch("analytics.views.attach_discount_to_realm") as m: | ||||
|             result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "discount": "25"}) | ||||
|             m.assert_called_once_with(get_realm("lear"), 25) | ||||
|             self.assert_in_success_response(["Discount of Lear & Co. changed to 25 from None"], result) | ||||
|  | ||||
|     def test_activate_or_deactivate_realm(self) -> None: | ||||
|         lear_realm = get_realm("lear") | ||||
|         cordelia_email = self.example_email("cordelia") | ||||
|         self.login(cordelia_email) | ||||
|  | ||||
|         result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "status": "deactivated"}) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago_email = self.example_email("iago") | ||||
|         self.login(iago_email) | ||||
|  | ||||
|         with mock.patch("analytics.views.do_deactivate_realm") as m: | ||||
|             result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "status": "deactivated"}) | ||||
|             m.assert_called_once_with(lear_realm, self.example_user("iago")) | ||||
|             self.assert_in_success_response(["Lear & Co. deactivated"], result) | ||||
|  | ||||
|         with mock.patch("analytics.views.do_send_realm_reactivation_email") as m: | ||||
|             result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "status": "active"}) | ||||
|             m.assert_called_once_with(lear_realm) | ||||
|             self.assert_in_success_response(["Realm reactivation email sent to admins of Lear"], result) | ||||
|  | ||||
|     def test_scrub_realm(self) -> None: | ||||
|         lear_realm = get_realm("lear") | ||||
|         cordelia_email = self.example_email("cordelia") | ||||
|         self.login(cordelia_email) | ||||
|  | ||||
|         result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "discount": "25"}) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago_email = self.example_email("iago") | ||||
|         self.login(iago_email) | ||||
|  | ||||
|         with mock.patch("analytics.views.do_scrub_realm") as m: | ||||
|             result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "scrub_realm": "scrub_realm"}) | ||||
|             m.assert_called_once_with(lear_realm) | ||||
|             self.assert_in_success_response(["Lear & Co. scrubbed"], result) | ||||
|  | ||||
|         with mock.patch("analytics.views.do_scrub_realm") as m: | ||||
|             result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,)}) | ||||
|             m.assert_not_called() | ||||
|  | ||||
| class TestGetChartDataHelpers(ZulipTestCase): | ||||
|     # last_successful_fill is in analytics/models.py, but get_chart_data is | ||||
|     # the only function that uses it at the moment | ||||
|     def test_last_successful_fill(self) -> None: | ||||
|         self.assertIsNone(last_successful_fill('non-existent')) | ||||
|         a_time = datetime(2016, 3, 14, 19).replace(tzinfo=utc) | ||||
|         one_hour_before = datetime(2016, 3, 14, 18).replace(tzinfo=utc) | ||||
|         fillstate = FillState.objects.create(property='property', end_time=a_time, | ||||
|                                              state=FillState.DONE) | ||||
|         self.assertEqual(last_successful_fill('property'), a_time) | ||||
|         fillstate.state = FillState.STARTED | ||||
|         fillstate.save() | ||||
|         self.assertEqual(last_successful_fill('property'), one_hour_before) | ||||
|  | ||||
|     def test_sort_by_totals(self) -> None: | ||||
|         empty = []  # type: List[int] | ||||
|         value_arrays = {'c': [0, 1], 'a': [9], 'b': [1, 1, 1], 'd': empty} | ||||
|         self.assertEqual(sort_by_totals(value_arrays), ['a', 'b', 'c', 'd']) | ||||
|  | ||||
|     def test_sort_client_labels(self) -> None: | ||||
|         data = {'everyone': {'a': [16], 'c': [15], 'b': [14], 'e': [13], 'd': [12], 'h': [11]}, | ||||
|                 'user': {'a': [6], 'b': [5], 'd': [4], 'e': [3], 'f': [2], 'g': [1]}} | ||||
|         self.assertEqual(sort_client_labels(data), ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']) | ||||
|  | ||||
| class TestTimeRange(ZulipTestCase): | ||||
|     def test_time_range(self) -> None: | ||||
|         HOUR = timedelta(hours=1) | ||||
|         DAY = timedelta(days=1) | ||||
|  | ||||
|         a_time = datetime(2016, 3, 14, 22, 59).replace(tzinfo=utc) | ||||
|         floor_hour = datetime(2016, 3, 14, 22).replace(tzinfo=utc) | ||||
|         floor_day = datetime(2016, 3, 14).replace(tzinfo=utc) | ||||
|  | ||||
|         # test start == end | ||||
|         self.assertEqual(time_range(a_time, a_time, CountStat.HOUR, None), []) | ||||
|         self.assertEqual(time_range(a_time, a_time, CountStat.DAY, None), []) | ||||
|         # test start == end == boundary, and min_length == 0 | ||||
|         self.assertEqual(time_range(floor_hour, floor_hour, CountStat.HOUR, 0), [floor_hour]) | ||||
|         self.assertEqual(time_range(floor_day, floor_day, CountStat.DAY, 0), [floor_day]) | ||||
|         # test start and end on different boundaries | ||||
|         self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, None), | ||||
|                          [floor_hour, floor_hour+HOUR]) | ||||
|         self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, None), | ||||
|                          [floor_day, floor_day+DAY]) | ||||
|         # test min_length | ||||
|         self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, 4), | ||||
|                          [floor_hour-2*HOUR, floor_hour-HOUR, floor_hour, floor_hour+HOUR]) | ||||
|         self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, 4), | ||||
|                          [floor_day-2*DAY, floor_day-DAY, floor_day, floor_day+DAY]) | ||||
|  | ||||
| class TestMapArrays(ZulipTestCase): | ||||
|     def test_map_arrays(self) -> None: | ||||
|         a = {'desktop app 1.0': [1, 2, 3], | ||||
|              'desktop app 2.0': [10, 12, 13], | ||||
|              'desktop app 3.0': [21, 22, 23], | ||||
|              'website': [1, 2, 3], | ||||
|              'ZulipiOS': [1, 2, 3], | ||||
|              'ZulipElectron': [2, 5, 7], | ||||
|              'ZulipMobile': [1, 5, 7], | ||||
|              'ZulipPython': [1, 2, 3], | ||||
|              'API: Python': [1, 2, 3], | ||||
|              'SomethingRandom': [4, 5, 6], | ||||
|              'ZulipGitHubWebhook': [7, 7, 9], | ||||
|              'ZulipAndroid': [64, 63, 65]} | ||||
|         result = rewrite_client_arrays(a) | ||||
|         self.assertEqual(result, | ||||
|                          {'Old desktop app': [32, 36, 39], | ||||
|                           'Old iOS app': [1, 2, 3], | ||||
|                           'Desktop app': [2, 5, 7], | ||||
|                           'Mobile app': [1, 5, 7], | ||||
|                           'Website': [1, 2, 3], | ||||
|                           'Python API': [2, 4, 6], | ||||
|                           'SomethingRandom': [4, 5, 6], | ||||
|                           'GitHub webhook': [7, 7, 9], | ||||
|                           'Old Android app': [64, 63, 65]}) | ||||
| @@ -1,38 +1,35 @@ | ||||
| from django.conf import settings | ||||
| from django.conf.urls import include | ||||
| from django.urls import path | ||||
| from django.urls.resolvers import URLPattern, URLResolver | ||||
| from django.conf.urls import include, url | ||||
|  | ||||
| from analytics.views.stats import ( | ||||
|     get_chart_data, | ||||
|     get_chart_data_for_installation, | ||||
|     get_chart_data_for_realm, | ||||
|     get_chart_data_for_stream, | ||||
|     stats, | ||||
|     stats_for_installation, | ||||
|     stats_for_realm, | ||||
| ) | ||||
| from zerver.lib.rest import rest_path | ||||
| import analytics.views | ||||
| from zerver.lib.rest import rest_dispatch | ||||
|  | ||||
| i18n_urlpatterns: list[URLPattern | URLResolver] = [ | ||||
| i18n_urlpatterns = [ | ||||
|     # Server admin (user_profile.is_staff) visible stats pages | ||||
|     path("stats/realm/<realm_str>/", stats_for_realm), | ||||
|     path("stats/installation", stats_for_installation), | ||||
|     url(r'^activity$', analytics.views.get_activity, | ||||
|         name='analytics.views.get_activity'), | ||||
|     url(r'^activity/support$', analytics.views.support, | ||||
|         name='analytics.views.support'), | ||||
|     url(r'^realm_activity/(?P<realm_str>[\S]+)/$', analytics.views.get_realm_activity, | ||||
|         name='analytics.views.get_realm_activity'), | ||||
|     url(r'^user_activity/(?P<email>[\S]+)/$', analytics.views.get_user_activity, | ||||
|         name='analytics.views.get_user_activity'), | ||||
|  | ||||
|     url(r'^stats/realm/(?P<realm_str>[\S]+)/$', analytics.views.stats_for_realm, | ||||
|         name='analytics.views.stats_for_realm'), | ||||
|     url(r'^stats/installation$', analytics.views.stats_for_installation, | ||||
|         name='analytics.views.stats_for_installation'), | ||||
|     url(r'^stats/remote/(?P<remote_server_id>[\S]+)/installation$', | ||||
|         analytics.views.stats_for_remote_installation, | ||||
|         name='analytics.views.stats_for_remote_installation'), | ||||
|     url(r'^stats/remote/(?P<remote_server_id>[\S]+)/realm/(?P<remote_realm_id>[\S]+)/$', | ||||
|         analytics.views.stats_for_remote_realm, | ||||
|         name='analytics.views.stats_for_remote_realm'), | ||||
|  | ||||
|     # User-visible stats page | ||||
|     path("stats", stats, name="stats"), | ||||
|     url(r'^stats$', analytics.views.stats, | ||||
|         name='analytics.views.stats'), | ||||
| ] | ||||
|  | ||||
| if settings.ZILENCER_ENABLED: | ||||
|     from analytics.views.stats import stats_for_remote_installation, stats_for_remote_realm | ||||
|  | ||||
|     i18n_urlpatterns += [ | ||||
|         path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation), | ||||
|         path( | ||||
|             "stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/", | ||||
|             stats_for_remote_realm, | ||||
|         ), | ||||
|     ] | ||||
|  | ||||
| # These endpoints are a part of the API (V1), which uses: | ||||
| # * REST verbs | ||||
| # * Basic auth (username:password is email:apiKey) | ||||
| @@ -43,32 +40,22 @@ if settings.ZILENCER_ENABLED: | ||||
| # All of these paths are accessed by either a /json or /api prefix | ||||
| v1_api_and_json_patterns = [ | ||||
|     # get data for the graphs at /stats | ||||
|     rest_path("analytics/chart_data", GET=get_chart_data), | ||||
|     rest_path("analytics/chart_data/stream/<stream_id>", GET=get_chart_data_for_stream), | ||||
|     rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm), | ||||
|     rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation), | ||||
|     url(r'^analytics/chart_data$', rest_dispatch, | ||||
|         {'GET': 'analytics.views.get_chart_data'}), | ||||
|     url(r'^analytics/chart_data/realm/(?P<realm_str>[\S]+)$', rest_dispatch, | ||||
|         {'GET': 'analytics.views.get_chart_data_for_realm'}), | ||||
|     url(r'^analytics/chart_data/installation$', rest_dispatch, | ||||
|         {'GET': 'analytics.views.get_chart_data_for_installation'}), | ||||
|     url(r'^analytics/chart_data/remote/(?P<remote_server_id>[\S]+)/installation$', rest_dispatch, | ||||
|         {'GET': 'analytics.views.get_chart_data_for_remote_installation'}), | ||||
|     url(r'^analytics/chart_data/remote/(?P<remote_server_id>[\S]+)/realm/(?P<remote_realm_id>[\S]+)$', | ||||
|         rest_dispatch, | ||||
|         {'GET': 'analytics.views.get_chart_data_for_remote_realm'}), | ||||
| ] | ||||
|  | ||||
| if settings.ZILENCER_ENABLED: | ||||
|     from analytics.views.stats import ( | ||||
|         get_chart_data_for_remote_installation, | ||||
|         get_chart_data_for_remote_realm, | ||||
|     ) | ||||
|  | ||||
|     v1_api_and_json_patterns += [ | ||||
|         rest_path( | ||||
|             "analytics/chart_data/remote/<int:remote_server_id>/installation", | ||||
|             GET=get_chart_data_for_remote_installation, | ||||
|         ), | ||||
|         rest_path( | ||||
|             "analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>", | ||||
|             GET=get_chart_data_for_remote_realm, | ||||
|         ), | ||||
|     ] | ||||
|  | ||||
| i18n_urlpatterns += [ | ||||
|     path("api/v1/", include(v1_api_and_json_patterns)), | ||||
|     path("json/", include(v1_api_and_json_patterns)), | ||||
|     url(r'^api/v1/', include(v1_api_and_json_patterns)), | ||||
|     url(r'^json/', include(v1_api_and_json_patterns)), | ||||
| ] | ||||
|  | ||||
| urlpatterns = i18n_urlpatterns | ||||
|   | ||||
							
								
								
									
analytics/views.py (1484, Normal file): File diff suppressed because it is too large.
							| @@ -1,575 +0,0 @@ | ||||
| import logging | ||||
| from collections import defaultdict | ||||
| from datetime import datetime, timedelta, timezone | ||||
| from typing import Any, Optional, TypeAlias, TypeVar, cast | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.db.models import QuerySet | ||||
| from django.http import HttpRequest, HttpResponse, HttpResponseNotFound | ||||
| from django.shortcuts import render | ||||
| from django.utils import translation | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from django.utils.translation import gettext as _ | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS, CountStat | ||||
| from analytics.lib.time_utils import time_range | ||||
| from analytics.models import ( | ||||
|     BaseCount, | ||||
|     InstallationCount, | ||||
|     RealmCount, | ||||
|     StreamCount, | ||||
|     UserCount, | ||||
|     installation_epoch, | ||||
| ) | ||||
| from zerver.decorator import ( | ||||
|     require_non_guest_user, | ||||
|     require_server_admin, | ||||
|     require_server_admin_api, | ||||
|     to_utc_datetime, | ||||
|     zulip_login_required, | ||||
| ) | ||||
| from zerver.lib.exceptions import JsonableError | ||||
| from zerver.lib.i18n import get_and_set_request_language, get_language_translation_data | ||||
| from zerver.lib.request import REQ, has_request_variables | ||||
| from zerver.lib.response import json_success | ||||
| from zerver.lib.streams import access_stream_by_id | ||||
| from zerver.lib.timestamp import convert_to_UTC | ||||
| from zerver.lib.validator import to_non_negative_int | ||||
| from zerver.models import Client, Realm, Stream, UserProfile | ||||
| from zerver.models.realms import get_realm | ||||
|  | ||||
| if settings.ZILENCER_ENABLED: | ||||
|     from zilencer.models import RemoteInstallationCount, RemoteRealmCount, RemoteZulipServer | ||||
|  | ||||
| MAX_TIME_FOR_FULL_ANALYTICS_GENERATION = timedelta(days=1, minutes=30) | ||||
|  | ||||
|  | ||||
| def is_analytics_ready(realm: Realm) -> bool: | ||||
|     return (timezone_now() - realm.date_created) > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION | ||||
|  | ||||
|  | ||||
| def render_stats( | ||||
|     request: HttpRequest, | ||||
|     data_url_suffix: str, | ||||
|     realm: Realm | None, | ||||
|     *, | ||||
|     title: str | None = None, | ||||
|     analytics_ready: bool = True, | ||||
| ) -> HttpResponse: | ||||
|     assert request.user.is_authenticated | ||||
|  | ||||
|     if realm is not None: | ||||
|         # Same query to get guest user count as in get_seat_count in corporate/lib/stripe.py. | ||||
|         guest_users = UserProfile.objects.filter( | ||||
|             realm=realm, is_active=True, is_bot=False, role=UserProfile.ROLE_GUEST | ||||
|         ).count() | ||||
|         space_used = realm.currently_used_upload_space_bytes() | ||||
|         if not title: | ||||
|             title = realm.name or realm.string_id | ||||
|     else: | ||||
|         assert title | ||||
|         guest_users = None | ||||
|         space_used = None | ||||
|  | ||||
|     request_language = get_and_set_request_language( | ||||
|         request, | ||||
|         request.user.default_language, | ||||
|         translation.get_language_from_path(request.path_info), | ||||
|     ) | ||||
|  | ||||
|     # Sync this with stats_params_schema in base_page_params.ts. | ||||
|     page_params = dict( | ||||
|         page_type="stats", | ||||
|         data_url_suffix=data_url_suffix, | ||||
|         upload_space_used=space_used, | ||||
|         guest_users=guest_users, | ||||
|         translation_data=get_language_translation_data(request_language), | ||||
|     ) | ||||
|  | ||||
|     return render( | ||||
|         request, | ||||
|         "analytics/stats.html", | ||||
|         context=dict( | ||||
|             target_name=title, | ||||
|             page_params=page_params, | ||||
|             analytics_ready=analytics_ready, | ||||
|         ), | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @zulip_login_required | ||||
| def stats(request: HttpRequest) -> HttpResponse: | ||||
|     assert request.user.is_authenticated | ||||
|     realm = request.user.realm | ||||
|     if request.user.is_guest: | ||||
|         # TODO: Make @zulip_login_required pass the UserProfile so we | ||||
|         # can use @require_member_or_admin | ||||
|         raise JsonableError(_("Not allowed for guest users")) | ||||
|     return render_stats(request, "", realm, analytics_ready=is_analytics_ready(realm)) | ||||
|  | ||||
|  | ||||
| @require_server_admin | ||||
| @has_request_variables | ||||
| def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse: | ||||
|     try: | ||||
|         realm = get_realm(realm_str) | ||||
|     except Realm.DoesNotExist: | ||||
|         return HttpResponseNotFound() | ||||
|  | ||||
|     return render_stats( | ||||
|         request, | ||||
|         f"/realm/{realm_str}", | ||||
|         realm, | ||||
|         analytics_ready=is_analytics_ready(realm), | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @require_server_admin | ||||
| @has_request_variables | ||||
| def stats_for_remote_realm( | ||||
|     request: HttpRequest, remote_server_id: int, remote_realm_id: int | ||||
| ) -> HttpResponse: | ||||
|     assert settings.ZILENCER_ENABLED | ||||
|     server = RemoteZulipServer.objects.get(id=remote_server_id) | ||||
|     return render_stats( | ||||
|         request, | ||||
|         f"/remote/{server.id}/realm/{remote_realm_id}", | ||||
|         None, | ||||
|         title=f"Realm {remote_realm_id} on server {server.hostname}", | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @require_server_admin_api | ||||
| @has_request_variables | ||||
| def get_chart_data_for_realm( | ||||
|     request: HttpRequest, /, user_profile: UserProfile, realm_str: str, **kwargs: Any | ||||
| ) -> HttpResponse: | ||||
|     try: | ||||
|         realm = get_realm(realm_str) | ||||
|     except Realm.DoesNotExist: | ||||
|         raise JsonableError(_("Invalid organization")) | ||||
|  | ||||
|     return get_chart_data(request, user_profile, realm=realm, **kwargs) | ||||
|  | ||||
|  | ||||
| @require_non_guest_user | ||||
| @has_request_variables | ||||
| def get_chart_data_for_stream( | ||||
|     request: HttpRequest, /, user_profile: UserProfile, stream_id: int | ||||
| ) -> HttpResponse: | ||||
|     stream, ignored_sub = access_stream_by_id( | ||||
|         user_profile, | ||||
|         stream_id, | ||||
|         require_active=True, | ||||
|         allow_realm_admin=True, | ||||
|     ) | ||||
|  | ||||
|     return get_chart_data(request, user_profile, stream=stream) | ||||
|  | ||||
|  | ||||
| @require_server_admin_api | ||||
| @has_request_variables | ||||
| def get_chart_data_for_remote_realm( | ||||
|     request: HttpRequest, | ||||
|     /, | ||||
|     user_profile: UserProfile, | ||||
|     remote_server_id: int, | ||||
|     remote_realm_id: int, | ||||
|     **kwargs: Any, | ||||
| ) -> HttpResponse: | ||||
|     assert settings.ZILENCER_ENABLED | ||||
|     server = RemoteZulipServer.objects.get(id=remote_server_id) | ||||
|     return get_chart_data( | ||||
|         request, | ||||
|         user_profile, | ||||
|         server=server, | ||||
|         remote=True, | ||||
|         remote_realm_id=int(remote_realm_id), | ||||
|         **kwargs, | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @require_server_admin | ||||
| def stats_for_installation(request: HttpRequest) -> HttpResponse: | ||||
|     assert request.user.is_authenticated | ||||
|     return render_stats(request, "/installation", None, title="installation") | ||||
|  | ||||
|  | ||||
| @require_server_admin | ||||
| def stats_for_remote_installation(request: HttpRequest, remote_server_id: int) -> HttpResponse: | ||||
|     assert settings.ZILENCER_ENABLED | ||||
|     server = RemoteZulipServer.objects.get(id=remote_server_id) | ||||
|     return render_stats( | ||||
|         request, | ||||
|         f"/remote/{server.id}/installation", | ||||
|         None, | ||||
|         title=f"remote installation {server.hostname}", | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @require_server_admin_api | ||||
| @has_request_variables | ||||
| def get_chart_data_for_installation( | ||||
|     request: HttpRequest, /, user_profile: UserProfile, chart_name: str = REQ(), **kwargs: Any | ||||
| ) -> HttpResponse: | ||||
|     return get_chart_data(request, user_profile, for_installation=True, **kwargs) | ||||
|  | ||||
|  | ||||
| @require_server_admin_api | ||||
| @has_request_variables | ||||
| def get_chart_data_for_remote_installation( | ||||
|     request: HttpRequest, | ||||
|     /, | ||||
|     user_profile: UserProfile, | ||||
|     remote_server_id: int, | ||||
|     chart_name: str = REQ(), | ||||
|     **kwargs: Any, | ||||
| ) -> HttpResponse: | ||||
|     assert settings.ZILENCER_ENABLED | ||||
|     server = RemoteZulipServer.objects.get(id=remote_server_id) | ||||
|     return get_chart_data( | ||||
|         request, | ||||
|         user_profile, | ||||
|         for_installation=True, | ||||
|         remote=True, | ||||
|         server=server, | ||||
|         **kwargs, | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @require_non_guest_user | ||||
| @has_request_variables | ||||
| def get_chart_data( | ||||
|     request: HttpRequest, | ||||
|     user_profile: UserProfile, | ||||
|     chart_name: str = REQ(), | ||||
|     min_length: int | None = REQ(converter=to_non_negative_int, default=None), | ||||
|     start: datetime | None = REQ(converter=to_utc_datetime, default=None), | ||||
|     end: datetime | None = REQ(converter=to_utc_datetime, default=None), | ||||
|     # These last several parameters are only used by functions | ||||
|     # wrapping get_chart_data; the callers are responsible for | ||||
|     # parsing/validation/authorization for them. | ||||
|     realm: Realm | None = None, | ||||
|     for_installation: bool = False, | ||||
|     remote: bool = False, | ||||
|     remote_realm_id: int | None = None, | ||||
|     server: Optional["RemoteZulipServer"] = None, | ||||
|     stream: Stream | None = None, | ||||
| ) -> HttpResponse: | ||||
|     TableType: TypeAlias = ( | ||||
|         type["RemoteInstallationCount"] | ||||
|         | type[InstallationCount] | ||||
|         | type["RemoteRealmCount"] | ||||
|         | type[RealmCount] | ||||
|     ) | ||||
|     if for_installation: | ||||
|         if remote: | ||||
|             assert settings.ZILENCER_ENABLED | ||||
|             aggregate_table: TableType = RemoteInstallationCount | ||||
|             assert server is not None | ||||
|         else: | ||||
|             aggregate_table = InstallationCount | ||||
|     else: | ||||
|         if remote: | ||||
|             assert settings.ZILENCER_ENABLED | ||||
|             aggregate_table = RemoteRealmCount | ||||
|             assert server is not None | ||||
|             assert remote_realm_id is not None | ||||
|         else: | ||||
|             aggregate_table = RealmCount | ||||
|  | ||||
|     tables: ( | ||||
|         tuple[TableType] | tuple[TableType, type[UserCount]] | tuple[TableType, type[StreamCount]] | ||||
|     ) | ||||
|  | ||||
|     if chart_name == "number_of_humans": | ||||
|         stats = [ | ||||
|             COUNT_STATS["1day_actives::day"], | ||||
|             COUNT_STATS["realm_active_humans::day"], | ||||
|             COUNT_STATS["active_users_audit:is_bot:day"], | ||||
|         ] | ||||
|         tables = (aggregate_table,) | ||||
|         subgroup_to_label: dict[CountStat, dict[str | None, str]] = { | ||||
|             stats[0]: {None: "_1day"}, | ||||
|             stats[1]: {None: "_15day"}, | ||||
|             stats[2]: {"false": "all_time"}, | ||||
|         } | ||||
|         labels_sort_function = None | ||||
|         include_empty_subgroups = True | ||||
|     elif chart_name == "messages_sent_over_time": | ||||
|         stats = [COUNT_STATS["messages_sent:is_bot:hour"]] | ||||
|         tables = (aggregate_table, UserCount) | ||||
|         subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}} | ||||
|         labels_sort_function = None | ||||
|         include_empty_subgroups = True | ||||
|     elif chart_name == "messages_sent_by_message_type": | ||||
|         stats = [COUNT_STATS["messages_sent:message_type:day"]] | ||||
|         tables = (aggregate_table, UserCount) | ||||
|         subgroup_to_label = { | ||||
|             stats[0]: { | ||||
|                 "public_stream": _("Public channels"), | ||||
|                 "private_stream": _("Private channels"), | ||||
|                 "private_message": _("Direct messages"), | ||||
|                 "huddle_message": _("Group direct messages"), | ||||
|             } | ||||
|         } | ||||
|         labels_sort_function = lambda data: sort_by_totals(data["everyone"]) | ||||
|         include_empty_subgroups = True | ||||
|     elif chart_name == "messages_sent_by_client": | ||||
|         stats = [COUNT_STATS["messages_sent:client:day"]] | ||||
|         tables = (aggregate_table, UserCount) | ||||
|         # Note that the labels are further re-written by client_label_map | ||||
|         subgroup_to_label = { | ||||
|             stats[0]: {str(id): name for id, name in Client.objects.values_list("id", "name")} | ||||
|         } | ||||
|         labels_sort_function = sort_client_labels | ||||
|         include_empty_subgroups = False | ||||
|     elif chart_name == "messages_read_over_time": | ||||
|         stats = [COUNT_STATS["messages_read::hour"]] | ||||
|         tables = (aggregate_table, UserCount) | ||||
|         subgroup_to_label = {stats[0]: {None: "read"}} | ||||
|         labels_sort_function = None | ||||
|         include_empty_subgroups = True | ||||
|     elif chart_name == "messages_sent_by_stream": | ||||
|         if stream is None: | ||||
|             raise JsonableError( | ||||
|                 _("Missing channel for chart: {chart_name}").format(chart_name=chart_name) | ||||
|             ) | ||||
|         stats = [COUNT_STATS["messages_in_stream:is_bot:day"]] | ||||
|         tables = (aggregate_table, StreamCount) | ||||
|         subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}} | ||||
|         labels_sort_function = None | ||||
|         include_empty_subgroups = True | ||||
|     else: | ||||
|         raise JsonableError(_("Unknown chart name: {chart_name}").format(chart_name=chart_name)) | ||||
|  | ||||
|     # Most likely someone using our API endpoint. The /stats page does not | ||||
|     # pass a start or end in its requests. | ||||
|     if start is not None: | ||||
|         start = convert_to_UTC(start) | ||||
|     if end is not None: | ||||
|         end = convert_to_UTC(end) | ||||
|     if start is not None and end is not None and start > end: | ||||
|         raise JsonableError( | ||||
|             _("Start time is later than end time. Start: {start}, End: {end}").format( | ||||
|                 start=start, | ||||
|                 end=end, | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|     if realm is None: | ||||
|         # Note that this value is invalid for Remote tables; be | ||||
|         # careful not to access it in those code paths. | ||||
|         realm = user_profile.realm | ||||
|  | ||||
|     if remote: | ||||
|         # For remote servers, we don't have fillstate data, and thus | ||||
|         # should simply use the first and last data points for the | ||||
|         # table. | ||||
|         assert server is not None | ||||
|         assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount | ||||
|         aggregate_table_remote = cast( | ||||
|             type[RemoteInstallationCount] | type[RemoteRealmCount], aggregate_table | ||||
|         )  # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types | ||||
|         if not aggregate_table_remote.objects.filter(server=server).exists(): | ||||
|             raise JsonableError( | ||||
|                 _("No analytics data available. Please contact your server administrator.") | ||||
|             ) | ||||
|         if start is None: | ||||
|             first = ( | ||||
|                 aggregate_table_remote.objects.filter(server=server).order_by("remote_id").first() | ||||
|             ) | ||||
|             assert first is not None | ||||
|             start = first.end_time | ||||
|         if end is None: | ||||
|             last = aggregate_table_remote.objects.filter(server=server).order_by("remote_id").last() | ||||
|             assert last is not None | ||||
|             end = last.end_time | ||||
|     else: | ||||
|         # Otherwise, we can use tables on the current server to | ||||
|         # determine a nice range, and some additional validation. | ||||
|         if start is None: | ||||
|             if for_installation: | ||||
|                 start = installation_epoch() | ||||
|             else: | ||||
|                 start = realm.date_created | ||||
|         if end is None: | ||||
|             end = max( | ||||
|                 stat.last_successful_fill() or datetime.min.replace(tzinfo=timezone.utc) | ||||
|                 for stat in stats | ||||
|             ) | ||||
|  | ||||
|         if start > end and (timezone_now() - start > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION): | ||||
|             logging.warning( | ||||
|                 "User from realm %s attempted to access /stats, but the computed " | ||||
|                 "start time: %s (creation of realm or installation) is later than the computed " | ||||
|                 "end time: %s (last successful analytics update). Is the " | ||||
|                 "analytics cron job running?", | ||||
|                 realm.string_id, | ||||
|                 start, | ||||
|                 end, | ||||
|             ) | ||||
|             raise JsonableError( | ||||
|                 _("No analytics data available. Please contact your server administrator.") | ||||
|             ) | ||||
|  | ||||
|     assert len({stat.frequency for stat in stats}) == 1 | ||||
|     end_times = time_range(start, end, stats[0].frequency, min_length) | ||||
|     data: dict[str, Any] = { | ||||
|         "end_times": [int(end_time.timestamp()) for end_time in end_times], | ||||
|         "frequency": stats[0].frequency, | ||||
|     } | ||||
|  | ||||
|     aggregation_level = { | ||||
|         InstallationCount: "everyone", | ||||
|         RealmCount: "everyone", | ||||
|         UserCount: "user", | ||||
|         StreamCount: "everyone", | ||||
|     } | ||||
|     if settings.ZILENCER_ENABLED: | ||||
|         aggregation_level[RemoteInstallationCount] = "everyone" | ||||
|         aggregation_level[RemoteRealmCount] = "everyone" | ||||
|  | ||||
|     # -1 is a placeholder value, since there is no relevant filtering on InstallationCount | ||||
|     id_value = { | ||||
|         InstallationCount: -1, | ||||
|         RealmCount: realm.id, | ||||
|         UserCount: user_profile.id, | ||||
|     } | ||||
|     if stream is not None: | ||||
|         id_value[StreamCount] = stream.id | ||||
|  | ||||
|     if settings.ZILENCER_ENABLED: | ||||
|         if server is not None: | ||||
|             id_value[RemoteInstallationCount] = server.id | ||||
|         # TODO: RemoteRealmCount logic doesn't correctly handle | ||||
|         # filtering by server_id as well. | ||||
|         if remote_realm_id is not None: | ||||
|             id_value[RemoteRealmCount] = remote_realm_id | ||||
|  | ||||
|     for table in tables: | ||||
|         data[aggregation_level[table]] = {} | ||||
|         for stat in stats: | ||||
|             data[aggregation_level[table]].update( | ||||
|                 get_time_series_by_subgroup( | ||||
|                     stat, | ||||
|                     table, | ||||
|                     id_value[table], | ||||
|                     end_times, | ||||
|                     subgroup_to_label[stat], | ||||
|                     include_empty_subgroups, | ||||
|                 ) | ||||
|             ) | ||||
|  | ||||
|     if labels_sort_function is not None: | ||||
|         data["display_order"] = labels_sort_function(data) | ||||
|     else: | ||||
|         data["display_order"] = None | ||||
|     return json_success(request, data=data) | ||||
|  | ||||
|  | ||||
| def sort_by_totals(value_arrays: dict[str, list[int]]) -> list[str]: | ||||
|     totals = sorted(((sum(values), label) for label, values in value_arrays.items()), reverse=True) | ||||
|     return [label for total, label in totals] | ||||
|  | ||||
|  | ||||
| # For any given user, we want to show a fixed set of clients in the chart, | ||||
| # regardless of the time aggregation or whether we're looking at realm or | ||||
| # user data. This fixed set ideally includes the clients most important in | ||||
| # understanding the realm's traffic and the user's traffic. This function | ||||
| # tries to rank the clients so that taking the first N elements of the | ||||
| # sorted list has a reasonable chance of doing so. | ||||
| def sort_client_labels(data: dict[str, dict[str, list[int]]]) -> list[str]: | ||||
|     realm_order = sort_by_totals(data["everyone"]) | ||||
|     user_order = sort_by_totals(data["user"]) | ||||
|     label_sort_values: dict[str, float] = {label: i for i, label in enumerate(realm_order)} | ||||
|     for i, label in enumerate(user_order): | ||||
|         label_sort_values[label] = min(i - 0.1, label_sort_values.get(label, i)) | ||||
|     return [label for label, sort_value in sorted(label_sort_values.items(), key=lambda x: x[1])] | ||||
|  | ||||
|  | ||||
| CountT = TypeVar("CountT", bound=BaseCount) | ||||
|  | ||||
|  | ||||
| def table_filtered_to_id(table: type[CountT], key_id: int) -> QuerySet[CountT]: | ||||
|     if table == RealmCount: | ||||
|         return table._default_manager.filter(realm_id=key_id) | ||||
|     elif table == UserCount: | ||||
|         return table._default_manager.filter(user_id=key_id) | ||||
|     elif table == StreamCount: | ||||
|         return table._default_manager.filter(stream_id=key_id) | ||||
|     elif table == InstallationCount: | ||||
|         return table._default_manager.all() | ||||
|     elif settings.ZILENCER_ENABLED and table == RemoteInstallationCount: | ||||
|         return table._default_manager.filter(server_id=key_id) | ||||
|     elif settings.ZILENCER_ENABLED and table == RemoteRealmCount: | ||||
|         return table._default_manager.filter(realm_id=key_id) | ||||
|     else: | ||||
|         raise AssertionError(f"Unknown table: {table}") | ||||
|  | ||||
|  | ||||
| def client_label_map(name: str) -> str: | ||||
|     if name == "website": | ||||
|         return "Web app" | ||||
|     if name.startswith("desktop app"): | ||||
|         return "Old desktop app" | ||||
|     if name == "ZulipElectron": | ||||
|         return "Desktop app" | ||||
|     if name == "ZulipTerminal": | ||||
|         return "Terminal app" | ||||
|     if name == "ZulipAndroid": | ||||
|         return "Old Android app" | ||||
|     if name == "ZulipiOS": | ||||
|         return "Old iOS app" | ||||
|     if name == "ZulipMobile": | ||||
|         return "Mobile app (React Native)" | ||||
|     if name in ["ZulipFlutter", "ZulipMobile/flutter"]: | ||||
|         return "Mobile app beta (Flutter)" | ||||
|     if name in ["ZulipPython", "API: Python"]: | ||||
|         return "Python API" | ||||
|     if name.startswith("Zulip") and name.endswith("Webhook"): | ||||
|         return name[len("Zulip") : -len("Webhook")] + " webhook" | ||||
|     return name | ||||
|  | ||||
|  | ||||
| def rewrite_client_arrays(value_arrays: dict[str, list[int]]) -> dict[str, list[int]]: | ||||
|     mapped_arrays: dict[str, list[int]] = {} | ||||
|     for label, array in value_arrays.items(): | ||||
|         mapped_label = client_label_map(label) | ||||
|         if mapped_label in mapped_arrays: | ||||
|             for i in range(len(array)): | ||||
|                 mapped_arrays[mapped_label][i] += value_arrays[label][i] | ||||
|         else: | ||||
|             mapped_arrays[mapped_label] = [value_arrays[label][i] for i in range(len(array))] | ||||
|     return mapped_arrays | ||||
|  | ||||
|  | ||||
| def get_time_series_by_subgroup( | ||||
|     stat: CountStat, | ||||
|     table: type[BaseCount], | ||||
|     key_id: int, | ||||
|     end_times: list[datetime], | ||||
|     subgroup_to_label: dict[str | None, str], | ||||
|     include_empty_subgroups: bool, | ||||
| ) -> dict[str, list[int]]: | ||||
|     queryset = ( | ||||
|         table_filtered_to_id(table, key_id) | ||||
|         .filter(property=stat.property) | ||||
|         .values_list("subgroup", "end_time", "value") | ||||
|     ) | ||||
|     value_dicts: dict[str | None, dict[datetime, int]] = defaultdict(lambda: defaultdict(int)) | ||||
|     for subgroup, end_time, value in queryset: | ||||
|         value_dicts[subgroup][end_time] = value | ||||
|     value_arrays = {} | ||||
|     for subgroup, label in subgroup_to_label.items(): | ||||
|         if (subgroup in value_dicts) or include_empty_subgroups: | ||||
|             value_arrays[label] = [value_dicts[subgroup][end_time] for end_time in end_times] | ||||
|  | ||||
|     if stat == COUNT_STATS["messages_sent:client:day"]: | ||||
|         # HACK: We rewrite these arrays to collapse the Client objects | ||||
|         # with similar names into a single sum, and generally give | ||||
|         # them better names | ||||
|         return rewrite_client_arrays(value_arrays) | ||||
|     return value_arrays | ||||
| @@ -1,31 +0,0 @@ | ||||
| {generate_api_header(API_ENDPOINT_NAME)} | ||||
|  | ||||
| ## Usage examples | ||||
|  | ||||
| {start_tabs} | ||||
|  | ||||
| {generate_code_example(python)|API_ENDPOINT_NAME|example} | ||||
|  | ||||
| {generate_code_example(javascript)|API_ENDPOINT_NAME|example} | ||||
|  | ||||
| {tab|curl} | ||||
|  | ||||
| {generate_code_example(curl)|API_ENDPOINT_NAME|example} | ||||
|  | ||||
| {end_tabs} | ||||
|  | ||||
| ## Parameters | ||||
|  | ||||
| {generate_api_arguments_table|zulip.yaml|API_ENDPOINT_NAME} | ||||
|  | ||||
| {generate_parameter_description(API_ENDPOINT_NAME)} | ||||
|  | ||||
| ## Response | ||||
|  | ||||
| {generate_return_values_table|zulip.yaml|API_ENDPOINT_NAME} | ||||
|  | ||||
| {generate_response_description(API_ENDPOINT_NAME)} | ||||
|  | ||||
| #### Example response(s) | ||||
|  | ||||
| {generate_code_example|API_ENDPOINT_NAME|fixture} | ||||
| @@ -1,89 +0,0 @@ | ||||
| # API keys | ||||
|  | ||||
| An **API key** is how a bot identifies itself to Zulip. For the official | ||||
| clients, such as the Python bindings, we recommend [downloading a `zuliprc` | ||||
| file](/api/configuring-python-bindings#download-a-zuliprc-file). This file | ||||
| contains an API key and other necessary configuration values for using the | ||||
| Zulip API with a specific account on a Zulip server. | ||||
|  | ||||
| ## Get a bot's API key | ||||
|  | ||||
| {start_tabs} | ||||
|  | ||||
| {tab|desktop-web} | ||||
|  | ||||
| {settings_tab|your-bots} | ||||
|  | ||||
| 1. Click **Active bots**. | ||||
|  | ||||
| 1. Find your bot. The bot's API key is under **API KEY**. | ||||
|  | ||||
| {end_tabs} | ||||
|  | ||||
| !!! warn "" | ||||
|  | ||||
|     Anyone with a bot's API key can impersonate the bot, so be careful with it! | ||||
|  | ||||
| ## Get your API key | ||||
|  | ||||
| {start_tabs} | ||||
|  | ||||
| {tab|desktop-web} | ||||
|  | ||||
| {settings_tab|account-and-privacy} | ||||
|  | ||||
| 1. Under **API key**, click **Manage your API key**. | ||||
|  | ||||
| 1. Enter your password, and click **Get API key**. If you don't know your | ||||
|    password, click **reset it** and follow the instructions from there. | ||||
|  | ||||
| 1. Copy your API key. | ||||
|  | ||||
| {end_tabs} | ||||
|  | ||||
| !!! warn "" | ||||
|  | ||||
|     Anyone with your API key can impersonate you, so be doubly careful with it. | ||||
|  | ||||
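| Once you have an API key, Zulip API requests authenticate with HTTP basic | ||||
| auth, using the account's email address as the username and the API key as | ||||
| the password. As a short illustration (the server URL and credentials below | ||||
| are placeholders), a raw request with the `requests` library could look like: | ||||
|  | ||||
| ```python | ||||
| import requests | ||||
|  | ||||
| # Placeholders; substitute your own server, account email, and API key. | ||||
| site = "https://example.zulipchat.com" | ||||
| email = "iago@example.com" | ||||
| api_key = "0123456789abcdef0123456789abcdef" | ||||
|  | ||||
| # Zulip's API uses HTTP basic auth with email:api_key. | ||||
| response = requests.get(f"{site}/api/v1/users/me", auth=(email, api_key)) | ||||
| response.raise_for_status() | ||||
| print(response.json()) | ||||
| ``` | ||||
|  | ||||
| For real integrations, the [Python bindings](/api/configuring-python-bindings) | ||||
| handle this authentication for you. | ||||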
|  | ||||
| ## Invalidate an API key | ||||
|  | ||||
| To invalidate an existing API key, you have to generate a new key. | ||||
|  | ||||
| ### Invalidate a bot's API key | ||||
|  | ||||
| {start_tabs} | ||||
|  | ||||
| {tab|desktop-web} | ||||
|  | ||||
| {settings_tab|your-bots} | ||||
|  | ||||
| 1. Click **Active bots**. | ||||
|  | ||||
| 1. Find your bot. | ||||
|  | ||||
| 1. Under **API KEY**, click the **refresh** (<i class="fa fa-refresh"></i>) icon | ||||
|    to the right of the bot's API key. | ||||
|  | ||||
| {end_tabs} | ||||
|  | ||||
| ### Invalidate your API key | ||||
|  | ||||
| {start_tabs} | ||||
|  | ||||
| {tab|desktop-web} | ||||
|  | ||||
| {settings_tab|account-and-privacy} | ||||
|  | ||||
| 1. Under **API key**, click **Manage your API key**. | ||||
|  | ||||
| 1. Enter your password, and click **Get API key**. If you don't know your | ||||
|    password, click **reset it** and follow the instructions from there. | ||||
|  | ||||
| 1. Click **Generate new API key**. | ||||
|  | ||||
| {end_tabs} | ||||
|  | ||||
| ## Related articles | ||||
|  | ||||
| * [Configuring the Python bindings](/api/configuring-python-bindings) | ||||
										
											
File diff suppressed because it is too large.
							| @@ -1,161 +0,0 @@ | ||||
| # Configuring the Python bindings | ||||
|  | ||||
| Zulip provides a set of tools, called the | ||||
| [Python bindings](https://pypi.python.org/pypi/zulip/), that makes it | ||||
| easier to interact with its API. | ||||
| One of the most notable use cases for these bindings is bots developed | ||||
| using Zulip's [bot framework](/api/writing-bots). | ||||
|  | ||||
| To use them, you need to configure them with your identity | ||||
| (account, API key, and Zulip server URL). There are a few ways to | ||||
| do so, sketched briefly after this list: | ||||
|  | ||||
| - Using a `zuliprc` file, referenced via the `--config-file` option or | ||||
|   the `config_file` option to the `zulip.Client` constructor | ||||
|   (recommended for bots). | ||||
| - Using a `zuliprc` file in your home directory at `~/.zuliprc` | ||||
|   (recommended for your own API key). | ||||
| - Using the [environment | ||||
|   variables](https://en.wikipedia.org/wiki/Environment_variable) | ||||
|   documented below. | ||||
| - Using the `--api-key`, `--email`, and `--site` variables as command | ||||
|   line parameters. | ||||
| - Using the `api_key`, `email`, and `site` parameters to the | ||||
|   `zulip.Client` constructor. | ||||
|  | ||||
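| For illustration, here is a minimal sketch of the first and last options | ||||
| above, using the `zulip` package's `Client` constructor. The file path, | ||||
| email, API key, and server URL are placeholders, not real credentials. | ||||
|  | ||||
| ```python | ||||
| import zulip | ||||
|  | ||||
| # Recommended for bots: point the client at a downloaded zuliprc file. | ||||
| client = zulip.Client(config_file="~/path/to/zuliprc") | ||||
|  | ||||
| # Alternatively, pass the identity directly to the constructor. | ||||
| client = zulip.Client( | ||||
|     email="my-bot@example.com", | ||||
|     api_key="0123456789abcdef0123456789abcdef", | ||||
|     site="https://example.zulipchat.com", | ||||
| ) | ||||
|  | ||||
| print(client.get_profile()) | ||||
| ``` | ||||
|  | ||||
| With no explicit arguments, the bindings can also pick up `~/.zuliprc` or | ||||
| the environment variables described below. | ||||
|  | ||||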
| ## Download a `zuliprc` file | ||||
|  | ||||
| {start_tabs} | ||||
|  | ||||
| {tab|for-a-bot} | ||||
|  | ||||
| {settings_tab|your-bots} | ||||
|  | ||||
| 1. Click the **download** (<i class="fa fa-download"></i>) icon on the profile | ||||
|    card of the desired bot to download the bot's `zuliprc` file. | ||||
|  | ||||
| !!! warn "" | ||||
|  | ||||
|     Anyone with a bot's API key can impersonate the bot, so be careful with it! | ||||
|  | ||||
| {tab|for-yourself} | ||||
|  | ||||
| {settings_tab|account-and-privacy} | ||||
|  | ||||
| 1. Under **API key**, click **Manage your API key**. | ||||
|  | ||||
| 1. Enter your password, and click **Get API key**. If you don't know your | ||||
|    password, click **reset it** and follow the | ||||
|    instructions from there. | ||||
|  | ||||
| 1. Click **Download zuliprc** to download your `zuliprc` file. | ||||
|  | ||||
| 1. (optional) If you'd like your credentials to be used by default | ||||
|    when using the Zulip API on your computer, move the `zuliprc` file | ||||
|    to `~/.zuliprc` in your home directory. | ||||
|  | ||||
| !!! warn "" | ||||
|  | ||||
|     Anyone with your API key can impersonate you, so be doubly careful with it. | ||||
|  | ||||
| {end_tabs} | ||||
|  | ||||
| ## Configuration keys and environment variables | ||||
|  | ||||
| `zuliprc` is a configuration file written in the | ||||
| [INI file format](https://en.wikipedia.org/wiki/INI_file), | ||||
| which contains key-value pairs as shown in the following example: | ||||
|  | ||||
| ``` | ||||
| [api] | ||||
| key=<API key from the web interface> | ||||
| email=<your email address> | ||||
| site=<your Zulip server's URI> | ||||
| ... | ||||
| ``` | ||||
|  | ||||
| The keys you can use in this file (and their equivalent environment variables) | ||||
| can be found in the following table: | ||||
|  | ||||
| <table class="table"> | ||||
|     <thead> | ||||
|         <tr> | ||||
|             <th><code>zuliprc</code> key</th> | ||||
|             <th>Environment variable</th> | ||||
|             <th>Required</th> | ||||
|             <th>Description</th> | ||||
|         </tr> | ||||
|     </thead> | ||||
|     <tr> | ||||
|         <td><code>key</code></td> | ||||
|         <td><code>ZULIP_API_KEY</code></td> | ||||
|         <td>Yes</td> | ||||
|         <td> | ||||
|             <a href="/api/api-keys">API key</a>, which you can get through | ||||
|             Zulip's web interface. | ||||
|         </td> | ||||
|     </tr> | ||||
|     <tr> | ||||
|         <td><code>email</code></td> | ||||
|         <td><code>ZULIP_EMAIL</code></td> | ||||
|         <td>Yes</td> | ||||
|         <td> | ||||
|             The email address of the user who owns the API key mentioned | ||||
|             above. | ||||
|         </td> | ||||
|     </tr> | ||||
|     <tr> | ||||
|         <td><code>site</code></td> | ||||
|         <td><code>ZULIP_SITE</code></td> | ||||
|         <td>No</td> | ||||
|         <td> | ||||
|             URL where your Zulip server is located. | ||||
|         </td> | ||||
|     </tr> | ||||
|     <tr> | ||||
|         <td><code>client_cert_key</code></td> | ||||
|         <td><code>ZULIP_CERT_KEY</code></td> | ||||
|         <td>No</td> | ||||
|         <td> | ||||
|             Path to the SSL/TLS private key that the binding should use to | ||||
|             connect to the server. | ||||
|         </td> | ||||
|     </tr> | ||||
|     <tr> | ||||
|         <td><code>client_cert</code></td> | ||||
|         <td><code>ZULIP_CERT</code></td> | ||||
|         <td>No*</td> | ||||
|         <td> | ||||
|             The public counterpart of <code>client_cert_key</code>/ | ||||
|             <code>ZULIP_CERT_KEY</code>. <i>This setting is required if a cert | ||||
|             key has been set.</i> | ||||
|         </td> | ||||
|     </tr> | ||||
|     <tr> | ||||
|         <td><code>client_bundle</code></td> | ||||
|         <td><code>ZULIP_CERT_BUNDLE</code></td> | ||||
|         <td>No</td> | ||||
|         <td> | ||||
|             Path where the server's PEM-encoded certificate is located. CA | ||||
|             certificates are also accepted, in case those CAs have issued the | ||||
|             server's certificate. Defaults to the built-in CA bundle trusted | ||||
|             by Python. | ||||
|         </td> | ||||
|     </tr> | ||||
|     <tr> | ||||
|         <td><code>insecure</code></td> | ||||
|         <td><code>ZULIP_ALLOW_INSECURE</code></td> | ||||
|         <td>No</td> | ||||
|         <td> | ||||
|             Allows connecting to Zulip servers with an invalid SSL/TLS | ||||
|             certificate. Please note that enabling this will make the HTTPS | ||||
|             connection insecure. Defaults to <code>false</code>. | ||||
|         </td> | ||||
|     </tr> | ||||
| </table> | ||||
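
If you use the official Python bindings, the simplest way to consume these
settings is to point `zulip.Client` at your `zuliprc` file. Below is a minimal
sketch, assuming the `zulip` package from the
[installation instructions](/api/installation-instructions) is installed:

```python
import zulip

# Load key/email/site from ~/.zuliprc; pass a different path if needed.
client = zulip.Client(config_file="~/.zuliprc")

# A quick sanity check that the credentials work.
print(client.get_profile())
```

The environment variables listed in the table are an alternative way to supply
the same values, which can be convenient in containerized or CI environments.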
|  | ||||
| ## Related articles | ||||
|  | ||||
| * [Installation instructions](/api/installation-instructions) | ||||
| * [API keys](/api/api-keys) | ||||
| * [Running bots](/api/running-bots) | ||||
| * [Deploying bots](/api/deploying-bots) | ||||
| @@ -1,173 +0,0 @@ | ||||
| # Construct a narrow | ||||
|  | ||||
A **narrow** is a set of filters for Zulip messages that can be based
| on many different factors (like sender, channel, topic, search | ||||
| keywords, etc.). Narrows are used in various places in the Zulip | ||||
| API (most importantly, in the API for fetching messages). | ||||
|  | ||||
| It is simplest to explain the algorithm for encoding a search as a | ||||
| narrow using a single example. Consider the following search query | ||||
| (written as it would be entered in the Zulip web app's search box). | ||||
| It filters for messages sent to channel `announce`, not sent by | ||||
| `iago@zulip.com`, and containing the words `cool` and `sunglasses`: | ||||
|  | ||||
| ``` | ||||
| channel:announce -sender:iago@zulip.com cool sunglasses | ||||
| ``` | ||||
|  | ||||
For use in the Zulip API, this query would be JSON-encoded as a list
of simple objects, as follows:
|  | ||||
| ```json | ||||
| [ | ||||
|     { | ||||
|         "operator": "channel", | ||||
|         "operand": "announce" | ||||
|     }, | ||||
|     { | ||||
|         "operator": "sender", | ||||
|         "operand": "iago@zulip.com", | ||||
|         "negated": true | ||||
|     }, | ||||
|     { | ||||
|         "operator": "search", | ||||
|         "operand": "cool sunglasses" | ||||
|     } | ||||
| ] | ||||
| ``` | ||||
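
For instance, using the official Python bindings, this narrow could be passed
directly to the [get messages](/api/get-messages) endpoint. This is a sketch;
the anchor and message counts are arbitrary choices for illustration:

```python
import zulip

client = zulip.Client(config_file="~/.zuliprc")

# Fetch the 100 most recent messages matching the narrow above.
result = client.get_messages({
    "anchor": "newest",
    "num_before": 100,
    "num_after": 0,
    "narrow": [
        {"operator": "channel", "operand": "announce"},
        {"operator": "sender", "operand": "iago@zulip.com", "negated": True},
        {"operator": "search", "operand": "cool sunglasses"},
    ],
})
```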
|  | ||||
| The Zulip help center article on [searching for messages](/help/search-for-messages) | ||||
| documents the majority of the search/narrow options supported by the | ||||
| Zulip API. | ||||
|  | ||||
| Note that many narrows, including all that lack a `channel` or `channels` | ||||
| operator, search the current user's personal message history. See | ||||
| [searching shared history](/help/search-for-messages#searching-shared-history) | ||||
| for details. | ||||
|  | ||||
| Clients should note that the `is:unread` filter takes advantage of the | ||||
| fact that there is a database index for unread messages, which can be an | ||||
| important optimization when fetching messages in certain cases (e.g., | ||||
| when [adding the `read` flag to a user's personal | ||||
| messages](/api/update-message-flags-for-narrow)). | ||||
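
As a sketch of that pattern, using the generic `call_endpoint` helper from the
Python bindings (the anchor and batch size here are illustrative, not
recommendations):

```python
import zulip

client = zulip.Client(config_file="~/.zuliprc")

# Mark a batch of the oldest unread direct messages as read.
result = client.call_endpoint(
    url="messages/flags/narrow",
    method="POST",
    request={
        "anchor": "oldest",
        "num_before": 0,
        "num_after": 1000,
        "narrow": [
            {"operator": "is", "operand": "unread"},
            {"operator": "is", "operand": "dm"},
        ],
        "op": "add",
        "flag": "read",
    },
)
```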
|  | ||||
| ## Changes | ||||
|  | ||||
| * In Zulip 9.0 (feature level 271), support was added for a new filter | ||||
|   operator, `with`, which uses a [message ID](#message-ids) for its | ||||
|   operand, and is designed for creating permanent links to topics. | ||||
|  | ||||
| * In Zulip 9.0 (feature level 265), support was added for a new | ||||
|   `is:followed` filter, matching messages in topics that the current | ||||
|   user is [following](/help/follow-a-topic). | ||||
|  | ||||
| * In Zulip 9.0 (feature level 250), support was added for two filters | ||||
|   related to stream messages: `channel` and `channels`. The `channel` | ||||
|   operator is an alias for the `stream` operator. The `channels` | ||||
|   operator is an alias for the `streams` operator. Both `channel` and | ||||
|   `channels` return the same exact results as `stream` and `streams` | ||||
|   respectively. | ||||
|  | ||||
| * In Zulip 9.0 (feature level 249), support was added for a new filter, | ||||
|   `has:reaction`, which returns messages that have at least one [emoji | ||||
|   reaction](/help/emoji-reactions). | ||||
|  | ||||
| * In Zulip 7.0 (feature level 177), support was added for three filters | ||||
|   related to direct messages: `is:dm`, `dm` and `dm-including`. The | ||||
|   `dm` operator replaced and deprecated the `pm-with` operator. The | ||||
|   `is:dm` filter replaced and deprecated the `is:private` filter. The | ||||
|   `dm-including` operator replaced and deprecated the `group-pm-with` | ||||
|   operator. | ||||
|  | ||||
|     * The `dm-including` and `group-pm-with` operators return slightly | ||||
|       different results. For example, `dm-including:1234` returns all | ||||
|       direct messages (1-on-1 and group) that include the current user | ||||
|       and the user with the unique user ID of `1234`. On the other hand, | ||||
|       `group-pm-with:1234` returned only group direct messages that | ||||
|       included the current user and the user with the unique user ID of | ||||
|       `1234`. | ||||
|  | ||||
|     * Both `dm` and `is:dm` are aliases of `pm-with` and `is:private` | ||||
|       respectively, and return the same exact results that the | ||||
|       deprecated filters did. | ||||
|  | ||||
| ## Narrows that use IDs | ||||
|  | ||||
| ### Message IDs | ||||
|  | ||||
| The `near`, `id` and `with` operators use message IDs for their | ||||
| operands. The `near` and `id` operators are documented in the help | ||||
| center [here](/help/search-for-messages#search-by-message-id). | ||||
|  | ||||
The `with` operator is designed for permanent links to topics, which
should continue to work when the topic is
[moved](/help/move-content-to-another-topic) or
[resolved](/help/resolve-a-topic). If the message with the specified ID
exists and can be accessed by the user, the narrow is interpreted using
`channel`/`topic`/`dm` operators corresponding to the conversation that
currently contains that message, replacing any such filters included in
the narrow.
|  | ||||
| * `with:12345`: Search for the conversation that contains the message | ||||
|   with ID `12345`. | ||||
| * `near:12345`: Search messages around the message with ID `12345`. | ||||
| * `id:12345`: Search for only the message with ID `12345`. | ||||
|  | ||||
| The message ID operand for the `with` and `id` operators may be encoded | ||||
| as either a number or a string. The message ID operand for the `near` | ||||
| operator must be encoded as a string. | ||||
|  | ||||
| **Changes**: Prior to Zulip 8.0 (feature level 194), the message ID | ||||
| operand for the `id` operator needed to be encoded as a string. | ||||
|  | ||||
For example, the following narrow selects only the message with ID
`12345`, encoding the operand as a number:
| ```json | ||||
| [ | ||||
|     { | ||||
|         "operator": "id", | ||||
|         "operand": 12345 | ||||
|     } | ||||
| ] | ||||
| ``` | ||||
|  | ||||
| ### Channel and user IDs | ||||
|  | ||||
There are a few additional narrow/search options (new in Zulip 2.1)
that use channel IDs or user IDs. These are not documented in the help
center because they are primarily useful to API clients:
|  | ||||
| * `channel:1234`: Search messages sent to the channel with ID `1234`. | ||||
| * `sender:1234`: Search messages sent by user ID `1234`. | ||||
| * `dm:1234`: Search the direct message conversation between | ||||
|   you and user ID `1234`. | ||||
| * `dm:1234,5678`: Search the direct message conversation between | ||||
|   you, user ID `1234`, and user ID `5678`. | ||||
| * `dm-including:1234`: Search all direct messages (1-on-1 and group) | ||||
|   that include you and user ID `1234`. | ||||
|  | ||||
| !!! tip "" | ||||
|  | ||||
|     A user ID can be found by [viewing a user's profile][view-profile] | ||||
|     in the web or desktop apps. A channel ID can be found when [browsing | ||||
|     channels][browse-channels] in the web or desktop apps. | ||||
|  | ||||
| The operands for these search options must be encoded either as an | ||||
| integer ID or a JSON list of integer IDs. For example, to query | ||||
messages sent by user 1234 to a direct message thread with yourself,
| user 1234, and user 5678, the correct JSON-encoded query is: | ||||
|  | ||||
| ```json | ||||
| [ | ||||
|     { | ||||
|         "operator": "dm", | ||||
|         "operand": [1234, 5678] | ||||
|     }, | ||||
|     { | ||||
|         "operator": "sender", | ||||
|         "operand": 1234 | ||||
|     } | ||||
| ] | ||||
| ``` | ||||
|  | ||||
| [view-profile]: /help/view-someones-profile | ||||
| [browse-channels]: /help/introduction-to-channels#browse-and-subscribe-to-channels | ||||
| @@ -1,49 +0,0 @@ | ||||
| {generate_api_header(/scheduled_messages:post)} | ||||
|  | ||||
| ## Usage examples | ||||
|  | ||||
| {start_tabs} | ||||
|  | ||||
| {generate_code_example(python)|/scheduled_messages:post|example} | ||||
|  | ||||
| {generate_code_example(javascript)|/scheduled_messages:post|example} | ||||
|  | ||||
| {tab|curl} | ||||
|  | ||||
| ``` curl | ||||
| # Create a scheduled channel message | ||||
| curl -X POST {{ api_url }}/v1/scheduled_messages \ | ||||
|     -u BOT_EMAIL_ADDRESS:BOT_API_KEY \ | ||||
|     --data-urlencode type=stream \ | ||||
|     --data-urlencode to=9 \ | ||||
|     --data-urlencode topic=Hello \ | ||||
|     --data-urlencode 'content=Nice to meet everyone!' \ | ||||
|     --data-urlencode scheduled_delivery_timestamp=3165826990 | ||||
|  | ||||
| # Create a scheduled direct message | ||||
curl -X POST {{ api_url }}/v1/scheduled_messages \
|     -u BOT_EMAIL_ADDRESS:BOT_API_KEY \ | ||||
|     --data-urlencode type=direct \ | ||||
|     --data-urlencode 'to=[9, 10]' \ | ||||
|     --data-urlencode 'content=Can we meet on Monday?' \ | ||||
|     --data-urlencode scheduled_delivery_timestamp=3165826990 | ||||
|  | ||||
| ``` | ||||
|  | ||||
| {end_tabs} | ||||
|  | ||||
| ## Parameters | ||||
|  | ||||
| {generate_api_arguments_table|zulip.yaml|/scheduled_messages:post} | ||||
|  | ||||
| {generate_parameter_description(/scheduled_messages:post)} | ||||
|  | ||||
| ## Response | ||||
|  | ||||
| {generate_return_values_table|zulip.yaml|/scheduled_messages:post} | ||||
|  | ||||
| {generate_response_description(/scheduled_messages:post)} | ||||
|  | ||||
| #### Example response(s) | ||||
|  | ||||
| {generate_code_example|/scheduled_messages:post|fixture} | ||||
| @@ -1,6 +0,0 @@ | ||||
| # Create a channel | ||||
|  | ||||
| You can create a channel using Zulip's REST API by submitting a | ||||
| [subscribe](/api/subscribe) request with a channel name that | ||||
| doesn't yet exist and passing appropriate parameters to define | ||||
| the initial configuration of the new channel. | ||||
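
For example, with the official Python bindings, a sketch of creating a channel
this way might look like the following (the channel name and description are
hypothetical):

```python
import zulip

client = zulip.Client(config_file="~/.zuliprc")

# Subscribing to a channel name that doesn't exist yet creates that channel.
result = client.add_subscriptions(
    streams=[
        {
            "name": "automated reports",
            "description": "Daily reports posted by our tooling",
        }
    ],
)
```

Additional parameters documented on the [subscribe](/api/subscribe) page (such
as `invite_only`) control the initial configuration of the new channel.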
| @@ -1,90 +0,0 @@ | ||||
| # Group-setting values | ||||
|  | ||||
| Settings defining permissions in Zulip are increasingly represented | ||||
| using [user groups](/help/user-groups), which offer much more flexible | ||||
| configuration than the older [roles](/api/roles-and-permissions) system. | ||||
|  | ||||
| !!! warn "" | ||||
|  | ||||
    This API feature is under development. Currently, only values that
    correspond to a single named user group are permitted in
    production environments, until the web application UI supports
    displaying more complex values correctly.
|  | ||||
| In the API, these settings are represented using a **group-setting | ||||
| value**, which can take two forms: | ||||
|  | ||||
| - An integer user group ID, which can be either a named user group | ||||
|   visible in the UI or a [role-based system group](#system-groups). | ||||
| - An object with fields `direct_member_ids` containing a list of | ||||
|   integer user IDs and `direct_subgroup_ids` containing a list of | ||||
|   integer group IDs. The setting's value is the union of the | ||||
|   identified collection of users and groups. | ||||
|  | ||||
Group-setting values in the object form function very much like a
formal user group object, without the naming overhead and UI clutter
of creating a visible user group just to store the value of a single
setting.
|  | ||||
The server will canonicalize an object whose `direct_member_ids` is
empty and whose `direct_subgroup_ids` contains exactly one group ID to
the integer format.
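
For example (with a hypothetical group ID of 16), the following two values are
equivalent, and the server stores the second, canonical form:

```python
# Object form naming only a single subgroup...
setting_value = {"direct_member_ids": [], "direct_subgroup_ids": [16]}

# ...is canonicalized by the server to the bare integer group ID.
canonical_value = 16
```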
|  | ||||
| ## System groups | ||||
|  | ||||
| The Zulip server maintains a collection of system groups that | ||||
| correspond to the users with a given role; this makes it convenient to | ||||
| store concepts like "all administrators" in a group-setting | ||||
| value. These use a special naming convention and can be recognized by | ||||
| the `is_system_group` property on their group object. | ||||
|  | ||||
| The following system groups are maintained by the Zulip server: | ||||
|  | ||||
| - `role:internet`: Everyone on the Internet has this permission; this | ||||
|   is used to configure the [public access | ||||
|   option](/help/public-access-option). | ||||
| - `role:everyone`: All users, including guests. | ||||
| - `role:members`: All users, excluding guests. | ||||
| - `role:fullmembers`: All [full | ||||
|   members](https://zulip.com/api/roles-and-permissions#determining-if-a-user-is-a-full-member) | ||||
|   of the organization. | ||||
| - `role:moderators`: All users with at least the moderator role. | ||||
| - `role:administrators`: All users with at least the administrator | ||||
|   role. | ||||
| - `role:owners`: All users with the owner role. | ||||
| - `role:nobody`: The formal empty group. Used in the API to represent | ||||
|   disabling a feature. | ||||
|  | ||||
| Client UI for setting a permission is encouraged to display system | ||||
| groups using their description, rather than using their names, which | ||||
| are chosen to be unique and clear in the API. | ||||
|  | ||||
| System groups should generally not be displayed in UI for | ||||
| administering an organization's user groups, since they are not | ||||
| directly mutable. | ||||
|  | ||||
| ## Updating group-setting values | ||||
|  | ||||
| The Zulip API uses a special format for modifying an existing setting | ||||
| using a group-setting value. | ||||
|  | ||||
| A **group-setting update** is an object with a `new` field and an | ||||
| optional `old` field, each containing a group-setting value. The | ||||
| setting's value will be set to the membership expressed by the `new` | ||||
| field. | ||||
|  | ||||
| The `old` field expresses the client's understanding of the current | ||||
| value of the setting. If the `old` field is present and does not match | ||||
| the actual current value of the setting, then the request will fail | ||||
| with error code `EXPECTATION_MISMATCH` and no changes will be applied. | ||||
|  | ||||
| When a user edits the setting in a UI, the resulting API request | ||||
should generally include the `old` field, giving the value
| the list had when the user started editing. This accurately expresses | ||||
| the user's intent, and if two users edit the same list around the | ||||
| same time, it prevents a situation where the second change | ||||
| accidentally reverts the first one without either user noticing. | ||||
|  | ||||
| Omitting `old` is appropriate where the intent really is a new complete | ||||
| list rather than an edit, for example in an integration that syncs the | ||||
| list from an external source of truth. | ||||
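
As an illustration (all IDs here are hypothetical), a group-setting update that
changes a setting from "members of group 16" to "members of group 16, plus
user 25" could look like this:

```python
group_setting_update = {
    # The desired new value: the union of group 16's members and user 25.
    "new": {"direct_member_ids": [25], "direct_subgroup_ids": [16]},
    # The value the client saw when the user started editing; the request
    # fails with EXPECTATION_MISMATCH if this no longer matches.
    "old": 16,
}
```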
| @@ -1,80 +0,0 @@ | ||||
| # HTTP headers | ||||
|  | ||||
| This page documents the HTTP headers used by the Zulip API. | ||||
|  | ||||
Most importantly, API clients authenticate to the server using
HTTP Basic authentication. If you're using the official [Python or
JavaScript bindings](/api/installation-instructions), this is taken
care of when you configure those bindings.
|  | ||||
| Otherwise, see the `curl` example on each endpoint's documentation | ||||
| page, which details the request format. | ||||
|  | ||||
| Documented below are additional HTTP headers and header conventions | ||||
| generally used by Zulip: | ||||
|  | ||||
| ## The `User-Agent` header | ||||
|  | ||||
| Clients are not required to pass a `User-Agent` HTTP header, but we | ||||
| highly recommend doing so when writing an integration. It's easy to do | ||||
| and it can help save time when debugging issues related to an API | ||||
| client. | ||||
|  | ||||
| If provided, the Zulip server will parse the `User-Agent` HTTP header | ||||
| in order to identify specific clients and integrations. This | ||||
| information is used by the server for logging, [usage | ||||
| statistics](/help/analytics), and on rare occasions, for | ||||
| backwards-compatibility logic to preserve support for older versions | ||||
| of official clients. | ||||
|  | ||||
| Official Zulip clients and integrations use a `User-Agent` that starts | ||||
| with something like `ZulipMobile/20.0.103 `, encoding the name of the | ||||
application and its version.
|  | ||||
| Zulip's official API bindings have reasonable defaults for | ||||
| `User-Agent`. For example, the official Zulip Python bindings have a | ||||
| default `User-Agent` starting with `ZulipPython/{version}`, where | ||||
| `version` is the version of the library. | ||||
|  | ||||
| You can give your bot/integration its own name by passing the `client` | ||||
| parameter when initializing the Python bindings. For example, the | ||||
| official Zulip Nagios integration is initialized like this: | ||||
|  | ||||
| ``` python | ||||
| client = zulip.Client( | ||||
|     config_file=opts.config, client=f"ZulipNagios/{VERSION}" | ||||
| ) | ||||
| ``` | ||||
|  | ||||
| If you are working on an integration that you plan to share outside | ||||
| your organization, you can get help picking a good name in | ||||
| `#integrations` in the [Zulip development | ||||
| community](https://zulip.com/development-community/). | ||||
|  | ||||
| ## Rate-limiting response headers | ||||
|  | ||||
| To help clients avoid exceeding rate limits, Zulip sets the following | ||||
| HTTP headers in all API responses: | ||||
|  | ||||
| * `X-RateLimit-Remaining`: The number of additional requests of this | ||||
|   type that the client can send before exceeding its limit. | ||||
| * `X-RateLimit-Limit`: The limit that would be applicable to a client | ||||
|   that had not made any recent requests of this type. This is useful | ||||
|   for designing a client's burst behavior so as to avoid ever reaching | ||||
|   a rate limit. | ||||
| * `X-RateLimit-Reset`: The time at which the client will no longer | ||||
|   have any rate limits applied to it (and thus could do a burst of | ||||
|   `X-RateLimit-Limit` requests). | ||||
|  | ||||
| [Zulip's rate limiting rules are configurable][rate-limiting-rules], | ||||
and can vary by server and over time. The default configuration
currently includes the following limits:

* Every user is limited to 200 total API requests per minute.
* Authentication/login attempts are subject to separate, much lower limits.
|  | ||||
| When the Zulip server has configured multiple rate limits that apply | ||||
| to a given request, the values returned will be for the strictest | ||||
| limit. | ||||
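
For example, a client might inspect these headers and back off before hitting
the limit. The sketch below assumes `X-RateLimit-Reset` is a Unix timestamp in
seconds and uses the `requests` library:

```python
import time

import requests


def zulip_get(url: str, email: str, api_key: str) -> requests.Response:
    response = requests.get(url, auth=(email, api_key))
    # If this request type is nearly exhausted, sleep until the window resets.
    if int(response.headers.get("X-RateLimit-Remaining", "1")) <= 1:
        reset_at = float(response.headers.get("X-RateLimit-Reset", "0"))
        time.sleep(max(0.0, reset_at - time.time()))
    return response
```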
|  | ||||
| [rate-limiting-rules]: https://zulip.readthedocs.io/en/latest/production/security-model.html#rate-limiting | ||||
| @@ -1,136 +0,0 @@ | ||||
| #### Messages | ||||
|  | ||||
| * [Send a message](/api/send-message) | ||||
| * [Upload a file](/api/upload-file) | ||||
| * [Edit a message](/api/update-message) | ||||
| * [Delete a message](/api/delete-message) | ||||
| * [Get messages](/api/get-messages) | ||||
| * [Construct a narrow](/api/construct-narrow) | ||||
| * [Add an emoji reaction](/api/add-reaction) | ||||
| * [Remove an emoji reaction](/api/remove-reaction) | ||||
| * [Render a message](/api/render-message) | ||||
| * [Fetch a single message](/api/get-message) | ||||
| * [Check if messages match a narrow](/api/check-messages-match-narrow) | ||||
| * [Get a message's edit history](/api/get-message-history) | ||||
| * [Update personal message flags](/api/update-message-flags) | ||||
| * [Update personal message flags for narrow](/api/update-message-flags-for-narrow) | ||||
| * [Mark all messages as read](/api/mark-all-as-read) | ||||
| * [Mark messages in a channel as read](/api/mark-stream-as-read) | ||||
| * [Mark messages in a topic as read](/api/mark-topic-as-read) | ||||
| * [Get a message's read receipts](/api/get-read-receipts) | ||||
|  | ||||
| #### Scheduled messages | ||||
|  | ||||
| * [Get scheduled messages](/api/get-scheduled-messages) | ||||
| * [Create a scheduled message](/api/create-scheduled-message) | ||||
| * [Edit a scheduled message](/api/update-scheduled-message) | ||||
| * [Delete a scheduled message](/api/delete-scheduled-message) | ||||
|  | ||||
| #### Drafts | ||||
|  | ||||
| * [Get drafts](/api/get-drafts) | ||||
| * [Create drafts](/api/create-drafts) | ||||
| * [Edit a draft](/api/edit-draft) | ||||
| * [Delete a draft](/api/delete-draft) | ||||
|  | ||||
| #### Channels | ||||
|  | ||||
| * [Get subscribed channels](/api/get-subscriptions) | ||||
| * [Subscribe to a channel](/api/subscribe) | ||||
| * [Unsubscribe from a channel](/api/unsubscribe) | ||||
| * [Get subscription status](/api/get-subscription-status) | ||||
| * [Get channel subscribers](/api/get-subscribers) | ||||
| * [Update subscription settings](/api/update-subscription-settings) | ||||
| * [Get all channels](/api/get-streams) | ||||
| * [Get a channel by ID](/api/get-stream-by-id) | ||||
| * [Get channel ID](/api/get-stream-id) | ||||
| * [Create a channel](/api/create-stream) | ||||
| * [Update a channel](/api/update-stream) | ||||
| * [Archive a channel](/api/archive-stream) | ||||
| * [Get channel's email address](/api/get-stream-email-address) | ||||
| * [Get topics in a channel](/api/get-stream-topics) | ||||
| * [Topic muting](/api/mute-topic) | ||||
| * [Update personal preferences for a topic](/api/update-user-topic) | ||||
| * [Delete a topic](/api/delete-topic) | ||||
| * [Add a default channel](/api/add-default-stream) | ||||
| * [Remove a default channel](/api/remove-default-stream) | ||||
|  | ||||
| #### Users | ||||
|  | ||||
| * [Get a user](/api/get-user) | ||||
| * [Get a user by email](/api/get-user-by-email) | ||||
| * [Get own user](/api/get-own-user) | ||||
| * [Get all users](/api/get-users) | ||||
| * [Create a user](/api/create-user) | ||||
| * [Update a user](/api/update-user) | ||||
| * [Deactivate a user](/api/deactivate-user) | ||||
| * [Deactivate own user](/api/deactivate-own-user) | ||||
| * [Reactivate a user](/api/reactivate-user) | ||||
| * [Get a user's status](/api/get-user-status) | ||||
| * [Update your status](/api/update-status) | ||||
| * [Set "typing" status](/api/set-typing-status) | ||||
| * [Get a user's presence](/api/get-user-presence) | ||||
| * [Get presence of all users](/api/get-presence) | ||||
| * [Update your presence](/api/update-presence) | ||||
| * [Get attachments](/api/get-attachments) | ||||
| * [Delete an attachment](/api/remove-attachment) | ||||
| * [Update settings](/api/update-settings) | ||||
| * [Get user groups](/api/get-user-groups) | ||||
| * [Create a user group](/api/create-user-group) | ||||
| * [Update a user group](/api/update-user-group) | ||||
| * [Delete a user group](/api/remove-user-group) | ||||
| * [Update user group members](/api/update-user-group-members) | ||||
| * [Update subgroups of a user group](/api/update-user-group-subgroups) | ||||
| * [Get user group membership status](/api/get-is-user-group-member) | ||||
| * [Get user group members](/api/get-user-group-members) | ||||
| * [Get subgroups of a user group](/api/get-user-group-subgroups) | ||||
| * [Mute a user](/api/mute-user) | ||||
| * [Unmute a user](/api/unmute-user) | ||||
| * [Get all alert words](/api/get-alert-words) | ||||
| * [Add alert words](/api/add-alert-words) | ||||
| * [Remove alert words](/api/remove-alert-words) | ||||
|  | ||||
| #### Invitations | ||||
|  | ||||
| * [Get all invitations](/api/get-invites) | ||||
| * [Send invitations](/api/send-invites) | ||||
| * [Create a reusable invitation link](/api/create-invite-link) | ||||
| * [Resend an email invitation](/api/resend-email-invite) | ||||
| * [Revoke an email invitation](/api/revoke-email-invite) | ||||
| * [Revoke a reusable invitation link](/api/revoke-invite-link) | ||||
|  | ||||
| #### Server & organizations | ||||
|  | ||||
| * [Get server settings](/api/get-server-settings) | ||||
| * [Get linkifiers](/api/get-linkifiers) | ||||
| * [Add a linkifier](/api/add-linkifier) | ||||
| * [Update a linkifier](/api/update-linkifier) | ||||
| * [Remove a linkifier](/api/remove-linkifier) | ||||
| * [Reorder linkifiers](/api/reorder-linkifiers) | ||||
| * [Add a code playground](/api/add-code-playground) | ||||
| * [Remove a code playground](/api/remove-code-playground) | ||||
| * [Get all custom emoji](/api/get-custom-emoji) | ||||
| * [Upload custom emoji](/api/upload-custom-emoji) | ||||
| * [Deactivate custom emoji](/api/deactivate-custom-emoji) | ||||
| * [Get all custom profile fields](/api/get-custom-profile-fields) | ||||
| * [Reorder custom profile fields](/api/reorder-custom-profile-fields) | ||||
| * [Create a custom profile field](/api/create-custom-profile-field) | ||||
| * [Update realm-level defaults of user settings](/api/update-realm-user-settings-defaults) | ||||
|  | ||||
| #### Real-time events | ||||
|  | ||||
| * [Real time events API](/api/real-time-events) | ||||
| * [Register an event queue](/api/register-queue) | ||||
| * [Get events from an event queue](/api/get-events) | ||||
| * [Delete an event queue](/api/delete-queue) | ||||
|  | ||||
| #### Specialty endpoints | ||||
|  | ||||
| * [Fetch an API key (production)](/api/fetch-api-key) | ||||
| * [Fetch an API key (development only)](/api/dev-fetch-api-key) | ||||
| * [Send a test notification to mobile device(s)](/api/test-notify) | ||||
| * [Add an APNs device token](/api/add-apns-token) | ||||
| * [Remove an APNs device token](/api/remove-apns-token) | ||||
| * [Add an FCM registration token](/api/add-fcm-token) | ||||
| * [Remove an FCM registration token](/api/remove-fcm-token) | ||||
| * [Create BigBlueButton video call](/api/create-big-blue-button-video-call) | ||||
| @@ -1,223 +0,0 @@ | ||||
| # Incoming webhook integrations | ||||
|  | ||||
| An incoming webhook allows a third-party service to push data to Zulip when | ||||
| something happens. There are several ways to set up an incoming webhook in | ||||
| Zulip: | ||||
|  | ||||
| * Use our [REST API](/api/rest) endpoint for [sending | ||||
|   messages](/api/send-message).  This works great for internal tools | ||||
|   or cases where the third-party tool wants to control the formatting | ||||
|   of the messages in Zulip. | ||||
| * Use one of our supported [integration | ||||
|   frameworks](/integrations/meta-integration), such as the | ||||
|   [Slack-compatible incoming webhook](/integrations/doc/slack_incoming), | ||||
|   [Zapier integration](/integrations/doc/zapier), or | ||||
|   [IFTTT integration](/integrations/doc/ifttt). | ||||
* Implement an incoming webhook integration (detailed on this page),
|   where all the logic for formatting the Zulip messages lives in the | ||||
|   Zulip server.  This is how most of [Zulip's official | ||||
|   integrations](/integrations/) work, because they enable Zulip to | ||||
|   support third-party services that just have an "outgoing webhook" | ||||
|   feature (without the third party needing to do any work specific to | ||||
|   Zulip). | ||||
|  | ||||
| In an incoming webhook integration, the third-party service's | ||||
| "outgoing webhook" feature sends an `HTTP POST` to a special URL when | ||||
| it has something for you, and then the Zulip "incoming webhook" | ||||
| integration handles that incoming data to format and send a message in | ||||
| Zulip. | ||||
|  | ||||
| New official Zulip webhook integrations can take just a few hours to | ||||
| write, including tests and documentation, if you use the right | ||||
| process. | ||||
|  | ||||
| ## Quick guide | ||||
|  | ||||
| * Set up the | ||||
|   [Zulip development environment](https://zulip.readthedocs.io/en/latest/development/overview.html). | ||||
|  | ||||
| * Use [Zulip's JSON integration](/integrations/doc/json), | ||||
|   <https://webhook.site/>, or a similar site to capture an example | ||||
|   webhook payload from the third-party service. Create a | ||||
|   `zerver/webhooks/<mywebhook>/fixtures/` directory, and add the | ||||
|   captured JSON payload as a test fixture. | ||||
|  | ||||
| * Create an `Integration` object, and add it to the `WEBHOOK_INTEGRATIONS` | ||||
|   list in `zerver/lib/integrations.py`. Search for `WebhookIntegration` in that | ||||
|   file to find an existing one to copy. | ||||
|  | ||||
| * Write a draft webhook handler in `zerver/webhooks/<mywebhook>/view.py`. There | ||||
|   are a lot of examples in the `zerver/webhooks/` directory that you can copy. | ||||
|   We recommend templating from a short one, like `zendesk`. | ||||
|  | ||||
| * Write a test for your fixture in `zerver/webhooks/<mywebhook>/tests.py`. | ||||
|   Run the test for your integration like this: | ||||
|  | ||||
|     ``` | ||||
|     tools/test-backend zerver/webhooks/<mywebhook>/ | ||||
|     ``` | ||||
|  | ||||
|     Iterate on debugging the test and webhooks handler until it all | ||||
|     works. | ||||
|  | ||||
| * Capture payloads for the other common types of `POST`s the third-party | ||||
|   service will make, and add tests for them; usually this part of the | ||||
|   process is pretty fast. | ||||
|  | ||||
* Document the integration in `zerver/webhooks/<mywebhook>/doc.md` (required for
|   getting it merged into Zulip). You can use existing documentation, like | ||||
|   [this one](https://raw.githubusercontent.com/zulip/zulip/main/zerver/webhooks/github/doc.md), | ||||
|   as a template. This should not take more than 15 minutes, even if you don't speak English | ||||
|   as a first language (we'll clean up the text before merging). | ||||
|  | ||||
| ## Hello world walkthrough | ||||
|  | ||||
| Check out the [detailed walkthrough](incoming-webhooks-walkthrough) for step-by-step | ||||
| instructions. | ||||
|  | ||||
| ## Checklist | ||||
|  | ||||
| ### Files that need to be created | ||||
|  | ||||
| Select a name for your incoming webhook and use it consistently. The examples | ||||
| below are for a webhook named `MyWebHook`. | ||||
|  | ||||
| * `zerver/webhooks/mywebhook/__init__.py`: Empty file that is an obligatory | ||||
   part of every Python package.  Remember to `git add` it.
| * `zerver/webhooks/mywebhook/view.py`: The main webhook integration function, | ||||
|   called `api_mywebhook_webhook`, along with any necessary helper functions. | ||||
| * `zerver/webhooks/mywebhook/fixtures/message_type.json`: Sample JSON payload data | ||||
|   used by tests. Add one fixture file per type of message supported by your | ||||
|   integration. | ||||
| * `zerver/webhooks/mywebhook/tests.py`: Tests for your webhook. | ||||
| * `zerver/webhooks/mywebhook/doc.md`: End-user documentation explaining | ||||
|   how to add the integration. | ||||
| * `static/images/integrations/logos/mywebhook.svg`: A square logo for the | ||||
|   platform/server/product you are integrating. Used on the documentation | ||||
|   pages as well as the sender's avatar for messages sent by the integration. | ||||
| * `static/images/integrations/mywebhook/001.png`: A screenshot of a message | ||||
|   sent by the integration, used on the documentation page. This can be | ||||
|   generated by running `tools/screenshots/generate-integration-docs-screenshot --integration mywebhook`. | ||||
* `static/images/integrations/bot_avatars/mywebhook.png`: A square logo for the
  platform/server/product you are integrating, used as the avatar when
  generating screenshots. This can be generated automatically from
|   `static/images/integrations/logos/mywebhook.svg` by running | ||||
|   `tools/setup/generate_integration_bots_avatars.py`. | ||||
|  | ||||
| ### Files that need to be updated | ||||
|  | ||||
| * `zerver/lib/integrations.py`: Add your integration to | ||||
|   `WEBHOOK_INTEGRATIONS`. This will automatically register a | ||||
|   URL for the incoming webhook of the form `api/v1/external/mywebhook` and | ||||
|   associate it with the function called `api_mywebhook_webhook` in | ||||
|   `zerver/webhooks/mywebhook/view.py`. Also add your integration to | ||||
|   `DOC_SCREENSHOT_CONFIG`. This will allow you to automatically generate | ||||
|   a screenshot for the documentation by running | ||||
|   `tools/screenshots/generate-integration-docs-screenshot --integration mywebhook`. | ||||
|  | ||||
## Common helpers
|  | ||||
| * If your integration will receive a test webhook payload, you can use | ||||
|   `get_setup_webhook_message` to create our standard message for test payloads. | ||||
|   You can import this from `zerver/lib/webhooks/common.py`, and it will generate | ||||
|   a message like this: "GitHub webhook is successfully configured! 🎉" | ||||
|  | ||||
| ## General advice | ||||
|  | ||||
| * Consider using our Zulip markup to make the output from your | ||||
|   integration especially attractive or useful (e.g., emoji, Markdown | ||||
|   emphasis, or @-mentions). | ||||
|  | ||||
| * Use topics effectively to ensure sequential messages about the same | ||||
|   thing are threaded together; this makes for much better consumption | ||||
|   by users.  E.g., for a bug tracker integration, put the bug number in | ||||
|   the topic for all messages; for an integration like Nagios, put the | ||||
|   service in the topic. | ||||
|  | ||||
| * Integrations that don't match a team's workflow can often be | ||||
|   uselessly spammy.  Give careful thought to providing options for | ||||
|   triggering Zulip messages only for certain message types, certain | ||||
|   projects, or sending different messages to different channels/topics, | ||||
|   to make it easy for teams to configure the integration to support | ||||
|   their workflow. | ||||
|  | ||||
| * Consistently capitalize the name of the integration in the | ||||
|   documentation and the Client name the way the vendor does.  It's OK | ||||
|   to use all-lower-case in the implementation. | ||||
|  | ||||
| * Sometimes it can be helpful to contact the vendor if it appears they | ||||
|   don't have an API or webhook we can use; sometimes the right API | ||||
|   is just not properly documented. | ||||
|  | ||||
| * A helpful tool for testing your integration is | ||||
|   [UltraHook](http://www.ultrahook.com/), which allows you to receive webhook | ||||
|   calls via your local Zulip development environment. This enables you to do end-to-end | ||||
|   testing with live data from the service you're integrating and can help you | ||||
|   spot why something isn't working or if the service is using custom HTTP | ||||
|   headers. | ||||
|  | ||||
| ## URL specification | ||||
|  | ||||
| The base URL for an incoming webhook integration bot, where | ||||
| `INTEGRATION_NAME` is the name of the specific webhook integration and | ||||
| `API_KEY` is the API key of the bot created by the user for the | ||||
| integration, is: | ||||
|  | ||||
| ``` | ||||
| {{ api_url }}/v1/external/INTEGRATION_NAME?api_key=API_KEY | ||||
| ``` | ||||
|  | ||||
| The list of existing webhook integrations can be found by browsing the | ||||
| [Integrations documentation](/integrations/) or in | ||||
| `zerver/lib/integrations.py` at `WEBHOOK_INTEGRATIONS`. | ||||
|  | ||||
| Parameters accepted in the URL include: | ||||
|  | ||||
| ### api_key *(required)* | ||||
|  | ||||
| The API key of the bot created by the user for the integration. To get a | ||||
| bot's API key, see the [API keys](/api/api-keys) documentation. | ||||
|  | ||||
| ### stream | ||||
|  | ||||
| The channel for the integration to send notifications to. Can be either | ||||
the channel ID or the [URL-encoded][url-encoder] channel name. By default,
| the integration will send direct messages to the bot's owner. | ||||
|  | ||||
| !!! tip "" | ||||
|  | ||||
|     A channel ID can be found when [browsing channels][browse-channels] | ||||
|     in the web or desktop apps. | ||||
|  | ||||
| ### topic | ||||
|  | ||||
| The topic in the specified channel for the integration to send | ||||
| notifications to. The topic should also be [URL-encoded][url-encoder]. | ||||
By default, the integration will use its own default topic for channel
messages.
|  | ||||
| ### only_events, exclude_events | ||||
|  | ||||
| Some incoming webhook integrations support these parameters to filter | ||||
| which events will trigger a notification. You can append either | ||||
| `&only_events=["event_a","event_b"]` or | ||||
| `&exclude_events=["event_a","event_b"]` (or both, with different events) | ||||
| to the URL, with an arbitrary number of supported events. | ||||
|  | ||||
| You can use UNIX-style wildcards like `*` to include multiple events. | ||||
| For example, `test*` matches every event that starts with `test`. | ||||
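
For example, a GitHub integration URL restricted to push and issue events might
look like the following (the API key, channel, and event names here are purely
illustrative):

```
{{ api_url }}/v1/external/github?api_key=abcdefgh&stream=github&only_events=["push","issues"]
```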
|  | ||||
| !!! tip "" | ||||
|  | ||||
|     For a list of supported events, see a specific [integration's | ||||
|     documentation](/integrations) page. | ||||
|  | ||||
| [browse-channels]: /help/introduction-to-channels#browse-and-subscribe-to-channels | ||||
| [add-bot]: /help/add-a-bot-or-integration | ||||
| [url-encoder]: https://www.urlencoder.org/ | ||||
|  | ||||
| ## Related articles | ||||
|  | ||||
| * [Integrations overview](/api/integrations-overview) | ||||
| * [Incoming webhook walkthrough](/api/incoming-webhooks-walkthrough) | ||||
| * [Non-webhook integrations](/api/non-webhook-integrations) | ||||
| @@ -1,660 +0,0 @@ | ||||
| # Incoming webhook walkthrough | ||||
|  | ||||
| Below, we explain each part of a simple incoming webhook integration, | ||||
| called **Hello World**.  This integration sends a "hello" message to the `test` | ||||
| channel and includes a link to the Wikipedia article of the day, which | ||||
it formats from JSON data it receives in the HTTP request.
|  | ||||
| Use this walkthrough to learn how to write your first webhook | ||||
| integration. | ||||
|  | ||||
| ## Step 0: Create fixtures | ||||
|  | ||||
| The first step in creating an incoming webhook is to examine the data that the | ||||
| service you want to integrate will be sending to Zulip. | ||||
|  | ||||
| * Use [Zulip's JSON integration](/integrations/doc/json), | ||||
| <https://webhook.site/>, or a similar tool to capture webhook | ||||
| payload(s) from the service you are integrating. Examining this data | ||||
| allows you to do two things: | ||||
|  | ||||
| 1. Determine how you will need to structure your webhook code, including what | ||||
|    message types your integration should support and how. | ||||
| 2. Create fixtures for your webhook tests. | ||||
|  | ||||
| A test fixture is a small file containing test data, one for each test. | ||||
| Fixtures enable the testing of webhook integration code without the need to | ||||
| actually contact the service being integrated. | ||||
|  | ||||
| Because `Hello World` is a very simple integration that does one | ||||
| thing, it requires only one fixture, | ||||
| `zerver/webhooks/helloworld/fixtures/hello.json`: | ||||
|  | ||||
| ```json | ||||
| { | ||||
|   "featured_title":"Marilyn Monroe", | ||||
|   "featured_url":"https://en.wikipedia.org/wiki/Marilyn_Monroe", | ||||
| } | ||||
| ``` | ||||
|  | ||||
| When writing your own incoming webhook integration, you'll want to write a test function | ||||
| for each distinct message condition your integration supports. You'll also need a | ||||
| corresponding fixture for each of these tests. Depending on the type of data | ||||
| the 3rd party service sends, your fixture may contain JSON, URL encoded text, or | ||||
| some other kind of data. See [Step 5: Create automated tests](#step-5-create-automated-tests) or | ||||
| [Testing](https://zulip.readthedocs.io/en/latest/testing/testing.html) for further details. | ||||
|  | ||||
| ### HTTP Headers | ||||
|  | ||||
| Some third-party webhook APIs, such as GitHub's, don't encode all the | ||||
| information about an event in the JSON request body.  Instead, they | ||||
| put key details like the event type in a separate HTTP header | ||||
| (generally this is clear in their API documentation).  In order to | ||||
| test Zulip's handling of that integration, you will need to record | ||||
| which HTTP headers are used with each fixture you capture. | ||||
|  | ||||
Since this is integration-dependent, Zulip offers a simple API for
doing this, which is probably best explained by looking at the example
for GitHub: `zerver/webhooks/github/view.py`. As part of writing your
integration, you'll write a special function in your `view.py` file
that maps the filename of the fixture to the set of HTTP headers to
use. This function must be named `fixture_to_headers`. Most
integrations will use the same strategy as the GitHub integration:
encoding the variable header data from the third party (usually just
an event type) in the fixture filename. In that case, you won't need
to write the logic for such a function yourself; you can just use the
same helper method that the GitHub integration uses.
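
As a rough sketch (not the actual GitHub implementation), a
`fixture_to_headers` function for an integration that encodes the event type in
its fixture filenames might look like this; the header name and the
double-underscore filename convention are assumptions for illustration only:

```python
from typing import Dict


def fixture_to_headers(fixture_name: str) -> Dict[str, str]:
    # For a fixture named "issue_opened__with_assignee.json", send the event
    # type "issue_opened" in the custom header the third-party service uses.
    event_type = fixture_name.split("__")[0]
    return {"HTTP_X_MYSERVICE_EVENT": event_type}
```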
|  | ||||
| ## Step 1: Initialize your webhook python package | ||||
|  | ||||
In the `zerver/webhooks/` directory, create a new subdirectory that will
contain all of the corresponding code. In our example, it will be
`helloworld`. The new directory will be a Python package, so you have
to create an empty `__init__.py` file in that directory via, for
example, `touch zerver/webhooks/helloworld/__init__.py`.
|  | ||||
| ## Step 2: Create main webhook code | ||||
|  | ||||
| The majority of the code for your new integration will be in a single | ||||
| python file, `zerver/webhooks/mywebhook/view.py`. | ||||
|  | ||||
| The Hello World integration is in `zerver/webhooks/helloworld/view.py`: | ||||
|  | ||||
| ```python | ||||
| from django.http import HttpRequest, HttpResponse | ||||
|  | ||||
| from zerver.decorator import webhook_view | ||||
| from zerver.lib.response import json_success | ||||
| from zerver.lib.typed_endpoint import JsonBodyPayload, typed_endpoint | ||||
| from zerver.lib.validator import WildValue, check_string | ||||
| from zerver.lib.webhooks.common import check_send_webhook_message | ||||
| from zerver.models import UserProfile | ||||
|  | ||||
|  | ||||
| @webhook_view("HelloWorld") | ||||
| @typed_endpoint | ||||
| def api_helloworld_webhook( | ||||
|     request: HttpRequest, | ||||
|     user_profile: UserProfile, | ||||
|     *, | ||||
|     payload: JsonBodyPayload[WildValue], | ||||
| ) -> HttpResponse: | ||||
|     # construct the body of the message | ||||
|     body = "Hello! I am happy to be here! :smile:" | ||||
|  | ||||
|     # try to add the Wikipedia article of the day | ||||
|     body_template = ( | ||||
|         "\nThe Wikipedia featured article for today is **[{featured_title}]({featured_url})**" | ||||
|     ) | ||||
|     body += body_template.format( | ||||
|         featured_title=payload["featured_title"].tame(check_string), | ||||
|         featured_url=payload["featured_url"].tame(check_string), | ||||
|     ) | ||||
|  | ||||
|     topic = "Hello World" | ||||
|  | ||||
|     # send the message | ||||
|     check_send_webhook_message(request, user_profile, topic, body) | ||||
|  | ||||
|     return json_success(request) | ||||
| ``` | ||||
|  | ||||
| The above code imports the required functions and defines the main webhook | ||||
| function `api_helloworld_webhook`, decorating it with `webhook_view` and | ||||
| `typed_endpoint`. The `typed_endpoint` decorator allows you to | ||||
access request variables with `JsonBodyPayload`. You can find more about `JsonBodyPayload` and request variables in [Writing views](
| https://zulip.readthedocs.io/en/latest/tutorials/writing-views.html#request-variables). | ||||
|  | ||||
| You must pass the name of your integration to the | ||||
| `webhook_view` decorator; that name will be used to | ||||
| describe your integration in Zulip's analytics (e.g., the `/stats` | ||||
| page). Here we have used `HelloWorld`. To be consistent with other | ||||
| integrations, use the name of the product you are integrating in camel | ||||
| case, spelled as the product spells its own name (except always first | ||||
| letter upper-case). | ||||
|  | ||||
| The `webhook_view` decorator indicates that the 3rd party service will | ||||
| send the authorization as an API key in the query parameters. If your service uses | ||||
| HTTP basic authentication, you would instead use the `authenticated_rest_api_view` | ||||
| decorator. | ||||
|  | ||||
You should name your webhook function `api_webhookname_webhook`,
where `webhookname` is the name of your integration, always
lower-case.
|  | ||||
At minimum, the webhook function must accept `request` (Django
[HttpRequest](https://docs.djangoproject.com/en/5.0/ref/request-response/#django.http.HttpRequest)
object), and `user_profile` (Zulip's user object). You may also want to
define additional keyword-only parameters for `typed_endpoint` to populate.
|  | ||||
In the example above, we have defined `payload`, which is populated
from the body of the HTTP request. The destination channel comes from
the `stream` query parameter in the webhook URL (the `test` channel is
available by default in the Zulip development environment), and the
topic is set to `Hello World` in the function body. If your webhook
uses a custom channel, it must exist before a message can be created
in it. (See [Step 5: Create automated tests](#step-5-create-automated-tests)
for how to handle this in tests.)

The webhook function and its parameters carry mypy type annotations. See [this
page](https://zulip.readthedocs.io/en/latest/testing/mypy.html) for details about
how to properly annotate your webhook functions.
|  | ||||
| In the body of the function we define the body of the message as `Hello! I am | ||||
| happy to be here! :smile:`. The `:smile:` indicates an emoji. Then we append a | ||||
| link to the Wikipedia article of the day as provided by the json payload. | ||||
|  | ||||
* A JSON payload might not contain all of the required keys that your
  integration checks for. In such a case, any `KeyError` thrown is handled by
  the server backend, which will generate an appropriate error response.
|  | ||||
| Then we send a message with `check_send_webhook_message`, which will | ||||
| validate the message and do the following: | ||||
|  | ||||
| * Send a public (channel) message if the `stream` query parameter is | ||||
|   specified in the webhook URL. | ||||
| * If the `stream` query parameter isn't specified, it will send a direct | ||||
|   message to the owner of the webhook bot. | ||||
|  | ||||
Finally, we return a 200 HTTP status with a JSON format success message via
| `json_success(request)`. | ||||
|  | ||||
| ## Step 3: Create an API endpoint for the webhook | ||||
|  | ||||
| In order for an incoming webhook to be externally available, it must be mapped | ||||
| to a URL. This is done in `zerver/lib/integrations.py`. | ||||
|  | ||||
| Look for the lines beginning with: | ||||
|  | ||||
| ```python | ||||
| WEBHOOK_INTEGRATIONS: List[WebhookIntegration] = [ | ||||
| ``` | ||||
|  | ||||
| And you'll find the entry for Hello World: | ||||
|  | ||||
| ```python | ||||
|   WebhookIntegration("helloworld", ["misc"], display_name="Hello World"), | ||||
| ``` | ||||
|  | ||||
| This tells the Zulip API to call the `api_helloworld_webhook` function in | ||||
| `zerver/webhooks/helloworld/view.py` when it receives a request at | ||||
| `/api/v1/external/helloworld`. | ||||
|  | ||||
| This line also tells Zulip to generate an entry for Hello World on the Zulip | ||||
| integrations page using `static/images/integrations/logos/helloworld.svg` as its | ||||
| icon. The second positional argument defines a list of categories for the | ||||
| integration. | ||||
|  | ||||
At this point, if you're following along and/or writing your own Hello World
webhook, you have written enough code to test your integration. There are
three tools you can use to test your webhook: two command-line tools and a GUI.
|  | ||||
| ### Webhooks requiring custom configuration | ||||
|  | ||||
In rare cases, it's necessary for an incoming webhook to require
additional user configuration beyond what is specified in the webhook
URL.  The typical use case is APIs like the Stripe API, which require
clients to do a callback to get details beyond an opaque object ID
that one would want to include in a Zulip notification.
|  | ||||
| These configuration options are declared as follows: | ||||
|  | ||||
| ```python | ||||
|     WebhookIntegration('helloworld', ['misc'], display_name='Hello World', | ||||
|                        config_options=[('HelloWorld API key', 'hw_api_key', check_string)]) | ||||
| ``` | ||||
|  | ||||
`config_options` is a list describing the parameters the user should
configure:

1. A user-facing string describing the field to display to users.
2. The field name you'll use to access this from your `view.py` function.
3. A validator, used to verify that the input is valid.

Common validators are available in `zerver/lib/validator.py`.
|  | ||||
| ## Step 4: Manually testing the webhook | ||||
|  | ||||
For either of the command-line tools, you'll first need an API key. To
test the webhook, [create a
bot](https://zulip.com/help/add-a-bot-or-integration) with the
**Incoming webhook** type, and copy its API key from the **Bots**
section of your Zulip user's **Personal settings**. Replace `<api_key>`
with your bot's API key in the examples presented below; this is how
Zulip knows that the request was made by an authorized user.
|  | ||||
| ### Curl | ||||
|  | ||||
| Using curl: | ||||
| ```bash | ||||
| curl -X POST -H "Content-Type: application/json" -d '{ "featured_title":"Marilyn Monroe", "featured_url":"https://en.wikipedia.org/wiki/Marilyn_Monroe" }' http://localhost:9991/api/v1/external/helloworld\?api_key\=<api_key> | ||||
| ``` | ||||
|  | ||||
| After running the above command, you should see something similar to: | ||||
|  | ||||
| ```json | ||||
| {"msg":"","result":"success"} | ||||
| ``` | ||||
|  | ||||
| ### Management command: send_webhook_fixture_message | ||||
|  | ||||
| Using `manage.py` from within the Zulip development environment: | ||||
|  | ||||
| ```console | ||||
| (zulip-py3-venv) vagrant@vagrant:/srv/zulip$ | ||||
| ./manage.py send_webhook_fixture_message \ | ||||
|     --fixture=zerver/webhooks/helloworld/fixtures/hello.json \ | ||||
|     '--url=http://localhost:9991/api/v1/external/helloworld?api_key=<api_key>' | ||||
| ``` | ||||
|  | ||||
| After running the above command, you should see something similar to: | ||||
|  | ||||
| ``` | ||||
| 2016-07-07 15:06:59,187 INFO     127.0.0.1       POST    200 143ms (mem: 6ms/13) (md: 43ms/1) (db: 20ms/9q) (+start: 147ms) /api/v1/external/helloworld (helloworld-bot@zulip.com via ZulipHelloWorldWebhook) | ||||
| ``` | ||||
|  | ||||
| Some webhooks require custom HTTP headers, which can be passed using | ||||
| `./manage.py send_webhook_fixture_message --custom-headers`.  For | ||||
| example: | ||||
|  | ||||
|     --custom-headers='{"X-Custom-Header": "value"}' | ||||
|  | ||||
| The format is a JSON dictionary, so make sure that the header names do | ||||
| not contain any spaces in them and that you use the precise quoting | ||||
| approach shown above. | ||||
|  | ||||
| For more information about `manage.py` command-line tools in Zulip, see | ||||
| the [management commands][management-commands] documentation. | ||||
|  | ||||
| [management-commands]: https://zulip.readthedocs.io/en/latest/production/management-commands.html | ||||
|  | ||||
### Integrations Dev Panel

This is the GUI tool.
|  | ||||
| {start_tabs} | ||||
|  | ||||
| 1. Run `./tools/run-dev` then go to http://localhost:9991/devtools/integrations/. | ||||
|  | ||||
1. Set the following mandatory fields:
    * **Bot** - Any incoming webhook bot.
    * **Integration** - One of the integrations.
    * **Fixture** - Though not mandatory, it's recommended that you select
      one and then tweak it if necessary.

    The remaining fields are optional, and the URL will automatically be generated.
|  | ||||
| 1. Click **Send**! | ||||
|  | ||||
| {end_tabs} | ||||
|  | ||||
| By opening Zulip in one tab and then this tool in another, you can quickly tweak | ||||
| your code and send sample messages for many different test fixtures. | ||||
|  | ||||
Note: Custom HTTP headers must be entered as a JSON dictionary, if you want to use any.
Feel free to use 4-space indentation if you'd like!
|  | ||||
| Your sample notification may look like: | ||||
|  | ||||
| <img class="screenshot" src="/static/images/api/helloworld-webhook.png" alt="screenshot" /> | ||||
|  | ||||
|  | ||||
|  | ||||
| ## Step 5: Create automated tests | ||||
|  | ||||
| Every webhook integration should have a corresponding test file: | ||||
| `zerver/webhooks/mywebhook/tests.py`. | ||||
|  | ||||
The Hello World integration's tests are in `zerver/webhooks/helloworld/tests.py`.
|  | ||||
| You should name the class `<WebhookName>HookTests` and have it inherit from | ||||
| the base class `WebhookTestCase`. For our HelloWorld webhook, we name the test | ||||
| class `HelloWorldHookTests`: | ||||
|  | ||||
| ```python | ||||
| class HelloWorldHookTests(WebhookTestCase): | ||||
|     CHANNEL_NAME = "test" | ||||
|     URL_TEMPLATE = "/api/v1/external/helloworld?&api_key={api_key}&stream={stream}" | ||||
|     DIRECT_MESSAGE_URL_TEMPLATE = "/api/v1/external/helloworld?&api_key={api_key}" | ||||
|     WEBHOOK_DIR_NAME = "helloworld" | ||||
|  | ||||
|     # Note: Include a test function per each distinct message condition your integration supports | ||||
|     def test_hello_message(self) -> None: | ||||
|         expected_topic = "Hello World" | ||||
|         expected_message = "Hello! I am happy to be here! :smile:\nThe Wikipedia featured article for today is **[Marilyn Monroe](https://en.wikipedia.org/wiki/Marilyn_Monroe)**" | ||||
|  | ||||
|         # use fixture named helloworld_hello | ||||
|         self.check_webhook( | ||||
|             "hello", | ||||
|             expected_topic, | ||||
|             expected_message, | ||||
|             content_type="application/x-www-form-urlencoded", | ||||
|         ) | ||||
| ``` | ||||
|  | ||||
| In the above example, `CHANNEL_NAME`, `URL_TEMPLATE`, and `WEBHOOK_DIR_NAME` refer | ||||
| to class attributes from the base class, `WebhookTestCase`. These are needed by | ||||
| the helper function `check_webhook` to determine how to execute | ||||
| your test. `CHANNEL_NAME` should be set to your default channel. If it doesn't exist, | ||||
| `check_webhook` will create it while executing your test. | ||||
|  | ||||
| If your test expects a channel name from a test fixture, the value in the fixture | ||||
| and the value you set for `CHANNEL_NAME` must match. The test helpers use `CHANNEL_NAME` | ||||
| to create the destination channel, and then create the message to send using the | ||||
| value from the fixture. If these don't match, the test will fail. | ||||
|  | ||||
| `URL_TEMPLATE` defines how the test runner will call your incoming webhook, in the same way | ||||
|  you would provide a webhook URL to the 3rd party service. `api_key={api_key}` says | ||||
| that an API key is expected. | ||||
|  | ||||
| When writing tests for your webhook, you'll want to include one test function | ||||
| (and corresponding fixture) per each distinct message condition that your | ||||
| integration supports. | ||||
|  | ||||
| If, for example, we added support for sending a goodbye message to our `Hello | ||||
| World` webhook, we would add another test function to `HelloWorldHookTests` | ||||
| class called something like `test_goodbye_message`: | ||||
|  | ||||
| ```python | ||||
|     def test_goodbye_message(self) -> None: | ||||
|         expected_topic = "Hello World" | ||||
|         expected_message = "Hello! I am happy to be here! :smile:\nThe Wikipedia featured article for today is **[Goodbye](https://en.wikipedia.org/wiki/Goodbye)**" | ||||
|  | ||||
|         # use fixture named helloworld_goodbye | ||||
|         self.check_webhook( | ||||
|             "goodbye", | ||||
|             expected_topic, | ||||
|             expected_message, | ||||
|             content_type="application/x-www-form-urlencoded", | ||||
|         ) | ||||
| ``` | ||||
|  | ||||
| As well as a new fixture `goodbye.json` in | ||||
| `zerver/webhooks/helloworld/fixtures/`: | ||||
|  | ||||
| ```json | ||||
| { | ||||
|   "featured_title":"Goodbye", | ||||
|   "featured_url":"https://en.wikipedia.org/wiki/Goodbye", | ||||
| } | ||||
| ``` | ||||
|  | ||||
| Also consider if your integration should have negative tests, a test where the | ||||
| data from the test fixture should result in an error. For details see | ||||
| [Negative tests](#negative-tests), below. | ||||
|  | ||||
| Once you have written some tests, you can run just these new tests from within | ||||
| the Zulip development environment with this command: | ||||
|  | ||||
| ```console | ||||
| (zulip-py3-venv) vagrant@vagrant:/srv/zulip$ | ||||
| ./tools/test-backend zerver/webhooks/helloworld | ||||
| ``` | ||||
|  | ||||
| (Note: You must run the tests from the top level of your development directory. | ||||
| The standard location in a Vagrant environment is `/srv/zulip`. If you are not | ||||
| using Vagrant, use the directory where you have your development environment.) | ||||
|  | ||||
| You will see some script output, and if all the tests pass, you will see: | ||||
|  | ||||
| ```console | ||||
| Running zerver.webhooks.helloworld.tests.HelloWorldHookTests.test_goodbye_message | ||||
| Running zerver.webhooks.helloworld.tests.HelloWorldHookTests.test_hello_message | ||||
| DONE! | ||||
| ``` | ||||
|  | ||||
| ## Step 6: Create documentation | ||||
|  | ||||
| Next, we add end-user documentation for our integration.  You | ||||
| can see the existing examples at <https://zulip.com/integrations> | ||||
| or by accessing `/integrations` in your Zulip development environment. | ||||
|  | ||||
| There are two parts to the end-user documentation on this page. | ||||
|  | ||||
| The first is the lozenge in the grid of integrations, showing your | ||||
| integration logo and name, which links to the full documentation. | ||||
| This is generated automatically once you've registered the integration | ||||
| in `WEBHOOK_INTEGRATIONS` in `zerver/lib/integrations.py`, and supports | ||||
| some customization via options to the `WebhookIntegration` class. | ||||
|  | ||||
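| For reference, the Hello World integration's entry in `WEBHOOK_INTEGRATIONS` | ||||
| looks roughly like the sketch below. The exact options accepted by | ||||
| `WebhookIntegration` can vary between Zulip versions, so treat this as | ||||
| illustrative rather than canonical: | ||||
|  | ||||
| ```python | ||||
| # zerver/lib/integrations.py (abridged) | ||||
| WEBHOOK_INTEGRATIONS = [ | ||||
|     # ... | ||||
|     WebhookIntegration("helloworld", ["misc"], display_name="Hello World"), | ||||
|     # ... | ||||
| ] | ||||
| ``` | ||||
|  | ||||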
| Second, you need to write the actual documentation content in | ||||
| `zerver/webhooks/mywebhook/doc.md`. | ||||
|  | ||||
| ```md | ||||
| Learn how Zulip integrations work with this simple Hello World example! | ||||
|  | ||||
| 1.  The Hello World webhook will use the `test` channel, which is created | ||||
|     by default in the Zulip development environment. If you are running | ||||
|     Zulip in production, you should make sure that this channel exists. | ||||
|  | ||||
| 1. {!create-an-incoming-webhook.md!} | ||||
|  | ||||
| 1. {!generate-integration-url.md!} | ||||
|  | ||||
| 1.  To trigger a notification using this example webhook, you can use | ||||
|     `send_webhook_fixture_message` from a [Zulip development | ||||
|     environment](https://zulip.readthedocs.io/en/latest/development/overview.html): | ||||
|  | ||||
|     ``` | ||||
|         (zulip-py3-venv) vagrant@vagrant:/srv/zulip$ | ||||
|         ./manage.py send_webhook_fixture_message \ | ||||
|         > --fixture=zerver/tests/fixtures/helloworld/hello.json \ | ||||
|         > '--url=http://localhost:9991/api/v1/external/helloworld?api_key=abcdefgh&stream=stream%20name' | ||||
|     ``` | ||||
|  | ||||
|     Or, use curl: | ||||
|  | ||||
|     ``` | ||||
|     curl -X POST -H "Content-Type: application/json" -d '{ "featured_title":"Marilyn Monroe", "featured_url":"https://en.wikipedia.org/wiki/Marilyn_Monroe" }' 'http://localhost:9991/api/v1/external/helloworld?api_key=abcdefgh&stream=stream%20name' | ||||
|     ``` | ||||
|  | ||||
| {!congrats.md!} | ||||
|  | ||||
|  | ||||
|  | ||||
| ``` | ||||
|  | ||||
| `{!create-an-incoming-webhook.md!}` and `{!congrats.md!}` are examples of | ||||
| Markdown macros. Zulip has a macro-based Markdown/Jinja2 framework that | ||||
| includes macros for common instructions in Zulip's webhooks/integrations | ||||
| documentation. | ||||
|  | ||||
| See | ||||
| [our guide on documenting an integration][integration-docs-guide] | ||||
| for further details, including how to easily create the message | ||||
| screenshot. In most cases, you should plan on using an existing guide as a template, like | ||||
| [this one](https://raw.githubusercontent.com/zulip/zulip/main/zerver/webhooks/github/doc.md). | ||||
|  | ||||
| [integration-docs-guide]: https://zulip.readthedocs.io/en/latest/documentation/integrations.html | ||||
|  | ||||
| ## Step 7: Preparing a pull request to zulip/zulip | ||||
|  | ||||
| When you have finished your webhook integration, follow these guidelines before | ||||
| pushing the code to your fork and submitting a pull request to zulip/zulip: | ||||
|  | ||||
| - Run tests including linters and ensure you have addressed any issues they | ||||
|   report. See [Testing](https://zulip.readthedocs.io/en/latest/testing/testing.html) | ||||
|   and [Linters](https://zulip.readthedocs.io/en/latest/testing/linters.html) for details. | ||||
| - Read through [Code styles and conventions]( | ||||
|   https://zulip.readthedocs.io/en/latest/contributing/code-style.html) and take a look | ||||
|   through your code to double-check that you've followed Zulip's guidelines. | ||||
| - Take a look at your Git history to ensure your commits have been clear and | ||||
|   logical (see [Commit discipline]( | ||||
|   https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html) for tips). If not, | ||||
|   consider revising them with `git rebase --interactive`. For most incoming webhooks, | ||||
|   you'll want to squash your changes into a single commit and include a good, | ||||
|   clear commit message. | ||||
|  | ||||
| If you would like feedback on your integration as you go, feel free to post a | ||||
| message on the [public Zulip instance](https://chat.zulip.org/#narrow/channel/integrations). | ||||
| You can also create a [draft pull request]( | ||||
| https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests#draft-pull-requests) while you | ||||
| are still working on your integration. See the | ||||
| [Git guide](https://zulip.readthedocs.io/en/latest/git/pull-requests.html#create-a-pull-request) | ||||
| for more on Zulip's pull request process. | ||||
|  | ||||
| ## Advanced topics | ||||
|  | ||||
| More complex implementation or testing needs may require additional code, beyond | ||||
| what the standard helper functions provide. This section discusses some of | ||||
| these situations. | ||||
|  | ||||
| ### Negative tests | ||||
|  | ||||
| A negative test is one where the input, such as malformed data, should result in | ||||
| an error. The helper functions may interpret this as a test failure, when it should instead | ||||
| be a successful test of an error condition. To correctly test these cases, you | ||||
| must explicitly code your test's execution (using other helpers, as needed) | ||||
| rather than call the usual helper function. | ||||
|  | ||||
| Here is an example from the WordPress integration: | ||||
|  | ||||
| ```python | ||||
| def test_unknown_action_no_data(self) -> None: | ||||
|     # Mimic check_webhook() to manually execute a negative test. | ||||
|     # Otherwise its call to send_webhook_payload() would assert on the non-success | ||||
|     # we are testing. The value of result is the error message the webhook should | ||||
|     # return if no params are sent. The fixture for this test is an empty file. | ||||
|  | ||||
|     # subscribe to the target channel | ||||
|     self.subscribe(self.test_user, self.CHANNEL_NAME) | ||||
|  | ||||
|     # post to the webhook url | ||||
|     post_params = {'stream_name': self.CHANNEL_NAME, | ||||
|                    'content_type': 'application/x-www-form-urlencoded'} | ||||
|     result = self.client_post(self.url, 'unknown_action', **post_params) | ||||
|  | ||||
|     # check that we got the expected error message | ||||
|     self.assert_json_error(result, "Unknown WordPress webhook action: WordPress action") | ||||
| ``` | ||||
|  | ||||
| In a normal test, `check_webhook` would handle all the setup | ||||
| and then check that the incoming webhook's response matches the expected result. If | ||||
| the webhook returns an error, the test fails. Instead, explicitly do the | ||||
| setup it would have done, and check the result yourself. | ||||
|  | ||||
| Here, `subscribe` is a test helper that uses `test_user` and `CHANNEL_NAME` | ||||
| (attributes from the base class) to register the user to receive | ||||
| messages in the given channel. If the channel doesn't exist, it creates it. | ||||
|  | ||||
| `client_post`, another helper, performs the HTTP POST that calls the incoming | ||||
| webhook. As long as `self.url` is correct (and in most cases it is), you don't | ||||
| need to construct the webhook URL yourself. | ||||
|  | ||||
| `assert_json_error` then checks if the result matches the expected error. | ||||
| If you had used `check_webhook`, it would have called | ||||
| `send_webhook_payload`, which checks the result with `assert_json_success`. | ||||
|  | ||||
| ### Custom query parameters | ||||
|  | ||||
| Custom arguments passed in URL query parameters work as expected in the webhook | ||||
| code, but require special handling in tests. | ||||
|  | ||||
| For example, here is the definition of a webhook function that gets both `stream` | ||||
| and `topic` from the query parameters: | ||||
|  | ||||
| ```python | ||||
| def api_querytest_webhook(request: HttpRequest, user_profile: UserProfile, | ||||
|                           payload: str=REQ(argument_type='body'), | ||||
|                           stream: str=REQ(default='test'), | ||||
|                           topic: str=REQ(default='Default Alert')): | ||||
| ``` | ||||
|  | ||||
| In actual use, you might configure the 3rd party service to call your Zulip | ||||
| integration with a URL like this: | ||||
|  | ||||
| ``` | ||||
| http://myhost/api/v1/external/querytest?api_key=abcdefgh&stream=alerts&topic=queries | ||||
| ``` | ||||
|  | ||||
| It provides values for `stream` and `topic`, and the webhook can get those | ||||
| using `REQ` without any special handling. How does this work in a test? | ||||
|  | ||||
| The new attribute `TOPIC` exists only in our class so far. In order to | ||||
| construct a URL with a query parameter for `topic`, you can pass the | ||||
| attribute `TOPIC` as a keyword argument to `build_webhook_url`, like so: | ||||
|  | ||||
| ```python | ||||
| class QuerytestHookTests(WebhookTestCase): | ||||
|  | ||||
|     CHANNEL_NAME = 'querytest' | ||||
|     TOPIC = "Default topic" | ||||
|     URL_TEMPLATE = "/api/v1/external/querytest?api_key={api_key}&stream={stream}" | ||||
|     WEBHOOK_DIR_NAME = 'querytest' | ||||
|  | ||||
|     def test_querytest_test_one(self) -> None: | ||||
|         # construct the URL used for this test | ||||
|         self.TOPIC = "Query test" | ||||
|         self.url = self.build_webhook_url(topic=self.TOPIC) | ||||
|  | ||||
|         # define the expected message contents | ||||
|         expected_topic = "Query test" | ||||
|         expected_message = "This is a test of custom query parameters." | ||||
|  | ||||
|         self.check_webhook('test_one', expected_topic, expected_message, | ||||
|                            content_type="application/x-www-form-urlencoded") | ||||
| ``` | ||||
|  | ||||
| You can also override `get_body` or `get_payload` if your test data | ||||
| needs to be constructed in an unusual way. | ||||
|  | ||||
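| For example, here is a minimal sketch of a `get_body` override that rewrites a | ||||
| placeholder in the fixture before it is posted. The `TEMPLATE_TOPIC` placeholder | ||||
| is hypothetical, and `webhook_fixture_data` is the fixture-loading helper from | ||||
| the base class; check `zerver/lib/test_classes.py` for its exact signature in | ||||
| your version: | ||||
|  | ||||
| ```python | ||||
|     def get_body(self, fixture_name: str) -> str: | ||||
|         # Load the raw fixture and substitute the topic before posting it. | ||||
|         body = self.webhook_fixture_data("querytest", fixture_name, file_type="json") | ||||
|         return body.replace("TEMPLATE_TOPIC", self.TOPIC) | ||||
| ``` | ||||
|  | ||||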
| For more, see the definition for the base class, `WebhookTestCase` | ||||
| in `zerver/lib/test_classes.py`, or just grep for examples. | ||||
|  | ||||
|  | ||||
| ### Custom HTTP event-type headers | ||||
|  | ||||
| Some third-party services set a custom HTTP header to indicate the event type that | ||||
| generates a particular payload. To extract such headers, we recommend using the | ||||
| `validate_extract_webhook_http_header` function in `zerver/lib/webhooks/common.py`, | ||||
| like so: | ||||
|  | ||||
| ```python | ||||
| event = validate_extract_webhook_http_header(request, header, integration_name) | ||||
| ``` | ||||
|  | ||||
| `request` is the `HttpRequest` object passed to your main webhook function. `header` | ||||
| is the name of the custom header you'd like to extract, such as `X-Event-Key`, and | ||||
| `integration_name` is the name of the third-party service in question, such as | ||||
| `GitHub`. | ||||
|  | ||||
| Because such headers are how some integrations indicate the event types of their | ||||
| payloads, the absence of such a header usually indicates a configuration | ||||
| issue, where one either entered the URL for a different integration, or happens to | ||||
| be running an older version of the integration that doesn't set that header. | ||||
|  | ||||
| If the requisite header is missing, this function sends a direct message to the | ||||
| owner of the webhook bot, notifying them of the missing header. | ||||
|  | ||||
| ### Handling unexpected webhook event types | ||||
|  | ||||
| Many third-party services have dozens of different event types. In | ||||
| some cases, we may choose to explicitly ignore specific events. In | ||||
| other cases, there may be events that are new or events that we don't | ||||
| know about. In such cases, we recommend raising | ||||
| `UnsupportedWebhookEventTypeError` (found in `zerver/lib/exceptions.py`), | ||||
| with a string describing the unsupported event type, like so: | ||||
|  | ||||
| ```python | ||||
| raise UnsupportedWebhookEventTypeError(event_type) | ||||
| ``` | ||||
|  | ||||
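| Putting these two pieces together, the body of a webhook view might dispatch on | ||||
| the extracted event type roughly as sketched below. The event names and the | ||||
| `get_issue_body` formatter are hypothetical, and `request` and `payload` come | ||||
| from the surrounding view function: | ||||
|  | ||||
| ```python | ||||
| def get_issue_body(payload: dict) -> str: | ||||
|     # Hypothetical formatter; a real integration builds Markdown from the payload. | ||||
|     return f"Issue event: {payload}" | ||||
|  | ||||
| EVENT_HANDLERS = { | ||||
|     "issue_created": get_issue_body, | ||||
|     "issue_closed": get_issue_body, | ||||
| } | ||||
|  | ||||
| event = validate_extract_webhook_http_header(request, "X-Event-Key", "MyWebhook") | ||||
| if event not in EVENT_HANDLERS: | ||||
|     # Let Zulip's standard machinery report the unexpected event type. | ||||
|     raise UnsupportedWebhookEventTypeError(event) | ||||
| body = EVENT_HANDLERS[event](payload) | ||||
| ``` | ||||
|  | ||||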
| ## Related articles | ||||
|  | ||||
| * [Integrations overview](/api/integrations-overview) | ||||
| * [Incoming webhook integrations](/api/incoming-webhooks-overview) | ||||
| @@ -1,26 +0,0 @@ | ||||
| # The Zulip API | ||||
|  | ||||
| Zulip's APIs allow you to integrate other services with Zulip.  This | ||||
| guide should help you find the API you need: | ||||
|  | ||||
| * First, check if the tool you'd like to integrate with Zulip | ||||
|   [already has a native integration](/integrations/). | ||||
| * Next, check if [Zapier](https://zapier.com/apps) or | ||||
|   [IFTTT](https://ifttt.com/search) has an integration. | ||||
|   [Zulip's Zapier integration](/integrations/doc/zapier) and | ||||
|   [Zulip's IFTTT integration](/integrations/doc/ifttt) often allow | ||||
|   integrating a new service with Zulip without writing any code. | ||||
| * If you'd like to send content into Zulip, you can | ||||
|   [write a native incoming webhook integration](/api/incoming-webhooks-overview) | ||||
|   or use [Zulip's API for sending messages](/api/send-message). | ||||
| * If you're building an interactive bot that reacts to activity inside | ||||
|   Zulip, you'll want to look at Zulip's | ||||
|   [Python framework for interactive bots](/api/running-bots) or | ||||
|   [Zulip's real-time events API](/api/get-events). | ||||
|  | ||||
| And if you still need to build your own integration with Zulip, check out | ||||
| the full [REST API](/api/rest), generally starting with | ||||
| [installing the API client bindings](/api/installation-instructions). | ||||
|  | ||||
| In case you already know how you want to build your integration and you're | ||||
| just looking for an API key, we've got you covered [here](/api/api-keys). | ||||
| @@ -1,77 +0,0 @@ | ||||
| # Integrations overview | ||||
|  | ||||
| Integrations allow you to send data from other products into or out of | ||||
| Zulip. Zulip natively integrates with dozens of products, and with hundreds | ||||
| more through Zapier and IFTTT. | ||||
|  | ||||
| Zulip also makes it very easy to write your own integration, and (if you'd | ||||
| like) to get it merged into the main Zulip repository. | ||||
|  | ||||
| Integrations are one of the most important parts of a group chat tool like | ||||
| Zulip, and we are committed to making integrating with Zulip as easy as | ||||
| possible. | ||||
|  | ||||
| ## Set up an existing integration | ||||
|  | ||||
| Most existing integrations send content from a third-party product into | ||||
| Zulip. | ||||
|  | ||||
| * Search Zulip's [list of native integrations](/integrations/) for the | ||||
|   third-party product. Each integration has a page describing how to set it | ||||
|   up. | ||||
|  | ||||
| * Check if [Zapier](https://zapier.com/apps) has an integration with the | ||||
|   product. If it does, follow [these instructions](/integrations/doc/zapier) | ||||
|   to set it up. | ||||
|  | ||||
| * Check if [IFTTT](https://ifttt.com/search) has an integration with the | ||||
|   product. If it does, follow [these instructions](/integrations/doc/ifttt) | ||||
|   to set it up. | ||||
|  | ||||
| * Use a third-party webhook integration designed to work with | ||||
|   [Slack's webhook API](https://api.slack.com/messaging/webhooks) | ||||
|   pointed at Zulip's | ||||
|   [Slack-compatible webhook API](/integrations/slack/slack_incoming). | ||||
|  | ||||
| * If the product can send email notifications, you can | ||||
|   [send those emails to a channel](/help/message-a-channel-by-email). | ||||
|  | ||||
| ## Write your own integration | ||||
|  | ||||
| We've put a lot of effort into making this as easy as possible, but | ||||
| all of the options below do require some comfort writing code. If you | ||||
| need an integration and don't have an engineer on staff, [contact | ||||
| us](/help/contact-support) and we'll see what we can do. | ||||
|  | ||||
| ### Sending content into Zulip | ||||
|  | ||||
| * If the third-party service supports outgoing webhooks, you likely want to | ||||
|   build an [incoming webhook integration](/api/incoming-webhooks-overview). | ||||
|  | ||||
| * If it doesn't, you may want to write a | ||||
|   [script or plugin integration](/api/non-webhook-integrations). | ||||
|  | ||||
| * Finally, you can | ||||
|   [send messages using Zulip's API](/api/send-message). | ||||
|  | ||||
| ### Sending and receiving content | ||||
|  | ||||
| * To react to activity inside Zulip, look at Zulip's | ||||
|   [Python framework for interactive bots](/api/running-bots) or | ||||
|   [Zulip's real-time events API](/api/get-events). | ||||
|  | ||||
| * If what you want isn't covered by the above, check out the full | ||||
|   [REST API](/api/rest). The web, mobile, desktop, and terminal apps are | ||||
|   built on top of this API, so it can do anything a human user can do. Most | ||||
|   but not all of the endpoints are documented on this site; if you need | ||||
|   something that isn't there check out Zulip's | ||||
|   [REST endpoints](https://github.com/zulip/zulip/blob/main/zproject/urls.py) | ||||
|   or [contact us](/help/contact-support) and we'll help you out. | ||||
|  | ||||
| ## Related articles | ||||
|  | ||||
| * [Bots overview](/help/bots-overview) | ||||
| * [Set up integrations](/help/set-up-integrations) | ||||
| * [Add a bot or integration](/help/add-a-bot-or-integration) | ||||
| * [Generate integration URL](/help/generate-integration-url) | ||||
| * [Request an integration](/help/request-an-integration) | ||||
| @@ -1,161 +0,0 @@ | ||||
| # Message formatting | ||||
|  | ||||
| Zulip supports an extended version of Markdown for messages, as well as | ||||
| some HTML level special behavior. The Zulip help center article on [message | ||||
| formatting](/help/format-your-message-using-markdown) is the primary | ||||
| documentation for Zulip's markup features. This article is currently a | ||||
| changelog for updates to these features. | ||||
|  | ||||
| The [render a message](/api/render-message) endpoint can be used to get | ||||
| the current HTML version of any Markdown syntax for message content. | ||||
|  | ||||
| ## Code blocks | ||||
|  | ||||
| **Changes**: As of Zulip 4.0 (feature level 33), [code blocks][help-code] | ||||
| can have a `data-code-language` attribute attached to the outer HTML | ||||
| `div` element, which records the programming language that was selected | ||||
| for syntax highlighting. This field is used in the | ||||
| [playgrounds][help-playgrounds] feature for code blocks. | ||||
|  | ||||
| ## Global times | ||||
|  | ||||
| **Changes**: In Zulip 3.0 (feature level 8), added [global time | ||||
| mentions][help-global-time] to supported Markdown message formatting | ||||
| features. | ||||
|  | ||||
| ## Image previews | ||||
|  | ||||
| When a Zulip message is sent linking to an uploaded image, Zulip will | ||||
| generate an image preview element with the following format. | ||||
|  | ||||
| ``` html | ||||
| <div class="message_inline_image"> | ||||
|     <a href="/user_uploads/path/to/image.png" title="image.png"> | ||||
|         <img data-original-dimensions="1920x1080" | ||||
|           src="/user_uploads/thumbnail/path/to/image.png/840x560.webp"> | ||||
|     </a> | ||||
| </div> | ||||
| ``` | ||||
|  | ||||
| If the server has not yet generated thumbnails for the image at | ||||
| the time the message is sent, the `img` element will be a temporary | ||||
| loading indicator image and have the `image-loading-placeholder` | ||||
| class, which clients can use to identify loading indicators and | ||||
| replace them with a more native loading indicator element if | ||||
| desired. For example: | ||||
|  | ||||
| ``` html | ||||
| <div class="message_inline_image"> | ||||
|     <a href="/user_uploads/path/to/image.png" title="image.png"> | ||||
|         <img class="image-loading-placeholder" data-original-dimensions="1920x1080" src="/path/to/spinner.png"> | ||||
|     </a> | ||||
| </div> | ||||
| ``` | ||||
|  | ||||
| Once the server has a working thumbnail, such messages will be updated | ||||
| via an `update_message` event, with the `rendering_only: true` flag | ||||
| (telling clients not to adjust message edit history), with appropriate | ||||
| adjusted `rendered_content`. A client should process those events by | ||||
| just using the updated rendering. If thumbnailing failed, the same | ||||
| type of event will edit the message's rendered form to remove the | ||||
| image preview element, so no special client-side logic should be | ||||
| required to process such errors. | ||||
|  | ||||
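| As a rough sketch, a client that keeps a local cache of messages might handle | ||||
| such an event as follows; the field names match the `update_message` event as | ||||
| described here, but adapt the cache access to your client's data model: | ||||
|  | ||||
| ```python | ||||
| def handle_update_message(event: dict, message_cache: dict) -> None: | ||||
|     if event.get("rendering_only"): | ||||
|         # A pure re-rendering (e.g., a thumbnail finished); this is not a user | ||||
|         # edit, so leave edit history alone and just swap in the new rendering. | ||||
|         message = message_cache.get(event["message_id"]) | ||||
|         if message is not None: | ||||
|             message["content"] = event["rendered_content"] | ||||
| ``` | ||||
|  | ||||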
| Note that in the uncommon situation that the thumbnailing system is | ||||
| backlogged, an individual message containing multiple image previews | ||||
| may be re-rendered multiple times as each image finishes thumbnailing | ||||
| and triggers a message update. | ||||
|  | ||||
| Clients are recommended to do the following when processing image | ||||
| previews: | ||||
|  | ||||
| - Clients that would like to use the image's aspect ratio to lay out | ||||
|   one or more images in the message feed may use the | ||||
|   `data-original-dimensions` attribute, which is present even if the | ||||
|   image is a placeholder spinner.  This attribute encodes the | ||||
|   dimensions of the original image as `{width}x{height}`.  These | ||||
|   dimensions are for the image as rendered, _after_ any EXIF rotation | ||||
|   and mirroring has been applied. | ||||
| - If the client would like to control the thumbnail resolution used, | ||||
|   it can replace the final section of the URL (`840x560.webp` in the | ||||
|   example above) with the `name` of its preferred format from the set | ||||
|   of supported formats provided by the server in the | ||||
|   `server_thumbnail_formats` portion of the `register` | ||||
|   response. Clients should not make any assumptions about what format | ||||
|   the server will use as the "default" thumbnail resolution, as it may | ||||
|   change over time (see the sketch after this list). | ||||
| - Download button type elements should provide the original image | ||||
|   (encoded via the `href` of the containing `a` tag). | ||||
| - Lightbox elements for viewing an image should be designed to | ||||
|   immediately display any already-downloaded thumbnail while fetching | ||||
|   the original-quality image or an appropriate higher-quality | ||||
|   thumbnail from the server, to be transparently swapped in once it is | ||||
|   available. Clients that would like to size the lightbox based on the | ||||
|   size of the original image can use the `data-original-dimensions` | ||||
|   attribute, as described above. | ||||
| - Animated images will have a `data-animated` attribute on the `img` | ||||
|   tag. As detailed in `server_thumbnail_formats`, both animated and | ||||
|   still images are available for clients to use, depending on their | ||||
|   preference. See, for example, the [web | ||||
|   setting](/help/allow-image-link-previews#configure-how-animated-images-are-played) | ||||
|   to control whether animated images are autoplayed in the message | ||||
|   feed. | ||||
| - Clients should not assume that the requested format is the format | ||||
|   that they will receive; in rare cases where the client has an | ||||
|   out-of-date list of `server_thumbnail_formats`, the server will | ||||
|   provide an approximation of the client's requested format.  Because | ||||
|   of this, clients should not assume that the pixel dimensions or file | ||||
|   format match what they requested. | ||||
| - No other processing of the URLs is recommended. | ||||
|  | ||||
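| As referenced above, the URL-related recommendations only require simple string | ||||
| handling. A minimal sketch, assuming the URL and attribute layout shown earlier: | ||||
|  | ||||
| ```python | ||||
| def original_dimensions(data_original_dimensions: str) -> tuple[int, int]: | ||||
|     # Parse a data-original-dimensions value such as "1920x1080". | ||||
|     width, height = data_original_dimensions.split("x") | ||||
|     return int(width), int(height) | ||||
|  | ||||
| def with_thumbnail_format(thumbnail_url: str, format_name: str) -> str: | ||||
|     # Replace the final path component (e.g. "840x560.webp") with the `name` of | ||||
|     # a format advertised in server_thumbnail_formats. | ||||
|     base, _, _ = thumbnail_url.rpartition("/") | ||||
|     return f"{base}/{format_name}" | ||||
| ``` | ||||
|  | ||||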
| **Changes**: In Zulip 9.2 (feature levels 278-279, and 287+), added | ||||
| `data-original-dimensions` to the `image-loading-placeholder` spinner | ||||
| images, containing the dimensions of the original image. | ||||
|  | ||||
| In Zulip 9.0 (feature level 276), added `data-original-dimensions` | ||||
| attribute to images that have been thumbnailed, containing the | ||||
| dimensions of the full-size version of the image. Thumbnailing itself | ||||
| was reintroduced at feature level 275. | ||||
|  | ||||
| Previously, with the exception of Zulip servers that used the beta | ||||
| Thumbor-based implementation years ago, all image previews in Zulip | ||||
| messages were not thumbnailed; the `a` tag and the `img` tag would both | ||||
| point to the original image. | ||||
|  | ||||
| Clients that correctly implement the current API should handle | ||||
| Thumbor-based older thumbnails correctly, as long as they do not | ||||
| assume that `data-original-dimensions` is present. Clients should not | ||||
| assume that messages sent prior to the introduction of thumbnailing | ||||
| have been re-rendered to use the new format or have thumbnails | ||||
| available. | ||||
|  | ||||
| ## Mentions | ||||
|  | ||||
| **Changes**: In Zulip 9.0 (feature level 247), `channel` was added | ||||
| to the supported [wildcard][help-mention-all] options used in the | ||||
| [mentions][help-mentions] Markdown message formatting feature. | ||||
|  | ||||
| ## Spoilers | ||||
|  | ||||
| **Changes**: In Zulip 3.0 (feature level 15), added | ||||
| [spoilers][help-spoilers] to supported Markdown message formatting | ||||
| features. | ||||
|  | ||||
| ## Removed features | ||||
|  | ||||
| **Changes**: In Zulip 4.0 (feature level 24), the rarely used `!avatar()` | ||||
| and `!gravatar()` markup syntax, which was never documented and had an | ||||
| inconsistent syntax, were removed. | ||||
|  | ||||
| ## Related articles | ||||
|  | ||||
| * [Markdown formatting](/help/format-your-message-using-markdown) | ||||
| * [Send a message](/api/send-message) | ||||
| * [Render a message](/api/render-message) | ||||
|  | ||||
| [help-code]: /help/code-blocks | ||||
| [help-playgrounds]: /help/code-blocks#code-playgrounds | ||||
| [help-spoilers]: /help/spoilers | ||||
| [help-global-time]: /help/global-times | ||||
| [help-mentions]: /help/mention-a-user-or-group | ||||
| [help-mention-all]: /help/mention-a-user-or-group#mention-everyone-on-a-channel | ||||
| @@ -1,184 +0,0 @@ | ||||
| # Outgoing webhooks | ||||
|  | ||||
| Outgoing webhooks allow you to build or set up Zulip integrations | ||||
| which are notified when certain types of messages are sent in | ||||
| Zulip. When one of those events is triggered, we'll send an HTTP POST | ||||
| payload to the webhook's configured URL.  Webhooks can be used to | ||||
| power a wide range of Zulip integrations.  For example, the | ||||
| [Zulip Botserver][zulip-botserver] is built on top of this API. | ||||
|  | ||||
| Zulip supports outgoing webhooks both in a clean native Zulip format, | ||||
| as well as a format that's compatible with | ||||
| [Slack's outgoing webhook API][slack-outgoing-webhook], which can help | ||||
| with porting an existing Slack integration to work with Zulip. | ||||
|  | ||||
| [zulip-botserver]: /api/deploying-bots#zulip-botserver | ||||
| [slack-outgoing-webhook]: https://api.slack.com/custom-integrations/outgoing-webhooks | ||||
|  | ||||
| To register an outgoing webhook: | ||||
|  | ||||
| * Log in to the Zulip server. | ||||
| * Navigate to *Personal settings (<i class="zulip-icon zulip-icon-gear"></i>)* -> *Bots* -> | ||||
|   *Add a new bot*.  Select *Outgoing webhook* for bot type, the URL | ||||
|   you'd like Zulip to post to as the **Endpoint URL**, the format you | ||||
|   want, and click on *Create bot* to submit the form. | ||||
| * Your new bot user will appear in the *Active bots* panel, which you | ||||
|   can use to edit the bot's settings. | ||||
|  | ||||
| ## Triggering | ||||
|  | ||||
| There are currently two ways to trigger an outgoing webhook: | ||||
|  | ||||
| *  **@-mention** the bot user in a channel.  If the bot replies, its | ||||
|     reply will be sent to that channel and topic. | ||||
| *  **Send a direct message** with the bot as one of the recipients. | ||||
|     If the bot replies, its reply will be sent to that thread. | ||||
|  | ||||
| ## Timeouts | ||||
|  | ||||
| The remote server must respond to a `POST` request in a timely manner. | ||||
| The default timeout for outgoing webhooks is 10 seconds, though this | ||||
| can be configured by the administrator of the Zulip server by setting | ||||
| `OUTGOING_WEBHOOKS_TIMEOUT_SECONDS` in the [server's | ||||
| settings][settings]. | ||||
|  | ||||
| [settings]: https://zulip.readthedocs.io/en/latest/subsystems/settings.html#server-settings | ||||
|  | ||||
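| For example, a self-hosted administrator who needs to accommodate slower | ||||
| endpoints might add something like this to `/etc/zulip/settings.py` (the value | ||||
| is illustrative): | ||||
|  | ||||
| ```python | ||||
| # Give outgoing webhook endpoints up to 30 seconds to respond. | ||||
| OUTGOING_WEBHOOKS_TIMEOUT_SECONDS = 30 | ||||
| ``` | ||||
|  | ||||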
| ## Outgoing webhook format | ||||
|  | ||||
| {generate_code_example|/zulip-outgoing-webhook:post|fixture} | ||||
|  | ||||
| ### Fields documentation | ||||
|  | ||||
| {generate_return_values_table|zulip.yaml|/zulip-outgoing-webhook:post} | ||||
|  | ||||
| ## Replying with a message | ||||
|  | ||||
| Many bots implemented using this outgoing webhook API will want to | ||||
| send a reply message into Zulip.  Zulip's outgoing webhook API | ||||
| provides a convenient way to do that by simply returning an | ||||
| appropriate HTTP response to the Zulip server. | ||||
|  | ||||
| A correctly implemented bot will return a JSON object containing one | ||||
| of two possible formats, described below. | ||||
|  | ||||
| ### Example response payloads | ||||
|  | ||||
| If the bot code wants to opt out of responding, it can explicitly | ||||
| encode a JSON dictionary that contains `response_not_required` set | ||||
| to `True`, so that no response message is sent to the user.  (This | ||||
| is helpful to distinguish deliberate non-responses from bugs.) | ||||
|  | ||||
| Here's an example of the JSON your server should respond with if | ||||
| you would not like to send a response message: | ||||
|  | ||||
| ```json | ||||
| { | ||||
|     "response_not_required": true | ||||
| } | ||||
| ``` | ||||
|  | ||||
| Here's an example of the JSON your server should respond with if | ||||
| you would like to send a response message: | ||||
|  | ||||
| ```json | ||||
| { | ||||
|     "content": "Hey, we just received **something** from Zulip!" | ||||
| } | ||||
| ``` | ||||
|  | ||||
| The `content` field should contain Zulip-format Markdown. | ||||
|  | ||||
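| To make this concrete, here is a minimal sketch of an endpoint that replies to | ||||
| every trigger, using only the Python standard library. It assumes the native | ||||
| Zulip payload format, in which the triggering message content arrives in the | ||||
| `data` field; adjust the field access to match the payload you actually receive: | ||||
|  | ||||
| ```python | ||||
| import json | ||||
| from http.server import BaseHTTPRequestHandler, HTTPServer | ||||
|  | ||||
| class OutgoingWebhookHandler(BaseHTTPRequestHandler): | ||||
|     def do_POST(self) -> None: | ||||
|         length = int(self.headers.get("Content-Length", "0")) | ||||
|         payload = json.loads(self.rfile.read(length)) | ||||
|         # Echo the triggering content back as the bot's reply. | ||||
|         reply = json.dumps({"content": f"You said: {payload.get('data', '')}"}) | ||||
|         body = reply.encode() | ||||
|         self.send_response(200) | ||||
|         self.send_header("Content-Type", "application/json") | ||||
|         self.send_header("Content-Length", str(len(body))) | ||||
|         self.end_headers() | ||||
|         self.wfile.write(body) | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     HTTPServer(("0.0.0.0", 5002), OutgoingWebhookHandler).serve_forever() | ||||
| ``` | ||||
|  | ||||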
| Note that an outgoing webhook bot can use the [Zulip REST | ||||
| API](/api/rest) with its API key in case your bot needs to do | ||||
| something else, like add an emoji reaction or upload a file. | ||||
|  | ||||
| ## Slack-format webhook format | ||||
|  | ||||
| This interface translates Zulip's outgoing webhook's request into the | ||||
| format that Slack's outgoing webhook interface sends.  As a result, | ||||
| one should be able to use this to interact with third-party | ||||
| integrations designed to work with Slack's outgoing webhook interface. | ||||
| Here's how we fill in the fields that a Slack-format webhook expects: | ||||
|  | ||||
| <table class="table"> | ||||
|     <thead> | ||||
|         <tr> | ||||
|             <th>Name</th> | ||||
|             <th>Description</th> | ||||
|         </tr> | ||||
|     </thead> | ||||
|     <tbody> | ||||
|         <tr> | ||||
|             <td><code>token</code></td> | ||||
|             <td>A string of alphanumeric characters you can use to | ||||
|             authenticate the webhook request (each bot user uses a fixed token)</td> | ||||
|         </tr> | ||||
|         <tr> | ||||
|             <td><code>team_id</code></td> | ||||
|             <td>ID of the Zulip organization prefixed by "T".</td> | ||||
|         </tr> | ||||
|         <tr> | ||||
|             <td><code>team_domain</code></td> | ||||
|             <td>Hostname of the Zulip organization</td> | ||||
|         </tr> | ||||
|         <tr> | ||||
|             <td><code>channel_id</code></td> | ||||
|             <td>Channel ID prefixed by "C"</td> | ||||
|         </tr> | ||||
|         <tr> | ||||
|             <td><code>channel_name</code></td> | ||||
|             <td>Channel name</td> | ||||
|         </tr> | ||||
|         <tr> | ||||
|             <td><code>thread_ts</code></td> | ||||
|             <td>Timestamp for when message was sent</td> | ||||
|         </tr> | ||||
|         <tr> | ||||
|             <td><code>timestamp</code></td> | ||||
|             <td>Timestamp for when message was sent</td> | ||||
|         </tr> | ||||
|         <tr> | ||||
|             <td><code>user_id</code></td> | ||||
|             <td>ID of the user who sent the message prefixed by "U"</td> | ||||
|         </tr> | ||||
|         <tr> | ||||
|             <td><code>user_name</code></td> | ||||
|             <td>Full name of sender</td> | ||||
|         </tr> | ||||
|         <tr> | ||||
|             <td><code>text</code></td> | ||||
|             <td>The content of the message (in Markdown)</td> | ||||
|         </tr> | ||||
|         <tr> | ||||
|             <td><code>trigger_word</code></td> | ||||
|             <td>Trigger method</td> | ||||
|         </tr> | ||||
|         <tr> | ||||
|             <td><code>service_id</code></td> | ||||
|             <td>ID of the bot user</td> | ||||
|         </tr> | ||||
|     </tbody> | ||||
| </table> | ||||
|  | ||||
| The above data is posted as a list of tuples (not JSON); here's an example: | ||||
|  | ||||
| ``` | ||||
| [('token', 'v9fpCdldZIej2bco3uoUvGp06PowKFOf'), | ||||
|  ('team_id', 'T1512'), | ||||
|  ('team_domain', 'zulip.example.com'), | ||||
|  ('channel_id', 'C123'), | ||||
|  ('channel_name', 'integrations'), | ||||
|  ('thread_ts', 1532078950), | ||||
|  ('timestamp', 1532078950), | ||||
|  ('user_id', 'U21'), | ||||
|  ('user_name', 'Full Name'), | ||||
|  ('text', '@**test**'), | ||||
|  ('trigger_word', 'mention'), | ||||
|  ('service_id', 27)] | ||||
| ``` | ||||
|  | ||||
| * For a successful request, if data is returned, the response contains that | ||||
|   data; otherwise, it is blank. | ||||
| * For a failed request, the response contains the reason for the failure, as | ||||
|   returned by the server, or the exception message. | ||||
| @@ -1,64 +0,0 @@ | ||||
| # Error handling | ||||
|  | ||||
| Zulip's API will always return a JSON format response. | ||||
| The HTTP status code indicates whether the request was successful | ||||
| (200 = success, 4xx = user error, 5xx = server error). | ||||
|  | ||||
| Every response, both success and error responses, will contain at least | ||||
| two keys: | ||||
|  | ||||
| - `msg`: an internationalized, human-readable error message string. | ||||
|  | ||||
| - `result`: either `"error"` or `"success"`, which is redundant with the | ||||
|   HTTP status code, but is convenient when print debugging. | ||||
|  | ||||
| Every error response will also contain an additional key: | ||||
|  | ||||
| - `code`: a machine-readable error string, with a default value of | ||||
|   `"BAD_REQUEST"` for general errors. | ||||
|  | ||||
| Clients should always check `code`, rather than `msg`, when looking for | ||||
| specific error conditions. The string values for `msg` are | ||||
| internationalized (e.g., the server will send the error message | ||||
| translated into French if the user has a French locale), so checking | ||||
| those strings will result in buggy code. | ||||
|  | ||||
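| For example, a client might branch on error conditions roughly like this (a | ||||
| sketch using the `requests` library; `url`, `params`, `bot_email`, and | ||||
| `api_key` are placeholders for your own request): | ||||
|  | ||||
| ```python | ||||
| import requests | ||||
|  | ||||
| response = requests.post(url, data=params, auth=(bot_email, api_key)) | ||||
| payload = response.json() | ||||
| if payload["result"] == "error": | ||||
|     # Branch on the machine-readable code, never on the translated msg string. | ||||
|     code = payload.get("code", "BAD_REQUEST") | ||||
|     if code == "BAD_REQUEST": | ||||
|         print("Request rejected:", payload["msg"]) | ||||
|     else: | ||||
|         print("Error", code, "-", payload["msg"]) | ||||
| ``` | ||||
|  | ||||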
| !!! tip "" | ||||
|  | ||||
|      If a client needs information that is only present in the string value | ||||
|      of `msg` for a particular error response, then the developers | ||||
|      implementing the client should [start a conversation here][api-design] | ||||
|      in order to discuss getting a specific error `code` and/or relevant | ||||
|      additional key/value pairs for that error response. | ||||
|  | ||||
| In addition to the keys described above, some error responses will | ||||
| contain other keys with further details that are useful for clients. The | ||||
| specific keys present depend on the error `code`, and are documented at | ||||
| the API endpoints where these particular errors appear. | ||||
|  | ||||
| **Changes**: Before Zulip 5.0 (feature level 76), all error responses | ||||
| did not contain a `code` key, and its absence indicated that no specific | ||||
| error `code` had been allocated for that error. | ||||
|  | ||||
| ## Common error responses | ||||
|  | ||||
| Documented below are some error responses that are common to many | ||||
| endpoints: | ||||
|  | ||||
| {generate_code_example|/rest-error-handling:post|fixture} | ||||
|  | ||||
| ## Ignored Parameters | ||||
|  | ||||
| In JSON success responses, all Zulip REST API endpoints may return | ||||
| an array of parameters sent in the request that are not supported | ||||
| by that specific endpoint. | ||||
|  | ||||
| While this can be expected, e.g., when sending both current and legacy | ||||
| names for a parameter to a Zulip server of unknown version, this often | ||||
| indicates either a bug in the client implementation or an attempt to | ||||
| configure a new feature while connected to an older Zulip server that | ||||
| does not support said feature. | ||||
|  | ||||
| {generate_code_example|/settings:patch|fixture} | ||||
|  | ||||
| [api-design]: https://chat.zulip.org/#narrow/channel/378-api-design | ||||