mirror of
				https://github.com/zulip/zulip.git
				synced 2025-10-31 12:03:46 +00:00 
			
		
		
		
	Compare commits
	
		
			71 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 1ba708ca96 | ||
|  | e156db2bc7 | ||
|  | d0235add03 | ||
|  | a6b06df895 | ||
|  | 2df2f7eec6 | ||
|  | ad858d2c79 | ||
|  | 5290f17adb | ||
|  | 9824a9d7cf | ||
|  | 88a2a80d81 | ||
|  | 5b16ee0c08 | ||
|  | 17dced26ff | ||
|  | fc9c5b1f43 | ||
|  | 564873a207 | ||
|  | c692263255 | ||
|  | bfe428f608 | ||
|  | d200e3547f | ||
|  | b6afa4a82b | ||
|  | 4db187856d | ||
|  | 36638c95b9 | ||
|  | 85f14eb4f7 | ||
|  | 0fab79c027 | ||
|  | 7d46bed507 | ||
|  | a89ba9c7d6 | ||
|  | 8f735f4683 | ||
|  | e7cfd30d53 | ||
|  | 10c8c0e071 | ||
|  | 9f8b5e225d | ||
|  | 62194eb20f | ||
|  | 2492f4b60e | ||
|  | 1b2967ddb5 | ||
|  | 42774b101f | ||
|  | 716cba04de | ||
|  | 332add3bb6 | ||
|  | b596cd7607 | ||
|  | 21cedabbdf | ||
|  | f910d5b8a9 | ||
|  | daf185705d | ||
|  | 1fa7081a4c | ||
|  | 0d17a5e76d | ||
|  | 9815581957 | ||
|  | 33d7aa9d47 | ||
|  | 6c3a6ef6c1 | ||
|  | a63150ca35 | ||
|  | 7ab8455596 | ||
|  | 43be62c7ef | ||
|  | 7b15ce71c2 | ||
|  | 96c5a9e303 | ||
|  | 0b337e0819 | ||
|  | d4b3c20e48 | ||
|  | 31be0f04b9 | ||
|  | 6af0e28e5d | ||
|  | 9cb538b08f | ||
|  | bf49f962c0 | ||
|  | 2a69b4f3b7 | ||
|  | 540904aa9d | ||
|  | 26bdf79642 | ||
|  | 2c1ffaceca | ||
|  | dffff73654 | ||
|  | 2f9d4f5a96 | ||
|  | ce96018af4 | ||
|  | a025fab082 | ||
|  | 812ad52007 | ||
|  | 9066fcac9a | ||
|  | a70ebdb005 | ||
|  | 956d4b2568 | ||
|  | ea2256da29 | ||
|  | d1bd8f3637 | ||
|  | 22d486bbf7 | ||
|  | 977ff62fe8 | ||
|  | 5bfc162df9 | ||
|  | 2aa643502a | 
							
								
								
									
										5
									
								
								.browserslistrc
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										5
									
								
								.browserslistrc
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,5 @@ | ||||
| > 0.15% | ||||
| > 0.15% in US | ||||
| last 2 versions | ||||
| Firefox ESR | ||||
| not dead | ||||
| @@ -1,27 +0,0 @@ | ||||
| te | ||||
| ans | ||||
| pullrequest | ||||
| ist | ||||
| cros | ||||
| wit | ||||
| nwe | ||||
| circularly | ||||
| ned | ||||
| ba | ||||
| ressemble | ||||
| ser | ||||
| sur | ||||
| hel | ||||
| fpr | ||||
| alls | ||||
| nd | ||||
| ot | ||||
| womens | ||||
| vise | ||||
| falsy | ||||
| ro | ||||
| derails | ||||
| forin | ||||
| uper | ||||
| slac | ||||
| couldn | ||||
| @@ -17,7 +17,7 @@ max_line_length = 100 | ||||
| [*.{py,pyi}] | ||||
| max_line_length = 110 | ||||
|  | ||||
| [*.{md,svg,rb,pp,yaml,yml}] | ||||
| [*.{svg,rb,pp,yaml,yml}] | ||||
| indent_size = 2 | ||||
|  | ||||
| [package.json] | ||||
|   | ||||
| @@ -4,12 +4,11 @@ | ||||
|  | ||||
| /docs/_build | ||||
| /static/generated | ||||
| /static/third | ||||
| /static/webpack-bundles | ||||
| /var/* | ||||
| !/var/puppeteer | ||||
| /var/puppeteer/* | ||||
| !/var/puppeteer/test_credentials.d.ts | ||||
| /web/generated | ||||
| /web/third | ||||
| /zulip-current-venv | ||||
| /zulip-py3-venv | ||||
|   | ||||
							
								
								
									
										150
									
								
								.eslintrc.json
									
									
									
									
									
								
							
							
						
						
									
										150
									
								
								.eslintrc.json
									
									
									
									
									
								
							| @@ -1,5 +1,4 @@ | ||||
| { | ||||
|     "root": true, | ||||
|     "env": { | ||||
|         "es2020": true, | ||||
|         "node": true | ||||
| @@ -8,29 +7,14 @@ | ||||
|         "eslint:recommended", | ||||
|         "plugin:import/errors", | ||||
|         "plugin:import/warnings", | ||||
|         "plugin:no-jquery/recommended", | ||||
|         "plugin:no-jquery/deprecated", | ||||
|         "plugin:unicorn/recommended", | ||||
|         "prettier" | ||||
|     ], | ||||
|     "parser": "@babel/eslint-parser", | ||||
|     "parserOptions": { | ||||
|         "requireConfigFile": false, | ||||
|         "warnOnUnsupportedTypeScriptVersion": false, | ||||
|         "sourceType": "unambiguous" | ||||
|     }, | ||||
|     "plugins": ["formatjs", "no-jquery"], | ||||
|     "settings": { | ||||
|         "formatjs": { | ||||
|             "additionalFunctionNames": ["$t", "$t_html"] | ||||
|         }, | ||||
|         "no-jquery": { | ||||
|             "collectionReturningPlugins": { | ||||
|                 "expectOne": "always" | ||||
|             }, | ||||
|             "variablePattern": "^\\$(?!t$|t_html$)." | ||||
|         } | ||||
|     }, | ||||
|     "reportUnusedDisableDirectives": true, | ||||
|     "rules": { | ||||
|         "array-callback-return": "error", | ||||
| @@ -40,22 +24,18 @@ | ||||
|         "curly": "error", | ||||
|         "dot-notation": "error", | ||||
|         "eqeqeq": "error", | ||||
|         "formatjs/enforce-default-message": ["error", "literal"], | ||||
|         "formatjs/enforce-placeholders": [ | ||||
|             "error", | ||||
|             {"ignoreList": ["b", "code", "em", "i", "kbd", "p", "strong"]} | ||||
|         ], | ||||
|         "formatjs/no-id": "error", | ||||
|         "guard-for-in": "error", | ||||
|         "import/extensions": "error", | ||||
|         "import/first": "error", | ||||
|         "import/newline-after-import": "error", | ||||
|         "import/no-cycle": ["error", {"ignoreExternal": true}], | ||||
|         "import/no-duplicates": "error", | ||||
|         "import/no-self-import": "error", | ||||
|         "import/no-unresolved": "off", | ||||
|         "import/no-useless-path-segments": "error", | ||||
|         "import/order": ["error", {"alphabetize": {"order": "asc"}, "newlines-between": "always"}], | ||||
|         "import/order": [ | ||||
|             "error", | ||||
|             { | ||||
|                 "alphabetize": {"order": "asc"}, | ||||
|                 "newlines-between": "always" | ||||
|             } | ||||
|         ], | ||||
|         "import/unambiguous": "error", | ||||
|         "lines-around-directive": "error", | ||||
|         "new-cap": "error", | ||||
| @@ -66,6 +46,7 @@ | ||||
|         "no-catch-shadow": "error", | ||||
|         "no-constant-condition": ["error", {"checkLoops": false}], | ||||
|         "no-div-regex": "error", | ||||
|         "no-duplicate-imports": "error", | ||||
|         "no-else-return": "error", | ||||
|         "no-eq-null": "error", | ||||
|         "no-eval": "error", | ||||
| @@ -73,8 +54,6 @@ | ||||
|         "no-implied-eval": "error", | ||||
|         "no-inner-declarations": "off", | ||||
|         "no-iterator": "error", | ||||
|         "no-jquery/no-constructor-attributes": "error", | ||||
|         "no-jquery/no-parse-html-literal": "error", | ||||
|         "no-label-var": "error", | ||||
|         "no-labels": "error", | ||||
|         "no-loop-func": "error", | ||||
| @@ -94,18 +73,19 @@ | ||||
|         "no-undef-init": "error", | ||||
|         "no-unneeded-ternary": ["error", {"defaultAssignment": false}], | ||||
|         "no-unused-expressions": "error", | ||||
|         "no-unused-vars": [ | ||||
|             "error", | ||||
|             {"args": "all", "argsIgnorePattern": "^_", "ignoreRestSiblings": true} | ||||
|         ], | ||||
|         "no-use-before-define": ["error", {"functions": false}], | ||||
|         "no-useless-concat": "error", | ||||
|         "no-useless-constructor": "error", | ||||
|         "no-var": "error", | ||||
|         "object-shorthand": ["error", "always", {"avoidExplicitReturnArrows": true}], | ||||
|         "object-shorthand": "error", | ||||
|         "one-var": ["error", "never"], | ||||
|         "prefer-arrow-callback": "error", | ||||
|         "prefer-const": ["error", {"ignoreReadBeforeAssign": true}], | ||||
|         "prefer-const": [ | ||||
|             "error", | ||||
|             { | ||||
|                 "ignoreReadBeforeAssign": true | ||||
|             } | ||||
|         ], | ||||
|         "radix": "error", | ||||
|         "sort-imports": ["error", {"ignoreDeclarationSort": true}], | ||||
|         "spaced-comment": ["error", "always", {"markers": ["/"]}], | ||||
| @@ -113,91 +93,70 @@ | ||||
|         "unicorn/consistent-function-scoping": "off", | ||||
|         "unicorn/explicit-length-check": "off", | ||||
|         "unicorn/filename-case": "off", | ||||
|         "unicorn/no-await-expression-member": "off", | ||||
|         "unicorn/no-negated-condition": "off", | ||||
|         "unicorn/no-nested-ternary": "off", | ||||
|         "unicorn/no-null": "off", | ||||
|         "unicorn/no-process-exit": "off", | ||||
|         "unicorn/no-useless-undefined": "off", | ||||
|         "unicorn/number-literal-case": "off", | ||||
|         "unicorn/numeric-separators-style": "off", | ||||
|         "unicorn/prefer-module": "off", | ||||
|         "unicorn/prefer-node-protocol": "off", | ||||
|         "unicorn/prefer-spread": "off", | ||||
|         "unicorn/prefer-ternary": "off", | ||||
|         "unicorn/prefer-top-level-await": "off", | ||||
|         "unicorn/prevent-abbreviations": "off", | ||||
|         "unicorn/switch-case-braces": "off", | ||||
|         "valid-typeof": ["error", {"requireStringLiterals": true}], | ||||
|         "yoda": "error" | ||||
|     }, | ||||
|     "overrides": [ | ||||
|         { | ||||
|             "files": ["web/tests/**"], | ||||
|             "rules": { | ||||
|                 "no-jquery/no-selector-prop": "off" | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["web/e2e-tests/**"], | ||||
|             "files": ["frontend_tests/puppeteer_lib/**", "frontend_tests/puppeteer_tests/**"], | ||||
|             "globals": { | ||||
|                 "$": false, | ||||
|                 "zulip_test": false | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["web/src/**"], | ||||
|             "files": ["static/js/**"], | ||||
|             "globals": { | ||||
|                 "StripeCheckout": false | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["**/*.ts"], | ||||
|             "extends": [ | ||||
|                 "plugin:@typescript-eslint/recommended", | ||||
|                 "plugin:@typescript-eslint/recommended-requiring-type-checking", | ||||
|                 "plugin:@typescript-eslint/strict", | ||||
|                 "plugin:import/typescript" | ||||
|             ], | ||||
|             "extends": ["plugin:@typescript-eslint/recommended", "plugin:import/typescript"], | ||||
|             "parserOptions": { | ||||
|                 "project": "tsconfig.json" | ||||
|             }, | ||||
|             "settings": { | ||||
|                 "import/resolver": { | ||||
|                     "node": { | ||||
|                         "extensions": [".ts", ".d.ts", ".js"] // https://github.com/import-js/eslint-plugin-import/issues/2267 | ||||
|                     } | ||||
|                 } | ||||
|             }, | ||||
|             "globals": { | ||||
|                 "JQuery": false | ||||
|             }, | ||||
|             "rules": { | ||||
|                 // Disable base rule to avoid conflict | ||||
|                 "no-use-before-define": "off", | ||||
|                 "no-duplicate-imports": "off", | ||||
|                 "no-unused-vars": "off", | ||||
|                 "no-useless-constructor": "off", | ||||
|  | ||||
|                 "@typescript-eslint/consistent-type-assertions": [ | ||||
|                     "error", | ||||
|                     {"assertionStyle": "never"} | ||||
|                 ], | ||||
|                 "@typescript-eslint/consistent-type-definitions": ["error", "type"], | ||||
|                 "@typescript-eslint/array-type": "error", | ||||
|                 "@typescript-eslint/await-thenable": "error", | ||||
|                 "@typescript-eslint/consistent-type-assertions": "error", | ||||
|                 "@typescript-eslint/consistent-type-imports": "error", | ||||
|                 "@typescript-eslint/explicit-function-return-type": [ | ||||
|                     "error", | ||||
|                     {"allowExpressions": true} | ||||
|                 ], | ||||
|                 "@typescript-eslint/member-ordering": "error", | ||||
|                 "@typescript-eslint/no-duplicate-imports": "off", | ||||
|                 "@typescript-eslint/no-explicit-any": "off", | ||||
|                 "@typescript-eslint/no-extraneous-class": "error", | ||||
|                 "@typescript-eslint/no-non-null-assertion": "off", | ||||
|                 "@typescript-eslint/no-unnecessary-condition": "off", | ||||
|                 "@typescript-eslint/no-parameter-properties": "error", | ||||
|                 "@typescript-eslint/no-unnecessary-qualifier": "error", | ||||
|                 "@typescript-eslint/no-unsafe-argument": "off", | ||||
|                 "@typescript-eslint/no-unsafe-assignment": "off", | ||||
|                 "@typescript-eslint/no-unsafe-call": "off", | ||||
|                 "@typescript-eslint/no-unsafe-member-access": "off", | ||||
|                 "@typescript-eslint/no-unsafe-return": "off", | ||||
|                 "@typescript-eslint/no-unused-vars": [ | ||||
|                     "error", | ||||
|                     {"args": "all", "argsIgnorePattern": "^_", "ignoreRestSiblings": true} | ||||
|                 ], | ||||
|                 "@typescript-eslint/no-use-before-define": ["error", {"functions": false}], | ||||
|                 "@typescript-eslint/parameter-properties": "error", | ||||
|                 "@typescript-eslint/no-unnecessary-type-assertion": "error", | ||||
|                 "@typescript-eslint/no-unused-vars": ["error", {"varsIgnorePattern": "^_"}], | ||||
|                 "@typescript-eslint/no-use-before-define": "error", | ||||
|                 "@typescript-eslint/no-useless-constructor": "error", | ||||
|                 "@typescript-eslint/prefer-includes": "error", | ||||
|                 "@typescript-eslint/prefer-regexp-exec": "error", | ||||
|                 "@typescript-eslint/prefer-string-starts-ends-with": "error", | ||||
|                 "@typescript-eslint/promise-function-async": "error", | ||||
|                 "@typescript-eslint/unified-signatures": "error", | ||||
|                 "no-undef": "error" | ||||
|             } | ||||
|         }, | ||||
| @@ -208,7 +167,7 @@ | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["web/e2e-tests/**", "web/tests/**"], | ||||
|             "files": ["frontend_tests/**"], | ||||
|             "globals": { | ||||
|                 "CSS": false, | ||||
|                 "document": false, | ||||
| @@ -216,14 +175,11 @@ | ||||
|                 "window": false | ||||
|             }, | ||||
|             "rules": { | ||||
|                 "formatjs/no-id": "off", | ||||
|                 "new-cap": "off", | ||||
|                 "no-sync": "off", | ||||
|                 "unicorn/prefer-prototype-methods": "off" | ||||
|                 "no-sync": "off" | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["web/debug-require.js"], | ||||
|             "files": ["tools/debug-require.js"], | ||||
|             "env": { | ||||
|                 "browser": true, | ||||
|                 "es2020": false | ||||
| @@ -237,27 +193,20 @@ | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["web/shared/**", "web/src/**", "web/third/**"], | ||||
|             "files": ["static/**"], | ||||
|             "env": { | ||||
|                 "browser": true, | ||||
|                 "node": false | ||||
|             }, | ||||
|             "globals": { | ||||
|                 "ZULIP_VERSION": false | ||||
|             }, | ||||
|             "rules": { | ||||
|                 "no-console": "error" | ||||
|             }, | ||||
|             "settings": { | ||||
|                 "import/resolver": { | ||||
|                     "webpack": { | ||||
|                         "config": "./web/webpack.config.ts" | ||||
|                     } | ||||
|                 } | ||||
|                 "import/resolver": "webpack" | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["web/shared/**"], | ||||
|             "files": ["static/shared/**"], | ||||
|             "env": { | ||||
|                 "browser": false, | ||||
|                 "shared-node-browser": true | ||||
| @@ -268,14 +217,13 @@ | ||||
|                     { | ||||
|                         "zones": [ | ||||
|                             { | ||||
|                                 "target": "./web/shared", | ||||
|                                 "target": "./static/shared", | ||||
|                                 "from": ".", | ||||
|                                 "except": ["./node_modules", "./web/shared"] | ||||
|                                 "except": ["./node_modules", "./static/shared"] | ||||
|                             } | ||||
|                         ] | ||||
|                     } | ||||
|                 ], | ||||
|                 "unicorn/prefer-string-replace-all": "off" | ||||
|                 ] | ||||
|             } | ||||
|         } | ||||
|     ] | ||||
|   | ||||
							
								
								
									
										19
									
								
								.gitattributes
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										19
									
								
								.gitattributes
									
									
									
									
										vendored
									
									
								
							| @@ -1,19 +1,4 @@ | ||||
| # DIFFS: Noise suppression. | ||||
| # | ||||
| # Suppress noisy generated files in diffs. | ||||
| # (When you actually want to see these diffs, use `git diff -a`.) | ||||
|  | ||||
| # Large test fixtures: | ||||
| corporate/tests/stripe_fixtures/*.json -diff | ||||
|  | ||||
|  | ||||
| # FORMATTING | ||||
|  | ||||
| # Maintain LF (Unix-style) newlines in text files. | ||||
| *   text=auto eol=lf | ||||
|  | ||||
| # Make sure various media files never get somehow auto-detected as text | ||||
| # and then newline-converted. | ||||
| *.gif binary | ||||
| *.jpg binary | ||||
| *.jpeg binary | ||||
| @@ -26,7 +11,3 @@ corporate/tests/stripe_fixtures/*.json -diff | ||||
| *.otf binary | ||||
| *.tif binary | ||||
| *.ogg binary | ||||
| *.bson binary | ||||
| *.bmp binary | ||||
| *.mp3 binary | ||||
| *.pdf binary | ||||
|   | ||||
							
								
								
									
										10
									
								
								.github/ISSUE_TEMPLATE/1_discussed_on_czo.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										10
									
								
								.github/ISSUE_TEMPLATE/1_discussed_on_czo.md
									
									
									
									
										vendored
									
									
								
							| @@ -1,10 +0,0 @@ | ||||
| --- | ||||
| name: Issue discussed in the Zulip development community | ||||
| about: Bug report, feature or improvement already discussed on chat.zulip.org. | ||||
| --- | ||||
|  | ||||
| <!-- Issue description --> | ||||
|  | ||||
| <!-- Link to a message in the chat.zulip.org discussion. Message links will still work even if the topic is renamed or resolved. Link back to this issue from the chat.zulip.org thread. --> | ||||
|  | ||||
| CZO thread | ||||
							
								
								
									
										17
									
								
								.github/ISSUE_TEMPLATE/2_bug_report.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										17
									
								
								.github/ISSUE_TEMPLATE/2_bug_report.md
									
									
									
									
										vendored
									
									
								
							| @@ -1,17 +0,0 @@ | ||||
| --- | ||||
| name: Bug report | ||||
| about: A concrete bug report with steps to reproduce the behavior. (See also "Possible bug" below.) | ||||
| labels: ["bug"] | ||||
| --- | ||||
|  | ||||
| <!-- Describe what you were expecting to see, what you saw instead, and steps to take in order to reproduce the buggy behavior. Screenshots can be helpful. --> | ||||
|  | ||||
| <!-- Check the box for the version of Zulip you are using (see https://zulip.com/help/view-zulip-version).--> | ||||
|  | ||||
| **Zulip Server and web app version:** | ||||
|  | ||||
| - [ ] Zulip Cloud (`*.zulipchat.com`) | ||||
| - [ ] Zulip Server 7.0+ | ||||
| - [ ] Zulip Server 6.0+ | ||||
| - [ ] Zulip Server 5.0 or older | ||||
| - [ ] Other or not sure | ||||
							
								
								
									
										6
									
								
								.github/ISSUE_TEMPLATE/3_feature_request.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.github/ISSUE_TEMPLATE/3_feature_request.md
									
									
									
									
										vendored
									
									
								
							| @@ -1,6 +0,0 @@ | ||||
| --- | ||||
| name: Feature or improvement request | ||||
| about: A specific proposal for a new feature of improvement. (See also "Feature suggestion or feedback" below.) | ||||
| --- | ||||
|  | ||||
| <!-- Describe the proposal, including how it would help you or your organization. --> | ||||
							
								
								
									
										14
									
								
								.github/ISSUE_TEMPLATE/config.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										14
									
								
								.github/ISSUE_TEMPLATE/config.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,14 +0,0 @@ | ||||
| blank_issues_enabled: true | ||||
| contact_links: | ||||
|   - name: Possible bug | ||||
|     url: https://zulip.readthedocs.io/en/latest/contributing/reporting-bugs.html | ||||
|     about: Report unexpected behavior that may be a bug. | ||||
|   - name: Feature suggestion or feedback | ||||
|     url: https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html | ||||
|     about: Start a discussion about your idea for improving Zulip. | ||||
|   - name: Issue with running or upgrading a Zulip server | ||||
|     url: https://zulip.readthedocs.io/en/latest/production/troubleshooting.html | ||||
|     about: We provide free, interactive support for the vast majority of questions about running a Zulip server. | ||||
|   - name: Other support requests and sales questions | ||||
|     url: https://zulip.com/help/contact-support | ||||
|     about: Contact us — we're happy to help! | ||||
							
								
								
									
										45
									
								
								.github/pull_request_template.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										45
									
								
								.github/pull_request_template.md
									
									
									
									
										vendored
									
									
								
							| @@ -1,43 +1,14 @@ | ||||
| <!-- Describe your pull request here.--> | ||||
| <!-- What's this PR for?  (Just a link to an issue is fine.) --> | ||||
|  | ||||
| Fixes: <!-- Issue link, or clear description.--> | ||||
|  | ||||
| <!-- If the PR makes UI changes, always include one or more still screenshots to demonstrate your changes. If it seems helpful, add a screen capture of the new functionality as well. | ||||
| **Testing plan:** <!-- How have you tested? --> | ||||
|  | ||||
| Tooling tips: https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html | ||||
| --> | ||||
|  | ||||
| **Screenshots and screen captures:** | ||||
| **GIFs or screenshots:** <!-- If a UI change.  See: | ||||
|   https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html | ||||
|   --> | ||||
|  | ||||
| <details> | ||||
| <summary>Self-review checklist</summary> | ||||
|  | ||||
| <!-- Prior to submitting a PR, follow our step-by-step guide to review your own code: | ||||
| https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code --> | ||||
|  | ||||
| <!-- Once you create the PR, check off all the steps below that you have completed. | ||||
| If any of these steps are not relevant or you have not completed, leave them unchecked.--> | ||||
|  | ||||
| - [ ] [Self-reviewed](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code) the changes for clarity and maintainability | ||||
|       (variable names, code reuse, readability, etc.). | ||||
|  | ||||
| Communicate decisions, questions, and potential concerns. | ||||
|  | ||||
| - [ ] Explains differences from previous plans (e.g., issue description). | ||||
| - [ ] Highlights technical choices and bugs encountered. | ||||
| - [ ] Calls out remaining decisions and concerns. | ||||
| - [ ] Automated tests verify logic where appropriate. | ||||
|  | ||||
| Individual commits are ready for review (see [commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html)). | ||||
|  | ||||
| - [ ] Each commit is a coherent idea. | ||||
| - [ ] Commit message(s) explain reasoning and motivation for changes. | ||||
|  | ||||
| Completed manual review and testing of the following: | ||||
|  | ||||
| - [ ] Visual appearance of the changes. | ||||
| - [ ] Responsiveness and internationalization. | ||||
| - [ ] Strings and tooltips. | ||||
| - [ ] End-to-end functionality of buttons, interactions and flows. | ||||
| - [ ] Corner cases, error conditions, and easily imagined bugs. | ||||
| </details> | ||||
| <!-- Also be sure to make clear, coherent commits: | ||||
|   https://zulip.readthedocs.io/en/latest/contributing/version-control.html | ||||
|   --> | ||||
|   | ||||
							
								
								
									
										41
									
								
								.github/workflows/cancel-previous-runs.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										41
									
								
								.github/workflows/cancel-previous-runs.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,41 @@ | ||||
| name: Cancel previous runs | ||||
| on: [push, pull_request] | ||||
|  | ||||
| defaults: | ||||
|   run: | ||||
|     shell: bash | ||||
|  | ||||
| jobs: | ||||
|   cancel: | ||||
|     name: Cancel previous runs | ||||
|     runs-on: ubuntu-latest | ||||
|     timeout-minutes: 3 | ||||
|  | ||||
|     # Don't run this job for zulip/zulip pushes since we | ||||
|     # want to run those jobs. | ||||
|     if: ${{ github.event_name != 'push' || github.event.repository.full_name != 'zulip/zulip' }} | ||||
|  | ||||
|     steps: | ||||
|       # We get workflow IDs from GitHub API so we don't have to maintain | ||||
|       # a hard-coded list of IDs which need to be updated when a workflow | ||||
|       # is added or removed. And, workflow IDs are different for other forks | ||||
|       # so this is required. | ||||
|       - name: Get workflow IDs. | ||||
|         id: workflow_ids | ||||
|         env: | ||||
|           # This is in <owner>/<repo> format e.g. zulip/zulip | ||||
|           REPOSITORY: ${{ github.repository }} | ||||
|         run: | | ||||
|           workflow_api_url=https://api.github.com/repos/$REPOSITORY/actions/workflows | ||||
|           curl $workflow_api_url -o workflows.json | ||||
|  | ||||
|           script="const {workflows} = require('./workflows'); \ | ||||
|                   const ids = workflows.map(workflow => workflow.id); \ | ||||
|                   console.log(ids.join(','));" | ||||
|           ids=$(node -e "$script") | ||||
|           echo "::set-output name=ids::$ids" | ||||
|  | ||||
|       - uses: styfle/cancel-workflow-action@0.4.1 | ||||
|         with: | ||||
|           workflow_id: ${{ steps.workflow_ids.outputs.ids }} | ||||
|           access_token: ${{ github.token }} | ||||
							
								
								
									
										36
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										36
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,40 +1,30 @@ | ||||
| name: "Code scanning" | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: ["*.x", chat.zulip.org, main] | ||||
|     tags: ["*"] | ||||
|   pull_request: | ||||
|     branches: ["*.x", chat.zulip.org, main] | ||||
|   workflow_dispatch: | ||||
|  | ||||
| concurrency: | ||||
|   group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}" | ||||
|   cancel-in-progress: true | ||||
|  | ||||
| permissions: | ||||
|   contents: read | ||||
| on: [push, pull_request] | ||||
|  | ||||
| jobs: | ||||
|   CodeQL: | ||||
|     permissions: | ||||
|       actions: read # for github/codeql-action/init to get workflow details | ||||
|       contents: read # for actions/checkout to fetch code | ||||
|       security-events: write # for github/codeql-action/analyze to upload SARIF results | ||||
|     if: ${{!github.event.repository.private}} | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
|     steps: | ||||
|       - name: Check out repository | ||||
|         uses: actions/checkout@v3 | ||||
|         uses: actions/checkout@v2 | ||||
|         with: | ||||
|           # We must fetch at least the immediate parents so that if this is | ||||
|           # a pull request then we can check out the head. | ||||
|           fetch-depth: 2 | ||||
|  | ||||
|       # If this run was triggered by a pull request event, then check out | ||||
|       # the head of the pull request instead of the merge commit. | ||||
|       - run: git checkout HEAD^2 | ||||
|         if: ${{ github.event_name == 'pull_request' }} | ||||
|  | ||||
|       # Initializes the CodeQL tools for scanning. | ||||
|       - name: Initialize CodeQL | ||||
|         uses: github/codeql-action/init@v2 | ||||
|         uses: github/codeql-action/init@v1 | ||||
|  | ||||
|         # Override language selection by uncommenting this and choosing your languages | ||||
|         # with: | ||||
|         #   languages: go, javascript, csharp, python, cpp, java | ||||
|  | ||||
|       - name: Perform CodeQL Analysis | ||||
|         uses: github/codeql-action/analyze@v2 | ||||
|         uses: github/codeql-action/analyze@v1 | ||||
|   | ||||
							
								
								
									
										24
									
								
								.github/workflows/legacy-os.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										24
									
								
								.github/workflows/legacy-os.yml
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,24 @@ | ||||
| name: Legacy OS | ||||
|  | ||||
| on: [push, pull_request] | ||||
|  | ||||
| jobs: | ||||
|   xenial: | ||||
|     name: Ubuntu 16.04 Xenial (Python 3.5, legacy) | ||||
|     runs-on: ubuntu-16.04 | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - name: Check tools/provision error message on xenial | ||||
|         run: | | ||||
|           { { ! tools/provision 2>&1 >&3; } | tee provision.err; } 3>&1 >&2 | ||||
|           grep -Fqx 'Error: ubuntu 16.04 is no longer a supported platform for Zulip.' provision.err | ||||
|       - name: Check scripts/lib/upgrade-zulip-stage-2 error message on xenial | ||||
|         run: | | ||||
|           { { ! sudo scripts/lib/upgrade-zulip-stage-2 2>&1 >&3; } | tee upgrade.err; } 3>&1 >&2 | ||||
|           grep -Fq 'upgrade-zulip-stage-2: Unsupported platform: ubuntu 16.04' upgrade.err | ||||
|  | ||||
|       - name: Report status | ||||
|         if: failure() | ||||
|         env: | ||||
|           ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|         run: tools/ci/send-failure-message | ||||
							
								
								
									
										312
									
								
								.github/workflows/production-suite.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										312
									
								
								.github/workflows/production-suite.yml
									
									
									
									
										vendored
									
									
								
							| @@ -2,51 +2,41 @@ name: Zulip production suite | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: ["*.x", chat.zulip.org, main] | ||||
|     tags: ["*"] | ||||
|   pull_request: | ||||
|     paths: | ||||
|       - .github/workflows/production-suite.yml | ||||
|       - "**/migrations/**" | ||||
|       - manage.py | ||||
|       - pnpm-lock.yaml | ||||
|       - puppet/** | ||||
|       - requirements/** | ||||
|       - scripts/** | ||||
|       - static/** | ||||
|       - tools/** | ||||
|       - web/babel.config.js | ||||
|       - web/postcss.config.js | ||||
|       - web/third/** | ||||
|       - web/webpack.config.ts | ||||
|       - zerver/worker/queue_processors.py | ||||
|       - zerver/lib/push_notifications.py | ||||
|       - zerver/decorator.py | ||||
|       - zproject/** | ||||
|   workflow_dispatch: | ||||
|  | ||||
| concurrency: | ||||
|   group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}" | ||||
|   cancel-in-progress: true | ||||
|       - yarn.lock | ||||
|       - .github/workflows/production-suite.yml | ||||
|   pull_request: | ||||
|     paths: | ||||
|       - "**/migrations/**" | ||||
|       - puppet/** | ||||
|       - requirements/** | ||||
|       - scripts/** | ||||
|       - static/** | ||||
|       - tools/** | ||||
|       - zproject/** | ||||
|       - yarn.lock | ||||
|       - .github/workflows/production-suite.yml | ||||
|  | ||||
| defaults: | ||||
|   run: | ||||
|     shell: bash | ||||
|  | ||||
| permissions: | ||||
|   contents: read | ||||
|  | ||||
| jobs: | ||||
|   production_build: | ||||
|     # This job builds a release tarball from the current commit, which | ||||
|     # will be used for all of the following install/upgrade tests. | ||||
|     name: Ubuntu 20.04 production build | ||||
|     name: Bionic production build | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
|     # Docker images are built from 'tools/ci/Dockerfile'; the comments at | ||||
|     # the top explain how to build and upload these images. | ||||
|     # Ubuntu 20.04 ships with Python 3.8.10. | ||||
|     container: zulip/ci:focal | ||||
|  | ||||
|     # This docker image was created by a generated Dockerfile at: | ||||
|     #   tools/ci/images/bionic/Dockerfile | ||||
|     # Bionic ships with Python 3.6. | ||||
|     container: zulip/ci:bionic | ||||
|     steps: | ||||
|       - name: Add required permissions | ||||
|         run: | | ||||
| @@ -64,96 +54,83 @@ jobs: | ||||
|           # cache action to work. It is owned by root currently. | ||||
|           sudo chmod -R 0777 /__w/_temp/ | ||||
|  | ||||
|       - uses: actions/checkout@v3 | ||||
|       - uses: actions/checkout@v2 | ||||
|  | ||||
|       - name: Create cache directories | ||||
|         run: | | ||||
|           dirs=(/srv/zulip-{venv,emoji}-cache) | ||||
|           dirs=(/srv/zulip-{npm,venv,emoji}-cache) | ||||
|           sudo mkdir -p "${dirs[@]}" | ||||
|           sudo chown -R github "${dirs[@]}" | ||||
|  | ||||
|       - name: Restore pnpm store | ||||
|         uses: actions/cache@v3 | ||||
|       - name: Restore node_modules cache | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /__w/.pnpm-store | ||||
|           key: v1-pnpm-store-focal-${{ hashFiles('pnpm-lock.yaml') }} | ||||
|           path: /srv/zulip-npm-cache | ||||
|           key: v1-yarn-deps-${{ github.job }}-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }} | ||||
|           restore-keys: v1-yarn-deps-${{ github.job }} | ||||
|  | ||||
|       - name: Restore python cache | ||||
|         uses: actions/cache@v3 | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /srv/zulip-venv-cache | ||||
|           key: v1-venv-focal-${{ hashFiles('requirements/dev.txt') }} | ||||
|           restore-keys: v1-venv-focal | ||||
|           key: v1-venv-${{ github.job }}-${{ hashFiles('requirements/dev.txt') }} | ||||
|           restore-keys: v1-venv-${{ github.job }} | ||||
|  | ||||
|       - name: Restore emoji cache | ||||
|         uses: actions/cache@v3 | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /srv/zulip-emoji-cache | ||||
|           key: v1-emoji-focal-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }} | ||||
|           restore-keys: v1-emoji-focal | ||||
|           key: v1-emoji-${{ github.job }}-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }} | ||||
|           restore-keys: v1-emoji-${{ github.job }} | ||||
|  | ||||
|       - name: Do Bionic hack | ||||
|         run: | | ||||
|           # Temporary hack till `sudo service redis-server start` gets fixes in Bionic. See | ||||
|           # https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI | ||||
|           sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf | ||||
|  | ||||
|       - name: Build production tarball | ||||
|         run: ./tools/ci/production-build | ||||
|  | ||||
|       - name: Upload production build artifacts for install jobs | ||||
|         uses: actions/upload-artifact@v3 | ||||
|         uses: actions/upload-artifact@v2 | ||||
|         with: | ||||
|           name: production-tarball | ||||
|           path: /tmp/production-build | ||||
|           retention-days: 1 | ||||
|           retention-days: 14 | ||||
|  | ||||
|       - name: Verify pnpm store path | ||||
|         run: | | ||||
|           set -x | ||||
|           path="$(pnpm store path)" | ||||
|           [[ "$path" == /__w/.pnpm-store/* ]] | ||||
|  | ||||
|       - name: Generate failure report string | ||||
|         id: failure_report_string | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT | ||||
|  | ||||
|       - name: Report status to CZO | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         uses: zulip/github-actions-zulip/send-message@v1 | ||||
|         with: | ||||
|           api-key: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|           email: "github-actions-bot@chat.zulip.org" | ||||
|           organization-url: "https://chat.zulip.org" | ||||
|           to: "automated testing" | ||||
|           topic: ${{ steps.failure_report_string.outputs.topic }} | ||||
|           type: "stream" | ||||
|           content: ${{ steps.failure_report_string.outputs.content }} | ||||
|       - name: Report status | ||||
|         if: failure() | ||||
|         env: | ||||
|           ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|         run: tools/ci/send-failure-message | ||||
|  | ||||
|   production_install: | ||||
|     # This job installs the server release tarball built above on a | ||||
|     # range of platforms, and does some basic health checks on the | ||||
|     # resulting installer Zulip server. | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         include: | ||||
|           # Docker images are built from 'tools/ci/Dockerfile'; the comments at | ||||
|           # the top explain how to build and upload these images. | ||||
|           - docker_image: zulip/ci:focal | ||||
|             name: Ubuntu 20.04 production install and PostgreSQL upgrade with pgroonga | ||||
|             os: focal | ||||
|             extra-args: "" | ||||
|           # Base images are built using `tools/ci/Dockerfile.template`. | ||||
|           # The comments at the top explain how to build and upload these images. | ||||
|           - docker_image: zulip/ci:bionic | ||||
|             name: Bionic production install | ||||
|             is_bionic: true | ||||
|             os: bionic | ||||
|  | ||||
|           - docker_image: zulip/ci:jammy | ||||
|             name: Ubuntu 22.04 production install | ||||
|             os: jammy | ||||
|             extra-args: "" | ||||
|           - docker_image: zulip/ci:focal | ||||
|             name: Focal production install | ||||
|             is_focal: true | ||||
|             os: focal | ||||
|  | ||||
|           - docker_image: zulip/ci:buster | ||||
|             name: Buster production install | ||||
|             is_buster: true | ||||
|             os: buster | ||||
|  | ||||
|           - docker_image: zulip/ci:bullseye | ||||
|             name: Debian 11 production install with custom db name and user | ||||
|             name: Bullseye production install | ||||
|             is_bullseye: true | ||||
|             os: bullseye | ||||
|             extra-args: --test-custom-db | ||||
|  | ||||
|           - docker_image: zulip/ci:bookworm | ||||
|             name: Debian 12 production install | ||||
|             os: bookworm | ||||
|             extra-args: "" | ||||
|  | ||||
|     name: ${{ matrix.name  }} | ||||
|     container: | ||||
| @@ -164,7 +141,7 @@ jobs: | ||||
|  | ||||
|     steps: | ||||
|       - name: Download built production tarball | ||||
|         uses: actions/download-artifact@v3 | ||||
|         uses: actions/download-artifact@v2 | ||||
|         with: | ||||
|           name: production-tarball | ||||
|           path: /tmp | ||||
| @@ -176,151 +153,58 @@ jobs: | ||||
|           # cache action to work. It is owned by root currently. | ||||
|           sudo chmod -R 0777 /__w/_temp/ | ||||
|  | ||||
|           # Create the zulip directory that the tools/ci/ scripts needs | ||||
|           mkdir -p /home/github/zulip | ||||
|  | ||||
|           # Since actions/download-artifact@v2 loses all the permissions | ||||
|           # of the tarball uploaded by the upload artifact fix those. | ||||
|           chmod +x /tmp/production-extract-tarball | ||||
|           chmod +x /tmp/production-upgrade-pg | ||||
|           chmod +x /tmp/production-pgroonga | ||||
|           chmod +x /tmp/production-install | ||||
|           chmod +x /tmp/production-verify | ||||
|           chmod +x /tmp/generate-failure-message | ||||
|           chmod +x /tmp/send-failure-message | ||||
|  | ||||
|       - name: Create cache directories | ||||
|         run: | | ||||
|           dirs=(/srv/zulip-{venv,emoji}-cache) | ||||
|           dirs=(/srv/zulip-{npm,venv,emoji}-cache) | ||||
|           sudo mkdir -p "${dirs[@]}" | ||||
|           sudo chown -R github "${dirs[@]}" | ||||
|  | ||||
|       - name: Restore node_modules cache | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /srv/zulip-npm-cache | ||||
|           key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('/tmp/package.json') }}-${{ hashFiles('/tmp/yarn.lock') }} | ||||
|           restore-keys: v1-yarn-deps-${{ matrix.os }} | ||||
|  | ||||
|       - name: Do Bionic hack | ||||
|         if: ${{ matrix.is_bionic }} | ||||
|         run: | | ||||
|           # Temporary hack till `sudo service redis-server start` gets fixes in Bionic. See | ||||
|           # https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI | ||||
|           sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf | ||||
|  | ||||
|       - name: Production extract tarball | ||||
|         run: /tmp/production-extract-tarball | ||||
|  | ||||
|       - name: Install production | ||||
|         run: sudo /tmp/production-install ${{ matrix.extra-args }} | ||||
|         run: | | ||||
|           sudo service rabbitmq-server restart | ||||
|           sudo /tmp/production-install | ||||
|  | ||||
|       - name: Verify install | ||||
|         run: sudo /tmp/production-verify ${{ matrix.extra-args }} | ||||
|  | ||||
|       - name: Install pgroonga | ||||
|         if: ${{ matrix.os == 'focal' }} | ||||
|         run: sudo /tmp/production-pgroonga | ||||
|  | ||||
|       - name: Verify install after installing pgroonga | ||||
|         if: ${{ matrix.os == 'focal' }} | ||||
|         run: sudo /tmp/production-verify ${{ matrix.extra-args }} | ||||
|         run: sudo /tmp/production-verify | ||||
|  | ||||
|       - name: Upgrade postgresql | ||||
|         if: ${{ matrix.os == 'focal' }} | ||||
|         if: ${{ matrix.is_bionic }} | ||||
|         run: sudo /tmp/production-upgrade-pg | ||||
|  | ||||
|       - name: Verify install after upgrading postgresql | ||||
|         if: ${{ matrix.os == 'focal' }} | ||||
|         run: sudo /tmp/production-verify ${{ matrix.extra-args }} | ||||
|         if: ${{ matrix.is_bionic }} | ||||
|         run: sudo /tmp/production-verify | ||||
|  | ||||
|       - name: Generate failure report string | ||||
|         id: failure_report_string | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         run: /tmp/generate-failure-message >> $GITHUB_OUTPUT | ||||
|  | ||||
|       - name: Report status to CZO | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         uses: zulip/github-actions-zulip/send-message@v1 | ||||
|         with: | ||||
|           api-key: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|           email: "github-actions-bot@chat.zulip.org" | ||||
|           organization-url: "https://chat.zulip.org" | ||||
|           to: "automated testing" | ||||
|           topic: ${{ steps.failure_report_string.outputs.topic }} | ||||
|           type: "stream" | ||||
|           content: ${{ steps.failure_report_string.outputs.content }} | ||||
|  | ||||
|   production_upgrade: | ||||
|     # The production upgrade job starts with a container with a | ||||
|     # previous Zulip release installed, and attempts to upgrade it to | ||||
|     # the release tarball built for the current commit being tested. | ||||
|     # | ||||
|     # This is intended to catch bugs that result in the upgrade | ||||
|     # process failing. | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         include: | ||||
|           # Docker images are built from 'tools/ci/Dockerfile.prod'; the comments at | ||||
|           # the top explain how to build and upload these images. | ||||
|           - docker_image: zulip/ci:focal-3.2 | ||||
|             name: 3.2 Version Upgrade | ||||
|             os: focal | ||||
|           - docker_image: zulip/ci:bullseye-4.2 | ||||
|             name: 4.2 Version Upgrade | ||||
|             os: bullseye | ||||
|           - docker_image: zulip/ci:bullseye-5.0 | ||||
|             name: 5.0 Version Upgrade | ||||
|             os: bullseye | ||||
|           - docker_image: zulip/ci:bullseye-6.0 | ||||
|             name: 6.0 Version Upgrade | ||||
|             os: bullseye | ||||
|  | ||||
|     name: ${{ matrix.name  }} | ||||
|     container: | ||||
|       image: ${{ matrix.docker_image }} | ||||
|       options: --init | ||||
|     runs-on: ubuntu-latest | ||||
|     needs: production_build | ||||
|  | ||||
|     steps: | ||||
|       - name: Download built production tarball | ||||
|         uses: actions/download-artifact@v3 | ||||
|         with: | ||||
|           name: production-tarball | ||||
|           path: /tmp | ||||
|  | ||||
|       - name: Add required permissions and setup | ||||
|         run: | | ||||
|           # This is the GitHub Actions specific cache directory the | ||||
|           # the current github user must be able to access for the | ||||
|           # cache action to work. It is owned by root currently. | ||||
|           sudo chmod -R 0777 /__w/_temp/ | ||||
|  | ||||
|           # Since actions/download-artifact@v2 loses all the permissions | ||||
|           # of the tarball uploaded by the upload artifact fix those. | ||||
|           chmod +x /tmp/production-upgrade | ||||
|           chmod +x /tmp/production-verify | ||||
|           chmod +x /tmp/generate-failure-message | ||||
|  | ||||
|       - name: Create cache directories | ||||
|         run: | | ||||
|           dirs=(/srv/zulip-{venv,emoji}-cache) | ||||
|           sudo mkdir -p "${dirs[@]}" | ||||
|           sudo chown -R github "${dirs[@]}" | ||||
|  | ||||
|       - name: Temporarily bootstrap PostgreSQL upgrades | ||||
|         # https://chat.zulip.org/#narrow/stream/43-automated-testing/topic/postgres.20client.20upgrade.20failures/near/1640444 | ||||
|         # On Debian, there is an ordering issue with post-install maintainer | ||||
|         # scripts when postgresql-client-common is upgraded at the same time as | ||||
|         # postgresql-client and postgresql-client-15.  Upgrade just | ||||
|         # postgresql-client-common first, so the main upgrade process can | ||||
|         # succeed.  This is a _temporary_ work-around to improve CI signal, as | ||||
|         # the failure does represent a real failure that production systems may | ||||
|         # encounter. | ||||
|         run: sudo apt-get update && sudo apt-get install -y --only-upgrade postgresql-client-common | ||||
|  | ||||
|       - name: Upgrade production | ||||
|         run: sudo /tmp/production-upgrade | ||||
|  | ||||
|         # TODO: We should be running production-verify here, but it | ||||
|         # doesn't pass yet. | ||||
|         # | ||||
|         # - name: Verify install | ||||
|         #   run: sudo /tmp/production-verify | ||||
|  | ||||
|       - name: Generate failure report string | ||||
|         id: failure_report_string | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         run: /tmp/generate-failure-message >> $GITHUB_OUTPUT | ||||
|  | ||||
|       - name: Report status to CZO | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         uses: zulip/github-actions-zulip/send-message@v1 | ||||
|         with: | ||||
|           api-key: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|           email: "github-actions-bot@chat.zulip.org" | ||||
|           organization-url: "https://chat.zulip.org" | ||||
|           to: "automated testing" | ||||
|           topic: ${{ steps.failure_report_string.outputs.topic }} | ||||
|           type: "stream" | ||||
|           content: ${{ steps.failure_report_string.outputs.content }} | ||||
|       - name: Report status | ||||
|         if: failure() | ||||
|         env: | ||||
|           ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|         run: /tmp/send-failure-message | ||||
|   | ||||
							
								
								
									
										7
									
								
								.github/workflows/update-oneclick-apps.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										7
									
								
								.github/workflows/update-oneclick-apps.yml
									
									
									
									
										vendored
									
									
								
							| @@ -2,14 +2,11 @@ name: Update one click apps | ||||
| on: | ||||
|   release: | ||||
|     types: [published] | ||||
| permissions: | ||||
|   contents: read | ||||
|  | ||||
| jobs: | ||||
|   update-digitalocean-oneclick-app: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v3 | ||||
|       - uses: actions/checkout@v2 | ||||
|       - name: Update DigitalOcean one click app | ||||
|         env: | ||||
|           DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }} | ||||
| @@ -22,6 +19,6 @@ jobs: | ||||
|         run: | | ||||
|           export PATH="$HOME/.local/bin:$PATH" | ||||
|           git clone https://github.com/zulip/marketplace-partners | ||||
|           pip3 install python-digitalocean zulip fab-classic PyNaCl | ||||
|           pip3 install python-digitalocean zulip fab-classic | ||||
|           echo $PATH | ||||
|           python3 tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py | ||||
|   | ||||
							
								
								
									
										255
									
								
								.github/workflows/zulip-ci.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										255
									
								
								.github/workflows/zulip-ci.yml
									
									
									
									
										vendored
									
									
								
							| @@ -4,55 +4,43 @@ | ||||
|  | ||||
| name: Zulip CI | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches: ["*.x", chat.zulip.org, main] | ||||
|     tags: ["*"] | ||||
|   pull_request: | ||||
|   workflow_dispatch: | ||||
|  | ||||
| concurrency: | ||||
|   group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}" | ||||
|   cancel-in-progress: true | ||||
| on: [push, pull_request] | ||||
|  | ||||
| defaults: | ||||
|   run: | ||||
|     shell: bash | ||||
|  | ||||
| permissions: | ||||
|   contents: read | ||||
|  | ||||
| jobs: | ||||
|   tests: | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         include: | ||||
|           # Base images are built using `tools/ci/Dockerfile.prod.template`. | ||||
|           # The comments at the top explain how to build and upload these images. | ||||
|           # Ubuntu 20.04 ships with Python 3.8.10. | ||||
|           - docker_image: zulip/ci:focal | ||||
|             name: Ubuntu 20.04 (Python 3.8, backend + frontend) | ||||
|             os: focal | ||||
|             include_documentation_tests: false | ||||
|           # This docker image was created by a generated Dockerfile at: | ||||
|           #   tools/ci/images/bionic/Dockerfile | ||||
|           # Bionic ships with Python 3.6. | ||||
|           - docker_image: zulip/ci:bionic | ||||
|             name: Ubuntu 18.04 Bionic (Python 3.6, backend + frontend) | ||||
|             os: bionic | ||||
|             is_bionic: true | ||||
|             include_frontend_tests: true | ||||
|           # Debian 11 ships with Python 3.9.2. | ||||
|  | ||||
|           # This docker image was created by a generated Dockerfile at: | ||||
|           #   tools/ci/images/focal/Dockerfile | ||||
|           # Focal ships with Python 3.8.2. | ||||
|           - docker_image: zulip/ci:focal | ||||
|             name: Ubuntu 20.04 Focal (Python 3.8, backend) | ||||
|             os: focal | ||||
|             is_focal: true | ||||
|             include_frontend_tests: false | ||||
|  | ||||
|           # This docker image was created by a generated Dockerfile at: | ||||
|           #   tools/ci/images/focal/Dockerfile | ||||
|           # Bullseye ships with Python 3.9.2. | ||||
|           - docker_image: zulip/ci:bullseye | ||||
|             name: Debian 11 (Python 3.9, backend + documentation) | ||||
|             name: Debian 11 Bullseye (Python 3.9, backend) | ||||
|             os: bullseye | ||||
|             include_documentation_tests: true | ||||
|             include_frontend_tests: false | ||||
|           # Ubuntu 22.04 ships with Python 3.10.4. | ||||
|           - docker_image: zulip/ci:jammy | ||||
|             name: Ubuntu 22.04 (Python 3.10, backend) | ||||
|             os: jammy | ||||
|             include_documentation_tests: false | ||||
|             include_frontend_tests: false | ||||
|           # Debian 12 ships with Python 3.11.2. | ||||
|           - docker_image: zulip/ci:bookworm | ||||
|             name: Debian 12 (Python 3.11, backend) | ||||
|             os: bookworm | ||||
|             include_documentation_tests: false | ||||
|             is_bullseye: true | ||||
|             include_frontend_tests: false | ||||
|  | ||||
|     runs-on: ubuntu-latest | ||||
| @@ -60,7 +48,7 @@ jobs: | ||||
|     container: ${{ matrix.docker_image }} | ||||
|     env: | ||||
|       # GitHub Actions sets HOME to /github/home which causes | ||||
|       # problem later in provision and frontend test that runs | ||||
|       # problem later in provison and frontend test that runs | ||||
|       # tools/setup/postgresql-init-dev-db because of the .pgpass | ||||
|       # location. PostgreSQL (psql) expects .pgpass to be at | ||||
|       # /home/github/.pgpass and setting home to `/home/github/` | ||||
| @@ -68,57 +56,115 @@ jobs: | ||||
|       HOME: /home/github/ | ||||
|  | ||||
|     steps: | ||||
|       - uses: actions/checkout@v3 | ||||
|       - name: Add required permissions | ||||
|         run: | | ||||
|           # The checkout actions doesn't clone to ~/zulip or allow | ||||
|           # us to use the path option to clone outside the current | ||||
|           # /__w/zulip/zulip directory. Since this directory is owned | ||||
|           # by root we need to change it's ownership to allow the | ||||
|           # github user to clone the code here. | ||||
|           # Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE | ||||
|           # which is /home/runner/work/. | ||||
|           sudo chown -R github . | ||||
|  | ||||
|           # This is the GitHub Actions specific cache directory the | ||||
|           # the current github user must be able to access for the | ||||
|           # cache action to work. It is owned by root currently. | ||||
|           sudo chmod -R 0777 /__w/_temp/ | ||||
|  | ||||
|       - uses: actions/checkout@v2 | ||||
|  | ||||
|       - name: Create cache directories | ||||
|         run: | | ||||
|           dirs=(/srv/zulip-{venv,emoji}-cache) | ||||
|           dirs=(/srv/zulip-{npm,venv,emoji}-cache) | ||||
|           sudo mkdir -p "${dirs[@]}" | ||||
|           sudo chown -R github "${dirs[@]}" | ||||
|  | ||||
|       - name: Restore pnpm store | ||||
|         uses: actions/cache@v3 | ||||
|       - name: Restore node_modules cache | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /__w/.pnpm-store | ||||
|           key: v1-pnpm-store-${{ matrix.os }}-${{ hashFiles('pnpm-lock.yaml') }} | ||||
|           path: /srv/zulip-npm-cache | ||||
|           key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }} | ||||
|           restore-keys: v1-yarn-deps-${{ matrix.os }} | ||||
|  | ||||
|       - name: Restore python cache | ||||
|         uses: actions/cache@v3 | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /srv/zulip-venv-cache | ||||
|           key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/dev.txt') }} | ||||
|           restore-keys: v1-venv-${{ matrix.os }} | ||||
|  | ||||
|       - name: Restore emoji cache | ||||
|         uses: actions/cache@v3 | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /srv/zulip-emoji-cache | ||||
|           key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json', 'tools/setup/emoji/build_emoji', 'tools/setup/emoji/emoji_setup_utils.py', 'tools/setup/emoji/emoji_names.py', 'package.json') }} | ||||
|           key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }} | ||||
|           restore-keys: v1-emoji-${{ matrix.os }} | ||||
|  | ||||
|       - name: Do Bionic hack | ||||
|         if: ${{ matrix.is_bionic }} | ||||
|         run: | | ||||
|           # Temporary hack till `sudo service redis-server start` gets fixes in Bionic. See | ||||
|           # https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI | ||||
|           sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf | ||||
|  | ||||
|       - name: Install dependencies | ||||
|         run: | | ||||
|           # This is the main setup job for the test suite | ||||
|           ./tools/ci/setup-backend --skip-dev-db-build | ||||
|           scripts/lib/clean_unused_caches.py --verbose --threshold=0 | ||||
|  | ||||
|           # Cleaning caches is mostly unnecessary in GitHub Actions, because | ||||
|           # most builds don't get to write to the cache. | ||||
|           # scripts/lib/clean-unused-caches --verbose --threshold 0 | ||||
|  | ||||
|       - name: Run tools test | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/test-tools | ||||
|  | ||||
|       - name: Run Codespell lint | ||||
|       - name: Run backend lint | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/run-codespell | ||||
|           echo "Test suite is running under $(python --version)." | ||||
|           ./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky | ||||
|  | ||||
|       - name: Run frontend lint | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky | ||||
|  | ||||
|       - name: Run backend tests | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/test-backend --coverage --include-webhooks --no-cov-cleanup --ban-console-output | ||||
|  | ||||
|       - name: Run mypy | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # We run mypy after the backend tests so we get output from the | ||||
|           # backend tests, which tend to uncover more serious problems, first. | ||||
|           ./tools/run-mypy --version | ||||
|           ./tools/run-mypy | ||||
|  | ||||
|       - name: Run miscellaneous tests | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|  | ||||
|           # Currently our compiled requirements files will differ for different python versions | ||||
|           # so we will run test-locked-requirements only for Bionic. | ||||
|           # ./tools/test-locked-requirements | ||||
|           # ./tools/test-run-dev  # https://github.com/zulip/zulip/pull/14233 | ||||
|           # | ||||
|           # This test has been persistently flaky at like 1% frequency, is slow, | ||||
|           # and is for a very specific single feature, so we don't run it by default: | ||||
|           # ./tools/test-queue-worker-reload | ||||
|  | ||||
|           ./tools/test-migrations | ||||
|           ./tools/setup/optimize-svg --check | ||||
|           ./tools/setup/generate_integration_bots_avatars.py --check-missing | ||||
|  | ||||
|       # We run the tests that are only run in a specific job early, so | ||||
|       # that we get feedback to the developer about likely failures as | ||||
|       # quickly as possible. Backend/mypy failures that aren't | ||||
|       # identical across different versions are much more rare than | ||||
|       # frontend linter or node test failures. | ||||
|       - name: Run documentation and api tests | ||||
|         if: ${{ matrix.include_documentation_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # In CI, we only test links we control in test-documentation to avoid flakes | ||||
| @@ -131,13 +177,7 @@ jobs: | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # Run the node tests first, since they're fast and deterministic | ||||
|           ./tools/test-js-with-node --coverage --parallel=1 | ||||
|  | ||||
|       - name: Run frontend lint | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky | ||||
|           ./tools/test-js-with-node --coverage | ||||
|  | ||||
|       - name: Check schemas | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
| @@ -160,55 +200,6 @@ jobs: | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/test-js-with-puppeteer | ||||
|  | ||||
|       - name: Check pnpm dedupe | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: pnpm dedupe --check | ||||
|  | ||||
|       - name: Run backend lint | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           echo "Test suite is running under $(python --version)." | ||||
|           ./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky | ||||
|  | ||||
|       - name: Run backend tests | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/test-backend --coverage --xml-report --no-html-report --include-webhooks --include-transaction-tests --no-cov-cleanup --ban-console-output | ||||
|  | ||||
|       - name: Run mypy | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # We run mypy after the backend tests so we get output from the | ||||
|           # backend tests, which tend to uncover more serious problems, first. | ||||
|           ./tools/run-mypy --version | ||||
|           ./tools/run-mypy | ||||
|  | ||||
|       - name: Run miscellaneous tests | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|  | ||||
|           # Currently our compiled requirements files will differ for different | ||||
|           # Python versions, so we will run test-locked-requirements only on the | ||||
|           # platform with the oldest one. | ||||
|           # ./tools/test-locked-requirements | ||||
|           # ./tools/test-run-dev  # https://github.com/zulip/zulip/pull/14233 | ||||
|           # | ||||
|           # This test has been persistently flaky at like 1% frequency, is slow, | ||||
|           # and is for a very specific single feature, so we don't run it by default: | ||||
|           # ./tools/test-queue-worker-reload | ||||
|  | ||||
|           ./tools/test-migrations | ||||
|           ./tools/setup/optimize-svg --check | ||||
|           ./tools/setup/generate_integration_bots_avatars.py --check-missing | ||||
|           ./tools/ci/check-executables | ||||
|  | ||||
|           # Ban check-database-compatibility from transitively | ||||
|           # relying on static/generated, because it might not be | ||||
|           # up-to-date at that point in upgrade-zulip-stage-2. | ||||
|           chmod 000 static/generated web/generated | ||||
|           ./scripts/lib/check-database-compatibility | ||||
|           chmod 755 static/generated web/generated | ||||
|  | ||||
|       - name: Check for untracked files | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
| @@ -221,7 +212,7 @@ jobs: | ||||
|           fi | ||||
|  | ||||
|       - name: Test locked requirements | ||||
|         if: ${{ matrix.os == 'focal' }} | ||||
|         if: ${{ matrix.is_bionic }} | ||||
|         run: | | ||||
|           . /srv/zulip-py3-venv/bin/activate && \ | ||||
|           ./tools/test-locked-requirements | ||||
| @@ -229,43 +220,31 @@ jobs: | ||||
|       - name: Upload coverage reports | ||||
|  | ||||
|         # Only upload coverage when both frontend and backend | ||||
|         # tests are run. | ||||
|         # tests are ran. | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         uses: codecov/codecov-action@v3 | ||||
|         with: | ||||
|           files: var/coverage.xml,var/node-coverage/lcov.info | ||||
|         run: | | ||||
|           # Codcov requires `.coverage` file to be stored in the | ||||
|           # current working directory. | ||||
|           mv ./var/.coverage ./.coverage | ||||
|           . /srv/zulip-py3-venv/bin/activate || true | ||||
|  | ||||
|           pip install codecov && codecov || echo "Error in uploading coverage reports to codecov.io." | ||||
|  | ||||
|       - name: Store Puppeteer artifacts | ||||
|         # Upload these on failure, as well | ||||
|         if: ${{ always() && matrix.include_frontend_tests }} | ||||
|         uses: actions/upload-artifact@v3 | ||||
|         uses: actions/upload-artifact@v2 | ||||
|         with: | ||||
|           name: puppeteer | ||||
|           path: ./var/puppeteer | ||||
|           retention-days: 60 | ||||
|  | ||||
|       - name: Check development database build | ||||
|         if: ${{ matrix.is_focal || matrix.is_bullseye }} | ||||
|         run: ./tools/ci/setup-backend | ||||
|  | ||||
|       - name: Verify pnpm store path | ||||
|         run: | | ||||
|           set -x | ||||
|           path="$(pnpm store path)" | ||||
|           [[ "$path" == /__w/.pnpm-store/* ]] | ||||
|  | ||||
|       - name: Generate failure report string | ||||
|         id: failure_report_string | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT | ||||
|  | ||||
|       - name: Report status to CZO | ||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} | ||||
|         uses: zulip/github-actions-zulip/send-message@v1 | ||||
|         with: | ||||
|           api-key: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|           email: "github-actions-bot@chat.zulip.org" | ||||
|           organization-url: "https://chat.zulip.org" | ||||
|           to: "automated testing" | ||||
|           topic: ${{ steps.failure_report_string.outputs.topic }} | ||||
|           type: "stream" | ||||
|           content: ${{ steps.failure_report_string.outputs.content }} | ||||
|       - name: Report status | ||||
|         if: failure() | ||||
|         env: | ||||
|           ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|         run: tools/ci/send-failure-message | ||||
|   | ||||
							
								
								
									
										18
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										18
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -17,8 +17,6 @@ | ||||
| # See `git help ignore` for details on the format. | ||||
|  | ||||
| ## Config files for the dev environment | ||||
| /zproject/apns-dev.pem | ||||
| /zproject/apns-dev-key.p8 | ||||
| /zproject/dev-secrets.conf | ||||
| /tools/conf.ini | ||||
| /tools/custom_provision | ||||
| @@ -35,7 +33,9 @@ package-lock.json | ||||
| !/var/puppeteer/test_credentials.d.ts | ||||
|  | ||||
| /.dmypy.json | ||||
| /.ruff_cache | ||||
|  | ||||
| # Dockerfiles generated for continuous integration | ||||
| /tools/ci/images | ||||
|  | ||||
| # Generated i18n data | ||||
| /locale/en | ||||
| @@ -46,11 +46,11 @@ package-lock.json | ||||
| # Static build | ||||
| *.mo | ||||
| npm-debug.log | ||||
| /.pnpm-store | ||||
| /node_modules | ||||
| /prod-static | ||||
| /staticfiles.json | ||||
| /webpack-stats-production.json | ||||
| /yarn-error.log | ||||
| zulip-git-version | ||||
|  | ||||
| # Test / analysis tools | ||||
| @@ -73,21 +73,15 @@ zulip.kdev4 | ||||
| *.kate-swp | ||||
| *.sublime-project | ||||
| *.sublime-workspace | ||||
| .vscode/ | ||||
| *.DS_Store | ||||
| # VS Code. Avoid checking in .vscode in general, while still specifying | ||||
| # recommended extensions for working with this repository. | ||||
| /.vscode/**/* | ||||
| !/.vscode/extensions.json | ||||
| # .cache/ is generated by VS Code test runner | ||||
| # .cache/ is generated by Visual Studio Code's test runner | ||||
| .cache/ | ||||
| .eslintcache | ||||
|  | ||||
| # Core dump files | ||||
| core | ||||
|  | ||||
| # Static generated files for landing page. | ||||
| /static/images/landing-page/hello/generated | ||||
|  | ||||
| ## Miscellaneous | ||||
| # (Ideally this section is empty.) | ||||
| .transifexrc | ||||
|   | ||||
							
								
								
									
										4
									
								
								.gitlint
									
									
									
									
									
								
							
							
						
						
									
										4
									
								
								.gitlint
									
									
									
									
									
								
							| @@ -1,13 +1,13 @@ | ||||
| [general] | ||||
| ignore=title-trailing-punctuation, body-min-length, body-is-missing | ||||
|  | ||||
| extra-path=tools/lib/gitlint_rules.py | ||||
| extra-path=tools/lib/gitlint-rules.py | ||||
|  | ||||
| [title-match-regex] | ||||
| regex=^(.+:\ )?[A-Z].+\.$ | ||||
|  | ||||
| [title-max-length] | ||||
| line-length=72 | ||||
| line-length=76 | ||||
|  | ||||
| [body-max-line-length] | ||||
| line-length=76 | ||||
|   | ||||
							
								
								
									
										98
									
								
								.mailmap
									
									
									
									
									
								
							
							
						
						
									
										98
									
								
								.mailmap
									
									
									
									
									
								
							| @@ -1,132 +1,40 @@ | ||||
| # This file teaches `git log` and friends the canonical names | ||||
| # and email addresses to use for our contributors. | ||||
| # | ||||
| # For details on the format, see: | ||||
| #   https://git.github.io/htmldocs/gitmailmap.html | ||||
| # | ||||
| # Handy commands for examining or adding to this file: | ||||
| # | ||||
| #     # shows all names/emails after mapping, sorted: | ||||
| #   $ git shortlog -es | sort -k2 | ||||
| # | ||||
| #     # shows raw names/emails, filtered by mapped name: | ||||
| #   $ git log --format='%an %ae' --author=$NAME | uniq -c | ||||
|  | ||||
| acrefoot <acrefoot@zulip.com> <acrefoot@alum.mit.edu> | ||||
| acrefoot <acrefoot@zulip.com> <acrefoot@dropbox.com> | ||||
| acrefoot <acrefoot@zulip.com> <acrefoot@humbughq.com> | ||||
| Adam Benesh <Adam.Benesh@gmail.com> | ||||
| Adam Benesh <Adam.Benesh@gmail.com> <Adam-Daniel.Benesh@t-systems.com> | ||||
| Adarsh Tiwari <xoldyckk@gmail.com> | ||||
| Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net> | ||||
| Alex Vandiver <alexmv@zulip.com> <github@chmrr.net> | ||||
| Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com> | ||||
| Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@zulip.com> | ||||
| Alya Abbott <alya@zulip.com> <2090066+alya@users.noreply.github.com> | ||||
| Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com> | ||||
| Aman Agrawal <amanagr@zulip.com> | ||||
| Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in> | ||||
| Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com> | ||||
| Anders Kaseorg <anders@zulip.com> <andersk@mit.edu> | ||||
| aparna-bhatt <aparnabhatt2001@gmail.com> <86338542+aparna-bhatt@users.noreply.github.com> | ||||
| Aryan Shridhar <aryanshridhar7@gmail.com> | ||||
| Aryan Shridhar <aryanshridhar7@gmail.com> <53977614+aryanshridhar@users.noreply.github.com> | ||||
| Ashwat Kumar Singh <ashwat.kumarsingh.met20@itbhu.ac.in> | ||||
| Austin Riba <austin@zulip.com> <austin@m51.io> | ||||
| BIKI DAS <bikid475@gmail.com> | ||||
| Brijmohan Siyag <brijsiyag@gmail.com> | ||||
| Brock Whittaker <brock@zulipchat.com> <bjwhitta@asu.edu> | ||||
| Brock Whittaker <brock@zulipchat.com> <brock@zulipchat.org> | ||||
| Brock Whittaker <brock@zulipchat.com> <brockwhittaker@Brocks-MacBook.local> | ||||
| Brock Whittaker <brock@zulipchat.com> <brock@zulipchat.org> | ||||
| Chris Bobbe <cbobbe@zulip.com> <cbobbe@zulipchat.com> | ||||
| Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com> | ||||
| Danny Su <contact@dannysu.com> <opensource@emailengine.org> | ||||
| Dinesh <chdinesh1089@gmail.com> | ||||
| Dinesh <chdinesh1089@gmail.com> <chdinesh1089> | ||||
| Eeshan Garg <eeshan@zulip.com> <jerryguitarist@gmail.com> | ||||
| Eric Smith <erwsmith@gmail.com> <99841919+erwsmith@users.noreply.github.com> | ||||
| Evy Kassirer <evy@zulip.com> | ||||
| Evy Kassirer <evy@zulip.com> <evy.kassirer@gmail.com> | ||||
| Evy Kassirer <evy@zulip.com> <evykassirer@users.noreply.github.com> | ||||
| Ganesh Pawar <pawarg256@gmail.com> <58626718+ganpa3@users.noreply.github.com> | ||||
| Greg Price <greg@zulip.com> <gnprice@gmail.com> | ||||
| Greg Price <greg@zulip.com> <greg@zulipchat.com> | ||||
| Greg Price <greg@zulip.com> <price@mit.edu> | ||||
| Hardik Dharmani <Ddharmani99@gmail.com> <ddharmani99@gmail.com> | ||||
| Hemant Umre <hemantumre12@gmail.com> <87542880+HemantUmre12@users.noreply.github.com> | ||||
| Jai soni <jai_s@me.iitr.ac.in> | ||||
| Jai soni <jai_s@me.iitr.ac.in> <76561593+jai2201@users.noreply.github.com> | ||||
| Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com> | ||||
| Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com> | ||||
| Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com> | ||||
| Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com> | ||||
| Joseph Ho <josephho678@gmail.com> | ||||
| Joseph Ho <josephho678@gmail.com> <62449508+Joelute@users.noreply.github.com> | ||||
| Julia Bichler <julia.bichler@tum.de> <74348920+juliaBichler01@users.noreply.github.com> | ||||
| Karl Stolley <karl@zulip.com> <karl@stolley.dev> | ||||
| Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com> | ||||
| Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com> | ||||
| Kevin Scott <kevin.scott.98@gmail.com> | ||||
| Lalit Kumar Singh <lalitkumarsingh3716@gmail.com> | ||||
| Lauryn Menard <lauryn@zulip.com> <63245456+laurynmm@users.noreply.github.com> | ||||
| Lauryn Menard <lauryn@zulip.com> <lauryn.menard@gmail.com> | ||||
| m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in> | ||||
| m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in> <pururshottam.tiwari.cd.cse19@itbhu.ac.in> | ||||
| Mateusz Mandera <mateusz.mandera@zulip.com> <mateusz.mandera@protonmail.com> | ||||
| Matt Keller <matt@zulip.com> | ||||
| Matt Keller <matt@zulip.com> <m@cognusion.com> | ||||
| Noble Mittal <noblemittal@outlook.com> <62551163+beingnoble03@users.noreply.github.com> | ||||
| nzai <nzaih18@gmail.com> <70953556+nzaih1999@users.noreply.github.com> | ||||
| Palash Baderia <palash.baderia@outlook.com> | ||||
| Palash Baderia <palash.baderia@outlook.com> <66828942+palashb01@users.noreply.github.com> | ||||
| Palash Raghuwanshi <singhpalash0@gmail.com> | ||||
| Parth <mittalparth22@gmail.com> | ||||
| Priyam Seth <sethpriyam1@gmail.com> <b19188@students.iitmandi.ac.in> | ||||
| Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com> | ||||
| Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com> | ||||
| Rein Zustand (rht) <rhtbot@protonmail.com> | ||||
| Rishabh Maheshwari <b20063@students.iitmandi.ac.in> | ||||
| Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu> | ||||
| Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com> | ||||
| Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com> | ||||
| Rixant Rokaha <rixantrokaha@gmail.com> | ||||
| Rixant Rokaha <rixantrokaha@gmail.com> <rishantrokaha@gmail.com> | ||||
| Rixant Rokaha <rixantrokaha@gmail.com> <rrokaha@caldwell.edu> | ||||
| Sahil Batra <sahil@zulip.com> <35494118+sahil839@users.noreply.github.com> | ||||
| Sahil Batra <sahil@zulip.com> <sahilbatra839@gmail.com> | ||||
| Satyam Bansal <sbansal1999@gmail.com> | ||||
| Sayam Samal <samal.sayam@gmail.com> | ||||
| Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com> | ||||
| Scott Feeney <scott@oceanbase.org> <scott@humbughq.com> | ||||
| Scott Feeney <scott@oceanbase.org> <scott@zulip.com> | ||||
| Shlok Patel <shlokcpatel2001@gmail.com> | ||||
| Somesh Ranjan <somesh.ranjan.met20@itbhu.ac.in> <77766761+somesh202@users.noreply.github.com> | ||||
| Steve Howell <showell@zulip.com> <showell30@yahoo.com> | ||||
| Steve Howell <showell@zulip.com> <showell@yahoo.com> | ||||
| Steve Howell <showell@zulip.com> <showell@zulipchat.com> | ||||
| Steve Howell <showell@zulip.com> <steve@humbughq.com> | ||||
| Steve Howell <showell@zulip.com> <steve@zulip.com> | ||||
| strifel <info@strifel.de> | ||||
| Tim Abbott <tabbott@zulip.com> | ||||
| Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com> | ||||
| Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com> | ||||
| Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu> | ||||
| Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com> | ||||
| Ujjawal Modi <umodi2003@gmail.com> <99073049+Ujjawal3@users.noreply.github.com> | ||||
| umkay <ukhan@zulipchat.com> <umaimah.k@gmail.com> | ||||
| umkay <ukhan@zulipchat.com> <umkay@users.noreply.github.com> | ||||
| Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com> | ||||
| Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com> | ||||
| Waseem Daher <wdaher@zulip.com> <wdaher@dropbox.com> | ||||
| Waseem Daher <wdaher@zulip.com> <wdaher@humbughq.com> | ||||
| Yash RE <33805964+YashRE42@users.noreply.github.com> | ||||
| Yash RE <33805964+YashRE42@users.noreply.github.com> <YashRE42@github.com> | ||||
| Yogesh Sirsat <yogeshsirsat56@gmail.com> | ||||
| Yogesh Sirsat <yogeshsirsat56@gmail.com> <41695888+yogesh-sirsat@users.noreply.github.com> | ||||
| Zeeshan Equbal <equbalzeeshan@gmail.com> | ||||
| Zeeshan Equbal <equbalzeeshan@gmail.com> <54993043+zee-bit@users.noreply.github.com> | ||||
| Zev Benjamin <zev@zulip.com> <zev@dropbox.com> | ||||
| Zev Benjamin <zev@zulip.com> <zev@humbughq.com> | ||||
| Zev Benjamin <zev@zulip.com> <zev@mit.edu> | ||||
| Zixuan James Li <p359101898@gmail.com> | ||||
| Zixuan James Li <p359101898@gmail.com> <359101898@qq.com> | ||||
| Zixuan James Li <p359101898@gmail.com> <39874143+PIG208@users.noreply.github.com> | ||||
| Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com> | ||||
|   | ||||
| @@ -1,11 +1,6 @@ | ||||
| pnpm-lock.yaml | ||||
| /api_docs/**/*.md | ||||
| /corporate/tests/stripe_fixtures | ||||
| /help/**/*.md | ||||
| /locale | ||||
| /templates/**/*.md | ||||
| /static/third | ||||
| /tools/setup/emoji/emoji_map.json | ||||
| /web/third | ||||
| /zerver/tests/fixtures | ||||
| /zerver/webhooks/*/doc.md | ||||
| /zerver/webhooks/*/fixtures | ||||
|   | ||||
| @@ -1,15 +0,0 @@ | ||||
| # https://docs.readthedocs.io/en/stable/config-file/v2.html | ||||
| version: 2 | ||||
|  | ||||
| build: | ||||
|   os: ubuntu-22.04 | ||||
|   tools: | ||||
|     python: "3.10" | ||||
|  | ||||
| sphinx: | ||||
|   configuration: docs/conf.py | ||||
|   fail_on_warning: true | ||||
|  | ||||
| python: | ||||
|   install: | ||||
|     - requirements: requirements/docs.txt | ||||
							
								
								
									
										27
									
								
								.tx/config
									
									
									
									
									
								
							
							
						
						
									
										27
									
								
								.tx/config
									
									
									
									
									
								
							| @@ -1,39 +1,32 @@ | ||||
| # Migrated from transifex-client format with `tx migrate` | ||||
| # | ||||
| # See https://developers.transifex.com/docs/using-the-client which hints at | ||||
| # this format, but in general, the headings are in the format of: | ||||
| # | ||||
| # [o:<org>:p:<project>:r:<resource>] | ||||
|  | ||||
| [main] | ||||
| host = https://www.transifex.com | ||||
| lang_map = zh-Hans: zh_Hans, zh-Hant: zh_Hant | ||||
|  | ||||
| [o:zulip:p:zulip:r:djangopo] | ||||
| [zulip.djangopo] | ||||
| file_filter = locale/<lang>/LC_MESSAGES/django.po | ||||
| source_file = locale/en/LC_MESSAGES/django.po | ||||
| source_lang = en | ||||
| type = PO | ||||
|  | ||||
| [o:zulip:p:zulip:r:mobile] | ||||
| [zulip.translationsjson] | ||||
| file_filter = locale/<lang>/translations.json | ||||
| source_file = locale/en/translations.json | ||||
| source_lang = en | ||||
| type = KEYVALUEJSON | ||||
|  | ||||
| [zulip.mobile] | ||||
| file_filter = locale/<lang>/mobile.json | ||||
| source_file = locale/en/mobile.json | ||||
| source_lang = en | ||||
| type = KEYVALUEJSON | ||||
|  | ||||
| [o:zulip:p:zulip:r:translationsjson] | ||||
| file_filter = locale/<lang>/translations.json | ||||
| source_file = locale/en/translations.json | ||||
| source_lang = en | ||||
| type = KEYVALUEJSON | ||||
|  | ||||
| [o:zulip:p:zulip-test:r:djangopo] | ||||
| [zulip-test.djangopo] | ||||
| file_filter = locale/<lang>/LC_MESSAGES/django.po | ||||
| source_file = locale/en/LC_MESSAGES/django.po | ||||
| source_lang = en | ||||
| type = PO | ||||
|  | ||||
| [o:zulip:p:zulip-test:r:translationsjson] | ||||
| [zulip-test.translationsjson] | ||||
| file_filter = locale/<lang>/translations.json | ||||
| source_file = locale/en/translations.json | ||||
| source_lang = en | ||||
|   | ||||
							
								
								
									
										23
									
								
								.vscode/extensions.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										23
									
								
								.vscode/extensions.json
									
									
									
									
										vendored
									
									
								
							| @@ -1,23 +0,0 @@ | ||||
| { | ||||
|     // Recommended VS Code extensions for zulip/zulip. | ||||
|     // | ||||
|     // VS Code prompts a user to install the recommended extensions | ||||
|     // when a workspace is opened for the first time.  The user can | ||||
|     // also review the list with the 'Extensions: Show Recommended | ||||
|     // Extensions' command.  See | ||||
|     // https://code.visualstudio.com/docs/editor/extension-marketplace#_workspace-recommended-extensions | ||||
|     // for more information. | ||||
|     // | ||||
|     // Extension identifier format: ${publisher}.${name}. | ||||
|     // Example: vscode.csharp | ||||
|  | ||||
|     "recommendations": [ | ||||
|         "42crunch.vscode-openapi", | ||||
|         "dbaeumer.vscode-eslint", | ||||
|         "esbenp.prettier-vscode", | ||||
|         "ms-vscode-remote.vscode-remote-extensionpack" | ||||
|     ], | ||||
|  | ||||
|     // Extensions recommended by VS Code which are not recommended for users of zulip/zulip. | ||||
|     "unwantedRecommendations": [] | ||||
| } | ||||
| @@ -18,15 +18,15 @@ all of us and the technical communities in which we participate. | ||||
|  | ||||
| The following behaviors are expected and requested of all community members: | ||||
|  | ||||
| - Participate. In doing so, you contribute to the health and longevity of | ||||
| * Participate. In doing so, you contribute to the health and longevity of | ||||
|   the community. | ||||
| - Exercise consideration and respect in your speech and actions. | ||||
| - Attempt collaboration before conflict. Assume good faith. | ||||
| - Refrain from demeaning, discriminatory, or harassing behavior and speech. | ||||
| - Take action or alert community leaders if you notice a dangerous | ||||
| * Exercise consideration and respect in your speech and actions. | ||||
| * Attempt collaboration before conflict. Assume good faith. | ||||
| * Refrain from demeaning, discriminatory, or harassing behavior and speech. | ||||
| * Take action or alert community leaders if you notice a dangerous | ||||
|   situation, someone in distress, or violations of this code, even if they | ||||
|   seem inconsequential. | ||||
| - Community event venues may be shared with members of the public; be | ||||
| * Community event venues may be shared with members of the public; be | ||||
|   respectful to all patrons of these locations. | ||||
|  | ||||
| ## Unacceptable behavior | ||||
| @@ -34,24 +34,24 @@ The following behaviors are expected and requested of all community members: | ||||
| The following behaviors are considered harassment and are unacceptable | ||||
| within the Zulip community: | ||||
|  | ||||
| - Jokes or derogatory language that singles out members of any race, | ||||
| * Jokes or derogatory language that singles out members of any race, | ||||
|   ethnicity, culture, national origin, color, immigration status, social and | ||||
|   economic class, educational level, language proficiency, sex, sexual | ||||
|   orientation, gender identity and expression, age, size, family status, | ||||
|   political belief, religion, and mental and physical ability. | ||||
| - Violence, threats of violence, or violent language directed against | ||||
| * Violence, threats of violence, or violent language directed against | ||||
|   another person. | ||||
| - Disseminating or threatening to disseminate another person's personal | ||||
| * Disseminating or threatening to disseminate another person's personal | ||||
|   information. | ||||
| - Personal insults of any sort. | ||||
| - Posting or displaying sexually explicit or violent material. | ||||
| - Inappropriate photography or recording. | ||||
| - Deliberate intimidation, stalking, or following (online or in person). | ||||
| - Unwelcome sexual attention. This includes sexualized comments or jokes, | ||||
| * Personal insults of any sort. | ||||
| * Posting or displaying sexually explicit or violent material. | ||||
| * Inappropriate photography or recording. | ||||
| * Deliberate intimidation, stalking, or following (online or in person). | ||||
| * Unwelcome sexual attention. This includes sexualized comments or jokes, | ||||
|   inappropriate touching or groping, and unwelcomed sexual advances. | ||||
| - Sustained disruption of community events, including talks and | ||||
| * Sustained disruption of community events, including talks and | ||||
|   presentations. | ||||
| - Advocating for, or encouraging, any of the behaviors above. | ||||
| * Advocating for, or encouraging, any of the behaviors above. | ||||
|  | ||||
| ## Reporting and enforcement | ||||
|  | ||||
| @@ -102,72 +102,3 @@ This Code of Conduct is adapted from the | ||||
| under a | ||||
| [Creative Commons BY-SA](https://creativecommons.org/licenses/by-sa/4.0/) | ||||
| license. | ||||
|  | ||||
| ## Moderating the Zulip community | ||||
|  | ||||
| Anyone can help moderate the Zulip community by helping make sure that folks are | ||||
| aware of the [community guidelines](https://zulip.com/development-community/) | ||||
| and this Code of Conduct, and that we maintain a positive and respectful | ||||
| atmosphere. | ||||
|  | ||||
| Here are some guidelines for you how can help: | ||||
|  | ||||
| - Be friendly! Welcoming folks, thanking them for their feedback, ideas and effort, | ||||
|   and just trying to keep the atmosphere warm make the whole community function | ||||
|   more smoothly. New participants who feel accepted, listened to and respected | ||||
|   are likely to treat others the same way. | ||||
|  | ||||
| - Be familiar with the [community | ||||
|   guidelines](https://zulip.com/development-community/), and cite them liberally | ||||
|   when a user violates them. Be polite but firm. Some examples: | ||||
|  | ||||
|   - @user please note that there is no need to @-mention @\_**Tim Abbott** when | ||||
|     you ask a question. As noted in the [guidelines for this | ||||
|     community](https://zulip.com/development-community/): | ||||
|  | ||||
|     > Use @-mentions sparingly… there is generally no need to @-mention a | ||||
|     > core contributor unless you need their timely attention. | ||||
|  | ||||
|   - @user, please keep in mind the following [community | ||||
|     guideline](https://zulip.com/development-community/): | ||||
|  | ||||
|     > Don’t ask the same question in multiple places. Moderators read every | ||||
|     > public stream, and make sure every question gets a reply. | ||||
|  | ||||
|     I’ve gone ahead and moved the other copy of this message to this thread. | ||||
|  | ||||
|   - If asked a question in a direct message that is better discussed in a public | ||||
|     stream: | ||||
|     > Hi @user! Please start by reviewing | ||||
|     > https://zulip.com/development-community/#community-norms to learn how to | ||||
|     > get help in this community. | ||||
|  | ||||
| - Users sometimes think chat.zulip.org is a testing instance. When this happens, | ||||
|   kindly direct them to use the **#test here** stream. | ||||
|  | ||||
| - If you see a message that’s posted in the wrong place, go ahead and move it if | ||||
|   you have permissions to do so, even if you don’t plan to respond to it. | ||||
|   Leaving the “Send automated notice to new topic” option enabled helps make it | ||||
|   clear what happened to the person who sent the message. | ||||
|  | ||||
|   If you are responding to a message that's been moved, mention the user in your | ||||
|   reply, so that the mention serves as a notification of the new location for | ||||
|   their conversation. | ||||
|  | ||||
| - If a user is posting spam, please report it to an administrator. They will: | ||||
|  | ||||
|   - Change the user's name to `<name> (spammer)` and deactivate them. | ||||
|   - Delete any spam messages they posted in public streams. | ||||
|  | ||||
| - We care very much about maintaining a respectful tone in our community. If you | ||||
|   see someone being mean or rude, point out that their tone is inappropriate, | ||||
|   and ask them to communicate their perspective in a respectful way in the | ||||
|   future. If you don’t feel comfortable doing so yourself, feel free to ask a | ||||
|   member of Zulip's core team to take care of the situation. | ||||
|  | ||||
| - Try to assume the best intentions from others (given the range of | ||||
|   possibilities presented by their visible behavior), and stick with a friendly | ||||
|   and positive tone even when someone’s behavior is poor or disrespectful. | ||||
|   Everyone has bad days and stressful situations that can result in them | ||||
|   behaving not their best, and while we should be firm about our community | ||||
|   rules, we should also enforce them with kindness. | ||||
|   | ||||
							
								
								
									
										554
									
								
								CONTRIBUTING.md
									
									
									
									
									
								
							
							
						
						
									
										554
									
								
								CONTRIBUTING.md
									
									
									
									
									
								
							| @@ -1,36 +1,24 @@ | ||||
| # Contributing guide | ||||
| # Contributing to Zulip | ||||
|  | ||||
| Welcome to the Zulip community! | ||||
|  | ||||
| ## Zulip development community | ||||
| ## Community | ||||
|  | ||||
| The primary communication forum for the Zulip community is the Zulip | ||||
| server hosted at [chat.zulip.org](https://chat.zulip.org/): | ||||
| The | ||||
| [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html) | ||||
| is the primary communication forum for the Zulip community. It is a good | ||||
| place to start whether you have a question, are a new contributor, are a new | ||||
| user, or anything else. Make sure to read the | ||||
| [community norms](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html#community-norms) | ||||
| before posting. The Zulip community is also governed by a | ||||
| [code of conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html). | ||||
|  | ||||
| - **Users** and **administrators** of Zulip organizations stop by to | ||||
|   ask questions, offer feedback, and participate in product design | ||||
|   discussions. | ||||
| - **Contributors to the project**, including the **core Zulip | ||||
|   development team**, discuss ongoing and future projects, brainstorm | ||||
|   ideas, and generally help each other out. | ||||
|  | ||||
| Everyone is welcome to [sign up](https://chat.zulip.org/) and | ||||
| participate — we love hearing from our users! Public streams in the | ||||
| community receive thousands of messages a week. We recommend signing | ||||
| up using the special invite links for | ||||
| [users](https://chat.zulip.org/join/t5crtoe62bpcxyisiyglmtvb/), | ||||
| [self-hosters](https://chat.zulip.org/join/wnhv3jzm6afa4raenedanfno/) | ||||
| and | ||||
| [contributors](https://chat.zulip.org/join/npzwak7vpmaknrhxthna3c7p/) | ||||
| to get a curated list of initial stream subscriptions. | ||||
|  | ||||
| To learn how to get started participating in the community, including [community | ||||
| norms](https://zulip.com/development-community/#community-norms) and [where to | ||||
| post](https://zulip.com/development-community/#where-do-i-send-my-message), | ||||
| check out our [Zulip development community | ||||
| guide](https://zulip.com/development-community/). The Zulip community is | ||||
| governed by a [code of | ||||
| conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html). | ||||
| You can subscribe to | ||||
| [zulip-devel-announce@googlegroups.com](https://groups.google.com/g/zulip-devel-announce) | ||||
| or our [Twitter](https://twitter.com/zulip) account for a very low | ||||
| traffic (<1 email/month) way to hear about things like mentorship | ||||
| opportunities with Google Summer of Code, in-person sprints at | ||||
| conferences, and other opportunities to contribute. | ||||
|  | ||||
| ## Ways to contribute | ||||
|  | ||||
| @@ -38,293 +26,198 @@ To make a code or documentation contribution, read our | ||||
| [step-by-step guide](#your-first-codebase-contribution) to getting | ||||
| started with the Zulip codebase. A small sample of the type of work that | ||||
| needs doing: | ||||
|  | ||||
| - Bug squashing and feature development on our Python/Django | ||||
| * Bug squashing and feature development on our Python/Django | ||||
|   [backend](https://github.com/zulip/zulip), web | ||||
|   [frontend](https://github.com/zulip/zulip), React Native | ||||
|   [mobile app](https://github.com/zulip/zulip-mobile), or Electron | ||||
|   [desktop app](https://github.com/zulip/zulip-desktop). | ||||
| - Building out our | ||||
| * Building out our | ||||
|   [Python API and bots](https://github.com/zulip/python-zulip-api) framework. | ||||
| - [Writing an integration](https://zulip.com/api/integrations-overview). | ||||
| - Improving our [user](https://zulip.com/help/) or | ||||
| * [Writing an integration](https://zulip.com/api/integrations-overview). | ||||
| * Improving our [user](https://zulip.com/help/) or | ||||
|   [developer](https://zulip.readthedocs.io/en/latest/) documentation. | ||||
| - [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html) | ||||
| * [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html) | ||||
|   and manually testing pull requests. | ||||
|  | ||||
| **Non-code contributions**: Some of the most valuable ways to contribute | ||||
| don't require touching the codebase at all. For example, you can: | ||||
| don't require touching the codebase at all. We list a few of them below: | ||||
|  | ||||
| - Report issues, including both [feature | ||||
|   requests](https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html) | ||||
|   and [bug | ||||
|   reports](https://zulip.readthedocs.io/en/latest/contributing/reporting-bugs.html). | ||||
| - [Give feedback](#user-feedback) if you are evaluating or using Zulip. | ||||
| - [Participate | ||||
|   thoughtfully](https://zulip.readthedocs.io/en/latest/contributing/design-discussions.html) | ||||
|   in design discussions. | ||||
| - [Sponsor Zulip](https://github.com/sponsors/zulip) through the GitHub sponsors program. | ||||
| - [Translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) | ||||
|   Zulip into your language. | ||||
| - [Stay connected](#stay-connected) with Zulip, and [help others | ||||
|   find us](#help-others-find-zulip). | ||||
| * [Reporting issues](#reporting-issues), including both feature requests and | ||||
|   bug reports. | ||||
| * [Giving feedback](#user-feedback) if you are evaluating or using Zulip. | ||||
| * [Sponsor Zulip](https://github.com/sponsors/zulip) through the GitHub sponsors program. | ||||
| * [Translating](https://zulip.readthedocs.io/en/latest/translating/translating.html) | ||||
|   Zulip. | ||||
| * [Outreach](#zulip-outreach): Star us on GitHub, upvote us | ||||
|   on product comparison sites, or write for [the Zulip blog](https://blog.zulip.org/). | ||||
|  | ||||
| ## Your first codebase contribution | ||||
| ## Your first (codebase) contribution | ||||
|  | ||||
| This section has a step by step guide to starting as a Zulip codebase | ||||
| contributor. It's long, but don't worry about doing all the steps perfectly; | ||||
| no one gets it right the first time, and there are a lot of people available | ||||
| to help. | ||||
|  | ||||
| - First, make an account on the | ||||
|   [Zulip community server](https://zulip.com/development-community/), | ||||
|   paying special attention to the | ||||
|   [community norms](https://zulip.com/development-community/#community-norms). | ||||
|   If you'd like, introduce yourself in | ||||
| * First, make an account on the | ||||
|   [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html), | ||||
|   paying special attention to the community norms. If you'd like, introduce | ||||
|   yourself in | ||||
|   [#new members](https://chat.zulip.org/#narrow/stream/95-new-members), using | ||||
|   your name as the topic. Bonus: tell us about your first impressions of | ||||
|   Zulip, and anything that felt confusing/broken or interesting/helpful as you | ||||
|   started using the product. | ||||
| - Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor). | ||||
| - [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html), | ||||
|   Zulip, and anything that felt confusing/broken as you started using the | ||||
|   product. | ||||
| * Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor). | ||||
| * [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html), | ||||
|   getting help in | ||||
|   [#provision help](https://chat.zulip.org/#narrow/stream/21-provision-help) | ||||
|   [#development help](https://chat.zulip.org/#narrow/stream/49-development-help) | ||||
|   if you run into any troubles. | ||||
| - Familiarize yourself with [using the development environment](https://zulip.readthedocs.io/en/latest/development/using.html). | ||||
| - Go through the [new application feature | ||||
|   tutorial](https://zulip.readthedocs.io/en/latest/tutorials/new-feature-tutorial.html) to get familiar with | ||||
|   how the Zulip codebase is organized and how to find code in it. | ||||
| - Read the [Zulip guide to | ||||
|   Git](https://zulip.readthedocs.io/en/latest/git/index.html) if you | ||||
|   are unfamiliar with Git or Zulip's rebase-based Git workflow, | ||||
|   getting help in [#git | ||||
|   help](https://chat.zulip.org/#narrow/stream/44-git-help) if you run | ||||
|   into any troubles. Even Git experts should read the [Zulip-specific | ||||
|   Git tools | ||||
|   page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html). | ||||
| * Read the | ||||
|   [Zulip guide to Git](https://zulip.readthedocs.io/en/latest/git/index.html) | ||||
|   and do the Git tutorial (coming soon) if you are unfamiliar with | ||||
|   Git, getting help in | ||||
|   [#git help](https://chat.zulip.org/#narrow/stream/44-git-help) if | ||||
|   you run into any troubles.  Be sure to check out the | ||||
|   [extremely useful Zulip-specific tools page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html). | ||||
|  | ||||
| ### Where to look for an issue | ||||
| ### Picking an issue | ||||
|  | ||||
| Now you're ready to pick your first issue! Zulip has several repositories you | ||||
| can check out, depending on your interests. There are hundreds of open issues in | ||||
| the [main Zulip server and web app | ||||
| repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| alone. | ||||
| Now, you're ready to pick your first issue! There are hundreds of open issues | ||||
| in the main codebase alone. This section will help you find an issue to work | ||||
| on. | ||||
|  | ||||
| You can look through issues tagged with the "help wanted" label, which is used | ||||
| to indicate the issues that are ready for contributions. Some repositories also | ||||
| use the "good first issue" label to tag issues that are especially approachable | ||||
| for new contributors. | ||||
|  | ||||
| - [Server and web app](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| - [Mobile apps](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| - [Desktop app](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| - [Terminal app](https://github.com/zulip/zulip-terminal/issues?q=is%3Aopen+is%3Aissue+label%3A"help+wanted") | ||||
| - [Python API bindings and bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
|  | ||||
| ### Picking an issue to work on | ||||
|  | ||||
| There's a lot to learn while making your first pull request, so start small! | ||||
| Many first contributions have fewer than 10 lines of changes (not counting | ||||
| changes to tests). | ||||
|  | ||||
| We recommend the following process for finding an issue to work on: | ||||
|  | ||||
| 1. Read the description of an issue tagged with the "help wanted" label and make | ||||
|    sure you understand it. | ||||
| 2. If it seems promising, poke around the product | ||||
|    (on [chat.zulip.org](https://chat.zulip.org) or in the development | ||||
|    environment) until you know how the piece being | ||||
|    described fits into the bigger picture. If after some exploration the | ||||
|    description seems confusing or ambiguous, post a question on the GitHub | ||||
|    issue, as others may benefit from the clarification as well. | ||||
| 3. When you find an issue you like, try to get started working on it. See if you | ||||
|    can find the part of the code you'll need to modify (`git grep` is your | ||||
|    friend!) and get some idea of how you'll approach the problem. | ||||
| 4. If you feel lost, that's OK! Go through these steps again with another issue. | ||||
|    There's plenty to work on, and the exploration you do will help you learn | ||||
|    more about the project. | ||||
|  | ||||
| Note that you are _not_ claiming an issue while you are iterating through steps | ||||
| 1-4. _Before you claim an issue_, you should be confident that you will be able to | ||||
| tackle it effectively. | ||||
|  | ||||
| Additional tips for the [main server and web app | ||||
| repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22): | ||||
|  | ||||
| - We especially recommend browsing recently opened issues, as there are more | ||||
|   likely to be easy ones for you to find. | ||||
| - Take a look at issues with the ["good first issue" | ||||
|   label](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22), | ||||
|   as they are especially accessible to new contributors. However, you will | ||||
|   likely find issues without this label that are accessible as well. | ||||
| - All issues are partitioned into areas like | ||||
| * If you're interested in | ||||
|   [mobile](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue), | ||||
|   [desktop](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue), | ||||
|   or | ||||
|   [bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue) | ||||
|   development, check the respective links for open issues, or post in | ||||
|   [#mobile](https://chat.zulip.org/#narrow/stream/48-mobile), | ||||
|   [#desktop](https://chat.zulip.org/#narrow/stream/16-desktop), or | ||||
|   [#integration](https://chat.zulip.org/#narrow/stream/127-integrations). | ||||
| * For the main server and web repository, we recommend browsing | ||||
|   recently opened issues to look for issues you are confident you can | ||||
|   fix correctly in a way that clearly communicates why your changes | ||||
|   are the correct fix.  Our GitHub workflow bot, zulipbot, limits | ||||
|   users who have 0 commits merged to claiming a single issue labeled | ||||
|   with "good first issue" or "help wanted". | ||||
| * We also partition all of our issues in the main repo into areas like | ||||
|   admin, compose, emoji, hotkeys, i18n, onboarding, search, etc. Look | ||||
|   through our [list of labels](https://github.com/zulip/zulip/labels), and | ||||
|   click on some of the `area:` labels to see all the issues related to your | ||||
|   areas of interest. | ||||
| - Avoid issues with the "difficult" label unless you | ||||
|   understand why it is difficult and are highly confident you can resolve the | ||||
|   issue correctly and completely. | ||||
| * If the lists of issues are overwhelming, post in | ||||
|   [#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with a | ||||
|   bit about your background and interests, and we'll help you out. The most | ||||
|   important thing to say is whether you're looking for a backend (Python), | ||||
|   frontend (JavaScript and TypeScript), mobile (React Native), desktop (Electron), | ||||
|   documentation (English) or visual design (JavaScript/TypeScript + CSS) issue, and a | ||||
|   bit about your programming experience and available time. | ||||
|  | ||||
| ### Claiming an issue | ||||
| We also welcome suggestions of features that you feel would be valuable or | ||||
| changes that you feel would make Zulip a better open source project. If you | ||||
| have a new feature you'd like to add, we recommend you start by posting in | ||||
| [#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with the | ||||
| feature idea and the problem that you're hoping to solve. | ||||
|  | ||||
| #### In the main server/web app repository and Zulip Terminal repository | ||||
|  | ||||
| The Zulip server/web app repository | ||||
| ([`zulip/zulip`](https://github.com/zulip/zulip/)) and the Zulip Terminal | ||||
| repository ([`zulip/zulip-terminal`](https://github.com/zulip/zulip-terminal/)) | ||||
| are set up with a GitHub workflow bot called | ||||
| [Zulipbot](https://github.com/zulip/zulipbot), which manages issues and pull | ||||
| requests in order to create a better workflow for Zulip contributors. | ||||
|  | ||||
| To claim an issue in these repositories, simply post a comment that says | ||||
| `@zulipbot claim` to the issue thread. If the issue is tagged with a [help | ||||
| wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| label, Zulipbot will immediately assign the issue to you. | ||||
|  | ||||
| Note that new contributors can only claim one issue until their first pull request is | ||||
| merged. This is to encourage folks to finish ongoing work before starting | ||||
| something new. If you would like to pick up a new issue while waiting for review | ||||
| on an almost-ready pull request, you can post a comment to this effect on the | ||||
| issue you're interested in. | ||||
|  | ||||
| #### In other Zulip repositories | ||||
|  | ||||
| There is no bot for other Zulip repositories | ||||
| ([`zulip/zulip-mobile`](https://github.com/zulip/zulip-mobile/), etc.). If | ||||
| you are interested in claiming an issue in one of these repositories, simply | ||||
| post a comment on the issue thread saying that you'd like to work on it. There | ||||
| is no need to @-mention the issue creator in your comment. | ||||
|  | ||||
| Please follow the same guidelines as described above: find an issue labeled | ||||
| "help wanted", and only pick up one issue at a time to start with. | ||||
| Other notes: | ||||
| * For a first pull request, it's better to aim for a smaller contribution | ||||
|   than a bigger one. Many first contributions have fewer than 10 lines of | ||||
|   changes (not counting changes to tests). | ||||
| * The full list of issues explicitly looking for a contributor can be | ||||
|   found with the | ||||
|   [good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) | ||||
|   and | ||||
|   [help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
|   labels.  Avoid issues with the "difficult" label unless you | ||||
|   understand why it is difficult and are confident you can resolve the | ||||
|   issue correctly and completely.  Issues without one of these labels | ||||
|   are fair game if Tim has written a clear technical design proposal | ||||
|   in the issue, or it is a bug that you can reproduce and you are | ||||
|   confident you can fix the issue correctly. | ||||
| * For most new contributors, there's a lot to learn while making your first | ||||
|   pull request. It's OK if it takes you a while; that's normal! You'll be | ||||
|   able to work a lot faster as you build experience. | ||||
|  | ||||
| ### Working on an issue | ||||
|  | ||||
| You're encouraged to ask questions on how to best implement or debug your | ||||
| changes -- the Zulip maintainers are excited to answer questions to help you | ||||
| stay unblocked and working efficiently. You can ask questions in the [Zulip | ||||
| development community](https://zulip.com/development-community/), or on the | ||||
| GitHub issue or pull request. | ||||
| To work on an issue, claim it by adding a comment with `@zulipbot claim` to | ||||
| the issue thread. [Zulipbot](https://github.com/zulip/zulipbot) is a GitHub | ||||
| workflow bot; it will assign you to the issue and label the issue as "in | ||||
| progress". Some additional notes: | ||||
|  | ||||
| To get early feedback on any UI changes, we encourage you to post screenshots of | ||||
| your work in the [#design | ||||
| stream](https://chat.zulip.org/#narrow/stream/101-design) in the [Zulip | ||||
| development community](https://zulip.com/development-community/) | ||||
| * You can only claim issues with the | ||||
|   [good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) | ||||
|   or | ||||
|   [help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
|   labels. Zulipbot will give you an error if you try to claim an issue | ||||
|   without one of those labels. | ||||
| * You're encouraged to ask questions on how to best implement or debug your | ||||
|   changes -- the Zulip maintainers are excited to answer questions to help | ||||
|   you stay unblocked and working efficiently. You can ask questions on | ||||
|   chat.zulip.org, or on the GitHub issue or pull request. | ||||
| * We encourage early pull requests for work in progress. Prefix the title of | ||||
|   work in progress pull requests with `[WIP]`, and remove the prefix when | ||||
|   you think it might be mergeable and want it to be reviewed. | ||||
| * After updating a PR, add a comment to the GitHub thread mentioning that it | ||||
|   is ready for another review. GitHub only notifies maintainers of the | ||||
|   changes when you post a comment, so if you don't, your PR will likely be | ||||
|   neglected by accident! | ||||
|  | ||||
| For more advice, see [What makes a great Zulip | ||||
| contributor?](#what-makes-a-great-zulip-contributor) below. It's OK if your | ||||
| first issue takes you a while; that's normal! You'll be able to work a lot | ||||
| faster as you build experience. | ||||
| ### And beyond | ||||
|  | ||||
| ### Submitting a pull request | ||||
|  | ||||
| See the [pull request review | ||||
| process](https://zulip.readthedocs.io/en/latest/contributing/review-process.html) | ||||
| guide for detailed instructions on how to submit a pull request, and information | ||||
| on the stages of review your PR will go through. | ||||
|  | ||||
| ### Beyond the first issue | ||||
|  | ||||
| To find a second issue to work on, we recommend looking through issues with the same | ||||
| A great place to look for a second issue is to look for issues with the same | ||||
| `area:` label as the last issue you resolved. You'll be able to reuse the | ||||
| work you did learning how that part of the codebase works. Also, the path to | ||||
| becoming a core developer often involves taking ownership of one of these area | ||||
| labels. | ||||
|  | ||||
| ### Common questions | ||||
|  | ||||
- **What if somebody is already working on the issue I want to claim?** There
  are lots of issues to work on! If somebody else is actively working on the
  issue, you can find a different one, or help with
  reviewing their work.
| - **What if somebody else claims an issue while I'm figuring out whether or not to | ||||
|   work on it?** No worries! You can contribute by providing feedback on | ||||
|   their pull request. If you've made good progress in understanding part of the | ||||
|   codebase, you can also find another "help wanted" issue in the same area to | ||||
|   work on. | ||||
| - **What if there is already a pull request for the issue I want to work on?** | ||||
|   Start by reviewing the existing work. If you agree with the approach, you can | ||||
|   use the existing pull request (PR) as a starting point for your contribution. If | ||||
|   you think a different approach is needed, you can post a new PR, with a comment that clearly | ||||
|   explains _why_ you decided to start from scratch. | ||||
| - **What if I ask if someone is still working on an issue, and they don't | ||||
|   respond?** If you don't get a reply within 2-3 days, go ahead and post a comment | ||||
|   that you are working on the issue, and submit a pull request. If the original | ||||
|   assignee ends up submitting a pull request first, no worries! You can help by | ||||
|   providing feedback on their work, or submit your own PR if you think a | ||||
|   different approach is needed (as described above). | ||||
| - **Can I come up with my own feature idea and work on it?** We welcome | ||||
|   suggestions of features or other improvements that you feel would be valuable. If you | ||||
|   have a new feature you'd like to add, you can start a conversation [in our | ||||
|   development community](https://zulip.com/development-community/#where-do-i-send-my-message) | ||||
|   explaining the feature idea and the problem that you're hoping to solve. | ||||
| - **I'm waiting for the next round of review on my PR. Can I pick up | ||||
|   another issue in the meantime?** Someone's first Zulip PR often | ||||
|   requires quite a bit of iteration, so please [make sure your pull | ||||
|   request is reviewable][reviewable-pull-requests] and go through at | ||||
|   least one round of feedback from others before picking up a second | ||||
|   issue. After that, sure! If | ||||
|   [Zulipbot](https://github.com/zulip/zulipbot) does not allow you to | ||||
|   claim an issue, you can post a comment describing the status of your | ||||
|   other work on the issue you're interested in, and asking for the | ||||
|   issue to be assigned to you. Note that addressing feedback on | ||||
|   in-progress PRs should always take priority over starting a new PR. | ||||
| - **I think my PR is done, but it hasn't been merged yet. What's going on?** | ||||
|   1. **Double-check that you have addressed all the feedback**, including any comments | ||||
|      on [Git commit | ||||
|      discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html). | ||||
|   2. If all the feedback has been addressed, did you [leave a | ||||
|      comment](https://zulip.readthedocs.io/en/latest/contributing/review-process.html#how-to-help-move-the-review-process-forward) | ||||
|      explaining that you have done so and **requesting another review**? If not, | ||||
|      it may not be clear to project maintainers or reviewers that your PR is | ||||
|      ready for another look. | ||||
|   3. There may be a pause between initial rounds of review for your PR and final | ||||
|      review by project maintainers. This is normal, and we encourage you to **work | ||||
|      on other issues** while you wait. | ||||
|   4. If you think the PR is ready and haven't seen any updates for a couple | ||||
|      of weeks, it can be helpful to **leave another comment**. Summarize the | ||||
|      overall state of the review process and your work, and indicate that you | ||||
|      are waiting for a review. | ||||
|   5. Finally, **Zulip project maintainers are people too**! They may be busy | ||||
|      with other work, and sometimes they might even take a vacation. ;) It can | ||||
|      occasionally take a few weeks for a PR in the final stages of the review | ||||
|      process to be merged. | ||||
|  | ||||
| [reviewable-pull-requests]: https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html | ||||
|  | ||||
| ## What makes a great Zulip contributor? | ||||
|  | ||||
| Zulip has a lot of experience working with new contributors. In our | ||||
| Zulip has a lot of experience working with new contributors.  In our | ||||
| experience, these are the best predictors of success: | ||||
|  | ||||
| - [Asking great questions][great-questions]. It's very hard to answer a general | ||||
|   question like, "How do I do this issue?" When asking for help, explain your | ||||
|   current understanding, including what you've done or tried so far and where | ||||
|   you got stuck. Post tracebacks or other error messages if appropriate. For | ||||
|   more advice, check out [our guide][great-questions]! | ||||
| - Learning and practicing | ||||
|   [Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html). | ||||
| - Submitting carefully tested code. See our [detailed guide on how to review | ||||
|   code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code) | ||||
|   (yours or someone else's). | ||||
| - Posting | ||||
| * Posting good questions. This generally means explaining your current | ||||
|   understanding, saying what you've done or tried so far, and including | ||||
|   tracebacks or other error messages if appropriate. | ||||
| * Learning and practicing | ||||
|   [Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/version-control.html#commit-discipline). | ||||
| * Submitting carefully tested code. This generally means checking your work | ||||
|   through a combination of automated tests and manually clicking around the | ||||
|   UI trying to find bugs in your work. See | ||||
|   [things to look for](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#things-to-look-for) | ||||
|   for additional ideas. | ||||
| * Posting | ||||
|   [screenshots or GIFs](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html) | ||||
|   for frontend changes. | ||||
| - Working to [make your pull requests easy to | ||||
|   review](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html). | ||||
| - Clearly describing what you have implemented and why. For example, if your | ||||
|   implementation differs from the issue description in some way or is a partial | ||||
|   step towards the requirements described in the issue, be sure to call | ||||
|   out those differences. | ||||
| - Being responsive to feedback on pull requests. This means incorporating or | ||||
| * Being responsive to feedback on pull requests. This means incorporating or | ||||
|   responding to all suggested changes, and leaving a note if you won't be | ||||
|   able to address things within a few days. | ||||
| - Being helpful and friendly on the [Zulip community | ||||
|   server](https://zulip.com/development-community/). | ||||
| * Being helpful and friendly on chat.zulip.org. | ||||
|  | ||||
| [great-questions]: https://zulip.readthedocs.io/en/latest/contributing/asking-great-questions.html | ||||
| These are also the main criteria we use to select candidates for all | ||||
| of our outreach programs. | ||||
|  | ||||
| ## Reporting issues | ||||
|  | ||||
| If you find an easily reproducible bug and/or are experienced in reporting | ||||
| bugs, feel free to just open an issue on the relevant project on GitHub. | ||||
|  | ||||
| If you have a feature request or are not yet sure what the underlying bug | ||||
| is, the best place to post issues is | ||||
| [#issues](https://chat.zulip.org/#narrow/stream/9-issues) (or | ||||
| [#mobile](https://chat.zulip.org/#narrow/stream/48-mobile) or | ||||
| [#desktop](https://chat.zulip.org/#narrow/stream/16-desktop)) on the | ||||
| [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html). | ||||
| This allows us to interactively figure out what is going on, let you know if | ||||
| a similar issue has already been opened, and collect any other information | ||||
| we need. Choose a 2-4 word topic that describes the issue, explain the issue | ||||
| and how to reproduce it if known, your browser/OS if relevant, and a | ||||
| [screenshot or screenGIF](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html) | ||||
| if appropriate. | ||||
|  | ||||
| **Reporting security issues**. Please do not report security issues | ||||
|   publicly, including on public streams on chat.zulip.org. You can | ||||
|   email security@zulip.com.  We create a CVE for every security | ||||
|   issue in our released software. | ||||
|  | ||||
| ## User feedback | ||||
|  | ||||
| @@ -334,67 +227,116 @@ hear about your experience with the product. If you're not sure what to | ||||
| write, here are some questions we're always very curious to know the answer | ||||
| to: | ||||
|  | ||||
| - Evaluation: What is the process by which your organization chose or will | ||||
| * Evaluation: What is the process by which your organization chose or will | ||||
|   choose a group chat product? | ||||
| - Pros and cons: What are the pros and cons of Zulip for your organization, | ||||
| * Pros and cons: What are the pros and cons of Zulip for your organization, | ||||
|   and the pros and cons of other products you are evaluating? | ||||
| - Features: What are the features that are most important for your | ||||
| * Features: What are the features that are most important for your | ||||
|   organization? In the best-case scenario, what would your chat solution do | ||||
|   for you? | ||||
| - Onboarding: If you remember it, what was your impression during your first | ||||
| * Onboarding: If you remember it, what was your impression during your first | ||||
|   few minutes of using Zulip? What did you notice, and how did you feel? Was | ||||
|   there anything that stood out to you as confusing, or broken, or great? | ||||
| - Organization: What does your organization do? How big is the organization? | ||||
| * Organization: What does your organization do? How big is the organization? | ||||
|   A link to your organization's website? | ||||
|  | ||||
| You can contact us in the [#feedback stream of the Zulip development | ||||
| community](https://chat.zulip.org/#narrow/stream/137-feedback) or | ||||
| by emailing [support@zulip.com](mailto:support@zulip.com). | ||||
|  | ||||
| ## Outreach programs | ||||
|  | ||||
| Zulip regularly participates in [Google Summer of Code | ||||
| (GSoC)](https://developers.google.com/open-source/gsoc/) and | ||||
| [Outreachy](https://www.outreachy.org/). We have been a GSoC mentoring | ||||
| organization since 2016, and we accept 15-20 GSoC participants each summer. In | ||||
| the past, we’ve also participated in [Google | ||||
| Code-In](https://developers.google.com/open-source/gci/), and hosted summer | ||||
| interns from Harvard, MIT, and Stanford. | ||||
| Zulip participates in [Google Summer of Code | ||||
| (GSoC)](https://developers.google.com/open-source/gsoc/) every year. | ||||
| In the past, we've also participated in | ||||
| [Outreachy](https://www.outreachy.org/), [Google | ||||
| Code-In](https://developers.google.com/open-source/gci/), and hosted | ||||
| summer interns from Harvard, MIT, and Stanford. | ||||
|  | ||||
| Check out our [outreach programs | ||||
| overview](https://zulip.readthedocs.io/en/latest/outreach/overview.html) to learn | ||||
| more about participating in an outreach program with Zulip. Most of our program | ||||
| participants end up sticking around the project long-term, and many have become | ||||
| core team members, maintaining important parts of the project. We hope you | ||||
| apply! | ||||
| While each third-party program has its own rules and requirements, the | ||||
| Zulip community's approaches all of these programs with these ideas in | ||||
| mind: | ||||
| * We try to make the application process as valuable for the applicant as | ||||
|   possible. Expect high-quality code reviews, a supportive community, and | ||||
|   publicly viewable patches you can link to from your resume, regardless of | ||||
|   whether you are selected. | ||||
| * To apply, you'll have to submit at least one pull request to a Zulip | ||||
|   repository.  Most students accepted to one of our programs have | ||||
|   several merged pull requests (including at least one larger PR) by | ||||
|   the time of the application deadline. | ||||
| * The main criteria we use is quality of your best contributions, and | ||||
|   the bullets listed at | ||||
|   [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor). | ||||
|   Because we focus on evaluating your best work, it doesn't hurt your | ||||
|   application to makes mistakes in your first few PRs as long as your | ||||
|   work improves. | ||||
|  | ||||
| ## Stay connected | ||||
| Most of our outreach program participants end up sticking around the | ||||
| project long-term, and many have become core team members, maintaining | ||||
| important parts of the project. We hope you apply! | ||||
|  | ||||
| Even if you are not logging into the development community on a regular basis, | ||||
| you can still stay connected with the project. | ||||
| ### Google Summer of Code | ||||
|  | ||||
| - Follow us [on Twitter](https://twitter.com/zulip). | ||||
| - Subscribe to [our blog](https://blog.zulip.org/). | ||||
| - Join or follow the project [on LinkedIn](https://www.linkedin.com/company/zulip-project/). | ||||
| The largest outreach program Zulip participates in is GSoC (14 | ||||
| students in 2017; 11 in 2018; 17 in 2019; 18 in 2020).  While we don't control how | ||||
| many slots Google allocates to Zulip, we hope to mentor a similar | ||||
| number of students in future summers. | ||||
|  | ||||
| ## Help others find Zulip | ||||
| If you're reading this well before the application deadline and want | ||||
| to make your application strong, we recommend getting involved in the | ||||
| community and fixing issues in Zulip now. Having good contributions | ||||
| and building a reputation for doing good work is the best way to have | ||||
| a strong application.  About half of Zulip's GSoC students for Summer | ||||
| 2017 had made significant contributions to the project by February | ||||
| 2017, and about half had not.  Our | ||||
| [GSoC project ideas page][gsoc-guide] has lots more details on how | ||||
| Zulip does GSoC, as well as project ideas (though the project idea | ||||
| list is maintained only during the GSoC application period, so if | ||||
| you're looking at some other time of year, the project list is likely | ||||
| out-of-date). | ||||
|  | ||||
| Here are some ways you can help others find Zulip: | ||||
| We also have in some past years run a Zulip Summer of Code (ZSoC) | ||||
| program for students who we didn't have enough slots to accept for | ||||
| GSoC but were able to find funding for.  Student expectations are the | ||||
| same as with GSoC, and it has no separate application process; your | ||||
| GSoC application is your ZSoC application.  If we'd like to select you | ||||
| for ZSoC, we'll contact you when the GSoC results are announced. | ||||
|  | ||||
| - Star us on GitHub. There are four main repositories: | ||||
| [gsoc-guide]: https://zulip.readthedocs.io/en/latest/contributing/gsoc-ideas.html | ||||
| [gsoc-faq]: https://developers.google.com/open-source/gsoc/faq | ||||
|  | ||||
| ## Zulip outreach | ||||
|  | ||||
| **Upvoting Zulip**. Upvotes and reviews make a big difference in the public | ||||
| perception of projects like Zulip. We've collected a few sites below | ||||
| where we know Zulip has been discussed. Doing everything in the following | ||||
| list typically takes about 15 minutes. | ||||
| * Star us on GitHub. There are four main repositories: | ||||
|   [server/web](https://github.com/zulip/zulip), | ||||
|   [mobile](https://github.com/zulip/zulip-mobile), | ||||
|   [desktop](https://github.com/zulip/zulip-desktop), and | ||||
|   [Python API](https://github.com/zulip/python-zulip-api). | ||||
| * [Follow us](https://twitter.com/zulip) on Twitter. | ||||
|  | ||||
| - "Like" and retweet [our tweets](https://twitter.com/zulip). | ||||
| For both of the following, you'll need to make an account on the site if you | ||||
| don't already have one. | ||||
|  | ||||
| - Upvote and post feedback on Zulip on comparison websites. A couple specific | ||||
|   ones to highlight: | ||||
| * [Like Zulip](https://alternativeto.net/software/zulip-chat-server/) on | ||||
|   AlternativeTo. We recommend upvoting a couple of other products you like | ||||
|   as well, both to give back to their community, and since single-upvote | ||||
|   accounts are generally given less weight. You can also | ||||
|   [upvote Zulip](https://alternativeto.net/software/slack/) on their page | ||||
|   for Slack. | ||||
| * [Add Zulip to your stack](https://stackshare.io/zulip) on StackShare, star | ||||
|   it, and upvote the reasons why people like Zulip that you find most | ||||
|   compelling. Again, we recommend adding a few other products that you like | ||||
|   as well. | ||||
|  | ||||
|   - [AlternativeTo](https://alternativeto.net/software/zulip-chat-server/). You can also | ||||
|     [upvote Zulip](https://alternativeto.net/software/slack/) on their page | ||||
|     for Slack. | ||||
|   - [Add Zulip to your stack](https://stackshare.io/zulip) on StackShare, star | ||||
|     it, and upvote the reasons why people like Zulip that you find most | ||||
|     compelling. | ||||
| We have a doc with more detailed instructions and a few other sites, if you | ||||
| have been using Zulip for a while and want to contribute more. | ||||
|  | ||||
| **Blog posts**. Writing a blog post about your experiences with Zulip, or | ||||
| about a technical aspect of Zulip can be a great way to spread the word | ||||
| about Zulip. | ||||
|  | ||||
| We also occasionally [publish](https://blog.zulip.org/) long-form | ||||
| articles related to Zulip. Our posts typically get tens of thousands | ||||
| of views, and we always have good ideas for blog posts that we can | ||||
| outline but don't have time to write. If you are an experienced writer | ||||
| or copyeditor, send us a portfolio; we'd love to talk! | ||||
|   | ||||
| @@ -1,25 +1,15 @@ | ||||
| # This is a multiarch Dockerfile.  See https://docs.docker.com/desktop/multi-arch/ | ||||
| # | ||||
| # To set up the first time: | ||||
| #     docker buildx create --name multiarch --use | ||||
| # | ||||
| # To build: | ||||
| #     docker buildx build --platform linux/amd64,linux/arm64 \ | ||||
| #       -f ./Dockerfile-postgresql -t zulip/zulip-postgresql:14 --push . | ||||
| # To build run `docker build -f Dockerfile-postgresql .` from the root of the | ||||
| # zulip repo. | ||||
|  | ||||
| # Currently the PostgreSQL images do not support automatic upgrading of | ||||
| # the on-disk data in volumes. So the base image cannot currently be upgraded | ||||
| # the on-disk data in volumes. So the base image can not currently be upgraded | ||||
| # without users needing a manual pgdump and restore. | ||||
|  | ||||
| # https://hub.docker.com/r/groonga/pgroonga/tags | ||||
| ARG PGROONGA_VERSION=latest | ||||
| ARG POSTGRESQL_VERSION=14 | ||||
| FROM groonga/pgroonga:$PGROONGA_VERSION-alpine-$POSTGRESQL_VERSION-slim | ||||
|  | ||||
| # Install hunspell, Zulip stop words, and run Zulip database | ||||
| # init. | ||||
| FROM groonga/pgroonga:latest-alpine-10-slim | ||||
| RUN apk add -U --no-cache hunspell-en | ||||
| RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix | ||||
| RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix  | ||||
| COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/local/share/postgresql/tsearch_data/zulip_english.stop | ||||
| COPY scripts/setup/create-db.sql /docker-entrypoint-initdb.d/zulip-create-db.sql | ||||
| COPY scripts/setup/create-pgroonga.sql /docker-entrypoint-initdb.d/zulip-create-pgroonga.sql | ||||
|   | ||||
							
								
								
									
										113
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										113
									
								
								README.md
									
									
									
									
									
								
							| @@ -1,23 +1,16 @@ | ||||
| # Zulip overview | ||||
|  | ||||
| [Zulip](https://zulip.com) is an open-source team collaboration tool with unique | ||||
| [topic-based threading][why-zulip] that combines the best of email and chat to | ||||
| make remote work productive and delightful. Fortune 500 companies, [leading open | ||||
| source projects][rust-case-study], and thousands of other organizations use | ||||
| Zulip every day. Zulip is the only [modern team chat app][features] that is | ||||
| designed for both live and asynchronous conversations. | ||||
| Zulip is a powerful, open source group chat application that combines the | ||||
| immediacy of real-time chat with the productivity benefits of threaded | ||||
| conversations. Zulip is used by open source projects, Fortune 500 companies, | ||||
| large standards bodies, and others who need a real-time chat system that | ||||
| allows users to easily process hundreds or thousands of messages a day. With | ||||
| over 700 contributors merging over 500 commits a month, Zulip is also the | ||||
| largest and fastest growing open source group chat project. | ||||
|  | ||||
| Zulip is built by a distributed community of developers from all around the | ||||
| world, with 74+ people who have each contributed 100+ commits. With | ||||
| over 1000 contributors merging over 500 commits a month, Zulip is the | ||||
| largest and fastest growing open source team chat project. | ||||
|  | ||||
| Come find us on the [development community chat](https://zulip.com/development-community/)! | ||||
|  | ||||
| [](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amain) | ||||
| [](https://codecov.io/gh/zulip/zulip) | ||||
| [](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amaster) | ||||
| [](https://codecov.io/gh/zulip/zulip/branch/master) | ||||
| [][mypy-coverage] | ||||
| [](https://github.com/astral-sh/ruff) | ||||
| [](https://github.com/psf/black) | ||||
| [](https://github.com/prettier/prettier) | ||||
| [](https://github.com/zulip/zulip/releases/latest) | ||||
| @@ -27,57 +20,61 @@ Come find us on the [development community chat](https://zulip.com/development-c | ||||
| [](https://github.com/sponsors/zulip) | ||||
|  | ||||
| [mypy-coverage]: https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/ | ||||
| [why-zulip]: https://zulip.com/why-zulip/ | ||||
| [rust-case-study]: https://zulip.com/case-studies/rust/ | ||||
| [features]: https://zulip.com/features/ | ||||
|  | ||||
| ## Getting started | ||||
|  | ||||
| - **Contributing code**. Check out our [guide for new | ||||
|   contributors](https://zulip.readthedocs.io/en/latest/contributing/contributing.html) | ||||
|   to get started. We have invested in making Zulip’s code highly | ||||
|   readable, thoughtfully tested, and easy to modify. Beyond that, we | ||||
|   have written an extraordinary 150K words of documentation for Zulip | ||||
|   contributors. | ||||
| Click on the appropriate link below. If nothing seems to apply, | ||||
| join us on the | ||||
| [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html) | ||||
| and tell us what's up! | ||||
|  | ||||
| - **Contributing non-code**. [Report an | ||||
|   issue](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#reporting-issues), | ||||
|   [translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) | ||||
|   Zulip into your language, or [give us | ||||
|   feedback](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#user-feedback). | ||||
|   We'd love to hear from you, whether you've been using Zulip for years, or are just | ||||
|   trying it out for the first time. | ||||
| You might be interested in: | ||||
|  | ||||
| - **Checking Zulip out**. The best way to see Zulip in action is to drop by the | ||||
|   [Zulip community server](https://zulip.com/development-community/). We also | ||||
|   recommend reading about Zulip's [unique | ||||
|   approach](https://zulip.com/why-zulip/) to organizing conversations. | ||||
| * **Contributing code**. Check out our | ||||
|   [guide for new contributors](https://zulip.readthedocs.io/en/latest/overview/contributing.html) | ||||
|   to get started.  Zulip prides itself on maintaining a clean and | ||||
|   well-tested codebase, and a stock of hundreds of | ||||
|   [beginner-friendly issues][beginner-friendly]. | ||||
|  | ||||
| - **Running a Zulip server**. Self-host Zulip directly on Ubuntu or Debian | ||||
|   Linux, in [Docker](https://github.com/zulip/docker-zulip), or with prebuilt | ||||
|   images for [Digital Ocean](https://marketplace.digitalocean.com/apps/zulip) and | ||||
|   [Render](https://render.com/docs/deploy-zulip). | ||||
|   Learn more about [self-hosting Zulip](https://zulip.com/self-hosting/). | ||||
| * **Contributing non-code**. | ||||
|   [Report an issue](https://zulip.readthedocs.io/en/latest/overview/contributing.html#reporting-issues), | ||||
|   [translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) Zulip | ||||
|   into your language, | ||||
|   [write](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach) | ||||
|   for the Zulip blog, or | ||||
|   [give us feedback](https://zulip.readthedocs.io/en/latest/overview/contributing.html#user-feedback). We | ||||
|   would love to hear from you, even if you're just trying the product out. | ||||
|  | ||||
| - **Using Zulip without setting up a server**. Learn about [Zulip | ||||
|   Cloud](https://zulip.com/plans/) hosting options. Zulip sponsors free [Zulip | ||||
|   Cloud Standard](https://zulip.com/plans/) for hundreds of worthy | ||||
|   organizations, including [fellow open-source | ||||
|   projects](https://zulip.com/for/open-source/). | ||||
| * **Supporting Zulip**. Advocate for your organization to use Zulip, become a [sponsor](https://github.com/sponsors/zulip), write a | ||||
|   review in the mobile app stores, or | ||||
|   [upvote Zulip](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach) on | ||||
|   product comparison sites. | ||||
|  | ||||
| - **Participating in [outreach | ||||
|   programs](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#outreach-programs)** | ||||
|   like [Google Summer of Code](https://developers.google.com/open-source/gsoc/) | ||||
|   and [Outreachy](https://www.outreachy.org/). | ||||
| * **Checking Zulip out**. The best way to see Zulip in action is to drop by | ||||
|   the | ||||
|   [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html). We | ||||
|   also recommend reading Zulip for | ||||
|   [open source](https://zulip.com/for/open-source/), Zulip for | ||||
|   [companies](https://zulip.com/for/companies/), or Zulip for | ||||
|   [working groups and part time communities](https://zulip.com/for/working-groups-and-communities/). | ||||
|  | ||||
| - **Supporting Zulip**. Advocate for your organization to use Zulip, become a | ||||
|   [sponsor](https://github.com/sponsors/zulip), write a review in the mobile app | ||||
|   stores, or [help others find | ||||
|   Zulip](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#help-others-find-zulip). | ||||
| * **Running a Zulip server**. Use a preconfigured [DigitalOcean droplet](https://marketplace.digitalocean.com/apps/zulip), | ||||
|   [install Zulip](https://zulip.readthedocs.io/en/stable/production/install.html) | ||||
|   directly, or use Zulip's | ||||
|   experimental [Docker image](https://zulip.readthedocs.io/en/latest/production/deployment.html#zulip-in-docker). | ||||
|   Commercial support is available; see <https://zulip.com/plans> for details. | ||||
|  | ||||
| You may also be interested in reading our [blog](https://blog.zulip.org/), and | ||||
| following us on [Twitter](https://twitter.com/zulip) and | ||||
| [LinkedIn](https://www.linkedin.com/company/zulip-project/). | ||||
| * **Using Zulip without setting up a server**. <https://zulip.com> | ||||
|   offers free and commercial hosting, including providing our paid | ||||
|   plan for free to fellow open source projects. | ||||
|  | ||||
| * **Participating in [outreach | ||||
|   programs](https://zulip.readthedocs.io/en/latest/overview/contributing.html#outreach-programs)** | ||||
|   like Google Summer of Code. | ||||
|  | ||||
| You may also be interested in reading our [blog](https://blog.zulip.org/) or | ||||
| following us on [Twitter](https://twitter.com/zulip). | ||||
| Zulip is distributed under the | ||||
| [Apache 2.0](https://github.com/zulip/zulip/blob/main/LICENSE) license. | ||||
| [Apache 2.0](https://github.com/zulip/zulip/blob/master/LICENSE) license. | ||||
|  | ||||
| [beginner-friendly]: https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22 | ||||
|   | ||||
							
								
								
									
										19
									
								
								SECURITY.md
									
									
									
									
									
								
							
							
						
						
									
										19
									
								
								SECURITY.md
									
									
									
									
									
								
							| @@ -1,11 +1,14 @@ | ||||
| # Security policy | ||||
|  | ||||
| Security announcements are sent to zulip-announce@googlegroups.com, | ||||
| so you should subscribe if you are running Zulip in production. | ||||
|  | ||||
| ## Reporting a vulnerability | ||||
|  | ||||
| We love responsible reports of (potential) security issues in Zulip, | ||||
| whether in the latest release or our development branch. | ||||
|  | ||||
| Our security contact is security@zulip.com. Reporters should expect a | ||||
| Our security contact is security@zulip.com.  Reporters should expect a | ||||
| response within 24 hours. | ||||
|  | ||||
| Please include details on the issue and how you'd like to be credited | ||||
| @@ -14,13 +17,6 @@ in our release notes when we publish the fix. | ||||
| Our [security model][security-model] document may be a helpful | ||||
| resource. | ||||
|  | ||||
| ## Security announcements | ||||
|  | ||||
| We send security announcements to our [announcement mailing | ||||
| list](https://groups.google.com/g/zulip-announce). If you are running | ||||
| Zulip in production, you should subscribe, by clicking "Join group" at | ||||
| the top of that page. | ||||
|  | ||||
| ## Supported versions | ||||
|  | ||||
| Zulip provides security support for the latest major release, in the | ||||
| @@ -29,9 +25,8 @@ form of minor security/maintenance releases. | ||||
| We work hard to make [upgrades][upgrades] reliable, so that there's no | ||||
| reason to run older major releases. | ||||
|  | ||||
| See also our documentation on the [Zulip release | ||||
| lifecycle][release-lifecycle]. | ||||
| See also our documentation on the [Zulip release lifecycle][release-lifecycle] | ||||
|  | ||||
| [security-model]: https://zulip.readthedocs.io/en/latest/production/security-model.html | ||||
| [upgrades]: https://zulip.readthedocs.io/en/stable/production/upgrade.html#upgrading-to-a-release | ||||
| [release-lifecycle]: https://zulip.readthedocs.io/en/latest/overview/release-lifecycle.html | ||||
| [upgrades]: https://zulip.readthedocs.io/en/latest/production/upgrade-or-modify.html#upgrading-to-a-release | ||||
| [release-cycle]: https://zulip.readthedocs.io/en/latest/overview/release-lifecycle.html | ||||
|   | ||||
							
								
								
									
										112
									
								
								Vagrantfile
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										112
									
								
								Vagrantfile
									
									
									
									
										vendored
									
									
								
							| @@ -1,8 +1,48 @@ | ||||
| # -*- mode: ruby -*- | ||||
|  | ||||
| Vagrant.require_version ">= 2.2.6" | ||||
| VAGRANTFILE_API_VERSION = "2" | ||||
|  | ||||
| if Vagrant::VERSION == "1.8.7" | ||||
|   path = `command -v curl` | ||||
|   if path.include?("/opt/vagrant/embedded/bin/curl") | ||||
|     puts "In Vagrant 1.8.7, curl is broken. Please use Vagrant 2.0.2 " \ | ||||
|          "or run 'sudo rm -f /opt/vagrant/embedded/bin/curl' to fix the " \ | ||||
|          "issue before provisioning. See " \ | ||||
|          "https://github.com/mitchellh/vagrant/issues/7997 " \ | ||||
|          "for reference." | ||||
|     exit | ||||
|   end | ||||
| end | ||||
|  | ||||
| # Workaround: Vagrant removed the atlas.hashicorp.com to | ||||
| # vagrantcloud.com redirect in February 2018. The value of | ||||
| # DEFAULT_SERVER_URL in Vagrant versions less than 1.9.3 is | ||||
| # atlas.hashicorp.com, which means that removal broke the fetching and | ||||
| # updating of boxes (since the old URL doesn't work).  See | ||||
| # https://github.com/hashicorp/vagrant/issues/9442 | ||||
| if Vagrant::DEFAULT_SERVER_URL == "atlas.hashicorp.com" | ||||
|   Vagrant::DEFAULT_SERVER_URL.replace("https://vagrantcloud.com") | ||||
| end | ||||
|  | ||||
| # Monkey patch https://github.com/hashicorp/vagrant/pull/10879 so we | ||||
| # can fall back to another provider if docker is not installed. | ||||
| begin | ||||
|   require Vagrant.source_root.join("plugins", "providers", "docker", "provider") | ||||
| rescue LoadError | ||||
| else | ||||
|   VagrantPlugins::DockerProvider::Provider.class_eval do | ||||
|     method(:usable?).owner == singleton_class or def self.usable?(raise_error = false) | ||||
|       VagrantPlugins::DockerProvider::Driver.new.execute("docker", "version") | ||||
|       true | ||||
|     rescue Vagrant::Errors::CommandUnavailable, VagrantPlugins::DockerProvider::Errors::ExecuteError | ||||
|       raise if raise_error | ||||
|       return false | ||||
|     end | ||||
|   end | ||||
| end | ||||
|  | ||||
| Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| | ||||
|  | ||||
| Vagrant.configure("2") do |config| | ||||
|   # The Zulip development environment runs on 9991 on the guest. | ||||
|   host_port = 9991 | ||||
|   http_proxy = https_proxy = no_proxy = nil | ||||
| @@ -15,10 +55,8 @@ Vagrant.configure("2") do |config| | ||||
|   ubuntu_mirror = "" | ||||
|   vboxadd_version = nil | ||||
|  | ||||
|   config.vm.box = "bento/ubuntu-20.04" | ||||
|  | ||||
|   config.vm.synced_folder ".", "/vagrant", disabled: true | ||||
|   config.vm.synced_folder ".", "/srv/zulip", docker_consistency: "z" | ||||
|   config.vm.synced_folder ".", "/srv/zulip" | ||||
|  | ||||
|   vagrant_config_file = ENV["HOME"] + "/.zulip-vagrant-config" | ||||
|   if File.file?(vagrant_config_file) | ||||
| @@ -63,7 +101,6 @@ Vagrant.configure("2") do |config| | ||||
|   config.vm.network "forwarded_port", guest: 9994, host: host_port + 3, host_ip: host_ip_addr | ||||
|   # Specify Docker provider before VirtualBox provider so it's preferred. | ||||
|   config.vm.provider "docker" do |d, override| | ||||
|     override.vm.box = nil | ||||
|     d.build_dir = File.join(__dir__, "tools", "setup", "dev-vagrant-docker") | ||||
|     d.build_args = ["--build-arg", "VAGRANT_UID=#{Process.uid}"] | ||||
|     if !ubuntu_mirror.empty? | ||||
| @@ -74,6 +111,7 @@ Vagrant.configure("2") do |config| | ||||
|   end | ||||
|  | ||||
|   config.vm.provider "virtualbox" do |vb, override| | ||||
|     override.vm.box = "hashicorp/bionic64" | ||||
|     # It's possible we can get away with just 1.5GB; more testing needed | ||||
|     vb.memory = vm_memory | ||||
|     vb.cpus = vm_num_cpus | ||||
| @@ -90,19 +128,77 @@ Vagrant.configure("2") do |config| | ||||
|   end | ||||
|  | ||||
|   config.vm.provider "hyperv" do |h, override| | ||||
|     override.vm.box = "bento/ubuntu-18.04" | ||||
|     h.memory = vm_memory | ||||
|     h.maxmemory = vm_memory | ||||
|     h.cpus = vm_num_cpus | ||||
|   end | ||||
|  | ||||
|   config.vm.provider "parallels" do |prl, override| | ||||
|     override.vm.box = "bento/ubuntu-18.04" | ||||
|     override.vm.box_version = "202005.21.0" | ||||
|     prl.memory = vm_memory | ||||
|     prl.cpus = vm_num_cpus | ||||
|   end | ||||
|  | ||||
|   $provision_script = <<SCRIPT | ||||
| set -x | ||||
| set -e | ||||
| set -o pipefail | ||||
|  | ||||
| # Code should go here, rather than tools/provision, only if it is | ||||
| # something that we don't want to happen when running provision in a | ||||
| # development environment not using Vagrant. | ||||
|  | ||||
| # Set the Ubuntu mirror | ||||
| [ ! '#{ubuntu_mirror}' ] || sudo sed -i 's|http://\\(\\w*\\.\\)*archive\\.ubuntu\\.com/ubuntu/\\? |#{ubuntu_mirror} |' /etc/apt/sources.list | ||||
|  | ||||
| # Set the MOTD on the system to have Zulip instructions | ||||
| sudo ln -nsf /srv/zulip/tools/setup/dev-motd /etc/update-motd.d/99-zulip-dev | ||||
| sudo rm -f /etc/update-motd.d/10-help-text | ||||
| sudo dpkg --purge landscape-client landscape-common ubuntu-release-upgrader-core update-manager-core update-notifier-common ubuntu-server | ||||
| sudo dpkg-divert --add --rename /etc/default/motd-news | ||||
| sudo sh -c 'echo ENABLED=0 > /etc/default/motd-news' | ||||
|  | ||||
| # Set default locale, this prevents errors if the user has another locale set. | ||||
| if ! grep -q 'LC_ALL=C.UTF-8' /etc/default/locale; then | ||||
|     echo "LC_ALL=C.UTF-8" | sudo tee -a /etc/default/locale | ||||
| fi | ||||
|  | ||||
| # Set an environment variable, so that we won't print the virtualenv | ||||
| # shell warning (it'll be wrong, since the shell is dying anyway) | ||||
| export SKIP_VENV_SHELL_WARNING=1 | ||||
|  | ||||
| # End `set -x`, so that the end of provision doesn't look like an error | ||||
| # message after a successful run. | ||||
| set +x | ||||
|  | ||||
| # Check if the zulip directory is writable | ||||
| if [ ! -w /srv/zulip ]; then | ||||
|     echo "The vagrant user is unable to write to the zulip directory." | ||||
|     echo "To fix this, run the following commands on the host machine:" | ||||
|     # sudo is required since our uid is not 1000 | ||||
|     echo '    vagrant halt -f' | ||||
|     echo '    rm -rf /PATH/TO/ZULIP/CLONE/.vagrant' | ||||
|     echo '    sudo chown -R 1000:$(id -g) /PATH/TO/ZULIP/CLONE' | ||||
|     echo "Replace /PATH/TO/ZULIP/CLONE with the path to where zulip code is cloned." | ||||
|     echo "You can resume setting up your vagrant environment by running:" | ||||
|     echo "    vagrant up" | ||||
|     exit 1 | ||||
| fi | ||||
| # Provision the development environment | ||||
| ln -nsf /srv/zulip ~/zulip | ||||
| /srv/zulip/tools/provision | ||||
|  | ||||
| # Run any custom provision hooks the user has configured | ||||
| if [ -f /srv/zulip/tools/custom_provision ]; then | ||||
|     chmod +x /srv/zulip/tools/custom_provision | ||||
|     /srv/zulip/tools/custom_provision | ||||
| fi | ||||
| SCRIPT | ||||
|  | ||||
|   config.vm.provision "shell", | ||||
|     # We want provision to be run with the permissions of the vagrant user. | ||||
|     privileged: false, | ||||
|     path: "tools/setup/vagrant-provision", | ||||
|     env: { "UBUNTU_MIRROR" => ubuntu_mirror } | ||||
|     inline: $provision_script | ||||
| end | ||||
|   | ||||
| @@ -5,10 +5,9 @@ from datetime import datetime, timedelta | ||||
| from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.db import connection, models | ||||
| from django.db import connection | ||||
| from django.db.models import F | ||||
| from psycopg2.sql import SQL, Composable, Identifier, Literal | ||||
| from typing_extensions import TypeAlias, override | ||||
|  | ||||
| from analytics.models import ( | ||||
|     BaseCount, | ||||
| @@ -21,15 +20,15 @@ from analytics.models import ( | ||||
| ) | ||||
| from zerver.lib.logging_util import log_to_file | ||||
| from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, floor_to_hour, verify_UTC | ||||
| from zerver.models import Message, Realm, RealmAuditLog, Stream, UserActivityInterval, UserProfile | ||||
|  | ||||
| if settings.ZILENCER_ENABLED: | ||||
|     from zilencer.models import ( | ||||
|         RemoteInstallationCount, | ||||
|         RemoteRealm, | ||||
|         RemoteRealmCount, | ||||
|         RemoteZulipServer, | ||||
|     ) | ||||
| from zerver.models import ( | ||||
|     Message, | ||||
|     Realm, | ||||
|     RealmAuditLog, | ||||
|     Stream, | ||||
|     UserActivityInterval, | ||||
|     UserProfile, | ||||
|     models, | ||||
| ) | ||||
|  | ||||
| ## Logging setup ## | ||||
|  | ||||
| @@ -71,8 +70,7 @@ class CountStat: | ||||
|         else: | ||||
|             self.interval = self.time_increment | ||||
|  | ||||
|     @override | ||||
|     def __repr__(self) -> str: | ||||
|     def __str__(self) -> str: | ||||
|         return f"<CountStat: {self.property}>" | ||||
|  | ||||
|     def last_successful_fill(self) -> Optional[datetime]: | ||||
| @@ -178,7 +176,7 @@ def do_update_fill_state(fill_state: FillState, end_time: datetime, state: int) | ||||
|  | ||||
|  | ||||
| # We assume end_time is valid (e.g. is on a day or hour boundary as appropriate) | ||||
| # and is time-zone-aware. It is the caller's responsibility to enforce this! | ||||
| # and is timezone aware. It is the caller's responsibility to enforce this! | ||||
| def do_fill_count_stat_at_hour( | ||||
|     stat: CountStat, end_time: datetime, realm: Optional[Realm] = None | ||||
| ) -> None: | ||||
| @@ -216,7 +214,7 @@ def do_aggregate_to_summary_table( | ||||
|     # Aggregate into RealmCount | ||||
|     output_table = stat.data_collector.output_table | ||||
|     if realm is not None: | ||||
|         realm_clause: Composable = SQL("AND zerver_realm.id = {}").format(Literal(realm.id)) | ||||
|         realm_clause = SQL("AND zerver_realm.id = {}").format(Literal(realm.id)) | ||||
|     else: | ||||
|         realm_clause = SQL("") | ||||
|  | ||||
| @@ -298,10 +296,9 @@ def do_aggregate_to_summary_table( | ||||
|  | ||||
| ## Utility functions called from outside counts.py ## | ||||
|  | ||||
|  | ||||
| # called from zerver.actions; should not throw any errors | ||||
| # called from zerver/lib/actions.py; should not throw any errors | ||||
| def do_increment_logging_stat( | ||||
|     model_object_for_bucket: Union[Realm, UserProfile, Stream, "RemoteRealm", "RemoteZulipServer"], | ||||
|     zerver_object: Union[Realm, UserProfile, Stream], | ||||
|     stat: CountStat, | ||||
|     subgroup: Optional[Union[str, int, bool]], | ||||
|     event_time: datetime, | ||||
| @@ -312,37 +309,18 @@ def do_increment_logging_stat( | ||||
|  | ||||
|     table = stat.data_collector.output_table | ||||
|     if table == RealmCount: | ||||
|         assert isinstance(model_object_for_bucket, Realm) | ||||
|         id_args: Dict[ | ||||
|             str, Optional[Union[Realm, UserProfile, Stream, "RemoteRealm", "RemoteZulipServer"]] | ||||
|         ] = {"realm": model_object_for_bucket} | ||||
|         id_args = {"realm": zerver_object} | ||||
|     elif table == UserCount: | ||||
|         assert isinstance(model_object_for_bucket, UserProfile) | ||||
|         id_args = {"realm": model_object_for_bucket.realm, "user": model_object_for_bucket} | ||||
|     elif table == StreamCount: | ||||
|         assert isinstance(model_object_for_bucket, Stream) | ||||
|         id_args = {"realm": model_object_for_bucket.realm, "stream": model_object_for_bucket} | ||||
|     elif table == RemoteInstallationCount: | ||||
|         assert isinstance(model_object_for_bucket, RemoteZulipServer) | ||||
|         id_args = {"server": model_object_for_bucket, "remote_id": None} | ||||
|     elif table == RemoteRealmCount: | ||||
|         assert isinstance(model_object_for_bucket, RemoteRealm) | ||||
|         id_args = { | ||||
|             "server": model_object_for_bucket.server, | ||||
|             "remote_realm": model_object_for_bucket, | ||||
|             "remote_id": None, | ||||
|         } | ||||
|     else: | ||||
|         raise AssertionError("Unsupported CountStat output_table") | ||||
|         id_args = {"realm": zerver_object.realm, "user": zerver_object} | ||||
|     else:  # StreamCount | ||||
|         id_args = {"realm": zerver_object.realm, "stream": zerver_object} | ||||
|  | ||||
|     if stat.frequency == CountStat.DAY: | ||||
|         end_time = ceiling_to_day(event_time) | ||||
|     elif stat.frequency == CountStat.HOUR: | ||||
|     else:  # CountStat.HOUR: | ||||
|         end_time = ceiling_to_hour(event_time) | ||||
|     else: | ||||
|         raise AssertionError("Unsupported CountStat frequency") | ||||
|  | ||||
|     row, created = table._default_manager.get_or_create( | ||||
|     row, created = table.objects.get_or_create( | ||||
|         property=stat.property, | ||||
|         subgroup=subgroup, | ||||
|         end_time=end_time, | ||||
| @@ -372,7 +350,7 @@ def do_drop_single_stat(property: str) -> None: | ||||
|  | ||||
| ## DataCollector-level operations ## | ||||
|  | ||||
| QueryFn: TypeAlias = Callable[[Dict[str, Composable]], Composable] | ||||
| QueryFn = Callable[[Dict[str, Composable]], Composable] | ||||
|  | ||||
|  | ||||
| def do_pull_by_sql_query( | ||||
| @@ -380,11 +358,11 @@ def do_pull_by_sql_query( | ||||
|     start_time: datetime, | ||||
|     end_time: datetime, | ||||
|     query: QueryFn, | ||||
|     group_by: Optional[Tuple[Type[models.Model], str]], | ||||
|     group_by: Optional[Tuple[models.Model, str]], | ||||
| ) -> int: | ||||
|     if group_by is None: | ||||
|         subgroup: Composable = SQL("NULL") | ||||
|         group_by_clause: Composable = SQL("") | ||||
|         subgroup = SQL("NULL") | ||||
|         group_by_clause = SQL("") | ||||
|     else: | ||||
|         subgroup = Identifier(group_by[0]._meta.db_table, group_by[1]) | ||||
|         group_by_clause = SQL(", {}").format(subgroup) | ||||
| @@ -416,7 +394,7 @@ def do_pull_by_sql_query( | ||||
| def sql_data_collector( | ||||
|     output_table: Type[BaseCount], | ||||
|     query: QueryFn, | ||||
|     group_by: Optional[Tuple[Type[models.Model], str]], | ||||
|     group_by: Optional[Tuple[models.Model, str]], | ||||
| ) -> DataCollector: | ||||
|     def pull_function( | ||||
|         property: str, start_time: datetime, end_time: datetime, realm: Optional[Realm] = None | ||||
| @@ -470,15 +448,9 @@ def do_pull_minutes_active( | ||||
|  | ||||
| def count_message_by_user_query(realm: Optional[Realm]) -> QueryFn: | ||||
|     if realm is None: | ||||
|         realm_clause: Composable = SQL("") | ||||
|         realm_clause = SQL("") | ||||
|     else: | ||||
|         # We limit both userprofile and message so that we only see | ||||
|         # users from this realm, but also get the performance speedup | ||||
|         # of limiting messages by realm. | ||||
|         realm_clause = SQL( | ||||
|             "zerver_userprofile.realm_id = {} AND zerver_message.realm_id = {} AND" | ||||
|         ).format(Literal(realm.id), Literal(realm.id)) | ||||
|     # Uses index: zerver_message_realm_date_sent (or the only-date index) | ||||
|         realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id)) | ||||
|     return lambda kwargs: SQL( | ||||
|         """ | ||||
|     INSERT INTO analytics_usercount | ||||
| @@ -503,15 +475,9 @@ def count_message_by_user_query(realm: Optional[Realm]) -> QueryFn: | ||||
| # Note: ignores the group_by / group_by_clause. | ||||
| def count_message_type_by_user_query(realm: Optional[Realm]) -> QueryFn: | ||||
|     if realm is None: | ||||
|         realm_clause: Composable = SQL("") | ||||
|         realm_clause = SQL("") | ||||
|     else: | ||||
|         # We limit both userprofile and message so that we only see | ||||
|         # users from this realm, but also get the performance speedup | ||||
|         # of limiting messages by realm. | ||||
|         realm_clause = SQL( | ||||
|             "zerver_userprofile.realm_id = {} AND zerver_message.realm_id = {} AND" | ||||
|         ).format(Literal(realm.id), Literal(realm.id)) | ||||
|     # Uses index: zerver_message_realm_date_sent (or the only-date index) | ||||
|         realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id)) | ||||
|     return lambda kwargs: SQL( | ||||
|         """ | ||||
|     INSERT INTO analytics_usercount | ||||
| @@ -558,12 +524,9 @@ def count_message_type_by_user_query(realm: Optional[Realm]) -> QueryFn: | ||||
| # table, consider writing a new query for efficiency. | ||||
| def count_message_by_stream_query(realm: Optional[Realm]) -> QueryFn: | ||||
|     if realm is None: | ||||
|         realm_clause: Composable = SQL("") | ||||
|         realm_clause = SQL("") | ||||
|     else: | ||||
|         realm_clause = SQL( | ||||
|             "zerver_stream.realm_id = {} AND zerver_message.realm_id = {} AND" | ||||
|         ).format(Literal(realm.id), Literal(realm.id)) | ||||
|     # Uses index: zerver_message_realm_date_sent (or the only-date index) | ||||
|         realm_clause = SQL("zerver_stream.realm_id = {} AND").format(Literal(realm.id)) | ||||
|     return lambda kwargs: SQL( | ||||
|         """ | ||||
|     INSERT INTO analytics_streamcount | ||||
| @@ -595,7 +558,7 @@ def count_message_by_stream_query(realm: Optional[Realm]) -> QueryFn: | ||||
| # currently the only stat that uses this. | ||||
| def count_user_by_realm_query(realm: Optional[Realm]) -> QueryFn: | ||||
|     if realm is None: | ||||
|         realm_clause: Composable = SQL("") | ||||
|         realm_clause = SQL("") | ||||
|     else: | ||||
|         realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id)) | ||||
|     return lambda kwargs: SQL( | ||||
| @@ -625,7 +588,7 @@ def count_user_by_realm_query(realm: Optional[Realm]) -> QueryFn: | ||||
| # In particular, it's important to ensure that migrations don't cause that to happen. | ||||
| def check_realmauditlog_by_user_query(realm: Optional[Realm]) -> QueryFn: | ||||
|     if realm is None: | ||||
|         realm_clause: Composable = SQL("") | ||||
|         realm_clause = SQL("") | ||||
|     else: | ||||
|         realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id)) | ||||
|     return lambda kwargs: SQL( | ||||
| @@ -665,7 +628,7 @@ def check_realmauditlog_by_user_query(realm: Optional[Realm]) -> QueryFn: | ||||
|  | ||||
| def check_useractivityinterval_by_user_query(realm: Optional[Realm]) -> QueryFn: | ||||
|     if realm is None: | ||||
|         realm_clause: Composable = SQL("") | ||||
|         realm_clause = SQL("") | ||||
|     else: | ||||
|         realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id)) | ||||
|     return lambda kwargs: SQL( | ||||
| @@ -689,7 +652,7 @@ def check_useractivityinterval_by_user_query(realm: Optional[Realm]) -> QueryFn: | ||||
|  | ||||
| def count_realm_active_humans_query(realm: Optional[Realm]) -> QueryFn: | ||||
|     if realm is None: | ||||
|         realm_clause: Composable = SQL("") | ||||
|         realm_clause = SQL("") | ||||
|     else: | ||||
|         realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id)) | ||||
|     return lambda kwargs: SQL( | ||||
| @@ -841,12 +804,6 @@ def get_count_stats(realm: Optional[Realm] = None) -> Dict[str, CountStat]: | ||||
|         CountStat( | ||||
|             "minutes_active::day", DataCollector(UserCount, do_pull_minutes_active), CountStat.DAY | ||||
|         ), | ||||
|         # Tracks the number of push notifications requested by the server. | ||||
|         LoggingCountStat( | ||||
|             "mobile_pushes_sent::day", | ||||
|             RealmCount, | ||||
|             CountStat.DAY, | ||||
|         ), | ||||
|         # Rate limiting stats | ||||
|         # Used to limit the number of invitation emails sent by a realm | ||||
|         LoggingCountStat("invites_sent::day", RealmCount, CountStat.DAY), | ||||
| @@ -861,65 +818,8 @@ def get_count_stats(realm: Optional[Realm] = None) -> Dict[str, CountStat]: | ||||
|         ), | ||||
|     ] | ||||
|  | ||||
|     if settings.ZILENCER_ENABLED: | ||||
|         # See also the remote_installation versions of these in REMOTE_INSTALLATION_COUNT_STATS. | ||||
|         count_stats_.append( | ||||
|             LoggingCountStat( | ||||
|                 "mobile_pushes_received::day", | ||||
|                 RemoteRealmCount, | ||||
|                 CountStat.DAY, | ||||
|             ) | ||||
|         ) | ||||
|         count_stats_.append( | ||||
|             LoggingCountStat( | ||||
|                 "mobile_pushes_forwarded::day", | ||||
|                 RemoteRealmCount, | ||||
|                 CountStat.DAY, | ||||
|             ) | ||||
|         ) | ||||
|  | ||||
|     return OrderedDict((stat.property, stat) for stat in count_stats_) | ||||
|  | ||||
|  | ||||
| # These properties are tracked by the bouncer itself and therefore syncing them | ||||
| # from a remote server should not be allowed - or the server would be able to interfere | ||||
| # with our data. | ||||
| BOUNCER_ONLY_REMOTE_COUNT_STAT_PROPERTIES = [ | ||||
|     "mobile_pushes_received::day", | ||||
|     "mobile_pushes_forwarded::day", | ||||
| ] | ||||
|  | ||||
| # To avoid refactoring for now COUNT_STATS can be used as before | ||||
| COUNT_STATS = get_count_stats() | ||||
|  | ||||
| REMOTE_INSTALLATION_COUNT_STATS = OrderedDict() | ||||
|  | ||||
| if settings.ZILENCER_ENABLED: | ||||
|     # REMOTE_INSTALLATION_COUNT_STATS contains duplicates of the | ||||
|     # RemoteRealmCount stats declared above; it is necessary because | ||||
|     # pre-8.0 servers do not send the fields required to identify a | ||||
|     # RemoteRealm. | ||||
|  | ||||
|     # Tracks the number of push notifications requested to be sent | ||||
|     # by a remote server. | ||||
|     REMOTE_INSTALLATION_COUNT_STATS["mobile_pushes_received::day"] = LoggingCountStat( | ||||
|         "mobile_pushes_received::day", | ||||
|         RemoteInstallationCount, | ||||
|         CountStat.DAY, | ||||
|     ) | ||||
|     # Tracks the number of push notifications successfully sent to | ||||
|     # mobile devices, as requested by the remote server. Therefore | ||||
|     # this should be less than or equal to mobile_pushes_received - | ||||
|     # with potential tiny offsets resulting from a request being | ||||
|     # *received* by the bouncer right before midnight, but *sent* to | ||||
|     # the mobile device right after midnight. This would cause the | ||||
|     # increments to happen to CountStat records for different days. | ||||
|     REMOTE_INSTALLATION_COUNT_STATS["mobile_pushes_forwarded::day"] = LoggingCountStat( | ||||
|         "mobile_pushes_forwarded::day", | ||||
|         RemoteInstallationCount, | ||||
|         CountStat.DAY, | ||||
|     ) | ||||
|  | ||||
| ALL_COUNT_STATS = OrderedDict( | ||||
|     list(COUNT_STATS.items()) + list(REMOTE_INSTALLATION_COUNT_STATS.items()) | ||||
| ) | ||||
|   | ||||
| @@ -1,5 +1,5 @@ | ||||
| from math import sqrt | ||||
| from random import Random | ||||
| from random import gauss, random, seed | ||||
| from typing import List | ||||
|  | ||||
| from analytics.lib.counts import CountStat | ||||
| @@ -36,8 +36,6 @@ def generate_time_series_data( | ||||
|     partial_sum -- If True, return partial sum of the series. | ||||
|     random_seed -- Seed for random number generator. | ||||
|     """ | ||||
|     rng = Random(random_seed) | ||||
|  | ||||
|     if frequency == CountStat.HOUR: | ||||
|         length = days * 24 | ||||
|         seasonality = [non_business_hours_base] * 24 * 7 | ||||
| @@ -46,13 +44,13 @@ def generate_time_series_data( | ||||
|                 seasonality[24 * day + hour] = business_hours_base | ||||
|         holidays = [] | ||||
|         for i in range(days): | ||||
|             holidays.extend([rng.random() < holiday_rate] * 24) | ||||
|             holidays.extend([random() < holiday_rate] * 24) | ||||
|     elif frequency == CountStat.DAY: | ||||
|         length = days | ||||
|         seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [ | ||||
|             24 * non_business_hours_base | ||||
|         ] * 2 | ||||
|         holidays = [rng.random() < holiday_rate for i in range(days)] | ||||
|         holidays = [random() < holiday_rate for i in range(days)] | ||||
|     else: | ||||
|         raise AssertionError(f"Unknown frequency: {frequency}") | ||||
|     if length < 2: | ||||
| @@ -61,13 +59,14 @@ def generate_time_series_data( | ||||
|         ) | ||||
|     growth_base = growth ** (1.0 / (length - 1)) | ||||
|     values_no_noise = [ | ||||
|         seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length) | ||||
|         seasonality[i % len(seasonality)] * (growth_base ** i) for i in range(length) | ||||
|     ] | ||||
|  | ||||
|     noise_scalars = [rng.gauss(0, 1)] | ||||
|     seed(random_seed) | ||||
|     noise_scalars = [gauss(0, 1)] | ||||
|     for i in range(1, length): | ||||
|         noise_scalars.append( | ||||
|             noise_scalars[-1] * autocorrelation + rng.gauss(0, 1) * (1 - autocorrelation) | ||||
|             noise_scalars[-1] * autocorrelation + gauss(0, 1) * (1 - autocorrelation) | ||||
|         ) | ||||
|  | ||||
|     values = [ | ||||
|   | ||||
| @@ -30,5 +30,4 @@ def time_range( | ||||
|     while current >= start: | ||||
|         times.append(current) | ||||
|         current -= step | ||||
|     times.reverse() | ||||
|     return times | ||||
|     return list(reversed(times)) | ||||
|   | ||||
| @@ -5,11 +5,10 @@ from typing import Any, Dict | ||||
|  | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from typing_extensions import override | ||||
|  | ||||
| from analytics.lib.counts import ALL_COUNT_STATS, CountStat | ||||
| from analytics.lib.counts import COUNT_STATS, CountStat | ||||
| from analytics.models import installation_epoch | ||||
| from zerver.lib.timestamp import TimeZoneNotUTCError, floor_to_day, floor_to_hour, verify_UTC | ||||
| from zerver.lib.timestamp import TimezoneNotUTCException, floor_to_day, floor_to_hour, verify_UTC | ||||
| from zerver.models import Realm | ||||
|  | ||||
| states = { | ||||
| @@ -25,7 +24,6 @@ class Command(BaseCommand): | ||||
|  | ||||
|     Run as a cron job that runs every hour.""" | ||||
|  | ||||
|     @override | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         fill_state = self.get_fill_state() | ||||
|         status = fill_state["status"] | ||||
| @@ -44,13 +42,13 @@ class Command(BaseCommand): | ||||
|  | ||||
|         warning_unfilled_properties = [] | ||||
|         critical_unfilled_properties = [] | ||||
|         for property, stat in ALL_COUNT_STATS.items(): | ||||
|         for property, stat in COUNT_STATS.items(): | ||||
|             last_fill = stat.last_successful_fill() | ||||
|             if last_fill is None: | ||||
|                 last_fill = installation_epoch() | ||||
|             try: | ||||
|                 verify_UTC(last_fill) | ||||
|             except TimeZoneNotUTCError: | ||||
|             except TimezoneNotUTCException: | ||||
|                 return {"status": 2, "message": f"FillState not in UTC for {property}"} | ||||
|  | ||||
|             if stat.frequency == CountStat.DAY: | ||||
|   | ||||
| @@ -2,7 +2,6 @@ from argparse import ArgumentParser | ||||
| from typing import Any | ||||
|  | ||||
| from django.core.management.base import BaseCommand, CommandError | ||||
| from typing_extensions import override | ||||
|  | ||||
| from analytics.lib.counts import do_drop_all_analytics_tables | ||||
|  | ||||
| @@ -10,11 +9,9 @@ from analytics.lib.counts import do_drop_all_analytics_tables | ||||
| class Command(BaseCommand): | ||||
|     help = """Clear analytics tables.""" | ||||
|  | ||||
|     @override | ||||
|     def add_arguments(self, parser: ArgumentParser) -> None: | ||||
|         parser.add_argument("--force", action="store_true", help="Clear analytics tables.") | ||||
|  | ||||
|     @override | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         if options["force"]: | ||||
|             do_drop_all_analytics_tables() | ||||
|   | ||||
| @@ -2,23 +2,20 @@ from argparse import ArgumentParser | ||||
| from typing import Any | ||||
|  | ||||
| from django.core.management.base import BaseCommand, CommandError | ||||
| from typing_extensions import override | ||||
|  | ||||
| from analytics.lib.counts import ALL_COUNT_STATS, do_drop_single_stat | ||||
| from analytics.lib.counts import COUNT_STATS, do_drop_single_stat | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = """Clear analytics tables.""" | ||||
|  | ||||
|     @override | ||||
|     def add_arguments(self, parser: ArgumentParser) -> None: | ||||
|         parser.add_argument("--force", action="store_true", help="Actually do it.") | ||||
|         parser.add_argument("--property", help="The property of the stat to be cleared.") | ||||
|  | ||||
|     @override | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         property = options["property"] | ||||
|         if property not in ALL_COUNT_STATS: | ||||
|         if property not in COUNT_STATS: | ||||
|             raise CommandError(f"Invalid property: {property}") | ||||
|         if not options["force"]: | ||||
|             raise CommandError("No action taken. Use --force.") | ||||
|   | ||||
| @@ -1,11 +1,9 @@ | ||||
| import os | ||||
| from datetime import timedelta | ||||
| from typing import Any, Dict, List, Mapping, Type, Union | ||||
| from typing import Any, Dict, List, Mapping, Optional, Type | ||||
| from unittest import mock | ||||
|  | ||||
| from django.core.files.uploadedfile import UploadedFile | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from typing_extensions import TypeAlias, override | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS, CountStat, do_drop_all_analytics_tables | ||||
| from analytics.lib.fixtures import generate_time_series_data | ||||
| @@ -18,24 +16,10 @@ from analytics.models import ( | ||||
|     StreamCount, | ||||
|     UserCount, | ||||
| ) | ||||
| from zerver.actions.create_realm import do_create_realm | ||||
| from zerver.actions.users import do_change_user_role | ||||
| from zerver.lib.actions import STREAM_ASSIGNMENT_COLORS, do_change_user_role, do_create_realm | ||||
| from zerver.lib.create_user import create_user | ||||
| from zerver.lib.storage import static_path | ||||
| from zerver.lib.stream_color import STREAM_ASSIGNMENT_COLORS | ||||
| from zerver.lib.timestamp import floor_to_day | ||||
| from zerver.lib.upload import upload_message_attachment_from_request | ||||
| from zerver.models import ( | ||||
|     Client, | ||||
|     Realm, | ||||
|     RealmAuditLog, | ||||
|     Recipient, | ||||
|     Stream, | ||||
|     Subscription, | ||||
|     SystemGroups, | ||||
|     UserGroup, | ||||
|     UserProfile, | ||||
| ) | ||||
| from zerver.models import Client, Realm, Recipient, Stream, Subscription, UserProfile | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
| @@ -69,7 +53,6 @@ class Command(BaseCommand): | ||||
|             random_seed=self.random_seed, | ||||
|         ) | ||||
|  | ||||
|     @override | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         # TODO: This should arguably only delete the objects | ||||
|         # associated with the "analytics" realm. | ||||
| @@ -94,71 +77,37 @@ class Command(BaseCommand): | ||||
|             string_id="analytics", name="Analytics", date_created=installation_time | ||||
|         ) | ||||
|  | ||||
|         shylock = create_user( | ||||
|             "shylock@analytics.ds", | ||||
|             "Shylock", | ||||
|             realm, | ||||
|             full_name="Shylock", | ||||
|             role=UserProfile.ROLE_REALM_OWNER, | ||||
|             force_date_joined=installation_time, | ||||
|         ) | ||||
|         with mock.patch("zerver.lib.create_user.timezone_now", return_value=installation_time): | ||||
|             shylock = create_user( | ||||
|                 "shylock@analytics.ds", | ||||
|                 "Shylock", | ||||
|                 realm, | ||||
|                 full_name="Shylock", | ||||
|                 role=UserProfile.ROLE_REALM_OWNER, | ||||
|             ) | ||||
|         do_change_user_role(shylock, UserProfile.ROLE_REALM_OWNER, acting_user=None) | ||||
|  | ||||
|         # Create guest user for set_guest_users_statistic. | ||||
|         create_user( | ||||
|             "bassanio@analytics.ds", | ||||
|             "Bassanio", | ||||
|             realm, | ||||
|             full_name="Bassanio", | ||||
|             role=UserProfile.ROLE_GUEST, | ||||
|             force_date_joined=installation_time, | ||||
|         ) | ||||
|  | ||||
|         administrators_user_group = UserGroup.objects.get( | ||||
|             name=SystemGroups.ADMINISTRATORS, realm=realm, is_system_group=True | ||||
|         ) | ||||
|         stream = Stream.objects.create( | ||||
|             name="all", | ||||
|             realm=realm, | ||||
|             date_created=installation_time, | ||||
|             can_remove_subscribers_group=administrators_user_group, | ||||
|         ) | ||||
|         stream = Stream.objects.create(name="all", realm=realm, date_created=installation_time) | ||||
|         recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM) | ||||
|         stream.recipient = recipient | ||||
|         stream.save(update_fields=["recipient"]) | ||||
|  | ||||
|         # Subscribe shylock to the stream to avoid invariant failures. | ||||
|         Subscription.objects.create( | ||||
|             recipient=recipient, | ||||
|             user_profile=shylock, | ||||
|             is_user_active=shylock.is_active, | ||||
|             color=STREAM_ASSIGNMENT_COLORS[0], | ||||
|         ) | ||||
|         RealmAuditLog.objects.create( | ||||
|             realm=realm, | ||||
|             modified_user=shylock, | ||||
|             modified_stream=stream, | ||||
|             event_last_message_id=0, | ||||
|             event_type=RealmAuditLog.SUBSCRIPTION_CREATED, | ||||
|             event_time=installation_time, | ||||
|         ) | ||||
|  | ||||
|         # Create an attachment in the database for set_storage_space_used_statistic. | ||||
|         IMAGE_FILE_PATH = static_path("images/test-images/checkbox.png") | ||||
|         file_info = os.stat(IMAGE_FILE_PATH) | ||||
|         file_size = file_info.st_size | ||||
|         with open(IMAGE_FILE_PATH, "rb") as fp: | ||||
|             upload_message_attachment_from_request(UploadedFile(fp), shylock, file_size) | ||||
|  | ||||
|         FixtureData: TypeAlias = Mapping[Union[str, int, None], List[int]] | ||||
|         # TODO: This should use subscribe_users_to_streams from populate_db. | ||||
|         subs = [ | ||||
|             Subscription( | ||||
|                 recipient=recipient, | ||||
|                 user_profile=shylock, | ||||
|                 is_user_active=shylock.is_active, | ||||
|                 color=STREAM_ASSIGNMENT_COLORS[0], | ||||
|             ), | ||||
|         ] | ||||
|         Subscription.objects.bulk_create(subs) | ||||
|  | ||||
|         def insert_fixture_data( | ||||
|             stat: CountStat, | ||||
|             fixture_data: FixtureData, | ||||
|             table: Type[BaseCount], | ||||
|             stat: CountStat, fixture_data: Mapping[Optional[str], List[int]], table: Type[BaseCount] | ||||
|         ) -> None: | ||||
|             end_times = time_range( | ||||
|                 last_end_time, last_end_time, stat.frequency, len(next(iter(fixture_data.values()))) | ||||
|                 last_end_time, last_end_time, stat.frequency, len(list(fixture_data.values())[0]) | ||||
|             ) | ||||
|             if table == InstallationCount: | ||||
|                 id_args: Dict[str, Any] = {} | ||||
| @@ -170,7 +119,7 @@ class Command(BaseCommand): | ||||
|                 id_args = {"stream": stream, "realm": realm} | ||||
|  | ||||
|             for subgroup, values in fixture_data.items(): | ||||
|                 table._default_manager.bulk_create( | ||||
|                 table.objects.bulk_create( | ||||
|                     table( | ||||
|                         property=stat.property, | ||||
|                         subgroup=subgroup, | ||||
| @@ -183,11 +132,11 @@ class Command(BaseCommand): | ||||
|                 ) | ||||
|  | ||||
|         stat = COUNT_STATS["1day_actives::day"] | ||||
|         realm_data: FixtureData = { | ||||
|         realm_data: Mapping[Optional[str], List[int]] = { | ||||
|             None: self.generate_fixture_data(stat, 0.08, 0.02, 3, 0.3, 6, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data: FixtureData = { | ||||
|         installation_data: Mapping[Optional[str], List[int]] = { | ||||
|             None: self.generate_fixture_data(stat, 0.8, 0.2, 4, 0.3, 6, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
| @@ -237,7 +186,7 @@ class Command(BaseCommand): | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["messages_sent:is_bot:hour"] | ||||
|         user_data: FixtureData = { | ||||
|         user_data: Mapping[Optional[str], List[int]] = { | ||||
|             "false": self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8, holiday_rate=0.1), | ||||
|         } | ||||
|         insert_fixture_data(stat, user_data, UserCount) | ||||
| @@ -330,7 +279,7 @@ class Command(BaseCommand): | ||||
|             "true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         stream_data: Mapping[Union[int, str, None], List[int]] = { | ||||
|         stream_data: Mapping[Optional[str], List[int]] = { | ||||
|             "false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4), | ||||
|             "true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2), | ||||
|         } | ||||
|   | ||||
							
								
								
									
										61
									
								
								analytics/management/commands/stream_stats.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										61
									
								
								analytics/management/commands/stream_stats.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,61 @@ | ||||
| from argparse import ArgumentParser | ||||
| from typing import Any | ||||
|  | ||||
| from django.core.management.base import BaseCommand, CommandError | ||||
| from django.db.models import Q | ||||
|  | ||||
| from zerver.models import Message, Realm, Recipient, Stream, Subscription, get_realm | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Generate statistics on the streams for a realm." | ||||
|  | ||||
|     def add_arguments(self, parser: ArgumentParser) -> None: | ||||
|         parser.add_argument( | ||||
|             "realms", metavar="<realm>", nargs="*", help="realm to generate statistics for" | ||||
|         ) | ||||
|  | ||||
|     def handle(self, *args: Any, **options: str) -> None: | ||||
|         if options["realms"]: | ||||
|             try: | ||||
|                 realms = [get_realm(string_id) for string_id in options["realms"]] | ||||
|             except Realm.DoesNotExist as e: | ||||
|                 raise CommandError(e) | ||||
|         else: | ||||
|             realms = Realm.objects.all() | ||||
|  | ||||
|         for realm in realms: | ||||
|             streams = Stream.objects.filter(realm=realm).exclude(Q(name__istartswith="tutorial-")) | ||||
|             # private stream count | ||||
|             private_count = 0 | ||||
|             # public stream count | ||||
|             public_count = 0 | ||||
|             for stream in streams: | ||||
|                 if stream.invite_only: | ||||
|                     private_count += 1 | ||||
|                 else: | ||||
|                     public_count += 1 | ||||
|             print("------------") | ||||
|             print(realm.string_id, end=" ") | ||||
|             print("{:>10} {} public streams and".format("(", public_count), end=" ") | ||||
|             print(f"{private_count} private streams )") | ||||
|             print("------------") | ||||
|             print("{:>25} {:>15} {:>10} {:>12}".format("stream", "subscribers", "messages", "type")) | ||||
|  | ||||
|             for stream in streams: | ||||
|                 if stream.invite_only: | ||||
|                     stream_type = "private" | ||||
|                 else: | ||||
|                     stream_type = "public" | ||||
|                 print(f"{stream.name:>25}", end=" ") | ||||
|                 recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id) | ||||
|                 print( | ||||
|                     "{:10}".format( | ||||
|                         len(Subscription.objects.filter(recipient=recipient, active=True)) | ||||
|                     ), | ||||
|                     end=" ", | ||||
|                 ) | ||||
|                 num_messages = len(Message.objects.filter(recipient=recipient)) | ||||
|                 print(f"{num_messages:12}", end=" ") | ||||
|                 print(f"{stream_type:>15}") | ||||
|             print("") | ||||
| @@ -8,11 +8,10 @@ from django.conf import settings | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.utils.dateparse import parse_datetime | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from typing_extensions import override | ||||
|  | ||||
| from analytics.lib.counts import ALL_COUNT_STATS, logger, process_count_stat | ||||
| from analytics.lib.counts import COUNT_STATS, logger, process_count_stat | ||||
| from scripts.lib.zulip_tools import ENDC, WARNING | ||||
| from zerver.lib.remote_server import send_analytics_to_push_bouncer | ||||
| from zerver.lib.remote_server import send_analytics_to_remote_server | ||||
| from zerver.lib.timestamp import floor_to_hour | ||||
| from zerver.models import Realm | ||||
|  | ||||
| @@ -22,7 +21,6 @@ class Command(BaseCommand): | ||||
|  | ||||
|     Run as a cron job that runs every hour.""" | ||||
|  | ||||
|     @override | ||||
|     def add_arguments(self, parser: ArgumentParser) -> None: | ||||
|         parser.add_argument( | ||||
|             "--time", | ||||
| @@ -39,7 +37,6 @@ class Command(BaseCommand): | ||||
|             "--verbose", action="store_true", help="Print timing information to stdout." | ||||
|         ) | ||||
|  | ||||
|     @override | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         try: | ||||
|             os.mkdir(settings.ANALYTICS_LOCK_DIR) | ||||
| @@ -63,20 +60,19 @@ class Command(BaseCommand): | ||||
|             return | ||||
|  | ||||
|         fill_to_time = parse_datetime(options["time"]) | ||||
|         assert fill_to_time is not None | ||||
|         if options["utc"]: | ||||
|             fill_to_time = fill_to_time.replace(tzinfo=timezone.utc) | ||||
|         if fill_to_time.tzinfo is None: | ||||
|             raise ValueError( | ||||
|                 "--time must be time-zone-aware. Maybe you meant to use the --utc option?" | ||||
|                 "--time must be timezone aware. Maybe you meant to use the --utc option?" | ||||
|             ) | ||||
|  | ||||
|         fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc)) | ||||
|  | ||||
|         if options["stat"] is not None: | ||||
|             stats = [ALL_COUNT_STATS[options["stat"]]] | ||||
|             stats = [COUNT_STATS[options["stat"]]] | ||||
|         else: | ||||
|             stats = list(ALL_COUNT_STATS.values()) | ||||
|             stats = list(COUNT_STATS.values()) | ||||
|  | ||||
|         logger.info("Starting updating analytics counts through %s", fill_to_time) | ||||
|         if options["verbose"]: | ||||
| @@ -96,4 +92,4 @@ class Command(BaseCommand): | ||||
|         logger.info("Finished updating analytics counts through %s", fill_to_time) | ||||
|  | ||||
|         if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS: | ||||
|             send_analytics_to_push_bouncer() | ||||
|             send_analytics_to_remote_server() | ||||
|   | ||||
| @@ -4,6 +4,7 @@ from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("zerver", "0030_realm_org_type"), | ||||
|         migrations.swappable_dependency(settings.AUTH_USER_MODEL), | ||||
|   | ||||
| @@ -2,6 +2,7 @@ from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0001_initial"), | ||||
|     ] | ||||
|   | ||||
| @@ -2,6 +2,7 @@ from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0002_remove_huddlecount"), | ||||
|     ] | ||||
|   | ||||
| @@ -2,6 +2,7 @@ from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0003_fillstate"), | ||||
|     ] | ||||
|   | ||||
| @@ -2,6 +2,7 @@ from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0004_add_subgroup"), | ||||
|     ] | ||||
|   | ||||
| @@ -2,6 +2,7 @@ from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0005_alter_field_size"), | ||||
|     ] | ||||
|   | ||||
| @@ -3,6 +3,7 @@ from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0006_add_subgroup_to_unique_constraints"), | ||||
|     ] | ||||
|   | ||||
| @@ -1,33 +1,25 @@ | ||||
| # Generated by Django 1.10.5 on 2017-02-01 22:28 | ||||
| from django.db import migrations, models | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("zerver", "0050_userprofile_avatar_version"), | ||||
|         ("analytics", "0007_remove_interval"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddIndex( | ||||
|             model_name="realmcount", | ||||
|             index=models.Index( | ||||
|                 fields=["property", "end_time"], | ||||
|                 name="analytics_realmcount_property_end_time_3b60396b_idx", | ||||
|             ), | ||||
|         migrations.AlterIndexTogether( | ||||
|             name="realmcount", | ||||
|             index_together={("property", "end_time")}, | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="streamcount", | ||||
|             index=models.Index( | ||||
|                 fields=["property", "realm", "end_time"], | ||||
|                 name="analytics_streamcount_property_realm_id_end_time_155ae930_idx", | ||||
|             ), | ||||
|         migrations.AlterIndexTogether( | ||||
|             name="streamcount", | ||||
|             index_together={("property", "realm", "end_time")}, | ||||
|         ), | ||||
|         migrations.AddIndex( | ||||
|             model_name="usercount", | ||||
|             index=models.Index( | ||||
|                 fields=["property", "realm", "end_time"], | ||||
|                 name="analytics_usercount_property_realm_id_end_time_591dbec1_idx", | ||||
|             ), | ||||
|         migrations.AlterIndexTogether( | ||||
|             name="usercount", | ||||
|             index_together={("property", "realm", "end_time")}, | ||||
|         ), | ||||
|     ] | ||||
|   | ||||
| @@ -1,10 +1,10 @@ | ||||
| from django.db import migrations | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
| from django.db.backends.postgresql.schema import DatabaseSchemaEditor | ||||
| from django.db.migrations.state import StateApps | ||||
|  | ||||
|  | ||||
| def delete_messages_sent_to_stream_stat( | ||||
|     apps: StateApps, schema_editor: BaseDatabaseSchemaEditor | ||||
|     apps: StateApps, schema_editor: DatabaseSchemaEditor | ||||
| ) -> None: | ||||
|     UserCount = apps.get_model("analytics", "UserCount") | ||||
|     StreamCount = apps.get_model("analytics", "StreamCount") | ||||
| @@ -21,6 +21,7 @@ def delete_messages_sent_to_stream_stat( | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0008_add_count_indexes"), | ||||
|     ] | ||||
|   | ||||
| @@ -1,10 +1,10 @@ | ||||
| from django.db import migrations | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
| from django.db.backends.postgresql.schema import DatabaseSchemaEditor | ||||
| from django.db.migrations.state import StateApps | ||||
|  | ||||
|  | ||||
| def clear_message_sent_by_message_type_values( | ||||
|     apps: StateApps, schema_editor: BaseDatabaseSchemaEditor | ||||
|     apps: StateApps, schema_editor: DatabaseSchemaEditor | ||||
| ) -> None: | ||||
|     UserCount = apps.get_model("analytics", "UserCount") | ||||
|     StreamCount = apps.get_model("analytics", "StreamCount") | ||||
| @@ -21,6 +21,7 @@ def clear_message_sent_by_message_type_values( | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [("analytics", "0009_remove_messages_to_stream_stat")] | ||||
|  | ||||
|     operations = [ | ||||
|   | ||||
| @@ -1,9 +1,9 @@ | ||||
| from django.db import migrations | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
| from django.db.backends.postgresql.schema import DatabaseSchemaEditor | ||||
| from django.db.migrations.state import StateApps | ||||
|  | ||||
|  | ||||
| def clear_analytics_tables(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None: | ||||
| def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: | ||||
|     UserCount = apps.get_model("analytics", "UserCount") | ||||
|     StreamCount = apps.get_model("analytics", "StreamCount") | ||||
|     RealmCount = apps.get_model("analytics", "RealmCount") | ||||
| @@ -18,6 +18,7 @@ def clear_analytics_tables(apps: StateApps, schema_editor: BaseDatabaseSchemaEdi | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0010_clear_messages_sent_values"), | ||||
|     ] | ||||
|   | ||||
| @@ -5,6 +5,7 @@ from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0011_clear_analytics_tables"), | ||||
|     ] | ||||
|   | ||||
| @@ -4,6 +4,7 @@ from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0012_add_on_delete"), | ||||
|     ] | ||||
|   | ||||
| @@ -4,6 +4,7 @@ from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0013_remove_anomaly"), | ||||
|     ] | ||||
|   | ||||
| @@ -1,10 +1,10 @@ | ||||
| from django.db import migrations | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
| from django.db.backends.postgresql.schema import DatabaseSchemaEditor | ||||
| from django.db.migrations.state import StateApps | ||||
| from django.db.models import Count, Sum | ||||
|  | ||||
|  | ||||
| def clear_duplicate_counts(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None: | ||||
| def clear_duplicate_counts(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: | ||||
|     """This is a preparatory migration for our Analytics tables. | ||||
|  | ||||
|     The backstory is that Django's unique_together indexes do not properly | ||||
| @@ -55,6 +55,7 @@ def clear_duplicate_counts(apps: StateApps, schema_editor: BaseDatabaseSchemaEdi | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0014_remove_fillstate_last_modified"), | ||||
|     ] | ||||
|   | ||||
| @@ -4,6 +4,7 @@ from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0015_clear_duplicate_counts"), | ||||
|     ] | ||||
|   | ||||
| @@ -1,114 +0,0 @@ | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     dependencies = [ | ||||
|         ("analytics", "0016_unique_constraint_when_subgroup_null"), | ||||
|     ] | ||||
|  | ||||
|     # If the server was installed between 7.0 and 7.4 (or main between | ||||
|     # 2c20028aa451 and 7807bff52635), it contains indexes which (when | ||||
|     # running 7.5 or 7807bff52635 or higher) are never used, because | ||||
|     # they contain an improper cast | ||||
|     # (https://code.djangoproject.com/ticket/34840). | ||||
|     # | ||||
|     # We regenerate the indexes here, by dropping and re-creating | ||||
|     # them, so that we know that they are properly formed. | ||||
|     operations = [ | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="installationcount", | ||||
|             name="unique_installation_count", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="installationcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("property", "subgroup", "end_time"), | ||||
|                 name="unique_installation_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="installationcount", | ||||
|             name="unique_installation_count_null_subgroup", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="installationcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("property", "end_time"), | ||||
|                 name="unique_installation_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="realmcount", | ||||
|             name="unique_realm_count", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="realmcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("realm", "property", "subgroup", "end_time"), | ||||
|                 name="unique_realm_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="realmcount", | ||||
|             name="unique_realm_count_null_subgroup", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="realmcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("realm", "property", "end_time"), | ||||
|                 name="unique_realm_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="streamcount", | ||||
|             name="unique_stream_count", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="streamcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("stream", "property", "subgroup", "end_time"), | ||||
|                 name="unique_stream_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="streamcount", | ||||
|             name="unique_stream_count_null_subgroup", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="streamcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("stream", "property", "end_time"), | ||||
|                 name="unique_stream_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="usercount", | ||||
|             name="unique_user_count", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="usercount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("user", "property", "subgroup", "end_time"), | ||||
|                 name="unique_user_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.RemoveConstraint( | ||||
|             model_name="usercount", | ||||
|             name="unique_user_count_null_subgroup", | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="usercount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("user", "property", "end_time"), | ||||
|                 name="unique_user_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,28 +1,24 @@ | ||||
| # https://github.com/typeddjango/django-stubs/issues/1698 | ||||
| # mypy: disable-error-code="explicit-override" | ||||
|  | ||||
| import datetime | ||||
| from typing import Optional | ||||
|  | ||||
| from django.db import models | ||||
| from django.db.models import Q, UniqueConstraint | ||||
| from typing_extensions import override | ||||
|  | ||||
| from zerver.lib.timestamp import floor_to_day | ||||
| from zerver.models import Realm, Stream, UserProfile | ||||
|  | ||||
|  | ||||
| class FillState(models.Model): | ||||
|     property = models.CharField(max_length=40, unique=True) | ||||
|     end_time = models.DateTimeField() | ||||
|     property: str = models.CharField(max_length=40, unique=True) | ||||
|     end_time: datetime.datetime = models.DateTimeField() | ||||
|  | ||||
|     # Valid states are {DONE, STARTED} | ||||
|     DONE = 1 | ||||
|     STARTED = 2 | ||||
|     state = models.PositiveSmallIntegerField() | ||||
|     state: int = models.PositiveSmallIntegerField() | ||||
|  | ||||
|     @override | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.property} {self.end_time} {self.state}" | ||||
|         return f"<FillState: {self.property} {self.end_time} {self.state}>" | ||||
|  | ||||
|  | ||||
| # The earliest/starting end_time in FillState | ||||
| @@ -38,10 +34,10 @@ class BaseCount(models.Model): | ||||
|     # Note: When inheriting from BaseCount, you may want to rearrange | ||||
|     # the order of the columns in the migration to make sure they | ||||
|     # match how you'd like the table to be arranged. | ||||
|     property = models.CharField(max_length=32) | ||||
|     subgroup = models.CharField(max_length=16, null=True) | ||||
|     end_time = models.DateTimeField() | ||||
|     value = models.BigIntegerField() | ||||
|     property: str = models.CharField(max_length=32) | ||||
|     subgroup: Optional[str] = models.CharField(max_length=16, null=True) | ||||
|     end_time: datetime.datetime = models.DateTimeField() | ||||
|     value: int = models.BigIntegerField() | ||||
|  | ||||
|     class Meta: | ||||
|         abstract = True | ||||
| @@ -63,9 +59,8 @@ class InstallationCount(BaseCount): | ||||
|             ), | ||||
|         ] | ||||
|  | ||||
|     @override | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.property} {self.subgroup} {self.value}" | ||||
|         return f"<InstallationCount: {self.property} {self.subgroup} {self.value}>" | ||||
|  | ||||
|  | ||||
| class RealmCount(BaseCount): | ||||
| @@ -85,16 +80,10 @@ class RealmCount(BaseCount): | ||||
|                 name="unique_realm_count_null_subgroup", | ||||
|             ), | ||||
|         ] | ||||
|         indexes = [ | ||||
|             models.Index( | ||||
|                 fields=["property", "end_time"], | ||||
|                 name="analytics_realmcount_property_end_time_3b60396b_idx", | ||||
|             ) | ||||
|         ] | ||||
|         index_together = ["property", "end_time"] | ||||
|  | ||||
|     @override | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.realm!r} {self.property} {self.subgroup} {self.value}" | ||||
|         return f"<RealmCount: {self.realm} {self.property} {self.subgroup} {self.value}>" | ||||
|  | ||||
|  | ||||
| class UserCount(BaseCount): | ||||
| @@ -117,16 +106,10 @@ class UserCount(BaseCount): | ||||
|         ] | ||||
|         # This index dramatically improves the performance of | ||||
|         # aggregating from users to realms | ||||
|         indexes = [ | ||||
|             models.Index( | ||||
|                 fields=["property", "realm", "end_time"], | ||||
|                 name="analytics_usercount_property_realm_id_end_time_591dbec1_idx", | ||||
|             ) | ||||
|         ] | ||||
|         index_together = ["property", "realm", "end_time"] | ||||
|  | ||||
|     @override | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.user!r} {self.property} {self.subgroup} {self.value}" | ||||
|         return f"<UserCount: {self.user} {self.property} {self.subgroup} {self.value}>" | ||||
|  | ||||
|  | ||||
| class StreamCount(BaseCount): | ||||
| @@ -149,13 +132,9 @@ class StreamCount(BaseCount): | ||||
|         ] | ||||
|         # This index dramatically improves the performance of | ||||
|         # aggregating from streams to realms | ||||
|         indexes = [ | ||||
|             models.Index( | ||||
|                 fields=["property", "realm", "end_time"], | ||||
|                 name="analytics_streamcount_property_realm_id_end_time_155ae930_idx", | ||||
|             ) | ||||
|         ] | ||||
|         index_together = ["property", "realm", "end_time"] | ||||
|  | ||||
|     @override | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.stream!r} {self.property} {self.subgroup} {self.value} {self.id}" | ||||
|         return ( | ||||
|             f"<StreamCount: {self.stream} {self.property} {self.subgroup} {self.value} {self.id}>" | ||||
|         ) | ||||
|   | ||||
| @@ -1,53 +0,0 @@ | ||||
| from unittest import mock | ||||
|  | ||||
| from django.utils.timezone import now as timezone_now | ||||
|  | ||||
| from zerver.lib.test_classes import ZulipTestCase | ||||
| from zerver.models import Client, UserActivity, UserProfile | ||||
|  | ||||
|  | ||||
| class ActivityTest(ZulipTestCase): | ||||
|     @mock.patch("stripe.Customer.list", return_value=[]) | ||||
|     def test_activity(self, unused_mock: mock.Mock) -> None: | ||||
|         self.login("hamlet") | ||||
|         client, _ = Client.objects.get_or_create(name="website") | ||||
|         query = "/json/messages/flags" | ||||
|         last_visit = timezone_now() | ||||
|         count = 150 | ||||
|         for activity_user_profile in UserProfile.objects.all(): | ||||
|             UserActivity.objects.get_or_create( | ||||
|                 user_profile=activity_user_profile, | ||||
|                 client=client, | ||||
|                 query=query, | ||||
|                 count=count, | ||||
|                 last_visit=last_visit, | ||||
|             ) | ||||
|  | ||||
|         # Fails when not staff | ||||
|         result = self.client_get("/activity") | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         user_profile = self.example_user("hamlet") | ||||
|         user_profile.is_staff = True | ||||
|         user_profile.save(update_fields=["is_staff"]) | ||||
|  | ||||
|         with self.assert_database_query_count(11): | ||||
|             result = self.client_get("/activity") | ||||
|             self.assertEqual(result.status_code, 200) | ||||
|  | ||||
|         with self.assert_database_query_count(4): | ||||
|             result = self.client_get("/activity/remote") | ||||
|             self.assertEqual(result.status_code, 200) | ||||
|  | ||||
|         with self.assert_database_query_count(4): | ||||
|             result = self.client_get("/activity/integrations") | ||||
|             self.assertEqual(result.status_code, 200) | ||||
|  | ||||
|         with self.assert_database_query_count(8): | ||||
|             result = self.client_get("/realm_activity/zulip/") | ||||
|             self.assertEqual(result.status_code, 200) | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         with self.assert_database_query_count(5): | ||||
|             result = self.client_get(f"/user_activity/{iago.id}/") | ||||
|             self.assertEqual(result.status_code, 200) | ||||
| @@ -3,14 +3,11 @@ from typing import Any, Dict, List, Optional, Tuple, Type | ||||
| from unittest import mock | ||||
|  | ||||
| import orjson | ||||
| import time_machine | ||||
| from django.apps import apps | ||||
| from django.db import models | ||||
| from django.db.models import Sum | ||||
| from django.test import override_settings | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from psycopg2.sql import SQL, Literal | ||||
| from typing_extensions import override | ||||
|  | ||||
| from analytics.lib.counts import ( | ||||
|     COUNT_STATS, | ||||
| @@ -35,61 +32,39 @@ from analytics.models import ( | ||||
|     UserCount, | ||||
|     installation_epoch, | ||||
| ) | ||||
| from zerver.actions.create_realm import do_create_realm | ||||
| from zerver.actions.create_user import ( | ||||
|     do_activate_mirror_dummy_user, | ||||
| from zerver.lib.actions import ( | ||||
|     InvitationError, | ||||
|     do_activate_user, | ||||
|     do_create_realm, | ||||
|     do_create_user, | ||||
|     do_reactivate_user, | ||||
| ) | ||||
| from zerver.actions.invites import ( | ||||
|     do_deactivate_user, | ||||
|     do_invite_users, | ||||
|     do_resend_user_invite_email, | ||||
|     do_revoke_user_invite, | ||||
| ) | ||||
| from zerver.actions.message_flags import ( | ||||
|     do_mark_all_as_read, | ||||
|     do_mark_stream_messages_as_read, | ||||
|     do_reactivate_user, | ||||
|     do_resend_user_invite_email, | ||||
|     do_revoke_user_invite, | ||||
|     do_update_message_flags, | ||||
|     update_user_activity_interval, | ||||
| ) | ||||
| from zerver.actions.user_activity import update_user_activity_interval | ||||
| from zerver.actions.users import do_deactivate_user | ||||
| from zerver.lib.create_user import create_user | ||||
| from zerver.lib.exceptions import InvitationError | ||||
| from zerver.lib.push_notifications import ( | ||||
|     get_message_payload_apns, | ||||
|     get_message_payload_gcm, | ||||
|     hex_to_b64, | ||||
| ) | ||||
| from zerver.lib.test_classes import ZulipTestCase | ||||
| from zerver.lib.timestamp import TimeZoneNotUTCError, ceiling_to_day, floor_to_day | ||||
| from zerver.lib.timestamp import TimezoneNotUTCException, floor_to_day | ||||
| from zerver.lib.topic import DB_TOPIC_NAME | ||||
| from zerver.lib.utils import assert_is_not_none | ||||
| from zerver.models import ( | ||||
|     Client, | ||||
|     Huddle, | ||||
|     Message, | ||||
|     NotificationTriggers, | ||||
|     PreregistrationUser, | ||||
|     Realm, | ||||
|     RealmAuditLog, | ||||
|     Recipient, | ||||
|     Stream, | ||||
|     SystemGroups, | ||||
|     UserActivityInterval, | ||||
|     UserGroup, | ||||
|     UserProfile, | ||||
|     get_client, | ||||
|     get_user, | ||||
|     is_cross_realm_bot_email, | ||||
| ) | ||||
| from zilencer.models import ( | ||||
|     RemoteInstallationCount, | ||||
|     RemotePushDeviceToken, | ||||
|     RemoteRealm, | ||||
|     RemoteRealmCount, | ||||
|     RemoteZulipServer, | ||||
| ) | ||||
| from zilencer.views import get_last_id_from_server | ||||
|  | ||||
|  | ||||
| class AnalyticsTestCase(ZulipTestCase): | ||||
| @@ -99,21 +74,15 @@ class AnalyticsTestCase(ZulipTestCase): | ||||
|     TIME_ZERO = datetime(1988, 3, 14, tzinfo=timezone.utc) | ||||
|     TIME_LAST_HOUR = TIME_ZERO - HOUR | ||||
|  | ||||
|     @override | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.default_realm = do_create_realm( | ||||
|             string_id="realmtest", name="Realm Test", date_created=self.TIME_ZERO - 2 * self.DAY | ||||
|         ) | ||||
|         self.administrators_user_group = UserGroup.objects.get( | ||||
|             name=SystemGroups.ADMINISTRATORS, | ||||
|             realm=self.default_realm, | ||||
|             is_system_group=True, | ||||
|         ) | ||||
|  | ||||
|         # used to generate unique names in self.create_* | ||||
|         self.name_counter = 100 | ||||
|         # used as defaults in self.assert_table_count | ||||
|         # used as defaults in self.assertCountEquals | ||||
|         self.current_property: Optional[str] = None | ||||
|  | ||||
|     # Lightweight creation of users, streams, and messages | ||||
| @@ -151,7 +120,6 @@ class AnalyticsTestCase(ZulipTestCase): | ||||
|             "name": f"stream name {self.name_counter}", | ||||
|             "realm": self.default_realm, | ||||
|             "date_created": self.TIME_LAST_HOUR, | ||||
|             "can_remove_subscribers_group": self.administrators_user_group, | ||||
|         } | ||||
|         for key, value in defaults.items(): | ||||
|             kwargs[key] = kwargs.get(key, value) | ||||
| @@ -180,18 +148,13 @@ class AnalyticsTestCase(ZulipTestCase): | ||||
|             "content": "hi", | ||||
|             "date_sent": self.TIME_LAST_HOUR, | ||||
|             "sending_client": get_client("website"), | ||||
|             "realm_id": sender.realm_id, | ||||
|         } | ||||
|         # For simplicity, this helper doesn't support creating cross-realm messages | ||||
|         # since it'd require adding an additional realm argument. | ||||
|         assert not is_cross_realm_bot_email(sender.delivery_email) | ||||
|  | ||||
|         for key, value in defaults.items(): | ||||
|             kwargs[key] = kwargs.get(key, value) | ||||
|         return Message.objects.create(**kwargs) | ||||
|  | ||||
|     # kwargs should only ever be a UserProfile or Stream. | ||||
|     def assert_table_count( | ||||
|     def assertCountEquals( | ||||
|         self, | ||||
|         table: Type[BaseCount], | ||||
|         value: int, | ||||
| @@ -203,9 +166,7 @@ class AnalyticsTestCase(ZulipTestCase): | ||||
|     ) -> None: | ||||
|         if property is None: | ||||
|             property = self.current_property | ||||
|         queryset = table._default_manager.filter(property=property, end_time=end_time).filter( | ||||
|             **kwargs | ||||
|         ) | ||||
|         queryset = table.objects.filter(property=property, end_time=end_time).filter(**kwargs) | ||||
|         if table is not InstallationCount: | ||||
|             if realm is None: | ||||
|                 realm = self.default_realm | ||||
| @@ -250,18 +211,16 @@ class AnalyticsTestCase(ZulipTestCase): | ||||
|                 kwargs[arg_keys[i]] = values[i] | ||||
|             for key, value in defaults.items(): | ||||
|                 kwargs[key] = kwargs.get(key, value) | ||||
|             if ( | ||||
|                 table not in [InstallationCount, RemoteInstallationCount, RemoteRealmCount] | ||||
|                 and "realm" not in kwargs | ||||
|             ): | ||||
|                 if "user" in kwargs: | ||||
|                     kwargs["realm"] = kwargs["user"].realm | ||||
|                 elif "stream" in kwargs: | ||||
|                     kwargs["realm"] = kwargs["stream"].realm | ||||
|                 else: | ||||
|                     kwargs["realm"] = self.default_realm | ||||
|             self.assertEqual(table._default_manager.filter(**kwargs).count(), 1) | ||||
|         self.assert_length(arg_values, table._default_manager.count()) | ||||
|             if table is not InstallationCount: | ||||
|                 if "realm" not in kwargs: | ||||
|                     if "user" in kwargs: | ||||
|                         kwargs["realm"] = kwargs["user"].realm | ||||
|                     elif "stream" in kwargs: | ||||
|                         kwargs["realm"] = kwargs["stream"].realm | ||||
|                     else: | ||||
|                         kwargs["realm"] = self.default_realm | ||||
|             self.assertEqual(table.objects.filter(**kwargs).count(), 1) | ||||
|         self.assertEqual(table.objects.count(), len(arg_values)) | ||||
|  | ||||
|  | ||||
| class TestProcessCountStat(AnalyticsTestCase): | ||||
| @@ -281,7 +240,6 @@ class TestProcessCountStat(AnalyticsTestCase): | ||||
|         self, stat: CountStat, end_time: datetime, state: int = FillState.DONE | ||||
|     ) -> None: | ||||
|         fill_state = FillState.objects.filter(property=stat.property).first() | ||||
|         assert fill_state is not None | ||||
|         self.assertEqual(fill_state.end_time, end_time) | ||||
|         self.assertEqual(fill_state.state, state) | ||||
|  | ||||
| @@ -315,7 +273,7 @@ class TestProcessCountStat(AnalyticsTestCase): | ||||
|         stat = self.make_dummy_count_stat("test stat") | ||||
|         with self.assertRaises(ValueError): | ||||
|             process_count_stat(stat, installation_epoch() + 65 * self.MINUTE) | ||||
|         with self.assertRaises(TimeZoneNotUTCError): | ||||
|         with self.assertRaises(TimezoneNotUTCException): | ||||
|             process_count_stat(stat, installation_epoch().replace(tzinfo=None)) | ||||
|  | ||||
|     # This tests the LoggingCountStat branch of the code in do_delete_counts_at_hour. | ||||
| @@ -479,7 +437,6 @@ class TestProcessCountStat(AnalyticsTestCase): | ||||
|  | ||||
|  | ||||
| class TestCountStats(AnalyticsTestCase): | ||||
|     @override | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         # This tests two things for each of the queries/CountStats: Handling | ||||
| @@ -684,7 +641,7 @@ class TestCountStats(AnalyticsTestCase): | ||||
|         self.create_message(user1, recipient_huddle1) | ||||
|         self.create_message(user2, recipient_huddle2) | ||||
|  | ||||
|         # direct messages | ||||
|         # private messages | ||||
|         recipient_user1 = Recipient.objects.get(type_id=user1.id, type=Recipient.PERSONAL) | ||||
|         recipient_user2 = Recipient.objects.get(type_id=user2.id, type=Recipient.PERSONAL) | ||||
|         recipient_user3 = Recipient.objects.get(type_id=user3.id, type=Recipient.PERSONAL) | ||||
| @@ -800,9 +757,9 @@ class TestCountStats(AnalyticsTestCase): | ||||
|  | ||||
|         do_fill_count_stat_at_hour(stat, self.TIME_ZERO) | ||||
|  | ||||
|         self.assert_table_count(UserCount, 1, subgroup="private_message") | ||||
|         self.assert_table_count(UserCount, 1, subgroup="huddle_message") | ||||
|         self.assert_table_count(UserCount, 1, subgroup="public_stream") | ||||
|         self.assertCountEquals(UserCount, 1, subgroup="private_message") | ||||
|         self.assertCountEquals(UserCount, 1, subgroup="huddle_message") | ||||
|         self.assertCountEquals(UserCount, 1, subgroup="public_stream") | ||||
|  | ||||
|     def test_messages_sent_by_client(self) -> None: | ||||
|         stat = COUNT_STATS["messages_sent:client:day"] | ||||
| @@ -1372,7 +1329,7 @@ class TestLoggingCountStats(AnalyticsTestCase): | ||||
|                 "value__sum" | ||||
|             ], | ||||
|         ) | ||||
|         do_activate_mirror_dummy_user(user, acting_user=None) | ||||
|         do_activate_user(user, acting_user=None) | ||||
|         self.assertEqual( | ||||
|             1, | ||||
|             RealmCount.objects.filter(property=property, subgroup=False).aggregate(Sum("value"))[ | ||||
| @@ -1394,249 +1351,6 @@ class TestLoggingCountStats(AnalyticsTestCase): | ||||
|             ], | ||||
|         ) | ||||
|  | ||||
|     @override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com") | ||||
|     def test_mobile_pushes_received_count(self) -> None: | ||||
|         self.server_uuid = "6cde5f7a-1f7e-4978-9716-49f69ebfc9fe" | ||||
|         self.server = RemoteZulipServer.objects.create( | ||||
|             uuid=self.server_uuid, | ||||
|             api_key="magic_secret_api_key", | ||||
|             hostname="demo.example.com", | ||||
|             last_updated=timezone_now(), | ||||
|         ) | ||||
|  | ||||
|         hamlet = self.example_user("hamlet") | ||||
|         token = "aaaa" | ||||
|  | ||||
|         RemotePushDeviceToken.objects.create( | ||||
|             kind=RemotePushDeviceToken.GCM, | ||||
|             token=hex_to_b64(token), | ||||
|             user_uuid=(hamlet.uuid), | ||||
|             server=self.server, | ||||
|         ) | ||||
|         RemotePushDeviceToken.objects.create( | ||||
|             kind=RemotePushDeviceToken.GCM, | ||||
|             token=hex_to_b64(token + "aa"), | ||||
|             user_uuid=(hamlet.uuid), | ||||
|             server=self.server, | ||||
|         ) | ||||
|         RemotePushDeviceToken.objects.create( | ||||
|             kind=RemotePushDeviceToken.APNS, | ||||
|             token=hex_to_b64(token), | ||||
|             user_uuid=str(hamlet.uuid), | ||||
|             server=self.server, | ||||
|         ) | ||||
|  | ||||
|         message = Message( | ||||
|             sender=hamlet, | ||||
|             recipient=self.example_user("othello").recipient, | ||||
|             realm_id=hamlet.realm_id, | ||||
|             content="This is test content", | ||||
|             rendered_content="This is test content", | ||||
|             date_sent=timezone_now(), | ||||
|             sending_client=get_client("test"), | ||||
|         ) | ||||
|         message.set_topic_name("Test topic") | ||||
|         message.save() | ||||
|         gcm_payload, gcm_options = get_message_payload_gcm(hamlet, message) | ||||
|         apns_payload = get_message_payload_apns( | ||||
|             hamlet, message, NotificationTriggers.DIRECT_MESSAGE | ||||
|         ) | ||||
|  | ||||
|         # First we'll make a request without providing realm_uuid. That means | ||||
|         # the bouncer can't increment the RemoteRealmCount stat, and only | ||||
|         # RemoteInstallationCount will be incremented. | ||||
|         payload = { | ||||
|             "user_id": hamlet.id, | ||||
|             "user_uuid": str(hamlet.uuid), | ||||
|             "gcm_payload": gcm_payload, | ||||
|             "apns_payload": apns_payload, | ||||
|             "gcm_options": gcm_options, | ||||
|         } | ||||
|         now = timezone_now() | ||||
|         with time_machine.travel(now, tick=False), mock.patch( | ||||
|             "zilencer.views.send_android_push_notification", return_value=1 | ||||
|         ), mock.patch( | ||||
|             "zilencer.views.send_apple_push_notification", return_value=1 | ||||
|         ), self.assertLogs( | ||||
|             "zilencer.views", level="INFO" | ||||
|         ): | ||||
|             result = self.uuid_post( | ||||
|                 self.server_uuid, | ||||
|                 "/api/v1/remotes/push/notify", | ||||
|                 payload, | ||||
|                 content_type="application/json", | ||||
|                 subdomain="", | ||||
|             ) | ||||
|             self.assert_json_success(result) | ||||
|  | ||||
|         # There are 3 devices we created for the user: | ||||
|         # 1. The mobile_pushes_received increment should match that number. | ||||
|         # 2. mobile_pushes_forwarded only counts successful deliveries, and we've set up | ||||
|         #    the mocks above to simulate 1 successful android and 1 successful apple delivery. | ||||
|         #    Thus the increment should be just 2. | ||||
|         self.assertTableState( | ||||
|             RemoteInstallationCount, | ||||
|             ["property", "value", "subgroup", "server", "remote_id", "end_time"], | ||||
|             [ | ||||
|                 [ | ||||
|                     "mobile_pushes_received::day", | ||||
|                     3, | ||||
|                     None, | ||||
|                     self.server, | ||||
|                     None, | ||||
|                     ceiling_to_day(now), | ||||
|                 ], | ||||
|                 [ | ||||
|                     "mobile_pushes_forwarded::day", | ||||
|                     2, | ||||
|                     None, | ||||
|                     self.server, | ||||
|                     None, | ||||
|                     ceiling_to_day(now), | ||||
|                 ], | ||||
|             ], | ||||
|         ) | ||||
|         self.assertFalse( | ||||
|             RemoteRealmCount.objects.filter(property="mobile_pushes_received::day").exists() | ||||
|         ) | ||||
|         self.assertFalse( | ||||
|             RemoteRealmCount.objects.filter(property="mobile_pushes_forwarded::day").exists() | ||||
|         ) | ||||
|  | ||||
|         # Now provide the realm_uuid. However, the RemoteRealm record doesn't exist yet, so it'll | ||||
|         # still be ignored. | ||||
|         payload = { | ||||
|             "user_id": hamlet.id, | ||||
|             "user_uuid": str(hamlet.uuid), | ||||
|             "realm_uuid": str(hamlet.realm.uuid), | ||||
|             "gcm_payload": gcm_payload, | ||||
|             "apns_payload": apns_payload, | ||||
|             "gcm_options": gcm_options, | ||||
|         } | ||||
|         with time_machine.travel(now, tick=False), mock.patch( | ||||
|             "zilencer.views.send_android_push_notification", return_value=1 | ||||
|         ), mock.patch( | ||||
|             "zilencer.views.send_apple_push_notification", return_value=1 | ||||
|         ), self.assertLogs( | ||||
|             "zilencer.views", level="INFO" | ||||
|         ): | ||||
|             result = self.uuid_post( | ||||
|                 self.server_uuid, | ||||
|                 "/api/v1/remotes/push/notify", | ||||
|                 payload, | ||||
|                 content_type="application/json", | ||||
|                 subdomain="", | ||||
|             ) | ||||
|             self.assert_json_success(result) | ||||
|  | ||||
|         # The RemoteInstallationCount records get incremented again, but the RemoteRealmCount | ||||
|         # remains ignored due to missing RemoteRealm record. | ||||
|         self.assertTableState( | ||||
|             RemoteInstallationCount, | ||||
|             ["property", "value", "subgroup", "server", "remote_id", "end_time"], | ||||
|             [ | ||||
|                 [ | ||||
|                     "mobile_pushes_received::day", | ||||
|                     6, | ||||
|                     None, | ||||
|                     self.server, | ||||
|                     None, | ||||
|                     ceiling_to_day(now), | ||||
|                 ], | ||||
|                 [ | ||||
|                     "mobile_pushes_forwarded::day", | ||||
|                     4, | ||||
|                     None, | ||||
|                     self.server, | ||||
|                     None, | ||||
|                     ceiling_to_day(now), | ||||
|                 ], | ||||
|             ], | ||||
|         ) | ||||
|         self.assertFalse( | ||||
|             RemoteRealmCount.objects.filter(property="mobile_pushes_received::day").exists() | ||||
|         ) | ||||
|         self.assertFalse( | ||||
|             RemoteRealmCount.objects.filter(property="mobile_pushes_forwarded::day").exists() | ||||
|         ) | ||||
|  | ||||
|         # Create the RemoteRealm registration and repeat the above. This time RemoteRealmCount | ||||
|         # stats should be collected. | ||||
|         realm = hamlet.realm | ||||
|         remote_realm = RemoteRealm.objects.create( | ||||
|             server=self.server, | ||||
|             uuid=realm.uuid, | ||||
|             uuid_owner_secret=realm.uuid_owner_secret, | ||||
|             host=realm.host, | ||||
|             realm_deactivated=realm.deactivated, | ||||
|             realm_date_created=realm.date_created, | ||||
|         ) | ||||
|  | ||||
|         with time_machine.travel(now, tick=False), mock.patch( | ||||
|             "zilencer.views.send_android_push_notification", return_value=1 | ||||
|         ), mock.patch( | ||||
|             "zilencer.views.send_apple_push_notification", return_value=1 | ||||
|         ), self.assertLogs( | ||||
|             "zilencer.views", level="INFO" | ||||
|         ): | ||||
|             result = self.uuid_post( | ||||
|                 self.server_uuid, | ||||
|                 "/api/v1/remotes/push/notify", | ||||
|                 payload, | ||||
|                 content_type="application/json", | ||||
|                 subdomain="", | ||||
|             ) | ||||
|             self.assert_json_success(result) | ||||
|  | ||||
|         # The RemoteInstallationCount records get incremented again, and the RemoteRealmCount | ||||
|         # gets collected. | ||||
|         self.assertTableState( | ||||
|             RemoteInstallationCount, | ||||
|             ["property", "value", "subgroup", "server", "remote_id", "end_time"], | ||||
|             [ | ||||
|                 [ | ||||
|                     "mobile_pushes_received::day", | ||||
|                     9, | ||||
|                     None, | ||||
|                     self.server, | ||||
|                     None, | ||||
|                     ceiling_to_day(now), | ||||
|                 ], | ||||
|                 [ | ||||
|                     "mobile_pushes_forwarded::day", | ||||
|                     6, | ||||
|                     None, | ||||
|                     self.server, | ||||
|                     None, | ||||
|                     ceiling_to_day(now), | ||||
|                 ], | ||||
|             ], | ||||
|         ) | ||||
|         self.assertTableState( | ||||
|             RemoteRealmCount, | ||||
|             ["property", "value", "subgroup", "server", "remote_realm", "remote_id", "end_time"], | ||||
|             [ | ||||
|                 [ | ||||
|                     "mobile_pushes_received::day", | ||||
|                     3, | ||||
|                     None, | ||||
|                     self.server, | ||||
|                     remote_realm, | ||||
|                     None, | ||||
|                     ceiling_to_day(now), | ||||
|                 ], | ||||
|                 [ | ||||
|                     "mobile_pushes_forwarded::day", | ||||
|                     2, | ||||
|                     None, | ||||
|                     self.server, | ||||
|                     remote_realm, | ||||
|                     None, | ||||
|                     ceiling_to_day(now), | ||||
|                 ], | ||||
|             ], | ||||
|         ) | ||||
|  | ||||
|     def test_invites_sent(self) -> None: | ||||
|         property = "invites_sent::day" | ||||
|  | ||||
| @@ -1650,61 +1364,34 @@ class TestLoggingCountStats(AnalyticsTestCase): | ||||
|  | ||||
|         user = self.create_user(email="first@domain.tld") | ||||
|         stream, _ = self.create_stream_with_recipient() | ||||
|  | ||||
|         invite_expires_in_minutes = 2 * 24 * 60 | ||||
|         with mock.patch("zerver.actions.invites.too_many_recent_realm_invites", return_value=False): | ||||
|             do_invite_users( | ||||
|                 user, | ||||
|                 ["user1@domain.tld", "user2@domain.tld"], | ||||
|                 [stream], | ||||
|                 invite_expires_in_minutes=invite_expires_in_minutes, | ||||
|             ) | ||||
|         do_invite_users(user, ["user1@domain.tld", "user2@domain.tld"], [stream]) | ||||
|         assertInviteCountEquals(2) | ||||
|  | ||||
|         # We currently send emails when re-inviting users that haven't | ||||
|         # turned into accounts, so count them towards the total | ||||
|         with mock.patch("zerver.actions.invites.too_many_recent_realm_invites", return_value=False): | ||||
|             do_invite_users( | ||||
|                 user, | ||||
|                 ["user1@domain.tld", "user2@domain.tld"], | ||||
|                 [stream], | ||||
|                 invite_expires_in_minutes=invite_expires_in_minutes, | ||||
|             ) | ||||
|         do_invite_users(user, ["user1@domain.tld", "user2@domain.tld"], [stream]) | ||||
|         assertInviteCountEquals(4) | ||||
|  | ||||
|         # Test mix of good and malformed invite emails | ||||
|         with self.assertRaises(InvitationError), mock.patch( | ||||
|             "zerver.actions.invites.too_many_recent_realm_invites", return_value=False | ||||
|         ): | ||||
|             do_invite_users( | ||||
|                 user, | ||||
|                 ["user3@domain.tld", "malformed"], | ||||
|                 [stream], | ||||
|                 invite_expires_in_minutes=invite_expires_in_minutes, | ||||
|             ) | ||||
|         try: | ||||
|             do_invite_users(user, ["user3@domain.tld", "malformed"], [stream]) | ||||
|         except InvitationError: | ||||
|             pass | ||||
|         assertInviteCountEquals(4) | ||||
|  | ||||
|         # Test inviting existing users | ||||
|         with self.assertRaises(InvitationError), mock.patch( | ||||
|             "zerver.actions.invites.too_many_recent_realm_invites", return_value=False | ||||
|         ): | ||||
|             do_invite_users( | ||||
|                 user, | ||||
|                 ["first@domain.tld", "user4@domain.tld"], | ||||
|                 [stream], | ||||
|                 invite_expires_in_minutes=invite_expires_in_minutes, | ||||
|             ) | ||||
|         try: | ||||
|             do_invite_users(user, ["first@domain.tld", "user4@domain.tld"], [stream]) | ||||
|         except InvitationError: | ||||
|             pass | ||||
|         assertInviteCountEquals(5) | ||||
|  | ||||
|         # Revoking invite should not give you credit | ||||
|         do_revoke_user_invite( | ||||
|             assert_is_not_none(PreregistrationUser.objects.filter(realm=user.realm).first()) | ||||
|         ) | ||||
|         do_revoke_user_invite(PreregistrationUser.objects.filter(realm=user.realm).first()) | ||||
|         assertInviteCountEquals(5) | ||||
|  | ||||
|         # Resending invite should cost you | ||||
|         with mock.patch("zerver.actions.invites.too_many_recent_realm_invites", return_value=False): | ||||
|             do_resend_user_invite_email(assert_is_not_none(PreregistrationUser.objects.first())) | ||||
|         do_resend_user_invite_email(PreregistrationUser.objects.first()) | ||||
|         assertInviteCountEquals(6) | ||||
|  | ||||
|     def test_messages_read_hour(self) -> None: | ||||
| @@ -1718,7 +1405,8 @@ class TestLoggingCountStats(AnalyticsTestCase): | ||||
|         self.subscribe(user2, stream.name) | ||||
|  | ||||
|         self.send_personal_message(user1, user2) | ||||
|         do_mark_all_as_read(user2) | ||||
|         client = get_client("website") | ||||
|         do_mark_all_as_read(user2, client) | ||||
|         self.assertEqual( | ||||
|             1, | ||||
|             UserCount.objects.filter(property=read_count_property).aggregate(Sum("value"))[ | ||||
| @@ -1734,7 +1422,7 @@ class TestLoggingCountStats(AnalyticsTestCase): | ||||
|  | ||||
|         self.send_stream_message(user1, stream.name) | ||||
|         self.send_stream_message(user1, stream.name) | ||||
|         do_mark_stream_messages_as_read(user2, assert_is_not_none(stream.recipient_id)) | ||||
|         do_mark_stream_messages_as_read(user2, stream.recipient_id) | ||||
|         self.assertEqual( | ||||
|             3, | ||||
|             UserCount.objects.filter(property=read_count_property).aggregate(Sum("value"))[ | ||||
| @@ -1749,7 +1437,7 @@ class TestLoggingCountStats(AnalyticsTestCase): | ||||
|         ) | ||||
|  | ||||
|         message = self.send_stream_message(user2, stream.name) | ||||
|         do_update_message_flags(user1, "add", "read", [message]) | ||||
|         do_update_message_flags(user1, client, "add", "read", [message]) | ||||
|         self.assertEqual( | ||||
|             4, | ||||
|             UserCount.objects.filter(property=read_count_property).aggregate(Sum("value"))[ | ||||
| @@ -1777,12 +1465,12 @@ class TestDeleteStats(AnalyticsTestCase): | ||||
|         FillState.objects.create(property="test", end_time=self.TIME_ZERO, state=FillState.DONE) | ||||
|  | ||||
|         analytics = apps.get_app_config("analytics") | ||||
|         for table in analytics.models.values(): | ||||
|             self.assertTrue(table._default_manager.exists()) | ||||
|         for table in list(analytics.models.values()): | ||||
|             self.assertTrue(table.objects.exists()) | ||||
|  | ||||
|         do_drop_all_analytics_tables() | ||||
|         for table in analytics.models.values(): | ||||
|             self.assertFalse(table._default_manager.exists()) | ||||
|         for table in list(analytics.models.values()): | ||||
|             self.assertFalse(table.objects.exists()) | ||||
|  | ||||
|     def test_do_drop_single_stat(self) -> None: | ||||
|         user = self.create_user() | ||||
| @@ -1801,17 +1489,16 @@ class TestDeleteStats(AnalyticsTestCase): | ||||
|         FillState.objects.create(property="to_save", end_time=self.TIME_ZERO, state=FillState.DONE) | ||||
|  | ||||
|         analytics = apps.get_app_config("analytics") | ||||
|         for table in analytics.models.values(): | ||||
|             self.assertTrue(table._default_manager.exists()) | ||||
|         for table in list(analytics.models.values()): | ||||
|             self.assertTrue(table.objects.exists()) | ||||
|  | ||||
|         do_drop_single_stat("to_delete") | ||||
|         for table in analytics.models.values(): | ||||
|             self.assertFalse(table._default_manager.filter(property="to_delete").exists()) | ||||
|             self.assertTrue(table._default_manager.filter(property="to_save").exists()) | ||||
|         for table in list(analytics.models.values()): | ||||
|             self.assertFalse(table.objects.filter(property="to_delete").exists()) | ||||
|             self.assertTrue(table.objects.filter(property="to_save").exists()) | ||||
|  | ||||
|  | ||||
| class TestActiveUsersAudit(AnalyticsTestCase): | ||||
|     @override | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.user = self.create_user() | ||||
| @@ -1976,7 +1663,7 @@ class TestActiveUsersAudit(AnalyticsTestCase): | ||||
|             "email4", "password", self.default_realm, "full_name", acting_user=None | ||||
|         ) | ||||
|         do_deactivate_user(user2, acting_user=None) | ||||
|         do_activate_mirror_dummy_user(user3, acting_user=None) | ||||
|         do_activate_user(user3, acting_user=None) | ||||
|         do_reactivate_user(user4, acting_user=None) | ||||
|         end_time = floor_to_day(timezone_now()) + self.DAY | ||||
|         do_fill_count_stat_at_hour(self.stat, end_time) | ||||
| @@ -1994,7 +1681,6 @@ class TestActiveUsersAudit(AnalyticsTestCase): | ||||
|  | ||||
|  | ||||
| class TestRealmActiveHumans(AnalyticsTestCase): | ||||
|     @override | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.stat = COUNT_STATS["realm_active_humans::day"] | ||||
| @@ -2114,26 +1800,3 @@ class TestRealmActiveHumans(AnalyticsTestCase): | ||||
|             1, | ||||
|         ) | ||||
|         self.assertEqual(RealmCount.objects.filter(property="realm_active_humans::day").count(), 1) | ||||
|  | ||||
|  | ||||
| class GetLastIdFromServerTest(ZulipTestCase): | ||||
|     def test_get_last_id_from_server_ignores_null(self) -> None: | ||||
|         """ | ||||
|         Verifies that get_last_id_from_server ignores null remote_ids, since this goes | ||||
|         against the default Postgres ordering behavior, which treats nulls as the largest value. | ||||
|         """ | ||||
|         self.server_uuid = "6cde5f7a-1f7e-4978-9716-49f69ebfc9fe" | ||||
|         self.server = RemoteZulipServer.objects.create( | ||||
|             uuid=self.server_uuid, | ||||
|             api_key="magic_secret_api_key", | ||||
|             hostname="demo.example.com", | ||||
|             last_updated=timezone_now(), | ||||
|         ) | ||||
|         first = RemoteInstallationCount.objects.create( | ||||
|             end_time=timezone_now(), server=self.server, property="test", value=1, remote_id=1 | ||||
|         ) | ||||
|         RemoteInstallationCount.objects.create( | ||||
|             end_time=timezone_now(), server=self.server, property="test2", value=1, remote_id=None | ||||
|         ) | ||||
|         result = get_last_id_from_server(self.server, RemoteInstallationCount) | ||||
|         self.assertEqual(result, first.remote_id) | ||||
|   | ||||
| @@ -22,7 +22,7 @@ class TestFixtures(ZulipTestCase): | ||||
|             frequency=CountStat.HOUR, | ||||
|         ) | ||||
|         # test we get an array of the right length with frequency=CountStat.HOUR | ||||
|         self.assert_length(data, 24) | ||||
|         self.assertEqual(len(data), 24) | ||||
|         # test that growth doesn't affect the first data point | ||||
|         self.assertEqual(data[0], 2000) | ||||
|         # test that the last data point is growth times what it otherwise would be | ||||
|   | ||||
| @@ -1,686 +0,0 @@ | ||||
| from datetime import datetime, timedelta, timezone | ||||
| from typing import List, Optional | ||||
|  | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from typing_extensions import override | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS, CountStat | ||||
| from analytics.lib.time_utils import time_range | ||||
| from analytics.models import FillState, RealmCount, StreamCount, UserCount | ||||
| from analytics.views.stats import rewrite_client_arrays, sort_by_totals, sort_client_labels | ||||
| from zerver.lib.test_classes import ZulipTestCase | ||||
| from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, datetime_to_timestamp | ||||
| from zerver.models import Client, get_realm | ||||
|  | ||||
|  | ||||
| class TestStatsEndpoint(ZulipTestCase): | ||||
|     def test_stats(self) -> None: | ||||
|         self.user = self.example_user("hamlet") | ||||
|         self.login_user(self.user) | ||||
|         result = self.client_get("/stats") | ||||
|         self.assertEqual(result.status_code, 200) | ||||
|         # Check that we get something back | ||||
|         self.assert_in_response("Zulip analytics for", result) | ||||
|  | ||||
|     def test_guest_user_cant_access_stats(self) -> None: | ||||
|         self.user = self.example_user("polonius") | ||||
|         self.login_user(self.user) | ||||
|         result = self.client_get("/stats") | ||||
|         self.assert_json_error(result, "Not allowed for guest users", 400) | ||||
|  | ||||
|         result = self.client_get("/json/analytics/chart_data") | ||||
|         self.assert_json_error(result, "Not allowed for guest users", 400) | ||||
|  | ||||
|     def test_stats_for_realm(self) -> None: | ||||
|         user = self.example_user("hamlet") | ||||
|         self.login_user(user) | ||||
|  | ||||
|         result = self.client_get("/stats/realm/zulip/") | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         result = self.client_get("/stats/realm/not_existing_realm/") | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         user = self.example_user("hamlet") | ||||
|         user.is_staff = True | ||||
|         user.save(update_fields=["is_staff"]) | ||||
|  | ||||
|         result = self.client_get("/stats/realm/not_existing_realm/") | ||||
|         self.assertEqual(result.status_code, 404) | ||||
|  | ||||
|         result = self.client_get("/stats/realm/zulip/") | ||||
|         self.assertEqual(result.status_code, 200) | ||||
|         self.assert_in_response("Zulip analytics for", result) | ||||
|  | ||||
|     def test_stats_for_installation(self) -> None: | ||||
|         user = self.example_user("hamlet") | ||||
|         self.login_user(user) | ||||
|  | ||||
|         result = self.client_get("/stats/installation") | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         user = self.example_user("hamlet") | ||||
|         user.is_staff = True | ||||
|         user.save(update_fields=["is_staff"]) | ||||
|  | ||||
|         result = self.client_get("/stats/installation") | ||||
|         self.assertEqual(result.status_code, 200) | ||||
|         self.assert_in_response("Zulip analytics for", result) | ||||
|  | ||||
|  | ||||
| class TestGetChartData(ZulipTestCase): | ||||
|     @override | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.realm = get_realm("zulip") | ||||
|         self.user = self.example_user("hamlet") | ||||
|         self.stream_id = self.get_stream_id(self.get_streams(self.user)[0]) | ||||
|         self.login_user(self.user) | ||||
|         self.end_times_hour = [ | ||||
|             ceiling_to_hour(self.realm.date_created) + timedelta(hours=i) for i in range(4) | ||||
|         ] | ||||
|         self.end_times_day = [ | ||||
|             ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(4) | ||||
|         ] | ||||
|  | ||||
|     def data(self, i: int) -> List[int]: | ||||
|         return [0, 0, i, 0] | ||||
|  | ||||
|     def insert_data( | ||||
|         self, stat: CountStat, realm_subgroups: List[Optional[str]], user_subgroups: List[str] | ||||
|     ) -> None: | ||||
|         if stat.frequency == CountStat.HOUR: | ||||
|             insert_time = self.end_times_hour[2] | ||||
|             fill_time = self.end_times_hour[-1] | ||||
|         if stat.frequency == CountStat.DAY: | ||||
|             insert_time = self.end_times_day[2] | ||||
|             fill_time = self.end_times_day[-1] | ||||
|  | ||||
|         RealmCount.objects.bulk_create( | ||||
|             RealmCount( | ||||
|                 property=stat.property, | ||||
|                 subgroup=subgroup, | ||||
|                 end_time=insert_time, | ||||
|                 value=100 + i, | ||||
|                 realm=self.realm, | ||||
|             ) | ||||
|             for i, subgroup in enumerate(realm_subgroups) | ||||
|         ) | ||||
|         UserCount.objects.bulk_create( | ||||
|             UserCount( | ||||
|                 property=stat.property, | ||||
|                 subgroup=subgroup, | ||||
|                 end_time=insert_time, | ||||
|                 value=200 + i, | ||||
|                 realm=self.realm, | ||||
|                 user=self.user, | ||||
|             ) | ||||
|             for i, subgroup in enumerate(user_subgroups) | ||||
|         ) | ||||
|         StreamCount.objects.bulk_create( | ||||
|             StreamCount( | ||||
|                 property=stat.property, | ||||
|                 subgroup=subgroup, | ||||
|                 end_time=insert_time, | ||||
|                 value=100 + i, | ||||
|                 stream_id=self.stream_id, | ||||
|                 realm=self.realm, | ||||
|             ) | ||||
|             for i, subgroup in enumerate(realm_subgroups) | ||||
|         ) | ||||
|         FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE) | ||||
|  | ||||
|     def test_number_of_humans(self) -> None: | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["1day_actives::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] | ||||
|         self.insert_data(stat, ["false"], []) | ||||
|         result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"}) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|                 "frequency": CountStat.DAY, | ||||
|                 "everyone": { | ||||
|                     "_1day": self.data(100), | ||||
|                     "_15day": self.data(100), | ||||
|                     "all_time": self.data(100), | ||||
|                 }, | ||||
|                 "display_order": None, | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_sent_over_time(self) -> None: | ||||
|         stat = COUNT_STATS["messages_sent:is_bot:hour"] | ||||
|         self.insert_data(stat, ["true", "false"], ["false"]) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour], | ||||
|                 "frequency": CountStat.HOUR, | ||||
|                 "everyone": {"bot": self.data(100), "human": self.data(101)}, | ||||
|                 "user": {"bot": self.data(0), "human": self.data(200)}, | ||||
|                 "display_order": None, | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_sent_by_message_type(self) -> None: | ||||
|         stat = COUNT_STATS["messages_sent:message_type:day"] | ||||
|         self.insert_data( | ||||
|             stat, ["public_stream", "private_message"], ["public_stream", "private_stream"] | ||||
|         ) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|                 "frequency": CountStat.DAY, | ||||
|                 "everyone": { | ||||
|                     "Public streams": self.data(100), | ||||
|                     "Private streams": self.data(0), | ||||
|                     "Direct messages": self.data(101), | ||||
|                     "Group direct messages": self.data(0), | ||||
|                 }, | ||||
|                 "user": { | ||||
|                     "Public streams": self.data(200), | ||||
|                     "Private streams": self.data(201), | ||||
|                     "Direct messages": self.data(0), | ||||
|                     "Group direct messages": self.data(0), | ||||
|                 }, | ||||
|                 "display_order": [ | ||||
|                     "Direct messages", | ||||
|                     "Public streams", | ||||
|                     "Private streams", | ||||
|                     "Group direct messages", | ||||
|                 ], | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_sent_by_client(self) -> None: | ||||
|         stat = COUNT_STATS["messages_sent:client:day"] | ||||
|         client1 = Client.objects.create(name="client 1") | ||||
|         client2 = Client.objects.create(name="client 2") | ||||
|         client3 = Client.objects.create(name="client 3") | ||||
|         client4 = Client.objects.create(name="client 4") | ||||
|         self.insert_data( | ||||
|             stat, | ||||
|             [str(client4.id), str(client3.id), str(client2.id)], | ||||
|             [str(client3.id), str(client1.id)], | ||||
|         ) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|                 "frequency": CountStat.DAY, | ||||
|                 "everyone": { | ||||
|                     "client 4": self.data(100), | ||||
|                     "client 3": self.data(101), | ||||
|                     "client 2": self.data(102), | ||||
|                 }, | ||||
|                 "user": {"client 3": self.data(200), "client 1": self.data(201)}, | ||||
|                 "display_order": ["client 1", "client 2", "client 3", "client 4"], | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_read_over_time(self) -> None: | ||||
|         stat = COUNT_STATS["messages_read::hour"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_read_over_time"} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour], | ||||
|                 "frequency": CountStat.HOUR, | ||||
|                 "everyone": {"read": self.data(100)}, | ||||
|                 "user": {"read": self.data(0)}, | ||||
|                 "display_order": None, | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_sent_by_stream(self) -> None: | ||||
|         stat = COUNT_STATS["messages_in_stream:is_bot:day"] | ||||
|         self.insert_data(stat, ["true", "false"], []) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             f"/json/analytics/chart_data/stream/{self.stream_id}", | ||||
|             { | ||||
|                 "chart_name": "messages_sent_by_stream", | ||||
|             }, | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|                 "frequency": CountStat.DAY, | ||||
|                 "everyone": {"bot": self.data(100), "human": self.data(101)}, | ||||
|                 "display_order": None, | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|         result = self.api_get( | ||||
|             self.example_user("polonius"), | ||||
|             f"/api/v1/analytics/chart_data/stream/{self.stream_id}", | ||||
|             { | ||||
|                 "chart_name": "messages_sent_by_stream", | ||||
|             }, | ||||
|         ) | ||||
|         self.assert_json_error(result, "Not allowed for guest users") | ||||
|  | ||||
|         # Verify we correctly forbid access to stats of streams in other realms. | ||||
|         result = self.api_get( | ||||
|             self.mit_user("sipbtest"), | ||||
|             f"/api/v1/analytics/chart_data/stream/{self.stream_id}", | ||||
|             { | ||||
|                 "chart_name": "messages_sent_by_stream", | ||||
|             }, | ||||
|             subdomain="zephyr", | ||||
|         ) | ||||
|         self.assert_json_error(result, "Invalid stream ID") | ||||
|  | ||||
    def test_include_empty_subgroups(self) -> None:
        """With a DONE FillState but no count rows, each chart still returns
        its series keys (zero-filled) rather than omitting them — except the
        client chart, whose subgroups are dynamic and so come back empty."""
        # number_of_humans: fixed subgroups appear zero-filled; this chart has
        # no per-user series at all.
        FillState.objects.create(
            property="realm_active_humans::day",
            end_time=self.end_times_day[0],
            state=FillState.DONE,
        )
        result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
        data = self.assert_json_success(result)
        self.assertEqual(data["everyone"], {"_1day": [0], "_15day": [0], "all_time": [0]})
        self.assertFalse("user" in data)

        # messages_sent_over_time: both realm and user series include the
        # human/bot subgroups even with no data.
        FillState.objects.create(
            property="messages_sent:is_bot:hour",
            end_time=self.end_times_hour[0],
            state=FillState.DONE,
        )
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
        )
        data = self.assert_json_success(result)
        self.assertEqual(data["everyone"], {"human": [0], "bot": [0]})
        self.assertEqual(data["user"], {"human": [0], "bot": [0]})

        # messages_sent_by_message_type: all four message-type labels appear,
        # zero-filled, in both series.
        FillState.objects.create(
            property="messages_sent:message_type:day",
            end_time=self.end_times_day[0],
            state=FillState.DONE,
        )
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
        )
        data = self.assert_json_success(result)
        self.assertEqual(
            data["everyone"],
            {
                "Public streams": [0],
                "Private streams": [0],
                "Direct messages": [0],
                "Group direct messages": [0],
            },
        )
        self.assertEqual(
            data["user"],
            {
                "Public streams": [0],
                "Private streams": [0],
                "Direct messages": [0],
                "Group direct messages": [0],
            },
        )

        # messages_sent_by_client: client subgroups are data-driven, so with
        # no rows the series dicts are simply empty.
        FillState.objects.create(
            property="messages_sent:client:day",
            end_time=self.end_times_day[0],
            state=FillState.DONE,
        )
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"}
        )
        data = self.assert_json_success(result)
        self.assertEqual(data["everyone"], {})
        self.assertEqual(data["user"], {})
|  | ||||
|     def test_start_and_end(self) -> None: | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["1day_actives::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] | ||||
|         self.insert_data(stat, ["false"], []) | ||||
|         end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day] | ||||
|  | ||||
|         # valid start and end | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", | ||||
|             { | ||||
|                 "chart_name": "number_of_humans", | ||||
|                 "start": end_time_timestamps[1], | ||||
|                 "end": end_time_timestamps[2], | ||||
|             }, | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual(data["end_times"], end_time_timestamps[1:3]) | ||||
|         self.assertEqual( | ||||
|             data["everyone"], {"_1day": [0, 100], "_15day": [0, 100], "all_time": [0, 100]} | ||||
|         ) | ||||
|  | ||||
|         # start later then end | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", | ||||
|             { | ||||
|                 "chart_name": "number_of_humans", | ||||
|                 "start": end_time_timestamps[2], | ||||
|                 "end": end_time_timestamps[1], | ||||
|             }, | ||||
|         ) | ||||
|         self.assert_json_error_contains(result, "Start time is later than") | ||||
|  | ||||
|     def test_min_length(self) -> None: | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["1day_actives::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] | ||||
|         self.insert_data(stat, ["false"], []) | ||||
|         # test min_length is too short to change anything | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 2} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         self.assertEqual( | ||||
|             data["end_times"], [datetime_to_timestamp(dt) for dt in self.end_times_day] | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             data["everyone"], | ||||
|             {"_1day": self.data(100), "_15day": self.data(100), "all_time": self.data(100)}, | ||||
|         ) | ||||
|         # test min_length larger than filled data | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 5} | ||||
|         ) | ||||
|         data = self.assert_json_success(result) | ||||
|         end_times = [ | ||||
|             ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4) | ||||
|         ] | ||||
|         self.assertEqual(data["end_times"], [datetime_to_timestamp(dt) for dt in end_times]) | ||||
|         self.assertEqual( | ||||
|             data["everyone"], | ||||
|             { | ||||
|                 "_1day": [0, *self.data(100)], | ||||
|                 "_15day": [0, *self.data(100)], | ||||
|                 "all_time": [0, *self.data(100)], | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_non_existent_chart(self) -> None: | ||||
|         result = self.client_get("/json/analytics/chart_data", {"chart_name": "does_not_exist"}) | ||||
|         self.assert_json_error_contains(result, "Unknown chart name") | ||||
|  | ||||
    def test_analytics_not_running(self) -> None:
        """When the last successful analytics fill is before the computed chart
        start time (derived from the realm's creation date), the endpoint
        returns 'No analytics data available' and logs a warning suggesting the
        analytics cron job is not running; realms created recently enough that
        no fill would be expected yet succeed instead.

        NOTE(review): the exact success/error cutoffs below (e.g. days=1,
        hours=2 failing but days=1, minutes=10 succeeding) depend on how the
        view rounds the start time — inferred from the fixtures, not visible
        here; confirm against the view code before relying on them.
        """
        realm = get_realm("zulip")

        # No fills have ever run; the logged end time is datetime.min (UTC).
        self.assertEqual(FillState.objects.count(), 0)

        # Realm created 3 days ago, no fills: error + warning.
        realm.date_created = timezone_now() - timedelta(days=3)
        realm.save(update_fields=["date_created"])
        with self.assertLogs(level="WARNING") as m:
            result = self.client_get(
                "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
            )
            self.assertEqual(
                m.output,
                [
                    f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?"
                ],
            )

        self.assert_json_error_contains(result, "No analytics data available")

        # Slightly over a day old still errors...
        realm.date_created = timezone_now() - timedelta(days=1, hours=2)
        realm.save(update_fields=["date_created"])
        with self.assertLogs(level="WARNING") as m:
            result = self.client_get(
                "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
            )
            self.assertEqual(
                m.output,
                [
                    f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?"
                ],
            )

        self.assert_json_error_contains(result, "No analytics data available")

        # ...but just under the cutoff succeeds: the realm is new enough that
        # missing analytics data is expected.
        realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
        realm.save(update_fields=["date_created"])
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
        )
        self.assert_json_success(result)

        realm.date_created = timezone_now() - timedelta(hours=10)
        realm.save(update_fields=["date_created"])
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
        )
        self.assert_json_success(result)

        # Now add a stale fill (5 days old); the warning reports its end time.
        end_time = timezone_now() - timedelta(days=5)
        fill_state = FillState.objects.create(
            property="messages_sent:is_bot:hour", end_time=end_time, state=FillState.DONE
        )

        realm.date_created = timezone_now() - timedelta(days=3)
        realm.save(update_fields=["date_created"])
        with self.assertLogs(level="WARNING") as m:
            result = self.client_get(
                "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
            )
            self.assertEqual(
                m.output,
                [
                    f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?"
                ],
            )

        self.assert_json_error_contains(result, "No analytics data available")

        # A new-enough realm still succeeds despite the stale fill.
        realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
        realm.save(update_fields=["date_created"])
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
        )
        self.assert_json_success(result)

        # Freshen the fill to 2 days old: a 3-day-old realm now succeeds.
        end_time = timezone_now() - timedelta(days=2)
        fill_state.end_time = end_time
        fill_state.save(update_fields=["end_time"])

        realm.date_created = timezone_now() - timedelta(days=3)
        realm.save(update_fields=["date_created"])
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
        )
        self.assert_json_success(result)

        # But a realm created after the last fill (and past the cutoff) errors.
        realm.date_created = timezone_now() - timedelta(days=1, hours=2)
        realm.save(update_fields=["date_created"])
        with self.assertLogs(level="WARNING") as m:
            result = self.client_get(
                "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
            )
            self.assertEqual(
                m.output,
                [
                    f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?"
                ],
            )

        self.assert_json_error_contains(result, "No analytics data available")

        realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
        realm.save(update_fields=["date_created"])
        result = self.client_get(
            "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
        )
        self.assert_json_success(result)
|  | ||||
|     def test_get_chart_data_for_realm(self) -> None: | ||||
|         user = self.example_user("hamlet") | ||||
|         self.login_user(user) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"} | ||||
|         ) | ||||
|         self.assert_json_error(result, "Must be an server administrator", 400) | ||||
|  | ||||
|         user = self.example_user("hamlet") | ||||
|         user.is_staff = True | ||||
|         user.save(update_fields=["is_staff"]) | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/realm/not_existing_realm", | ||||
|             {"chart_name": "number_of_humans"}, | ||||
|         ) | ||||
|         self.assert_json_error(result, "Invalid organization", 400) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|     def test_get_chart_data_for_installation(self) -> None: | ||||
|         user = self.example_user("hamlet") | ||||
|         self.login_user(user) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"} | ||||
|         ) | ||||
|         self.assert_json_error(result, "Must be an server administrator", 400) | ||||
|  | ||||
|         user = self.example_user("hamlet") | ||||
|         user.is_staff = True | ||||
|         user.save(update_fields=["is_staff"]) | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|  | ||||
class TestGetChartDataHelpers(ZulipTestCase):
    def test_sort_by_totals(self) -> None:
        """Labels are ordered by the descending sum of their value arrays."""
        no_values: List[int] = []
        value_arrays = {"c": [0, 1], "a": [9], "b": [1, 1, 1], "d": no_values}
        self.assertEqual(sort_by_totals(value_arrays), ["a", "b", "c", "d"])

    def test_sort_client_labels(self) -> None:
        """Labels from both series are merged into one combined ordering."""
        chart_data = {
            "everyone": {"a": [16], "c": [15], "b": [14], "e": [13], "d": [12], "h": [11]},
            "user": {"a": [6], "b": [5], "d": [4], "e": [3], "f": [2], "g": [1]},
        }
        self.assertEqual(
            sort_client_labels(chart_data), ["a", "b", "c", "d", "e", "f", "g", "h"]
        )
|  | ||||
|  | ||||
class TestTimeRange(ZulipTestCase):
    def test_time_range(self) -> None:
        """time_range() honors frequency boundaries and min_length padding."""
        one_hour = timedelta(hours=1)
        one_day = timedelta(days=1)

        off_boundary = datetime(2016, 3, 14, 22, 59, tzinfo=timezone.utc)
        on_hour = datetime(2016, 3, 14, 22, tzinfo=timezone.utc)
        on_day = datetime(2016, 3, 14, tzinfo=timezone.utc)

        # start == end off-boundary yields an empty range.
        self.assertEqual(time_range(off_boundary, off_boundary, CountStat.HOUR, None), [])
        self.assertEqual(time_range(off_boundary, off_boundary, CountStat.DAY, None), [])
        # start == end exactly on a boundary, with min_length == 0.
        self.assertEqual(time_range(on_hour, on_hour, CountStat.HOUR, 0), [on_hour])
        self.assertEqual(time_range(on_day, on_day, CountStat.DAY, 0), [on_day])
        # start and end on adjacent boundaries: both endpoints included.
        self.assertEqual(
            time_range(on_hour, on_hour + one_hour, CountStat.HOUR, None),
            [on_hour, on_hour + one_hour],
        )
        self.assertEqual(
            time_range(on_day, on_day + one_day, CountStat.DAY, None),
            [on_day, on_day + one_day],
        )
        # min_length pads earlier times onto the front of the range.
        self.assertEqual(
            time_range(on_hour, on_hour + one_hour, CountStat.HOUR, 4),
            [on_hour - 2 * one_hour, on_hour - one_hour, on_hour, on_hour + one_hour],
        )
        self.assertEqual(
            time_range(on_day, on_day + one_day, CountStat.DAY, 4),
            [on_day - 2 * one_day, on_day - one_day, on_day, on_day + one_day],
        )
|  | ||||
|  | ||||
class TestMapArrays(ZulipTestCase):
    def test_map_arrays(self) -> None:
        """rewrite_client_arrays() merges and renames raw client names into
        display labels; unrecognized names pass through unchanged."""
        raw_arrays = {
            "desktop app 1.0": [1, 2, 3],
            "desktop app 2.0": [10, 12, 13],
            "desktop app 3.0": [21, 22, 23],
            "website": [1, 2, 3],
            "ZulipiOS": [1, 2, 3],
            "ZulipElectron": [2, 5, 7],
            "ZulipMobile": [1, 5, 7],
            "ZulipPython": [1, 2, 3],
            "API: Python": [1, 2, 3],
            "SomethingRandom": [4, 5, 6],
            "ZulipGitHubWebhook": [7, 7, 9],
            "ZulipAndroid": [64, 63, 65],
            "ZulipTerminal": [9, 10, 11],
        }
        expected = {
            # The three old desktop versions are summed into one label.
            "Old desktop app": [32, 36, 39],
            "Old iOS app": [1, 2, 3],
            "Desktop app": [2, 5, 7],
            "Mobile app": [1, 5, 7],
            "Web app": [1, 2, 3],
            # "ZulipPython" and "API: Python" are summed together.
            "Python API": [2, 4, 6],
            "SomethingRandom": [4, 5, 6],
            "GitHub webhook": [7, 7, 9],
            "Old Android app": [64, 63, 65],
            "Terminal app": [9, 10, 11],
        }
        self.assertEqual(rewrite_client_arrays(raw_arrays), expected)
| @@ -1,806 +0,0 @@ | ||||
| from datetime import datetime, timedelta, timezone | ||||
| from typing import TYPE_CHECKING, Optional | ||||
| from unittest import mock | ||||
|  | ||||
| import orjson | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from typing_extensions import override | ||||
|  | ||||
| from corporate.lib.stripe import add_months | ||||
| from corporate.lib.support import update_realm_sponsorship_status | ||||
| from corporate.models import Customer, CustomerPlan, LicenseLedger, get_customer_by_realm | ||||
| from zerver.actions.invites import do_create_multiuse_invite_link | ||||
| from zerver.actions.realm_settings import do_change_realm_org_type, do_send_realm_reactivation_email | ||||
| from zerver.actions.user_settings import do_change_user_setting | ||||
| from zerver.lib.test_classes import ZulipTestCase | ||||
| from zerver.lib.test_helpers import reset_email_visibility_to_everyone_in_zulip_realm | ||||
| from zerver.models import ( | ||||
|     MultiuseInvite, | ||||
|     PreregistrationUser, | ||||
|     Realm, | ||||
|     UserMessage, | ||||
|     UserProfile, | ||||
|     get_org_type_display_name, | ||||
|     get_realm, | ||||
| ) | ||||
| from zilencer.lib.remote_counts import MissingDataError | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse | ||||
|  | ||||
| import uuid | ||||
|  | ||||
| from zilencer.models import RemoteZulipServer | ||||
|  | ||||
|  | ||||
class TestRemoteServerSupportEndpoint(ZulipTestCase):
    @override
    def setUp(self) -> None:
        super().setUp()

        # Seed twenty remote servers for the search tests to match against.
        for n in range(20):
            host = f"zulip-{n}.example.com"
            RemoteZulipServer.objects.create(
                hostname=host,
                contact_email=f"admin@{host}",
                plan_type=1,
                uuid=uuid.uuid4(),
            )

    def test_search(self) -> None:
        """The remote-support page is staff-only and searchable by hostname
        or contact email."""
        # A non-staff user is redirected to the login page.
        self.login("cordelia")

        response = self.client_get("/activity/remote/support")
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response["Location"], "/login/")

        # Iago is the user with the appropriate permissions to access this page.
        self.login("iago")
        assert self.example_user("iago").is_staff

        response = self.client_get("/activity/remote/support")
        self.assert_in_success_response(
            [
                'input type="text" name="q" class="input-xxlarge search-query" placeholder="hostname or contact email"'
            ],
            response,
        )

        # A hostname query matches exactly one server; the computed message
        # count is rendered when available...
        with mock.patch("analytics.views.support.compute_max_monthly_messages", return_value=1000):
            response = self.client_get("/activity/remote/support", {"q": "zulip-1.example.com"})
        self.assert_in_success_response(["<h3>zulip-1.example.com</h3>"], response)
        self.assert_in_success_response(["<b>Max monthly messages</b>: 1000"], response)
        self.assert_not_in_success_response(["<h3>zulip-2.example.com</h3>"], response)

        # ...and degrades gracefully when the data is missing.
        with mock.patch(
            "analytics.views.support.compute_max_monthly_messages", side_effect=MissingDataError
        ):
            response = self.client_get("/activity/remote/support", {"q": "zulip-1.example.com"})
        self.assert_in_success_response(["<h3>zulip-1.example.com</h3>"], response)
        self.assert_in_success_response(
            ["<b>Max monthly messages</b>: Recent data missing"], response
        )
        self.assert_not_in_success_response(["<h3>zulip-2.example.com</h3>"], response)

        # The shared domain suffix matches every seeded server.
        response = self.client_get("/activity/remote/support", {"q": "example.com"})
        for n in range(20):
            self.assert_in_success_response([f"<h3>zulip-{n}.example.com</h3>"], response)

        # Searching by contact email matches only that server.
        response = self.client_get("/activity/remote/support", {"q": "admin@zulip-2.example.com"})
        self.assert_in_success_response(["<h3>zulip-2.example.com</h3>"], response)
        self.assert_in_success_response(
            ["<b>Contact email</b>: admin@zulip-2.example.com"], response
        )
        self.assert_not_in_success_response(["<h3>zulip-1.example.com</h3>"], response)
|  | ||||
|  | ||||
| class TestSupportEndpoint(ZulipTestCase): | ||||
    def test_search(self) -> None:
        """End-to-end test of the /activity/support search box.

        Exercises queries by user email (including case-insensitivity),
        realm string_id, realm host URL, comma-separated multi-term
        queries, and confirmation-link objects (invitations, realm
        creation links, multiuse invite links, realm reactivation links),
        on both the "zulip" and "lear" realms.
        """
        reset_email_visibility_to_everyone_in_zulip_realm()
        # Make king a staff user so support queries can be issued from the
        # lear subdomain at the end of this test.
        lear_user = self.lear_user("king")
        lear_user.is_staff = True
        lear_user.save(update_fields=["is_staff"])
        lear_realm = get_realm("lear")

        # Helper: assert the rendered support page contains the user card
        # with the given name, email and role.
        def assert_user_details_in_html_response(
            html_response: "TestHttpResponse", full_name: str, email: str, role: str
        ) -> None:
            self.assert_in_success_response(
                [
                    '<span class="label">user</span>\n',
                    f"<h3>{full_name}</h3>",
                    f"<b>Email</b>: {email}",
                    "<b>Is active</b>: True<br />",
                    f"<b>Role</b>: {role}<br />",
                ],
                html_response,
            )

        # Helper: send a stream invitation to invitee_email via the API,
        # in the given realm (defaults to "zulip").
        def create_invitation(
            stream: str, invitee_email: str, realm: Optional[Realm] = None
        ) -> None:
            invite_expires_in_minutes = 10 * 24 * 60
            self.client_post(
                "/json/invites",
                {
                    "invitee_emails": [invitee_email],
                    "stream_ids": orjson.dumps([self.get_stream_id(stream, realm)]).decode(),
                    "invite_expires_in_minutes": invite_expires_in_minutes,
                    "invite_as": PreregistrationUser.INVITE_AS["MEMBER"],
                },
                subdomain=realm.string_id if realm is not None else "zulip",
            )

        # Helper: check hamlet's user card plus the zulip realm's
        # admin/owner contact rows with copy buttons.
        def check_hamlet_user_query_result(result: "TestHttpResponse") -> None:
            assert_user_details_in_html_response(
                result, "King Hamlet", self.example_email("hamlet"), "Member"
            )
            self.assert_in_success_response(
                [
                    f"<b>Admins</b>: {self.example_email('iago')}\n",
                    f"<b>Owners</b>: {self.example_email('desdemona')}\n",
                    'class="copy-button" data-copytext="{}">'.format(self.example_email("iago")),
                    'class="copy-button" data-copytext="{}">'.format(
                        self.example_email("desdemona")
                    ),
                ],
                result,
            )

        # Helper: check the lear "king" user's card.
        def check_lear_user_query_result(result: "TestHttpResponse") -> None:
            assert_user_details_in_html_response(
                result, lear_user.full_name, lear_user.email, "Member"
            )

        # Helper: check othello's user card.
        def check_othello_user_query_result(result: "TestHttpResponse") -> None:
            assert_user_details_in_html_response(
                result, "Othello, the Moor of Venice", self.example_email("othello"), "Member"
            )

        # Helper: check polonius's user card (a guest user).
        def check_polonius_user_query_result(result: "TestHttpResponse") -> None:
            assert_user_details_in_html_response(
                result, "Polonius", self.example_email("polonius"), "Guest"
            )

        # Helper: check the zulip realm's support card (plan/status
        # selectors, scrub button, etc.).
        def check_zulip_realm_query_result(result: "TestHttpResponse") -> None:
            zulip_realm = get_realm("zulip")
            first_human_user = zulip_realm.get_first_human_user()
            assert first_human_user is not None
            self.assert_in_success_response(
                [
                    f"<b>First human user</b>: {first_human_user.delivery_email}\n",
                    f'<input type="hidden" name="realm_id" value="{zulip_realm.id}"',
                    "Zulip Dev</h3>",
                    '<option value="1" selected>Self-hosted</option>',
                    '<option value="2" >Limited</option>',
                    'input type="number" name="discount" value="None"',
                    '<option value="active" selected>Active</option>',
                    '<option value="deactivated" >Deactivated</option>',
                    f'<option value="{zulip_realm.org_type}" selected>',
                    'scrub-realm-button">',
                    'data-string-id="zulip"',
                ],
                result,
            )

        # Helper: check the lear realm's support card, including the
        # billing details created in the test body below.
        def check_lear_realm_query_result(result: "TestHttpResponse") -> None:
            self.assert_in_success_response(
                [
                    f'<input type="hidden" name="realm_id" value="{lear_realm.id}"',
                    "Lear & Co.</h3>",
                    '<option value="1" selected>Self-hosted</option>',
                    '<option value="2" >Limited</option>',
                    'input type="number" name="discount" value="None"',
                    '<option value="active" selected>Active</option>',
                    '<option value="deactivated" >Deactivated</option>',
                    'scrub-realm-button">',
                    'data-string-id="lear"',
                    "<b>Name</b>: Zulip Cloud Standard",
                    "<b>Status</b>: Active",
                    "<b>Billing schedule</b>: Annual",
                    "<b>Licenses</b>: 2/10 (Manual)",
                    "<b>Price per license</b>: $80.0",
                    "<b>Next invoice date</b>: 02 January 2017",
                    '<option value="send_invoice" selected>',
                    '<option value="charge_automatically" >',
                ],
                result,
            )

        # Helper: check a preregistration-user card; `invite` selects
        # between the invitation variant and the plain-signup variant
        # (they render different expiry windows).
        def check_preregistration_user_query_result(
            result: "TestHttpResponse", email: str, invite: bool = False
        ) -> None:
            self.assert_in_success_response(
                [
                    '<span class="label">preregistration user</span>\n',
                    f"<b>Email</b>: {email}",
                ],
                result,
            )
            if invite:
                self.assert_in_success_response(['<span class="label">invite</span>'], result)
                self.assert_in_success_response(
                    [
                        "<b>Expires in</b>: 1\xa0week, 3\xa0days",
                        "<b>Status</b>: Link has not been used",
                    ],
                    result,
                )
                # NOTE(review): an empty expected-substring list is a no-op
                # assertion; consider removing this line.
                self.assert_in_success_response([], result)
            else:
                self.assert_not_in_success_response(['<span class="label">invite</span>'], result)
                self.assert_in_success_response(
                    [
                        "<b>Expires in</b>: 1\xa0day",
                        "<b>Status</b>: Link has not been used",
                    ],
                    result,
                )

        # Helper: check a realm-creation confirmation card.
        def check_realm_creation_query_result(result: "TestHttpResponse", email: str) -> None:
            self.assert_in_success_response(
                [
                    '<span class="label">preregistration user</span>\n',
                    '<span class="label">realm creation</span>\n',
                    "<b>Link</b>: http://testserver/accounts/do_confirm/",
                    "<b>Expires in</b>: 1\xa0day",
                ],
                result,
            )

        # Helper: check a multiuse-invite-link card.
        def check_multiuse_invite_link_query_result(result: "TestHttpResponse") -> None:
            self.assert_in_success_response(
                [
                    '<span class="label">multiuse invite</span>\n',
                    "<b>Link</b>: http://zulip.testserver/join/",
                    "<b>Expires in</b>: 1\xa0week, 3\xa0days",
                ],
                result,
            )

        # Helper: check a realm-reactivation-link card.
        def check_realm_reactivation_link_query_result(result: "TestHttpResponse") -> None:
            self.assert_in_success_response(
                [
                    '<span class="label">realm reactivation</span>\n',
                    "<b>Link</b>: http://zulip.testserver/reactivate/",
                    "<b>Expires in</b>: 1\xa0day",
                ],
                result,
            )

        # Helper: run a support query and assert it yields exactly `count`
        # result cards (counted via the "support-query-result" marker).
        def get_check_query_result(
            query: str, count: int, subdomain: str = "zulip"
        ) -> "TestHttpResponse":
            result = self.client_get("/activity/support", {"q": query}, subdomain=subdomain)
            self.assertEqual(result.content.decode().count("support-query-result"), count)
            return result

        # A non-staff user is redirected to the login page.
        self.login("cordelia")

        result = self.client_get("/activity/support")
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        self.login("iago")

        # Hide hamlet's email to exercise search against hidden addresses.
        do_change_user_setting(
            self.example_user("hamlet"),
            "email_address_visibility",
            UserProfile.EMAIL_ADDRESS_VISIBILITY_NOBODY,
            acting_user=None,
        )

        # Give lear a Standard plan so check_lear_realm_query_result's
        # billing rows (schedule, licenses, price, invoice date) render.
        customer = Customer.objects.create(realm=lear_realm, stripe_customer_id="cus_123")
        now = datetime(2016, 1, 2, tzinfo=timezone.utc)
        plan = CustomerPlan.objects.create(
            customer=customer,
            billing_cycle_anchor=now,
            billing_schedule=CustomerPlan.ANNUAL,
            tier=CustomerPlan.STANDARD,
            price_per_license=8000,
            next_invoice_date=add_months(now, 12),
        )
        LicenseLedger.objects.create(
            licenses=10,
            licenses_at_next_renewal=10,
            event_time=timezone_now(),
            is_renewal=True,
            plan=plan,
        )

        # With no query, the page just renders the search form.
        result = self.client_get("/activity/support")
        self.assert_in_success_response(
            ['<input type="text" name="q" class="input-xxlarge search-query"'], result
        )

        result = get_check_query_result(self.example_email("hamlet"), 1)
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)

        # Search should be case-insensitive:
        assert self.example_email("hamlet") != self.example_email("hamlet").upper()
        result = get_check_query_result(self.example_email("hamlet").upper(), 1)
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)

        result = get_check_query_result(lear_user.email, 1)
        check_lear_user_query_result(result)
        check_lear_realm_query_result(result)

        result = get_check_query_result(self.example_email("polonius"), 1)
        check_polonius_user_query_result(result)
        check_zulip_realm_query_result(result)

        # Realms can be looked up by string_id ...
        result = get_check_query_result("lear", 1)
        check_lear_realm_query_result(result)

        # ... or by full host URL.
        result = get_check_query_result("http://lear.testserver", 1)
        check_lear_realm_query_result(result)

        with self.settings(REALM_HOSTS={"zulip": "localhost"}):
            result = get_check_query_result("http://localhost", 1)
            check_zulip_realm_query_result(result)

        # Comma-separated queries return multiple result cards.
        result = get_check_query_result("hamlet@zulip.com, lear", 2)
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)
        check_lear_realm_query_result(result)

        result = get_check_query_result("King hamlet,lear", 2)
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)
        check_lear_realm_query_result(result)

        # A full name containing commas is still treated as one query term.
        result = get_check_query_result("Othello, the Moor of Venice", 1)
        check_othello_user_query_result(result)
        check_zulip_realm_query_result(result)

        # "Name <email>" style terms are also supported.
        result = get_check_query_result("lear, Hamlet <hamlet@zulip.com>", 2)
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)
        check_lear_realm_query_result(result)

        # Backdate the support view's clock by 50 minutes while creating
        # confirmation objects — presumably so the rendered "Expires in"
        # values are stable against rounding; confirm against the view.
        with mock.patch(
            "analytics.views.support.timezone_now",
            return_value=timezone_now() - timedelta(minutes=50),
        ):
            self.client_post("/accounts/home/", {"email": self.nonreg_email("test")})
            self.login("iago")
            result = get_check_query_result(self.nonreg_email("test"), 1)
            check_preregistration_user_query_result(result, self.nonreg_email("test"))
            check_zulip_realm_query_result(result)

            create_invitation("Denmark", self.nonreg_email("test1"))
            result = get_check_query_result(self.nonreg_email("test1"), 1)
            check_preregistration_user_query_result(result, self.nonreg_email("test1"), invite=True)
            check_zulip_realm_query_result(result)

            email = self.nonreg_email("alice")
            self.submit_realm_creation_form(
                email, realm_subdomain="custom-test", realm_name="Zulip test"
            )
            result = get_check_query_result(email, 1)
            check_realm_creation_query_result(result, email)

            invite_expires_in_minutes = 10 * 24 * 60
            do_create_multiuse_invite_link(
                self.example_user("hamlet"),
                invited_as=1,
                invite_expires_in_minutes=invite_expires_in_minutes,
            )
            result = get_check_query_result("zulip", 2)
            check_multiuse_invite_link_query_result(result)
            check_zulip_realm_query_result(result)
            MultiuseInvite.objects.all().delete()

            do_send_realm_reactivation_email(get_realm("zulip"), acting_user=None)
            result = get_check_query_result("zulip", 2)
            check_realm_reactivation_link_query_result(result)
            check_zulip_realm_query_result(result)

            # The same flows work against the lear realm/subdomain.
            lear_nonreg_email = "newguy@lear.org"
            self.client_post("/accounts/home/", {"email": lear_nonreg_email}, subdomain="lear")
            result = get_check_query_result(lear_nonreg_email, 1)
            check_preregistration_user_query_result(result, lear_nonreg_email)
            check_lear_realm_query_result(result)

            self.login_user(lear_user)
            create_invitation("general", "newguy2@lear.org", lear_realm)
            result = get_check_query_result("newguy2@lear.org", 1, lear_realm.string_id)
            check_preregistration_user_query_result(result, "newguy2@lear.org", invite=True)
            check_lear_realm_query_result(result)
|  | ||||
|     def test_get_org_type_display_name(self) -> None: | ||||
|         self.assertEqual(get_org_type_display_name(Realm.ORG_TYPES["business"]["id"]), "Business") | ||||
|         self.assertEqual(get_org_type_display_name(883), "") | ||||
|  | ||||
|     def test_unspecified_org_type_correctly_displayed(self) -> None: | ||||
|         """ | ||||
|         Unspecified org type is special in that it is marked to not be shown | ||||
|         on the registration page (because organitions are not meant to be able to choose it), | ||||
|         but should be correctly shown at the /support/ endpoint. | ||||
|         """ | ||||
|         realm = get_realm("zulip") | ||||
|  | ||||
|         do_change_realm_org_type(realm, 0, acting_user=None) | ||||
|         self.assertEqual(realm.org_type, 0) | ||||
|  | ||||
|         self.login("iago") | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": "zulip"}, subdomain="zulip") | ||||
|         self.assert_in_success_response( | ||||
|             [ | ||||
|                 f'<input type="hidden" name="realm_id" value="{realm.id}"', | ||||
|                 '<option value="0" selected>', | ||||
|             ], | ||||
|             result, | ||||
|         ) | ||||
|  | ||||
|     @mock.patch("analytics.views.support.update_realm_billing_method") | ||||
|     def test_change_billing_method(self, m: mock.Mock) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login_user(iago) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", | ||||
|             {"realm_id": f"{iago.realm_id}", "billing_method": "charge_automatically"}, | ||||
|         ) | ||||
|         m.assert_called_once_with(get_realm("zulip"), charge_automatically=True, acting_user=iago) | ||||
|         self.assert_in_success_response( | ||||
|             ["Billing method of zulip updated to charge automatically"], result | ||||
|         ) | ||||
|  | ||||
|         m.reset_mock() | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{iago.realm_id}", "billing_method": "send_invoice"} | ||||
|         ) | ||||
|         m.assert_called_once_with(get_realm("zulip"), charge_automatically=False, acting_user=iago) | ||||
|         self.assert_in_success_response( | ||||
|             ["Billing method of zulip updated to pay by invoice"], result | ||||
|         ) | ||||
|  | ||||
|     def test_change_realm_plan_type(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login_user(iago) | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_change_realm_plan_type") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "2"} | ||||
|             ) | ||||
|             m.assert_called_once_with(get_realm("zulip"), 2, acting_user=iago) | ||||
|             self.assert_in_success_response( | ||||
|                 ["Plan type of zulip changed from self-hosted to limited"], result | ||||
|             ) | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_change_realm_plan_type") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "10"} | ||||
|             ) | ||||
|             m.assert_called_once_with(get_realm("zulip"), 10, acting_user=iago) | ||||
|             self.assert_in_success_response( | ||||
|                 ["Plan type of zulip changed from self-hosted to plus"], result | ||||
|             ) | ||||
|  | ||||
|     def test_change_org_type(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{cordelia.realm_id}", "org_type": "70"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login_user(iago) | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_change_realm_org_type") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{iago.realm_id}", "org_type": "70"} | ||||
|             ) | ||||
|             m.assert_called_once_with(get_realm("zulip"), 70, acting_user=iago) | ||||
|             self.assert_in_success_response( | ||||
|                 ["Org type of zulip changed from Business to Government"], result | ||||
|             ) | ||||
|  | ||||
|     def test_attach_discount(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         lear_realm = get_realm("lear") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login("iago") | ||||
|  | ||||
|         with mock.patch("analytics.views.support.attach_discount_to_realm") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"} | ||||
|             ) | ||||
|             m.assert_called_once_with(get_realm("lear"), 25, acting_user=iago) | ||||
|             self.assert_in_success_response(["Discount of lear changed to 25% from 0%"], result) | ||||
|  | ||||
|     def test_change_sponsorship_status(self) -> None: | ||||
|         lear_realm = get_realm("lear") | ||||
|         self.assertIsNone(get_customer_by_realm(lear_realm)) | ||||
|  | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login_user(iago) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"} | ||||
|         ) | ||||
|         self.assert_in_success_response(["lear marked as pending sponsorship."], result) | ||||
|         customer = get_customer_by_realm(lear_realm) | ||||
|         assert customer is not None | ||||
|         self.assertTrue(customer.sponsorship_pending) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "false"} | ||||
|         ) | ||||
|         self.assert_in_success_response(["lear is no longer pending sponsorship."], result) | ||||
|         customer = get_customer_by_realm(lear_realm) | ||||
|         assert customer is not None | ||||
|         self.assertFalse(customer.sponsorship_pending) | ||||
|  | ||||
|     def test_approve_sponsorship(self) -> None: | ||||
|         support_admin = self.example_user("iago") | ||||
|         lear_realm = get_realm("lear") | ||||
|         update_realm_sponsorship_status(lear_realm, True, acting_user=support_admin) | ||||
|         king_user = self.lear_user("king") | ||||
|         king_user.role = UserProfile.ROLE_REALM_OWNER | ||||
|         king_user.save() | ||||
|  | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", | ||||
|             {"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"}, | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login_user(iago) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", | ||||
|             {"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"}, | ||||
|         ) | ||||
|         self.assert_in_success_response(["Sponsorship approved for lear"], result) | ||||
|         lear_realm.refresh_from_db() | ||||
|         self.assertEqual(lear_realm.plan_type, Realm.PLAN_TYPE_STANDARD_FREE) | ||||
|         customer = get_customer_by_realm(lear_realm) | ||||
|         assert customer is not None | ||||
|         self.assertFalse(customer.sponsorship_pending) | ||||
|         messages = UserMessage.objects.filter(user_profile=king_user) | ||||
|         self.assertIn( | ||||
|             "request for sponsored hosting has been approved", messages[0].message.content | ||||
|         ) | ||||
|         self.assert_length(messages, 1) | ||||
|  | ||||
|     def test_activate_or_deactivate_realm(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         lear_realm = get_realm("lear") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         self.login("iago") | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_deactivate_realm") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"} | ||||
|             ) | ||||
|             m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago")) | ||||
|             self.assert_in_success_response(["lear deactivated"], result) | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_send_realm_reactivation_email") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{lear_realm.id}", "status": "active"} | ||||
|             ) | ||||
|             m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago")) | ||||
|             self.assert_in_success_response( | ||||
|                 ["Realm reactivation email sent to admins of lear"], result | ||||
|             ) | ||||
|  | ||||
|     def test_change_subdomain(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         lear_realm = get_realm("lear") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new_name"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|         self.login("iago") | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/activity/support?q=new-name") | ||||
|         realm_id = lear_realm.id | ||||
|         lear_realm = get_realm("new-name") | ||||
|         self.assertEqual(lear_realm.id, realm_id) | ||||
|         self.assertTrue(Realm.objects.filter(string_id="lear").exists()) | ||||
|         self.assertTrue(Realm.objects.filter(string_id="lear")[0].deactivated) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"} | ||||
|         ) | ||||
|         self.assert_in_success_response( | ||||
|             ["Subdomain already in use. Please choose a different one."], result | ||||
|         ) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "zulip"} | ||||
|         ) | ||||
|         self.assert_in_success_response( | ||||
|             ["Subdomain already in use. Please choose a different one."], result | ||||
|         ) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "lear"} | ||||
|         ) | ||||
|         self.assert_in_success_response( | ||||
|             ["Subdomain already in use. Please choose a different one."], result | ||||
|         ) | ||||
|  | ||||
|         # Test renaming to a "reserved" subdomain | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "your-org"} | ||||
|         ) | ||||
|         self.assert_in_success_response( | ||||
|             ["Subdomain reserved. Please choose a different one."], result | ||||
|         ) | ||||
|  | ||||
|     def test_downgrade_realm(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         self.login_user(cordelia) | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login_user(iago) | ||||
|  | ||||
|         with mock.patch("analytics.views.support.downgrade_at_the_end_of_billing_cycle") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", | ||||
|                 { | ||||
|                     "realm_id": f"{iago.realm_id}", | ||||
|                     "modify_plan": "downgrade_at_billing_cycle_end", | ||||
|                 }, | ||||
|             ) | ||||
|             m.assert_called_once_with(get_realm("zulip")) | ||||
|             self.assert_in_success_response( | ||||
|                 ["zulip marked for downgrade at the end of billing cycle"], result | ||||
|             ) | ||||
|  | ||||
|         with mock.patch( | ||||
|             "analytics.views.support.downgrade_now_without_creating_additional_invoices" | ||||
|         ) as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", | ||||
|                 { | ||||
|                     "realm_id": f"{iago.realm_id}", | ||||
|                     "modify_plan": "downgrade_now_without_additional_licenses", | ||||
|                 }, | ||||
|             ) | ||||
|             m.assert_called_once_with(get_realm("zulip")) | ||||
|             self.assert_in_success_response( | ||||
|                 ["zulip downgraded without creating additional invoices"], result | ||||
|             ) | ||||
|  | ||||
|         with mock.patch( | ||||
|             "analytics.views.support.downgrade_now_without_creating_additional_invoices" | ||||
|         ) as m1: | ||||
|             with mock.patch("analytics.views.support.void_all_open_invoices", return_value=1) as m2: | ||||
|                 result = self.client_post( | ||||
|                     "/activity/support", | ||||
|                     { | ||||
|                         "realm_id": f"{iago.realm_id}", | ||||
|                         "modify_plan": "downgrade_now_void_open_invoices", | ||||
|                     }, | ||||
|                 ) | ||||
|                 m1.assert_called_once_with(get_realm("zulip")) | ||||
|                 m2.assert_called_once_with(get_realm("zulip")) | ||||
|                 self.assert_in_success_response( | ||||
|                     ["zulip downgraded and voided 1 open invoices"], result | ||||
|                 ) | ||||
|  | ||||
|         with mock.patch("analytics.views.support.switch_realm_from_standard_to_plus_plan") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", | ||||
|                 { | ||||
|                     "realm_id": f"{iago.realm_id}", | ||||
|                     "modify_plan": "upgrade_to_plus", | ||||
|                 }, | ||||
|             ) | ||||
|             m.assert_called_once_with(get_realm("zulip")) | ||||
|             self.assert_in_success_response(["zulip upgraded to Plus"], result) | ||||
|  | ||||
|     def test_scrub_realm(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         lear_realm = get_realm("lear") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         self.login("iago") | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_scrub_realm") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{lear_realm.id}", "scrub_realm": "true"} | ||||
|             ) | ||||
|             m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago")) | ||||
|             self.assert_in_success_response(["lear scrubbed"], result) | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_scrub_realm") as m: | ||||
|             result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}"}) | ||||
|             self.assert_json_error(result, "Invalid parameters") | ||||
|             m.assert_not_called() | ||||
|  | ||||
|     def test_delete_user(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         hamlet = self.example_user("hamlet") | ||||
|         hamlet_email = hamlet.delivery_email | ||||
|         realm = get_realm("zulip") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{realm.id}", "delete_user_by_id": hamlet.id} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         self.login("iago") | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_delete_user_preserving_messages") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", | ||||
|                 {"realm_id": f"{realm.id}", "delete_user_by_id": hamlet.id}, | ||||
|             ) | ||||
|             m.assert_called_once_with(hamlet) | ||||
|             self.assert_in_success_response([f"{hamlet_email} in zulip deleted"], result) | ||||
							
								
								
									
										1214
									
								
								analytics/tests/test_views.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1214
									
								
								analytics/tests/test_views.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,41 +1,30 @@ | ||||
| from typing import List, Union | ||||
|  | ||||
| from django.conf.urls import include | ||||
| from django.urls import path | ||||
| from django.urls.resolvers import URLPattern, URLResolver | ||||
|  | ||||
| from analytics.views.installation_activity import ( | ||||
|     get_installation_activity, | ||||
|     get_integrations_activity, | ||||
| ) | ||||
| from analytics.views.realm_activity import get_realm_activity | ||||
| from analytics.views.remote_activity import get_remote_server_activity | ||||
| from analytics.views.stats import ( | ||||
| from analytics.views import ( | ||||
|     get_activity, | ||||
|     get_chart_data, | ||||
|     get_chart_data_for_installation, | ||||
|     get_chart_data_for_realm, | ||||
|     get_chart_data_for_remote_installation, | ||||
|     get_chart_data_for_remote_realm, | ||||
|     get_chart_data_for_stream, | ||||
|     get_realm_activity, | ||||
|     get_user_activity, | ||||
|     stats, | ||||
|     stats_for_installation, | ||||
|     stats_for_realm, | ||||
|     stats_for_remote_installation, | ||||
|     stats_for_remote_realm, | ||||
|     support, | ||||
| ) | ||||
| from analytics.views.support import remote_servers_support, support | ||||
| from analytics.views.user_activity import get_user_activity | ||||
| from zerver.lib.rest import rest_path | ||||
|  | ||||
| i18n_urlpatterns: List[Union[URLPattern, URLResolver]] = [ | ||||
| i18n_urlpatterns = [ | ||||
|     # Server admin (user_profile.is_staff) visible stats pages | ||||
|     path("activity", get_installation_activity), | ||||
|     path("activity/remote", get_remote_server_activity), | ||||
|     path("activity/integrations", get_integrations_activity), | ||||
|     path("activity", get_activity), | ||||
|     path("activity/support", support, name="support"), | ||||
|     path("activity/remote/support", remote_servers_support, name="remote_servers_support"), | ||||
|     path("realm_activity/<realm_str>/", get_realm_activity), | ||||
|     path("user_activity/<user_profile_id>/", get_user_activity), | ||||
|     path("user_activity/<email>/", get_user_activity), | ||||
|     path("stats/realm/<realm_str>/", stats_for_realm), | ||||
|     path("stats/installation", stats_for_installation), | ||||
|     path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation), | ||||
| @@ -57,7 +46,6 @@ i18n_urlpatterns: List[Union[URLPattern, URLResolver]] = [ | ||||
| v1_api_and_json_patterns = [ | ||||
|     # get data for the graphs at /stats | ||||
|     rest_path("analytics/chart_data", GET=get_chart_data), | ||||
|     rest_path("analytics/chart_data/stream/<stream_id>", GET=get_chart_data_for_stream), | ||||
|     rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm), | ||||
|     rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation), | ||||
|     rest_path( | ||||
|   | ||||
							
								
								
									
										1791
									
								
								analytics/views.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1791
									
								
								analytics/views.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,200 +0,0 @@ | ||||
| import re | ||||
| import sys | ||||
| from datetime import datetime | ||||
| from typing import Any, Callable, Collection, Dict, List, Optional, Sequence, Union | ||||
| from urllib.parse import urlencode | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.db import connection | ||||
| from django.db.backends.utils import CursorWrapper | ||||
| from django.template import loader | ||||
| from django.urls import reverse | ||||
| from markupsafe import Markup | ||||
| from psycopg2.sql import Composable | ||||
|  | ||||
| from zerver.lib.pysa import mark_sanitized | ||||
| from zerver.lib.url_encoding import append_url_query_string | ||||
| from zerver.models import Realm, UserActivity | ||||
|  | ||||
| if sys.version_info < (3, 9):  # nocoverage | ||||
|     from backports import zoneinfo | ||||
| else:  # nocoverage | ||||
|     import zoneinfo | ||||
|  | ||||
| eastern_tz = zoneinfo.ZoneInfo("America/New_York") | ||||
|  | ||||
|  | ||||
| if settings.BILLING_ENABLED: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| def make_table( | ||||
|     title: str, cols: Sequence[str], rows: Sequence[Any], has_row_class: bool = False | ||||
| ) -> str: | ||||
|     if not has_row_class: | ||||
|  | ||||
|         def fix_row(row: Any) -> Dict[str, Any]: | ||||
|             return dict(cells=row, row_class=None) | ||||
|  | ||||
|         rows = list(map(fix_row, rows)) | ||||
|  | ||||
|     data = dict(title=title, cols=cols, rows=rows) | ||||
|  | ||||
|     content = loader.render_to_string( | ||||
|         "analytics/ad_hoc_query.html", | ||||
|         dict(data=data), | ||||
|     ) | ||||
|  | ||||
|     return content | ||||
|  | ||||
|  | ||||
| def get_page( | ||||
|     query: Composable, cols: Sequence[str], title: str, totals_columns: Sequence[int] = [] | ||||
| ) -> Dict[str, str]: | ||||
|     cursor = connection.cursor() | ||||
|     cursor.execute(query) | ||||
|     rows = cursor.fetchall() | ||||
|     rows = list(map(list, rows)) | ||||
|     cursor.close() | ||||
|  | ||||
|     def fix_rows( | ||||
|         i: int, fixup_func: Union[Callable[[str], Markup], Callable[[datetime], str]] | ||||
|     ) -> None: | ||||
|         for row in rows: | ||||
|             row[i] = fixup_func(row[i]) | ||||
|  | ||||
|     total_row = [] | ||||
|     for i, col in enumerate(cols): | ||||
|         if col == "Realm": | ||||
|             fix_rows(i, realm_activity_link) | ||||
|         elif col in ["Last time", "Last visit"]: | ||||
|             fix_rows(i, format_date_for_activity_reports) | ||||
|         elif col == "Hostname": | ||||
|             for row in rows: | ||||
|                 row[i] = remote_installation_stats_link(row[0], row[i]) | ||||
|         if len(totals_columns) > 0: | ||||
|             if i == 0: | ||||
|                 total_row.append("Total") | ||||
|             elif i in totals_columns: | ||||
|                 total_row.append(str(sum(row[i] for row in rows if row[i] is not None))) | ||||
|             else: | ||||
|                 total_row.append("") | ||||
|     if len(totals_columns) > 0: | ||||
|         rows.insert(0, total_row) | ||||
|  | ||||
|     content = make_table(title, cols, rows) | ||||
|  | ||||
|     return dict( | ||||
|         content=content, | ||||
|         title=title, | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def dictfetchall(cursor: CursorWrapper) -> List[Dict[str, Any]]: | ||||
|     """Returns all rows from a cursor as a dict""" | ||||
|     desc = cursor.description | ||||
|     return [dict(zip((col[0] for col in desc), row)) for row in cursor.fetchall()] | ||||
|  | ||||
|  | ||||
| def format_date_for_activity_reports(date: Optional[datetime]) -> str: | ||||
|     if date: | ||||
|         return date.astimezone(eastern_tz).strftime("%Y-%m-%d %H:%M") | ||||
|     else: | ||||
|         return "" | ||||
|  | ||||
|  | ||||
| def user_activity_link(email: str, user_profile_id: int) -> Markup: | ||||
|     from analytics.views.user_activity import get_user_activity | ||||
|  | ||||
|     url = reverse(get_user_activity, kwargs=dict(user_profile_id=user_profile_id)) | ||||
|     return Markup('<a href="{url}">{email}</a>').format(url=url, email=email) | ||||
|  | ||||
|  | ||||
| def realm_activity_link(realm_str: str) -> Markup: | ||||
|     from analytics.views.realm_activity import get_realm_activity | ||||
|  | ||||
|     url = reverse(get_realm_activity, kwargs=dict(realm_str=realm_str)) | ||||
|     return Markup('<a href="{url}">{realm_str}</a>').format(url=url, realm_str=realm_str) | ||||
|  | ||||
|  | ||||
| def realm_stats_link(realm_str: str) -> Markup: | ||||
|     from analytics.views.stats import stats_for_realm | ||||
|  | ||||
|     url = reverse(stats_for_realm, kwargs=dict(realm_str=realm_str)) | ||||
|     return Markup('<a href="{url}"><i class="fa fa-pie-chart"></i></a>').format(url=url) | ||||
|  | ||||
|  | ||||
| def realm_support_link(realm_str: str) -> Markup: | ||||
|     support_url = reverse("support") | ||||
|     query = urlencode({"q": realm_str}) | ||||
|     url = append_url_query_string(support_url, query) | ||||
|     return Markup('<a href="{url}">{realm_str}</a>').format(url=url, realm_str=realm_str) | ||||
|  | ||||
|  | ||||
| def realm_url_link(realm_str: str) -> Markup: | ||||
|     host = Realm.host_for_subdomain(realm_str) | ||||
|     url = settings.EXTERNAL_URI_SCHEME + mark_sanitized(host) | ||||
|     return Markup('<a href="{url}"><i class="fa fa-home"></i></a>').format(url=url) | ||||
|  | ||||
|  | ||||
| def remote_installation_stats_link(server_id: int, hostname: str) -> Markup: | ||||
|     from analytics.views.stats import stats_for_remote_installation | ||||
|  | ||||
|     url = reverse(stats_for_remote_installation, kwargs=dict(remote_server_id=server_id)) | ||||
|     return Markup('<a href="{url}"><i class="fa fa-pie-chart"></i></a> {hostname}').format( | ||||
|         url=url, hostname=hostname | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def get_user_activity_summary(records: Collection[UserActivity]) -> Dict[str, Any]: | ||||
|     #: The type annotation used above is clearly overly permissive. | ||||
|     #: We should perhaps use TypedDict to clearly lay out the schema | ||||
|     #: for the user activity summary. | ||||
|     summary: Dict[str, Any] = {} | ||||
|  | ||||
|     def update(action: str, record: UserActivity) -> None: | ||||
|         if action not in summary: | ||||
|             summary[action] = dict( | ||||
|                 count=record.count, | ||||
|                 last_visit=record.last_visit, | ||||
|             ) | ||||
|         else: | ||||
|             summary[action]["count"] += record.count | ||||
|             summary[action]["last_visit"] = max( | ||||
|                 summary[action]["last_visit"], | ||||
|                 record.last_visit, | ||||
|             ) | ||||
|  | ||||
|     if records: | ||||
|         first_record = next(iter(records)) | ||||
|         summary["name"] = first_record.user_profile.full_name | ||||
|         summary["user_profile_id"] = first_record.user_profile.id | ||||
|  | ||||
|     for record in records: | ||||
|         client = record.client.name | ||||
|         query = str(record.query) | ||||
|  | ||||
|         update("use", record) | ||||
|  | ||||
|         if client == "API": | ||||
|             m = re.match("/api/.*/external/(.*)", query) | ||||
|             if m: | ||||
|                 client = m.group(1) | ||||
|                 update(client, record) | ||||
|  | ||||
|         if client.startswith("desktop"): | ||||
|             update("desktop", record) | ||||
|         if client == "website": | ||||
|             update("website", record) | ||||
|         if ("send_message" in query) or re.search("/api/.*/external/.*", query): | ||||
|             update("send", record) | ||||
|         if query in [ | ||||
|             "/json/update_pointer", | ||||
|             "/json/users/me/pointer", | ||||
|             "/api/v1/update_pointer", | ||||
|             "update_pointer_backend", | ||||
|         ]: | ||||
|             update("pointer", record) | ||||
|         update(client, record) | ||||
|  | ||||
|     return summary | ||||
| @@ -1,340 +0,0 @@ | ||||
| from collections import defaultdict | ||||
| from typing import Dict, Optional | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.db import connection | ||||
| from django.http import HttpRequest, HttpResponse | ||||
| from django.shortcuts import render | ||||
| from django.template import loader | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from markupsafe import Markup | ||||
| from psycopg2.sql import SQL | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS | ||||
| from analytics.views.activity_common import ( | ||||
|     dictfetchall, | ||||
|     get_page, | ||||
|     realm_activity_link, | ||||
|     realm_stats_link, | ||||
|     realm_support_link, | ||||
|     realm_url_link, | ||||
| ) | ||||
| from analytics.views.support import get_plan_name | ||||
| from zerver.decorator import require_server_admin | ||||
| from zerver.lib.request import has_request_variables | ||||
| from zerver.models import Realm, get_org_type_display_name | ||||
|  | ||||
| if settings.BILLING_ENABLED: | ||||
|     from corporate.lib.analytics import ( | ||||
|         estimate_annual_recurring_revenue_by_realm, | ||||
|         get_realms_with_default_discount_dict, | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def get_realm_day_counts() -> Dict[str, Dict[str, Markup]]: | ||||
|     # To align with UTC days, we subtract an hour from end_time to | ||||
|     # get the start_time, since the hour that starts at midnight was | ||||
|     # on the previous day. | ||||
|     query = SQL( | ||||
|         """ | ||||
|         select | ||||
|             r.string_id, | ||||
|             (now()::date - (end_time - interval '1 hour')::date) age, | ||||
|             coalesce(sum(value), 0) cnt | ||||
|         from zerver_realm r | ||||
|         join analytics_realmcount rc on r.id = rc.realm_id | ||||
|         where | ||||
|             property = 'messages_sent:is_bot:hour' | ||||
|         and | ||||
|             subgroup = 'false' | ||||
|         and | ||||
|             end_time > now()::date - interval '8 day' - interval '1 hour' | ||||
|         group by | ||||
|             r.string_id, | ||||
|             age | ||||
|     """ | ||||
|     ) | ||||
|     cursor = connection.cursor() | ||||
|     cursor.execute(query) | ||||
|     rows = dictfetchall(cursor) | ||||
|     cursor.close() | ||||
|  | ||||
|     counts: Dict[str, Dict[int, int]] = defaultdict(dict) | ||||
|     for row in rows: | ||||
|         counts[row["string_id"]][row["age"]] = row["cnt"] | ||||
|  | ||||
|     def format_count(cnt: int, style: Optional[str] = None) -> Markup: | ||||
|         if style is not None: | ||||
|             good_bad = style | ||||
|         elif cnt == min_cnt: | ||||
|             good_bad = "bad" | ||||
|         elif cnt == max_cnt: | ||||
|             good_bad = "good" | ||||
|         else: | ||||
|             good_bad = "neutral" | ||||
|  | ||||
|         return Markup('<td class="number {good_bad}">{cnt}</td>').format(good_bad=good_bad, cnt=cnt) | ||||
|  | ||||
|     result = {} | ||||
|     for string_id in counts: | ||||
|         raw_cnts = [counts[string_id].get(age, 0) for age in range(8)] | ||||
|         min_cnt = min(raw_cnts[1:]) | ||||
|         max_cnt = max(raw_cnts[1:]) | ||||
|  | ||||
|         cnts = format_count(raw_cnts[0], "neutral") + Markup().join(map(format_count, raw_cnts[1:])) | ||||
|         result[string_id] = dict(cnts=cnts) | ||||
|  | ||||
|     return result | ||||
|  | ||||
|  | ||||
| def realm_summary_table() -> str: | ||||
|     now = timezone_now() | ||||
|  | ||||
|     query = SQL( | ||||
|         """ | ||||
|         SELECT | ||||
|             realm.string_id, | ||||
|             realm.date_created, | ||||
|             realm.plan_type, | ||||
|             realm.org_type, | ||||
|             coalesce(wau_table.value, 0) wau_count, | ||||
|             coalesce(dau_table.value, 0) dau_count, | ||||
|             coalesce(user_count_table.value, 0) user_profile_count, | ||||
|             coalesce(bot_count_table.value, 0) bot_count | ||||
|         FROM | ||||
|             zerver_realm as realm | ||||
|             LEFT OUTER JOIN ( | ||||
|                 SELECT | ||||
|                     value _14day_active_humans, | ||||
|                     realm_id | ||||
|                 from | ||||
|                     analytics_realmcount | ||||
|                 WHERE | ||||
|                     property = 'realm_active_humans::day' | ||||
|                     AND end_time = %(realm_active_humans_end_time)s | ||||
|             ) as _14day_active_humans_table ON realm.id = _14day_active_humans_table.realm_id | ||||
|             LEFT OUTER JOIN ( | ||||
|                 SELECT | ||||
|                     value, | ||||
|                     realm_id | ||||
|                 from | ||||
|                     analytics_realmcount | ||||
|                 WHERE | ||||
|                     property = '7day_actives::day' | ||||
|                     AND end_time = %(seven_day_actives_end_time)s | ||||
|             ) as wau_table ON realm.id = wau_table.realm_id | ||||
|             LEFT OUTER JOIN ( | ||||
|                 SELECT | ||||
|                     value, | ||||
|                     realm_id | ||||
|                 from | ||||
|                     analytics_realmcount | ||||
|                 WHERE | ||||
|                     property = '1day_actives::day' | ||||
|                     AND end_time = %(one_day_actives_end_time)s | ||||
|             ) as dau_table ON realm.id = dau_table.realm_id | ||||
|             LEFT OUTER JOIN ( | ||||
|                 SELECT | ||||
|                     value, | ||||
|                     realm_id | ||||
|                 from | ||||
|                     analytics_realmcount | ||||
|                 WHERE | ||||
|                     property = 'active_users_audit:is_bot:day' | ||||
|                     AND subgroup = 'false' | ||||
|                     AND end_time = %(active_users_audit_end_time)s | ||||
|             ) as user_count_table ON realm.id = user_count_table.realm_id | ||||
|             LEFT OUTER JOIN ( | ||||
|                 SELECT | ||||
|                     value, | ||||
|                     realm_id | ||||
|                 from | ||||
|                     analytics_realmcount | ||||
|                 WHERE | ||||
|                     property = 'active_users_audit:is_bot:day' | ||||
|                     AND subgroup = 'true' | ||||
|                     AND end_time = %(active_users_audit_end_time)s | ||||
|             ) as bot_count_table ON realm.id = bot_count_table.realm_id | ||||
|         WHERE | ||||
|             _14day_active_humans IS NOT NULL | ||||
|             or realm.plan_type = 3 | ||||
|         ORDER BY | ||||
|             dau_count DESC, | ||||
|             string_id ASC | ||||
|     """ | ||||
|     ) | ||||
|  | ||||
|     cursor = connection.cursor() | ||||
|     cursor.execute( | ||||
|         query, | ||||
|         { | ||||
|             "realm_active_humans_end_time": COUNT_STATS[ | ||||
|                 "realm_active_humans::day" | ||||
|             ].last_successful_fill(), | ||||
|             "seven_day_actives_end_time": COUNT_STATS["7day_actives::day"].last_successful_fill(), | ||||
|             "one_day_actives_end_time": COUNT_STATS["1day_actives::day"].last_successful_fill(), | ||||
|             "active_users_audit_end_time": COUNT_STATS[ | ||||
|                 "active_users_audit:is_bot:day" | ||||
|             ].last_successful_fill(), | ||||
|         }, | ||||
|     ) | ||||
|     rows = dictfetchall(cursor) | ||||
|     cursor.close() | ||||
|  | ||||
|     for row in rows: | ||||
|         row["date_created_day"] = row["date_created"].strftime("%Y-%m-%d") | ||||
|         row["age_days"] = int((now - row["date_created"]).total_seconds() / 86400) | ||||
|         row["is_new"] = row["age_days"] < 12 * 7 | ||||
|  | ||||
|     # get messages sent per day | ||||
|     counts = get_realm_day_counts() | ||||
|     for row in rows: | ||||
|         try: | ||||
|             row["history"] = counts[row["string_id"]]["cnts"] | ||||
|         except Exception: | ||||
|             row["history"] = "" | ||||
|  | ||||
|     # estimate annual subscription revenue | ||||
|     total_arr = 0 | ||||
|     if settings.BILLING_ENABLED: | ||||
|         estimated_arrs = estimate_annual_recurring_revenue_by_realm() | ||||
|         realms_with_default_discount = get_realms_with_default_discount_dict() | ||||
|  | ||||
|         for row in rows: | ||||
|             row["plan_type_string"] = get_plan_name(row["plan_type"]) | ||||
|  | ||||
|             string_id = row["string_id"] | ||||
|  | ||||
|             if string_id in estimated_arrs: | ||||
|                 row["arr"] = estimated_arrs[string_id] | ||||
|  | ||||
|             if row["plan_type"] in [Realm.PLAN_TYPE_STANDARD, Realm.PLAN_TYPE_PLUS]: | ||||
|                 row["effective_rate"] = 100 - int(realms_with_default_discount.get(string_id, 0)) | ||||
|             elif row["plan_type"] == Realm.PLAN_TYPE_STANDARD_FREE: | ||||
|                 row["effective_rate"] = 0 | ||||
|             elif ( | ||||
|                 row["plan_type"] == Realm.PLAN_TYPE_LIMITED | ||||
|                 and string_id in realms_with_default_discount | ||||
|             ): | ||||
|                 row["effective_rate"] = 100 - int(realms_with_default_discount[string_id]) | ||||
|             else: | ||||
|                 row["effective_rate"] = "" | ||||
|  | ||||
|         total_arr += sum(estimated_arrs.values()) | ||||
|  | ||||
|     for row in rows: | ||||
|         row["org_type_string"] = get_org_type_display_name(row["org_type"]) | ||||
|  | ||||
|     # formatting | ||||
|     for row in rows: | ||||
|         row["realm_url"] = realm_url_link(row["string_id"]) | ||||
|         row["stats_link"] = realm_stats_link(row["string_id"]) | ||||
|         row["support_link"] = realm_support_link(row["string_id"]) | ||||
|         row["string_id"] = realm_activity_link(row["string_id"]) | ||||
|  | ||||
|     # Count active sites | ||||
|     num_active_sites = sum(row["dau_count"] >= 5 for row in rows) | ||||
|  | ||||
|     # create totals | ||||
|     total_dau_count = 0 | ||||
|     total_user_profile_count = 0 | ||||
|     total_bot_count = 0 | ||||
|     total_wau_count = 0 | ||||
|     for row in rows: | ||||
|         total_dau_count += int(row["dau_count"]) | ||||
|         total_user_profile_count += int(row["user_profile_count"]) | ||||
|         total_bot_count += int(row["bot_count"]) | ||||
|         total_wau_count += int(row["wau_count"]) | ||||
|  | ||||
|     total_row = dict( | ||||
|         string_id="Total", | ||||
|         plan_type_string="", | ||||
|         org_type_string="", | ||||
|         effective_rate="", | ||||
|         arr=total_arr, | ||||
|         realm_url="", | ||||
|         stats_link="", | ||||
|         support_link="", | ||||
|         date_created_day="", | ||||
|         dau_count=total_dau_count, | ||||
|         user_profile_count=total_user_profile_count, | ||||
|         bot_count=total_bot_count, | ||||
|         wau_count=total_wau_count, | ||||
|     ) | ||||
|  | ||||
|     rows.insert(0, total_row) | ||||
|  | ||||
|     content = loader.render_to_string( | ||||
|         "analytics/realm_summary_table.html", | ||||
|         dict( | ||||
|             rows=rows, | ||||
|             num_active_sites=num_active_sites, | ||||
|             utctime=now.strftime("%Y-%m-%d %H:%M %Z"), | ||||
|             billing_enabled=settings.BILLING_ENABLED, | ||||
|         ), | ||||
|     ) | ||||
|     return content | ||||
|  | ||||
|  | ||||
| @require_server_admin | ||||
| @has_request_variables | ||||
| def get_installation_activity(request: HttpRequest) -> HttpResponse: | ||||
|     content: str = realm_summary_table() | ||||
|     title = "Installation activity" | ||||
|  | ||||
|     return render( | ||||
|         request, | ||||
|         "analytics/activity_details_template.html", | ||||
|         context=dict(data=content, title=title, is_home=True), | ||||
|     ) | ||||
|  | ||||
|  | ||||
| @require_server_admin | ||||
| def get_integrations_activity(request: HttpRequest) -> HttpResponse: | ||||
|     title = "Integrations by client" | ||||
|  | ||||
|     query = SQL( | ||||
|         """ | ||||
|         select | ||||
|             case | ||||
|                 when query like '%%external%%' then split_part(query, '/', 5) | ||||
|                 else client.name | ||||
|             end client_name, | ||||
|             realm.string_id, | ||||
|             sum(count) as hits, | ||||
|             max(last_visit) as last_time | ||||
|         from zerver_useractivity ua | ||||
|         join zerver_client client on client.id = ua.client_id | ||||
|         join zerver_userprofile up on up.id = ua.user_profile_id | ||||
|         join zerver_realm realm on realm.id = up.realm_id | ||||
|         where | ||||
|             (query in ('send_message_backend', '/api/v1/send_message') | ||||
|             and client.name not in ('Android', 'ZulipiOS') | ||||
|             and client.name not like 'test: Zulip%%' | ||||
|             ) | ||||
|         or | ||||
|             query like '%%external%%' | ||||
|         group by client_name, string_id | ||||
|         having max(last_visit) > now() - interval '2 week' | ||||
|         order by client_name, string_id | ||||
|     """ | ||||
|     ) | ||||
|  | ||||
|     cols = [ | ||||
|         "Client", | ||||
|         "Realm", | ||||
|         "Hits", | ||||
|         "Last time", | ||||
|     ] | ||||
|  | ||||
|     integrations_activity = get_page(query, cols, title) | ||||
|  | ||||
|     return render( | ||||
|         request, | ||||
|         "analytics/activity_details_template.html", | ||||
|         context=dict( | ||||
|             data=integrations_activity["content"], | ||||
|             title=integrations_activity["title"], | ||||
|             is_home=False, | ||||
|         ), | ||||
|     ) | ||||
| @@ -1,245 +0,0 @@ | ||||
| import itertools | ||||
| from datetime import datetime | ||||
| from typing import Any, Dict, List, Optional, Set, Tuple | ||||
|  | ||||
| from django.db import connection | ||||
| from django.db.models import QuerySet | ||||
| from django.http import HttpRequest, HttpResponse, HttpResponseNotFound | ||||
| from django.shortcuts import render | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from psycopg2.sql import SQL | ||||
|  | ||||
| from analytics.views.activity_common import ( | ||||
|     format_date_for_activity_reports, | ||||
|     get_user_activity_summary, | ||||
|     make_table, | ||||
|     realm_stats_link, | ||||
|     user_activity_link, | ||||
| ) | ||||
| from zerver.decorator import require_server_admin | ||||
| from zerver.models import Realm, UserActivity | ||||
|  | ||||
|  | ||||
def get_user_activity_records_for_realm(realm: str, is_bot: bool) -> QuerySet[UserActivity]:
    """Fetch activity rows for the active users (humans or bots) of a realm.

    Rows come back ordered by delivery email (most recent visit first
    within a user) and restricted to the columns the activity report
    pages actually render.
    """
    needed_columns = (
        "user_profile__full_name",
        "user_profile__delivery_email",
        "query",
        "client__name",
        "count",
        "last_visit",
    )
    return (
        UserActivity.objects.filter(
            user_profile__realm__string_id=realm,
            user_profile__is_active=True,
            user_profile__is_bot=is_bot,
        )
        .order_by("user_profile__delivery_email", "-last_visit")
        .select_related("user_profile", "client")
        .only(*needed_columns)
    )
|  | ||||
|  | ||||
def realm_user_summary_table(
    all_records: QuerySet[UserActivity], admin_emails: Set[str]
) -> Tuple[Dict[str, Any], str]:
    """Build the per-user "Summary" table for a realm's activity page.

    Returns (user_records, content): user_records maps each delivery
    email to its activity-summary dict (the caller reuses it to build the
    Clients table), and content is the rendered HTML table.
    """
    user_records = {}

    def by_email(record: UserActivity) -> str:
        return record.user_profile.delivery_email

    # all_records is ordered by delivery email (see
    # get_user_activity_records_for_realm), so groupby yields exactly one
    # group per user.
    for email, records in itertools.groupby(all_records, by_email):
        user_records[email] = get_user_activity_summary(list(records))

    def get_last_visit(user_summary: Dict[str, Dict[str, datetime]], k: str) -> Optional[datetime]:
        if k in user_summary:
            return user_summary[k]["last_visit"]
        else:
            return None

    def get_count(user_summary: Dict[str, Dict[str, str]], k: str) -> str:
        if k in user_summary:
            return user_summary[k]["count"]
        else:
            return ""

    def is_recent(val: datetime) -> bool:
        # "Recently active" means heard from within the last five minutes.
        age = timezone_now() - val
        return age.total_seconds() < 5 * 60

    rows = []
    for email, user_summary in user_records.items():
        email_link = user_activity_link(email, user_summary["user_profile_id"])
        sent_count = get_count(user_summary, "send")
        cells = [user_summary["name"], email_link, sent_count]
        row_class = ""
        # One cell per activity category, matching the `cols` order below
        # (from "Heard from" onward).
        for field in ["use", "send", "pointer", "desktop", "ZulipiOS", "Android"]:
            visit = get_last_visit(user_summary, field)
            if field == "use":
                # CSS row classes highlight recently active users and admins.
                if visit and is_recent(visit):
                    row_class += " recently_active"
                if email in admin_emails:
                    row_class += " admin"
            val = format_date_for_activity_reports(visit)
            cells.append(val)
        row = dict(cells=cells, row_class=row_class)
        rows.append(row)

    def by_used_time(row: Dict[str, Any]) -> str:
        # cells[3] is the formatted "Heard from" timestamp; presumably the
        # format sorts chronologically as a string — see
        # format_date_for_activity_reports.
        return row["cells"][3]

    rows = sorted(rows, key=by_used_time, reverse=True)

    cols = [
        "Name",
        "Email",
        "Total sent",
        "Heard from",
        "Message sent",
        "Pointer motion",
        "Desktop",
        "ZulipiOS",
        "Android",
    ]

    title = "Summary"

    content = make_table(title, cols, rows, has_row_class=True)
    return user_records, content
|  | ||||
|  | ||||
def realm_client_table(user_summaries: Dict[str, Dict[str, Any]]) -> str:
    """Render the "Clients" table: one row per (user, client) pair.

    Every key of a user summary that is not one of the bookkeeping keys
    below is treated as a client name; rows are ordered by the formatted
    last-visit timestamp, newest first.
    """
    non_client_keys = frozenset(
        [
            "internal",
            "name",
            "user_profile_id",
            "use",
            "send",
            "pointer",
            "website",
            "desktop",
        ]
    )

    table_rows = []
    for email, summary in user_summaries.items():
        link = user_activity_link(email, summary["user_profile_id"])
        full_name = summary["name"]
        for client_name, usage in summary.items():
            if client_name in non_client_keys:
                continue
            table_rows.append(
                [
                    format_date_for_activity_reports(usage["last_visit"]),
                    client_name,
                    full_name,
                    link,
                    usage["count"],
                ]
            )

    table_rows.sort(key=lambda row: row[0], reverse=True)

    headers = [
        "Last visit",
        "Client",
        "Name",
        "Email",
        "Count",
    ]

    return make_table("Clients", headers, table_rows)
|  | ||||
|  | ||||
def sent_messages_report(realm: str) -> str:
    """Render a table of daily human vs. bot message counts for a realm.

    Covers the trailing two weeks; days with no traffic still appear
    (with NULL counts) thanks to the generate_series left join.
    """
    title = "Recently sent messages for " + realm

    cols = [
        "Date",
        "Humans",
        "Bots",
    ]

    # Uses index: zerver_message_realm_date_sent
    query = SQL(
        """
        select
            series.day::date,
            user_messages.humans,
            user_messages.bots
        from (
            select generate_series(
                (now()::date - interval '2 week'),
                now()::date,
                interval '1 day'
            ) as day
        ) as series
        left join (
            select
                date_sent::date date_sent,
                count(*) filter (where not up.is_bot) as humans,
                count(*) filter (where up.is_bot) as bots
            from zerver_message m
            join zerver_userprofile up on up.id = m.sender_id
            join zerver_realm r on r.id = up.realm_id
            where
                r.string_id = %s
            and
                date_sent > now() - interval '2 week'
            and
                m.realm_id = r.id
            group by
                date_sent::date
            order by
                date_sent::date
        ) user_messages on
            series.day = user_messages.date_sent
    """
    )
    # Use the cursor as a context manager so it is closed even if
    # execute()/fetchall() raises; the previous version leaked the cursor
    # on any database error.
    with connection.cursor() as cursor:
        cursor.execute(query, [realm])
        rows = cursor.fetchall()

    return make_table(title, cols, rows)
|  | ||||
|  | ||||
@require_server_admin
def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse:
    """Serve the per-realm activity page: Humans, Bots, Clients, History tabs."""
    try:
        target_realm = Realm.objects.get(string_id=realm_str)
    except Realm.DoesNotExist:
        return HttpResponseNotFound()

    admin_emails = {admin.delivery_email for admin in target_realm.get_human_admin_users()}

    data: List[Tuple[str, str]] = []
    all_user_records: Dict[str, Any] = {}

    # One summary tab each for human users and bots; accumulate the
    # per-user summaries for the combined Clients tab below.
    for is_bot, tab_title in ((False, "Humans"), (True, "Bots")):
        records = get_user_activity_records_for_realm(realm_str, is_bot)
        user_records, content = realm_user_summary_table(records, admin_emails)
        all_user_records.update(user_records)
        data.append((tab_title, content))

    data.append(("Clients", realm_client_table(all_user_records)))
    data.append(("History", sent_messages_report(realm_str)))

    return render(
        request,
        "analytics/activity.html",
        context=dict(
            data=data,
            realm_stats_link=realm_stats_link(realm_str),
            title=realm_str,
        ),
    )
| @@ -1,59 +0,0 @@ | ||||
| from django.http import HttpRequest, HttpResponse | ||||
| from django.shortcuts import render | ||||
| from psycopg2.sql import SQL | ||||
|  | ||||
| from analytics.views.activity_common import get_page | ||||
| from zerver.decorator import require_server_admin | ||||
|  | ||||
|  | ||||
@require_server_admin
def get_remote_server_activity(request: HttpRequest) -> HttpResponse:
    """Serve the "Remote servers" activity page.

    Lists every registered remote Zulip server with its latest analytics
    user count, mobile push user count, and last analytics update time.
    """
    title = "Remote servers"

    # icount: per-server peak daily active (non-bot) user count and the
    # newest end_time we have analytics for; remote_push_devices: distinct
    # users with registered push tokens per server.
    query = SQL(
        """
        with icount as (
            select
                server_id,
                max(value) as max_value,
                max(end_time) as max_end_time
            from zilencer_remoteinstallationcount
            where
                property='active_users:is_bot:day'
                and subgroup='false'
            group by server_id
            ),
        remote_push_devices as (
            select server_id, count(distinct(user_id)) as push_user_count from zilencer_remotepushdevicetoken
            group by server_id
        )
        select
            rserver.id,
            rserver.hostname,
            rserver.contact_email,
            max_value,
            push_user_count,
            max_end_time
        from zilencer_remotezulipserver rserver
        left join icount on icount.server_id = rserver.id
        left join remote_push_devices on remote_push_devices.server_id = rserver.id
        order by max_value DESC NULLS LAST, push_user_count DESC NULLS LAST
    """
    )

    cols = [
        "ID",
        "Hostname",
        "Contact email",
        "Analytics users",
        "Mobile users",
        "Last update time",
    ]

    # totals_columns indexes refer to "Analytics users" and "Mobile users".
    remote_servers = get_page(query, cols, title, totals_columns=[3, 4])

    return render(
        request,
        "analytics/activity_details_template.html",
        context=dict(data=remote_servers["content"], title=remote_servers["title"], is_home=False),
    )
| @@ -1,578 +0,0 @@ | ||||
| import logging | ||||
| from collections import defaultdict | ||||
| from datetime import datetime, timedelta, timezone | ||||
| from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar, Union, cast | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.db.models import QuerySet | ||||
| from django.http import HttpRequest, HttpResponse, HttpResponseNotFound | ||||
| from django.shortcuts import render | ||||
| from django.utils import translation | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from django.utils.translation import gettext as _ | ||||
| from typing_extensions import TypeAlias | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS, CountStat | ||||
| from analytics.lib.time_utils import time_range | ||||
| from analytics.models import ( | ||||
|     BaseCount, | ||||
|     InstallationCount, | ||||
|     RealmCount, | ||||
|     StreamCount, | ||||
|     UserCount, | ||||
|     installation_epoch, | ||||
| ) | ||||
| from zerver.decorator import ( | ||||
|     require_non_guest_user, | ||||
|     require_server_admin, | ||||
|     require_server_admin_api, | ||||
|     to_utc_datetime, | ||||
|     zulip_login_required, | ||||
| ) | ||||
| from zerver.lib.exceptions import JsonableError | ||||
| from zerver.lib.i18n import get_and_set_request_language, get_language_translation_data | ||||
| from zerver.lib.request import REQ, has_request_variables | ||||
| from zerver.lib.response import json_success | ||||
| from zerver.lib.streams import access_stream_by_id | ||||
| from zerver.lib.timestamp import convert_to_UTC | ||||
| from zerver.lib.validator import to_non_negative_int | ||||
| from zerver.models import Client, Realm, Stream, UserProfile, get_realm | ||||
|  | ||||
| if settings.ZILENCER_ENABLED: | ||||
|     from zilencer.models import RemoteInstallationCount, RemoteRealmCount, RemoteZulipServer | ||||
|  | ||||
| MAX_TIME_FOR_FULL_ANALYTICS_GENERATION = timedelta(days=1, minutes=30) | ||||
|  | ||||
|  | ||||
def is_analytics_ready(realm: Realm) -> bool:
    """True once the realm is old enough for a full analytics pass to have run."""
    realm_age = timezone_now() - realm.date_created
    return realm_age > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION
|  | ||||
|  | ||||
def render_stats(
    request: HttpRequest,
    data_url_suffix: str,
    realm: Optional[Realm],
    *,
    title: Optional[str] = None,
    for_installation: bool = False,
    remote: bool = False,
    analytics_ready: bool = True,
) -> HttpResponse:
    """Render the /stats page shell.

    Args:
        data_url_suffix: appended to the chart-data endpoint URLs by the
            frontend to select realm/installation/remote scope.
        realm: the realm whose usage details to show, or None for
            installation-wide and remote views (which must pass a title).
        title: page heading; defaults to the realm's name or string_id.
        for_installation/remote: forwarded to the frontend via page_params.
        analytics_ready: False shows the "data still generating" state.
    """
    assert request.user.is_authenticated

    if realm is not None:
        # Same query to get guest user count as in get_seat_count in corporate/lib/stripe.py.
        guest_users = UserProfile.objects.filter(
            realm=realm, is_active=True, is_bot=False, role=UserProfile.ROLE_GUEST
        ).count()
        space_used = realm.currently_used_upload_space_bytes()
        # Replaces the old `if title: pass / else:` anti-pattern.
        if not title:
            title = realm.name or realm.string_id
    else:
        # Installation/remote views have no realm to derive a title from.
        assert title
        guest_users = None
        space_used = None

    page_params = dict(
        data_url_suffix=data_url_suffix,
        for_installation=for_installation,
        remote=remote,
        upload_space_used=space_used,
        guest_users=guest_users,
    )

    request_language = get_and_set_request_language(
        request,
        request.user.default_language,
        translation.get_language_from_path(request.path_info),
    )

    page_params["translation_data"] = get_language_translation_data(request_language)

    return render(
        request,
        "analytics/stats.html",
        context=dict(
            target_name=title,
            page_params=page_params,
            analytics_ready=analytics_ready,
        ),
    )
|  | ||||
|  | ||||
@zulip_login_required
def stats(request: HttpRequest) -> HttpResponse:
    """Serve the /stats page for the requesting user's own realm."""
    assert request.user.is_authenticated
    realm = request.user.realm
    if request.user.is_guest:
        # TODO: Make @zulip_login_required pass the UserProfile so we
        # can use @require_member_or_admin
        raise JsonableError(_("Not allowed for guest users"))
    return render_stats(request, "", realm, analytics_ready=is_analytics_ready(realm))
|  | ||||
|  | ||||
@require_server_admin
@has_request_variables
def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse:
    """Serve the stats page for an arbitrary realm (server admins only)."""
    try:
        target_realm = get_realm(realm_str)
    except Realm.DoesNotExist:
        return HttpResponseNotFound()

    return render_stats(
        request,
        f"/realm/{realm_str}",
        target_realm,
        analytics_ready=is_analytics_ready(target_realm),
    )
|  | ||||
|  | ||||
@require_server_admin
@has_request_variables
def stats_for_remote_realm(
    request: HttpRequest, remote_server_id: int, remote_realm_id: int
) -> HttpResponse:
    """Serve the stats page for a realm hosted on a remote (zilencer) server."""
    assert settings.ZILENCER_ENABLED
    remote_server = RemoteZulipServer.objects.get(id=remote_server_id)
    data_url_suffix = f"/remote/{remote_server.id}/realm/{remote_realm_id}"
    page_title = f"Realm {remote_realm_id} on server {remote_server.hostname}"
    return render_stats(request, data_url_suffix, None, title=page_title)
|  | ||||
|  | ||||
@require_server_admin_api
@has_request_variables
def get_chart_data_for_realm(
    request: HttpRequest, /, user_profile: UserProfile, realm_str: str, **kwargs: Any
) -> HttpResponse:
    """Chart data for an arbitrary realm, looked up by its string_id."""
    try:
        target_realm = get_realm(realm_str)
    except Realm.DoesNotExist:
        raise JsonableError(_("Invalid organization"))

    return get_chart_data(request, user_profile, realm=target_realm, **kwargs)
|  | ||||
|  | ||||
@require_non_guest_user
@has_request_variables
def get_chart_data_for_stream(
    request: HttpRequest, /, user_profile: UserProfile, stream_id: int
) -> HttpResponse:
    """Chart data for a single stream the user is allowed to access."""
    # access_stream_by_id raises if the user cannot access the stream;
    # the subscription half of its return value is unused here.
    stream, _sub = access_stream_by_id(
        user_profile,
        stream_id,
        require_active=True,
        allow_realm_admin=True,
    )
    return get_chart_data(request, user_profile, stream=stream)
|  | ||||
|  | ||||
@require_server_admin_api
@has_request_variables
def get_chart_data_for_remote_realm(
    request: HttpRequest,
    /,
    user_profile: UserProfile,
    remote_server_id: int,
    remote_realm_id: int,
    **kwargs: Any,
) -> HttpResponse:
    """Chart data for one realm on a remote server (zilencer only)."""
    assert settings.ZILENCER_ENABLED
    remote_server = RemoteZulipServer.objects.get(id=remote_server_id)
    return get_chart_data(
        request,
        user_profile,
        server=remote_server,
        remote=True,
        remote_realm_id=int(remote_realm_id),
        **kwargs,
    )
|  | ||||
|  | ||||
@require_server_admin
def stats_for_installation(request: HttpRequest) -> HttpResponse:
    """Serve the installation-wide stats page (server admins only)."""
    assert request.user.is_authenticated
    return render_stats(
        request, "/installation", None, title="installation", for_installation=True
    )
|  | ||||
|  | ||||
@require_server_admin
def stats_for_remote_installation(request: HttpRequest, remote_server_id: int) -> HttpResponse:
    """Serve the stats page aggregating a whole remote installation."""
    assert settings.ZILENCER_ENABLED
    remote_server = RemoteZulipServer.objects.get(id=remote_server_id)
    return render_stats(
        request,
        f"/remote/{remote_server.id}/installation",
        None,
        title=f"remote installation {remote_server.hostname}",
        for_installation=True,
        remote=True,
    )
|  | ||||
|  | ||||
@require_server_admin_api
@has_request_variables
def get_chart_data_for_installation(
    request: HttpRequest, /, user_profile: UserProfile, chart_name: str = REQ(), **kwargs: Any
) -> HttpResponse:
    # Thin wrapper: chart data aggregated over the whole installation.
    # chart_name is extracted here only to validate its presence;
    # get_chart_data re-extracts it from the request via its own REQ().
    return get_chart_data(request, user_profile, for_installation=True, **kwargs)
|  | ||||
|  | ||||
@require_server_admin_api
@has_request_variables
def get_chart_data_for_remote_installation(
    request: HttpRequest,
    /,
    user_profile: UserProfile,
    remote_server_id: int,
    chart_name: str = REQ(),
    **kwargs: Any,
) -> HttpResponse:
    """Chart data aggregated over an entire remote installation (zilencer only).

    chart_name is re-extracted by get_chart_data from the request itself.
    """
    assert settings.ZILENCER_ENABLED
    remote_server = RemoteZulipServer.objects.get(id=remote_server_id)
    return get_chart_data(
        request,
        user_profile,
        for_installation=True,
        remote=True,
        server=remote_server,
        **kwargs,
    )
|  | ||||
|  | ||||
@require_non_guest_user
@has_request_variables
def get_chart_data(
    request: HttpRequest,
    user_profile: UserProfile,
    chart_name: str = REQ(),
    min_length: Optional[int] = REQ(converter=to_non_negative_int, default=None),
    start: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    end: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    # These last several parameters are only used by functions
    # wrapping get_chart_data; the callers are responsible for
    # parsing/validation/authorization for them.
    realm: Optional[Realm] = None,
    for_installation: bool = False,
    remote: bool = False,
    remote_realm_id: Optional[int] = None,
    server: Optional["RemoteZulipServer"] = None,
    stream: Optional[Stream] = None,
) -> HttpResponse:
    """Return JSON time-series data for one named chart on the /stats page.

    Selects the CountStats and aggregation tables for chart_name, computes
    a [start, end] range (validated, or derived from fill state / realm
    creation), and returns per-subgroup series keyed by aggregation level
    ("everyone"/"user"), plus end_times, frequency, and display_order.

    Raises JsonableError for unknown charts, inverted time ranges, a
    missing stream for messages_sent_by_stream, or absent analytics data.
    """
    # Pick the aggregation table: (Remote)InstallationCount for
    # installation-wide views, (Remote)RealmCount otherwise.
    TableType: TypeAlias = Union[
        Type["RemoteInstallationCount"],
        Type[InstallationCount],
        Type["RemoteRealmCount"],
        Type[RealmCount],
    ]
    if for_installation:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table: TableType = RemoteInstallationCount
            assert server is not None
        else:
            aggregate_table = InstallationCount
    else:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table = RemoteRealmCount
            assert server is not None
            assert remote_realm_id is not None
        else:
            aggregate_table = RealmCount

    tables: Union[
        Tuple[TableType], Tuple[TableType, Type[UserCount]], Tuple[TableType, Type[StreamCount]]
    ]

    # Per-chart configuration: which stats to read, which tables to query,
    # how subgroup values map to display labels, how to order the labels,
    # and whether subgroups with no data should still appear.
    if chart_name == "number_of_humans":
        stats = [
            COUNT_STATS["1day_actives::day"],
            COUNT_STATS["realm_active_humans::day"],
            COUNT_STATS["active_users_audit:is_bot:day"],
        ]
        tables = (aggregate_table,)
        subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = {
            stats[0]: {None: "_1day"},
            stats[1]: {None: "_15day"},
            stats[2]: {"false": "all_time"},
        }
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_over_time":
        stats = [COUNT_STATS["messages_sent:is_bot:hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_message_type":
        stats = [COUNT_STATS["messages_sent:message_type:day"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {
            stats[0]: {
                "public_stream": _("Public streams"),
                "private_stream": _("Private streams"),
                "private_message": _("Direct messages"),
                "huddle_message": _("Group direct messages"),
            }
        }
        labels_sort_function = lambda data: sort_by_totals(data["everyone"])
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_client":
        stats = [COUNT_STATS["messages_sent:client:day"]]
        tables = (aggregate_table, UserCount)
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {
            stats[0]: {str(id): name for id, name in Client.objects.values_list("id", "name")}
        }
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    elif chart_name == "messages_read_over_time":
        stats = [COUNT_STATS["messages_read::hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {None: "read"}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_stream":
        if stream is None:
            raise JsonableError(
                _("Missing stream for chart: {chart_name}").format(chart_name=chart_name)
            )
        stats = [COUNT_STATS["messages_in_stream:is_bot:day"]]
        tables = (aggregate_table, StreamCount)
        subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
        labels_sort_function = None
        include_empty_subgroups = True
    else:
        raise JsonableError(_("Unknown chart name: {chart_name}").format(chart_name=chart_name))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(
            _("Start time is later than end time. Start: {start}, End: {end}").format(
                start=start,
                end=end,
            )
        )

    if realm is None:
        # Note that this value is invalid for Remote tables; be
        # careful not to access it in those code paths.
        realm = user_profile.realm

    if remote:
        # For remote servers, we don't have fillstate data, and thus
        # should simply use the first and last data points for the
        # table.
        assert server is not None
        assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
        aggregate_table_remote = cast(
            Union[Type[RemoteInstallationCount], Type[RemoteRealmCount]], aggregate_table
        )  # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
        if not aggregate_table_remote.objects.filter(server=server).exists():
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator.")
            )
        if start is None:
            first = aggregate_table_remote.objects.filter(server=server).first()
            assert first is not None
            start = first.end_time
        if end is None:
            last = aggregate_table_remote.objects.filter(server=server).last()
            assert last is not None
            end = last.end_time
    else:
        # Otherwise, we can use tables on the current server to
        # determine a nice range, and some additional validation.
        if start is None:
            if for_installation:
                start = installation_epoch()
            else:
                start = realm.date_created
        if end is None:
            end = max(
                stat.last_successful_fill() or datetime.min.replace(tzinfo=timezone.utc)
                for stat in stats
            )

        if start > end and (timezone_now() - start > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION):
            logging.warning(
                "User from realm %s attempted to access /stats, but the computed "
                "start time: %s (creation of realm or installation) is later than the computed "
                "end time: %s (last successful analytics update). Is the "
                "analytics cron job running?",
                realm.string_id,
                start,
                end,
            )
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator.")
            )

    # All stats for one chart must share a frequency, since they share
    # one end_times axis.
    assert len({stat.frequency for stat in stats}) == 1
    end_times = time_range(start, end, stats[0].frequency, min_length)
    data: Dict[str, Any] = {
        "end_times": [int(end_time.timestamp()) for end_time in end_times],
        "frequency": stats[0].frequency,
    }

    # Which top-level key of the response each table's series go under.
    aggregation_level = {
        InstallationCount: "everyone",
        RealmCount: "everyone",
        UserCount: "user",
        StreamCount: "everyone",
    }
    if settings.ZILENCER_ENABLED:
        aggregation_level[RemoteInstallationCount] = "everyone"
        aggregation_level[RemoteRealmCount] = "everyone"

    # -1 is a placeholder value, since there is no relevant filtering on InstallationCount
    id_value = {
        InstallationCount: -1,
        RealmCount: realm.id,
        UserCount: user_profile.id,
    }
    if stream is not None:
        id_value[StreamCount] = stream.id

    if settings.ZILENCER_ENABLED:
        if server is not None:
            id_value[RemoteInstallationCount] = server.id
        # TODO: RemoteRealmCount logic doesn't correctly handle
        # filtering by server_id as well.
        if remote_realm_id is not None:
            id_value[RemoteRealmCount] = remote_realm_id

    # Collect the actual time series, one dict of label -> counts per
    # (table, stat) pair, merged per aggregation level.
    for table in tables:
        data[aggregation_level[table]] = {}
        for stat in stats:
            data[aggregation_level[table]].update(
                get_time_series_by_subgroup(
                    stat,
                    table,
                    id_value[table],
                    end_times,
                    subgroup_to_label[stat],
                    include_empty_subgroups,
                )
            )

    if labels_sort_function is not None:
        data["display_order"] = labels_sort_function(data)
    else:
        data["display_order"] = None
    return json_success(request, data=data)
|  | ||||
|  | ||||
def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
    """Return the labels ordered by descending total of their value arrays.

    Ties are broken by label, descending, matching a reverse sort on
    (total, label) pairs.
    """
    return sorted(
        value_arrays,
        key=lambda label: (sum(value_arrays[label]), label),
        reverse=True,
    )
|  | ||||
|  | ||||
# For any given user, we want to show a fixed set of clients in the chart,
# regardless of the time aggregation or whether we're looking at realm or
# user data. This fixed set ideally includes the clients most important in
# understanding the realm's traffic and the user's traffic. This function
# tries to rank the clients so that taking the first N elements of the
# sorted list has a reasonable chance of doing so.
def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
    """Rank client labels by blending realm-wide and per-user ordering.

    A label's score is its position in the realm ordering, lowered to
    (user position - 0.1) when its user position is better, so the user's
    own top clients are nudged ahead on ties.
    """
    scores: Dict[str, float] = {}
    for position, label in enumerate(sort_by_totals(data["everyone"])):
        scores[label] = position
    for position, label in enumerate(sort_by_totals(data["user"])):
        user_score = position - 0.1
        if label not in scores or user_score < scores[label]:
            scores[label] = user_score
    return sorted(scores, key=lambda label: scores[label])
|  | ||||
|  | ||||
CountT = TypeVar("CountT", bound=BaseCount)


def table_filtered_to_id(table: Type[CountT], key_id: int) -> QuerySet[CountT]:
    """Return the rows of `table` belonging to the entity identified by key_id.

    InstallationCount has no id column to filter on, so key_id is ignored
    there (callers pass a placeholder).
    """
    if table == InstallationCount:
        return table._default_manager.all()
    if table == RealmCount:
        return table._default_manager.filter(realm_id=key_id)
    if table == UserCount:
        return table._default_manager.filter(user_id=key_id)
    if table == StreamCount:
        return table._default_manager.filter(stream_id=key_id)
    # The remote count models are only imported when ZILENCER is enabled, so
    # the settings check must come before referencing those names.
    if settings.ZILENCER_ENABLED:
        if table == RemoteInstallationCount:
            return table._default_manager.filter(server_id=key_id)
        if table == RemoteRealmCount:
            return table._default_manager.filter(realm_id=key_id)
    raise AssertionError(f"Unknown table: {table}")
|  | ||||
|  | ||||
def client_label_map(name: str) -> str:
    """Map a raw Client name to a human-friendly label for the charts.

    Unknown names are returned unchanged.
    """
    exact_names = {
        "website": "Web app",
        "ZulipElectron": "Desktop app",
        "ZulipTerminal": "Terminal app",
        "ZulipAndroid": "Old Android app",
        "ZulipiOS": "Old iOS app",
        "ZulipMobile": "Mobile app",
        "ZulipPython": "Python API",
        "API: Python": "Python API",
    }
    if name in exact_names:
        return exact_names[name]
    if name.startswith("desktop app"):
        return "Old desktop app"
    # "ZulipFooWebhook" -> "Foo webhook"
    if name.startswith("Zulip") and name.endswith("Webhook"):
        return name[len("Zulip") : -len("Webhook")] + " webhook"
    return name
|  | ||||
|  | ||||
def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[int]]:
    """Collapse client labels that map to the same display name.

    Arrays whose labels map to the same client_label_map() output are
    summed element-wise; all arrays are assumed to share the same length
    (one entry per end_time).  The input is not mutated.
    """
    mapped_arrays: Dict[str, List[int]] = {}
    for label, array in value_arrays.items():
        mapped_label = client_label_map(label)
        if mapped_label in mapped_arrays:
            # Element-wise accumulate into the existing series.
            existing = mapped_arrays[mapped_label]
            for i, value in enumerate(array):
                existing[i] += value
        else:
            # Copy so later accumulation never mutates the caller's list.
            mapped_arrays[mapped_label] = list(array)
    return mapped_arrays
|  | ||||
|  | ||||
def get_time_series_by_subgroup(
    stat: CountStat,
    table: Type[BaseCount],
    key_id: int,
    end_times: List[datetime],
    subgroup_to_label: Dict[Optional[str], str],
    include_empty_subgroups: bool,
) -> Dict[str, List[int]]:
    """Build {label: [value per end_time]} series for one stat on one table.

    End times with no recorded row read as 0.  Subgroups absent from the
    data are omitted unless include_empty_subgroups is set.
    """
    queryset = (
        table_filtered_to_id(table, key_id)
        .filter(property=stat.property)
        .values_list("subgroup", "end_time", "value")
    )
    # Nested defaultdicts so missing (subgroup, end_time) pairs default to 0.
    value_dicts: Dict[Optional[str], Dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
    for subgroup, end_time, value in queryset:
        value_dicts[subgroup][end_time] = value
    value_arrays = {}
    for subgroup, label in subgroup_to_label.items():
        if (subgroup in value_dicts) or include_empty_subgroups:
            value_arrays[label] = [value_dicts[subgroup][end_time] for end_time in end_times]

    if stat == COUNT_STATS["messages_sent:client:day"]:
        # HACK: We rewrite these arrays to collapse the Client objects
        # with similar names into a single sum, and generally give
        # them better names
        return rewrite_client_arrays(value_arrays)
    return value_arrays
| @@ -1,465 +0,0 @@ | ||||
| import urllib | ||||
| from contextlib import suppress | ||||
| from dataclasses import dataclass | ||||
| from datetime import timedelta | ||||
| from decimal import Decimal | ||||
| from typing import Any, Dict, Iterable, List, Optional, Union | ||||
| from urllib.parse import urlencode | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.core.exceptions import ValidationError | ||||
| from django.core.validators import URLValidator | ||||
| from django.db.models import Q | ||||
| from django.http import HttpRequest, HttpResponse, HttpResponseRedirect | ||||
| from django.shortcuts import render | ||||
| from django.urls import reverse | ||||
| from django.utils.timesince import timesince | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from django.utils.translation import gettext as _ | ||||
|  | ||||
| from confirmation.models import Confirmation, confirmation_url | ||||
| from confirmation.settings import STATUS_USED | ||||
| from zerver.actions.create_realm import do_change_realm_subdomain | ||||
| from zerver.actions.realm_settings import ( | ||||
|     do_change_realm_org_type, | ||||
|     do_change_realm_plan_type, | ||||
|     do_deactivate_realm, | ||||
|     do_scrub_realm, | ||||
|     do_send_realm_reactivation_email, | ||||
| ) | ||||
| from zerver.actions.users import do_delete_user_preserving_messages | ||||
| from zerver.decorator import require_server_admin | ||||
| from zerver.forms import check_subdomain_available | ||||
| from zerver.lib.exceptions import JsonableError | ||||
| from zerver.lib.realm_icon import realm_icon_url | ||||
| from zerver.lib.request import REQ, has_request_variables | ||||
| from zerver.lib.subdomains import get_subdomain_from_hostname | ||||
| from zerver.lib.validator import check_bool, check_string_in, to_decimal, to_non_negative_int | ||||
| from zerver.models import ( | ||||
|     MultiuseInvite, | ||||
|     PreregistrationRealm, | ||||
|     PreregistrationUser, | ||||
|     Realm, | ||||
|     RealmReactivationStatus, | ||||
|     UserProfile, | ||||
|     get_org_type_display_name, | ||||
|     get_realm, | ||||
|     get_user_profile_by_id, | ||||
| ) | ||||
| from zerver.views.invite import get_invitee_emails_set | ||||
|  | ||||
| if settings.ZILENCER_ENABLED: | ||||
|     from zilencer.lib.remote_counts import MissingDataError, compute_max_monthly_messages | ||||
|     from zilencer.models import RemoteZulipServer | ||||
|  | ||||
| if settings.BILLING_ENABLED: | ||||
|     from corporate.lib.stripe import ( | ||||
|         RealmBillingSession, | ||||
|         downgrade_at_the_end_of_billing_cycle, | ||||
|         downgrade_now_without_creating_additional_invoices, | ||||
|         get_latest_seat_count, | ||||
|         switch_realm_from_standard_to_plus_plan, | ||||
|         void_all_open_invoices, | ||||
|     ) | ||||
|     from corporate.lib.support import ( | ||||
|         approve_realm_sponsorship, | ||||
|         attach_discount_to_realm, | ||||
|         get_discount_for_realm, | ||||
|         update_realm_billing_method, | ||||
|         update_realm_sponsorship_status, | ||||
|     ) | ||||
|     from corporate.models import ( | ||||
|         Customer, | ||||
|         CustomerPlan, | ||||
|         get_current_plan_by_realm, | ||||
|         get_customer_by_realm, | ||||
|     ) | ||||
|  | ||||
|  | ||||
def get_plan_name(plan_type: int) -> str:
    """Translate a Realm.PLAN_TYPE_* constant into its display name.

    Raises KeyError for unrecognized plan types.
    """
    plan_names = {
        Realm.PLAN_TYPE_SELF_HOSTED: "self-hosted",
        Realm.PLAN_TYPE_LIMITED: "limited",
        Realm.PLAN_TYPE_STANDARD: "standard",
        Realm.PLAN_TYPE_STANDARD_FREE: "open source",
        Realm.PLAN_TYPE_PLUS: "plus",
    }
    return plan_names[plan_type]
|  | ||||
|  | ||||
def get_confirmations(
    types: List[int], object_ids: Iterable[int], hostname: Optional[str] = None
) -> List[Dict[str, Any]]:
    """Return display dicts for recent confirmation links of the given types.

    Only confirmations sent within the last 30 days are included.  The
    `hostname` parameter is currently unused in this body; it is kept for
    backward compatibility with existing callers.
    """
    lowest_datetime = timezone_now() - timedelta(days=30)
    confirmations = Confirmation.objects.filter(
        type__in=types, object_id__in=object_ids, date_sent__gte=lowest_datetime
    )
    confirmation_dicts = []
    for confirmation in confirmations:
        realm = confirmation.realm
        content_object = confirmation.content_object
        assert content_object is not None

        # Objects with a `status` field are single-use links; STATUS_USED
        # marks ones that have already been consumed.
        if hasattr(content_object, "status"):
            if content_object.status == STATUS_USED:
                link_status = "Link has been used"
            else:
                link_status = "Link has not been used"
        else:
            link_status = ""

        now = timezone_now()
        if confirmation.expiry_date is None:
            expires_in = "Never"
        elif now < confirmation.expiry_date:
            expires_in = timesince(now, confirmation.expiry_date)
        else:
            expires_in = "Expired"

        url = confirmation_url(confirmation.confirmation_key, realm, confirmation.type)
        confirmation_dicts.append(
            {
                "object": content_object,
                "url": url,
                "type": confirmation.type,
                "link_status": link_status,
                "expires_in": expires_in,
            }
        )
    return confirmation_dicts
|  | ||||
|  | ||||
# Accepted values for the `modify_plan` POST parameter on the support page.
VALID_MODIFY_PLAN_METHODS = [
    "downgrade_at_billing_cycle_end",
    "downgrade_now_without_additional_licenses",
    "downgrade_now_void_open_invoices",
    "upgrade_to_plus",
]

# Accepted values for the `status` POST parameter (realm activation state).
VALID_STATUS_VALUES = [
    "active",
    "deactivated",
]

# Accepted values for the `billing_method` POST parameter.
VALID_BILLING_METHODS = [
    "send_invoice",
    "charge_automatically",
]
|  | ||||
|  | ||||
@dataclass
class PlanData:
    """Billing summary for one realm, assembled for the /support page."""

    customer: Optional["Customer"] = None
    current_plan: Optional["CustomerPlan"] = None
    # licenses / licenses_used are only populated when the realm has a
    # current plan with a ledger entry (see the support view).
    licenses: Optional[int] = None
    licenses_used: Optional[int] = None
|  | ||||
|  | ||||
@require_server_admin
@has_request_variables
def support(
    request: HttpRequest,
    realm_id: Optional[int] = REQ(default=None, converter=to_non_negative_int),
    plan_type: Optional[int] = REQ(default=None, converter=to_non_negative_int),
    discount: Optional[Decimal] = REQ(default=None, converter=to_decimal),
    new_subdomain: Optional[str] = REQ(default=None),
    status: Optional[str] = REQ(default=None, str_validator=check_string_in(VALID_STATUS_VALUES)),
    billing_method: Optional[str] = REQ(
        default=None, str_validator=check_string_in(VALID_BILLING_METHODS)
    ),
    sponsorship_pending: Optional[bool] = REQ(default=None, json_validator=check_bool),
    approve_sponsorship: bool = REQ(default=False, json_validator=check_bool),
    modify_plan: Optional[str] = REQ(
        default=None, str_validator=check_string_in(VALID_MODIFY_PLAN_METHODS)
    ),
    scrub_realm: bool = REQ(default=False, json_validator=check_bool),
    delete_user_by_id: Optional[int] = REQ(default=None, converter=to_non_negative_int),
    query: Optional[str] = REQ("q", default=None),
    org_type: Optional[int] = REQ(default=None, converter=to_non_negative_int),
) -> HttpResponse:
    """Server-admin /support page.

    A GET with `q` searches for realms, users, and confirmation links; a
    POST carries a realm_id plus exactly one field to modify on that realm.
    """
    context: Dict[str, Any] = {}

    # A success message set by a prior redirect (e.g. a subdomain change)
    # is shown once, then cleared from the session.
    if "success_message" in request.session:
        context["success_message"] = request.session["success_message"]
        del request.session["success_message"]

    acting_user = request.user
    assert isinstance(acting_user, UserProfile)
    if settings.BILLING_ENABLED and request.method == "POST":
        # We check that request.POST only has two keys in it: The
        # realm_id and a field to change.
        keys = set(request.POST.keys())
        if "csrfmiddlewaretoken" in keys:
            keys.remove("csrfmiddlewaretoken")
        if len(keys) != 2:
            raise JsonableError(_("Invalid parameters"))

        assert realm_id is not None
        realm = Realm.objects.get(id=realm_id)

        # Exactly one branch below runs (the two-key check above); each
        # records a human-readable success or error message.
        if plan_type is not None:
            current_plan_type = realm.plan_type
            do_change_realm_plan_type(realm, plan_type, acting_user=acting_user)
            msg = f"Plan type of {realm.string_id} changed from {get_plan_name(current_plan_type)} to {get_plan_name(plan_type)} "
            context["success_message"] = msg
        elif org_type is not None:
            current_realm_type = realm.org_type
            do_change_realm_org_type(realm, org_type, acting_user=acting_user)
            msg = f"Org type of {realm.string_id} changed from {get_org_type_display_name(current_realm_type)} to {get_org_type_display_name(org_type)} "
            context["success_message"] = msg
        elif discount is not None:
            current_discount = get_discount_for_realm(realm) or 0
            attach_discount_to_realm(realm, discount, acting_user=acting_user)
            context[
                "success_message"
            ] = f"Discount of {realm.string_id} changed to {discount}% from {current_discount}%."
        elif new_subdomain is not None:
            old_subdomain = realm.string_id
            try:
                check_subdomain_available(new_subdomain)
            except ValidationError as error:
                context["error_message"] = error.message
            else:
                do_change_realm_subdomain(realm, new_subdomain, acting_user=acting_user)
                # Redirect so the page re-searches under the new subdomain;
                # the message survives via the session (see top of function).
                request.session[
                    "success_message"
                ] = f"Subdomain changed from {old_subdomain} to {new_subdomain}"
                return HttpResponseRedirect(
                    reverse("support") + "?" + urlencode({"q": new_subdomain})
                )
        elif status is not None:
            if status == "active":
                do_send_realm_reactivation_email(realm, acting_user=acting_user)
                context[
                    "success_message"
                ] = f"Realm reactivation email sent to admins of {realm.string_id}."
            elif status == "deactivated":
                do_deactivate_realm(realm, acting_user=acting_user)
                context["success_message"] = f"{realm.string_id} deactivated."
        elif billing_method is not None:
            if billing_method == "send_invoice":
                update_realm_billing_method(
                    realm, charge_automatically=False, acting_user=acting_user
                )
                context[
                    "success_message"
                ] = f"Billing method of {realm.string_id} updated to pay by invoice."
            elif billing_method == "charge_automatically":
                update_realm_billing_method(
                    realm, charge_automatically=True, acting_user=acting_user
                )
                context[
                    "success_message"
                ] = f"Billing method of {realm.string_id} updated to charge automatically."
        elif sponsorship_pending is not None:
            if sponsorship_pending:
                update_realm_sponsorship_status(realm, True, acting_user=acting_user)
                context["success_message"] = f"{realm.string_id} marked as pending sponsorship."
            else:
                update_realm_sponsorship_status(realm, False, acting_user=acting_user)
                context["success_message"] = f"{realm.string_id} is no longer pending sponsorship."
        elif approve_sponsorship:
            approve_realm_sponsorship(realm, acting_user=acting_user)
            context["success_message"] = f"Sponsorship approved for {realm.string_id}"
        elif modify_plan is not None:
            if modify_plan == "downgrade_at_billing_cycle_end":
                downgrade_at_the_end_of_billing_cycle(realm)
                context[
                    "success_message"
                ] = f"{realm.string_id} marked for downgrade at the end of billing cycle"
            elif modify_plan == "downgrade_now_without_additional_licenses":
                downgrade_now_without_creating_additional_invoices(realm)
                context[
                    "success_message"
                ] = f"{realm.string_id} downgraded without creating additional invoices"
            elif modify_plan == "downgrade_now_void_open_invoices":
                downgrade_now_without_creating_additional_invoices(realm)
                voided_invoices_count = void_all_open_invoices(realm)
                context[
                    "success_message"
                ] = f"{realm.string_id} downgraded and voided {voided_invoices_count} open invoices"
            elif modify_plan == "upgrade_to_plus":
                switch_realm_from_standard_to_plus_plan(realm)
                context["success_message"] = f"{realm.string_id} upgraded to Plus"
        elif scrub_realm:
            do_scrub_realm(realm, acting_user=acting_user)
            context["success_message"] = f"{realm.string_id} scrubbed."
        elif delete_user_by_id:
            user_profile_for_deletion = get_user_profile_by_id(delete_user_by_id)
            user_email = user_profile_for_deletion.delivery_email
            # Guard against deleting a user from a different realm than the
            # one named in the POST.
            assert user_profile_for_deletion.realm == realm
            do_delete_user_preserving_messages(user_profile_for_deletion)
            context["success_message"] = f"{user_email} in {realm.subdomain} deleted."

    if query:
        # The query box accepts a comma/space-separated mix of emails,
        # subdomains, URLs, and full names.
        key_words = get_invitee_emails_set(query)

        case_insensitive_users_q = Q()
        for key_word in key_words:
            case_insensitive_users_q |= Q(delivery_email__iexact=key_word)
        users = set(UserProfile.objects.filter(case_insensitive_users_q))
        realms = set(Realm.objects.filter(string_id__in=key_words))

        for key_word in key_words:
            try:
                # Key words that parse as URLs are resolved to a realm via
                # their subdomain; anything else is tried as a full name.
                URLValidator()(key_word)
                parse_result = urllib.parse.urlparse(key_word)
                hostname = parse_result.hostname
                assert hostname is not None
                if parse_result.port:
                    hostname = f"{hostname}:{parse_result.port}"
                subdomain = get_subdomain_from_hostname(hostname)
                with suppress(Realm.DoesNotExist):
                    realms.add(get_realm(subdomain))
            except ValidationError:
                users.update(UserProfile.objects.filter(full_name__iexact=key_word))

        # full_names can have , in them
        users.update(UserProfile.objects.filter(full_name__iexact=query))

        context["users"] = users
        context["realms"] = realms

        confirmations: List[Dict[str, Any]] = []

        preregistration_user_ids = [
            user.id for user in PreregistrationUser.objects.filter(email__in=key_words)
        ]
        confirmations += get_confirmations(
            [Confirmation.USER_REGISTRATION, Confirmation.INVITATION],
            preregistration_user_ids,
            hostname=request.get_host(),
        )

        preregistration_realm_ids = [
            user.id for user in PreregistrationRealm.objects.filter(email__in=key_words)
        ]
        confirmations += get_confirmations(
            [Confirmation.REALM_CREATION],
            preregistration_realm_ids,
            hostname=request.get_host(),
        )

        multiuse_invite_ids = [
            invite.id for invite in MultiuseInvite.objects.filter(realm__in=realms)
        ]
        confirmations += get_confirmations([Confirmation.MULTIUSE_INVITE], multiuse_invite_ids)

        realm_reactivation_status_objects = RealmReactivationStatus.objects.filter(realm__in=realms)
        confirmations += get_confirmations(
            [Confirmation.REALM_REACTIVATION], [obj.id for obj in realm_reactivation_status_objects]
        )

        context["confirmations"] = confirmations

        # We want a union of all realms that might appear in the search result,
        # but not necessary as a separate result item.
        # Therefore, we do not modify the realms object in the context.
        all_realms = realms.union(
            [
                confirmation["object"].realm
                for confirmation in confirmations
                # For confirmations, we only display realm details when the type is USER_REGISTRATION
                # or INVITATION.
                if confirmation["type"] in (Confirmation.USER_REGISTRATION, Confirmation.INVITATION)
            ]
            + [user.realm for user in users]
        )
        # Assemble per-realm billing data; license counts are only known
        # when the current plan has a ledger entry.
        plan_data: Dict[int, PlanData] = {}
        for realm in all_realms:
            current_plan = get_current_plan_by_realm(realm)
            plan_data[realm.id] = PlanData(
                customer=get_customer_by_realm(realm),
                current_plan=current_plan,
            )
            if current_plan is not None:
                billing_session = RealmBillingSession(user=None, realm=realm)
                new_plan, last_ledger_entry = billing_session.make_end_of_cycle_updates_if_needed(
                    current_plan, timezone_now()
                )
                if last_ledger_entry is not None:
                    if new_plan is not None:
                        plan_data[realm.id].current_plan = new_plan
                    else:
                        plan_data[realm.id].current_plan = current_plan
                    plan_data[realm.id].licenses = last_ledger_entry.licenses
                    plan_data[realm.id].licenses_used = get_latest_seat_count(realm)
        context["plan_data"] = plan_data

    # Template helpers; the support.html template calls these per realm.
    def get_realm_owner_emails_as_string(realm: Realm) -> str:
        return ", ".join(
            realm.get_human_owner_users()
            .order_by("delivery_email")
            .values_list("delivery_email", flat=True)
        )

    def get_realm_admin_emails_as_string(realm: Realm) -> str:
        return ", ".join(
            realm.get_human_admin_users(include_realm_owners=False)
            .order_by("delivery_email")
            .values_list("delivery_email", flat=True)
        )

    context["get_realm_owner_emails_as_string"] = get_realm_owner_emails_as_string
    context["get_realm_admin_emails_as_string"] = get_realm_admin_emails_as_string
    context["get_discount_for_realm"] = get_discount_for_realm
    context["get_org_type_display_name"] = get_org_type_display_name
    context["realm_icon_url"] = realm_icon_url
    context["Confirmation"] = Confirmation
    context["sorted_realm_types"] = sorted(
        Realm.ORG_TYPES.values(), key=lambda d: d["display_order"]
    )

    return render(request, "analytics/support.html", context=context)
|  | ||||
|  | ||||
def get_remote_servers_for_support(
    email_to_search: Optional[str], hostname_to_search: Optional[str]
) -> List["RemoteZulipServer"]:
    """Look up remote servers by exact contact email or hostname substring.

    Email takes precedence when both terms are given; with neither term,
    the result is empty rather than listing every server.
    """
    base_query = RemoteZulipServer.objects.order_by("id")
    if email_to_search:
        return list(base_query.filter(contact_email__iexact=email_to_search))
    if hostname_to_search:
        return list(base_query.filter(hostname__icontains=hostname_to_search))
    return []
|  | ||||
|  | ||||
@require_server_admin
@has_request_variables
def remote_servers_support(
    request: HttpRequest, query: Optional[str] = REQ("q", default=None)
) -> HttpResponse:
    """Server-admin search page for remote Zulip servers.

    A query containing "@" is treated as a contact email; anything else is
    treated as a hostname fragment.
    """
    email_to_search = None
    hostname_to_search = None
    if query:
        if "@" in query:
            email_to_search = query
        else:
            hostname_to_search = query

    remote_servers = get_remote_servers_for_support(
        email_to_search=email_to_search, hostname_to_search=hostname_to_search
    )
    # Map server id -> estimated max monthly messages, or an explanatory
    # string when the server lacks sufficient recent usage data.
    remote_server_to_max_monthly_messages: Dict[int, Union[int, str]] = {}
    for remote_server in remote_servers:
        try:
            remote_server_to_max_monthly_messages[remote_server.id] = compute_max_monthly_messages(
                remote_server
            )
        except MissingDataError:
            remote_server_to_max_monthly_messages[remote_server.id] = "Recent data missing"

    return render(
        request,
        "analytics/remote_server_support.html",
        context=dict(
            remote_servers=remote_servers,
            remote_server_to_max_monthly_messages=remote_server_to_max_monthly_messages,
        ),
    )
| @@ -1,106 +0,0 @@ | ||||
| from typing import Any, Dict, List, Tuple | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.db.models import QuerySet | ||||
| from django.http import HttpRequest, HttpResponse | ||||
| from django.shortcuts import render | ||||
|  | ||||
| from analytics.views.activity_common import ( | ||||
|     format_date_for_activity_reports, | ||||
|     get_user_activity_summary, | ||||
|     make_table, | ||||
| ) | ||||
| from zerver.decorator import require_server_admin | ||||
| from zerver.models import UserActivity, UserProfile, get_user_profile_by_id | ||||
|  | ||||
# NOTE(review): vestigial guard — the conditional import it once protected
# appears to have been removed, leaving an empty body; likely safe to
# delete entirely (confirm no tooling depends on it).
if settings.BILLING_ENABLED:
    pass
|  | ||||
|  | ||||
def get_user_activity_records(
    user_profile: UserProfile,
) -> QuerySet[UserActivity]:
    """Fetch all UserActivity rows for one user, most recent visit first.

    Only the columns needed by the activity report pages are loaded.
    """
    fields = [
        "user_profile__full_name",
        "query",
        "client__name",
        "count",
        "last_visit",
    ]
    return (
        UserActivity.objects.filter(user_profile=user_profile)
        .order_by("-last_visit")
        .select_related("user_profile", "client")
        .only(*fields)
    )
|  | ||||
|  | ||||
def raw_user_activity_table(records: QuerySet[UserActivity]) -> str:
    """Render the raw per-(query, client) activity rows as an HTML table."""
    cols = [
        "query",
        "client",
        "count",
        "last_visit",
    ]
    rows = [
        [
            record.query,
            record.client.name,
            record.count,
            format_date_for_activity_reports(record.last_visit),
        ]
        for record in records
    ]
    return make_table("Raw data", cols, rows)
|  | ||||
|  | ||||
def user_activity_summary_table(user_summary: Dict[str, Dict[str, Any]]) -> str:
    """Render per-client summary rows, most recent visit first, as a table."""
    rows = [
        [
            format_date_for_activity_reports(client_data["last_visit"]),
            client_name,
            client_data["count"],
        ]
        for client_name, client_data in user_summary.items()
        # "name" and "user_profile_id" are metadata entries, not clients.
        if client_name not in ("name", "user_profile_id")
    ]
    # NOTE(review): sorts on the formatted date string; assumes the format
    # sorts chronologically — TODO confirm against
    # format_date_for_activity_reports.
    rows.sort(key=lambda row: row[0], reverse=True)

    cols = [
        "last_visit",
        "client",
        "count",
    ]
    return make_table("User activity", cols, rows)
|  | ||||
|  | ||||
@require_server_admin
def get_user_activity(request: HttpRequest, user_profile_id: int) -> HttpResponse:
    """Server-admin page showing one user's activity summary plus raw rows."""
    user_profile = get_user_profile_by_id(user_profile_id)
    records = get_user_activity_records(user_profile)

    # Summary table first, then the raw per-row data beneath it.
    user_summary = get_user_activity_summary(records)
    data: List[Tuple[str, str]] = [
        ("Summary", user_activity_summary_table(user_summary)),
        ("Info", raw_user_activity_table(records)),
    ]

    return render(
        request,
        "analytics/activity.html",
        context=dict(data=data, title=user_profile.delivery_email),
    )
| @@ -1,31 +0,0 @@ | ||||
| {generate_api_header(API_ENDPOINT_NAME)} | ||||
|  | ||||
| ## Usage examples | ||||
|  | ||||
| {start_tabs} | ||||
|  | ||||
| {generate_code_example(python)|API_ENDPOINT_NAME|example} | ||||
|  | ||||
| {generate_code_example(javascript)|API_ENDPOINT_NAME|example} | ||||
|  | ||||
| {tab|curl} | ||||
|  | ||||
| {generate_code_example(curl)|API_ENDPOINT_NAME|example} | ||||
|  | ||||
| {end_tabs} | ||||
|  | ||||
| ## Parameters | ||||
|  | ||||
| {generate_api_arguments_table|zulip.yaml|API_ENDPOINT_NAME} | ||||
|  | ||||
| {generate_parameter_description(API_ENDPOINT_NAME)} | ||||
|  | ||||
| ## Response | ||||
|  | ||||
| {generate_return_values_table|zulip.yaml|API_ENDPOINT_NAME} | ||||
|  | ||||
| {generate_response_description(API_ENDPOINT_NAME)} | ||||
|  | ||||
| #### Example response(s) | ||||
|  | ||||
| {generate_code_example|API_ENDPOINT_NAME|fixture} | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,124 +0,0 @@ | ||||
| # Construct a narrow | ||||
|  | ||||
| A **narrow** is a set of filters for Zulip messages, that can be based | ||||
| on many different factors (like sender, stream, topic, search | ||||
keywords, etc.). Narrows are used in various places in the Zulip | ||||
| API (most importantly, in the API for fetching messages). | ||||
|  | ||||
| It is simplest to explain the algorithm for encoding a search as a | ||||
| narrow using a single example. Consider the following search query | ||||
| (written as it would be entered in the Zulip web app's search box). | ||||
| It filters for messages sent to stream `announce`, not sent by | ||||
| `iago@zulip.com`, and containing the words `cool` and `sunglasses`: | ||||
|  | ||||
| ``` | ||||
| stream:announce -sender:iago@zulip.com cool sunglasses | ||||
| ``` | ||||
|  | ||||
| This query would be JSON-encoded for use in the Zulip API using JSON | ||||
| as a list of simple objects, as follows: | ||||
|  | ||||
| ```json | ||||
| [ | ||||
|     { | ||||
|         "operator": "stream", | ||||
|         "operand": "announce" | ||||
|     }, | ||||
|     { | ||||
|         "operator": "sender", | ||||
|         "operand": "iago@zulip.com", | ||||
|         "negated": true | ||||
|     }, | ||||
|     { | ||||
|         "operator": "search", | ||||
|         "operand": "cool sunglasses" | ||||
|     } | ||||
| ] | ||||
| ``` | ||||
|  | ||||
| The Zulip help center article on [searching for messages](/help/search-for-messages) | ||||
| documents the majority of the search/narrow options supported by the | ||||
| Zulip API. | ||||
|  | ||||
| Note that many narrows, including all that lack a `stream` or `streams` | ||||
| operator, search the current user's personal message history. See | ||||
| [searching shared history](/help/search-for-messages#searching-shared-history) | ||||
| for details. | ||||
|  | ||||
| **Changes**: In Zulip 7.0 (feature level 177), support was added | ||||
| for three filters related to direct messages: `is:dm`, `dm` and | ||||
| `dm-including`. The `dm` operator replaced and deprecated the | ||||
| `pm-with` operator. The `is:dm` filter replaced and deprecated | ||||
| the `is:private` filter. The `dm-including` operator replaced and | ||||
| deprecated the `group-pm-with` operator. | ||||
|  | ||||
| The `dm-including` and `group-pm-with` operators return slightly | ||||
| different results. For example, `dm-including:1234` returns all | ||||
| direct messages (1-on-1 and group) that include the current user | ||||
| and the user with the unique user ID of `1234`. On the other hand, | ||||
| `group-pm-with:1234` returned only group direct messages that included | ||||
| the current user and the user with the unique user ID of `1234`. | ||||
|  | ||||
| Both `dm` and `is:dm` are aliases of `pm-with` and `is:private` | ||||
| respectively, and return the same exact results that the deprecated | ||||
| filters did. | ||||
|  | ||||
| ## Narrows that use IDs | ||||
|  | ||||
| ### Message IDs | ||||
|  | ||||
| The `near` and `id` operators, documented in the help center, use message | ||||
| IDs for their operands. | ||||
|  | ||||
| * `near:12345`: Search messages around the message with ID `12345`. | ||||
* `id:12345`: Search for only the message with ID `12345`. | ||||
|  | ||||
| The message ID operand for the `id` operator may be encoded as either a | ||||
| number or a string. The message ID operand for the `near` operator must | ||||
| be encoded as a string. | ||||
|  | ||||
| **Changes**: Prior to Zulip 8.0 (feature level 194), the message ID | ||||
| operand for the `id` operator needed to be encoded as a string. | ||||
|  | ||||
|  | ||||
| ```json | ||||
| [ | ||||
|     { | ||||
|         "operator": "id", | ||||
|         "operand": 12345 | ||||
|     } | ||||
| ] | ||||
| ``` | ||||
|  | ||||
| ### Stream and user IDs | ||||
|  | ||||
| There are a few additional narrow/search options (new in Zulip 2.1) | ||||
| that use either stream IDs or user IDs that are not documented in the | ||||
| help center because they are primarily useful to API clients: | ||||
|  | ||||
| * `stream:1234`: Search messages sent to the stream with ID `1234`. | ||||
| * `sender:1234`: Search messages sent by user ID `1234`. | ||||
| * `dm:1234`: Search the direct message conversation between | ||||
|   you and user ID `1234`. | ||||
| * `dm:1234,5678`: Search the direct message conversation between | ||||
|   you, user ID `1234`, and user ID `5678`. | ||||
| * `dm-including:1234`: Search all direct messages (1-on-1 and group) | ||||
|   that include you and user ID `1234`. | ||||
|  | ||||
| The operands for these search options must be encoded either as an | ||||
| integer ID or a JSON list of integer IDs. For example, to query | ||||
| messages sent by a user 1234 to a direct message thread with yourself, | ||||
| user 1234, and user 5678, the correct JSON-encoded query is: | ||||
|  | ||||
| ```json | ||||
| [ | ||||
|     { | ||||
|         "operator": "dm", | ||||
|         "operand": [1234, 5678] | ||||
|     }, | ||||
|     { | ||||
|         "operator": "sender", | ||||
|         "operand": 1234 | ||||
|     } | ||||
| ] | ||||
| ``` | ||||
| @@ -1,49 +0,0 @@ | ||||
| {generate_api_header(/scheduled_messages:post)} | ||||
|  | ||||
| ## Usage examples | ||||
|  | ||||
| {start_tabs} | ||||
|  | ||||
| {generate_code_example(python)|/scheduled_messages:post|example} | ||||
|  | ||||
| {generate_code_example(javascript)|/scheduled_messages:post|example} | ||||
|  | ||||
| {tab|curl} | ||||
|  | ||||
| ``` curl | ||||
| # Create a scheduled stream message | ||||
| curl -X POST {{ api_url }}/v1/scheduled_messages \ | ||||
|     -u BOT_EMAIL_ADDRESS:BOT_API_KEY \ | ||||
|     --data-urlencode type=stream \ | ||||
|     --data-urlencode to=9 \ | ||||
|     --data-urlencode topic=Hello \ | ||||
|     --data-urlencode 'content=Nice to meet everyone!' \ | ||||
|     --data-urlencode scheduled_delivery_timestamp=3165826990 | ||||
|  | ||||
| # Create a scheduled direct message | ||||
| curl -X POST {{ api_url }}/v1/messages \ | ||||
|     -u BOT_EMAIL_ADDRESS:BOT_API_KEY \ | ||||
|     --data-urlencode type=direct \ | ||||
|     --data-urlencode 'to=[9, 10]' \ | ||||
|     --data-urlencode 'content=Can we meet on Monday?' \ | ||||
|     --data-urlencode scheduled_delivery_timestamp=3165826990 | ||||
|  | ||||
| ``` | ||||
|  | ||||
| {end_tabs} | ||||
|  | ||||
| ## Parameters | ||||
|  | ||||
| {generate_api_arguments_table|zulip.yaml|/scheduled_messages:post} | ||||
|  | ||||
| {generate_parameter_description(/scheduled_messages:post)} | ||||
|  | ||||
| ## Response | ||||
|  | ||||
| {generate_return_values_table|zulip.yaml|/scheduled_messages:post} | ||||
|  | ||||
| {generate_response_description(/scheduled_messages:post)} | ||||
|  | ||||
| #### Example response(s) | ||||
|  | ||||
| {generate_code_example|/scheduled_messages:post|fixture} | ||||
| @@ -1,80 +0,0 @@ | ||||
| # HTTP headers | ||||
|  | ||||
| This page documents the HTTP headers used by the Zulip API. | ||||
|  | ||||
| Most important is that API clients authenticate to the server using | ||||
| HTTP Basic authentication. If you're using the official [Python or | ||||
| JavaScript bindings](/api/installation-instructions), this is taken | ||||
| care of when you configure said bindings. | ||||
|  | ||||
| Otherwise, see the `curl` example on each endpoint's documentation | ||||
| page, which details the request format. | ||||
|  | ||||
| Documented below are additional HTTP headers and header conventions | ||||
| generally used by Zulip: | ||||
|  | ||||
| ## The `User-Agent` header | ||||
|  | ||||
| Clients are not required to pass a `User-Agent` HTTP header, but we | ||||
| highly recommend doing so when writing an integration. It's easy to do | ||||
| and it can help save time when debugging issues related to an API | ||||
| client. | ||||
|  | ||||
| If provided, the Zulip server will parse the `User-Agent` HTTP header | ||||
| in order to identify specific clients and integrations. This | ||||
| information is used by the server for logging, [usage | ||||
| statistics](/help/analytics), and on rare occasions, for | ||||
| backwards-compatibility logic to preserve support for older versions | ||||
| of official clients. | ||||
|  | ||||
| Official Zulip clients and integrations use a `User-Agent` that starts | ||||
| with something like `ZulipMobile/20.0.103 `, encoding the name of the | ||||
application and its version. | ||||
|  | ||||
| Zulip's official API bindings have reasonable defaults for | ||||
| `User-Agent`. For example, the official Zulip Python bindings have a | ||||
| default `User-Agent` starting with `ZulipPython/{version}`, where | ||||
| `version` is the version of the library. | ||||
|  | ||||
| You can give your bot/integration its own name by passing the `client` | ||||
| parameter when initializing the Python bindings. For example, the | ||||
| official Zulip Nagios integration is initialized like this: | ||||
|  | ||||
| ``` python | ||||
| client = zulip.Client( | ||||
|     config_file=opts.config, client=f"ZulipNagios/{VERSION}" | ||||
| ) | ||||
| ``` | ||||
|  | ||||
| If you are working on an integration that you plan to share outside | ||||
| your organization, you can get help picking a good name in | ||||
| `#integrations` in the [Zulip development | ||||
| community](https://zulip.com/development-community). | ||||
|  | ||||
| ## Rate-limiting response headers | ||||
|  | ||||
| To help clients avoid exceeding rate limits, Zulip sets the following | ||||
| HTTP headers in all API responses: | ||||
|  | ||||
| * `X-RateLimit-Remaining`: The number of additional requests of this | ||||
|   type that the client can send before exceeding its limit. | ||||
| * `X-RateLimit-Limit`: The limit that would be applicable to a client | ||||
|   that had not made any recent requests of this type. This is useful | ||||
|   for designing a client's burst behavior so as to avoid ever reaching | ||||
|   a rate limit. | ||||
| * `X-RateLimit-Reset`: The time at which the client will no longer | ||||
|   have any rate limits applied to it (and thus could do a burst of | ||||
|   `X-RateLimit-Limit` requests). | ||||
|  | ||||
| [Zulip's rate limiting rules are configurable][rate-limiting-rules], | ||||
| and can vary by server and over time. The default configuration | ||||
| currently limits: | ||||
|  | ||||
| * Every user is limited to 200 total API requests per minute. | ||||
| * Separate, much lower limits for authentication/login attempts. | ||||
|  | ||||
| When the Zulip server has configured multiple rate limits that apply | ||||
| to a given request, the values returned will be for the strictest | ||||
| limit. | ||||
|  | ||||
| [rate-limiting-rules]: https://zulip.readthedocs.io/en/latest/production/security-model.html#rate-limiting | ||||
| @@ -1,119 +0,0 @@ | ||||
| #### Messages | ||||
|  | ||||
| * [Send a message](/api/send-message) | ||||
| * [Upload a file](/api/upload-file) | ||||
| * [Edit a message](/api/update-message) | ||||
| * [Delete a message](/api/delete-message) | ||||
| * [Get messages](/api/get-messages) | ||||
| * [Construct a narrow](/api/construct-narrow) | ||||
| * [Add an emoji reaction](/api/add-reaction) | ||||
| * [Remove an emoji reaction](/api/remove-reaction) | ||||
| * [Render a message](/api/render-message) | ||||
| * [Fetch a single message](/api/get-message) | ||||
| * [Check if messages match narrow](/api/check-messages-match-narrow) | ||||
| * [Get a message's edit history](/api/get-message-history) | ||||
| * [Update personal message flags](/api/update-message-flags) | ||||
| * [Update personal message flags for narrow](/api/update-message-flags-for-narrow) | ||||
| * [Mark all messages as read](/api/mark-all-as-read) | ||||
| * [Mark messages in a stream as read](/api/mark-stream-as-read) | ||||
| * [Mark messages in a topic as read](/api/mark-topic-as-read) | ||||
| * [Get a message's read receipts](/api/get-read-receipts) | ||||
|  | ||||
| #### Scheduled messages | ||||
|  | ||||
| * [Get scheduled messages](/api/get-scheduled-messages) | ||||
| * [Create a scheduled message](/api/create-scheduled-message) | ||||
| * [Edit a scheduled message](/api/update-scheduled-message) | ||||
| * [Delete a scheduled message](/api/delete-scheduled-message) | ||||
|  | ||||
| #### Drafts | ||||
|  | ||||
| * [Get drafts](/api/get-drafts) | ||||
| * [Create drafts](/api/create-drafts) | ||||
| * [Edit a draft](/api/edit-draft) | ||||
| * [Delete a draft](/api/delete-draft) | ||||
|  | ||||
| #### Streams | ||||
|  | ||||
| * [Get subscribed streams](/api/get-subscriptions) | ||||
| * [Subscribe to a stream](/api/subscribe) | ||||
| * [Unsubscribe from a stream](/api/unsubscribe) | ||||
| * [Get subscription status](/api/get-subscription-status) | ||||
| * [Get all subscribers](/api/get-subscribers) | ||||
| * [Update subscription settings](/api/update-subscription-settings) | ||||
| * [Get all streams](/api/get-streams) | ||||
| * [Get a stream by ID](/api/get-stream-by-id) | ||||
| * [Get stream ID](/api/get-stream-id) | ||||
| * [Create a stream](/api/create-stream) | ||||
| * [Update a stream](/api/update-stream) | ||||
| * [Archive a stream](/api/archive-stream) | ||||
| * [Get stream's email address](/api/get-stream-email-address) | ||||
| * [Get topics in a stream](/api/get-stream-topics) | ||||
| * [Topic muting](/api/mute-topic) | ||||
| * [Update personal preferences for a topic](/api/update-user-topic) | ||||
| * [Delete a topic](/api/delete-topic) | ||||
| * [Add a default stream](/api/add-default-stream) | ||||
| * [Remove a default stream](/api/remove-default-stream) | ||||
|  | ||||
| #### Users | ||||
|  | ||||
| * [Get all users](/api/get-users) | ||||
| * [Get own user](/api/get-own-user) | ||||
| * [Get a user](/api/get-user) | ||||
| * [Get a user by email](/api/get-user-by-email) | ||||
| * [Update a user](/api/update-user) | ||||
| * [Update your status](/api/update-status) | ||||
| * [Create a user](/api/create-user) | ||||
| * [Deactivate a user](/api/deactivate-user) | ||||
| * [Reactivate a user](/api/reactivate-user) | ||||
| * [Deactivate own user](/api/deactivate-own-user) | ||||
| * [Set "typing" status](/api/set-typing-status) | ||||
| * [Get user presence](/api/get-user-presence) | ||||
| * [Get presence of all users](/api/get-presence) | ||||
| * [Get attachments](/api/get-attachments) | ||||
| * [Delete an attachment](/api/remove-attachment) | ||||
| * [Update settings](/api/update-settings) | ||||
| * [Get user groups](/api/get-user-groups) | ||||
| * [Create a user group](/api/create-user-group) | ||||
| * [Update a user group](/api/update-user-group) | ||||
| * [Delete a user group](/api/remove-user-group) | ||||
| * [Update user group members](/api/update-user-group-members) | ||||
| * [Update user group subgroups](/api/update-user-group-subgroups) | ||||
| * [Get user group membership status](/api/get-is-user-group-member) | ||||
| * [Get user group members](/api/get-user-group-members) | ||||
| * [Get subgroups of user group](/api/get-user-group-subgroups) | ||||
| * [Mute a user](/api/mute-user) | ||||
| * [Unmute a user](/api/unmute-user) | ||||
| * [Get all alert words](/api/get-alert-words) | ||||
| * [Add alert words](/api/add-alert-words) | ||||
| * [Remove alert words](/api/remove-alert-words) | ||||
|  | ||||
| #### Server & organizations | ||||
|  | ||||
| * [Get server settings](/api/get-server-settings) | ||||
| * [Get linkifiers](/api/get-linkifiers) | ||||
| * [Add a linkifier](/api/add-linkifier) | ||||
| * [Update a linkifier](/api/update-linkifier) | ||||
| * [Remove a linkifier](/api/remove-linkifier) | ||||
| * [Reorder linkifiers](/api/reorder-linkifiers) | ||||
| * [Add a code playground](/api/add-code-playground) | ||||
| * [Remove a code playground](/api/remove-code-playground) | ||||
| * [Get all custom emoji](/api/get-custom-emoji) | ||||
| * [Upload custom emoji](/api/upload-custom-emoji) | ||||
| * [Deactivate custom emoji](/api/deactivate-custom-emoji) | ||||
| * [Get all custom profile fields](/api/get-custom-profile-fields) | ||||
| * [Reorder custom profile fields](/api/reorder-custom-profile-fields) | ||||
| * [Create a custom profile field](/api/create-custom-profile-field) | ||||
| * [Update realm-level defaults of user settings](/api/update-realm-user-settings-defaults) | ||||
|  | ||||
| #### Real-time events | ||||
|  | ||||
| * [Real time events API](/api/real-time-events) | ||||
| * [Register an event queue](/api/register-queue) | ||||
| * [Get events from an event queue](/api/get-events) | ||||
| * [Delete an event queue](/api/delete-queue) | ||||
|  | ||||
| #### Specialty endpoints | ||||
|  | ||||
| * [Fetch an API key (production)](/api/fetch-api-key) | ||||
| * [Fetch an API key (development only)](/api/dev-fetch-api-key) | ||||
| @@ -1,26 +0,0 @@ | ||||
| # The Zulip API | ||||
|  | ||||
| Zulip's APIs allow you to integrate other services with Zulip.  This | ||||
| guide should help you find the API you need: | ||||
|  | ||||
| * First, check if the tool you'd like to integrate with Zulip | ||||
|   [already has a native integration](/integrations/). | ||||
| * Next, check if [Zapier](https://zapier.com/apps) or | ||||
|   [IFTTT](https://ifttt.com/search) has an integration. | ||||
|   [Zulip's Zapier integration](/integrations/doc/zapier) and | ||||
|   [Zulip's IFTTT integration](/integrations/doc/ifttt) often allow | ||||
|   integrating a new service with Zulip without writing any code. | ||||
| * If you'd like to send content into Zulip, you can | ||||
|   [write a native incoming webhook integration](/api/incoming-webhooks-overview) | ||||
|   or use [Zulip's API for sending messages](/api/send-message). | ||||
| * If you're building an interactive bot that reacts to activity inside | ||||
|   Zulip, you'll want to look at Zulip's | ||||
|   [Python framework for interactive bots](/api/running-bots) or | ||||
|   [Zulip's real-time events API](/api/get-events). | ||||
|  | ||||
| And if you still need to build your own integration with Zulip, check out | ||||
| the full [REST API](/api/rest), generally starting with | ||||
| [installing the API client bindings](/api/installation-instructions). | ||||
|  | ||||
| In case you already know how you want to build your integration and you're | ||||
| just looking for an API key, we've got you covered [here](/api/api-keys). | ||||
| @@ -1,34 +0,0 @@ | ||||
| # Error handling | ||||
|  | ||||
| Zulip's API will always return a JSON format response. | ||||
| The HTTP status code indicates whether the request was successful | ||||
| (200 = success, 40x = user error, 50x = server error).  Every response | ||||
| will contain at least two keys: `msg` (a human-readable error message) | ||||
| and `result`, which will be either `error` or `success` (this is | ||||
| redundant with the HTTP status code, but is convenient when printing | ||||
| responses while debugging). | ||||
|  | ||||
| For some common errors, Zulip provides a `code` attribute.  Where | ||||
| present, clients should check `code`, rather than `msg`, when looking | ||||
| for specific error conditions, since the `msg` strings are | ||||
| internationalized (e.g. the server will send the error message | ||||
| translated into French if the user has a French locale). | ||||
|  | ||||
| Each endpoint documents its own unique errors; documented below are | ||||
| errors common to many endpoints: | ||||
|  | ||||
| {generate_code_example|/rest-error-handling:post|fixture} | ||||
|  | ||||
| ## Ignored Parameters | ||||
|  | ||||
| In JSON success responses, all Zulip REST API endpoints may return | ||||
| an array of parameters sent in the request that are not supported | ||||
| by that specific endpoint. | ||||
|  | ||||
| While this can be expected, e.g. when sending both current and legacy | ||||
| names for a parameter to a Zulip server of unknown version, this often | ||||
| indicates either a bug in the client implementation or an attempt to | ||||
| configure a new feature while connected to an older Zulip server that | ||||
| does not support said feature. | ||||
|  | ||||
| {generate_code_example|/settings:patch|fixture} | ||||
| @@ -1,120 +0,0 @@ | ||||
| # Roles and permissions | ||||
|  | ||||
| Zulip offers several levels of permissions based on a | ||||
| [user's role](/help/roles-and-permissions) in a Zulip organization. | ||||
|  | ||||
| Here are some important details to note when working with these | ||||
| roles and permissions in Zulip's API: | ||||
|  | ||||
| ## A user's role | ||||
|  | ||||
| A user's account data include a `role` property, which contains the | ||||
| user's role in the Zulip organization. These roles are encoded as: | ||||
|  | ||||
| * Organization owner: 100 | ||||
|  | ||||
| * Organization administrator: 200 | ||||
|  | ||||
| * Organization moderator: 300 | ||||
|  | ||||
| * Member: 400 | ||||
|  | ||||
| * Guest: 600 | ||||
|  | ||||
| User account data also include these boolean properties that duplicate | ||||
| the related roles above: | ||||
|  | ||||
| * `is_owner` specifying whether the user is an organization owner. | ||||
|  | ||||
| * `is_admin` specifying whether the user is an organization administrator. | ||||
|  | ||||
| * `is_guest` specifying whether the user is a guest user. | ||||
|  | ||||
| These are intended as conveniences for simple clients, and clients | ||||
| should prefer using the `role` field, since only that one is updated | ||||
| by the [events API](/api/get-events). | ||||
|  | ||||
| Note that [`POST /register`](/api/register-queue) also returns an | ||||
| `is_moderator` boolean property specifying whether the current user is | ||||
| an organization moderator. | ||||
|  | ||||
| Additionally, user account data include an `is_billing_admin` property | ||||
| specifying whether the user is a billing administrator for the Zulip | ||||
| organization, which is not related to one of the roles listed above, | ||||
| but rather allows for specific permissions related to billing | ||||
| administration in [paid Zulip Cloud plans](https://zulip.com/plans/). | ||||
|  | ||||
| ### User account data in the API | ||||
|  | ||||
| Endpoints that return the user account data / properties mentioned | ||||
| above are: | ||||
|  | ||||
| * [`GET /users`](/api/get-users) | ||||
|  | ||||
| * [`GET /users/{user_id}`](/api/get-user) | ||||
|  | ||||
| * [`GET /users/{email}`](/api/get-user-by-email) | ||||
|  | ||||
| * [`GET /users/me`](/api/get-own-user) | ||||
|  | ||||
| * [`GET /events`](/api/get-events) | ||||
|  | ||||
| * [`POST /register`](/api/register-queue) | ||||
|  | ||||
| Note that the [`POST /register` endpoint](/api/register-queue) returns | ||||
| the above boolean properties to describe the role of the current user, | ||||
| when `realm_user` is present in `fetch_event_types`. | ||||
|  | ||||
| Additionally, the specific events returned by the | ||||
| [`GET /events` endpoint](/api/get-events) containing data related | ||||
| to user accounts and roles are the [`realm_user` add | ||||
| event](/api/get-events#realm_user-add), and the | ||||
| [`realm_user` update event](/api/get-events#realm_user-update). | ||||
|  | ||||
| ## Permission levels | ||||
|  | ||||
| Many areas of Zulip are customizable by the roles | ||||
| above, such as (but not limited to) [restricting message editing and | ||||
| deletion](/help/restrict-message-editing-and-deletion) and | ||||
| [streams permissions](/help/stream-permissions). The potential | ||||
| permission levels are: | ||||
|  | ||||
| * Everyone / Any user including Guests (least restrictive) | ||||
|  | ||||
| * Members | ||||
|  | ||||
| * Full members | ||||
|  | ||||
| * Moderators | ||||
|  | ||||
| * Administrators | ||||
|  | ||||
| * Owners | ||||
|  | ||||
| * Nobody (most restrictive) | ||||
|  | ||||
| These permission levels and policies in the API are designed to be | ||||
| cutoffs in that users with the specified role and above have the | ||||
| specified ability or access. For example, a permission level documented | ||||
| as 'moderators only' includes organization moderators, administrators, | ||||
| and owners. | ||||
|  | ||||
| Note that specific settings and policies in the Zulip API that use these | ||||
| permission levels will likely support a subset of those listed above. | ||||
|  | ||||
| ## Determining if a user is a full member | ||||
|  | ||||
| When a Zulip organization has set up a [waiting period before new members | ||||
| turn into full members](/help/restrict-permissions-of-new-members), | ||||
| clients will need to determine if a user's account has aged past the | ||||
| organization's waiting period threshold. | ||||
|  | ||||
| The `realm_waiting_period_threshold`, which is the number of days until | ||||
| a user's account is treated as a full member, is returned by the | ||||
| [`POST /register` endpoint](/api/register-queue) when `realm` is present | ||||
| in `fetch_event_types`. | ||||
|  | ||||
| Clients can compare the `realm_waiting_period_threshold` to a user | ||||
account's `date_joined` property, which is the time the user account | ||||
| was created, to determine if a user has the permissions of a full | ||||
| member or a new member. | ||||
| @@ -1,77 +0,0 @@ | ||||
| {generate_api_header(/messages:post)} | ||||
|  | ||||
| ## Usage examples | ||||
|  | ||||
| {start_tabs} | ||||
|  | ||||
| {generate_code_example(python)|/messages:post|example} | ||||
|  | ||||
| {generate_code_example(javascript)|/messages:post|example} | ||||
|  | ||||
| {tab|curl} | ||||
|  | ||||
| ``` curl | ||||
| # For stream messages | ||||
| curl -X POST {{ api_url }}/v1/messages \ | ||||
|     -u BOT_EMAIL_ADDRESS:BOT_API_KEY \ | ||||
|     --data-urlencode type=stream \ | ||||
|     --data-urlencode 'to="Denmark"' \ | ||||
|     --data-urlencode topic=Castle \ | ||||
|     --data-urlencode 'content=I come not, friends, to steal away your hearts.' | ||||
|  | ||||
| # For direct messages | ||||
| curl -X POST {{ api_url }}/v1/messages \ | ||||
|     -u BOT_EMAIL_ADDRESS:BOT_API_KEY \ | ||||
|     --data-urlencode type=direct \ | ||||
|     --data-urlencode 'to=[9]' \ | ||||
|     --data-urlencode 'content=With mirth and laughter let old wrinkles come.' | ||||
| ``` | ||||
|  | ||||
| {tab|zulip-send} | ||||
|  | ||||
| You can use `zulip-send` | ||||
| (available after you `pip install zulip`) to easily send Zulips from | ||||
| the command-line, providing the message content via STDIN. | ||||
|  | ||||
| ```bash | ||||
| # For stream messages | ||||
| zulip-send --stream Denmark --subject Castle \ | ||||
|     --user othello-bot@example.com --api-key a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 | ||||
|  | ||||
| # For direct messages | ||||
| zulip-send hamlet@example.com \ | ||||
|     --user othello-bot@example.com --api-key a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 | ||||
| ``` | ||||
|  | ||||
| #### Passing in the message on the command-line | ||||
|  | ||||
| If you'd like, you can also provide the message on the command-line with the | ||||
| `-m` or `--message` flag, as follows: | ||||
|  | ||||
|  | ||||
| ```bash | ||||
| zulip-send --stream Denmark --subject Castle \ | ||||
|     --message 'I come not, friends, to steal away your hearts.' \ | ||||
|     --user othello-bot@example.com --api-key a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 | ||||
| ``` | ||||
|  | ||||
| You can omit the `user` and `api-key` parameters if you have a `~/.zuliprc` | ||||
| file. | ||||
|  | ||||
| {end_tabs} | ||||
|  | ||||
| ## Parameters | ||||
|  | ||||
| {generate_api_arguments_table|zulip.yaml|/messages:post} | ||||
|  | ||||
| {generate_parameter_description(/messages:post)} | ||||
|  | ||||
| ## Response | ||||
|  | ||||
| {generate_return_values_table|zulip.yaml|/messages:post} | ||||
|  | ||||
| {generate_response_description(/messages:post)} | ||||
|  | ||||
| #### Example response(s) | ||||
|  | ||||
| {generate_code_example|/messages:post|fixture} | ||||
| @@ -1,27 +0,0 @@ | ||||
| ## Integrations | ||||
|  | ||||
| * [Overview](/api/integrations-overview) | ||||
| * [Incoming webhook integrations](/api/incoming-webhooks-overview) | ||||
| * [Hello world walkthrough](/api/incoming-webhooks-walkthrough) | ||||
| * [Non-webhook integrations](/api/non-webhook-integrations) | ||||
|  | ||||
| ## Interactive bots (beta) | ||||
|  | ||||
| * [Running bots](/api/running-bots) | ||||
| * [Deploying bots](/api/deploying-bots) | ||||
| * [Writing bots](/api/writing-bots) | ||||
| * [Outgoing webhooks](/api/outgoing-webhooks) | ||||
|  | ||||
| ## REST API | ||||
|  | ||||
| * [Overview](/api/rest) | ||||
| * [Installation instructions](/api/installation-instructions) | ||||
| * [API keys](/api/api-keys) | ||||
| * [Configuring the Python bindings](/api/configuring-python-bindings) | ||||
| * [HTTP headers](/api/http-headers) | ||||
| * [Error handling](/api/rest-error-handling) | ||||
| * [Roles and permissions](/api/roles-and-permissions) | ||||
| * [Client libraries](/api/client-libraries) | ||||
| * [API changelog](/api/changelog) | ||||
|  | ||||
| {!rest-endpoints.md!} | ||||
							
								
								
									
										26
									
								
								babel.config.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										26
									
								
								babel.config.js
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,26 @@ | ||||
| "use strict"; | ||||
|  | ||||
| module.exports = { | ||||
|     plugins: [ | ||||
|         [ | ||||
|             "formatjs", | ||||
|             { | ||||
|                 additionalFunctionNames: ["$t", "$t_html"], | ||||
|                 overrideIdFn: (id, defaultMessage) => defaultMessage, | ||||
|             }, | ||||
|         ], | ||||
|     ], | ||||
|     presets: [ | ||||
|         [ | ||||
|             "@babel/preset-env", | ||||
|             { | ||||
|                 corejs: "3.6", | ||||
|                 loose: true, // Loose mode for…of loops are 5× faster in Firefox | ||||
|                 shippedProposals: true, | ||||
|                 useBuiltIns: "usage", | ||||
|             }, | ||||
|         ], | ||||
|         "@babel/typescript", | ||||
|     ], | ||||
|     sourceType: "unambiguous", | ||||
| }; | ||||
| @@ -3,6 +3,7 @@ from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("contenttypes", "0001_initial"), | ||||
|     ] | ||||
|   | ||||
| @@ -3,6 +3,7 @@ from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("confirmation", "0001_initial"), | ||||
|     ] | ||||
|   | ||||
| @@ -3,6 +3,7 @@ from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("confirmation", "0002_realmcreationkey"), | ||||
|     ] | ||||
|   | ||||
| @@ -3,6 +3,7 @@ from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("confirmation", "0003_emailchangeconfirmation"), | ||||
|     ] | ||||
|   | ||||
| @@ -4,6 +4,7 @@ from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("zerver", "0124_stream_enable_notifications"), | ||||
|         ("confirmation", "0004_remove_confirmationmanager"), | ||||
|   | ||||
| @@ -4,6 +4,7 @@ from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("confirmation", "0005_confirmation_realm"), | ||||
|     ] | ||||
|   | ||||
| @@ -4,6 +4,7 @@ from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("confirmation", "0006_realmcreationkey_presume_email_valid"), | ||||
|     ] | ||||
|   | ||||
| @@ -1,16 +0,0 @@ | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     dependencies = [ | ||||
|         ("confirmation", "0007_add_indexes"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="confirmation", | ||||
|             name="expiry_date", | ||||
|             field=models.DateTimeField(db_index=True, null=True), | ||||
|             preserve_default=False, | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,70 +0,0 @@ | ||||
| # Generated by Django 3.1.7 on 2021-03-31 20:47 | ||||
|  | ||||
| import time | ||||
| from datetime import timedelta | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.db import migrations, transaction | ||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | ||||
| from django.db.migrations.state import StateApps | ||||
|  | ||||
|  | ||||
| def set_expiry_date_for_existing_confirmations( | ||||
|     apps: StateApps, schema_editor: BaseDatabaseSchemaEditor | ||||
| ) -> None: | ||||
|     Confirmation = apps.get_model("confirmation", "Confirmation") | ||||
|     if not Confirmation.objects.exists(): | ||||
|         return | ||||
|  | ||||
|     # The values at the time of this migration | ||||
|     INVITATION = 2 | ||||
|     UNSUBSCRIBE = 4 | ||||
|     MULTIUSE_INVITE = 6 | ||||
|  | ||||
|     @transaction.atomic | ||||
|     def backfill_confirmations_between(lower_bound: int, upper_bound: int) -> None: | ||||
|         confirmations = Confirmation.objects.filter(id__gte=lower_bound, id__lte=upper_bound) | ||||
|         for confirmation in confirmations: | ||||
|             if confirmation.type in (INVITATION, MULTIUSE_INVITE): | ||||
|                 confirmation.expiry_date = confirmation.date_sent + timedelta( | ||||
|                     days=settings.INVITATION_LINK_VALIDITY_DAYS | ||||
|                 ) | ||||
|             elif confirmation.type == UNSUBSCRIBE: | ||||
|                 # Unsubscribe links never expire, which we apparently implement as in 1M days. | ||||
|                 confirmation.expiry_date = confirmation.date_sent + timedelta(days=1000000) | ||||
|             else: | ||||
|                 confirmation.expiry_date = confirmation.date_sent + timedelta( | ||||
|                     days=settings.CONFIRMATION_LINK_DEFAULT_VALIDITY_DAYS | ||||
|                 ) | ||||
|         Confirmation.objects.bulk_update(confirmations, ["expiry_date"]) | ||||
|  | ||||
|     # Because the ranges in this code are inclusive, subtracting 1 offers round numbers. | ||||
|     BATCH_SIZE = 1000 - 1 | ||||
|  | ||||
|     first_id = Confirmation.objects.earliest("id").id | ||||
|     last_id = Confirmation.objects.latest("id").id | ||||
|  | ||||
|     id_range_lower_bound = first_id | ||||
|     id_range_upper_bound = first_id + BATCH_SIZE | ||||
|     while id_range_lower_bound <= last_id: | ||||
|         print(f"Processed {id_range_lower_bound} / {last_id}") | ||||
|         backfill_confirmations_between(id_range_lower_bound, id_range_upper_bound) | ||||
|         id_range_lower_bound = id_range_upper_bound + 1 | ||||
|         id_range_upper_bound = id_range_lower_bound + BATCH_SIZE | ||||
|         time.sleep(0.1) | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     atomic = False | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("confirmation", "0008_confirmation_expiry_date"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunPython( | ||||
|             set_expiry_date_for_existing_confirmations, | ||||
|             reverse_code=migrations.RunPython.noop, | ||||
|             elidable=True, | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,17 +0,0 @@ | ||||
| # Generated by Django 3.2.5 on 2021-08-02 19:03 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     dependencies = [ | ||||
|         ("confirmation", "0009_confirmation_expiry_date_backfill"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name="confirmation", | ||||
|             name="expiry_date", | ||||
|             field=models.DateTimeField(db_index=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,17 +0,0 @@ | ||||
| # Generated by Django 3.2.9 on 2021-11-30 17:44 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|     dependencies = [ | ||||
|         ("confirmation", "0010_alter_confirmation_expiry_date"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name="confirmation", | ||||
|             name="expiry_date", | ||||
|             field=models.DateTimeField(db_index=True, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -4,7 +4,7 @@ __revision__ = "$Id: models.py 28 2009-10-22 15:03:02Z jarek.zgoda $" | ||||
| import datetime | ||||
| import secrets | ||||
| from base64 import b32encode | ||||
| from typing import List, Mapping, Optional, Union | ||||
| from typing import Mapping, Optional, Union | ||||
| from urllib.parse import urljoin | ||||
|  | ||||
| from django.conf import settings | ||||
| @@ -13,25 +13,14 @@ from django.contrib.contenttypes.models import ContentType | ||||
| from django.db import models | ||||
| from django.db.models import CASCADE | ||||
| from django.http import HttpRequest, HttpResponse | ||||
| from django.template.response import TemplateResponse | ||||
| from django.shortcuts import render | ||||
| from django.urls import reverse | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from typing_extensions import TypeAlias, override | ||||
|  | ||||
| from confirmation import settings as confirmation_settings | ||||
| from zerver.lib.types import UnspecifiedValue | ||||
| from zerver.models import ( | ||||
|     EmailChangeStatus, | ||||
|     MultiuseInvite, | ||||
|     PreregistrationRealm, | ||||
|     PreregistrationUser, | ||||
|     Realm, | ||||
|     RealmReactivationStatus, | ||||
|     UserProfile, | ||||
| ) | ||||
| from zerver.models import EmailChangeStatus, MultiuseInvite, PreregistrationUser, Realm, UserProfile | ||||
|  | ||||
|  | ||||
| class ConfirmationKeyError(Exception): | ||||
| class ConfirmationKeyException(Exception): | ||||
|     WRONG_LENGTH = 1 | ||||
|     EXPIRED = 2 | ||||
|     DOES_NOT_EXIST = 3 | ||||
| @@ -42,13 +31,13 @@ class ConfirmationKeyError(Exception): | ||||
|  | ||||
|  | ||||
| def render_confirmation_key_error( | ||||
|     request: HttpRequest, exception: ConfirmationKeyError | ||||
|     request: HttpRequest, exception: ConfirmationKeyException | ||||
| ) -> HttpResponse: | ||||
|     if exception.error_type == ConfirmationKeyError.WRONG_LENGTH: | ||||
|         return TemplateResponse(request, "confirmation/link_malformed.html", status=404) | ||||
|     if exception.error_type == ConfirmationKeyError.EXPIRED: | ||||
|         return TemplateResponse(request, "confirmation/link_expired.html", status=404) | ||||
|     return TemplateResponse(request, "confirmation/link_does_not_exist.html", status=404) | ||||
|     if exception.error_type == ConfirmationKeyException.WRONG_LENGTH: | ||||
|         return render(request, "confirmation/link_malformed.html") | ||||
|     if exception.error_type == ConfirmationKeyException.EXPIRED: | ||||
|         return render(request, "confirmation/link_expired.html") | ||||
|     return render(request, "confirmation/link_does_not_exist.html") | ||||
|  | ||||
|  | ||||
| def generate_key() -> str: | ||||
| @@ -56,98 +45,48 @@ def generate_key() -> str: | ||||
|     return b32encode(secrets.token_bytes(15)).decode().lower() | ||||
|  | ||||
|  | ||||
| ConfirmationObjT: TypeAlias = Union[ | ||||
|     MultiuseInvite, | ||||
|     PreregistrationRealm, | ||||
|     PreregistrationUser, | ||||
|     EmailChangeStatus, | ||||
|     UserProfile, | ||||
|     RealmReactivationStatus, | ||||
| ] | ||||
| ConfirmationObjT = Union[MultiuseInvite, PreregistrationUser, EmailChangeStatus] | ||||
|  | ||||
|  | ||||
| def get_object_from_key( | ||||
|     confirmation_key: str, confirmation_types: List[int], *, mark_object_used: bool | ||||
|     confirmation_key: str, confirmation_type: int, activate_object: bool = True | ||||
| ) -> ConfirmationObjT: | ||||
|     """Access a confirmation object from one of the provided confirmation | ||||
|     types with the provided key. | ||||
|  | ||||
|     The mark_object_used parameter determines whether to mark the | ||||
|     confirmation object as used (which generally prevents it from | ||||
|     being used again). It should always be False for MultiuseInvite | ||||
|     objects, since they are intended to be used multiple times. | ||||
|     """ | ||||
|  | ||||
|     # Confirmation keys used to be 40 characters | ||||
|     if len(confirmation_key) not in (24, 40): | ||||
|         raise ConfirmationKeyError(ConfirmationKeyError.WRONG_LENGTH) | ||||
|         raise ConfirmationKeyException(ConfirmationKeyException.WRONG_LENGTH) | ||||
|     try: | ||||
|         confirmation = Confirmation.objects.get( | ||||
|             confirmation_key=confirmation_key, type__in=confirmation_types | ||||
|             confirmation_key=confirmation_key, type=confirmation_type | ||||
|         ) | ||||
|     except Confirmation.DoesNotExist: | ||||
|         raise ConfirmationKeyError(ConfirmationKeyError.DOES_NOT_EXIST) | ||||
|         raise ConfirmationKeyException(ConfirmationKeyException.DOES_NOT_EXIST) | ||||
|  | ||||
|     if confirmation.expiry_date is not None and timezone_now() > confirmation.expiry_date: | ||||
|         raise ConfirmationKeyError(ConfirmationKeyError.EXPIRED) | ||||
|     time_elapsed = timezone_now() - confirmation.date_sent | ||||
|     if time_elapsed.total_seconds() > _properties[confirmation.type].validity_in_days * 24 * 3600: | ||||
|         raise ConfirmationKeyException(ConfirmationKeyException.EXPIRED) | ||||
|  | ||||
|     obj = confirmation.content_object | ||||
|     assert obj is not None | ||||
|  | ||||
|     used_value = confirmation_settings.STATUS_USED | ||||
|     revoked_value = confirmation_settings.STATUS_REVOKED | ||||
|     if hasattr(obj, "status") and obj.status in [used_value, revoked_value]: | ||||
|         # Confirmations where the object has the status attribute are one-time use | ||||
|         # and are marked after being used (or revoked). | ||||
|         raise ConfirmationKeyError(ConfirmationKeyError.EXPIRED) | ||||
|  | ||||
|     if mark_object_used: | ||||
|         # MultiuseInvite objects do not use the STATUS_USED status, since they are | ||||
|         # intended to be used more than once. | ||||
|         assert confirmation.type != Confirmation.MULTIUSE_INVITE | ||||
|         assert hasattr(obj, "status") | ||||
|         obj.status = getattr(settings, "STATUS_USED", 1) | ||||
|     if activate_object and hasattr(obj, "status"): | ||||
|         obj.status = getattr(settings, "STATUS_ACTIVE", 1) | ||||
|         obj.save(update_fields=["status"]) | ||||
|     return obj | ||||
|  | ||||
|  | ||||
| def create_confirmation_link( | ||||
|     obj: ConfirmationObjT, | ||||
|     confirmation_type: int, | ||||
|     *, | ||||
|     validity_in_minutes: Union[Optional[int], UnspecifiedValue] = UnspecifiedValue(), | ||||
|     url_args: Mapping[str, str] = {}, | ||||
|     realm_creation: bool = False, | ||||
|     obj: ContentType, confirmation_type: int, url_args: Mapping[str, str] = {} | ||||
| ) -> str: | ||||
|     # validity_in_minutes is an override for the default values which are | ||||
|     # determined by the confirmation_type - its main purpose is for use | ||||
|     # in tests which may want to have control over the exact expiration time. | ||||
|     key = generate_key() | ||||
|     if realm_creation: | ||||
|         realm = None | ||||
|     else: | ||||
|         assert not isinstance(obj, PreregistrationRealm) | ||||
|     realm = None | ||||
|     if hasattr(obj, "realm"): | ||||
|         realm = obj.realm | ||||
|  | ||||
|     current_time = timezone_now() | ||||
|     expiry_date = None | ||||
|     if not isinstance(validity_in_minutes, UnspecifiedValue): | ||||
|         if validity_in_minutes is None: | ||||
|             expiry_date = None | ||||
|         else: | ||||
|             assert validity_in_minutes is not None | ||||
|             expiry_date = current_time + datetime.timedelta(minutes=validity_in_minutes) | ||||
|     else: | ||||
|         expiry_date = current_time + datetime.timedelta( | ||||
|             days=_properties[confirmation_type].validity_in_days | ||||
|         ) | ||||
|     elif isinstance(obj, Realm): | ||||
|         realm = obj | ||||
|  | ||||
|     Confirmation.objects.create( | ||||
|         content_object=obj, | ||||
|         date_sent=current_time, | ||||
|         date_sent=timezone_now(), | ||||
|         confirmation_key=key, | ||||
|         realm=realm, | ||||
|         expiry_date=expiry_date, | ||||
|         type=confirmation_type, | ||||
|     ) | ||||
|     return confirmation_url(key, realm, confirmation_type, url_args) | ||||
| @@ -169,12 +108,11 @@ def confirmation_url( | ||||
|  | ||||
| class Confirmation(models.Model): | ||||
|     content_type = models.ForeignKey(ContentType, on_delete=CASCADE) | ||||
|     object_id = models.PositiveIntegerField(db_index=True) | ||||
|     object_id: int = models.PositiveIntegerField(db_index=True) | ||||
|     content_object = GenericForeignKey("content_type", "object_id") | ||||
|     date_sent = models.DateTimeField(db_index=True) | ||||
|     confirmation_key = models.CharField(max_length=40, db_index=True) | ||||
|     expiry_date = models.DateTimeField(db_index=True, null=True) | ||||
|     realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE) | ||||
|     date_sent: datetime.datetime = models.DateTimeField(db_index=True) | ||||
|     confirmation_key: str = models.CharField(max_length=40, db_index=True) | ||||
|     realm: Optional[Realm] = models.ForeignKey(Realm, null=True, on_delete=CASCADE) | ||||
|  | ||||
|     # The following list is the set of valid types | ||||
|     USER_REGISTRATION = 1 | ||||
| @@ -185,15 +123,14 @@ class Confirmation(models.Model): | ||||
|     MULTIUSE_INVITE = 6 | ||||
|     REALM_CREATION = 7 | ||||
|     REALM_REACTIVATION = 8 | ||||
|     type = models.PositiveSmallIntegerField() | ||||
|     type: int = models.PositiveSmallIntegerField() | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"<Confirmation: {self.content_object}>" | ||||
|  | ||||
|     class Meta: | ||||
|         unique_together = ("type", "confirmation_key") | ||||
|  | ||||
|     @override | ||||
|     def __str__(self) -> str: | ||||
|         return f"{self.content_object!r}" | ||||
|  | ||||
|  | ||||
| class ConfirmationType: | ||||
|     def __init__( | ||||
| @@ -206,9 +143,9 @@ class ConfirmationType: | ||||
|  | ||||
|  | ||||
| _properties = { | ||||
|     Confirmation.USER_REGISTRATION: ConfirmationType("get_prereg_key_and_redirect"), | ||||
|     Confirmation.USER_REGISTRATION: ConfirmationType("check_prereg_key_and_redirect"), | ||||
|     Confirmation.INVITATION: ConfirmationType( | ||||
|         "get_prereg_key_and_redirect", validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS | ||||
|         "check_prereg_key_and_redirect", validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS | ||||
|     ), | ||||
|     Confirmation.EMAIL_CHANGE: ConfirmationType("confirm_email_change"), | ||||
|     Confirmation.UNSUBSCRIBE: ConfirmationType( | ||||
| @@ -218,7 +155,7 @@ _properties = { | ||||
|     Confirmation.MULTIUSE_INVITE: ConfirmationType( | ||||
|         "join", validity_in_days=settings.INVITATION_LINK_VALIDITY_DAYS | ||||
|     ), | ||||
|     Confirmation.REALM_CREATION: ConfirmationType("get_prereg_key_and_redirect"), | ||||
|     Confirmation.REALM_CREATION: ConfirmationType("check_prereg_key_and_redirect"), | ||||
|     Confirmation.REALM_REACTIVATION: ConfirmationType("realm_reactivation"), | ||||
| } | ||||
|  | ||||
| @@ -249,10 +186,10 @@ def validate_key(creation_key: Optional[str]) -> Optional["RealmCreationKey"]: | ||||
|     try: | ||||
|         key_record = RealmCreationKey.objects.get(creation_key=creation_key) | ||||
|     except RealmCreationKey.DoesNotExist: | ||||
|         raise RealmCreationKey.InvalidError | ||||
|         raise RealmCreationKey.Invalid() | ||||
|     time_elapsed = timezone_now() - key_record.date_created | ||||
|     if time_elapsed.total_seconds() > settings.REALM_CREATION_LINK_VALIDITY_DAYS * 24 * 3600: | ||||
|         raise RealmCreationKey.InvalidError | ||||
|         raise RealmCreationKey.Invalid() | ||||
|     return key_record | ||||
|  | ||||
|  | ||||
| @@ -273,7 +210,7 @@ class RealmCreationKey(models.Model): | ||||
|  | ||||
|     # True just if we should presume the email address the user enters | ||||
|     # is theirs, and skip sending mail to it to confirm that. | ||||
|     presume_email_valid = models.BooleanField(default=False) | ||||
|     presume_email_valid: bool = models.BooleanField(default=False) | ||||
|  | ||||
|     class InvalidError(Exception): | ||||
|     class Invalid(Exception): | ||||
|         pass | ||||
|   | ||||
| @@ -2,5 +2,5 @@ | ||||
|  | ||||
| __revision__ = "$Id: settings.py 12 2008-11-23 19:38:52Z jarek.zgoda $" | ||||
|  | ||||
| STATUS_USED = 1 | ||||
| STATUS_ACTIVE = 1 | ||||
| STATUS_REVOKED = 2 | ||||
|   | ||||
| @@ -1,39 +0,0 @@ | ||||
| from decimal import Decimal | ||||
| from typing import Any, Dict | ||||
|  | ||||
| from django.utils.timezone import now as timezone_now | ||||
|  | ||||
| from corporate.lib.stripe import renewal_amount | ||||
| from corporate.models import Customer, CustomerPlan | ||||
| from zerver.lib.utils import assert_is_not_none | ||||
|  | ||||
|  | ||||
| def get_realms_with_default_discount_dict() -> Dict[str, Decimal]: | ||||
|     realms_with_default_discount: Dict[str, Any] = {} | ||||
|     customers = ( | ||||
|         Customer.objects.exclude(default_discount=None) | ||||
|         .exclude(default_discount=0) | ||||
|         .exclude(realm=None) | ||||
|     ) | ||||
|     for customer in customers: | ||||
|         assert customer.realm is not None | ||||
|         realms_with_default_discount[customer.realm.string_id] = assert_is_not_none( | ||||
|             customer.default_discount | ||||
|         ) | ||||
|     return realms_with_default_discount | ||||
|  | ||||
|  | ||||
| def estimate_annual_recurring_revenue_by_realm() -> Dict[str, int]:  # nocoverage | ||||
|     annual_revenue = {} | ||||
|     for plan in CustomerPlan.objects.filter(status=CustomerPlan.ACTIVE).select_related( | ||||
|         "customer__realm" | ||||
|     ): | ||||
|         if plan.customer.realm is not None: | ||||
|             # TODO: figure out what to do for plans that don't automatically | ||||
|             # renew, but which probably will renew | ||||
|             renewal_cents = renewal_amount(plan, timezone_now()) | ||||
|             if plan.billing_schedule == CustomerPlan.MONTHLY: | ||||
|                 renewal_cents *= 12 | ||||
|             # TODO: Decimal stuff | ||||
|             annual_revenue[plan.customer.realm.string_id] = int(renewal_cents / 100) | ||||
|     return annual_revenue | ||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user