mirror of
				https://github.com/zulip/zulip.git
				synced 2025-10-31 03:53:50 +00:00 
			
		
		
		
	Compare commits
	
		
			1 Commits
		
	
	
		
			shared-0.0
			...
			enterprise
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 2738b7f3a8 | 
| @@ -1,5 +0,0 @@ | ||||
| > 0.15% | ||||
| > 0.15% in US | ||||
| last 2 versions | ||||
| Firefox ESR | ||||
| not dead | ||||
							
								
								
									
										12
									
								
								.codecov.yml
									
									
									
									
									
								
							
							
						
						
									
										12
									
								
								.codecov.yml
									
									
									
									
									
								
							| @@ -1,12 +0,0 @@ | ||||
| comment: off | ||||
|  | ||||
| coverage: | ||||
|   status: | ||||
|     project: | ||||
|       default: | ||||
|         target: auto | ||||
|         # Codecov has the tendency to report a lot of false negatives, | ||||
|         # so we basically suppress comments completely. | ||||
|         threshold: 50% | ||||
|         base: auto | ||||
|     patch: off | ||||
| @@ -1,18 +0,0 @@ | ||||
| te | ||||
| ans | ||||
| pullrequest | ||||
| ist | ||||
| cros | ||||
| wit | ||||
| nwe | ||||
| circularly | ||||
| ned | ||||
| ba | ||||
| ressemble | ||||
| ser | ||||
| sur | ||||
| hel | ||||
| fpr | ||||
| alls | ||||
| nd | ||||
| ot | ||||
| @@ -1,24 +0,0 @@ | ||||
| root = true | ||||
|  | ||||
| [*] | ||||
| end_of_line = lf | ||||
| charset = utf-8 | ||||
| indent_size = 4 | ||||
| indent_style = space | ||||
| insert_final_newline = true | ||||
| trim_trailing_whitespace = true | ||||
|  | ||||
| binary_next_line = true  # for shfmt | ||||
| switch_case_indent = true  # for shfmt | ||||
|  | ||||
| [{*.{js,json,ts},check-openapi}] | ||||
| max_line_length = 100 | ||||
|  | ||||
| [*.{py,pyi}] | ||||
| max_line_length = 110 | ||||
|  | ||||
| [*.{md,svg,rb,pp,yaml,yml}] | ||||
| indent_size = 2 | ||||
|  | ||||
| [package.json] | ||||
| indent_size = 2 | ||||
| @@ -1,14 +0,0 @@ | ||||
| # This is intended for generated files and vendored third-party files. | ||||
| # For our source code, instead of adding files here, consider using | ||||
| # specific eslint-disable comments in the files themselves. | ||||
|  | ||||
| /docs/_build | ||||
| /static/generated | ||||
| /static/third | ||||
| /static/webpack-bundles | ||||
| /var/* | ||||
| !/var/puppeteer | ||||
| /var/puppeteer/* | ||||
| !/var/puppeteer/test_credentials.d.ts | ||||
| /zulip-current-venv | ||||
| /zulip-py3-venv | ||||
							
								
								
									
										261
									
								
								.eslintrc.json
									
									
									
									
									
								
							
							
						
						
									
										261
									
								
								.eslintrc.json
									
									
									
									
									
								
							| @@ -1,261 +0,0 @@ | ||||
| { | ||||
|     "env": { | ||||
|         "es2020": true, | ||||
|         "node": true | ||||
|     }, | ||||
|     "extends": [ | ||||
|         "eslint:recommended", | ||||
|         "plugin:import/errors", | ||||
|         "plugin:import/warnings", | ||||
|         "plugin:unicorn/recommended", | ||||
|         "prettier" | ||||
|     ], | ||||
|     "parser": "@babel/eslint-parser", | ||||
|     "parserOptions": { | ||||
|         "warnOnUnsupportedTypeScriptVersion": false, | ||||
|         "sourceType": "unambiguous" | ||||
|     }, | ||||
|     "plugins": ["formatjs"], | ||||
|     "settings": { | ||||
|         "additionalFunctionNames": ["$t", "$t_html"] | ||||
|     }, | ||||
|     "reportUnusedDisableDirectives": true, | ||||
|     "rules": { | ||||
|         "array-callback-return": "error", | ||||
|         "arrow-body-style": "error", | ||||
|         "block-scoped-var": "error", | ||||
|         "consistent-return": "error", | ||||
|         "curly": "error", | ||||
|         "dot-notation": "error", | ||||
|         "eqeqeq": "error", | ||||
|         "formatjs/enforce-default-message": ["error", "literal"], | ||||
|         "formatjs/enforce-placeholders": [ | ||||
|             "error", | ||||
|             {"ignoreList": ["b", "code", "em", "i", "kbd", "p", "strong"]} | ||||
|         ], | ||||
|         "formatjs/no-id": "error", | ||||
|         "guard-for-in": "error", | ||||
|         "import/extensions": "error", | ||||
|         "import/first": "error", | ||||
|         "import/newline-after-import": "error", | ||||
|         "import/no-self-import": "error", | ||||
|         "import/no-useless-path-segments": "error", | ||||
|         "import/order": [ | ||||
|             "error", | ||||
|             { | ||||
|                 "alphabetize": {"order": "asc"}, | ||||
|                 "newlines-between": "always" | ||||
|             } | ||||
|         ], | ||||
|         "import/unambiguous": "error", | ||||
|         "lines-around-directive": "error", | ||||
|         "new-cap": "error", | ||||
|         "no-alert": "error", | ||||
|         "no-array-constructor": "error", | ||||
|         "no-bitwise": "error", | ||||
|         "no-caller": "error", | ||||
|         "no-catch-shadow": "error", | ||||
|         "no-constant-condition": ["error", {"checkLoops": false}], | ||||
|         "no-div-regex": "error", | ||||
|         "no-duplicate-imports": "error", | ||||
|         "no-else-return": "error", | ||||
|         "no-eq-null": "error", | ||||
|         "no-eval": "error", | ||||
|         "no-implicit-coercion": "error", | ||||
|         "no-implied-eval": "error", | ||||
|         "no-inner-declarations": "off", | ||||
|         "no-iterator": "error", | ||||
|         "no-label-var": "error", | ||||
|         "no-labels": "error", | ||||
|         "no-loop-func": "error", | ||||
|         "no-multi-str": "error", | ||||
|         "no-native-reassign": "error", | ||||
|         "no-new-func": "error", | ||||
|         "no-new-object": "error", | ||||
|         "no-new-wrappers": "error", | ||||
|         "no-octal-escape": "error", | ||||
|         "no-plusplus": "error", | ||||
|         "no-proto": "error", | ||||
|         "no-return-assign": "error", | ||||
|         "no-script-url": "error", | ||||
|         "no-self-compare": "error", | ||||
|         "no-sync": "error", | ||||
|         "no-throw-literal": "error", | ||||
|         "no-undef-init": "error", | ||||
|         "no-unneeded-ternary": ["error", {"defaultAssignment": false}], | ||||
|         "no-unused-expressions": "error", | ||||
|         "no-use-before-define": ["error", {"functions": false}], | ||||
|         "no-useless-concat": "error", | ||||
|         "no-useless-constructor": "error", | ||||
|         "no-var": "error", | ||||
|         "object-shorthand": "error", | ||||
|         "one-var": ["error", "never"], | ||||
|         "prefer-arrow-callback": "error", | ||||
|         "prefer-const": [ | ||||
|             "error", | ||||
|             { | ||||
|                 "ignoreReadBeforeAssign": true | ||||
|             } | ||||
|         ], | ||||
|         "radix": "error", | ||||
|         "sort-imports": ["error", {"ignoreDeclarationSort": true}], | ||||
|         "spaced-comment": ["error", "always", {"markers": ["/"]}], | ||||
|         "strict": "error", | ||||
|         "unicorn/consistent-function-scoping": "off", | ||||
|         "unicorn/explicit-length-check": "off", | ||||
|         "unicorn/filename-case": "off", | ||||
|         "unicorn/no-await-expression-member": "off", | ||||
|         "unicorn/no-nested-ternary": "off", | ||||
|         "unicorn/no-null": "off", | ||||
|         "unicorn/no-process-exit": "off", | ||||
|         "unicorn/no-useless-undefined": "off", | ||||
|         "unicorn/number-literal-case": "off", | ||||
|         "unicorn/numeric-separators-style": "off", | ||||
|         "unicorn/prefer-module": "off", | ||||
|         "unicorn/prefer-node-protocol": "off", | ||||
|         "unicorn/prefer-spread": "off", | ||||
|         "unicorn/prefer-ternary": "off", | ||||
|         "unicorn/prevent-abbreviations": "off", | ||||
|         "valid-typeof": ["error", {"requireStringLiterals": true}], | ||||
|         "yoda": "error" | ||||
|     }, | ||||
|     "overrides": [ | ||||
|         { | ||||
|             "files": ["frontend_tests/puppeteer_lib/**", "frontend_tests/puppeteer_tests/**"], | ||||
|             "globals": { | ||||
|                 "$": false, | ||||
|                 "zulip_test": false | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["static/js/**"], | ||||
|             "globals": { | ||||
|                 "StripeCheckout": false | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["**/*.ts"], | ||||
|             "extends": [ | ||||
|                 "plugin:@typescript-eslint/recommended-requiring-type-checking", | ||||
|                 "plugin:import/typescript" | ||||
|             ], | ||||
|             "parserOptions": { | ||||
|                 "project": "tsconfig.json" | ||||
|             }, | ||||
|             "settings": { | ||||
|                 "import/resolver": { | ||||
|                     "node": { | ||||
|                         "extensions": [".ts", ".d.ts", ".js"] // https://github.com/import-js/eslint-plugin-import/issues/2267 | ||||
|                     } | ||||
|                 } | ||||
|             }, | ||||
|             "globals": { | ||||
|                 "JQuery": false | ||||
|             }, | ||||
|             "rules": { | ||||
|                 // Disable base rule to avoid conflict | ||||
|                 "no-duplicate-imports": "off", | ||||
|                 "no-unused-vars": "off", | ||||
|                 "no-useless-constructor": "off", | ||||
|                 "no-use-before-define": "off", | ||||
|  | ||||
|                 "@typescript-eslint/array-type": "error", | ||||
|                 "@typescript-eslint/consistent-type-assertions": "error", | ||||
|                 "@typescript-eslint/consistent-type-imports": "error", | ||||
|                 "@typescript-eslint/explicit-function-return-type": [ | ||||
|                     "error", | ||||
|                     {"allowExpressions": true} | ||||
|                 ], | ||||
|                 "@typescript-eslint/member-ordering": "error", | ||||
|                 "@typescript-eslint/no-duplicate-imports": "off", | ||||
|                 "@typescript-eslint/no-explicit-any": "off", | ||||
|                 "@typescript-eslint/no-extraneous-class": "error", | ||||
|                 "@typescript-eslint/no-non-null-assertion": "off", | ||||
|                 "@typescript-eslint/no-parameter-properties": "error", | ||||
|                 "@typescript-eslint/no-unnecessary-qualifier": "error", | ||||
|                 "@typescript-eslint/no-unused-vars": ["error", {"varsIgnorePattern": "^_"}], | ||||
|                 "@typescript-eslint/no-unsafe-argument": "off", | ||||
|                 "@typescript-eslint/no-unsafe-assignment": "off", | ||||
|                 "@typescript-eslint/no-unsafe-call": "off", | ||||
|                 "@typescript-eslint/no-unsafe-member-access": "off", | ||||
|                 "@typescript-eslint/no-unsafe-return": "off", | ||||
|                 "@typescript-eslint/no-use-before-define": "error", | ||||
|                 "@typescript-eslint/no-useless-constructor": "error", | ||||
|                 "@typescript-eslint/prefer-includes": "error", | ||||
|                 "@typescript-eslint/prefer-string-starts-ends-with": "error", | ||||
|                 "@typescript-eslint/promise-function-async": "error", | ||||
|                 "@typescript-eslint/unified-signatures": "error", | ||||
|                 "no-undef": "error" | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["**/*.d.ts"], | ||||
|             "rules": { | ||||
|                 "import/unambiguous": "off" | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["frontend_tests/**"], | ||||
|             "globals": { | ||||
|                 "CSS": false, | ||||
|                 "document": false, | ||||
|                 "navigator": false, | ||||
|                 "window": false | ||||
|             }, | ||||
|             "rules": { | ||||
|                 "formatjs/no-id": "off", | ||||
|                 "new-cap": "off", | ||||
|                 "no-sync": "off", | ||||
|                 "unicorn/prefer-prototype-methods": "off" | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["tools/debug-require.js"], | ||||
|             "env": { | ||||
|                 "browser": true, | ||||
|                 "es2020": false | ||||
|             }, | ||||
|             "rules": { | ||||
|                 // Don’t require ES features that PhantomJS doesn’t support | ||||
|                 // TODO: Toggle these settings now that we don't use PhantomJS | ||||
|                 "no-var": "off", | ||||
|                 "object-shorthand": "off", | ||||
|                 "prefer-arrow-callback": "off" | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["static/**"], | ||||
|             "env": { | ||||
|                 "browser": true, | ||||
|                 "node": false | ||||
|             }, | ||||
|             "rules": { | ||||
|                 "no-console": "error" | ||||
|             }, | ||||
|             "settings": { | ||||
|                 "import/resolver": "webpack" | ||||
|             } | ||||
|         }, | ||||
|         { | ||||
|             "files": ["static/shared/**"], | ||||
|             "env": { | ||||
|                 "browser": false, | ||||
|                 "shared-node-browser": true | ||||
|             }, | ||||
|             "rules": { | ||||
|                 "import/no-restricted-paths": [ | ||||
|                     "error", | ||||
|                     { | ||||
|                         "zones": [ | ||||
|                             { | ||||
|                                 "target": "./static/shared", | ||||
|                                 "from": ".", | ||||
|                                 "except": ["./node_modules", "./static/shared"] | ||||
|                             } | ||||
|                         ] | ||||
|                     } | ||||
|                 ] | ||||
|             } | ||||
|         } | ||||
|     ] | ||||
| } | ||||
							
								
								
									
										52
									
								
								.gitattributes
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										52
									
								
								.gitattributes
									
									
									
									
										vendored
									
									
								
							| @@ -1,32 +1,20 @@ | ||||
| # DIFFS: Noise suppression. | ||||
| # | ||||
| # Suppress noisy generated files in diffs. | ||||
| # (When you actually want to see these diffs, use `git diff -a`.) | ||||
|  | ||||
| # Large test fixtures: | ||||
| corporate/tests/stripe_fixtures/*.json -diff | ||||
|  | ||||
|  | ||||
| # FORMATTING | ||||
|  | ||||
| # Maintain LF (Unix-style) newlines in text files. | ||||
| *   text=auto eol=lf | ||||
|  | ||||
| # Make sure various media files never get somehow auto-detected as text | ||||
| # and then newline-converted. | ||||
| *.gif binary | ||||
| *.jpg binary | ||||
| *.jpeg binary | ||||
| *.eot binary | ||||
| *.woff binary | ||||
| *.woff2 binary | ||||
| *.svg binary | ||||
| *.ttf binary | ||||
| *.png binary | ||||
| *.otf binary | ||||
| *.tif binary | ||||
| *.ogg binary | ||||
| *.bson binary | ||||
| *.bmp binary | ||||
| *.mp3 binary | ||||
| *.pdf binary | ||||
| .gitignore export-ignore | ||||
| .gitattributes export-ignore | ||||
| /analytics export-ignore | ||||
| /assets export-ignore | ||||
| /bots export-ignore | ||||
| /corporate export-ignore | ||||
| /static export-ignore | ||||
| /tools export-ignore | ||||
| /zilencer export-ignore | ||||
| /templates/analytics export-ignore | ||||
| /templates/corporate export-ignore | ||||
| /templates/zilencer export-ignore | ||||
| /puppet/zulip_internal export-ignore | ||||
| /zproject/local_settings.py export-ignore | ||||
| /zproject/test_settings.py export-ignore | ||||
| /zerver/fixtures export-ignore | ||||
| /zerver/tests.py export-ignore | ||||
| /zerver/tests export-ignore | ||||
| /node_modules export-ignore | ||||
| /humbug export-ignore | ||||
|   | ||||
							
								
								
									
										3
									
								
								.github/FUNDING.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										3
									
								
								.github/FUNDING.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,3 +0,0 @@ | ||||
| github: zulip | ||||
| patreon: zulip | ||||
| open_collective: zulip | ||||
							
								
								
									
										11
									
								
								.github/pull_request_template.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										11
									
								
								.github/pull_request_template.md
									
									
									
									
										vendored
									
									
								
							| @@ -1,11 +0,0 @@ | ||||
| <!-- What's this PR for?  (Just a link to an issue is fine.) --> | ||||
|  | ||||
| **Testing plan:** <!-- How have you tested? --> | ||||
|  | ||||
| **GIFs or screenshots:** <!-- If a UI change.  See: | ||||
|   https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html | ||||
|   --> | ||||
|  | ||||
| <!-- Also be sure to make clear, coherent commits: | ||||
|   https://zulip.readthedocs.io/en/latest/contributing/version-control.html | ||||
|   --> | ||||
							
								
								
									
										43
									
								
								.github/workflows/cancel-previous-runs.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										43
									
								
								.github/workflows/cancel-previous-runs.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,43 +0,0 @@ | ||||
| name: Cancel previous runs | ||||
| on: [push, pull_request] | ||||
|  | ||||
| defaults: | ||||
|   run: | ||||
|     shell: bash | ||||
|  | ||||
| jobs: | ||||
|   cancel: | ||||
|     name: Cancel previous runs | ||||
|     runs-on: ubuntu-latest | ||||
|     timeout-minutes: 3 | ||||
|  | ||||
|     # Don't run this job for zulip/zulip pushes since we | ||||
|     # want to run those jobs. | ||||
|     if: ${{ github.event_name != 'push' || github.event.repository.full_name != 'zulip/zulip' }} | ||||
|  | ||||
|     steps: | ||||
|       # We get workflow IDs from GitHub API so we don't have to maintain | ||||
|       # a hard-coded list of IDs which need to be updated when a workflow | ||||
|       # is added or removed. And, workflow IDs are different for other forks | ||||
|       # so this is required. | ||||
|       - name: Get workflow IDs. | ||||
|         id: workflow_ids | ||||
|         continue-on-error: true # Don't fail this job on failure | ||||
|         env: | ||||
|           # This is in <owner>/<repo> format e.g. zulip/zulip | ||||
|           REPOSITORY: ${{ github.repository }} | ||||
|         run: | | ||||
|           workflow_api_url=https://api.github.com/repos/$REPOSITORY/actions/workflows | ||||
|           curl -fL $workflow_api_url -o workflows.json | ||||
|  | ||||
|           script="const {workflows} = require('./workflows'); \ | ||||
|                   const ids = workflows.map(workflow => workflow.id); \ | ||||
|                   console.log(ids.join(','));" | ||||
|           ids=$(node -e "$script") | ||||
|           echo "::set-output name=ids::$ids" | ||||
|  | ||||
|       - uses: styfle/cancel-workflow-action@0.9.0 | ||||
|         continue-on-error: true # Don't fail this job on failure | ||||
|         with: | ||||
|           workflow_id: ${{ steps.workflow_ids.outputs.ids }} | ||||
|           access_token: ${{ github.token }} | ||||
							
								
								
									
										27
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										27
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,27 +0,0 @@ | ||||
| name: "Code scanning" | ||||
|  | ||||
| on: | ||||
|   push: | ||||
|     branches-ignore: | ||||
|       - dependabot/** # https://github.com/github/codeql-action/pull/435 | ||||
|   pull_request: {} | ||||
|  | ||||
| jobs: | ||||
|   CodeQL: | ||||
|     if: ${{!github.event.repository.private}} | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
|     steps: | ||||
|       - name: Check out repository | ||||
|         uses: actions/checkout@v2 | ||||
|  | ||||
|       # Initializes the CodeQL tools for scanning. | ||||
|       - name: Initialize CodeQL | ||||
|         uses: github/codeql-action/init@v1 | ||||
|  | ||||
|         # Override language selection by uncommenting this and choosing your languages | ||||
|         # with: | ||||
|         #   languages: go, javascript, csharp, python, cpp, java | ||||
|  | ||||
|       - name: Perform CodeQL Analysis | ||||
|         uses: github/codeql-action/analyze@v1 | ||||
							
								
								
									
										266
									
								
								.github/workflows/production-suite.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										266
									
								
								.github/workflows/production-suite.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,266 +0,0 @@ | ||||
| name: Zulip production suite | ||||
|  | ||||
| on: | ||||
|   push: {} | ||||
|   pull_request: | ||||
|     paths: | ||||
|       - .github/workflows/production-suite.yml | ||||
|       - "**/migrations/**" | ||||
|       - babel.config.js | ||||
|       - manage.py | ||||
|       - postcss.config.js | ||||
|       - puppet/** | ||||
|       - requirements/** | ||||
|       - scripts/** | ||||
|       - static/assets/** | ||||
|       - static/third/** | ||||
|       - tools/** | ||||
|       - webpack.config.ts | ||||
|       - yarn.lock | ||||
|       - zerver/worker/queue_processors.py | ||||
|       - zerver/lib/push_notifications.py | ||||
|       - zerver/decorator.py | ||||
|       - zproject/** | ||||
|  | ||||
| defaults: | ||||
|   run: | ||||
|     shell: bash | ||||
|  | ||||
| jobs: | ||||
|   production_build: | ||||
|     # This job builds a release tarball from the current commit, which | ||||
|     # will be used for all of the following install/upgrade tests. | ||||
|     name: Debian 10 production build | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
|     # Docker images are built from 'tools/ci/Dockerfile'; the comments at | ||||
|     # the top explain how to build and upload these images. | ||||
|     # Debian 10 ships with Python 3.7.3. | ||||
|     container: zulip/ci:buster | ||||
|     steps: | ||||
|       - name: Add required permissions | ||||
|         run: | | ||||
|           # The checkout actions doesn't clone to ~/zulip or allow | ||||
|           # us to use the path option to clone outside the current | ||||
|           # /__w/zulip/zulip directory. Since this directory is owned | ||||
|           # by root we need to change it's ownership to allow the | ||||
|           # github user to clone the code here. | ||||
|           # Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE | ||||
|           # which is /home/runner/work/. | ||||
|           sudo chown -R github . | ||||
|  | ||||
|           # This is the GitHub Actions specific cache directory the | ||||
|           # the current github user must be able to access for the | ||||
|           # cache action to work. It is owned by root currently. | ||||
|           sudo chmod -R 0777 /__w/_temp/ | ||||
|  | ||||
|       - uses: actions/checkout@v2 | ||||
|  | ||||
|       - name: Create cache directories | ||||
|         run: | | ||||
|           dirs=(/srv/zulip-{npm,venv,emoji}-cache) | ||||
|           sudo mkdir -p "${dirs[@]}" | ||||
|           sudo chown -R github "${dirs[@]}" | ||||
|  | ||||
|       - name: Restore node_modules cache | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /srv/zulip-npm-cache | ||||
|           key: v1-yarn-deps-buster-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }} | ||||
|           restore-keys: v1-yarn-deps-buster | ||||
|  | ||||
|       - name: Restore python cache | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /srv/zulip-venv-cache | ||||
|           key: v1-venv-buster-${{ hashFiles('requirements/dev.txt') }} | ||||
|           restore-keys: v1-venv-buster | ||||
|  | ||||
|       - name: Restore emoji cache | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /srv/zulip-emoji-cache | ||||
|           key: v1-emoji-buster-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }} | ||||
|           restore-keys: v1-emoji-buster | ||||
|  | ||||
|       - name: Build production tarball | ||||
|         run: ./tools/ci/production-build | ||||
|  | ||||
|       - name: Upload production build artifacts for install jobs | ||||
|         uses: actions/upload-artifact@v2 | ||||
|         with: | ||||
|           name: production-tarball | ||||
|           path: /tmp/production-build | ||||
|           retention-days: 14 | ||||
|  | ||||
|       - name: Report status | ||||
|         if: failure() | ||||
|         env: | ||||
|           ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|         run: tools/ci/send-failure-message | ||||
|  | ||||
|   production_install: | ||||
|     # This job installs the server release tarball built above on a | ||||
|     # range of platforms, and does some basic health checks on the | ||||
|     # resulting installer Zulip server. | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         include: | ||||
|           # Docker images are built from 'tools/ci/Dockerfile'; the comments at | ||||
|           # the top explain how to build and upload these images. | ||||
|           - docker_image: zulip/ci:focal | ||||
|             name: Ubuntu 20.04 production install | ||||
|             os: focal | ||||
|             extra_args: "" | ||||
|  | ||||
|           - docker_image: zulip/ci:buster | ||||
|             name: Debian 10 production install with custom db name and user | ||||
|             os: buster | ||||
|             extra_args: --test-custom-db | ||||
|  | ||||
|           - docker_image: zulip/ci:bullseye | ||||
|             name: Debian 11 production install | ||||
|             os: bullseye | ||||
|             extra_args: "" | ||||
|  | ||||
|     name: ${{ matrix.name  }} | ||||
|     container: | ||||
|       image: ${{ matrix.docker_image }} | ||||
|       options: --init | ||||
|     runs-on: ubuntu-latest | ||||
|     needs: production_build | ||||
|  | ||||
|     steps: | ||||
|       - name: Download built production tarball | ||||
|         uses: actions/download-artifact@v2 | ||||
|         with: | ||||
|           name: production-tarball | ||||
|           path: /tmp | ||||
|  | ||||
|       - name: Add required permissions and setup | ||||
|         run: | | ||||
|           # This is the GitHub Actions specific cache directory the | ||||
|           # the current github user must be able to access for the | ||||
|           # cache action to work. It is owned by root currently. | ||||
|           sudo chmod -R 0777 /__w/_temp/ | ||||
|  | ||||
|           # Since actions/download-artifact@v2 loses all the permissions | ||||
|           # of the tarball uploaded by the upload artifact fix those. | ||||
|           chmod +x /tmp/production-upgrade-pg | ||||
|           chmod +x /tmp/production-pgroonga | ||||
|           chmod +x /tmp/production-install | ||||
|           chmod +x /tmp/production-verify | ||||
|           chmod +x /tmp/send-failure-message | ||||
|  | ||||
|       - name: Create cache directories | ||||
|         run: | | ||||
|           dirs=(/srv/zulip-{npm,venv,emoji}-cache) | ||||
|           sudo mkdir -p "${dirs[@]}" | ||||
|           sudo chown -R github "${dirs[@]}" | ||||
|  | ||||
|       - name: Restore node_modules cache | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /srv/zulip-npm-cache | ||||
|           key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('/tmp/package.json') }}-${{ hashFiles('/tmp/yarn.lock') }} | ||||
|           restore-keys: v1-yarn-deps-${{ matrix.os }} | ||||
|  | ||||
|       - name: Install production | ||||
|         run: | | ||||
|           sudo service rabbitmq-server restart | ||||
|           sudo /tmp/production-install ${{ matrix.extra-args }} | ||||
|  | ||||
|       - name: Verify install | ||||
|         run: sudo /tmp/production-verify ${{ matrix.extra-args }} | ||||
|  | ||||
|       - name: Install pgroonga | ||||
|         if: ${{ matrix.os == 'focal' }} | ||||
|         run: sudo /tmp/production-pgroonga | ||||
|  | ||||
|       - name: Verify install after installing pgroonga | ||||
|         if: ${{ matrix.os == 'focal' }} | ||||
|         run: sudo /tmp/production-verify ${{ matrix.extra-args }} | ||||
|  | ||||
|       - name: Upgrade postgresql | ||||
|         if: ${{ matrix.os == 'focal' }} | ||||
|         run: sudo /tmp/production-upgrade-pg | ||||
|  | ||||
|       - name: Verify install after upgrading postgresql | ||||
|         if: ${{ matrix.os == 'focal' }} | ||||
|         run: sudo /tmp/production-verify ${{ matrix.extra-args }} | ||||
|  | ||||
|       - name: Report status | ||||
|         if: failure() | ||||
|         env: | ||||
|           ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|         run: /tmp/send-failure-message | ||||
|  | ||||
|   production_upgrade: | ||||
|     # The production upgrade job starts with a container with a | ||||
|     # previous Zulip release installed, and attempts to upgrade it to | ||||
|     # the release tarball built for the current commit being tested. | ||||
|     # | ||||
|     # This is intended to catch bugs that result in the upgrade | ||||
|     # process failing. | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         include: | ||||
|           # Docker images are built from 'tools/ci/Dockerfile'; the comments at | ||||
|           # the top explain how to build and upload these images. | ||||
|           - docker_image: zulip/ci:buster-3.4 | ||||
|             name: 3.4 Version Upgrade | ||||
|             os: buster | ||||
|  | ||||
|           - docker_image: zulip/ci:bullseye-4.10 | ||||
|             name: 4.10 Version Upgrade | ||||
|             os: bullseye | ||||
|  | ||||
|     name: ${{ matrix.name  }} | ||||
|     container: | ||||
|       image: ${{ matrix.docker_image }} | ||||
|       options: --init | ||||
|     runs-on: ubuntu-latest | ||||
|     needs: production_build | ||||
|  | ||||
|     steps: | ||||
|       - name: Download built production tarball | ||||
|         uses: actions/download-artifact@v2 | ||||
|         with: | ||||
|           name: production-tarball | ||||
|           path: /tmp | ||||
|  | ||||
|       - name: Add required permissions and setup | ||||
|         run: | | ||||
|           # This is the GitHub Actions specific cache directory the | ||||
|           # the current github user must be able to access for the | ||||
|           # cache action to work. It is owned by root currently. | ||||
|           sudo chmod -R 0777 /__w/_temp/ | ||||
|  | ||||
|           # Since actions/download-artifact@v2 loses all the permissions | ||||
|           # of the tarball uploaded by the upload artifact fix those. | ||||
|           chmod +x /tmp/production-upgrade | ||||
|           chmod +x /tmp/production-verify | ||||
|           chmod +x /tmp/send-failure-message | ||||
|  | ||||
|       - name: Create cache directories | ||||
|         run: | | ||||
|           dirs=(/srv/zulip-{npm,venv,emoji}-cache) | ||||
|           sudo mkdir -p "${dirs[@]}" | ||||
|           sudo chown -R github "${dirs[@]}" | ||||
|  | ||||
|       - name: Upgrade production | ||||
|         run: sudo /tmp/production-upgrade | ||||
|  | ||||
|         # TODO: We should be running production-verify here, but it | ||||
|         # doesn't pass yet. | ||||
|         # | ||||
|         # - name: Verify install | ||||
|         #   run: sudo /tmp/production-verify | ||||
|  | ||||
|       - name: Report status | ||||
|         if: failure() | ||||
|         env: | ||||
|           ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|         run: /tmp/send-failure-message | ||||
							
								
								
									
										24
									
								
								.github/workflows/update-oneclick-apps.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										24
									
								
								.github/workflows/update-oneclick-apps.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,24 +0,0 @@ | ||||
| name: Update one click apps | ||||
| on: | ||||
|   release: | ||||
|     types: [published] | ||||
| jobs: | ||||
|   update-digitalocean-oneclick-app: | ||||
|     runs-on: ubuntu-latest | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|       - name: Update DigitalOcean one click app | ||||
|         env: | ||||
|           DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }} | ||||
|           ZULIP_API_KEY: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_API_KEY }} | ||||
|           ZULIP_EMAIL: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_EMAIL }} | ||||
|           ZULIP_SITE: https://chat.zulip.org | ||||
|           ONE_CLICK_ACTION_STREAM: kandra ops | ||||
|           PYTHON_DIGITALOCEAN_REQUEST_TIMEOUT_SEC: 30 | ||||
|           RELEASE_VERSION: ${{ github.event.release.tag_name }} | ||||
|         run: | | ||||
|           export PATH="$HOME/.local/bin:$PATH" | ||||
|           git clone https://github.com/zulip/marketplace-partners | ||||
|           pip3 install python-digitalocean zulip fab-classic | ||||
|           echo $PATH | ||||
|           python3 tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py | ||||
							
								
								
									
										226
									
								
								.github/workflows/zulip-ci.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										226
									
								
								.github/workflows/zulip-ci.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,226 +0,0 @@ | ||||
| # NOTE: Everything test in this file should be in `tools/test-all`.  If there's a | ||||
| # reason not to run it there, it should be there as a comment | ||||
| # explaining why. | ||||
|  | ||||
| name: Zulip CI | ||||
|  | ||||
| on: [push, pull_request] | ||||
|  | ||||
| defaults: | ||||
|   run: | ||||
|     shell: bash | ||||
|  | ||||
| jobs: | ||||
|   tests: | ||||
|     strategy: | ||||
|       fail-fast: false | ||||
|       matrix: | ||||
|         include: | ||||
|           # Base images are built using `tools/ci/Dockerfile.prod.template`. | ||||
|           # The comments at the top explain how to build and upload these images. | ||||
|           # Debian 10 ships with Python 3.7.3. | ||||
|           - docker_image: zulip/ci:buster | ||||
|             name: Debian 10 Buster (Python 3.7, backend + frontend) | ||||
|             os: buster | ||||
|             include_frontend_tests: true | ||||
|           # Ubuntu 20.04 ships with Python 3.8.2. | ||||
|           - docker_image: zulip/ci:focal | ||||
|             name: Ubuntu 20.04 Focal (Python 3.8, backend) | ||||
|             os: focal | ||||
|             include_frontend_tests: false | ||||
|           # Debian 11 ships with Python 3.9.2. | ||||
|           - docker_image: zulip/ci:bullseye | ||||
|             name: Debian 11 Bullseye (Python 3.9, backend) | ||||
|             os: bullseye | ||||
|             include_frontend_tests: false | ||||
|  | ||||
|     runs-on: ubuntu-latest | ||||
|     name: ${{ matrix.name }} | ||||
|     container: ${{ matrix.docker_image }} | ||||
|     env: | ||||
|       # GitHub Actions sets HOME to /github/home which causes | ||||
|       # problem later in provision and frontend test that runs | ||||
|       # tools/setup/postgresql-init-dev-db because of the .pgpass | ||||
|       # location. PostgreSQL (psql) expects .pgpass to be at | ||||
|       # /home/github/.pgpass and setting home to `/home/github/` | ||||
|       # ensures it written there because we write it to ~/.pgpass. | ||||
|       HOME: /home/github/ | ||||
|  | ||||
|     steps: | ||||
|       - uses: actions/checkout@v2 | ||||
|  | ||||
|       - name: Create cache directories | ||||
|         run: | | ||||
|           dirs=(/srv/zulip-{npm,venv,emoji}-cache) | ||||
|           sudo mkdir -p "${dirs[@]}" | ||||
|           sudo chown -R github "${dirs[@]}" | ||||
|  | ||||
|       - name: Restore node_modules cache | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /srv/zulip-npm-cache | ||||
|           key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }} | ||||
|           restore-keys: v1-yarn-deps-${{ matrix.os }} | ||||
|  | ||||
|       - name: Restore python cache | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /srv/zulip-venv-cache | ||||
|           key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/dev.txt') }} | ||||
|           restore-keys: v1-venv-${{ matrix.os }} | ||||
|  | ||||
|       - name: Restore emoji cache | ||||
|         uses: actions/cache@v2 | ||||
|         with: | ||||
|           path: /srv/zulip-emoji-cache | ||||
|           key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }} | ||||
|           restore-keys: v1-emoji-${{ matrix.os }} | ||||
|  | ||||
|       - name: Install dependencies | ||||
|         run: | | ||||
|           # This is the main setup job for the test suite | ||||
|           ./tools/ci/setup-backend --skip-dev-db-build | ||||
|  | ||||
|           # Cleaning caches is mostly unnecessary in GitHub Actions, because | ||||
|           # most builds don't get to write to the cache. | ||||
|           # scripts/lib/clean_unused_caches.py --verbose --threshold 0 | ||||
|  | ||||
|       - name: Run tools test | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/test-tools | ||||
|  | ||||
|       - name: Run Codespell lint | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/run-codespell | ||||
|  | ||||
|       - name: Run backend lint | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           echo "Test suite is running under $(python --version)." | ||||
|           ./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky | ||||
|  | ||||
|       - name: Run frontend lint | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky | ||||
|  | ||||
|       - name: Run backend tests | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/test-backend --coverage --include-webhooks --no-cov-cleanup --ban-console-output | ||||
|  | ||||
|       - name: Run mypy | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # We run mypy after the backend tests so we get output from the | ||||
|           # backend tests, which tend to uncover more serious problems, first. | ||||
|           ./tools/run-mypy --version | ||||
|           ./tools/run-mypy | ||||
|  | ||||
|       - name: Run miscellaneous tests | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|  | ||||
|           # Currently our compiled requirements files will differ for different python versions | ||||
|           # so we will run test-locked-requirements only for Debian 10. | ||||
|           # ./tools/test-locked-requirements | ||||
|           # ./tools/test-run-dev  # https://github.com/zulip/zulip/pull/14233 | ||||
|           # | ||||
|           # This test has been persistently flaky at like 1% frequency, is slow, | ||||
|           # and is for a very specific single feature, so we don't run it by default: | ||||
|           # ./tools/test-queue-worker-reload | ||||
|  | ||||
|           ./tools/test-migrations | ||||
|           ./tools/setup/optimize-svg --check | ||||
|           ./tools/setup/generate_integration_bots_avatars.py --check-missing | ||||
|  | ||||
|           # Ban check-database-compatibility.py from transitively | ||||
|           # relying on static/generated, because it might not be | ||||
|           # up-to-date at that point in upgrade-zulip-stage-2. | ||||
|           chmod 000 static/generated | ||||
|           ./scripts/lib/check-database-compatibility.py | ||||
|           chmod 755 static/generated | ||||
|  | ||||
|       - name: Run documentation and api tests | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # In CI, we only test links we control in test-documentation to avoid flakes | ||||
|           ./tools/test-documentation --skip-external-links | ||||
|           ./tools/test-help-documentation --skip-external-links | ||||
|           ./tools/test-api | ||||
|  | ||||
|       - name: Run node tests | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # Run the node tests first, since they're fast and deterministic | ||||
|           ./tools/test-js-with-node --coverage --parallel=1 | ||||
|  | ||||
|       - name: Check schemas | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # Check that various schemas are consistent. (is fast) | ||||
|           ./tools/check-schemas | ||||
|  | ||||
|       - name: Check capitalization of strings | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./manage.py makemessages --locale en | ||||
|           PYTHONWARNINGS=ignore ./tools/check-capitalization --no-generate | ||||
|           PYTHONWARNINGS=ignore ./tools/check-frontend-i18n --no-generate | ||||
|  | ||||
|       - name: Run puppeteer tests | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           ./tools/test-js-with-puppeteer | ||||
|  | ||||
|       - name: Check for untracked files | ||||
|         run: | | ||||
|           source tools/ci/activate-venv | ||||
|           # This final check looks for untracked files that may have been | ||||
|           # created by test-backend or provision. | ||||
|           untracked="$(git ls-files --exclude-standard --others)" | ||||
|           if [ -n "$untracked" ]; then | ||||
|               printf >&2 "Error: untracked files:\n%s\n" "$untracked" | ||||
|               exit 1 | ||||
|           fi | ||||
|  | ||||
|       - name: Test locked requirements | ||||
|         if: ${{ matrix.os == 'buster' }} | ||||
|         run: | | ||||
|           . /srv/zulip-py3-venv/bin/activate && \ | ||||
|           ./tools/test-locked-requirements | ||||
|  | ||||
|       - name: Upload coverage reports | ||||
|  | ||||
|         # Only upload coverage when both frontend and backend | ||||
|         # tests are run. | ||||
|         if: ${{ matrix.include_frontend_tests }} | ||||
|         uses: codecov/codecov-action@v2 | ||||
|         with: | ||||
|           files: var/coverage.xml,var/node-coverage/lcov.info | ||||
|  | ||||
|       - name: Store Puppeteer artifacts | ||||
|         # Upload these on failure, as well | ||||
|         if: ${{ always() && matrix.include_frontend_tests }} | ||||
|         uses: actions/upload-artifact@v2 | ||||
|         with: | ||||
|           name: puppeteer | ||||
|           path: ./var/puppeteer | ||||
|           retention-days: 60 | ||||
|  | ||||
|       - name: Check development database build | ||||
|         if: ${{ matrix.os == 'focal' || matrix.os == 'bullseye' || matrix.os == 'jammy' }} | ||||
|         run: ./tools/ci/setup-backend | ||||
|  | ||||
|       - name: Report status | ||||
|         if: failure() | ||||
|         env: | ||||
|           ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }} | ||||
|         run: tools/ci/send-failure-message | ||||
							
								
								
									
										99
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										99
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -1,87 +1,28 @@ | ||||
| # Quick format and style primer: | ||||
| # | ||||
| #  * If a pattern is meant only for a specific location, it should have a | ||||
| #    leading slash, like `/staticfiles.json`. | ||||
| #    * In principle any non-trailing slash (like `zproject/dev-secrets.conf`) | ||||
| #      will do, but this makes a confusing pattern.  Adding a leading slash | ||||
| #      is clearer. | ||||
| # | ||||
| #  * Patterns like `.vscode/` without slashes, or with only a trailing slash, | ||||
| #    match in any subdirectory. | ||||
| # | ||||
| #  * Subdirectories with several internal things to ignore get their own | ||||
| #    `.gitignore` files. | ||||
| # | ||||
| #  * Comments must be on their own line.  (Otherwise they don't work.) | ||||
| # | ||||
| # See `git help ignore` for details on the format. | ||||
|  | ||||
| ## Config files for the dev environment | ||||
| /zproject/dev-secrets.conf | ||||
| /tools/conf.ini | ||||
| /tools/custom_provision | ||||
| /tools/droplets/conf.ini | ||||
|  | ||||
| ## Byproducts of setting up and using the dev environment | ||||
| *.pyc | ||||
| package-lock.json | ||||
|  | ||||
| /.vagrant | ||||
| /var/* | ||||
| !/var/puppeteer | ||||
| /var/puppeteer/* | ||||
| !/var/puppeteer/test_credentials.d.ts | ||||
|  | ||||
| /.dmypy.json | ||||
|  | ||||
| # Generated i18n data | ||||
| /locale/en | ||||
| /locale/language_options.json | ||||
| /locale/language_name_map.json | ||||
| /locale/*/mobile.json | ||||
|  | ||||
| # Static build | ||||
| *.mo | ||||
| npm-debug.log | ||||
| /node_modules | ||||
| /prod-static | ||||
| /staticfiles.json | ||||
| /webpack-stats-production.json | ||||
| /yarn-error.log | ||||
| zulip-git-version | ||||
|  | ||||
| # Test / analysis tools | ||||
| .coverage | ||||
|  | ||||
| ## Files (or really symlinks) created in a prod deployment | ||||
| /zproject/prod_settings.py | ||||
| /zulip-current-venv | ||||
| /zulip-py3-venv | ||||
|  | ||||
| ## Files left by various editors and local environments | ||||
| # (Ideally these should be in everyone's respective personal gitignore files.) | ||||
| *~ | ||||
| /all_messages_log.* | ||||
| /event_log/* | ||||
| /server.log | ||||
| /update-prod-static.log | ||||
| /prod-static | ||||
| /errors/* | ||||
| *.sw[po] | ||||
| .idea | ||||
| .DS_Store | ||||
| event_queues.pickle | ||||
| stats/ | ||||
| zerver/fixtures/available-migrations | ||||
| zerver/fixtures/migration-status | ||||
| zerver/fixtures/test_data1.json | ||||
| .kdev4 | ||||
| zulip.kdev4 | ||||
| memcached_prefix | ||||
| coverage/ | ||||
| /queue_error | ||||
| /workers.log | ||||
| .test-js-with-node.html | ||||
| digest.log | ||||
| errors.log | ||||
| manage.log | ||||
| .kateproject.d/ | ||||
| .kateproject | ||||
| *.kate-swp | ||||
| *.sublime-project | ||||
| *.sublime-workspace | ||||
| *.DS_Store | ||||
| # VS Code. Avoid checking in .vscode in general, while still specifying | ||||
| # recommended extensions for working with this repository. | ||||
| /.vscode/**/* | ||||
| !/.vscode/extensions.json | ||||
| # .cache/ is generated by VS Code test runner | ||||
| .cache/ | ||||
| .eslintcache | ||||
|  | ||||
| # Core dump files | ||||
| core | ||||
|  | ||||
| ## Miscellaneous | ||||
| # (Ideally this section is empty.) | ||||
| .transifexrc | ||||
|   | ||||
							
								
								
									
										13
									
								
								.gitlint
									
									
									
									
									
								
							
							
						
						
									
										13
									
								
								.gitlint
									
									
									
									
									
								
							| @@ -1,13 +0,0 @@ | ||||
| [general] | ||||
| ignore=title-trailing-punctuation, body-min-length, body-is-missing | ||||
|  | ||||
| extra-path=tools/lib/gitlint-rules.py | ||||
|  | ||||
| [title-match-regex] | ||||
| regex=^(.+:\ )?[A-Z].+\.$ | ||||
|  | ||||
| [title-max-length] | ||||
| line-length=76 | ||||
|  | ||||
| [body-max-line-length] | ||||
| line-length=76 | ||||
							
								
								
									
										43
									
								
								.mailmap
									
									
									
									
									
								
							
							
						
						
									
										43
									
								
								.mailmap
									
									
									
									
									
								
							| @@ -1,43 +0,0 @@ | ||||
| Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net> | ||||
| Alex Vandiver <alexmv@zulip.com> <github@chmrr.net> | ||||
| Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com> | ||||
| Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@zulip.com> | ||||
| Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in> | ||||
| Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com> | ||||
| Anders Kaseorg <anders@zulip.com> <andersk@mit.edu> | ||||
| Brock Whittaker <brock@zulipchat.com> <bjwhitta@asu.edu> | ||||
| Brock Whittaker <brock@zulipchat.com> <brockwhittaker@Brocks-MacBook.local> | ||||
| Brock Whittaker <brock@zulipchat.com> <brock@zulipchat.org> | ||||
| Chris Bobbe <cbobbe@zulip.com> <cbobbe@zulipchat.com> | ||||
| Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com> | ||||
| Greg Price <greg@zulip.com> <gnprice@gmail.com> | ||||
| Greg Price <greg@zulip.com> <greg@zulipchat.com> | ||||
| Greg Price <greg@zulip.com> <price@mit.edu> | ||||
| Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com> | ||||
| Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com> | ||||
| Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com> | ||||
| Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com> | ||||
| Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com> | ||||
| Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com> | ||||
| Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com> | ||||
| Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu> | ||||
| Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com> | ||||
| Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com> | ||||
| Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com> | ||||
| Scott Feeney <scott@oceanbase.org> <scott@humbughq.com> | ||||
| Scott Feeney <scott@oceanbase.org> <scott@zulip.com> | ||||
| Steve Howell <showell@zulip.com> <showell30@yahoo.com> | ||||
| Steve Howell <showell@zulip.com> <showell@yahoo.com> | ||||
| Steve Howell <showell@zulip.com> <showell@zulipchat.com> | ||||
| Steve Howell <showell@zulip.com> <steve@humbughq.com> | ||||
| Steve Howell <showell@zulip.com> <steve@zulip.com> | ||||
| Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com> | ||||
| Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com> | ||||
| Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu> | ||||
| Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com> | ||||
| Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com> | ||||
| Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com> | ||||
| Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com> | ||||
| Sahil Batra <sahil@zulip.com> <sahilbatra839@gmail.com> | ||||
| Yash RE <33805964+YashRE42@users.noreply.github.com> <YashRE42@github.com> | ||||
| Yash RE <33805964+YashRE42@users.noreply.github.com> | ||||
| @@ -1,8 +0,0 @@ | ||||
| /corporate/tests/stripe_fixtures | ||||
| /locale | ||||
| /static/third | ||||
| /templates/**/*.md | ||||
| /tools/setup/emoji/emoji_map.json | ||||
| /zerver/tests/fixtures | ||||
| /zerver/webhooks/*/doc.md | ||||
| /zerver/webhooks/*/fixtures | ||||
| @@ -1,15 +0,0 @@ | ||||
| { | ||||
|   "source_directories": ["."], | ||||
|   "taint_models_path": [ | ||||
|       "stubs/taint", | ||||
|       "zulip-py3-venv/lib/pyre_check/taint/" | ||||
|   ], | ||||
|   "search_path": [ | ||||
|       "stubs/", | ||||
|       "zulip-py3-venv/lib/pyre_check/stubs/" | ||||
|   ], | ||||
|   "typeshed": "zulip-py3-venv/lib/pyre_check/typeshed/", | ||||
|   "exclude": [ | ||||
|       "/srv/zulip/zulip-py3-venv/.*" | ||||
|   ] | ||||
| } | ||||
| @@ -1 +0,0 @@ | ||||
| sonar.inclusions=**/*.py,**/*.html | ||||
							
								
								
									
										33
									
								
								.tx/config
									
									
									
									
									
								
							
							
						
						
									
										33
									
								
								.tx/config
									
									
									
									
									
								
							| @@ -1,33 +0,0 @@ | ||||
| [main] | ||||
| host = https://www.transifex.com | ||||
| lang_map = zh-Hans: zh_Hans, zh-Hant: zh_Hant | ||||
|  | ||||
| [zulip.djangopo] | ||||
| file_filter = locale/<lang>/LC_MESSAGES/django.po | ||||
| source_file = locale/en/LC_MESSAGES/django.po | ||||
| source_lang = en | ||||
| type = PO | ||||
|  | ||||
| [zulip.translationsjson] | ||||
| file_filter = locale/<lang>/translations.json | ||||
| source_file = locale/en/translations.json | ||||
| source_lang = en | ||||
| type = KEYVALUEJSON | ||||
|  | ||||
| [zulip.mobile] | ||||
| file_filter = locale/<lang>/mobile.json | ||||
| source_file = locale/en/mobile.json | ||||
| source_lang = en | ||||
| type = KEYVALUEJSON | ||||
|  | ||||
| [zulip-test.djangopo] | ||||
| file_filter = locale/<lang>/LC_MESSAGES/django.po | ||||
| source_file = locale/en/LC_MESSAGES/django.po | ||||
| source_lang = en | ||||
| type = PO | ||||
|  | ||||
| [zulip-test.translationsjson] | ||||
| file_filter = locale/<lang>/translations.json | ||||
| source_file = locale/en/translations.json | ||||
| source_lang = en | ||||
| type = KEYVALUEJSON | ||||
							
								
								
									
										23
									
								
								.vscode/extensions.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										23
									
								
								.vscode/extensions.json
									
									
									
									
										vendored
									
									
								
							| @@ -1,23 +0,0 @@ | ||||
| { | ||||
|     // Recommended VS Code extensions for zulip/zulip. | ||||
|     // | ||||
|     // VS Code prompts a user to install the recommended extensions | ||||
|     // when a workspace is opened for the first time.  The user can | ||||
|     // also review the list with the 'Extensions: Show Recommended | ||||
|     // Extensions' command.  See | ||||
|     // https://code.visualstudio.com/docs/editor/extension-marketplace#_workspace-recommended-extensions | ||||
|     // for more information. | ||||
|     // | ||||
|     // Extension identifier format: ${publisher}.${name}. | ||||
|     // Example: vscode.csharp | ||||
|  | ||||
|     "recommendations": [ | ||||
|         "42crunch.vscode-openapi", | ||||
|         "dbaeumer.vscode-eslint", | ||||
|         "esbenp.prettier-vscode", | ||||
|         "ms-vscode-remote.vscode-remote-extensionpack" | ||||
|     ], | ||||
|  | ||||
|     // Extensions recommended by VS Code which are not recommended for users of zulip/zulip. | ||||
|     "unwantedRecommendations": [] | ||||
| } | ||||
| @@ -1,104 +0,0 @@ | ||||
| # Zulip Code of Conduct | ||||
|  | ||||
| Like the technical community as a whole, the Zulip team and community is | ||||
| made up of a mixture of professionals and volunteers from all over the | ||||
| world, working on every aspect of the mission, including mentorship, | ||||
| teaching, and connecting people. | ||||
|  | ||||
| Diversity is one of our huge strengths, but it can also lead to | ||||
| communication issues and unhappiness. To that end, we have a few ground | ||||
| rules that we ask people to adhere to. This code applies equally to | ||||
| founders, mentors, and those seeking help and guidance. | ||||
|  | ||||
| This isn't an exhaustive list of things that you can't do. Rather, take it | ||||
| in the spirit in which it's intended --- a guide to make it easier to enrich | ||||
| all of us and the technical communities in which we participate. | ||||
|  | ||||
| ## Expected behavior | ||||
|  | ||||
| The following behaviors are expected and requested of all community members: | ||||
|  | ||||
| - Participate. In doing so, you contribute to the health and longevity of | ||||
|   the community. | ||||
| - Exercise consideration and respect in your speech and actions. | ||||
| - Attempt collaboration before conflict. Assume good faith. | ||||
| - Refrain from demeaning, discriminatory, or harassing behavior and speech. | ||||
| - Take action or alert community leaders if you notice a dangerous | ||||
|   situation, someone in distress, or violations of this code, even if they | ||||
|   seem inconsequential. | ||||
| - Community event venues may be shared with members of the public; be | ||||
|   respectful to all patrons of these locations. | ||||
|  | ||||
| ## Unacceptable behavior | ||||
|  | ||||
| The following behaviors are considered harassment and are unacceptable | ||||
| within the Zulip community: | ||||
|  | ||||
| - Jokes or derogatory language that singles out members of any race, | ||||
|   ethnicity, culture, national origin, color, immigration status, social and | ||||
|   economic class, educational level, language proficiency, sex, sexual | ||||
|   orientation, gender identity and expression, age, size, family status, | ||||
|   political belief, religion, and mental and physical ability. | ||||
| - Violence, threats of violence, or violent language directed against | ||||
|   another person. | ||||
| - Disseminating or threatening to disseminate another person's personal | ||||
|   information. | ||||
| - Personal insults of any sort. | ||||
| - Posting or displaying sexually explicit or violent material. | ||||
| - Inappropriate photography or recording. | ||||
| - Deliberate intimidation, stalking, or following (online or in person). | ||||
| - Unwelcome sexual attention. This includes sexualized comments or jokes, | ||||
|   inappropriate touching or groping, and unwelcomed sexual advances. | ||||
| - Sustained disruption of community events, including talks and | ||||
|   presentations. | ||||
| - Advocating for, or encouraging, any of the behaviors above. | ||||
|  | ||||
| ## Reporting and enforcement | ||||
|  | ||||
| Harassment and other code of conduct violations reduce the value of the | ||||
| community for everyone. If someone makes you or anyone else feel unsafe or | ||||
| unwelcome, please report it to the community organizers at | ||||
| zulip-code-of-conduct@googlegroups.com as soon as possible. You can make a | ||||
| report either personally or anonymously. | ||||
|  | ||||
| If a community member engages in unacceptable behavior, the community | ||||
| organizers may take any action they deem appropriate, up to and including a | ||||
| temporary ban or permanent expulsion from the community without warning (and | ||||
| without refund in the case of a paid event). | ||||
|  | ||||
| If someone outside the development community (e.g. a user of the Zulip | ||||
| software) engages in unacceptable behavior that affects someone in the | ||||
| community, we still want to know. Even if we don't have direct control over | ||||
| the violator, the community organizers can still support the people | ||||
| affected, reduce the chance of a similar violation in the future, and take | ||||
| any direct action we can. | ||||
|  | ||||
| The nature of reporting means it can only help after the fact. If you see | ||||
| something you can do while a violation is happening, do it. A lot of the | ||||
| harms of harassment and other violations can be mitigated by the victim | ||||
| knowing that the other people present are on their side. | ||||
|  | ||||
| All reports will be kept confidential. In some cases, we may determine that a | ||||
| public statement will need to be made. In such cases, the identities of all | ||||
| victims and reporters will remain confidential unless those individuals | ||||
| instruct us otherwise. | ||||
|  | ||||
| ## Scope | ||||
|  | ||||
| We expect all community participants (contributors, paid or otherwise, | ||||
| sponsors, and other guests) to abide by this Code of Conduct in all | ||||
| community venues, online and in-person, as well as in all private | ||||
| communications pertaining to community business. | ||||
|  | ||||
| This Code of Conduct and its related procedures also applies to unacceptable | ||||
| behavior occurring outside the scope of community activities when such | ||||
| behavior has the potential to adversely affect the safety and well-being of | ||||
| community members. | ||||
|  | ||||
| ## License and attribution | ||||
|  | ||||
| This Code of Conduct is adapted from the | ||||
| [Django Code of Conduct](https://www.djangoproject.com/conduct/), and is | ||||
| under a | ||||
| [Creative Commons BY-SA](https://creativecommons.org/licenses/by-sa/4.0/) | ||||
| license. | ||||
							
								
								
									
										455
									
								
								CONTRIBUTING.md
									
									
									
									
									
								
							
							
						
						
									
										455
									
								
								CONTRIBUTING.md
									
									
									
									
									
								
							| @@ -1,455 +0,0 @@ | ||||
| # Contributing to Zulip | ||||
|  | ||||
| Welcome to the Zulip community! | ||||
|  | ||||
| ## Community | ||||
|  | ||||
| The | ||||
| [Zulip community server](https://zulip.com/development-community/) | ||||
| is the primary communication forum for the Zulip community. It is a good | ||||
| place to start whether you have a question, are a new contributor, are a new | ||||
| user, or anything else. Please review our | ||||
| [community norms](https://zulip.com/development-community/#community-norms) | ||||
| before posting. The Zulip community is also governed by a | ||||
| [code of conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html). | ||||
|  | ||||
| ## Ways to contribute | ||||
|  | ||||
| To make a code or documentation contribution, read our | ||||
| [step-by-step guide](#your-first-codebase-contribution) to getting | ||||
| started with the Zulip codebase. A small sample of the type of work that | ||||
| needs doing: | ||||
|  | ||||
| - Bug squashing and feature development on our Python/Django | ||||
|   [backend](https://github.com/zulip/zulip), web | ||||
|   [frontend](https://github.com/zulip/zulip), React Native | ||||
|   [mobile app](https://github.com/zulip/zulip-mobile), or Electron | ||||
|   [desktop app](https://github.com/zulip/zulip-desktop). | ||||
| - Building out our | ||||
|   [Python API and bots](https://github.com/zulip/python-zulip-api) framework. | ||||
| - [Writing an integration](https://zulip.com/api/integrations-overview). | ||||
| - Improving our [user](https://zulip.com/help/) or | ||||
|   [developer](https://zulip.readthedocs.io/en/latest/) documentation. | ||||
| - [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html) | ||||
|   and manually testing pull requests. | ||||
|  | ||||
| **Non-code contributions**: Some of the most valuable ways to contribute | ||||
| don't require touching the codebase at all. For example, you can: | ||||
|  | ||||
| - [Report issues](#reporting-issues), including both feature requests and | ||||
|   bug reports. | ||||
| - [Give feedback](#user-feedback) if you are evaluating or using Zulip. | ||||
| - [Sponsor Zulip](https://github.com/sponsors/zulip) through the GitHub sponsors program. | ||||
| - [Translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) | ||||
|   Zulip into your language. | ||||
| - [Stay connected](#stay-connected) with Zulip, and [help others | ||||
|   find us](#help-others-find-zulip). | ||||
|  | ||||
| ## Your first codebase contribution | ||||
|  | ||||
| This section has a step by step guide to starting as a Zulip codebase | ||||
| contributor. It's long, but don't worry about doing all the steps perfectly; | ||||
| no one gets it right the first time, and there are a lot of people available | ||||
| to help. | ||||
|  | ||||
| - First, make an account on the | ||||
|   [Zulip community server](https://zulip.com/development-community/), | ||||
|   paying special attention to the community norms. If you'd like, introduce | ||||
|   yourself in | ||||
|   [#new members](https://chat.zulip.org/#narrow/stream/95-new-members), using | ||||
|   your name as the topic. Bonus: tell us about your first impressions of | ||||
|   Zulip, and anything that felt confusing/broken as you started using the | ||||
|   product. | ||||
| - Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor). | ||||
| - [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html), | ||||
|   getting help in | ||||
|   [#provision help](https://chat.zulip.org/#narrow/stream/21-provision-help) | ||||
|   if you run into any troubles. | ||||
| - Familiarize yourself with [using the development environment](https://zulip.readthedocs.io/en/latest/development/using.html). | ||||
| - Go through the [new application feature | ||||
|   tutorial](https://zulip.readthedocs.io/en/latest/tutorials/new-feature-tutorial.html) to get familiar with | ||||
|   how the Zulip codebase is organized and how to find code in it. | ||||
| - Read the [Zulip guide to | ||||
|   Git](https://zulip.readthedocs.io/en/latest/git/index.html) if you | ||||
|   are unfamiliar with Git or Zulip's rebase-based Git workflow, | ||||
|   getting help in [#git | ||||
|   help](https://chat.zulip.org/#narrow/stream/44-git-help) if you run | ||||
|   into any troubles. Even Git experts should read the [Zulip-specific | ||||
|   Git tools | ||||
|   page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html). | ||||
|  | ||||
| ### Where to look for an issue | ||||
|  | ||||
| Now you're ready to pick your first issue! Zulip has several repositories you | ||||
| can check out, depending on your interests. There are hundreds of open issues in | ||||
| the [main Zulip server and web app | ||||
| repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| alone. | ||||
|  | ||||
| You can look through issues tagged with the "help wanted" label, which is used | ||||
| to indicate the issues that are ready for contributions. Some repositories also | ||||
| use the "good first issue" label to tag issues that are especially approachable | ||||
| for new contributors. | ||||
|  | ||||
| - [Server and web app](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| - [Mobile apps](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| - [Desktop app](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| - [Terminal app](https://github.com/zulip/zulip-terminal/issues?q=is%3Aopen+is%3Aissue+label%3A"help+wanted") | ||||
| - [Python API bindings and bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
|  | ||||
| ### Picking an issue to work on | ||||
|  | ||||
| There's a lot to learn while making your first pull request, so start small! | ||||
| Many first contributions have fewer than 10 lines of changes (not counting | ||||
| changes to tests). | ||||
|  | ||||
| We recommend the following process for finding an issue to work on: | ||||
|  | ||||
| 1. Read the description of an issue tagged with the "help wanted" label and make | ||||
|    sure you understand it. | ||||
| 2. If it seems promising, poke around the product | ||||
|    (on [chat.zulip.org](https://chat.zulip.org) or in the development | ||||
|    environment) until you know how the piece being | ||||
|    described fits into the bigger picture. If after some exploration the | ||||
|    description seems confusing or ambiguous, post a question on the GitHub | ||||
|    issue, as others may benefit from the clarification as well. | ||||
| 3. When you find an issue you like, try to get started working on it. See if you | ||||
|    can find the part of the code you'll need to modify (`git grep` is your | ||||
|    friend!) and get some idea of how you'll approach the problem. | ||||
| 4. If you feel lost, that's OK! Go through these steps again with another issue. | ||||
|    There's plenty to work on, and the exploration you do will help you learn | ||||
|    more about the project. | ||||
|  | ||||
| Note that you are _not_ claiming an issue while you are iterating through steps | ||||
| 1-4. _Before you claim an issue_, you should be confident that you will be able to | ||||
| tackle it effectively. | ||||
|  | ||||
| If the lists of issues are overwhelming, you can post in | ||||
| [#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with a | ||||
| bit about your background and interests, and we'll help you out. The most | ||||
| important thing to say is whether you're looking for a backend (Python), | ||||
| frontend (JavaScript and TypeScript), mobile (React Native), desktop (Electron), | ||||
| documentation (English) or visual design (JavaScript/TypeScript + CSS) issue, and a | ||||
| bit about your programming experience and available time. | ||||
|  | ||||
| Additional tips for the [main server and web app | ||||
| repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22): | ||||
|  | ||||
| - We especially recommend browsing recently opened issues, as there are more | ||||
|   likely to be easy ones for you to find. | ||||
| - All issues are partitioned into areas like | ||||
|   admin, compose, emoji, hotkeys, i18n, onboarding, search, etc. Look | ||||
|   through our [list of labels](https://github.com/zulip/zulip/labels), and | ||||
|   click on some of the `area:` labels to see all the issues related to your | ||||
|   areas of interest. | ||||
| - Avoid issues with the "difficult" label unless you | ||||
|   understand why it is difficult and are highly confident you can resolve the | ||||
|   issue correctly and completely. | ||||
|  | ||||
| ### Claiming an issue | ||||
|  | ||||
| #### In the main server and web app repository | ||||
|  | ||||
| After making sure the issue is tagged with a [help | ||||
| wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||
| label, post a comment with `@zulipbot claim` to the issue thread. | ||||
| [Zulipbot](https://github.com/zulip/zulipbot) is a GitHub workflow bot; it will | ||||
| assign you to the issue and label the issue as "in progress". | ||||
|  | ||||
| New contributors can only claim one issue until their first pull request is | ||||
| merged. This is to encourage folks to finish ongoing work before starting | ||||
| something new. If you would like to pick up a new issue while waiting for review | ||||
| on an almost-ready pull request, you can post a comment to this effect on the | ||||
| issue you're interested in. | ||||
|  | ||||
| #### In other Zulip repositories | ||||
|  | ||||
| There is no bot for other repositories, so you can simply post a comment saying | ||||
| that you'd like to work on the issue. | ||||
|  | ||||
| Please follow the same guidelines as described above: find an issue labeled | ||||
| "help wanted", and only pick up one issue at a time to start with. | ||||
|  | ||||
| ### Working on an issue | ||||
|  | ||||
| You're encouraged to ask questions on how to best implement or debug your | ||||
| changes -- the Zulip maintainers are excited to answer questions to help you | ||||
| stay unblocked and working efficiently. You can ask questions in the [Zulip | ||||
| development community](https://zulip.com/development-community/), or on the | ||||
| GitHub issue or pull request. | ||||
|  | ||||
| To get early feedback on any UI changes, we encourage you to post screenshots of | ||||
| your work in the [#design | ||||
| stream](https://chat.zulip.org/#narrow/stream/101-design) in the [Zulip | ||||
| development community](https://zulip.com/development-community/). | ||||
|  | ||||
| For more advice, see [What makes a great Zulip | ||||
| contributor?](https://zulip.readthedocs.io/en/latest/overview/contributing.html#what-makes-a-great-zulip-contributor) | ||||
| below. | ||||
|  | ||||
| ### Submitting a pull request | ||||
|  | ||||
| When you believe your code is ready, follow the [guide on how to review | ||||
| code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code) | ||||
| to review your own work. You can often find things you missed by taking a step | ||||
| back to look over your work before asking others to do so. Catching mistakes | ||||
| yourself will help your PRs be merged faster, and folks will appreciate the | ||||
| quality and professionalism of your work. | ||||
|  | ||||
| Then, submit your changes. Carefully reading our [Git guide][git-guide], and in | ||||
| particular the section on [making a pull request][git-guide-make-pr], | ||||
| will help avoid many common mistakes. | ||||
|  | ||||
| Once you are satisfied with the quality of your PR, follow the | ||||
| [guidelines on asking for a code | ||||
| review](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#asking-for-a-code-review) | ||||
| to request a review. If you are not sure what's best, simply post a | ||||
| comment on the main GitHub thread for your PR clearly indicating that | ||||
| it is ready for review, and the project maintainers will take a look | ||||
| and follow up with next steps. | ||||
|  | ||||
| It's OK if your first issue takes you a while; that's normal! You'll be | ||||
| able to work a lot faster as you build experience. | ||||
|  | ||||
| If it helps your workflow, you can submit a work-in-progress pull | ||||
| request before your work is ready for review. Simply prefix the title | ||||
| of work in progress pull requests with `[WIP]`, and then remove the | ||||
| prefix when you think it's time for someone else to review your work. | ||||
|  | ||||
| [git-guide]: https://zulip.readthedocs.io/en/latest/git/ | ||||
| [git-guide-make-pr]: https://zulip.readthedocs.io/en/latest/git/pull-requests.html | ||||
|  | ||||
| ### Beyond the first issue | ||||
|  | ||||
| To find a second issue to work on, we recommend looking through issues with the same | ||||
| `area:` label as the last issue you resolved. You'll be able to reuse the | ||||
| work you did learning how that part of the codebase works. Also, the path to | ||||
| becoming a core developer often involves taking ownership of one of these area | ||||
| labels. | ||||
|  | ||||
| ### Common questions | ||||
|  | ||||
| - **What if somebody is already working on the issue I want to claim?** There | ||||
|   are lots of issues to work on! If somebody else is actively working on the | ||||
|   issue, you can find a different one, or help with | ||||
|   reviewing their work. | ||||
| - **What if somebody else claims an issue while I'm figuring out whether or not to | ||||
|   work on it?** No worries! You can contribute by providing feedback on | ||||
|   their pull request. If you've made good progress in understanding part of the | ||||
|   codebase, you can also find another "help wanted" issue in the same area to | ||||
|   work on. | ||||
| - **What if there is already a pull request for the issue I want to work on?** | ||||
|   Start by reviewing the existing work. If you agree with the approach, you can | ||||
|   use the existing pull request (PR) as a starting point for your contribution. If | ||||
|   you think a different approach is needed, you can post a new PR, with a comment that clearly | ||||
|   explains _why_ you decided to start from scratch. | ||||
| - **Can I come up with my own feature idea and work on it?** We welcome | ||||
|   suggestions of features or other improvements that you feel would be valuable. If you | ||||
|   have a new feature you'd like to add, you can start a conversation [in our | ||||
|   development community](https://zulip.com/development-community/#where-do-i-send-my-message) | ||||
|   explaining the feature idea and the problem that you're hoping to solve. | ||||
| - **I think my PR is done, but it hasn't been merged yet. What's going on?** | ||||
|   1. **Double-check that you have addressed all the feedback**, including any comments | ||||
|      on [Git commit | ||||
|      discipline](https://zulip.readthedocs.io/en/latest/contributing/version-control.html#commit-discipline). | ||||
|   2. If all the feedback has been addressed, did you leave a comment explaining that | ||||
     you have done so and **requesting another review**? If not, it may not be | ||||
|      clear to project maintainers that your PR is ready for another look. | ||||
|   3. It is common for PRs to require **multiple rounds of review**. For example, | ||||
|      prior to getting code review from project maintainers, you may receive | ||||
|      feedback on the UI (without regard for the implementation), and your code | ||||
|      may be [reviewed by other | ||||
|      contributors](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html). | ||||
|      This helps us make good use of project maintainers' time, and helps you | ||||
|      make progress on the PR by getting more frequent feedback. | ||||
|   4. If you think the PR is ready and haven't seen any updates for a couple | ||||
|      of weeks, it can be helpful to post a **comment summarizing your | ||||
|      understanding of the state of the review process**. Your comment should | ||||
|      make it easy to understand what has been done and what remains by: | ||||
|      - Summarizing the changes made since the last review you received. | ||||
|      - Highlighting remaining questions or decisions, with links to any | ||||
|        relevant chat.zulip.org threads. | ||||
|      - Providing updated screenshots and information on manual testing if | ||||
|        appropriate. | ||||
|   5. Finally, **Zulip project maintainers are people too**! They may be busy | ||||
|      with other work, and sometimes they might even take a vacation. ;) It can | ||||
|      occasionally take a few weeks for a PR in the final stages of the review | ||||
|      process to be merged. | ||||
|  | ||||
| ## What makes a great Zulip contributor? | ||||
|  | ||||
| Zulip has a lot of experience working with new contributors. In our | ||||
| experience, these are the best predictors of success: | ||||
|  | ||||
| - Posting good questions. It's very hard to answer a general question like, "How | ||||
|   do I do this issue?" When asking for help, explain | ||||
|   your current understanding, including what you've done or tried so far and where | ||||
|   you got stuck. Post tracebacks or other error messages if appropriate. For | ||||
|   more information, check out the ["Getting help" section of our community | ||||
|   guidelines](https://zulip.com/development-community/#getting-help) and | ||||
|   [this essay][good-questions-blog] for some good advice. | ||||
| - Learning and practicing | ||||
|   [Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/version-control.html#commit-discipline). | ||||
| - Submitting carefully tested code. See our [detailed guide on how to review | ||||
|   code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code) | ||||
|   (yours or someone else's). | ||||
| - Posting | ||||
|   [screenshots or GIFs](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html) | ||||
|   for frontend changes. | ||||
| - Clearly describing what you have implemented and why. For example, if your | ||||
|   implementation differs from the issue description in some way or is a partial | ||||
|   step towards the requirements described in the issue, be sure to call | ||||
|   out those differences. | ||||
| - Being responsive to feedback on pull requests. This means incorporating or | ||||
|   responding to all suggested changes, and leaving a note if you won't be | ||||
|   able to address things within a few days. | ||||
| - Being helpful and friendly on the [Zulip community | ||||
|   server](https://zulip.com/development-community/). | ||||
|  | ||||
| [good-questions-blog]: https://jvns.ca/blog/good-questions/ | ||||
|  | ||||
| These are also the main criteria we use to select candidates for all | ||||
| of our outreach programs. | ||||
|  | ||||
| ## Reporting issues | ||||
|  | ||||
| If you find an easily reproducible bug and/or are experienced in reporting | ||||
| bugs, feel free to just open an issue on the relevant project on GitHub. | ||||
|  | ||||
| If you have a feature request or are not yet sure what the underlying bug | ||||
| is, the best place to post issues is | ||||
| [#issues](https://chat.zulip.org/#narrow/stream/9-issues) (or | ||||
| [#mobile](https://chat.zulip.org/#narrow/stream/48-mobile) or | ||||
| [#desktop](https://chat.zulip.org/#narrow/stream/16-desktop)) on the | ||||
| [Zulip community server](https://zulip.com/development-community/). | ||||
| This allows us to interactively figure out what is going on, let you know if | ||||
| a similar issue has already been opened, and collect any other information | ||||
| we need. Choose a 2-4 word topic that describes the issue, explain the issue | ||||
| and how to reproduce it if known, your browser/OS if relevant, and a | ||||
| [screenshot or screenGIF](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html) | ||||
| if appropriate. | ||||
|  | ||||
| **Reporting security issues**. Please do not report security issues | ||||
| publicly, including on public streams on chat.zulip.org. You can | ||||
| email [security@zulip.com](mailto:security@zulip.com). We create a CVE for every | ||||
| security issue in our released software. | ||||
|  | ||||
| ## User feedback | ||||
|  | ||||
| Nearly every feature we develop starts with a user request. If you are part | ||||
| of a group that is either using or considering using Zulip, we would love to | ||||
| hear about your experience with the product. If you're not sure what to | ||||
| write, here are some questions we're always very curious to know the answer | ||||
| to: | ||||
|  | ||||
| - Evaluation: What is the process by which your organization chose or will | ||||
|   choose a group chat product? | ||||
| - Pros and cons: What are the pros and cons of Zulip for your organization, | ||||
|   and the pros and cons of other products you are evaluating? | ||||
| - Features: What are the features that are most important for your | ||||
|   organization? In the best-case scenario, what would your chat solution do | ||||
|   for you? | ||||
| - Onboarding: If you remember it, what was your impression during your first | ||||
|   few minutes of using Zulip? What did you notice, and how did you feel? Was | ||||
|   there anything that stood out to you as confusing, or broken, or great? | ||||
| - Organization: What does your organization do? How big is the organization? | ||||
|   A link to your organization's website? | ||||
|  | ||||
| You can contact us in the [#feedback stream of the Zulip development | ||||
| community](https://chat.zulip.org/#narrow/stream/137-feedback) or | ||||
| by emailing [support@zulip.com](mailto:support@zulip.com). | ||||
|  | ||||
| ## Outreach programs | ||||
|  | ||||
| Zulip participates in [Google Summer of Code | ||||
| (GSoC)](https://developers.google.com/open-source/gsoc/) every year. | ||||
| In the past, we've also participated in | ||||
| [Outreachy](https://www.outreachy.org/), [Google | ||||
| Code-In](https://developers.google.com/open-source/gci/), and hosted | ||||
| summer interns from Harvard, MIT, and Stanford. | ||||
|  | ||||
| While each third-party program has its own rules and requirements, the | ||||
| Zulip community approaches all of these programs with these ideas in | ||||
| mind: | ||||
|  | ||||
| - We try to make the application process as valuable for the applicant as | ||||
|   possible. Expect high-quality code reviews, a supportive community, and | ||||
|   publicly viewable patches you can link to from your resume, regardless of | ||||
|   whether you are selected. | ||||
| - To apply, you'll have to submit at least one pull request to a Zulip | ||||
|   repository. Most students accepted to one of our programs have | ||||
|   several merged pull requests (including at least one larger PR) by | ||||
|   the time of the application deadline. | ||||
| - The main criterion we use is the quality of your best contributions, and | ||||
|   the bullets listed at | ||||
|   [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor). | ||||
|   Because we focus on evaluating your best work, it doesn't hurt your | ||||
  application to make mistakes in your first few PRs as long as your | ||||
|   work improves. | ||||
|  | ||||
| Most of our outreach program participants end up sticking around the | ||||
| project long-term, and many have become core team members, maintaining | ||||
| important parts of the project. We hope you apply! | ||||
|  | ||||
| ### Google Summer of Code | ||||
|  | ||||
| The largest outreach program Zulip participates in is GSoC (14 | ||||
| students in 2017; 11 in 2018; 17 in 2019; 18 in 2020; 18 in 2021). While we | ||||
| don't control how | ||||
| many slots Google allocates to Zulip, we hope to mentor a similar | ||||
| number of students in future summers. Check out our [blog | ||||
| post](https://blog.zulip.com/2021/09/30/google-summer-of-code-2021/) to learn | ||||
| about the GSoC 2021 experience and our participants' accomplishments. | ||||
|  | ||||
| If you're reading this well before the application deadline and want | ||||
| to make your application strong, we recommend getting involved in the | ||||
| community and fixing issues in Zulip now. Having good contributions | ||||
| and building a reputation for doing good work is the best way to have | ||||
| a strong application. | ||||
|  | ||||
| Our [GSoC program page][gsoc-guide] has lots more details on how | ||||
| Zulip does GSoC, as well as project ideas. Note, however, that the project idea | ||||
| list is maintained only during the GSoC application period, so if | ||||
| you're looking at some other time of year, the project list is likely | ||||
| out-of-date. | ||||
|  | ||||
| In some years, we have also run a Zulip Summer of Code (ZSoC) | ||||
| program for students who we wanted to accept into GSoC but did not have an | ||||
| official slot for. Student expectations are the | ||||
| same as with GSoC, and ZSoC has no separate application process; your | ||||
| GSoC application is your ZSoC application. If we'd like to select you | ||||
| for ZSoC, we'll contact you when the GSoC results are announced. | ||||
|  | ||||
| [gsoc-guide]: https://zulip.readthedocs.io/en/latest/contributing/gsoc.html | ||||
| [gsoc-faq]: https://developers.google.com/open-source/gsoc/faq | ||||
|  | ||||
| ## Stay connected | ||||
|  | ||||
| Even if you are not logging into the development community on a regular basis, | ||||
| you can still stay connected with the project. | ||||
|  | ||||
| - Follow us [on Twitter](https://twitter.com/zulip). | ||||
| - Subscribe to [our blog](https://blog.zulip.org/). | ||||
| - Join or follow the project [on LinkedIn](https://www.linkedin.com/company/zulip-project/). | ||||
|  | ||||
| ## Help others find Zulip | ||||
|  | ||||
| Here are some ways you can help others find Zulip: | ||||
|  | ||||
| - Star us on GitHub. There are four main repositories: | ||||
|   [server/web](https://github.com/zulip/zulip), | ||||
|   [mobile](https://github.com/zulip/zulip-mobile), | ||||
|   [desktop](https://github.com/zulip/zulip-desktop), and | ||||
|   [Python API](https://github.com/zulip/python-zulip-api). | ||||
|  | ||||
| - "Like" and retweet [our tweets](https://twitter.com/zulip). | ||||
|  | ||||
| - Upvote and post feedback on Zulip on comparison websites. A couple specific | ||||
|   ones to highlight: | ||||
|  | ||||
|   - [AlternativeTo](https://alternativeto.net/software/zulip-chat-server/). You can also | ||||
|     [upvote Zulip](https://alternativeto.net/software/slack/) on their page | ||||
|     for Slack. | ||||
|   - [Add Zulip to your stack](https://stackshare.io/zulip) on StackShare, star | ||||
|     it, and upvote the reasons why people like Zulip that you find most | ||||
|     compelling. | ||||
| @@ -1,15 +0,0 @@ | ||||
| # To build run `docker build -f Dockerfile-postgresql .` from the root of the | ||||
| # zulip repo. | ||||
|  | ||||
| # Currently the PostgreSQL images do not support automatic upgrading of | ||||
| # the on-disk data in volumes. So the base image can not currently be upgraded | ||||
| # without users needing a manual pgdump and restore. | ||||
|  | ||||
| # Install hunspell, Zulip stop words, and run Zulip database | ||||
| # init. | ||||
| FROM groonga/pgroonga:latest-alpine-10-slim | ||||
| RUN apk add -U --no-cache hunspell-en | ||||
| RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix  | ||||
| COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/local/share/postgresql/tsearch_data/zulip_english.stop | ||||
| COPY scripts/setup/create-db.sql /docker-entrypoint-initdb.d/zulip-create-db.sql | ||||
| COPY scripts/setup/create-pgroonga.sql /docker-entrypoint-initdb.d/zulip-create-pgroonga.sql | ||||
							
								
								
									
										203
									
								
								LICENSE
									
									
									
									
									
								
							
							
						
						
									
										203
									
								
								LICENSE
									
									
									
									
									
								
							| @@ -1,202 +1,5 @@ | ||||
| Copyright © 2012-2013 Zulip, Inc. | ||||
|  | ||||
|                                  Apache License | ||||
|                            Version 2.0, January 2004 | ||||
|                         http://www.apache.org/licenses/ | ||||
| This software is licensed under the Zulip Enterprise License Agreement. | ||||
|  | ||||
|    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION | ||||
|  | ||||
|    1. Definitions. | ||||
|  | ||||
|       "License" shall mean the terms and conditions for use, reproduction, | ||||
|       and distribution as defined by Sections 1 through 9 of this document. | ||||
|  | ||||
|       "Licensor" shall mean the copyright owner or entity authorized by | ||||
|       the copyright owner that is granting the License. | ||||
|  | ||||
|       "Legal Entity" shall mean the union of the acting entity and all | ||||
|       other entities that control, are controlled by, or are under common | ||||
|       control with that entity. For the purposes of this definition, | ||||
|       "control" means (i) the power, direct or indirect, to cause the | ||||
|       direction or management of such entity, whether by contract or | ||||
|       otherwise, or (ii) ownership of fifty percent (50%) or more of the | ||||
|       outstanding shares, or (iii) beneficial ownership of such entity. | ||||
|  | ||||
|       "You" (or "Your") shall mean an individual or Legal Entity | ||||
|       exercising permissions granted by this License. | ||||
|  | ||||
|       "Source" form shall mean the preferred form for making modifications, | ||||
|       including but not limited to software source code, documentation | ||||
|       source, and configuration files. | ||||
|  | ||||
|       "Object" form shall mean any form resulting from mechanical | ||||
|       transformation or translation of a Source form, including but | ||||
|       not limited to compiled object code, generated documentation, | ||||
|       and conversions to other media types. | ||||
|  | ||||
|       "Work" shall mean the work of authorship, whether in Source or | ||||
|       Object form, made available under the License, as indicated by a | ||||
|       copyright notice that is included in or attached to the work | ||||
|       (an example is provided in the Appendix below). | ||||
|  | ||||
|       "Derivative Works" shall mean any work, whether in Source or Object | ||||
|       form, that is based on (or derived from) the Work and for which the | ||||
|       editorial revisions, annotations, elaborations, or other modifications | ||||
|       represent, as a whole, an original work of authorship. For the purposes | ||||
|       of this License, Derivative Works shall not include works that remain | ||||
|       separable from, or merely link (or bind by name) to the interfaces of, | ||||
|       the Work and Derivative Works thereof. | ||||
|  | ||||
|       "Contribution" shall mean any work of authorship, including | ||||
|       the original version of the Work and any modifications or additions | ||||
|       to that Work or Derivative Works thereof, that is intentionally | ||||
|       submitted to Licensor for inclusion in the Work by the copyright owner | ||||
|       or by an individual or Legal Entity authorized to submit on behalf of | ||||
|       the copyright owner. For the purposes of this definition, "submitted" | ||||
|       means any form of electronic, verbal, or written communication sent | ||||
|       to the Licensor or its representatives, including but not limited to | ||||
|       communication on electronic mailing lists, source code control systems, | ||||
|       and issue tracking systems that are managed by, or on behalf of, the | ||||
|       Licensor for the purpose of discussing and improving the Work, but | ||||
|       excluding communication that is conspicuously marked or otherwise | ||||
|       designated in writing by the copyright owner as "Not a Contribution." | ||||
|  | ||||
|       "Contributor" shall mean Licensor and any individual or Legal Entity | ||||
|       on behalf of whom a Contribution has been received by Licensor and | ||||
|       subsequently incorporated within the Work. | ||||
|  | ||||
|    2. Grant of Copyright License. Subject to the terms and conditions of | ||||
|       this License, each Contributor hereby grants to You a perpetual, | ||||
|       worldwide, non-exclusive, no-charge, royalty-free, irrevocable | ||||
|       copyright license to reproduce, prepare Derivative Works of, | ||||
|       publicly display, publicly perform, sublicense, and distribute the | ||||
|       Work and such Derivative Works in Source or Object form. | ||||
|  | ||||
|    3. Grant of Patent License. Subject to the terms and conditions of | ||||
|       this License, each Contributor hereby grants to You a perpetual, | ||||
|       worldwide, non-exclusive, no-charge, royalty-free, irrevocable | ||||
|       (except as stated in this section) patent license to make, have made, | ||||
|       use, offer to sell, sell, import, and otherwise transfer the Work, | ||||
|       where such license applies only to those patent claims licensable | ||||
|       by such Contributor that are necessarily infringed by their | ||||
|       Contribution(s) alone or by combination of their Contribution(s) | ||||
|       with the Work to which such Contribution(s) was submitted. If You | ||||
|       institute patent litigation against any entity (including a | ||||
|       cross-claim or counterclaim in a lawsuit) alleging that the Work | ||||
|       or a Contribution incorporated within the Work constitutes direct | ||||
|       or contributory patent infringement, then any patent licenses | ||||
|       granted to You under this License for that Work shall terminate | ||||
|       as of the date such litigation is filed. | ||||
|  | ||||
|    4. Redistribution. You may reproduce and distribute copies of the | ||||
|       Work or Derivative Works thereof in any medium, with or without | ||||
|       modifications, and in Source or Object form, provided that You | ||||
|       meet the following conditions: | ||||
|  | ||||
|       (a) You must give any other recipients of the Work or | ||||
|           Derivative Works a copy of this License; and | ||||
|  | ||||
|       (b) You must cause any modified files to carry prominent notices | ||||
|           stating that You changed the files; and | ||||
|  | ||||
|       (c) You must retain, in the Source form of any Derivative Works | ||||
|           that You distribute, all copyright, patent, trademark, and | ||||
|           attribution notices from the Source form of the Work, | ||||
|           excluding those notices that do not pertain to any part of | ||||
|           the Derivative Works; and | ||||
|  | ||||
|       (d) If the Work includes a "NOTICE" text file as part of its | ||||
|           distribution, then any Derivative Works that You distribute must | ||||
|           include a readable copy of the attribution notices contained | ||||
|           within such NOTICE file, excluding those notices that do not | ||||
|           pertain to any part of the Derivative Works, in at least one | ||||
|           of the following places: within a NOTICE text file distributed | ||||
|           as part of the Derivative Works; within the Source form or | ||||
|           documentation, if provided along with the Derivative Works; or, | ||||
|           within a display generated by the Derivative Works, if and | ||||
|           wherever such third-party notices normally appear. The contents | ||||
|           of the NOTICE file are for informational purposes only and | ||||
|           do not modify the License. You may add Your own attribution | ||||
|           notices within Derivative Works that You distribute, alongside | ||||
|           or as an addendum to the NOTICE text from the Work, provided | ||||
|           that such additional attribution notices cannot be construed | ||||
|           as modifying the License. | ||||
|  | ||||
|       You may add Your own copyright statement to Your modifications and | ||||
|       may provide additional or different license terms and conditions | ||||
|       for use, reproduction, or distribution of Your modifications, or | ||||
|       for any such Derivative Works as a whole, provided Your use, | ||||
|       reproduction, and distribution of the Work otherwise complies with | ||||
|       the conditions stated in this License. | ||||
|  | ||||
|    5. Submission of Contributions. Unless You explicitly state otherwise, | ||||
|       any Contribution intentionally submitted for inclusion in the Work | ||||
|       by You to the Licensor shall be under the terms and conditions of | ||||
|       this License, without any additional terms or conditions. | ||||
|       Notwithstanding the above, nothing herein shall supersede or modify | ||||
|       the terms of any separate license agreement you may have executed | ||||
|       with Licensor regarding such Contributions. | ||||
|  | ||||
|    6. Trademarks. This License does not grant permission to use the trade | ||||
|       names, trademarks, service marks, or product names of the Licensor, | ||||
|       except as required for reasonable and customary use in describing the | ||||
|       origin of the Work and reproducing the content of the NOTICE file. | ||||
|  | ||||
|    7. Disclaimer of Warranty. Unless required by applicable law or | ||||
|       agreed to in writing, Licensor provides the Work (and each | ||||
|       Contributor provides its Contributions) on an "AS IS" BASIS, | ||||
|       WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or | ||||
|       implied, including, without limitation, any warranties or conditions | ||||
|       of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A | ||||
|       PARTICULAR PURPOSE. You are solely responsible for determining the | ||||
|       appropriateness of using or redistributing the Work and assume any | ||||
|       risks associated with Your exercise of permissions under this License. | ||||
|  | ||||
|    8. Limitation of Liability. In no event and under no legal theory, | ||||
|       whether in tort (including negligence), contract, or otherwise, | ||||
|       unless required by applicable law (such as deliberate and grossly | ||||
|       negligent acts) or agreed to in writing, shall any Contributor be | ||||
|       liable to You for damages, including any direct, indirect, special, | ||||
|       incidental, or consequential damages of any character arising as a | ||||
|       result of this License or out of the use or inability to use the | ||||
|       Work (including but not limited to damages for loss of goodwill, | ||||
|       work stoppage, computer failure or malfunction, or any and all | ||||
|       other commercial damages or losses), even if such Contributor | ||||
|       has been advised of the possibility of such damages. | ||||
|  | ||||
|    9. Accepting Warranty or Additional Liability. While redistributing | ||||
|       the Work or Derivative Works thereof, You may choose to offer, | ||||
|       and charge a fee for, acceptance of support, warranty, indemnity, | ||||
|       or other liability obligations and/or rights consistent with this | ||||
|       License. However, in accepting such obligations, You may act only | ||||
|       on Your own behalf and on Your sole responsibility, not on behalf | ||||
|       of any other Contributor, and only if You agree to indemnify, | ||||
|       defend, and hold each Contributor harmless for any liability | ||||
|       incurred by, or claims asserted against, such Contributor by reason | ||||
|       of your accepting any such warranty or additional liability. | ||||
|  | ||||
|    END OF TERMS AND CONDITIONS | ||||
|  | ||||
|    APPENDIX: How to apply the Apache License to your work. | ||||
|  | ||||
|       To apply the Apache License to your work, attach the following | ||||
|       boilerplate notice, with the fields enclosed by brackets "[]" | ||||
|       replaced with your own identifying information. (Don't include | ||||
|       the brackets!)  The text should be enclosed in the appropriate | ||||
|       comment syntax for the file format. We also recommend that a | ||||
|       file or class name and description of purpose be included on the | ||||
|       same "printed page" as the copyright notice for easier | ||||
|       identification within third-party archives. | ||||
|  | ||||
|    Copyright [yyyy] [name of copyright owner] | ||||
|  | ||||
|    Licensed under the Apache License, Version 2.0 (the "License"); | ||||
|    you may not use this file except in compliance with the License. | ||||
|    You may obtain a copy of the License at | ||||
|  | ||||
|        http://www.apache.org/licenses/LICENSE-2.0 | ||||
|  | ||||
|    Unless required by applicable law or agreed to in writing, software | ||||
|    distributed under the License is distributed on an "AS IS" BASIS, | ||||
|    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||
|    See the License for the specific language governing permissions and | ||||
|    limitations under the License. | ||||
| Zulip can be reached at support@zulip.com. | ||||
|   | ||||
							
								
								
									
										18
									
								
								NOTICE
									
									
									
									
									
								
							
							
						
						
									
										18
									
								
								NOTICE
									
									
									
									
									
								
							| @@ -1,18 +0,0 @@ | ||||
| Copyright 2012–2015 Dropbox, Inc., 2015–2021 Kandra Labs, Inc., and contributors | ||||
|  | ||||
| Licensed under the Apache License, Version 2.0 (the "License"); | ||||
| you may not use this project except in compliance with the License. | ||||
| You may obtain a copy of the License at | ||||
|  | ||||
|     http://www.apache.org/licenses/LICENSE-2.0 | ||||
|  | ||||
| Unless required by applicable law or agreed to in writing, software | ||||
| distributed under the License is distributed on an "AS IS" BASIS, | ||||
| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||||
| See the License for the specific language governing permissions and | ||||
| limitations under the License. | ||||
|  | ||||
| The software includes some works released by third parties under other | ||||
| free and open source licenses. Those works are redistributed under the | ||||
| license terms under which the works were received. For more details, | ||||
| see the ``docs/THIRDPARTY`` file included with this distribution. | ||||
							
								
								
									
										87
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										87
									
								
								README.md
									
									
									
									
									
								
							| @@ -1,87 +0,0 @@ | ||||
| # Zulip overview | ||||
|  | ||||
| [Zulip](https://zulip.com) is an open-source team collaboration tool with unique | ||||
| [topic-based threading][why-zulip] that combines the best of email and chat to | ||||
| make remote work productive and delightful. Fortune 500 companies, [leading open | ||||
| source projects][rust-case-study], and thousands of other organizations use | ||||
| Zulip every day. Zulip is the only [modern team chat app][features] that is | ||||
| designed for both live and asynchronous conversations. | ||||
|  | ||||
| Zulip is built by a distributed community of developers from all around the | ||||
| world, with 74+ people who have each contributed 100+ commits. With | ||||
| over 1000 contributors merging over 500 commits a month, Zulip is the | ||||
| largest and fastest growing open source team chat project. | ||||
|  | ||||
| [](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amain) | ||||
| [](https://codecov.io/gh/zulip/zulip) | ||||
| [][mypy-coverage] | ||||
| [](https://github.com/psf/black) | ||||
| [](https://github.com/prettier/prettier) | ||||
| [](https://github.com/zulip/zulip/releases/latest) | ||||
| [](https://zulip.readthedocs.io/en/latest/) | ||||
| [](https://chat.zulip.org) | ||||
| [](https://twitter.com/zulip) | ||||
| [](https://github.com/sponsors/zulip) | ||||
|  | ||||
| [mypy-coverage]: https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/ | ||||
| [why-zulip]: https://zulip.com/why-zulip/ | ||||
| [rust-case-study]: https://zulip.com/case-studies/rust/ | ||||
| [features]: https://zulip.com/features/ | ||||
|  | ||||
| ## Getting started | ||||
|  | ||||
| Click on the appropriate link below. If nothing seems to apply, | ||||
| join us on the | ||||
| [Zulip community server](https://zulip.com/development-community/) | ||||
| and tell us what's up! | ||||
|  | ||||
| You might be interested in: | ||||
|  | ||||
| - **Contributing code**. Check out our | ||||
|   [guide for new contributors](https://zulip.readthedocs.io/en/latest/overview/contributing.html) | ||||
|   to get started. Zulip prides itself on maintaining a clean and | ||||
|   well-tested codebase, and a stock of hundreds of | ||||
|   [beginner-friendly issues][beginner-friendly]. | ||||
|  | ||||
| - **Contributing non-code**. | ||||
|   [Report an issue](https://zulip.readthedocs.io/en/latest/overview/contributing.html#reporting-issues), | ||||
|   [translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) Zulip | ||||
|   into your language, | ||||
|   [write](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach) | ||||
|   for the Zulip blog, or | ||||
|   [give us feedback](https://zulip.readthedocs.io/en/latest/overview/contributing.html#user-feedback). We | ||||
|   would love to hear from you, even if you're just trying the product out. | ||||
|  | ||||
| - **Supporting Zulip**. Advocate for your organization to use Zulip, become a [sponsor](https://github.com/sponsors/zulip), write a | ||||
|   review in the mobile app stores, or | ||||
|   [upvote Zulip](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach) on | ||||
|   product comparison sites. | ||||
|  | ||||
| - **Checking Zulip out**. The best way to see Zulip in action is to drop by | ||||
|   the | ||||
|   [Zulip community server](https://zulip.com/development-community/). We | ||||
|   also recommend reading Zulip for | ||||
|   [open source](https://zulip.com/for/open-source/), Zulip for | ||||
|   [companies](https://zulip.com/for/companies/), or Zulip for | ||||
|   [communities](https://zulip.com/for/working-groups-and-communities/). | ||||
|  | ||||
| - **Running a Zulip server**. Use a preconfigured [DigitalOcean droplet](https://marketplace.digitalocean.com/apps/zulip), | ||||
|   [install Zulip](https://zulip.readthedocs.io/en/stable/production/install.html) | ||||
|   directly, or use Zulip's | ||||
|   experimental [Docker image](https://zulip.readthedocs.io/en/latest/production/deployment.html#zulip-in-docker). | ||||
|   Commercial support is available; see <https://zulip.com/plans> for details. | ||||
|  | ||||
| - **Using Zulip without setting up a server**. <https://zulip.com> | ||||
|   offers free and commercial hosting, including providing our paid | ||||
|   plan for free to fellow open source projects. | ||||
|  | ||||
| - **Participating in [outreach | ||||
|   programs](https://zulip.readthedocs.io/en/latest/overview/contributing.html#outreach-programs)** | ||||
|   like Google Summer of Code. | ||||
|  | ||||
| You may also be interested in reading our [blog](https://blog.zulip.org/) or | ||||
| following us on [Twitter](https://twitter.com/zulip). | ||||
| Zulip is distributed under the | ||||
| [Apache 2.0](https://github.com/zulip/zulip/blob/main/LICENSE) license. | ||||
|  | ||||
| [beginner-friendly]: https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22 | ||||
							
								
								
									
										37
									
								
								SECURITY.md
									
									
									
									
									
								
							
							
						
						
									
										37
									
								
								SECURITY.md
									
									
									
									
									
								
							| @@ -1,37 +0,0 @@ | ||||
| # Security policy | ||||
|  | ||||
| ## Reporting a vulnerability | ||||
|  | ||||
| We love responsible reports of (potential) security issues in Zulip, | ||||
| whether in the latest release or our development branch. | ||||
|  | ||||
| Our security contact is security@zulip.com. Reporters should expect a | ||||
| response within 24 hours. | ||||
|  | ||||
| Please include details on the issue and how you'd like to be credited | ||||
| in our release notes when we publish the fix. | ||||
|  | ||||
| Our [security model][security-model] document may be a helpful | ||||
| resource. | ||||
|  | ||||
| ## Security announcements | ||||
|  | ||||
| We send security announcements to our [announcement mailing | ||||
| list](https://groups.google.com/g/zulip-announce). If you are running | ||||
| Zulip in production, you should subscribe, by clicking "Join group" at | ||||
| the top of that page. | ||||
|  | ||||
| ## Supported versions | ||||
|  | ||||
| Zulip provides security support for the latest major release, in the | ||||
| form of minor security/maintenance releases. | ||||
|  | ||||
| We work hard to make [upgrades][upgrades] reliable, so that there's no | ||||
| reason to run older major releases. | ||||
|  | ||||
| See also our documentation on the [Zulip release | ||||
| lifecycle][release-lifecycle]. | ||||
|  | ||||
| [security-model]: https://zulip.readthedocs.io/en/latest/production/security-model.html | ||||
| [upgrades]: https://zulip.readthedocs.io/en/latest/production/upgrade-or-modify.html#upgrading-to-a-release | ||||
| [release-lifecycle]: https://zulip.readthedocs.io/en/latest/overview/release-lifecycle.html | ||||
							
								
								
									
										108
									
								
								Vagrantfile
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										108
									
								
								Vagrantfile
									
									
									
									
										vendored
									
									
								
							| @@ -1,108 +0,0 @@ | ||||
| # -*- mode: ruby -*- | ||||
|  | ||||
| Vagrant.require_version ">= 2.2.6" | ||||
|  | ||||
| Vagrant.configure("2") do |config| | ||||
|   # The Zulip development environment runs on 9991 on the guest. | ||||
|   host_port = 9991 | ||||
|   http_proxy = https_proxy = no_proxy = nil | ||||
|   host_ip_addr = "127.0.0.1" | ||||
|  | ||||
|   # System settings for the virtual machine. | ||||
|   vm_num_cpus = "2" | ||||
|   vm_memory = "2048" | ||||
|  | ||||
|   debian_mirror = "" | ||||
|   vboxadd_version = nil | ||||
|  | ||||
|   config.vm.synced_folder ".", "/vagrant", disabled: true | ||||
|   config.vm.synced_folder ".", "/srv/zulip" | ||||
|  | ||||
|   vagrant_config_file = ENV["HOME"] + "/.zulip-vagrant-config" | ||||
|   if File.file?(vagrant_config_file) | ||||
|     IO.foreach(vagrant_config_file) do |line| | ||||
|       line.chomp! | ||||
|       key, value = line.split(nil, 2) | ||||
|       case key | ||||
|       when /^([#;]|$)/ # ignore comments | ||||
|       when "HTTP_PROXY"; http_proxy = value | ||||
|       when "HTTPS_PROXY"; https_proxy = value | ||||
|       when "NO_PROXY"; no_proxy = value | ||||
|       when "HOST_PORT"; host_port = value.to_i | ||||
|       when "HOST_IP_ADDR"; host_ip_addr = value | ||||
|       when "GUEST_CPUS"; vm_num_cpus = value | ||||
|       when "GUEST_MEMORY_MB"; vm_memory = value | ||||
|       when "DEBIAN_MIRROR"; debian_mirror = value | ||||
|       when "VBOXADD_VERSION"; vboxadd_version = value | ||||
|       end | ||||
|     end | ||||
|   end | ||||
|  | ||||
|   if Vagrant.has_plugin?("vagrant-proxyconf") | ||||
|     if !http_proxy.nil? | ||||
|       config.proxy.http = http_proxy | ||||
|     end | ||||
|     if !https_proxy.nil? | ||||
|       config.proxy.https = https_proxy | ||||
|     end | ||||
|     if !no_proxy.nil? | ||||
|       config.proxy.no_proxy = no_proxy | ||||
|     end | ||||
|   elsif !http_proxy.nil? or !https_proxy.nil? | ||||
|     # This prints twice due to https://github.com/hashicorp/vagrant/issues/7504 | ||||
|     # We haven't figured out a workaround. | ||||
|     puts "You have specified value for proxy in ~/.zulip-vagrant-config file but did not " \ | ||||
|          "install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install " \ | ||||
|          "vagrant-proxyconf` in a terminal.  This error will appear twice." | ||||
|     exit | ||||
|   end | ||||
|  | ||||
|   config.vm.network "forwarded_port", guest: 9991, host: host_port, host_ip: host_ip_addr | ||||
|   config.vm.network "forwarded_port", guest: 9994, host: host_port + 3, host_ip: host_ip_addr | ||||
|   # Specify Docker provider before VirtualBox provider so it's preferred. | ||||
|   config.vm.provider "docker" do |d, override| | ||||
|     d.build_dir = File.join(__dir__, "tools", "setup", "dev-vagrant-docker") | ||||
|     d.build_args = ["--build-arg", "VAGRANT_UID=#{Process.uid}"] | ||||
|     if !debian_mirror.empty? | ||||
|       d.build_args += ["--build-arg", "DEBIAN_MIRROR=#{debian_mirror}"] | ||||
|     end | ||||
|     d.has_ssh = true | ||||
|     d.create_args = ["--ulimit", "nofile=1024:65536"] | ||||
|   end | ||||
|  | ||||
|   config.vm.provider "virtualbox" do |vb, override| | ||||
|     override.vm.box = "bento/debian-10" | ||||
|     # It's possible we can get away with just 1.5GB; more testing needed | ||||
|     vb.memory = vm_memory | ||||
|     vb.cpus = vm_num_cpus | ||||
|  | ||||
|     if !vboxadd_version.nil? | ||||
|       override.vbguest.installer = Class.new(VagrantVbguest::Installers::Debian) do | ||||
|         define_method(:host_version) do |reload = false| | ||||
|           VagrantVbguest::Version(vboxadd_version) | ||||
|         end | ||||
|       end | ||||
|       override.vbguest.allow_downgrade = true | ||||
|       override.vbguest.iso_path = "https://download.virtualbox.org/virtualbox/#{vboxadd_version}/VBoxGuestAdditions_#{vboxadd_version}.iso" | ||||
|     end | ||||
|   end | ||||
|  | ||||
|   config.vm.provider "hyperv" do |h, override| | ||||
|     override.vm.box = "bento/debian-10" | ||||
|     h.memory = vm_memory | ||||
|     h.maxmemory = vm_memory | ||||
|     h.cpus = vm_num_cpus | ||||
|   end | ||||
|  | ||||
|   config.vm.provider "parallels" do |prl, override| | ||||
|     override.vm.box = "bento/debian-10" | ||||
|     prl.memory = vm_memory | ||||
|     prl.cpus = vm_num_cpus | ||||
|   end | ||||
|  | ||||
|   config.vm.provision "shell", | ||||
|     # We want provision to be run with the permissions of the vagrant user. | ||||
|     privileged: false, | ||||
|     path: "tools/setup/vagrant-provision", | ||||
|     env: { "DEBIAN_MIRROR" => debian_mirror } | ||||
| end | ||||
| @@ -1,820 +0,0 @@ | ||||
| import logging | ||||
| import time | ||||
| from collections import OrderedDict, defaultdict | ||||
| from datetime import datetime, timedelta | ||||
| from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.db import connection, models | ||||
| from django.db.models import F | ||||
| from psycopg2.sql import SQL, Composable, Identifier, Literal | ||||
|  | ||||
| from analytics.models import ( | ||||
|     BaseCount, | ||||
|     FillState, | ||||
|     InstallationCount, | ||||
|     RealmCount, | ||||
|     StreamCount, | ||||
|     UserCount, | ||||
|     installation_epoch, | ||||
| ) | ||||
| from zerver.lib.logging_util import log_to_file | ||||
| from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, floor_to_hour, verify_UTC | ||||
| from zerver.models import Message, Realm, RealmAuditLog, Stream, UserActivityInterval, UserProfile | ||||
|  | ||||
| ## Logging setup ## | ||||
|  | ||||
| logger = logging.getLogger("zulip.management") | ||||
| log_to_file(logger, settings.ANALYTICS_LOG_PATH) | ||||
|  | ||||
| # You can't subtract timedelta.max from a datetime, so use this instead | ||||
| TIMEDELTA_MAX = timedelta(days=365 * 1000) | ||||
|  | ||||
| ## Class definitions ## | ||||
|  | ||||
|  | ||||
| class CountStat: | ||||
|     HOUR = "hour" | ||||
|     DAY = "day" | ||||
|     FREQUENCIES = frozenset([HOUR, DAY]) | ||||
|  | ||||
|     @property | ||||
|     def time_increment(self) -> timedelta: | ||||
|         if self.frequency == CountStat.HOUR: | ||||
|             return timedelta(hours=1) | ||||
|         return timedelta(days=1) | ||||
|  | ||||
|     def __init__( | ||||
|         self, | ||||
|         property: str, | ||||
|         data_collector: "DataCollector", | ||||
|         frequency: str, | ||||
|         interval: Optional[timedelta] = None, | ||||
|     ) -> None: | ||||
|         self.property = property | ||||
|         self.data_collector = data_collector | ||||
|         # might have to do something different for bitfields | ||||
|         if frequency not in self.FREQUENCIES: | ||||
|             raise AssertionError(f"Unknown frequency: {frequency}") | ||||
|         self.frequency = frequency | ||||
|         if interval is not None: | ||||
|             self.interval = interval | ||||
|         else: | ||||
|             self.interval = self.time_increment | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"<CountStat: {self.property}>" | ||||
|  | ||||
|     def last_successful_fill(self) -> Optional[datetime]: | ||||
|         fillstate = FillState.objects.filter(property=self.property).first() | ||||
|         if fillstate is None: | ||||
|             return None | ||||
|         if fillstate.state == FillState.DONE: | ||||
|             return fillstate.end_time | ||||
|         return fillstate.end_time - self.time_increment | ||||
|  | ||||
|  | ||||
| class LoggingCountStat(CountStat): | ||||
|     def __init__(self, property: str, output_table: Type[BaseCount], frequency: str) -> None: | ||||
|         CountStat.__init__(self, property, DataCollector(output_table, None), frequency) | ||||
|  | ||||
|  | ||||
| class DependentCountStat(CountStat): | ||||
|     def __init__( | ||||
|         self, | ||||
|         property: str, | ||||
|         data_collector: "DataCollector", | ||||
|         frequency: str, | ||||
|         interval: Optional[timedelta] = None, | ||||
|         dependencies: Sequence[str] = [], | ||||
|     ) -> None: | ||||
|         CountStat.__init__(self, property, data_collector, frequency, interval=interval) | ||||
|         self.dependencies = dependencies | ||||
|  | ||||
|  | ||||
| class DataCollector: | ||||
|     def __init__( | ||||
|         self, | ||||
|         output_table: Type[BaseCount], | ||||
|         pull_function: Optional[Callable[[str, datetime, datetime, Optional[Realm]], int]], | ||||
|     ) -> None: | ||||
|         self.output_table = output_table | ||||
|         self.pull_function = pull_function | ||||
|  | ||||
|  | ||||
| ## CountStat-level operations ## | ||||
|  | ||||
|  | ||||
| def process_count_stat( | ||||
|     stat: CountStat, fill_to_time: datetime, realm: Optional[Realm] = None | ||||
| ) -> None: | ||||
|     # TODO: The realm argument is not yet supported, in that we don't | ||||
|     # have a solution for how to update FillState if it is passed.  It | ||||
|     # exists solely as partial plumbing for when we do fully implement | ||||
|     # doing single-realm analytics runs for use cases like data import. | ||||
|     # | ||||
|     # Also, note that for the realm argument to be properly supported, | ||||
|     # the CountStat object passed in needs to have come from | ||||
|     # E.g. get_count_stats(realm), i.e. have the realm_id already | ||||
|     # entered into the SQL query defined by the CountState object. | ||||
|     verify_UTC(fill_to_time) | ||||
|     if floor_to_hour(fill_to_time) != fill_to_time: | ||||
|         raise ValueError(f"fill_to_time must be on an hour boundary: {fill_to_time}") | ||||
|  | ||||
|     fill_state = FillState.objects.filter(property=stat.property).first() | ||||
|     if fill_state is None: | ||||
|         currently_filled = installation_epoch() | ||||
|         fill_state = FillState.objects.create( | ||||
|             property=stat.property, end_time=currently_filled, state=FillState.DONE | ||||
|         ) | ||||
|         logger.info("INITIALIZED %s %s", stat.property, currently_filled) | ||||
|     elif fill_state.state == FillState.STARTED: | ||||
|         logger.info("UNDO START %s %s", stat.property, fill_state.end_time) | ||||
|         do_delete_counts_at_hour(stat, fill_state.end_time) | ||||
|         currently_filled = fill_state.end_time - stat.time_increment | ||||
|         do_update_fill_state(fill_state, currently_filled, FillState.DONE) | ||||
|         logger.info("UNDO DONE %s", stat.property) | ||||
|     elif fill_state.state == FillState.DONE: | ||||
|         currently_filled = fill_state.end_time | ||||
|     else: | ||||
|         raise AssertionError(f"Unknown value for FillState.state: {fill_state.state}.") | ||||
|  | ||||
|     if isinstance(stat, DependentCountStat): | ||||
|         for dependency in stat.dependencies: | ||||
|             dependency_fill_time = COUNT_STATS[dependency].last_successful_fill() | ||||
|             if dependency_fill_time is None: | ||||
|                 logger.warning( | ||||
|                     "DependentCountStat %s run before dependency %s.", stat.property, dependency | ||||
|                 ) | ||||
|                 return | ||||
|             fill_to_time = min(fill_to_time, dependency_fill_time) | ||||
|  | ||||
|     currently_filled = currently_filled + stat.time_increment | ||||
|     while currently_filled <= fill_to_time: | ||||
|         logger.info("START %s %s", stat.property, currently_filled) | ||||
|         start = time.time() | ||||
|         do_update_fill_state(fill_state, currently_filled, FillState.STARTED) | ||||
|         do_fill_count_stat_at_hour(stat, currently_filled, realm) | ||||
|         do_update_fill_state(fill_state, currently_filled, FillState.DONE) | ||||
|         end = time.time() | ||||
|         currently_filled = currently_filled + stat.time_increment | ||||
|         logger.info("DONE %s (%dms)", stat.property, (end - start) * 1000) | ||||
|  | ||||
|  | ||||
| def do_update_fill_state(fill_state: FillState, end_time: datetime, state: int) -> None: | ||||
|     fill_state.end_time = end_time | ||||
|     fill_state.state = state | ||||
|     fill_state.save() | ||||
|  | ||||
|  | ||||
# We assume end_time is valid (e.g. is on a day or hour boundary as appropriate)
# and is time-zone-aware. It is the caller's responsibility to enforce this!
def do_fill_count_stat_at_hour(
    stat: CountStat, end_time: datetime, realm: Optional[Realm] = None
) -> None:
    """Compute one time period of data for `stat`, ending at end_time.

    For ordinary stats, runs the stat's pull_function to populate the base
    table before aggregating; LoggingCountStats skip the pull step, since
    their base-table rows are written incrementally at event time.
    """
    start_time = end_time - stat.interval
    if not isinstance(stat, LoggingCountStat):
        t0 = time.time()
        assert stat.data_collector.pull_function is not None
        rows_added = stat.data_collector.pull_function(stat.property, start_time, end_time, realm)
        logger.info(
            "%s run pull_function (%dms/%sr)",
            stat.property,
            (time.time() - t0) * 1000,
            rows_added,
        )
    do_aggregate_to_summary_table(stat, end_time, realm)
|  | ||||
|  | ||||
def do_delete_counts_at_hour(stat: CountStat, end_time: datetime) -> None:
    """Delete all analytics rows for `stat` with the given end_time.

    For a LoggingCountStat the base-table rows are the canonical log, so
    only the aggregated summary rows (InstallationCount, and RealmCount
    when the base table is UserCount/StreamCount) are removed.  For other
    stats, every table's rows for this period are deleted.
    """
    if isinstance(stat, LoggingCountStat):
        InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()
        if stat.data_collector.output_table in [UserCount, StreamCount]:
            RealmCount.objects.filter(property=stat.property, end_time=end_time).delete()
    else:
        for table in (UserCount, StreamCount, RealmCount, InstallationCount):
            table.objects.filter(property=stat.property, end_time=end_time).delete()
|  | ||||
|  | ||||
def do_aggregate_to_summary_table(
    stat: CountStat, end_time: datetime, realm: Optional[Realm] = None
) -> None:
    """Roll base-table counts for `stat` at end_time up into summary tables.

    If the stat's output table is UserCount or StreamCount, its rows are
    summed into RealmCount (per realm and subgroup).  When running for all
    realms (realm is None), RealmCount rows are then summed into
    InstallationCount.
    """
    cursor = connection.cursor()

    # Aggregate into RealmCount
    output_table = stat.data_collector.output_table
    if realm is not None:
        # Restrict the aggregation to a single realm's rows.
        realm_clause = SQL("AND zerver_realm.id = {}").format(Literal(realm.id))
    else:
        realm_clause = SQL("")

    if output_table in (UserCount, StreamCount):
        # COALESCE ensures a realm with no base rows still gets a 0 row.
        realmcount_query = SQL(
            """
            INSERT INTO analytics_realmcount
                (realm_id, value, property, subgroup, end_time)
            SELECT
                zerver_realm.id, COALESCE(sum({output_table}.value), 0), %(property)s,
                {output_table}.subgroup, %(end_time)s
            FROM zerver_realm
            JOIN {output_table}
            ON
                zerver_realm.id = {output_table}.realm_id
            WHERE
                {output_table}.property = %(property)s AND
                {output_table}.end_time = %(end_time)s
                {realm_clause}
            GROUP BY zerver_realm.id, {output_table}.subgroup
        """
        ).format(
            output_table=Identifier(output_table._meta.db_table),
            realm_clause=realm_clause,
        )
        start = time.time()
        cursor.execute(
            realmcount_query,
            {
                "property": stat.property,
                "end_time": end_time,
            },
        )
        end = time.time()
        logger.info(
            "%s RealmCount aggregation (%dms/%sr)",
            stat.property,
            (end - start) * 1000,
            cursor.rowcount,
        )

    if realm is None:
        # Aggregate into InstallationCount.  Only run if we just
        # processed counts for all realms.
        #
        # TODO: Add support for updating installation data after
        # changing an individual realm's values.
        installationcount_query = SQL(
            """
            INSERT INTO analytics_installationcount
                (value, property, subgroup, end_time)
            SELECT
                sum(value), %(property)s, analytics_realmcount.subgroup, %(end_time)s
            FROM analytics_realmcount
            WHERE
                property = %(property)s AND
                end_time = %(end_time)s
            GROUP BY analytics_realmcount.subgroup
        """
        )
        start = time.time()
        cursor.execute(
            installationcount_query,
            {
                "property": stat.property,
                "end_time": end_time,
            },
        )
        end = time.time()
        logger.info(
            "%s InstallationCount aggregation (%dms/%sr)",
            stat.property,
            (end - start) * 1000,
            cursor.rowcount,
        )

    cursor.close()
|  | ||||
|  | ||||
| ## Utility functions called from outside counts.py ## | ||||
|  | ||||
| # called from zerver/lib/actions.py; should not throw any errors | ||||
def do_increment_logging_stat(
    zerver_object: Union[Realm, UserProfile, Stream],
    stat: CountStat,
    subgroup: Optional[Union[str, int, bool]],
    event_time: datetime,
    increment: int = 1,
) -> None:
    """Bump a LoggingCountStat's row for the period containing event_time.

    The row in the stat's output table -- keyed by the object, subgroup,
    and the period's ceiling end_time -- is created with value=increment if
    missing, or atomically incremented via an F() expression otherwise.  A
    falsy increment is a no-op.
    """
    if not increment:
        return

    # Identify the row by the object(s) appropriate for the output table.
    table = stat.data_collector.output_table
    id_args: Dict[str, Union[Realm, UserProfile, Stream]]
    if table == RealmCount:
        assert isinstance(zerver_object, Realm)
        id_args = {"realm": zerver_object}
    elif table == UserCount:
        assert isinstance(zerver_object, UserProfile)
        id_args = {"user": zerver_object, "realm": zerver_object.realm}
    else:  # StreamCount
        assert isinstance(zerver_object, Stream)
        id_args = {"stream": zerver_object, "realm": zerver_object.realm}

    # Round the event time up to the end of its day/hour bucket.
    if stat.frequency == CountStat.DAY:
        end_time = ceiling_to_day(event_time)
    else:  # CountStat.HOUR:
        end_time = ceiling_to_hour(event_time)

    row, created = table.objects.get_or_create(
        property=stat.property,
        subgroup=subgroup,
        end_time=end_time,
        defaults={"value": increment},
        **id_args,
    )
    if not created:
        # F("value") makes the increment a single atomic SQL UPDATE.
        row.value = F("value") + increment
        row.save(update_fields=["value"])
|  | ||||
|  | ||||
def do_drop_all_analytics_tables() -> None:
    """Wipe every analytics table, including the FillState bookkeeping."""
    for table in (UserCount, StreamCount, RealmCount, InstallationCount, FillState):
        table.objects.all().delete()
|  | ||||
|  | ||||
def do_drop_single_stat(property: str) -> None:
    """Delete all rows for one stat (by property name) from every
    analytics table, including its FillState row."""
    for table in (UserCount, StreamCount, RealmCount, InstallationCount, FillState):
        table.objects.filter(property=property).delete()
|  | ||||
|  | ||||
## DataCollector-level operations ##

# A QueryFn maps a dict of SQL fragments (keys 'subgroup' and
# 'group_by_clause') to a complete, composable INSERT ... SELECT query.
QueryFn = Callable[[Dict[str, Composable]], Composable]
|  | ||||
|  | ||||
def do_pull_by_sql_query(
    property: str,
    start_time: datetime,
    end_time: datetime,
    query: QueryFn,
    group_by: Optional[Tuple[Type[models.Model], str]],
) -> int:
    """Execute an analytics INSERT ... SELECT query and return its rowcount.

    property -- the CountStat property name, passed through to the query.
    start_time/end_time -- the half-open window [start_time, end_time).
    query -- a QueryFn; given the subgroup/group_by fragments, it produces
        the full SQL.
    group_by -- optional (model, column) pair used as the subgroup column;
        None means a NULL subgroup and no extra GROUP BY term.
    """
    if group_by is None:
        subgroup: Composable = SQL("NULL")
        group_by_clause = SQL("")
    else:
        subgroup = Identifier(group_by[0]._meta.db_table, group_by[1])
        group_by_clause = SQL(", {}").format(subgroup)

    # We do string replacement here because cursor.execute will reject a
    # group_by_clause given as a param.
    # We pass in the datetimes as params to cursor.execute so that we don't have to
    # think about how to convert python datetimes to SQL datetimes.
    query_ = query(
        {
            "subgroup": subgroup,
            "group_by_clause": group_by_clause,
        }
    )
    # Use the cursor as a context manager so it is closed even if
    # execute() raises (the previous version leaked the cursor on error).
    with connection.cursor() as cursor:
        cursor.execute(
            query_,
            {
                "property": property,
                "time_start": start_time,
                "time_end": end_time,
            },
        )
        return cursor.rowcount
|  | ||||
|  | ||||
def sql_data_collector(
    output_table: Type[BaseCount],
    query: QueryFn,
    group_by: Optional[Tuple[Type[models.Model], str]],
) -> DataCollector:
    """Build a DataCollector whose pull step runs `query` through
    do_pull_by_sql_query, writing into `output_table`."""

    def pull_function(
        property: str, start_time: datetime, end_time: datetime, realm: Optional[Realm] = None
    ) -> int:
        # The realm parameter exists only so this signature matches the one
        # used by the 'minutes_active::day' stat (do_pull_minutes_active),
        # which needs it.  It is deliberately ignored here: for SQL
        # collectors, any realm restriction is already baked into `query`.
        return do_pull_by_sql_query(property, start_time, end_time, query, group_by)

    return DataCollector(output_table, pull_function)
|  | ||||
|  | ||||
def do_pull_minutes_active(
    property: str, start_time: datetime, end_time: datetime, realm: Optional[Realm] = None
) -> int:
    """Write per-user minutes-active UserCount rows for the given window.

    Sums each UserActivityInterval's overlap with [start_time, end_time)
    per (user, realm); users with at least a full minute of overlap get a
    row whose value is their whole minutes active.  Returns the number of
    rows created.
    """
    intervals = (
        UserActivityInterval.objects.filter(
            end__gt=start_time,
            start__lt=end_time,
        )
        .select_related(
            "user_profile",
        )
        .values_list("user_profile_id", "user_profile__realm_id", "start", "end")
    )

    seconds_active: Dict[Tuple[int, int], float] = defaultdict(float)
    for user_id, realm_id, interval_start, interval_end in intervals:
        if realm is not None and realm.id != realm_id:
            continue
        # Clip the interval to the window before measuring it.
        overlap_start = max(start_time, interval_start)
        overlap_end = min(end_time, interval_end)
        seconds_active[(user_id, realm_id)] += (overlap_end - overlap_start).total_seconds()

    rows = []
    for (user_id, realm_id), seconds in seconds_active.items():
        if seconds >= 60:
            rows.append(
                UserCount(
                    user_id=user_id,
                    realm_id=realm_id,
                    property=property,
                    end_time=end_time,
                    value=int(seconds // 60),
                )
            )
    UserCount.objects.bulk_create(rows)
    return len(rows)
|  | ||||
|  | ||||
def count_message_by_user_query(realm: Optional[Realm]) -> QueryFn:
    # Counts messages sent per user in [time_start, time_end), subgrouped by
    # the caller's group_by column (is_bot or sending_client_id in practice).
    if realm is None:
        realm_clause = SQL("")
    else:
        # The trailing AND chains this into the WHERE clause below.
        realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
    return lambda kwargs: SQL(
        """
    INSERT INTO analytics_usercount
        (user_id, realm_id, value, property, subgroup, end_time)
    SELECT
        zerver_userprofile.id, zerver_userprofile.realm_id, count(*),
        %(property)s, {subgroup}, %(time_end)s
    FROM zerver_userprofile
    JOIN zerver_message
    ON
        zerver_userprofile.id = zerver_message.sender_id
    WHERE
        zerver_userprofile.date_joined < %(time_end)s AND
        zerver_message.date_sent >= %(time_start)s AND
        {realm_clause}
        zerver_message.date_sent < %(time_end)s
    GROUP BY zerver_userprofile.id {group_by_clause}
"""
    ).format(**kwargs, realm_clause=realm_clause)
|  | ||||
|  | ||||
# Note: ignores the group_by / group_by_clause.
def count_message_type_by_user_query(realm: Optional[Realm]) -> QueryFn:
    # Counts messages sent per user, subgrouped by a derived message_type
    # (private_message / huddle_message / private_stream / public_stream)
    # computed from the recipient type and stream privacy.
    if realm is None:
        realm_clause = SQL("")
    else:
        realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
    return lambda kwargs: SQL(
        """
    INSERT INTO analytics_usercount
            (realm_id, user_id, value, property, subgroup, end_time)
    SELECT realm_id, id, SUM(count) AS value, %(property)s, message_type, %(time_end)s
    FROM
    (
        SELECT zerver_userprofile.realm_id, zerver_userprofile.id, count(*),
        CASE WHEN
                  zerver_recipient.type = 1 THEN 'private_message'
             WHEN
                  zerver_recipient.type = 3 THEN 'huddle_message'
             WHEN
                  zerver_stream.invite_only = TRUE THEN 'private_stream'
             ELSE 'public_stream'
        END
        message_type

        FROM zerver_userprofile
        JOIN zerver_message
        ON
            zerver_userprofile.id = zerver_message.sender_id AND
            zerver_message.date_sent >= %(time_start)s AND
            {realm_clause}
            zerver_message.date_sent < %(time_end)s
        JOIN zerver_recipient
        ON
            zerver_message.recipient_id = zerver_recipient.id
        LEFT JOIN zerver_stream
        ON
            zerver_recipient.type_id = zerver_stream.id
        GROUP BY
            zerver_userprofile.realm_id, zerver_userprofile.id,
            zerver_recipient.type, zerver_stream.invite_only
    ) AS subquery
    GROUP BY realm_id, id, message_type
"""
    ).format(**kwargs, realm_clause=realm_clause)
|  | ||||
|  | ||||
# This query joins to the UserProfile table since all current queries that
# use this also subgroup on UserProfile.is_bot. If in the future there is a
# stat that counts messages by stream and doesn't need the UserProfile
# table, consider writing a new query for efficiency.
def count_message_by_stream_query(realm: Optional[Realm]) -> QueryFn:
    # Counts messages per stream in [time_start, time_end);
    # zerver_recipient.type = 2 restricts to stream recipients.
    if realm is None:
        realm_clause = SQL("")
    else:
        realm_clause = SQL("zerver_stream.realm_id = {} AND").format(Literal(realm.id))
    return lambda kwargs: SQL(
        """
    INSERT INTO analytics_streamcount
        (stream_id, realm_id, value, property, subgroup, end_time)
    SELECT
        zerver_stream.id, zerver_stream.realm_id, count(*), %(property)s, {subgroup}, %(time_end)s
    FROM zerver_stream
    JOIN zerver_recipient
    ON
        zerver_stream.id = zerver_recipient.type_id
    JOIN zerver_message
    ON
        zerver_recipient.id = zerver_message.recipient_id
    JOIN zerver_userprofile
    ON
        zerver_message.sender_id = zerver_userprofile.id
    WHERE
        zerver_stream.date_created < %(time_end)s AND
        zerver_recipient.type = 2 AND
        zerver_message.date_sent >= %(time_start)s AND
        {realm_clause}
        zerver_message.date_sent < %(time_end)s
    GROUP BY zerver_stream.id {group_by_clause}
"""
    ).format(**kwargs, realm_clause=realm_clause)
|  | ||||
|  | ||||
# Hardcodes the query needed by active_users:is_bot:day, since that is
# currently the only stat that uses this.
def count_user_by_realm_query(realm: Optional[Realm]) -> QueryFn:
    # Counts currently-active users per realm who joined in
    # [time_start, time_end).  Note: used with interval=TIMEDELTA_MAX, so
    # the window in practice covers all of history up to time_end.
    if realm is None:
        realm_clause = SQL("")
    else:
        realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
    return lambda kwargs: SQL(
        """
    INSERT INTO analytics_realmcount
        (realm_id, value, property, subgroup, end_time)
    SELECT
        zerver_realm.id, count(*), %(property)s, {subgroup}, %(time_end)s
    FROM zerver_realm
    JOIN zerver_userprofile
    ON
        zerver_realm.id = zerver_userprofile.realm_id
    WHERE
        zerver_realm.date_created < %(time_end)s AND
        zerver_userprofile.date_joined >= %(time_start)s AND
        zerver_userprofile.date_joined < %(time_end)s AND
        {realm_clause}
        zerver_userprofile.is_active = TRUE
    GROUP BY zerver_realm.id {group_by_clause}
"""
    ).format(**kwargs, realm_clause=realm_clause)
|  | ||||
|  | ||||
# Currently hardcodes the query needed for active_users_audit:is_bot:day.
# Assumes that a user cannot have two RealmAuditLog entries with the same event_time and
# event_type in [RealmAuditLog.USER_CREATED, USER_DEACTIVATED, etc].
# In particular, it's important to ensure that migrations don't cause that to happen.
def check_realmauditlog_by_user_query(realm: Optional[Realm]) -> QueryFn:
    # For each user, the ral2 subquery finds their most recent
    # creation/(de|re)activation audit event before time_end; the user
    # counts as active (value 1) iff that latest event is a
    # created/activated/reactivated one.
    if realm is None:
        realm_clause = SQL("")
    else:
        realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
    return lambda kwargs: SQL(
        """
    INSERT INTO analytics_usercount
        (user_id, realm_id, value, property, subgroup, end_time)
    SELECT
        ral1.modified_user_id, ral1.realm_id, 1, %(property)s, {subgroup}, %(time_end)s
    FROM zerver_realmauditlog ral1
    JOIN (
        SELECT modified_user_id, max(event_time) AS max_event_time
        FROM zerver_realmauditlog
        WHERE
            event_type in ({user_created}, {user_activated}, {user_deactivated}, {user_reactivated}) AND
            {realm_clause}
            event_time < %(time_end)s
        GROUP BY modified_user_id
    ) ral2
    ON
        ral1.event_time = max_event_time AND
        ral1.modified_user_id = ral2.modified_user_id
    JOIN zerver_userprofile
    ON
        ral1.modified_user_id = zerver_userprofile.id
    WHERE
        ral1.event_type in ({user_created}, {user_activated}, {user_reactivated})
    """
    ).format(
        **kwargs,
        user_created=Literal(RealmAuditLog.USER_CREATED),
        user_activated=Literal(RealmAuditLog.USER_ACTIVATED),
        user_deactivated=Literal(RealmAuditLog.USER_DEACTIVATED),
        user_reactivated=Literal(RealmAuditLog.USER_REACTIVATED),
        realm_clause=realm_clause,
    )
|  | ||||
|  | ||||
def check_useractivityinterval_by_user_query(realm: Optional[Realm]) -> QueryFn:
    # Writes value 1 for each user with at least one UserActivityInterval
    # overlapping [time_start, time_end); GROUP BY collapses multiple
    # intervals per user into a single row.
    if realm is None:
        realm_clause = SQL("")
    else:
        realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
    return lambda kwargs: SQL(
        """
    INSERT INTO analytics_usercount
        (user_id, realm_id, value, property, subgroup, end_time)
    SELECT
        zerver_userprofile.id, zerver_userprofile.realm_id, 1, %(property)s, {subgroup}, %(time_end)s
    FROM zerver_userprofile
    JOIN zerver_useractivityinterval
    ON
        zerver_userprofile.id = zerver_useractivityinterval.user_profile_id
    WHERE
        zerver_useractivityinterval.end >= %(time_start)s AND
        {realm_clause}
        zerver_useractivityinterval.start < %(time_end)s
    GROUP BY zerver_userprofile.id {group_by_clause}
"""
    ).format(**kwargs, realm_clause=realm_clause)
|  | ||||
|  | ||||
def count_realm_active_humans_query(realm: Optional[Realm]) -> QueryFn:
    # Counts, per realm, users who are both non-bot active accounts
    # (active_users_audit:is_bot:day with subgroup 'false') and 15-day
    # actives, by joining those two already-computed UserCount stats.
    # This is why it is a DependentCountStat on those two properties.
    if realm is None:
        realm_clause = SQL("")
    else:
        realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
    return lambda kwargs: SQL(
        """
    INSERT INTO analytics_realmcount
        (realm_id, value, property, subgroup, end_time)
    SELECT
        usercount1.realm_id, count(*), %(property)s, NULL, %(time_end)s
    FROM (
        SELECT realm_id, user_id
        FROM analytics_usercount
        WHERE
            property = 'active_users_audit:is_bot:day' AND
            subgroup = 'false' AND
            {realm_clause}
            end_time = %(time_end)s
    ) usercount1
    JOIN (
        SELECT realm_id, user_id
        FROM analytics_usercount
        WHERE
            property = '15day_actives::day' AND
            {realm_clause}
            end_time = %(time_end)s
    ) usercount2
    ON
        usercount1.user_id = usercount2.user_id
    GROUP BY usercount1.realm_id
"""
    ).format(**kwargs, realm_clause=realm_clause)
|  | ||||
|  | ||||
# Currently unused and untested
def count_stream_by_realm_query(kwargs: Dict[str, Composable]) -> Composable:
    """QueryFn counting streams created per realm in [time_start, time_end).

    Unlike the other query builders in this file, this is the QueryFn
    itself (it takes the SQL-fragment kwargs directly) and does not support
    a realm restriction.  The previous version ended the JOIN condition
    with a dangling "AND" immediately before WHERE, which made the SQL
    syntactically invalid; that has been removed.  Converted from an
    assigned lambda to a def per PEP 8.
    """
    return SQL(
        """
    INSERT INTO analytics_realmcount
        (realm_id, value, property, subgroup, end_time)
    SELECT
        zerver_realm.id, count(*), %(property)s, {subgroup}, %(time_end)s
    FROM zerver_realm
    JOIN zerver_stream
    ON
        zerver_realm.id = zerver_stream.realm_id
    WHERE
        zerver_realm.date_created < %(time_end)s AND
        zerver_stream.date_created >= %(time_start)s AND
        zerver_stream.date_created < %(time_end)s
    GROUP BY zerver_realm.id {group_by_clause}
"""
    ).format(**kwargs)
|  | ||||
|  | ||||
def get_count_stats(realm: Optional[Realm] = None) -> Dict[str, CountStat]:
    """Return the registry of all CountStats, keyed by property name.

    Insertion order is significant: dependent stats are listed after their
    dependencies.  If `realm` is given, the SQL-backed stats pull data for
    that realm only; LoggingCountStats cannot be restricted this way (see
    the note below).
    """
    ## CountStat declarations ##

    count_stats_ = [
        # Messages sent stats
        # Stats that count the number of messages sent in various ways.
        # These are also the set of stats that read from the Message table.
        CountStat(
            "messages_sent:is_bot:hour",
            sql_data_collector(
                UserCount, count_message_by_user_query(realm), (UserProfile, "is_bot")
            ),
            CountStat.HOUR,
        ),
        CountStat(
            "messages_sent:message_type:day",
            sql_data_collector(UserCount, count_message_type_by_user_query(realm), None),
            CountStat.DAY,
        ),
        CountStat(
            "messages_sent:client:day",
            sql_data_collector(
                UserCount, count_message_by_user_query(realm), (Message, "sending_client_id")
            ),
            CountStat.DAY,
        ),
        CountStat(
            "messages_in_stream:is_bot:day",
            sql_data_collector(
                StreamCount, count_message_by_stream_query(realm), (UserProfile, "is_bot")
            ),
            CountStat.DAY,
        ),
        # Number of users stats
        # Stats that count the number of active users in the UserProfile.is_active sense.
        # 'active_users_audit:is_bot:day' is the canonical record of which users were
        # active on which days (in the UserProfile.is_active sense).
        # Important that this stay a daily stat, so that 'realm_active_humans::day' works as expected.
        CountStat(
            "active_users_audit:is_bot:day",
            sql_data_collector(
                UserCount, check_realmauditlog_by_user_query(realm), (UserProfile, "is_bot")
            ),
            CountStat.DAY,
        ),
        # Important note: LoggingCountStat objects aren't passed the
        # Realm argument, because by nature they have a logging
        # structure, not a pull-from-database structure, so there's no
        # way to compute them for a single realm after the fact (the
        # use case for passing a Realm argument).
        # Sanity check on 'active_users_audit:is_bot:day', and an archetype for future LoggingCountStats.
        # In RealmCount, 'active_users_audit:is_bot:day' should be the partial
        # sum sequence of 'active_users_log:is_bot:day', for any realm that
        # started after the latter stat was introduced.
        LoggingCountStat("active_users_log:is_bot:day", RealmCount, CountStat.DAY),
        # Another sanity check on 'active_users_audit:is_bot:day'. Is only an
        # approximation, e.g. if a user is deactivated between the end of the
        # day and when this stat is run, they won't be counted. However, is the
        # simplest of the three to inspect by hand.
        CountStat(
            "active_users:is_bot:day",
            sql_data_collector(
                RealmCount, count_user_by_realm_query(realm), (UserProfile, "is_bot")
            ),
            CountStat.DAY,
            interval=TIMEDELTA_MAX,
        ),
        # Messages read stats.  messages_read::hour is the total
        # number of messages read, whereas
        # messages_read_interactions::hour tries to count the total
        # number of UI interactions resulting in messages being marked
        # as read (imperfect because of batching of some request
        # types, but less likely to be overwhelmed by a single bulk
        # operation).
        LoggingCountStat("messages_read::hour", UserCount, CountStat.HOUR),
        LoggingCountStat("messages_read_interactions::hour", UserCount, CountStat.HOUR),
        # User activity stats
        # Stats that measure user activity in the UserActivityInterval sense.
        CountStat(
            "1day_actives::day",
            sql_data_collector(UserCount, check_useractivityinterval_by_user_query(realm), None),
            CountStat.DAY,
            interval=timedelta(days=1) - UserActivityInterval.MIN_INTERVAL_LENGTH,
        ),
        CountStat(
            "7day_actives::day",
            sql_data_collector(UserCount, check_useractivityinterval_by_user_query(realm), None),
            CountStat.DAY,
            interval=timedelta(days=7) - UserActivityInterval.MIN_INTERVAL_LENGTH,
        ),
        CountStat(
            "15day_actives::day",
            sql_data_collector(UserCount, check_useractivityinterval_by_user_query(realm), None),
            CountStat.DAY,
            interval=timedelta(days=15) - UserActivityInterval.MIN_INTERVAL_LENGTH,
        ),
        CountStat(
            "minutes_active::day", DataCollector(UserCount, do_pull_minutes_active), CountStat.DAY
        ),
        # Rate limiting stats
        # Used to limit the number of invitation emails sent by a realm
        LoggingCountStat("invites_sent::day", RealmCount, CountStat.DAY),
        # Dependent stats
        # Must come after their dependencies.
        # Canonical account of the number of active humans in a realm on each day.
        DependentCountStat(
            "realm_active_humans::day",
            sql_data_collector(RealmCount, count_realm_active_humans_query(realm), None),
            CountStat.DAY,
            dependencies=["active_users_audit:is_bot:day", "15day_actives::day"],
        ),
    ]

    return OrderedDict((stat.property, stat) for stat in count_stats_)
|  | ||||
|  | ||||
# To avoid refactoring for now, COUNT_STATS can be used as before: it is
# the module-level registry of all stats, built for all realms (realm=None).
COUNT_STATS = get_count_stats()
| @@ -1,79 +0,0 @@ | ||||
| from math import sqrt | ||||
| from random import gauss, random, seed | ||||
| from typing import List | ||||
|  | ||||
| from analytics.lib.counts import CountStat | ||||
|  | ||||
|  | ||||
def generate_time_series_data(
    days: int = 100,
    business_hours_base: float = 10,
    non_business_hours_base: float = 10,
    growth: float = 1,
    autocorrelation: float = 0,
    spikiness: float = 1,
    holiday_rate: float = 0,
    frequency: str = CountStat.DAY,
    partial_sum: bool = False,
    random_seed: int = 26,
) -> List[int]:
    """
    Generate semi-realistic looking time series data for testing analytics graphs.

    days -- Number of days of data. Is the number of data points generated if
        frequency is CountStat.DAY.
    business_hours_base -- Average value during a business hour (or day) at beginning of
        time series, if frequency is CountStat.HOUR (CountStat.DAY, respectively).
    non_business_hours_base -- The above, for non-business hours/days.
    growth -- Ratio between average values at end of time series and beginning of time series.
    autocorrelation -- Makes neighboring data points look more like each other. At 0 each
        point is unaffected by the previous point, and at 1 each point is a deterministic
        function of the previous point.
    spikiness -- 0 means no randomness (other than holiday_rate), higher values increase
        the variance.
    holiday_rate -- Fraction of days randomly set to 0, largely for testing how we handle 0s.
    frequency -- Should be CountStat.HOUR or CountStat.DAY.
    partial_sum -- If True, return partial sum of the series.
    random_seed -- Seed for random number generator.
    """
    # Seed before ANY random draws.  Previously the seed was applied only
    # after the holiday pattern had been generated with random(), so output
    # was nondeterministic whenever holiday_rate > 0, despite random_seed.
    seed(random_seed)
    if frequency == CountStat.HOUR:
        length = days * 24
        # Weekly seasonality template: hours 0-7 of days 0-4 are business hours.
        seasonality = [non_business_hours_base] * 24 * 7
        for day in range(5):
            for hour in range(8):
                seasonality[24 * day + hour] = business_hours_base
        holidays = []
        for i in range(days):
            # A holiday zeroes out the entire day (all 24 hours).
            holidays.extend([random() < holiday_rate] * 24)
    elif frequency == CountStat.DAY:
        length = days
        seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [
            24 * non_business_hours_base
        ] * 2
        holidays = [random() < holiday_rate for i in range(days)]
    else:
        raise AssertionError(f"Unknown frequency: {frequency}")
    if length < 2:
        raise AssertionError(
            f"Must be generating at least 2 data points. Currently generating {length}"
        )
    # Per-step multiplicative factor so the last point's baseline is
    # `growth` times the first point's.
    growth_base = growth ** (1.0 / (length - 1))
    values_no_noise = [
        seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)
    ]

    # AR(1)-style noise: each scalar blends the previous scalar with a
    # fresh standard-normal draw, weighted by autocorrelation.
    noise_scalars = [gauss(0, 1)]
    for i in range(1, length):
        noise_scalars.append(
            noise_scalars[-1] * autocorrelation + gauss(0, 1) * (1 - autocorrelation)
        )

    # Noise is scaled by sqrt(value) (Poisson-like); holidays force 0.
    values = [
        0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
        for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)
    ]
    if partial_sum:
        for i in range(1, length):
            values[i] = values[i - 1] + values[i]
    # Clamp any negative noisy values to zero.
    return [max(v, 0) for v in values]
| @@ -1,33 +0,0 @@ | ||||
| from datetime import datetime, timedelta | ||||
| from typing import List, Optional | ||||
|  | ||||
| from analytics.lib.counts import CountStat | ||||
| from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC | ||||
|  | ||||
|  | ||||
# If min_length is None, returns end_times from ceiling(start) to floor(end), inclusive.
# If min_length is greater than 0, pads the list to the left.
# So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22],
# and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
def time_range(
    start: datetime, end: datetime, frequency: str, min_length: Optional[int]
) -> List[datetime]:
    # Both endpoints must be timezone-aware UTC datetimes.
    verify_UTC(start)
    verify_UTC(end)
    if frequency == CountStat.HOUR:
        step = timedelta(hours=1)
        last = floor_to_hour(end)
    elif frequency == CountStat.DAY:
        step = timedelta(days=1)
        last = floor_to_day(end)
    else:
        raise AssertionError(f"Unknown frequency: {frequency}")

    # Pad on the left so the result has at least min_length entries.
    if min_length is not None:
        start = min(start, last - (min_length - 1) * step)

    # Walk backwards from the floored end, then reverse into ascending order.
    result: List[datetime] = []
    cursor = last
    while cursor >= start:
        result.append(cursor)
        cursor -= step
    return result[::-1]
							
								
								
									
										57
									
								
								analytics/management/commands/active_user_stats.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										57
									
								
								analytics/management/commands/active_user_stats.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,57 @@ | ||||
| from __future__ import absolute_import | ||||
|  | ||||
| from django.core.management.base import BaseCommand | ||||
|  | ||||
| from zerver.models import UserPresence, UserActivity | ||||
| from zerver.lib.utils import statsd, statsd_key | ||||
|  | ||||
| from datetime import datetime, timedelta | ||||
| from collections import defaultdict | ||||
|  | ||||
class Command(BaseCommand):
    help = """Sends active user statistics to statsd.

    Run as a cron job that runs every 10 minutes."""

    # Recency buckets, in hours: ~10 minutes, 2hr, 12hr, 1 day, 2 days
    # (TODO: was intended to be 2 *business* days), and 1 week.
    hour_buckets = [0.16, 2, 12, 24, 48, 168]

    def bucket_users_by_recency(self, records, get_active_time):
        """Group user emails by realm domain into recency buckets.

        records -- iterable of objects with a .user_profile attribute.
        get_active_time -- callable mapping a record to the datetime at
            which that user was last known to be active.
        Returns {realm_domain: {bucket_hours: [user emails]}}.
        """
        user_info = defaultdict(lambda: defaultdict(list))
        for record in records:
            known_active = get_active_time(record)
            domain = record.user_profile.realm.domain
            now = datetime.now(known_active.tzinfo)
            for bucket in self.hour_buckets:
                # Touch the bucket so empty buckets are still reported,
                # matching the original behavior.
                emails = user_info[domain][bucket]
                if now - known_active < timedelta(hours=bucket):
                    emails.append(record.user_profile.email)
        return user_info

    def report_buckets(self, line_format, metric_format, user_info):
        """Print each realm's bucket sizes and push them to statsd gauges."""
        for realm, buckets in user_info.items():
            print("Realm %s" % realm)
            for hr, users in sorted(buckets.items()):
                print(line_format % (hr, len(users)))
                statsd.gauge(metric_format % (statsd_key(realm, True), statsd_key(hr, True)),
                             len(users))

    def handle(self, *args, **options):
        # All users with presence data in the last week (plus 30 minutes of
        # slop to cover the IDLE back-dating below).
        cutoff = datetime.now() - timedelta(minutes=30, hours=168)
        presences = UserPresence.objects.select_related().filter(timestamp__gt=cutoff)

        def presence_active_time(presence):
            # An IDLE presence record only proves the user was active up to
            # 30 minutes before its timestamp.
            if presence.status == UserPresence.IDLE:
                return presence.timestamp - timedelta(minutes=30)
            return presence.timestamp

        self.report_buckets(
            "\tUsers for %s: %s",
            "users.active.%s.%shr",
            self.bucket_users_by_recency(presences, presence_active_time),
        )

        # Also do stats for how many users have been reading the app.
        users_reading = UserActivity.objects.select_related().filter(
            query="/json/update_message_flags")
        self.report_buckets(
            "\tUsers reading for %s: %s",
            "users.reading.%s.%shr",
            self.bucket_users_by_recency(users_reading,
                                         lambda activity: activity.last_visit),
        )
							
								
								
									
										24
									
								
								analytics/management/commands/active_user_stats_by_day.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										24
									
								
								analytics/management/commands/active_user_stats_by_day.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,24 @@ | ||||
| from __future__ import absolute_import | ||||
|  | ||||
| import datetime | ||||
| import pytz | ||||
|  | ||||
| from optparse import make_option | ||||
| from django.core.management.base import BaseCommand | ||||
| from zerver.lib.statistics import activity_averages_during_day | ||||
|  | ||||
class Command(BaseCommand):
    # Management command: print user-activity averages for a single day.
    help = "Generate statistics on user activity for a given day."

    # Legacy optparse-style option declaration (pre-Django-1.8 style).
    option_list = BaseCommand.option_list + \
        (make_option('--date', default=None, action='store',
                     help="Day to query in format 2013-12-05.  Default is yesterday"),)

    def handle(self, *args, **options):
        # Default to yesterday when --date is omitted; otherwise parse the
        # YYYY-MM-DD string.
        if options["date"] is None:
            date = datetime.datetime.now() - datetime.timedelta(days=1)
        else:
            date = datetime.datetime.strptime(options["date"], "%Y-%m-%d")
        print "Activity data for", date
        print activity_averages_during_day(date)
        print "Please note that the total registered user count is a total for today"
							
								
								
									
										81
									
								
								analytics/management/commands/analyze_mit.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										81
									
								
								analytics/management/commands/analyze_mit.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,81 @@ | ||||
| from __future__ import absolute_import | ||||
|  | ||||
| from optparse import make_option | ||||
| from django.core.management.base import BaseCommand | ||||
| from zerver.models import Recipient, Message | ||||
| from zerver.lib.timestamp import timestamp_to_datetime | ||||
| import datetime | ||||
| import time | ||||
| import logging | ||||
|  | ||||
def compute_stats(log_level):
    """Compute and log statistics on mit.edu Zephyr-mirror usage.

    Looks at stream messages sent in the past week by mit.edu users
    (excluding known bot/daemon senders) and reports each user's message
    count, the percentage sent through Zulip rather than the zephyr
    mirror, and a per-client breakdown.

    log_level -- level set on the root logger; DEBUG also emits the
        per-user table, INFO emits only the summary lines.
    """
    logger = logging.getLogger()
    logger.setLevel(log_level)

    one_week_ago = timestamp_to_datetime(time.time()) - datetime.timedelta(weeks=1)
    mit_query = Message.objects.filter(sender__realm__domain="mit.edu",
                                       recipient__type=Recipient.STREAM,
                                       pub_date__gt=one_week_ago)
    # Exclude well-known bot/daemon senders so they don't skew the stats.
    for bot_sender_start in ["imap.", "rcmd.", "sys."]:
        mit_query = mit_query.exclude(sender__email__startswith=(bot_sender_start))
    # Filtering for "/" covers tabbott/extra@ and all the daemon/foo bots.
    mit_query = mit_query.exclude(sender__email__contains=("/"))
    mit_query = mit_query.exclude(sender__email__contains=("aim.com"))
    mit_query = mit_query.exclude(
        sender__email__in=["rss@mit.edu", "bash@mit.edu", "apache@mit.edu",
                           "bitcoin@mit.edu", "lp@mit.edu", "clocks@mit.edu",
                           "root@mit.edu", "nagios@mit.edu",
                           "www-data|local-realm@mit.edu"])
    # user_counts: {sender email: {client name: message count}}
    user_counts = {}
    for m in mit_query.select_related("sending_client", "sender"):
        email = m.sender.email
        user_counts.setdefault(email, {})
        user_counts[email].setdefault(m.sending_client.name, 0)
        user_counts[email][m.sending_client.name] += 1

    # Aggregate totals per client and per user across all senders.
    total_counts = {}
    total_user_counts = {}
    for email, counts in user_counts.items():
        total_user_counts.setdefault(email, 0)
        for client_name, count in counts.items():
            total_counts.setdefault(client_name, 0)
            total_counts[client_name] += count
            total_user_counts[email] += count

    logging.debug("%40s | %10s | %s" % ("User", "Messages", "Percentage Zulip"))
    # top_percents[N] accumulates the average "percent Zulip" over the top
    # N most-active users.
    top_percents = {}
    for size in [10, 25, 50, 100, 200, len(total_user_counts.keys())]:
        top_percents[size] = 0
    for i, email in enumerate(sorted(total_user_counts.keys(),
                                     key=lambda x: -total_user_counts[x])):
        # Anything not sent via the zephyr_mirror client counts as "Zulip".
        percent_zulip = round(100 - (user_counts[email].get("zephyr_mirror", 0)) * 100. /
                               total_user_counts[email], 1)
        for size in top_percents.keys():
            top_percents.setdefault(size, 0)
            if i < size:
                top_percents[size] += (percent_zulip * 1.0 / size)

        logging.debug("%40s | %10s | %s%%" % (email, total_user_counts[email],
                                              percent_zulip))

    logging.info("")
    for size in sorted(top_percents.keys()):
        logging.info("Top %6s | %s%%" % (size, round(top_percents[size], 1)))

    # Per-client share of all messages in the reporting window.
    grand_total = sum(total_counts.values())
    print grand_total
    logging.info("%15s | %s" % ("Client", "Percentage"))
    for client in total_counts.keys():
        logging.info("%15s | %s%%" % (client, round(100. * total_counts[client] / grand_total, 1)))
|  | ||||
class Command(BaseCommand):
    """Entry point that runs compute_stats at the requested verbosity."""

    help = "Compute statistics on MIT Zephyr usage."

    # Legacy optparse-style option declaration (pre-Django-1.8 style).
    option_list = BaseCommand.option_list + (
        make_option('--verbose', default=False, action='store_true'),
    )

    def handle(self, *args, **options):
        # --verbose upgrades the report from INFO to DEBUG detail.
        log_level = logging.DEBUG if options["verbose"] else logging.INFO
        compute_stats(log_level)
							
								
								
									
										57
									
								
								analytics/management/commands/analyze_user_activity.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										57
									
								
								analytics/management/commands/analyze_user_activity.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,57 @@ | ||||
| from __future__ import absolute_import | ||||
|  | ||||
| from zerver.lib.statistics import seconds_usage_between | ||||
|  | ||||
| from optparse import make_option | ||||
| from django.core.management.base import BaseCommand | ||||
| from zerver.models import UserProfile | ||||
| import datetime | ||||
| from django.utils.timezone import utc | ||||
|  | ||||
def analyze_activity(options):
    """Print per-user Zulip usage durations over a date range.

    options -- dict with keys "date" (YYYY-MM-DD start day, interpreted as
        UTC), "duration" (number of days to cover), and "realm" (optional
        realm domain filter; falsy means all realms).
    """
    day_start = datetime.datetime.strptime(options["date"], "%Y-%m-%d").replace(tzinfo=utc)
    day_end = day_start + datetime.timedelta(days=options["duration"])

    user_profile_query = UserProfile.objects.all()
    if options["realm"]:
        user_profile_query = user_profile_query.filter(realm__domain=options["realm"])

    print "Per-user online duration:\n"
    total_duration = datetime.timedelta(0)
    for user_profile in user_profile_query:
        duration = seconds_usage_between(user_profile, day_start, day_end)

        # Skip users with no recorded activity in the window.
        if duration == datetime.timedelta(0):
            continue

        total_duration += duration
        print "%-*s%s" % (37, user_profile.email, duration, )

    print "\nTotal Duration:                      %s" % (total_duration,)
    print "\nTotal Duration in minutes:           %s" % (total_duration.total_seconds() / 60.,)
    print "Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,)
|  | ||||
class Command(BaseCommand):
    help = """Report analytics of user activity on a per-user and realm basis.

This command aggregates user activity data that is collected by each user using Zulip. It attempts
to approximate how much each user has been using Zulip per day, measured by recording each 15 minute
period where some activity has occurred (mouse move or keyboard activity).

It will correctly not count server-initiated reloads in the activity statistics.

The duration flag can be used to control how many days to show usage duration for

Usage: python manage.py analyze_user_activity [--realm=zulip.com] [--date=2013-09-10] [--duration=1]

By default, if no date is selected 2013-09-10 is used. If no realm is provided, information
is shown for all realms"""

    # Legacy optparse-style option declaration (pre-Django-1.8 style).
    option_list = BaseCommand.option_list + (
        make_option('--realm', action='store'),
        make_option('--date', action='store', default="2013-09-06"),
        make_option('--duration', action='store', default=1, type=int, help="How many days to show usage information for"),
        )

    def handle(self, *args, **options):
        # All of the real work happens in analyze_activity.
        analyze_activity(options)
| @@ -1,90 +0,0 @@ | ||||
| import os | ||||
| import time | ||||
| from datetime import timedelta | ||||
| from typing import Any, Dict | ||||
|  | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.utils.timezone import now as timezone_now | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS, CountStat | ||||
| from analytics.models import installation_epoch | ||||
| from zerver.lib.timestamp import TimeZoneNotUTCException, floor_to_day, floor_to_hour, verify_UTC | ||||
| from zerver.models import Realm | ||||
|  | ||||
| states = { | ||||
|     0: "OK", | ||||
|     1: "WARNING", | ||||
|     2: "CRITICAL", | ||||
|     3: "UNKNOWN", | ||||
| } | ||||
|  | ||||
|  | ||||
class Command(BaseCommand):
    help = """Checks FillState table.

    Run as a cron job that runs every hour."""

    def handle(self, *args: Any, **options: Any) -> None:
        # Publish the Nagios state file atomically: write a temp file and
        # then rename it over the real path.
        result = self.get_fill_state()
        status = result["status"]
        message = result["message"]

        state_file_path = "/var/lib/nagios_state/check-analytics-state"
        state_file_tmp = state_file_path + "-tmp"

        with open(state_file_tmp, "w") as f:
            f.write(f"{int(time.time())}|{status}|{states[status]}|{message}\n")
        os.rename(state_file_tmp, state_file_path)

    def get_fill_state(self) -> Dict[str, Any]:
        """Return a Nagios-style {"status": code, "message": text} summary of
        how recently each CountStat was successfully filled."""
        if not Realm.objects.exists():
            return {"status": 0, "message": "No realms exist, so not checking FillState."}

        warning_unfilled_properties = []
        critical_unfilled_properties = []
        for property, stat in COUNT_STATS.items():
            last_fill = stat.last_successful_fill()
            if last_fill is None:
                # Never filled: measure staleness from installation time.
                last_fill = installation_epoch()
            try:
                verify_UTC(last_fill)
            except TimeZoneNotUTCException:
                return {"status": 2, "message": f"FillState not in UTC for {property}"}

            # Thresholds depend on how often this stat is expected to fill.
            if stat.frequency == CountStat.DAY:
                floor_function = floor_to_day
                warning_threshold = timedelta(hours=26)
                critical_threshold = timedelta(hours=50)
            else:  # CountStat.HOUR
                floor_function = floor_to_hour
                warning_threshold = timedelta(minutes=90)
                critical_threshold = timedelta(minutes=150)

            # A fill time off the hour/day boundary is immediately critical.
            if floor_function(last_fill) != last_fill:
                return {
                    "status": 2,
                    "message": f"FillState not on {stat.frequency} boundary for {property}",
                }

            staleness = timezone_now() - last_fill
            if staleness > critical_threshold:
                critical_unfilled_properties.append(property)
            elif staleness > warning_threshold:
                warning_unfilled_properties.append(property)

        if not critical_unfilled_properties and not warning_unfilled_properties:
            return {"status": 0, "message": "FillState looks fine."}
        if not critical_unfilled_properties:
            return {
                "status": 1,
                "message": "Missed filling {} once.".format(
                    ", ".join(warning_unfilled_properties),
                ),
            }
        return {
            "status": 2,
            "message": "Missed filling {} once. Missed filling {} at least twice.".format(
                ", ".join(warning_unfilled_properties),
                ", ".join(critical_unfilled_properties),
            ),
        }
| @@ -1,21 +0,0 @@ | ||||
| from argparse import ArgumentParser | ||||
| from typing import Any | ||||
|  | ||||
| from django.core.management.base import BaseCommand, CommandError | ||||
|  | ||||
| from analytics.lib.counts import do_drop_all_analytics_tables | ||||
|  | ||||
|  | ||||
class Command(BaseCommand):
    """Management command that wipes all analytics tables (guarded by --force)."""

    help = """Clear analytics tables."""

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument("--force", action="store_true", help="Clear analytics tables.")

    def handle(self, *args: Any, **options: Any) -> None:
        # Refuse to run unless the operator explicitly confirmed.
        if not options["force"]:
            raise CommandError(
                "Would delete all data from analytics tables (!); use --force to do so."
            )
        do_drop_all_analytics_tables()
| @@ -1,23 +0,0 @@ | ||||
| from argparse import ArgumentParser | ||||
| from typing import Any | ||||
|  | ||||
| from django.core.management.base import BaseCommand, CommandError | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS, do_drop_single_stat | ||||
|  | ||||
|  | ||||
class Command(BaseCommand):
    """Management command that drops the analytics rows for one CountStat property."""

    help = """Clear analytics tables."""

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument("--force", action="store_true", help="Actually do it.")
        parser.add_argument("--property", help="The property of the stat to be cleared.")

    def handle(self, *args: Any, **options: Any) -> None:
        # Validate the property before checking --force, so typos are
        # reported even on a dry run.
        stat_property = options["property"]
        if stat_property not in COUNT_STATS:
            raise CommandError(f"Invalid property: {stat_property}")
        if not options["force"]:
            raise CommandError("No action taken. Use --force.")

        do_drop_single_stat(stat_property)
							
								
								
									
										69
									
								
								analytics/management/commands/client_activity.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										69
									
								
								analytics/management/commands/client_activity.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,69 @@ | ||||
| from __future__ import absolute_import | ||||
|  | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.db.models import Count | ||||
|  | ||||
| from zerver.models import UserActivity, UserProfile, Realm, \ | ||||
|     get_realm, get_user_profile_by_email | ||||
|  | ||||
| import datetime | ||||
|  | ||||
class Command(BaseCommand):
    help = """Report rough client activity globally, for a realm, or for a user

Usage examples:

python manage.py client_activity
python manage.py client_activity zulip.com
python manage.py client_activity jesstess@zulip.com"""

    def compute_activity(self, user_activity_objects):
        # Report data from the past week.
        #
        # This is a rough report of client activity because we inconsistently
        # register activity from various clients; think of it as telling you
        # approximately how many people from a group have used a particular
        # client recently. For example, this might be useful to get a sense of
        # how popular different versions of a desktop client are.
        #
        # Importantly, this does NOT tell you anything about the relative
        # volumes of requests from clients.
        threshold = datetime.datetime.now() - datetime.timedelta(days=7)
        client_counts = user_activity_objects.filter(
            last_visit__gt=threshold).values("client__name").annotate(
            count=Count('client__name'))

        # Flatten the queryset into (count, client_name) pairs so the
        # report can be sorted by count.
        total = 0
        counts = []
        for client_type in client_counts:
            count = client_type["count"]
            client = client_type["client__name"]
            total += count
            counts.append((count, client))

        counts.sort()

        for count in counts:
            print "%25s %15d" % (count[1], count[0])
        print "Total:", total


    def handle(self, *args, **options):
        # Dispatch on the single optional positional argument: no args means
        # a global report; otherwise try it as a user email first, then fall
        # back to treating it as a realm domain.
        if len(args) == 0:
            # Report global activity.
            self.compute_activity(UserActivity.objects.all())
        elif len(args) == 1:
            try:
                # Report activity for a user.
                user_profile = get_user_profile_by_email(args[0])
                self.compute_activity(UserActivity.objects.filter(
                        user_profile=user_profile))
            except UserProfile.DoesNotExist:
                try:
                    # Report activity for a realm.
                    realm = get_realm(args[0])
                    self.compute_activity(UserActivity.objects.filter(
                            user_profile__realm=realm))
                except Realm.DoesNotExist:
                    print "Unknown user or domain %s" % (args[0],)
                    exit(1)
| @@ -1,304 +0,0 @@ | ||||
| from datetime import timedelta | ||||
| from typing import Any, Dict, List, Mapping, Type, Union | ||||
| from unittest import mock | ||||
|  | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.utils.timezone import now as timezone_now | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS, CountStat, do_drop_all_analytics_tables | ||||
| from analytics.lib.fixtures import generate_time_series_data | ||||
| from analytics.lib.time_utils import time_range | ||||
| from analytics.models import ( | ||||
|     BaseCount, | ||||
|     FillState, | ||||
|     InstallationCount, | ||||
|     RealmCount, | ||||
|     StreamCount, | ||||
|     UserCount, | ||||
| ) | ||||
| from zerver.lib.actions import STREAM_ASSIGNMENT_COLORS, do_change_user_role, do_create_realm | ||||
| from zerver.lib.create_user import create_user | ||||
| from zerver.lib.timestamp import floor_to_day | ||||
| from zerver.models import Client, Realm, Recipient, Stream, Subscription, UserProfile | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = """Populates analytics tables with randomly generated data.""" | ||||
|  | ||||
|     DAYS_OF_DATA = 100 | ||||
|     random_seed = 26 | ||||
|  | ||||
    def generate_fixture_data(
        self,
        stat: CountStat,
        business_hours_base: float,
        non_business_hours_base: float,
        growth: float,
        autocorrelation: float,
        spikiness: float,
        holiday_rate: float = 0,
        partial_sum: bool = False,
    ) -> List[int]:
        """Generate one synthetic time series for `stat`.

        Thin wrapper around generate_time_series_data that fixes the number
        of days to DAYS_OF_DATA and advances the shared seed so each stat
        gets a different (but reproducible) random series.
        """
        self.random_seed += 1
        return generate_time_series_data(
            days=self.DAYS_OF_DATA,
            business_hours_base=business_hours_base,
            non_business_hours_base=non_business_hours_base,
            growth=growth,
            autocorrelation=autocorrelation,
            spikiness=spikiness,
            holiday_rate=holiday_rate,
            frequency=stat.frequency,
            partial_sum=partial_sum,
            random_seed=self.random_seed,
        )
|  | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         # TODO: This should arguably only delete the objects | ||||
|         # associated with the "analytics" realm. | ||||
|         do_drop_all_analytics_tables() | ||||
|  | ||||
|         # This also deletes any objects with this realm as a foreign key | ||||
|         Realm.objects.filter(string_id="analytics").delete() | ||||
|  | ||||
|         # Because we just deleted a bunch of objects in the database | ||||
|         # directly (rather than deleting individual objects in Django, | ||||
|         # in which case our post_save hooks would have flushed the | ||||
|         # individual objects from memcached for us), we need to flush | ||||
|         # memcached in order to ensure deleted objects aren't still | ||||
|         # present in the memcached cache. | ||||
|         from zerver.apps import flush_cache | ||||
|  | ||||
|         flush_cache(None) | ||||
|  | ||||
|         installation_time = timezone_now() - timedelta(days=self.DAYS_OF_DATA) | ||||
|         last_end_time = floor_to_day(timezone_now()) | ||||
|         realm = do_create_realm( | ||||
|             string_id="analytics", name="Analytics", date_created=installation_time | ||||
|         ) | ||||
|  | ||||
|         with mock.patch("zerver.lib.create_user.timezone_now", return_value=installation_time): | ||||
|             shylock = create_user( | ||||
|                 "shylock@analytics.ds", | ||||
|                 "Shylock", | ||||
|                 realm, | ||||
|                 full_name="Shylock", | ||||
|                 role=UserProfile.ROLE_REALM_OWNER, | ||||
|             ) | ||||
|         do_change_user_role(shylock, UserProfile.ROLE_REALM_OWNER, acting_user=None) | ||||
|         stream = Stream.objects.create(name="all", realm=realm, date_created=installation_time) | ||||
|         recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM) | ||||
|         stream.recipient = recipient | ||||
|         stream.save(update_fields=["recipient"]) | ||||
|  | ||||
|         # Subscribe shylock to the stream to avoid invariant failures. | ||||
|         # TODO: This should use subscribe_users_to_streams from populate_db. | ||||
|         subs = [ | ||||
|             Subscription( | ||||
|                 recipient=recipient, | ||||
|                 user_profile=shylock, | ||||
|                 is_user_active=shylock.is_active, | ||||
|                 color=STREAM_ASSIGNMENT_COLORS[0], | ||||
|             ), | ||||
|         ] | ||||
|         Subscription.objects.bulk_create(subs) | ||||
|  | ||||
|         FixtureData = Mapping[Union[str, int, None], List[int]] | ||||
|  | ||||
|         def insert_fixture_data( | ||||
|             stat: CountStat, | ||||
|             fixture_data: FixtureData, | ||||
|             table: Type[BaseCount], | ||||
|         ) -> None: | ||||
|             end_times = time_range( | ||||
|                 last_end_time, last_end_time, stat.frequency, len(list(fixture_data.values())[0]) | ||||
|             ) | ||||
|             if table == InstallationCount: | ||||
|                 id_args: Dict[str, Any] = {} | ||||
|             if table == RealmCount: | ||||
|                 id_args = {"realm": realm} | ||||
|             if table == UserCount: | ||||
|                 id_args = {"realm": realm, "user": shylock} | ||||
|             if table == StreamCount: | ||||
|                 id_args = {"stream": stream, "realm": realm} | ||||
|  | ||||
|             for subgroup, values in fixture_data.items(): | ||||
|                 table.objects.bulk_create( | ||||
|                     table( | ||||
|                         property=stat.property, | ||||
|                         subgroup=subgroup, | ||||
|                         end_time=end_time, | ||||
|                         value=value, | ||||
|                         **id_args, | ||||
|                     ) | ||||
|                     for end_time, value in zip(end_times, values) | ||||
|                     if value != 0 | ||||
|                 ) | ||||
|  | ||||
|         stat = COUNT_STATS["1day_actives::day"] | ||||
|         realm_data: FixtureData = { | ||||
|             None: self.generate_fixture_data(stat, 0.08, 0.02, 3, 0.3, 6, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data: FixtureData = { | ||||
|             None: self.generate_fixture_data(stat, 0.8, 0.2, 4, 0.3, 6, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["7day_actives::day"] | ||||
|         realm_data = { | ||||
|             None: self.generate_fixture_data(stat, 0.2, 0.07, 3, 0.3, 6, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data = { | ||||
|             None: self.generate_fixture_data(stat, 2, 0.7, 4, 0.3, 6, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         realm_data = { | ||||
|             None: self.generate_fixture_data(stat, 0.8, 0.08, 3, 0.5, 3, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data = { | ||||
|             None: self.generate_fixture_data(stat, 1, 0.3, 4, 0.5, 3, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] | ||||
|         realm_data = { | ||||
|             "false": self.generate_fixture_data(stat, 1, 0.2, 3.5, 0.8, 2, partial_sum=True), | ||||
|             "true": self.generate_fixture_data(stat, 0.3, 0.05, 3, 0.3, 2, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data = { | ||||
|             "false": self.generate_fixture_data(stat, 3, 1, 4, 0.8, 2, partial_sum=True), | ||||
|             "true": self.generate_fixture_data(stat, 1, 0.4, 4, 0.8, 2, partial_sum=True), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["messages_sent:is_bot:hour"] | ||||
|         user_data: FixtureData = { | ||||
|             "false": self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8, holiday_rate=0.1), | ||||
|         } | ||||
|         insert_fixture_data(stat, user_data, UserCount) | ||||
|         realm_data = { | ||||
|             "false": self.generate_fixture_data(stat, 35, 15, 6, 0.6, 4), | ||||
|             "true": self.generate_fixture_data(stat, 15, 15, 3, 0.4, 2), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data = { | ||||
|             "false": self.generate_fixture_data(stat, 350, 150, 6, 0.6, 4), | ||||
|             "true": self.generate_fixture_data(stat, 150, 150, 3, 0.4, 2), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["messages_sent:message_type:day"] | ||||
|         user_data = { | ||||
|             "public_stream": self.generate_fixture_data(stat, 1.5, 1, 3, 0.6, 8), | ||||
|             "private_message": self.generate_fixture_data(stat, 0.5, 0.3, 1, 0.6, 8), | ||||
|             "huddle_message": self.generate_fixture_data(stat, 0.2, 0.2, 2, 0.6, 8), | ||||
|         } | ||||
|         insert_fixture_data(stat, user_data, UserCount) | ||||
|         realm_data = { | ||||
|             "public_stream": self.generate_fixture_data(stat, 30, 8, 5, 0.6, 4), | ||||
|             "private_stream": self.generate_fixture_data(stat, 7, 7, 5, 0.6, 4), | ||||
|             "private_message": self.generate_fixture_data(stat, 13, 5, 5, 0.6, 4), | ||||
|             "huddle_message": self.generate_fixture_data(stat, 6, 3, 3, 0.6, 4), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data = { | ||||
|             "public_stream": self.generate_fixture_data(stat, 300, 80, 5, 0.6, 4), | ||||
|             "private_stream": self.generate_fixture_data(stat, 70, 70, 5, 0.6, 4), | ||||
|             "private_message": self.generate_fixture_data(stat, 130, 50, 5, 0.6, 4), | ||||
|             "huddle_message": self.generate_fixture_data(stat, 60, 30, 3, 0.6, 4), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         website, created = Client.objects.get_or_create(name="website") | ||||
|         old_desktop, created = Client.objects.get_or_create(name="desktop app Linux 0.3.7") | ||||
|         android, created = Client.objects.get_or_create(name="ZulipAndroid") | ||||
|         iOS, created = Client.objects.get_or_create(name="ZulipiOS") | ||||
|         react_native, created = Client.objects.get_or_create(name="ZulipMobile") | ||||
|         API, created = Client.objects.get_or_create(name="API: Python") | ||||
|         zephyr_mirror, created = Client.objects.get_or_create(name="zephyr_mirror") | ||||
|         unused, created = Client.objects.get_or_create(name="unused") | ||||
|         long_webhook, created = Client.objects.get_or_create(name="ZulipLooooooooooongNameWebhook") | ||||
|  | ||||
|         stat = COUNT_STATS["messages_sent:client:day"] | ||||
|         user_data = { | ||||
|             website.id: self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8), | ||||
|             zephyr_mirror.id: self.generate_fixture_data(stat, 0, 0.3, 1.5, 0.6, 8), | ||||
|         } | ||||
|         insert_fixture_data(stat, user_data, UserCount) | ||||
|         realm_data = { | ||||
|             website.id: self.generate_fixture_data(stat, 30, 20, 5, 0.6, 3), | ||||
|             old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, 0.6, 3), | ||||
|             android.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3), | ||||
|             iOS.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3), | ||||
|             react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3), | ||||
|             API.id: self.generate_fixture_data(stat, 5, 5, 5, 0.6, 3), | ||||
|             zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, 0.6, 3), | ||||
|             unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0), | ||||
|             long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         installation_data = { | ||||
|             website.id: self.generate_fixture_data(stat, 300, 200, 5, 0.6, 3), | ||||
|             old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, 0.6, 3), | ||||
|             android.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3), | ||||
|             iOS.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3), | ||||
|             react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3), | ||||
|             API.id: self.generate_fixture_data(stat, 50, 50, 5, 0.6, 3), | ||||
|             zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, 0.6, 3), | ||||
|             unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0), | ||||
|             long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3), | ||||
|         } | ||||
|         insert_fixture_data(stat, installation_data, InstallationCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["messages_in_stream:is_bot:day"] | ||||
|         realm_data = { | ||||
|             "false": self.generate_fixture_data(stat, 30, 5, 6, 0.6, 4), | ||||
|             "true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3), | ||||
|         } | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         stream_data: Mapping[Union[int, str, None], List[int]] = { | ||||
|             "false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4), | ||||
|             "true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2), | ||||
|         } | ||||
|         insert_fixture_data(stat, stream_data, StreamCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         stat = COUNT_STATS["messages_read::hour"] | ||||
|         user_data = { | ||||
|             None: self.generate_fixture_data(stat, 7, 3, 2, 0.6, 8, holiday_rate=0.1), | ||||
|         } | ||||
|         insert_fixture_data(stat, user_data, UserCount) | ||||
|         realm_data = {None: self.generate_fixture_data(stat, 50, 35, 6, 0.6, 4)} | ||||
|         insert_fixture_data(stat, realm_data, RealmCount) | ||||
|         FillState.objects.create( | ||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE | ||||
|         ) | ||||
							
								
								
									
										148
									
								
								analytics/management/commands/realm_stats.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										148
									
								
								analytics/management/commands/realm_stats.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,148 @@ | ||||
| from __future__ import absolute_import | ||||
|  | ||||
| import datetime | ||||
| import pytz | ||||
|  | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.db.models import Count | ||||
| from zerver.models import UserProfile, Realm, Stream, Message, Recipient, UserActivity, \ | ||||
|     Subscription, UserMessage | ||||
|  | ||||
| MOBILE_CLIENT_LIST = ["Android", "ios"] | ||||
| HUMAN_CLIENT_LIST = MOBILE_CLIENT_LIST + ["website"] | ||||
|  | ||||
| human_messages = Message.objects.filter(sending_client__name__in=HUMAN_CLIENT_LIST) | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Generate statistics on realm activity." | ||||
|  | ||||
|     def active_users(self, realm): | ||||
|         # Has been active (on the website, for now) in the last 7 days. | ||||
|         activity_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=7) | ||||
|         return [activity.user_profile for activity in \ | ||||
|                     UserActivity.objects.filter(user_profile__realm=realm, | ||||
|                                                 user_profile__is_active=True, | ||||
|                                                 last_visit__gt=activity_cutoff, | ||||
|                                                 query="/json/update_pointer", | ||||
|                                                 client__name="website")] | ||||
|  | ||||
|     def messages_sent_by(self, user, days_ago): | ||||
|         sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago) | ||||
|         return human_messages.filter(sender=user, pub_date__gt=sent_time_cutoff).count() | ||||
|  | ||||
|     def total_messages(self, realm, days_ago): | ||||
|         sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago) | ||||
|         return Message.objects.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count() | ||||
|  | ||||
|     def human_messages(self, realm, days_ago): | ||||
|         sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago) | ||||
|         return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count() | ||||
|  | ||||
|     def api_messages(self, realm, days_ago): | ||||
|         return (self.total_messages(realm, days_ago) - self.human_messages(realm, days_ago)) | ||||
|  | ||||
|     def stream_messages(self, realm, days_ago): | ||||
|         sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago) | ||||
|         return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff, | ||||
|                                      recipient__type=Recipient.STREAM).count() | ||||
|  | ||||
|     def private_messages(self, realm, days_ago): | ||||
|         sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago) | ||||
|         return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude( | ||||
|             recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.HUDDLE).count() | ||||
|  | ||||
|     def group_private_messages(self, realm, days_ago): | ||||
|         sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago) | ||||
|         return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude( | ||||
|             recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.PERSONAL).count() | ||||
|  | ||||
|     def report_percentage(self, numerator, denominator, text): | ||||
|         if not denominator: | ||||
|             fraction = 0.0 | ||||
|         else: | ||||
|             fraction = numerator / float(denominator) | ||||
|         print "%.2f%% of" % (fraction * 100,), text | ||||
|  | ||||
|     def handle(self, *args, **options): | ||||
|         if args: | ||||
|             try: | ||||
|                 realms = [Realm.objects.get(domain=domain) for domain in args] | ||||
|             except Realm.DoesNotExist, e: | ||||
|                 print e | ||||
|                 exit(1) | ||||
|         else: | ||||
|             realms = Realm.objects.all() | ||||
|  | ||||
|         for realm in realms: | ||||
|             print realm.domain | ||||
|  | ||||
|             user_profiles = UserProfile.objects.filter(realm=realm, is_active=True) | ||||
|             active_users = self.active_users(realm) | ||||
|             num_active = len(active_users) | ||||
|  | ||||
|             print "%d active users (%d total)" % (num_active, len(user_profiles)) | ||||
|             streams = Stream.objects.filter(realm=realm).extra( | ||||
|                 tables=['zerver_subscription', 'zerver_recipient'], | ||||
|                 where=['zerver_subscription.recipient_id = zerver_recipient.id', | ||||
|                        'zerver_recipient.type = 2', | ||||
|                        'zerver_recipient.type_id = zerver_stream.id', | ||||
|                        'zerver_subscription.active = true']).annotate(count=Count("name")) | ||||
|             print "%d streams" % (streams.count(),) | ||||
|  | ||||
|             for days_ago in (1, 7, 30): | ||||
|                 print "In last %d days, users sent:" % (days_ago,) | ||||
|                 sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles] | ||||
|                 for quantity in sorted(sender_quantities, reverse=True): | ||||
|                     print quantity, | ||||
|                 print "" | ||||
|  | ||||
|                 print "%d stream messages" % (self.stream_messages(realm, days_ago),) | ||||
|                 print "%d one-on-one private messages" % (self.private_messages(realm, days_ago),) | ||||
|                 print "%d messages sent via the API" % (self.api_messages(realm, days_ago),) | ||||
|                 print "%d group private messages" % (self.group_private_messages(realm, days_ago),) | ||||
|  | ||||
|             num_notifications_enabled = len(filter(lambda x: x.enable_desktop_notifications == True, | ||||
|                                                    active_users)) | ||||
|             self.report_percentage(num_notifications_enabled, num_active, | ||||
|                                    "active users have desktop notifications enabled") | ||||
|  | ||||
|             num_enter_sends = len(filter(lambda x: x.enter_sends, active_users)) | ||||
|             self.report_percentage(num_enter_sends, num_active, | ||||
|                                    "active users have enter-sends") | ||||
|  | ||||
|             all_message_count = human_messages.filter(sender__realm=realm).count() | ||||
|             multi_paragraph_message_count = human_messages.filter( | ||||
|                 sender__realm=realm, content__contains="\n\n").count() | ||||
|             self.report_percentage(multi_paragraph_message_count, all_message_count, | ||||
|                                    "all messages are multi-paragraph") | ||||
|  | ||||
|             # Starred messages | ||||
|             starrers = UserMessage.objects.filter(user_profile__in=user_profiles, | ||||
|                                                   flags=UserMessage.flags.starred).values( | ||||
|                 "user_profile").annotate(count=Count("user_profile")) | ||||
|             print "%d users have starred %d messages" % ( | ||||
|                 len(starrers), sum([elt["count"] for elt in starrers])) | ||||
|  | ||||
|             active_user_subs = Subscription.objects.filter( | ||||
|                 user_profile__in=user_profiles, active=True) | ||||
|  | ||||
|             # Streams not in home view | ||||
|             non_home_view = active_user_subs.filter(in_home_view=False).values( | ||||
|                 "user_profile").annotate(count=Count("user_profile")) | ||||
|             print "%d users have %d streams not in home view" % ( | ||||
|                 len(non_home_view), sum([elt["count"] for elt in non_home_view])) | ||||
|  | ||||
|             # Code block markup | ||||
|             markup_messages = human_messages.filter( | ||||
|                 sender__realm=realm, content__contains="~~~").values( | ||||
|                 "sender").annotate(count=Count("sender")) | ||||
|             print "%d users have used code block markup on %s messages" % ( | ||||
|                 len(markup_messages), sum([elt["count"] for elt in markup_messages])) | ||||
|  | ||||
|             # Notifications for stream messages | ||||
|             notifications = active_user_subs.filter(notifications=True).values( | ||||
|                 "user_profile").annotate(count=Count("user_profile")) | ||||
|             print "%d users receive desktop notifications for %d streams" % ( | ||||
|                 len(notifications), sum([elt["count"] for elt in notifications])) | ||||
|  | ||||
|             print "" | ||||
							
								
								
									
										36
									
								
								analytics/management/commands/stream_stats.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										36
									
								
								analytics/management/commands/stream_stats.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,36 @@ | ||||
| from __future__ import absolute_import | ||||
|  | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.db.models import Q | ||||
| from zerver.models import Realm, Stream, Message, Subscription, Recipient | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Generate statistics on the streams for a realm." | ||||
|  | ||||
|     def handle(self, *args, **options): | ||||
|         if args: | ||||
|             try: | ||||
|                 realms = [Realm.objects.get(domain=domain) for domain in args] | ||||
|             except Realm.DoesNotExist, e: | ||||
|                 print e | ||||
|                 exit(1) | ||||
|         else: | ||||
|             realms = Realm.objects.all() | ||||
|  | ||||
|         for realm in realms: | ||||
|             print realm.domain | ||||
|             print "------------" | ||||
|             print "%25s %15s %10s" % ("stream", "subscribers", "messages") | ||||
|             streams = Stream.objects.filter(realm=realm).exclude(Q(name__istartswith="tutorial-")) | ||||
|             invite_only_count = 0 | ||||
|             for stream in streams: | ||||
|                 if stream.invite_only: | ||||
|                     invite_only_count += 1 | ||||
|                     continue | ||||
|                 print "%25s" % (stream.name,), | ||||
|                 recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id) | ||||
|                 print "%10d" % (len(Subscription.objects.filter(recipient=recipient, active=True)),), | ||||
|                 num_messages = len(Message.objects.filter(recipient=recipient)) | ||||
|                 print "%12d" % (num_messages,) | ||||
|             print "%d invite-only streams" % (invite_only_count,) | ||||
|             print "" | ||||
| @@ -1,96 +0,0 @@ | ||||
| import os | ||||
| import time | ||||
| from argparse import ArgumentParser | ||||
| from datetime import timezone | ||||
| from typing import Any, Dict | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.core.management.base import BaseCommand | ||||
| from django.utils.dateparse import parse_datetime | ||||
| from django.utils.timezone import now as timezone_now | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS, logger, process_count_stat | ||||
| from scripts.lib.zulip_tools import ENDC, WARNING | ||||
| from zerver.lib.remote_server import send_analytics_to_remote_server | ||||
| from zerver.lib.timestamp import floor_to_hour | ||||
| from zerver.models import Realm | ||||
|  | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = """Fills Analytics tables. | ||||
|  | ||||
|     Run as a cron job that runs every hour.""" | ||||
|  | ||||
|     def add_arguments(self, parser: ArgumentParser) -> None: | ||||
|         parser.add_argument( | ||||
|             "--time", | ||||
|             "-t", | ||||
|             help="Update stat tables from current state to " | ||||
|             "--time. Defaults to the current time.", | ||||
|             default=timezone_now().isoformat(), | ||||
|         ) | ||||
|         parser.add_argument("--utc", action="store_true", help="Interpret --time in UTC.") | ||||
|         parser.add_argument( | ||||
|             "--stat", "-s", help="CountStat to process. If omitted, all stats are processed." | ||||
|         ) | ||||
|         parser.add_argument( | ||||
|             "--verbose", action="store_true", help="Print timing information to stdout." | ||||
|         ) | ||||
|  | ||||
|     def handle(self, *args: Any, **options: Any) -> None: | ||||
|         try: | ||||
|             os.mkdir(settings.ANALYTICS_LOCK_DIR) | ||||
|         except OSError: | ||||
|             print( | ||||
|                 f"{WARNING}Analytics lock {settings.ANALYTICS_LOCK_DIR} is unavailable;" | ||||
|                 f" exiting.{ENDC}" | ||||
|             ) | ||||
|             return | ||||
|  | ||||
|         try: | ||||
|             self.run_update_analytics_counts(options) | ||||
|         finally: | ||||
|             os.rmdir(settings.ANALYTICS_LOCK_DIR) | ||||
|  | ||||
|     def run_update_analytics_counts(self, options: Dict[str, Any]) -> None: | ||||
|         # installation_epoch relies on there being at least one realm; we | ||||
|         # shouldn't run the analytics code if that condition isn't satisfied | ||||
|         if not Realm.objects.exists(): | ||||
|             logger.info("No realms, stopping update_analytics_counts") | ||||
|             return | ||||
|  | ||||
|         fill_to_time = parse_datetime(options["time"]) | ||||
|         assert fill_to_time is not None | ||||
|         if options["utc"]: | ||||
|             fill_to_time = fill_to_time.replace(tzinfo=timezone.utc) | ||||
|         if fill_to_time.tzinfo is None: | ||||
|             raise ValueError( | ||||
|                 "--time must be time-zone-aware. Maybe you meant to use the --utc option?" | ||||
|             ) | ||||
|  | ||||
|         fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc)) | ||||
|  | ||||
|         if options["stat"] is not None: | ||||
|             stats = [COUNT_STATS[options["stat"]]] | ||||
|         else: | ||||
|             stats = list(COUNT_STATS.values()) | ||||
|  | ||||
|         logger.info("Starting updating analytics counts through %s", fill_to_time) | ||||
|         if options["verbose"]: | ||||
|             start = time.time() | ||||
|             last = start | ||||
|  | ||||
|         for stat in stats: | ||||
|             process_count_stat(stat, fill_to_time) | ||||
|             if options["verbose"]: | ||||
|                 print(f"Updated {stat.property} in {time.time() - last:.3f}s") | ||||
|                 last = time.time() | ||||
|  | ||||
|         if options["verbose"]: | ||||
|             print( | ||||
|                 f"Finished updating analytics counts through {fill_to_time} in {time.time() - start:.3f}s" | ||||
|             ) | ||||
|         logger.info("Finished updating analytics counts through %s", fill_to_time) | ||||
|  | ||||
|         if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS: | ||||
|             send_analytics_to_remote_server() | ||||
							
								
								
									
										37
									
								
								analytics/management/commands/user_stats.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										37
									
								
								analytics/management/commands/user_stats.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,37 @@ | ||||
| from __future__ import absolute_import | ||||
|  | ||||
| import datetime | ||||
| import pytz | ||||
|  | ||||
| from django.core.management.base import BaseCommand | ||||
| from zerver.models import UserProfile, Realm, Stream, Message | ||||
|  | ||||
| class Command(BaseCommand): | ||||
|     help = "Generate statistics on user activity." | ||||
|  | ||||
|     def messages_sent_by(self, user, week): | ||||
|         start = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=(week + 1)*7) | ||||
|         end = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=week*7) | ||||
|         return Message.objects.filter(sender=user, pub_date__gt=start, pub_date__lte=end).count() | ||||
|  | ||||
|     def handle(self, *args, **options): | ||||
|         if args: | ||||
|             try: | ||||
|                 realms = [Realm.objects.get(domain=domain) for domain in args] | ||||
|             except Realm.DoesNotExist, e: | ||||
|                 print e | ||||
|                 exit(1) | ||||
|         else: | ||||
|             realms = Realm.objects.all() | ||||
|  | ||||
|         for realm in realms: | ||||
|             print realm.domain | ||||
|             user_profiles = UserProfile.objects.filter(realm=realm, is_active=True) | ||||
|             print "%d users" % (len(user_profiles),) | ||||
|             print "%d streams" % (len(Stream.objects.filter(realm=realm)),) | ||||
|  | ||||
|             for user_profile in user_profiles: | ||||
|                 print "%35s" % (user_profile.email,), | ||||
|                 for week in range(10): | ||||
|                     print "%5d" % (self.messages_sent_by(user_profile, week)), | ||||
|                 print "" | ||||
| @@ -1,209 +0,0 @@ | ||||
| import django.db.models.deletion | ||||
| from django.conf import settings | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("zerver", "0030_realm_org_type"), | ||||
|         migrations.swappable_dependency(settings.AUTH_USER_MODEL), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.CreateModel( | ||||
|             name="Anomaly", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("info", models.CharField(max_length=1000)), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="HuddleCount", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "huddle", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "user", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("property", models.CharField(max_length=40)), | ||||
|                 ("end_time", models.DateTimeField()), | ||||
|                 ("interval", models.CharField(max_length=20)), | ||||
|                 ("value", models.BigIntegerField()), | ||||
|                 ( | ||||
|                     "anomaly", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="analytics.Anomaly", | ||||
|                         null=True, | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="InstallationCount", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("property", models.CharField(max_length=40)), | ||||
|                 ("end_time", models.DateTimeField()), | ||||
|                 ("interval", models.CharField(max_length=20)), | ||||
|                 ("value", models.BigIntegerField()), | ||||
|                 ( | ||||
|                     "anomaly", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="analytics.Anomaly", | ||||
|                         null=True, | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="RealmCount", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "realm", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("property", models.CharField(max_length=40)), | ||||
|                 ("end_time", models.DateTimeField()), | ||||
|                 ("interval", models.CharField(max_length=20)), | ||||
|                 ("value", models.BigIntegerField()), | ||||
|                 ( | ||||
|                     "anomaly", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="analytics.Anomaly", | ||||
|                         null=True, | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="StreamCount", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "realm", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "stream", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Stream" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("property", models.CharField(max_length=40)), | ||||
|                 ("end_time", models.DateTimeField()), | ||||
|                 ("interval", models.CharField(max_length=20)), | ||||
|                 ("value", models.BigIntegerField()), | ||||
|                 ( | ||||
|                     "anomaly", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="analytics.Anomaly", | ||||
|                         null=True, | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|         migrations.CreateModel( | ||||
|             name="UserCount", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "realm", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm" | ||||
|                     ), | ||||
|                 ), | ||||
|                 ( | ||||
|                     "user", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("property", models.CharField(max_length=40)), | ||||
|                 ("end_time", models.DateTimeField()), | ||||
|                 ("interval", models.CharField(max_length=20)), | ||||
|                 ("value", models.BigIntegerField()), | ||||
|                 ( | ||||
|                     "anomaly", | ||||
|                     models.ForeignKey( | ||||
|                         on_delete=django.db.models.deletion.CASCADE, | ||||
|                         to="analytics.Anomaly", | ||||
|                         null=True, | ||||
|                     ), | ||||
|                 ), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="usercount", | ||||
|             unique_together={("user", "property", "end_time", "interval")}, | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="streamcount", | ||||
|             unique_together={("stream", "property", "end_time", "interval")}, | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="realmcount", | ||||
|             unique_together={("realm", "property", "end_time", "interval")}, | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="installationcount", | ||||
|             unique_together={("property", "end_time", "interval")}, | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="huddlecount", | ||||
|             unique_together={("huddle", "property", "end_time", "interval")}, | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,30 +0,0 @@ | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0001_initial"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="huddlecount", | ||||
|             unique_together=set(), | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="huddlecount", | ||||
|             name="anomaly", | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="huddlecount", | ||||
|             name="huddle", | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="huddlecount", | ||||
|             name="user", | ||||
|         ), | ||||
|         migrations.DeleteModel( | ||||
|             name="HuddleCount", | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,27 +0,0 @@ | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0002_remove_huddlecount"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.CreateModel( | ||||
|             name="FillState", | ||||
|             fields=[ | ||||
|                 ( | ||||
|                     "id", | ||||
|                     models.AutoField( | ||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True | ||||
|                     ), | ||||
|                 ), | ||||
|                 ("property", models.CharField(unique=True, max_length=40)), | ||||
|                 ("end_time", models.DateTimeField()), | ||||
|                 ("state", models.PositiveSmallIntegerField()), | ||||
|                 ("last_modified", models.DateTimeField(auto_now=True)), | ||||
|             ], | ||||
|             bases=(models.Model,), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,31 +0,0 @@ | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0003_fillstate"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AddField( | ||||
|             model_name="installationcount", | ||||
|             name="subgroup", | ||||
|             field=models.CharField(max_length=16, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="realmcount", | ||||
|             name="subgroup", | ||||
|             field=models.CharField(max_length=16, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="streamcount", | ||||
|             name="subgroup", | ||||
|             field=models.CharField(max_length=16, null=True), | ||||
|         ), | ||||
|         migrations.AddField( | ||||
|             model_name="usercount", | ||||
|             name="subgroup", | ||||
|             field=models.CharField(max_length=16, null=True), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,51 +0,0 @@ | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0004_add_subgroup"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name="installationcount", | ||||
|             name="interval", | ||||
|             field=models.CharField(max_length=8), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="installationcount", | ||||
|             name="property", | ||||
|             field=models.CharField(max_length=32), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="realmcount", | ||||
|             name="interval", | ||||
|             field=models.CharField(max_length=8), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="realmcount", | ||||
|             name="property", | ||||
|             field=models.CharField(max_length=32), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="streamcount", | ||||
|             name="interval", | ||||
|             field=models.CharField(max_length=8), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="streamcount", | ||||
|             name="property", | ||||
|             field=models.CharField(max_length=32), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="usercount", | ||||
|             name="interval", | ||||
|             field=models.CharField(max_length=8), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="usercount", | ||||
|             name="property", | ||||
|             field=models.CharField(max_length=32), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,27 +0,0 @@ | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0005_alter_field_size"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="installationcount", | ||||
|             unique_together={("property", "subgroup", "end_time", "interval")}, | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="realmcount", | ||||
|             unique_together={("realm", "property", "subgroup", "end_time", "interval")}, | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="streamcount", | ||||
|             unique_together={("stream", "property", "subgroup", "end_time", "interval")}, | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="usercount", | ||||
|             unique_together={("user", "property", "subgroup", "end_time", "interval")}, | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,44 +0,0 @@ | ||||
| # Generated by Django 1.10.4 on 2017-01-16 20:50 | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0006_add_subgroup_to_unique_constraints"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="installationcount", | ||||
|             unique_together={("property", "subgroup", "end_time")}, | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="installationcount", | ||||
|             name="interval", | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="realmcount", | ||||
|             unique_together={("realm", "property", "subgroup", "end_time")}, | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="realmcount", | ||||
|             name="interval", | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="streamcount", | ||||
|             unique_together={("stream", "property", "subgroup", "end_time")}, | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="streamcount", | ||||
|             name="interval", | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="usercount", | ||||
|             unique_together={("user", "property", "subgroup", "end_time")}, | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="usercount", | ||||
|             name="interval", | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,25 +0,0 @@ | ||||
| # Generated by Django 1.10.5 on 2017-02-01 22:28 | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("zerver", "0050_userprofile_avatar_version"), | ||||
|         ("analytics", "0007_remove_interval"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterIndexTogether( | ||||
|             name="realmcount", | ||||
|             index_together={("property", "end_time")}, | ||||
|         ), | ||||
|         migrations.AlterIndexTogether( | ||||
|             name="streamcount", | ||||
|             index_together={("property", "realm", "end_time")}, | ||||
|         ), | ||||
|         migrations.AlterIndexTogether( | ||||
|             name="usercount", | ||||
|             index_together={("property", "realm", "end_time")}, | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,31 +0,0 @@ | ||||
| from django.db import migrations | ||||
| from django.db.backends.postgresql.schema import DatabaseSchemaEditor | ||||
| from django.db.migrations.state import StateApps | ||||
|  | ||||
|  | ||||
| def delete_messages_sent_to_stream_stat( | ||||
|     apps: StateApps, schema_editor: DatabaseSchemaEditor | ||||
| ) -> None: | ||||
|     UserCount = apps.get_model("analytics", "UserCount") | ||||
|     StreamCount = apps.get_model("analytics", "StreamCount") | ||||
|     RealmCount = apps.get_model("analytics", "RealmCount") | ||||
|     InstallationCount = apps.get_model("analytics", "InstallationCount") | ||||
|     FillState = apps.get_model("analytics", "FillState") | ||||
|  | ||||
|     property = "messages_sent_to_stream:is_bot" | ||||
|     UserCount.objects.filter(property=property).delete() | ||||
|     StreamCount.objects.filter(property=property).delete() | ||||
|     RealmCount.objects.filter(property=property).delete() | ||||
|     InstallationCount.objects.filter(property=property).delete() | ||||
|     FillState.objects.filter(property=property).delete() | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0008_add_count_indexes"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunPython(delete_messages_sent_to_stream_stat), | ||||
|     ] | ||||
| @@ -1,29 +0,0 @@ | ||||
| from django.db import migrations | ||||
| from django.db.backends.postgresql.schema import DatabaseSchemaEditor | ||||
| from django.db.migrations.state import StateApps | ||||
|  | ||||
|  | ||||
| def clear_message_sent_by_message_type_values( | ||||
|     apps: StateApps, schema_editor: DatabaseSchemaEditor | ||||
| ) -> None: | ||||
|     UserCount = apps.get_model("analytics", "UserCount") | ||||
|     StreamCount = apps.get_model("analytics", "StreamCount") | ||||
|     RealmCount = apps.get_model("analytics", "RealmCount") | ||||
|     InstallationCount = apps.get_model("analytics", "InstallationCount") | ||||
|     FillState = apps.get_model("analytics", "FillState") | ||||
|  | ||||
|     property = "messages_sent:message_type:day" | ||||
|     UserCount.objects.filter(property=property).delete() | ||||
|     StreamCount.objects.filter(property=property).delete() | ||||
|     RealmCount.objects.filter(property=property).delete() | ||||
|     InstallationCount.objects.filter(property=property).delete() | ||||
|     FillState.objects.filter(property=property).delete() | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [("analytics", "0009_remove_messages_to_stream_stat")] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunPython(clear_message_sent_by_message_type_values), | ||||
|     ] | ||||
| @@ -1,28 +0,0 @@ | ||||
| from django.db import migrations | ||||
| from django.db.backends.postgresql.schema import DatabaseSchemaEditor | ||||
| from django.db.migrations.state import StateApps | ||||
|  | ||||
|  | ||||
| def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: | ||||
|     UserCount = apps.get_model("analytics", "UserCount") | ||||
|     StreamCount = apps.get_model("analytics", "StreamCount") | ||||
|     RealmCount = apps.get_model("analytics", "RealmCount") | ||||
|     InstallationCount = apps.get_model("analytics", "InstallationCount") | ||||
|     FillState = apps.get_model("analytics", "FillState") | ||||
|  | ||||
|     UserCount.objects.all().delete() | ||||
|     StreamCount.objects.all().delete() | ||||
|     RealmCount.objects.all().delete() | ||||
|     InstallationCount.objects.all().delete() | ||||
|     FillState.objects.all().delete() | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0010_clear_messages_sent_values"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunPython(clear_analytics_tables), | ||||
|     ] | ||||
| @@ -1,42 +0,0 @@ | ||||
| # Generated by Django 1.11.6 on 2018-01-29 08:14 | ||||
|  | ||||
| import django.db.models.deletion | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0011_clear_analytics_tables"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterField( | ||||
|             model_name="installationcount", | ||||
|             name="anomaly", | ||||
|             field=models.ForeignKey( | ||||
|                 null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly" | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="realmcount", | ||||
|             name="anomaly", | ||||
|             field=models.ForeignKey( | ||||
|                 null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly" | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="streamcount", | ||||
|             name="anomaly", | ||||
|             field=models.ForeignKey( | ||||
|                 null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly" | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AlterField( | ||||
|             model_name="usercount", | ||||
|             name="anomaly", | ||||
|             field=models.ForeignKey( | ||||
|                 null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly" | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,32 +0,0 @@ | ||||
| # Generated by Django 1.11.18 on 2019-02-02 02:47 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0012_add_on_delete"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name="installationcount", | ||||
|             name="anomaly", | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="realmcount", | ||||
|             name="anomaly", | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="streamcount", | ||||
|             name="anomaly", | ||||
|         ), | ||||
|         migrations.RemoveField( | ||||
|             model_name="usercount", | ||||
|             name="anomaly", | ||||
|         ), | ||||
|         migrations.DeleteModel( | ||||
|             name="Anomaly", | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,17 +0,0 @@ | ||||
| # Generated by Django 1.11.26 on 2020-01-27 04:32 | ||||
|  | ||||
| from django.db import migrations | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0013_remove_anomaly"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RemoveField( | ||||
|             model_name="fillstate", | ||||
|             name="last_modified", | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,65 +0,0 @@ | ||||
| from django.db import migrations | ||||
| from django.db.backends.postgresql.schema import DatabaseSchemaEditor | ||||
| from django.db.migrations.state import StateApps | ||||
| from django.db.models import Count, Sum | ||||
|  | ||||
|  | ||||
| def clear_duplicate_counts(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: | ||||
|     """This is a preparatory migration for our Analytics tables. | ||||
|  | ||||
|     The backstory is that Django's unique_together indexes do not properly | ||||
|     handle the subgroup=None corner case (allowing duplicate rows that have a | ||||
|     subgroup of None), which meant that in race conditions, rather than updating | ||||
|     an existing row for the property/(realm, stream, user)/time with subgroup=None, Django would | ||||
|     create a duplicate row. | ||||
|  | ||||
|     In the next migration, we'll add a proper constraint to fix this bug, but | ||||
|     we need to fix any existing problematic rows before we can add that constraint. | ||||
|  | ||||
|     We fix this in an appropriate fashion for each type of CountStat object; mainly | ||||
|     this means deleting the extra rows, but for LoggingCountStat objects, we need to | ||||
|     additionally combine the sums. | ||||
|     """ | ||||
|     count_tables = dict( | ||||
|         realm=apps.get_model("analytics", "RealmCount"), | ||||
|         user=apps.get_model("analytics", "UserCount"), | ||||
|         stream=apps.get_model("analytics", "StreamCount"), | ||||
|         installation=apps.get_model("analytics", "InstallationCount"), | ||||
|     ) | ||||
|  | ||||
|     for name, count_table in count_tables.items(): | ||||
|         value = [name, "property", "end_time"] | ||||
|         if name == "installation": | ||||
|             value = ["property", "end_time"] | ||||
|         counts = ( | ||||
|             count_table.objects.filter(subgroup=None) | ||||
|             .values(*value) | ||||
|             .annotate(Count("id"), Sum("value")) | ||||
|             .filter(id__count__gt=1) | ||||
|         ) | ||||
|  | ||||
|         for count in counts: | ||||
|             count.pop("id__count") | ||||
|             total_value = count.pop("value__sum") | ||||
|             duplicate_counts = list(count_table.objects.filter(**count)) | ||||
|             first_count = duplicate_counts[0] | ||||
|             if count["property"] in ["invites_sent::day", "active_users_log:is_bot:day"]: | ||||
|                 # For LoggingCountStat objects, the right fix is to combine the totals; | ||||
|                 # for other CountStat objects, we expect the duplicates to have the same value. | ||||
|                 # And so all we need to do is delete them. | ||||
|                 first_count.value = total_value | ||||
|                 first_count.save() | ||||
|             to_cleanup = duplicate_counts[1:] | ||||
|             for duplicate_count in to_cleanup: | ||||
|                 duplicate_count.delete() | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0014_remove_fillstate_last_modified"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.RunPython(clear_duplicate_counts, reverse_code=migrations.RunPython.noop), | ||||
|     ] | ||||
| @@ -1,93 +0,0 @@ | ||||
| # Generated by Django 2.2.10 on 2020-02-29 19:40 | ||||
|  | ||||
| from django.db import migrations, models | ||||
|  | ||||
|  | ||||
| class Migration(migrations.Migration): | ||||
|  | ||||
|     dependencies = [ | ||||
|         ("analytics", "0015_clear_duplicate_counts"), | ||||
|     ] | ||||
|  | ||||
|     operations = [ | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="installationcount", | ||||
|             unique_together=set(), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="realmcount", | ||||
|             unique_together=set(), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="streamcount", | ||||
|             unique_together=set(), | ||||
|         ), | ||||
|         migrations.AlterUniqueTogether( | ||||
|             name="usercount", | ||||
|             unique_together=set(), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="installationcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("property", "subgroup", "end_time"), | ||||
|                 name="unique_installation_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="installationcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("property", "end_time"), | ||||
|                 name="unique_installation_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="realmcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("realm", "property", "subgroup", "end_time"), | ||||
|                 name="unique_realm_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="realmcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("realm", "property", "end_time"), | ||||
|                 name="unique_realm_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="streamcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("stream", "property", "subgroup", "end_time"), | ||||
|                 name="unique_stream_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="streamcount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("stream", "property", "end_time"), | ||||
|                 name="unique_stream_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="usercount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=False), | ||||
|                 fields=("user", "property", "subgroup", "end_time"), | ||||
|                 name="unique_user_count", | ||||
|             ), | ||||
|         ), | ||||
|         migrations.AddConstraint( | ||||
|             model_name="usercount", | ||||
|             constraint=models.UniqueConstraint( | ||||
|                 condition=models.Q(subgroup__isnull=True), | ||||
|                 fields=("user", "property", "end_time"), | ||||
|                 name="unique_user_count_null_subgroup", | ||||
|             ), | ||||
|         ), | ||||
|     ] | ||||
| @@ -1,140 +0,0 @@ | ||||
| import datetime | ||||
| from typing import Optional | ||||
|  | ||||
| from django.db import models | ||||
| from django.db.models import Q, UniqueConstraint | ||||
|  | ||||
| from zerver.lib.timestamp import floor_to_day | ||||
| from zerver.models import Realm, Stream, UserProfile | ||||
|  | ||||
|  | ||||
| class FillState(models.Model): | ||||
|     property: str = models.CharField(max_length=40, unique=True) | ||||
|     end_time: datetime.datetime = models.DateTimeField() | ||||
|  | ||||
|     # Valid states are {DONE, STARTED} | ||||
|     DONE = 1 | ||||
|     STARTED = 2 | ||||
|     state: int = models.PositiveSmallIntegerField() | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"<FillState: {self.property} {self.end_time} {self.state}>" | ||||
|  | ||||
|  | ||||
| # The earliest/starting end_time in FillState | ||||
| # We assume there is at least one realm | ||||
| def installation_epoch() -> datetime.datetime: | ||||
|     earliest_realm_creation = Realm.objects.aggregate(models.Min("date_created"))[ | ||||
|         "date_created__min" | ||||
|     ] | ||||
|     return floor_to_day(earliest_realm_creation) | ||||
|  | ||||
|  | ||||
| class BaseCount(models.Model): | ||||
|     # Note: When inheriting from BaseCount, you may want to rearrange | ||||
|     # the order of the columns in the migration to make sure they | ||||
|     # match how you'd like the table to be arranged. | ||||
|     property: str = models.CharField(max_length=32) | ||||
|     subgroup: Optional[str] = models.CharField(max_length=16, null=True) | ||||
|     end_time: datetime.datetime = models.DateTimeField() | ||||
|     value: int = models.BigIntegerField() | ||||
|  | ||||
|     class Meta: | ||||
|         abstract = True | ||||
|  | ||||
|  | ||||
| class InstallationCount(BaseCount): | ||||
|     class Meta: | ||||
|         # Handles invalid duplicate InstallationCount data | ||||
|         constraints = [ | ||||
|             UniqueConstraint( | ||||
|                 fields=["property", "subgroup", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=False), | ||||
|                 name="unique_installation_count", | ||||
|             ), | ||||
|             UniqueConstraint( | ||||
|                 fields=["property", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=True), | ||||
|                 name="unique_installation_count_null_subgroup", | ||||
|             ), | ||||
|         ] | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"<InstallationCount: {self.property} {self.subgroup} {self.value}>" | ||||
|  | ||||
|  | ||||
| class RealmCount(BaseCount): | ||||
|     realm = models.ForeignKey(Realm, on_delete=models.CASCADE) | ||||
|  | ||||
|     class Meta: | ||||
|         # Handles invalid duplicate RealmCount data | ||||
|         constraints = [ | ||||
|             UniqueConstraint( | ||||
|                 fields=["realm", "property", "subgroup", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=False), | ||||
|                 name="unique_realm_count", | ||||
|             ), | ||||
|             UniqueConstraint( | ||||
|                 fields=["realm", "property", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=True), | ||||
|                 name="unique_realm_count_null_subgroup", | ||||
|             ), | ||||
|         ] | ||||
|         index_together = ["property", "end_time"] | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"<RealmCount: {self.realm} {self.property} {self.subgroup} {self.value}>" | ||||
|  | ||||
|  | ||||
| class UserCount(BaseCount): | ||||
|     user = models.ForeignKey(UserProfile, on_delete=models.CASCADE) | ||||
|     realm = models.ForeignKey(Realm, on_delete=models.CASCADE) | ||||
|  | ||||
|     class Meta: | ||||
|         # Handles invalid duplicate UserCount data | ||||
|         constraints = [ | ||||
|             UniqueConstraint( | ||||
|                 fields=["user", "property", "subgroup", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=False), | ||||
|                 name="unique_user_count", | ||||
|             ), | ||||
|             UniqueConstraint( | ||||
|                 fields=["user", "property", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=True), | ||||
|                 name="unique_user_count_null_subgroup", | ||||
|             ), | ||||
|         ] | ||||
|         # This index dramatically improves the performance of | ||||
|         # aggregating from users to realms | ||||
|         index_together = ["property", "realm", "end_time"] | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return f"<UserCount: {self.user} {self.property} {self.subgroup} {self.value}>" | ||||
|  | ||||
|  | ||||
| class StreamCount(BaseCount): | ||||
|     stream = models.ForeignKey(Stream, on_delete=models.CASCADE) | ||||
|     realm = models.ForeignKey(Realm, on_delete=models.CASCADE) | ||||
|  | ||||
|     class Meta: | ||||
|         # Handles invalid duplicate StreamCount data | ||||
|         constraints = [ | ||||
|             UniqueConstraint( | ||||
|                 fields=["stream", "property", "subgroup", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=False), | ||||
|                 name="unique_stream_count", | ||||
|             ), | ||||
|             UniqueConstraint( | ||||
|                 fields=["stream", "property", "end_time"], | ||||
|                 condition=Q(subgroup__isnull=True), | ||||
|                 name="unique_stream_count_null_subgroup", | ||||
|             ), | ||||
|         ] | ||||
|         # This index dramatically improves the performance of | ||||
|         # aggregating from streams to realms | ||||
|         index_together = ["property", "realm", "end_time"] | ||||
|  | ||||
|     def __str__(self) -> str: | ||||
|         return ( | ||||
|             f"<StreamCount: {self.stream} {self.property} {self.subgroup} {self.value} {self.id}>" | ||||
|         ) | ||||
| @@ -1,55 +0,0 @@ | ||||
| from unittest import mock | ||||
|  | ||||
| from django.utils.timezone import now as timezone_now | ||||
|  | ||||
| from zerver.lib.test_classes import ZulipTestCase | ||||
| from zerver.lib.test_helpers import queries_captured | ||||
| from zerver.models import Client, UserActivity, UserProfile, flush_per_request_caches | ||||
|  | ||||
|  | ||||
| class ActivityTest(ZulipTestCase): | ||||
|     @mock.patch("stripe.Customer.list", return_value=[]) | ||||
|     def test_activity(self, unused_mock: mock.Mock) -> None: | ||||
|         self.login("hamlet") | ||||
|         client, _ = Client.objects.get_or_create(name="website") | ||||
|         query = "/json/messages/flags" | ||||
|         last_visit = timezone_now() | ||||
|         count = 150 | ||||
|         for activity_user_profile in UserProfile.objects.all(): | ||||
|             UserActivity.objects.get_or_create( | ||||
|                 user_profile=activity_user_profile, | ||||
|                 client=client, | ||||
|                 query=query, | ||||
|                 count=count, | ||||
|                 last_visit=last_visit, | ||||
|             ) | ||||
|  | ||||
|         # Fails when not staff | ||||
|         result = self.client_get("/activity") | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         user_profile = self.example_user("hamlet") | ||||
|         user_profile.is_staff = True | ||||
|         user_profile.save(update_fields=["is_staff"]) | ||||
|  | ||||
|         flush_per_request_caches() | ||||
|         with queries_captured() as queries: | ||||
|             result = self.client_get("/activity") | ||||
|             self.assertEqual(result.status_code, 200) | ||||
|  | ||||
|         self.assert_length(queries, 19) | ||||
|  | ||||
|         flush_per_request_caches() | ||||
|         with queries_captured() as queries: | ||||
|             result = self.client_get("/realm_activity/zulip/") | ||||
|             self.assertEqual(result.status_code, 200) | ||||
|  | ||||
|         self.assert_length(queries, 8) | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         flush_per_request_caches() | ||||
|         with queries_captured() as queries: | ||||
|             result = self.client_get(f"/user_activity/{iago.id}/") | ||||
|             self.assertEqual(result.status_code, 200) | ||||
|  | ||||
|         self.assert_length(queries, 5) | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,40 +0,0 @@ | ||||
| from analytics.lib.counts import CountStat | ||||
| from analytics.lib.fixtures import generate_time_series_data | ||||
| from zerver.lib.test_classes import ZulipTestCase | ||||
|  | ||||
|  | ||||
| # A very light test suite; the code being tested is not run in production. | ||||
| class TestFixtures(ZulipTestCase): | ||||
|     def test_deterministic_settings(self) -> None: | ||||
|         # test basic business_hour / non_business_hour calculation | ||||
|         # test we get an array of the right length with frequency=CountStat.DAY | ||||
|         data = generate_time_series_data( | ||||
|             days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0 | ||||
|         ) | ||||
|         self.assertEqual(data, [400, 400, 400, 400, 400, 360, 360]) | ||||
|  | ||||
|         data = generate_time_series_data( | ||||
|             days=1, | ||||
|             business_hours_base=2000, | ||||
|             non_business_hours_base=1500, | ||||
|             growth=2, | ||||
|             spikiness=0, | ||||
|             frequency=CountStat.HOUR, | ||||
|         ) | ||||
|         # test we get an array of the right length with frequency=CountStat.HOUR | ||||
|         self.assert_length(data, 24) | ||||
|         # test that growth doesn't affect the first data point | ||||
|         self.assertEqual(data[0], 2000) | ||||
|         # test that the last data point is growth times what it otherwise would be | ||||
|         self.assertEqual(data[-1], 1500 * 2) | ||||
|  | ||||
|         # test autocorrelation == 1, since that's the easiest value to test | ||||
|         data = generate_time_series_data( | ||||
|             days=1, | ||||
|             business_hours_base=2000, | ||||
|             non_business_hours_base=2000, | ||||
|             autocorrelation=1, | ||||
|             frequency=CountStat.HOUR, | ||||
|         ) | ||||
|         self.assertEqual(data[0], data[1]) | ||||
|         self.assertEqual(data[0], data[-1]) | ||||
| @@ -1,639 +0,0 @@ | ||||
| from datetime import datetime, timedelta, timezone | ||||
| from typing import List, Optional | ||||
|  | ||||
| from django.utils.timezone import now as timezone_now | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS, CountStat | ||||
| from analytics.lib.time_utils import time_range | ||||
| from analytics.models import FillState, RealmCount, UserCount | ||||
| from analytics.views.stats import rewrite_client_arrays, sort_by_totals, sort_client_labels | ||||
| from zerver.lib.test_classes import ZulipTestCase | ||||
| from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, datetime_to_timestamp | ||||
| from zerver.models import Client, get_realm | ||||
|  | ||||
|  | ||||
| class TestStatsEndpoint(ZulipTestCase): | ||||
|     def test_stats(self) -> None: | ||||
|         self.user = self.example_user("hamlet") | ||||
|         self.login_user(self.user) | ||||
|         result = self.client_get("/stats") | ||||
|         self.assertEqual(result.status_code, 200) | ||||
|         # Check that we get something back | ||||
|         self.assert_in_response("Zulip analytics for", result) | ||||
|  | ||||
|     def test_guest_user_cant_access_stats(self) -> None: | ||||
|         self.user = self.example_user("polonius") | ||||
|         self.login_user(self.user) | ||||
|         result = self.client_get("/stats") | ||||
|         self.assert_json_error(result, "Not allowed for guest users", 400) | ||||
|  | ||||
|         result = self.client_get("/json/analytics/chart_data") | ||||
|         self.assert_json_error(result, "Not allowed for guest users", 400) | ||||
|  | ||||
|     def test_stats_for_realm(self) -> None: | ||||
|         user = self.example_user("hamlet") | ||||
|         self.login_user(user) | ||||
|  | ||||
|         result = self.client_get("/stats/realm/zulip/") | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         result = self.client_get("/stats/realm/not_existing_realm/") | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         user = self.example_user("hamlet") | ||||
|         user.is_staff = True | ||||
|         user.save(update_fields=["is_staff"]) | ||||
|  | ||||
|         result = self.client_get("/stats/realm/not_existing_realm/") | ||||
|         self.assertEqual(result.status_code, 404) | ||||
|  | ||||
|         result = self.client_get("/stats/realm/zulip/") | ||||
|         self.assertEqual(result.status_code, 200) | ||||
|         self.assert_in_response("Zulip analytics for", result) | ||||
|  | ||||
|     def test_stats_for_installation(self) -> None: | ||||
|         user = self.example_user("hamlet") | ||||
|         self.login_user(user) | ||||
|  | ||||
|         result = self.client_get("/stats/installation") | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|  | ||||
|         user = self.example_user("hamlet") | ||||
|         user.is_staff = True | ||||
|         user.save(update_fields=["is_staff"]) | ||||
|  | ||||
|         result = self.client_get("/stats/installation") | ||||
|         self.assertEqual(result.status_code, 200) | ||||
|         self.assert_in_response("Zulip analytics for", result) | ||||
|  | ||||
|  | ||||
| class TestGetChartData(ZulipTestCase): | ||||
|     def setUp(self) -> None: | ||||
|         super().setUp() | ||||
|         self.realm = get_realm("zulip") | ||||
|         self.user = self.example_user("hamlet") | ||||
|         self.login_user(self.user) | ||||
|         self.end_times_hour = [ | ||||
|             ceiling_to_hour(self.realm.date_created) + timedelta(hours=i) for i in range(4) | ||||
|         ] | ||||
|         self.end_times_day = [ | ||||
|             ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(4) | ||||
|         ] | ||||
|  | ||||
|     def data(self, i: int) -> List[int]: | ||||
|         return [0, 0, i, 0] | ||||
|  | ||||
|     def insert_data( | ||||
|         self, stat: CountStat, realm_subgroups: List[Optional[str]], user_subgroups: List[str] | ||||
|     ) -> None: | ||||
|         if stat.frequency == CountStat.HOUR: | ||||
|             insert_time = self.end_times_hour[2] | ||||
|             fill_time = self.end_times_hour[-1] | ||||
|         if stat.frequency == CountStat.DAY: | ||||
|             insert_time = self.end_times_day[2] | ||||
|             fill_time = self.end_times_day[-1] | ||||
|  | ||||
|         RealmCount.objects.bulk_create( | ||||
|             RealmCount( | ||||
|                 property=stat.property, | ||||
|                 subgroup=subgroup, | ||||
|                 end_time=insert_time, | ||||
|                 value=100 + i, | ||||
|                 realm=self.realm, | ||||
|             ) | ||||
|             for i, subgroup in enumerate(realm_subgroups) | ||||
|         ) | ||||
|         UserCount.objects.bulk_create( | ||||
|             UserCount( | ||||
|                 property=stat.property, | ||||
|                 subgroup=subgroup, | ||||
|                 end_time=insert_time, | ||||
|                 value=200 + i, | ||||
|                 realm=self.realm, | ||||
|                 user=self.user, | ||||
|             ) | ||||
|             for i, subgroup in enumerate(user_subgroups) | ||||
|         ) | ||||
|         FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE) | ||||
|  | ||||
|     def test_number_of_humans(self) -> None: | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["1day_actives::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] | ||||
|         self.insert_data(stat, ["false"], []) | ||||
|         result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"}) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|                 "frequency": CountStat.DAY, | ||||
|                 "everyone": { | ||||
|                     "_1day": self.data(100), | ||||
|                     "_15day": self.data(100), | ||||
|                     "all_time": self.data(100), | ||||
|                 }, | ||||
|                 "display_order": None, | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_sent_over_time(self) -> None: | ||||
|         stat = COUNT_STATS["messages_sent:is_bot:hour"] | ||||
|         self.insert_data(stat, ["true", "false"], ["false"]) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour], | ||||
|                 "frequency": CountStat.HOUR, | ||||
|                 "everyone": {"bot": self.data(100), "human": self.data(101)}, | ||||
|                 "user": {"bot": self.data(0), "human": self.data(200)}, | ||||
|                 "display_order": None, | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_sent_by_message_type(self) -> None: | ||||
|         stat = COUNT_STATS["messages_sent:message_type:day"] | ||||
|         self.insert_data( | ||||
|             stat, ["public_stream", "private_message"], ["public_stream", "private_stream"] | ||||
|         ) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|                 "frequency": CountStat.DAY, | ||||
|                 "everyone": { | ||||
|                     "Public streams": self.data(100), | ||||
|                     "Private streams": self.data(0), | ||||
|                     "Private messages": self.data(101), | ||||
|                     "Group private messages": self.data(0), | ||||
|                 }, | ||||
|                 "user": { | ||||
|                     "Public streams": self.data(200), | ||||
|                     "Private streams": self.data(201), | ||||
|                     "Private messages": self.data(0), | ||||
|                     "Group private messages": self.data(0), | ||||
|                 }, | ||||
|                 "display_order": [ | ||||
|                     "Private messages", | ||||
|                     "Public streams", | ||||
|                     "Private streams", | ||||
|                     "Group private messages", | ||||
|                 ], | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_sent_by_client(self) -> None: | ||||
|         stat = COUNT_STATS["messages_sent:client:day"] | ||||
|         client1 = Client.objects.create(name="client 1") | ||||
|         client2 = Client.objects.create(name="client 2") | ||||
|         client3 = Client.objects.create(name="client 3") | ||||
|         client4 = Client.objects.create(name="client 4") | ||||
|         self.insert_data( | ||||
|             stat, | ||||
|             [str(client4.id), str(client3.id), str(client2.id)], | ||||
|             [str(client3.id), str(client1.id)], | ||||
|         ) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||
|                 "frequency": CountStat.DAY, | ||||
|                 "everyone": { | ||||
|                     "client 4": self.data(100), | ||||
|                     "client 3": self.data(101), | ||||
|                     "client 2": self.data(102), | ||||
|                 }, | ||||
|                 "user": {"client 3": self.data(200), "client 1": self.data(201)}, | ||||
|                 "display_order": ["client 1", "client 2", "client 3", "client 4"], | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_messages_read_over_time(self) -> None: | ||||
|         stat = COUNT_STATS["messages_read::hour"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_read_over_time"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual( | ||||
|             data, | ||||
|             { | ||||
|                 "msg": "", | ||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour], | ||||
|                 "frequency": CountStat.HOUR, | ||||
|                 "everyone": {"read": self.data(100)}, | ||||
|                 "user": {"read": self.data(0)}, | ||||
|                 "display_order": None, | ||||
|                 "result": "success", | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_include_empty_subgroups(self) -> None: | ||||
|         FillState.objects.create( | ||||
|             property="realm_active_humans::day", | ||||
|             end_time=self.end_times_day[0], | ||||
|             state=FillState.DONE, | ||||
|         ) | ||||
|         result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"}) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data["everyone"], {"_1day": [0], "_15day": [0], "all_time": [0]}) | ||||
|         self.assertFalse("user" in data) | ||||
|  | ||||
|         FillState.objects.create( | ||||
|             property="messages_sent:is_bot:hour", | ||||
|             end_time=self.end_times_hour[0], | ||||
|             state=FillState.DONE, | ||||
|         ) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data["everyone"], {"human": [0], "bot": [0]}) | ||||
|         self.assertEqual(data["user"], {"human": [0], "bot": [0]}) | ||||
|  | ||||
|         FillState.objects.create( | ||||
|             property="messages_sent:message_type:day", | ||||
|             end_time=self.end_times_day[0], | ||||
|             state=FillState.DONE, | ||||
|         ) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual( | ||||
|             data["everyone"], | ||||
|             { | ||||
|                 "Public streams": [0], | ||||
|                 "Private streams": [0], | ||||
|                 "Private messages": [0], | ||||
|                 "Group private messages": [0], | ||||
|             }, | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             data["user"], | ||||
|             { | ||||
|                 "Public streams": [0], | ||||
|                 "Private streams": [0], | ||||
|                 "Private messages": [0], | ||||
|                 "Group private messages": [0], | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|         FillState.objects.create( | ||||
|             property="messages_sent:client:day", | ||||
|             end_time=self.end_times_day[0], | ||||
|             state=FillState.DONE, | ||||
|         ) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data["everyone"], {}) | ||||
|         self.assertEqual(data["user"], {}) | ||||
|  | ||||
|     def test_start_and_end(self) -> None: | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["1day_actives::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] | ||||
|         self.insert_data(stat, ["false"], []) | ||||
|         end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day] | ||||
|  | ||||
|         # valid start and end | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", | ||||
|             { | ||||
|                 "chart_name": "number_of_humans", | ||||
|                 "start": end_time_timestamps[1], | ||||
|                 "end": end_time_timestamps[2], | ||||
|             }, | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual(data["end_times"], end_time_timestamps[1:3]) | ||||
|         self.assertEqual( | ||||
|             data["everyone"], {"_1day": [0, 100], "_15day": [0, 100], "all_time": [0, 100]} | ||||
|         ) | ||||
|  | ||||
|         # start later then end | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", | ||||
|             { | ||||
|                 "chart_name": "number_of_humans", | ||||
|                 "start": end_time_timestamps[2], | ||||
|                 "end": end_time_timestamps[1], | ||||
|             }, | ||||
|         ) | ||||
|         self.assert_json_error_contains(result, "Start time is later than") | ||||
|  | ||||
|     def test_min_length(self) -> None: | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["1day_actives::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] | ||||
|         self.insert_data(stat, ["false"], []) | ||||
|         # test min_length is too short to change anything | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 2} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         self.assertEqual( | ||||
|             data["end_times"], [datetime_to_timestamp(dt) for dt in self.end_times_day] | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             data["everyone"], | ||||
|             {"_1day": self.data(100), "_15day": self.data(100), "all_time": self.data(100)}, | ||||
|         ) | ||||
|         # test min_length larger than filled data | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 5} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|         data = result.json() | ||||
|         end_times = [ | ||||
|             ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4) | ||||
|         ] | ||||
|         self.assertEqual(data["end_times"], [datetime_to_timestamp(dt) for dt in end_times]) | ||||
|         self.assertEqual( | ||||
|             data["everyone"], | ||||
|             { | ||||
|                 "_1day": [0, *self.data(100)], | ||||
|                 "_15day": [0, *self.data(100)], | ||||
|                 "all_time": [0, *self.data(100)], | ||||
|             }, | ||||
|         ) | ||||
|  | ||||
|     def test_non_existent_chart(self) -> None: | ||||
|         result = self.client_get("/json/analytics/chart_data", {"chart_name": "does_not_exist"}) | ||||
|         self.assert_json_error_contains(result, "Unknown chart name") | ||||
|  | ||||
|     def test_analytics_not_running(self) -> None: | ||||
|         realm = get_realm("zulip") | ||||
|  | ||||
|         self.assertEqual(FillState.objects.count(), 0) | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=3) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         with self.assertLogs(level="WARNING") as m: | ||||
|             result = self.client_get( | ||||
|                 "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|             ) | ||||
|             self.assertEqual( | ||||
|                 m.output, | ||||
|                 [ | ||||
|                     f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?" | ||||
|                 ], | ||||
|             ) | ||||
|  | ||||
|         self.assert_json_error_contains(result, "No analytics data available") | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=1, hours=2) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         with self.assertLogs(level="WARNING") as m: | ||||
|             result = self.client_get( | ||||
|                 "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|             ) | ||||
|             self.assertEqual( | ||||
|                 m.output, | ||||
|                 [ | ||||
|                     f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?" | ||||
|                 ], | ||||
|             ) | ||||
|  | ||||
|         self.assert_json_error_contains(result, "No analytics data available") | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=1, minutes=10) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(hours=10) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|         end_time = timezone_now() - timedelta(days=5) | ||||
|         fill_state = FillState.objects.create( | ||||
|             property="messages_sent:is_bot:hour", end_time=end_time, state=FillState.DONE | ||||
|         ) | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=3) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         with self.assertLogs(level="WARNING") as m: | ||||
|             result = self.client_get( | ||||
|                 "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|             ) | ||||
|             self.assertEqual( | ||||
|                 m.output, | ||||
|                 [ | ||||
|                     f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?" | ||||
|                 ], | ||||
|             ) | ||||
|  | ||||
|         self.assert_json_error_contains(result, "No analytics data available") | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=1, minutes=10) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|         end_time = timezone_now() - timedelta(days=2) | ||||
|         fill_state.end_time = end_time | ||||
|         fill_state.save(update_fields=["end_time"]) | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=3) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=1, hours=2) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         with self.assertLogs(level="WARNING") as m: | ||||
|             result = self.client_get( | ||||
|                 "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|             ) | ||||
|             self.assertEqual( | ||||
|                 m.output, | ||||
|                 [ | ||||
|                     f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?" | ||||
|                 ], | ||||
|             ) | ||||
|  | ||||
|         self.assert_json_error_contains(result, "No analytics data available") | ||||
|  | ||||
|         realm.date_created = timezone_now() - timedelta(days=1, minutes=10) | ||||
|         realm.save(update_fields=["date_created"]) | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|     def test_get_chart_data_for_realm(self) -> None: | ||||
|         user = self.example_user("hamlet") | ||||
|         self.login_user(user) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"} | ||||
|         ) | ||||
|         self.assert_json_error(result, "Must be an server administrator", 400) | ||||
|  | ||||
|         user = self.example_user("hamlet") | ||||
|         user.is_staff = True | ||||
|         user.save(update_fields=["is_staff"]) | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/realm/not_existing_realm", | ||||
|             {"chart_name": "number_of_humans"}, | ||||
|         ) | ||||
|         self.assert_json_error(result, "Invalid organization", 400) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|     def test_get_chart_data_for_installation(self) -> None: | ||||
|         user = self.example_user("hamlet") | ||||
|         self.login_user(user) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"} | ||||
|         ) | ||||
|         self.assert_json_error(result, "Must be an server administrator", 400) | ||||
|  | ||||
|         user = self.example_user("hamlet") | ||||
|         user.is_staff = True | ||||
|         user.save(update_fields=["is_staff"]) | ||||
|         stat = COUNT_STATS["realm_active_humans::day"] | ||||
|         self.insert_data(stat, [None], []) | ||||
|  | ||||
|         result = self.client_get( | ||||
|             "/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"} | ||||
|         ) | ||||
|         self.assert_json_success(result) | ||||
|  | ||||
|  | ||||
| class TestGetChartDataHelpers(ZulipTestCase): | ||||
|     def test_sort_by_totals(self) -> None: | ||||
|         empty: List[int] = [] | ||||
|         value_arrays = {"c": [0, 1], "a": [9], "b": [1, 1, 1], "d": empty} | ||||
|         self.assertEqual(sort_by_totals(value_arrays), ["a", "b", "c", "d"]) | ||||
|  | ||||
|     def test_sort_client_labels(self) -> None: | ||||
|         data = { | ||||
|             "everyone": {"a": [16], "c": [15], "b": [14], "e": [13], "d": [12], "h": [11]}, | ||||
|             "user": {"a": [6], "b": [5], "d": [4], "e": [3], "f": [2], "g": [1]}, | ||||
|         } | ||||
|         self.assertEqual(sort_client_labels(data), ["a", "b", "c", "d", "e", "f", "g", "h"]) | ||||
|  | ||||
|  | ||||
| class TestTimeRange(ZulipTestCase): | ||||
|     def test_time_range(self) -> None: | ||||
|         HOUR = timedelta(hours=1) | ||||
|         DAY = timedelta(days=1) | ||||
|  | ||||
|         a_time = datetime(2016, 3, 14, 22, 59, tzinfo=timezone.utc) | ||||
|         floor_hour = datetime(2016, 3, 14, 22, tzinfo=timezone.utc) | ||||
|         floor_day = datetime(2016, 3, 14, tzinfo=timezone.utc) | ||||
|  | ||||
|         # test start == end | ||||
|         self.assertEqual(time_range(a_time, a_time, CountStat.HOUR, None), []) | ||||
|         self.assertEqual(time_range(a_time, a_time, CountStat.DAY, None), []) | ||||
|         # test start == end == boundary, and min_length == 0 | ||||
|         self.assertEqual(time_range(floor_hour, floor_hour, CountStat.HOUR, 0), [floor_hour]) | ||||
|         self.assertEqual(time_range(floor_day, floor_day, CountStat.DAY, 0), [floor_day]) | ||||
|         # test start and end on different boundaries | ||||
|         self.assertEqual( | ||||
|             time_range(floor_hour, floor_hour + HOUR, CountStat.HOUR, None), | ||||
|             [floor_hour, floor_hour + HOUR], | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             time_range(floor_day, floor_day + DAY, CountStat.DAY, None), | ||||
|             [floor_day, floor_day + DAY], | ||||
|         ) | ||||
|         # test min_length | ||||
|         self.assertEqual( | ||||
|             time_range(floor_hour, floor_hour + HOUR, CountStat.HOUR, 4), | ||||
|             [floor_hour - 2 * HOUR, floor_hour - HOUR, floor_hour, floor_hour + HOUR], | ||||
|         ) | ||||
|         self.assertEqual( | ||||
|             time_range(floor_day, floor_day + DAY, CountStat.DAY, 4), | ||||
|             [floor_day - 2 * DAY, floor_day - DAY, floor_day, floor_day + DAY], | ||||
|         ) | ||||
|  | ||||
|  | ||||
| class TestMapArrays(ZulipTestCase): | ||||
|     def test_map_arrays(self) -> None: | ||||
|         a = { | ||||
|             "desktop app 1.0": [1, 2, 3], | ||||
|             "desktop app 2.0": [10, 12, 13], | ||||
|             "desktop app 3.0": [21, 22, 23], | ||||
|             "website": [1, 2, 3], | ||||
|             "ZulipiOS": [1, 2, 3], | ||||
|             "ZulipElectron": [2, 5, 7], | ||||
|             "ZulipMobile": [1, 5, 7], | ||||
|             "ZulipPython": [1, 2, 3], | ||||
|             "API: Python": [1, 2, 3], | ||||
|             "SomethingRandom": [4, 5, 6], | ||||
|             "ZulipGitHubWebhook": [7, 7, 9], | ||||
|             "ZulipAndroid": [64, 63, 65], | ||||
|         } | ||||
|         result = rewrite_client_arrays(a) | ||||
|         self.assertEqual( | ||||
|             result, | ||||
|             { | ||||
|                 "Old desktop app": [32, 36, 39], | ||||
|                 "Old iOS app": [1, 2, 3], | ||||
|                 "Desktop app": [2, 5, 7], | ||||
|                 "Mobile app": [1, 5, 7], | ||||
|                 "Website": [1, 2, 3], | ||||
|                 "Python API": [2, 4, 6], | ||||
|                 "SomethingRandom": [4, 5, 6], | ||||
|                 "GitHub webhook": [7, 7, 9], | ||||
|                 "Old Android app": [64, 63, 65], | ||||
|             }, | ||||
|         ) | ||||
| @@ -1,632 +0,0 @@ | ||||
| from datetime import datetime, timedelta, timezone | ||||
| from unittest import mock | ||||
|  | ||||
| import orjson | ||||
| from django.http import HttpResponse | ||||
| from django.utils.timezone import now as timezone_now | ||||
|  | ||||
| from corporate.lib.stripe import add_months, update_sponsorship_status | ||||
| from corporate.models import Customer, CustomerPlan, LicenseLedger, get_customer_by_realm | ||||
| from zerver.lib.actions import ( | ||||
|     do_create_multiuse_invite_link, | ||||
|     do_send_realm_reactivation_email, | ||||
|     do_set_realm_property, | ||||
| ) | ||||
| from zerver.lib.test_classes import ZulipTestCase | ||||
| from zerver.lib.test_helpers import reset_emails_in_zulip_realm | ||||
| from zerver.models import ( | ||||
|     MultiuseInvite, | ||||
|     PreregistrationUser, | ||||
|     Realm, | ||||
|     UserMessage, | ||||
|     UserProfile, | ||||
|     get_org_type_display_name, | ||||
|     get_realm, | ||||
| ) | ||||
|  | ||||
|  | ||||
| class TestSupportEndpoint(ZulipTestCase): | ||||
|     def test_search(self) -> None: | ||||
|         reset_emails_in_zulip_realm() | ||||
|  | ||||
|         def assert_user_details_in_html_response( | ||||
|             html_response: HttpResponse, full_name: str, email: str, role: str | ||||
|         ) -> None: | ||||
|             self.assert_in_success_response( | ||||
|                 [ | ||||
|                     '<span class="label">user</span>\n', | ||||
|                     f"<h3>{full_name}</h3>", | ||||
|                     f"<b>Email</b>: {email}", | ||||
|                     "<b>Is active</b>: True<br />", | ||||
|                     f"<b>Role</b>: {role}<br />", | ||||
|                 ], | ||||
|                 html_response, | ||||
|             ) | ||||
|  | ||||
|         def check_hamlet_user_query_result(result: HttpResponse) -> None: | ||||
|             assert_user_details_in_html_response( | ||||
|                 result, "King Hamlet", self.example_email("hamlet"), "Member" | ||||
|             ) | ||||
|             self.assert_in_success_response( | ||||
|                 [ | ||||
|                     f"<b>Admins</b>: {self.example_email('iago')}\n", | ||||
|                     f"<b>Owners</b>: {self.example_email('desdemona')}\n", | ||||
|                     'class="copy-button" data-copytext="{}">'.format(self.example_email("iago")), | ||||
|                     'class="copy-button" data-copytext="{}">'.format( | ||||
|                         self.example_email("desdemona") | ||||
|                     ), | ||||
|                 ], | ||||
|                 result, | ||||
|             ) | ||||
|  | ||||
|         def check_othello_user_query_result(result: HttpResponse) -> None: | ||||
|             assert_user_details_in_html_response( | ||||
|                 result, "Othello, the Moor of Venice", self.example_email("othello"), "Member" | ||||
|             ) | ||||
|  | ||||
|         def check_polonius_user_query_result(result: HttpResponse) -> None: | ||||
|             assert_user_details_in_html_response( | ||||
|                 result, "Polonius", self.example_email("polonius"), "Guest" | ||||
|             ) | ||||
|  | ||||
|         def check_zulip_realm_query_result(result: HttpResponse) -> None: | ||||
|             zulip_realm = get_realm("zulip") | ||||
|             first_human_user = zulip_realm.get_first_human_user() | ||||
|             assert first_human_user is not None | ||||
|             self.assert_in_success_response( | ||||
|                 [ | ||||
|                     f"<b>First human user</b>: {first_human_user.delivery_email}\n", | ||||
|                     f'<input type="hidden" name="realm_id" value="{zulip_realm.id}"', | ||||
|                     "Zulip Dev</h3>", | ||||
|                     '<option value="1" selected>Self-hosted</option>', | ||||
|                     '<option value="2" >Limited</option>', | ||||
|                     'input type="number" name="discount" value="None"', | ||||
|                     '<option value="active" selected>Active</option>', | ||||
|                     '<option value="deactivated" >Deactivated</option>', | ||||
|                     f'<option value="{zulip_realm.org_type}" selected>', | ||||
|                     'scrub-realm-button">', | ||||
|                     'data-string-id="zulip"', | ||||
|                 ], | ||||
|                 result, | ||||
|             ) | ||||
|  | ||||
|         def check_lear_realm_query_result(result: HttpResponse) -> None: | ||||
|             lear_realm = get_realm("lear") | ||||
|             self.assert_in_success_response( | ||||
|                 [ | ||||
|                     f'<input type="hidden" name="realm_id" value="{lear_realm.id}"', | ||||
|                     "Lear & Co.</h3>", | ||||
|                     '<option value="1" selected>Self-hosted</option>', | ||||
|                     '<option value="2" >Limited</option>', | ||||
|                     'input type="number" name="discount" value="None"', | ||||
|                     '<option value="active" selected>Active</option>', | ||||
|                     '<option value="deactivated" >Deactivated</option>', | ||||
|                     'scrub-realm-button">', | ||||
|                     'data-string-id="lear"', | ||||
|                     "<b>Name</b>: Zulip Cloud Standard", | ||||
|                     "<b>Status</b>: Active", | ||||
|                     "<b>Billing schedule</b>: Annual", | ||||
|                     "<b>Licenses</b>: 2/10 (Manual)", | ||||
|                     "<b>Price per license</b>: $80.0", | ||||
|                     "<b>Next invoice date</b>: 02 January 2017", | ||||
|                     '<option value="send_invoice" selected>', | ||||
|                     '<option value="charge_automatically" >', | ||||
|                 ], | ||||
|                 result, | ||||
|             ) | ||||
|  | ||||
|         def check_preregistration_user_query_result( | ||||
|             result: HttpResponse, email: str, invite: bool = False | ||||
|         ) -> None: | ||||
|             self.assert_in_success_response( | ||||
|                 [ | ||||
|                     '<span class="label">preregistration user</span>\n', | ||||
|                     f"<b>Email</b>: {email}", | ||||
|                 ], | ||||
|                 result, | ||||
|             ) | ||||
|             if invite: | ||||
|                 self.assert_in_success_response(['<span class="label">invite</span>'], result) | ||||
|                 self.assert_in_success_response( | ||||
|                     [ | ||||
|                         "<b>Expires in</b>: 1\xa0week, 3\xa0days", | ||||
|                         "<b>Status</b>: Link has never been clicked", | ||||
|                     ], | ||||
|                     result, | ||||
|                 ) | ||||
|                 self.assert_in_success_response([], result) | ||||
|             else: | ||||
|                 self.assert_not_in_success_response(['<span class="label">invite</span>'], result) | ||||
|                 self.assert_in_success_response( | ||||
|                     [ | ||||
|                         "<b>Expires in</b>: 1\xa0day", | ||||
|                         "<b>Status</b>: Link has never been clicked", | ||||
|                     ], | ||||
|                     result, | ||||
|                 ) | ||||
|  | ||||
|         def check_realm_creation_query_result(result: HttpResponse, email: str) -> None: | ||||
|             self.assert_in_success_response( | ||||
|                 [ | ||||
|                     '<span class="label">preregistration user</span>\n', | ||||
|                     '<span class="label">realm creation</span>\n', | ||||
|                     "<b>Link</b>: http://testserver/accounts/do_confirm/", | ||||
|                     "<b>Expires in</b>: 1\xa0day", | ||||
|                 ], | ||||
|                 result, | ||||
|             ) | ||||
|  | ||||
|         def check_multiuse_invite_link_query_result(result: HttpResponse) -> None: | ||||
|             self.assert_in_success_response( | ||||
|                 [ | ||||
|                     '<span class="label">multiuse invite</span>\n', | ||||
|                     "<b>Link</b>: http://zulip.testserver/join/", | ||||
|                     "<b>Expires in</b>: 1\xa0week, 3\xa0days", | ||||
|                 ], | ||||
|                 result, | ||||
|             ) | ||||
|  | ||||
|         def check_realm_reactivation_link_query_result(result: HttpResponse) -> None: | ||||
|             self.assert_in_success_response( | ||||
|                 [ | ||||
|                     '<span class="label">realm reactivation</span>\n', | ||||
|                     "<b>Link</b>: http://zulip.testserver/reactivate/", | ||||
|                     "<b>Expires in</b>: 1\xa0day", | ||||
|                 ], | ||||
|                 result, | ||||
|             ) | ||||
|  | ||||
|         self.login("cordelia") | ||||
|  | ||||
|         result = self.client_get("/activity/support") | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         self.login("iago") | ||||
|  | ||||
|         do_set_realm_property( | ||||
|             get_realm("zulip"), | ||||
|             "email_address_visibility", | ||||
|             Realm.EMAIL_ADDRESS_VISIBILITY_NOBODY, | ||||
|             acting_user=None, | ||||
|         ) | ||||
|  | ||||
|         customer = Customer.objects.create(realm=get_realm("lear"), stripe_customer_id="cus_123") | ||||
|         now = datetime(2016, 1, 2, tzinfo=timezone.utc) | ||||
|         plan = CustomerPlan.objects.create( | ||||
|             customer=customer, | ||||
|             billing_cycle_anchor=now, | ||||
|             billing_schedule=CustomerPlan.ANNUAL, | ||||
|             tier=CustomerPlan.STANDARD, | ||||
|             price_per_license=8000, | ||||
|             next_invoice_date=add_months(now, 12), | ||||
|         ) | ||||
|         LicenseLedger.objects.create( | ||||
|             licenses=10, | ||||
|             licenses_at_next_renewal=10, | ||||
|             event_time=timezone_now(), | ||||
|             is_renewal=True, | ||||
|             plan=plan, | ||||
|         ) | ||||
|  | ||||
|         result = self.client_get("/activity/support") | ||||
|         self.assert_in_success_response( | ||||
|             ['<input type="text" name="q" class="input-xxlarge search-query"'], result | ||||
|         ) | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": self.example_email("hamlet")}) | ||||
|         check_hamlet_user_query_result(result) | ||||
|         check_zulip_realm_query_result(result) | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": self.example_email("polonius")}) | ||||
|         check_polonius_user_query_result(result) | ||||
|         check_zulip_realm_query_result(result) | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": "lear"}) | ||||
|         check_lear_realm_query_result(result) | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": "http://lear.testserver"}) | ||||
|         check_lear_realm_query_result(result) | ||||
|  | ||||
|         with self.settings(REALM_HOSTS={"zulip": "localhost"}): | ||||
|             result = self.client_get("/activity/support", {"q": "http://localhost"}) | ||||
|             check_zulip_realm_query_result(result) | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": "hamlet@zulip.com, lear"}) | ||||
|         check_hamlet_user_query_result(result) | ||||
|         check_zulip_realm_query_result(result) | ||||
|         check_lear_realm_query_result(result) | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": "King hamlet,lear"}) | ||||
|         check_hamlet_user_query_result(result) | ||||
|         check_zulip_realm_query_result(result) | ||||
|         check_lear_realm_query_result(result) | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": "Othello, the Moor of Venice"}) | ||||
|         check_othello_user_query_result(result) | ||||
|         check_zulip_realm_query_result(result) | ||||
|  | ||||
|         result = self.client_get("/activity/support", {"q": "lear, Hamlet <hamlet@zulip.com>"}) | ||||
|         check_hamlet_user_query_result(result) | ||||
|         check_zulip_realm_query_result(result) | ||||
|         check_lear_realm_query_result(result) | ||||
|  | ||||
|         with mock.patch( | ||||
|             "analytics.views.support.timezone_now", | ||||
|             return_value=timezone_now() - timedelta(minutes=50), | ||||
|         ): | ||||
|             self.client_post("/accounts/home/", {"email": self.nonreg_email("test")}) | ||||
|             self.login("iago") | ||||
|             result = self.client_get("/activity/support", {"q": self.nonreg_email("test")}) | ||||
|             check_preregistration_user_query_result(result, self.nonreg_email("test")) | ||||
|             check_zulip_realm_query_result(result) | ||||
|  | ||||
|             invite_expires_in_days = 10 | ||||
|             stream_ids = [self.get_stream_id("Denmark")] | ||||
|             invitee_emails = [self.nonreg_email("test1")] | ||||
|             self.client_post( | ||||
|                 "/json/invites", | ||||
|                 { | ||||
|                     "invitee_emails": invitee_emails, | ||||
|                     "stream_ids": orjson.dumps(stream_ids).decode(), | ||||
|                     "invite_expires_in_days": invite_expires_in_days, | ||||
|                     "invite_as": PreregistrationUser.INVITE_AS["MEMBER"], | ||||
|                 }, | ||||
|             ) | ||||
|             result = self.client_get("/activity/support", {"q": self.nonreg_email("test1")}) | ||||
|             check_preregistration_user_query_result(result, self.nonreg_email("test1"), invite=True) | ||||
|             check_zulip_realm_query_result(result) | ||||
|  | ||||
|             email = self.nonreg_email("alice") | ||||
|             self.client_post("/new/", {"email": email}) | ||||
|             result = self.client_get("/activity/support", {"q": email}) | ||||
|             check_realm_creation_query_result(result, email) | ||||
|  | ||||
|             do_create_multiuse_invite_link( | ||||
|                 self.example_user("hamlet"), | ||||
|                 invited_as=1, | ||||
|                 invite_expires_in_days=invite_expires_in_days, | ||||
|             ) | ||||
|             result = self.client_get("/activity/support", {"q": "zulip"}) | ||||
|             check_multiuse_invite_link_query_result(result) | ||||
|             check_zulip_realm_query_result(result) | ||||
|             MultiuseInvite.objects.all().delete() | ||||
|  | ||||
|             do_send_realm_reactivation_email(get_realm("zulip"), acting_user=None) | ||||
|             result = self.client_get("/activity/support", {"q": "zulip"}) | ||||
|             check_realm_reactivation_link_query_result(result) | ||||
|             check_zulip_realm_query_result(result) | ||||
|  | ||||
|     def test_get_org_type_display_name(self) -> None: | ||||
|         self.assertEqual(get_org_type_display_name(Realm.ORG_TYPES["business"]["id"]), "Business") | ||||
|         self.assertEqual(get_org_type_display_name(883), "") | ||||
|  | ||||
|     @mock.patch("analytics.views.support.update_billing_method_of_current_plan") | ||||
|     def test_change_billing_method(self, m: mock.Mock) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login_user(iago) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", | ||||
|             {"realm_id": f"{iago.realm_id}", "billing_method": "charge_automatically"}, | ||||
|         ) | ||||
|         m.assert_called_once_with(get_realm("zulip"), charge_automatically=True, acting_user=iago) | ||||
|         self.assert_in_success_response( | ||||
|             ["Billing method of zulip updated to charge automatically"], result | ||||
|         ) | ||||
|  | ||||
|         m.reset_mock() | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{iago.realm_id}", "billing_method": "send_invoice"} | ||||
|         ) | ||||
|         m.assert_called_once_with(get_realm("zulip"), charge_automatically=False, acting_user=iago) | ||||
|         self.assert_in_success_response( | ||||
|             ["Billing method of zulip updated to pay by invoice"], result | ||||
|         ) | ||||
|  | ||||
|     def test_change_realm_plan_type(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login_user(iago) | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_change_realm_plan_type") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "2"} | ||||
|             ) | ||||
|             m.assert_called_once_with(get_realm("zulip"), 2, acting_user=iago) | ||||
|             self.assert_in_success_response( | ||||
|                 ["Plan type of zulip changed from self-hosted to limited"], result | ||||
|             ) | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_change_realm_plan_type") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "10"} | ||||
|             ) | ||||
|             m.assert_called_once_with(get_realm("zulip"), 10, acting_user=iago) | ||||
|             self.assert_in_success_response( | ||||
|                 ["Plan type of zulip changed from self-hosted to plus"], result | ||||
|             ) | ||||
|  | ||||
|     def test_change_org_type(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{cordelia.realm_id}", "org_type": "70"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login_user(iago) | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_change_realm_org_type") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{iago.realm_id}", "org_type": "70"} | ||||
|             ) | ||||
|             m.assert_called_once_with(get_realm("zulip"), 70, acting_user=iago) | ||||
|             self.assert_in_success_response( | ||||
|                 ["Org type of zulip changed from Business to Government"], result | ||||
|             ) | ||||
|  | ||||
|     def test_attach_discount(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         lear_realm = get_realm("lear") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login("iago") | ||||
|  | ||||
|         with mock.patch("analytics.views.support.attach_discount_to_realm") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"} | ||||
|             ) | ||||
|             m.assert_called_once_with(get_realm("lear"), 25, acting_user=iago) | ||||
|             self.assert_in_success_response(["Discount of lear changed to 25% from 0%"], result) | ||||
|  | ||||
|     def test_change_sponsorship_status(self) -> None: | ||||
|         lear_realm = get_realm("lear") | ||||
|         self.assertIsNone(get_customer_by_realm(lear_realm)) | ||||
|  | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login_user(iago) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"} | ||||
|         ) | ||||
|         self.assert_in_success_response(["lear marked as pending sponsorship."], result) | ||||
|         customer = get_customer_by_realm(lear_realm) | ||||
|         assert customer is not None | ||||
|         self.assertTrue(customer.sponsorship_pending) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "false"} | ||||
|         ) | ||||
|         self.assert_in_success_response(["lear is no longer pending sponsorship."], result) | ||||
|         customer = get_customer_by_realm(lear_realm) | ||||
|         assert customer is not None | ||||
|         self.assertFalse(customer.sponsorship_pending) | ||||
|  | ||||
|     def test_approve_sponsorship(self) -> None: | ||||
|         lear_realm = get_realm("lear") | ||||
|         update_sponsorship_status(lear_realm, True, acting_user=None) | ||||
|         king_user = self.lear_user("king") | ||||
|         king_user.role = UserProfile.ROLE_REALM_OWNER | ||||
|         king_user.save() | ||||
|  | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", | ||||
|             {"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"}, | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login_user(iago) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", | ||||
|             {"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"}, | ||||
|         ) | ||||
|         self.assert_in_success_response(["Sponsorship approved for lear"], result) | ||||
|         lear_realm.refresh_from_db() | ||||
|         self.assertEqual(lear_realm.plan_type, Realm.PLAN_TYPE_STANDARD_FREE) | ||||
|         customer = get_customer_by_realm(lear_realm) | ||||
|         assert customer is not None | ||||
|         self.assertFalse(customer.sponsorship_pending) | ||||
|         messages = UserMessage.objects.filter(user_profile=king_user) | ||||
|         self.assertIn( | ||||
|             "request for sponsored hosting has been approved", messages[0].message.content | ||||
|         ) | ||||
|         self.assert_length(messages, 1) | ||||
|  | ||||
|     def test_activate_or_deactivate_realm(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         lear_realm = get_realm("lear") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         self.login("iago") | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_deactivate_realm") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"} | ||||
|             ) | ||||
|             m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago")) | ||||
|             self.assert_in_success_response(["lear deactivated"], result) | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_send_realm_reactivation_email") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{lear_realm.id}", "status": "active"} | ||||
|             ) | ||||
|             m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago")) | ||||
|             self.assert_in_success_response( | ||||
|                 ["Realm reactivation email sent to admins of lear"], result | ||||
|             ) | ||||
|  | ||||
|     def test_change_subdomain(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         lear_realm = get_realm("lear") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new_name"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|         self.login("iago") | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/activity/support?q=new-name") | ||||
|         realm_id = lear_realm.id | ||||
|         lear_realm = get_realm("new-name") | ||||
|         self.assertEqual(lear_realm.id, realm_id) | ||||
|         self.assertTrue(Realm.objects.filter(string_id="lear").exists()) | ||||
|         self.assertTrue(Realm.objects.filter(string_id="lear")[0].deactivated) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"} | ||||
|         ) | ||||
|         self.assert_in_success_response( | ||||
|             ["Subdomain unavailable. Please choose a different one."], result | ||||
|         ) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "zulip"} | ||||
|         ) | ||||
|         self.assert_in_success_response( | ||||
|             ["Subdomain unavailable. Please choose a different one."], result | ||||
|         ) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "lear"} | ||||
|         ) | ||||
|         self.assert_in_success_response( | ||||
|             ["Subdomain unavailable. Please choose a different one."], result | ||||
|         ) | ||||
|  | ||||
|     def test_downgrade_realm(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         self.login_user(cordelia) | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         iago = self.example_user("iago") | ||||
|         self.login_user(iago) | ||||
|  | ||||
|         with mock.patch("analytics.views.support.downgrade_at_the_end_of_billing_cycle") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", | ||||
|                 { | ||||
|                     "realm_id": f"{iago.realm_id}", | ||||
|                     "downgrade_method": "downgrade_at_billing_cycle_end", | ||||
|                 }, | ||||
|             ) | ||||
|             m.assert_called_once_with(get_realm("zulip")) | ||||
|             self.assert_in_success_response( | ||||
|                 ["zulip marked for downgrade at the end of billing cycle"], result | ||||
|             ) | ||||
|  | ||||
|         with mock.patch( | ||||
|             "analytics.views.support.downgrade_now_without_creating_additional_invoices" | ||||
|         ) as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", | ||||
|                 { | ||||
|                     "realm_id": f"{iago.realm_id}", | ||||
|                     "downgrade_method": "downgrade_now_without_additional_licenses", | ||||
|                 }, | ||||
|             ) | ||||
|             m.assert_called_once_with(get_realm("zulip")) | ||||
|             self.assert_in_success_response( | ||||
|                 ["zulip downgraded without creating additional invoices"], result | ||||
|             ) | ||||
|  | ||||
|         with mock.patch( | ||||
|             "analytics.views.support.downgrade_now_without_creating_additional_invoices" | ||||
|         ) as m1: | ||||
|             with mock.patch("analytics.views.support.void_all_open_invoices", return_value=1) as m2: | ||||
|                 result = self.client_post( | ||||
|                     "/activity/support", | ||||
|                     { | ||||
|                         "realm_id": f"{iago.realm_id}", | ||||
|                         "downgrade_method": "downgrade_now_void_open_invoices", | ||||
|                     }, | ||||
|                 ) | ||||
|                 m1.assert_called_once_with(get_realm("zulip")) | ||||
|                 m2.assert_called_once_with(get_realm("zulip")) | ||||
|                 self.assert_in_success_response( | ||||
|                     ["zulip downgraded and voided 1 open invoices"], result | ||||
|                 ) | ||||
|  | ||||
|     def test_scrub_realm(self) -> None: | ||||
|         cordelia = self.example_user("cordelia") | ||||
|         lear_realm = get_realm("lear") | ||||
|         self.login_user(cordelia) | ||||
|  | ||||
|         result = self.client_post( | ||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"} | ||||
|         ) | ||||
|         self.assertEqual(result.status_code, 302) | ||||
|         self.assertEqual(result["Location"], "/login/") | ||||
|  | ||||
|         self.login("iago") | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_scrub_realm") as m: | ||||
|             result = self.client_post( | ||||
|                 "/activity/support", {"realm_id": f"{lear_realm.id}", "scrub_realm": "true"} | ||||
|             ) | ||||
|             m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago")) | ||||
|             self.assert_in_success_response(["lear scrubbed"], result) | ||||
|  | ||||
|         with mock.patch("analytics.views.support.do_scrub_realm") as m: | ||||
|             result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}"}) | ||||
|             self.assert_json_error(result, "Invalid parameters") | ||||
|             m.assert_not_called() | ||||
| @@ -1,69 +1,8 @@ | ||||
| from typing import List, Union | ||||
| from django.conf.urls import patterns, url | ||||
|  | ||||
| from django.conf.urls import include | ||||
| from django.urls import path | ||||
| from django.urls.resolvers import URLPattern, URLResolver | ||||
|  | ||||
| from analytics.views.installation_activity import get_installation_activity | ||||
| from analytics.views.realm_activity import get_realm_activity | ||||
| from analytics.views.stats import ( | ||||
|     get_chart_data, | ||||
|     get_chart_data_for_installation, | ||||
|     get_chart_data_for_realm, | ||||
|     get_chart_data_for_remote_installation, | ||||
|     get_chart_data_for_remote_realm, | ||||
|     stats, | ||||
|     stats_for_installation, | ||||
|     stats_for_realm, | ||||
|     stats_for_remote_installation, | ||||
|     stats_for_remote_realm, | ||||
| urlpatterns = patterns('analytics.views', | ||||
|     url(r'^activity$', 'get_activity'), | ||||
|     url(r'^realm_activity/(?P<realm>[\S]+)/$', 'get_realm_activity'), | ||||
|     url(r'^user_activity/(?P<email>[\S]+)/$', 'get_user_activity'), | ||||
| ) | ||||
| from analytics.views.support import support | ||||
| from analytics.views.user_activity import get_user_activity | ||||
| from zerver.lib.rest import rest_path | ||||
|  | ||||
| i18n_urlpatterns: List[Union[URLPattern, URLResolver]] = [ | ||||
|     # Server admin (user_profile.is_staff) visible stats pages | ||||
|     path("activity", get_installation_activity), | ||||
|     path("activity/support", support, name="support"), | ||||
|     path("realm_activity/<realm_str>/", get_realm_activity), | ||||
|     path("user_activity/<user_profile_id>/", get_user_activity), | ||||
|     path("stats/realm/<realm_str>/", stats_for_realm), | ||||
|     path("stats/installation", stats_for_installation), | ||||
|     path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation), | ||||
|     path( | ||||
|         "stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/", stats_for_remote_realm | ||||
|     ), | ||||
|     # User-visible stats page | ||||
|     path("stats", stats, name="stats"), | ||||
| ] | ||||
|  | ||||
| # These endpoints are a part of the API (V1), which uses: | ||||
| # * REST verbs | ||||
| # * Basic auth (username:password is email:apiKey) | ||||
| # * Takes and returns json-formatted data | ||||
| # | ||||
| # See rest_dispatch in zerver.lib.rest for an explanation of auth methods used | ||||
| # | ||||
| # All of these paths are accessed by either a /json or /api prefix | ||||
| v1_api_and_json_patterns = [ | ||||
|     # get data for the graphs at /stats | ||||
|     rest_path("analytics/chart_data", GET=get_chart_data), | ||||
|     rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm), | ||||
|     rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation), | ||||
|     rest_path( | ||||
|         "analytics/chart_data/remote/<int:remote_server_id>/installation", | ||||
|         GET=get_chart_data_for_remote_installation, | ||||
|     ), | ||||
|     rest_path( | ||||
|         "analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>", | ||||
|         GET=get_chart_data_for_remote_realm, | ||||
|     ), | ||||
| ] | ||||
|  | ||||
| i18n_urlpatterns += [ | ||||
|     path("api/v1/", include(v1_api_and_json_patterns)), | ||||
|     path("json/", include(v1_api_and_json_patterns)), | ||||
| ] | ||||
|  | ||||
| urlpatterns = i18n_urlpatterns | ||||
|   | ||||
							
								
								
									
										880
									
								
								analytics/views.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										880
									
								
								analytics/views.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,880 @@ | ||||
| from django.db import connection | ||||
| from django.template import RequestContext, loader | ||||
| from django.utils.html import mark_safe | ||||
| from django.shortcuts import render_to_response | ||||
| from django.core import urlresolvers | ||||
| from django.http import HttpResponseNotFound | ||||
|  | ||||
| from zerver.decorator import has_request_variables, REQ, zulip_internal | ||||
| from zerver.models import get_realm, UserActivity, UserActivityInterval, Realm | ||||
| from zerver.lib.timestamp import timestamp_to_datetime | ||||
|  | ||||
| from collections import defaultdict | ||||
| from datetime import datetime, timedelta | ||||
| import itertools | ||||
| import time | ||||
| import re | ||||
| import pytz | ||||
| eastern_tz = pytz.timezone('US/Eastern') | ||||
|  | ||||
| def make_table(title, cols, rows, has_row_class=False): | ||||
|  | ||||
|     if not has_row_class: | ||||
|         def fix_row(row): | ||||
|             return dict(cells=row, row_class=None) | ||||
|         rows = map(fix_row, rows) | ||||
|  | ||||
|     data = dict(title=title, cols=cols, rows=rows) | ||||
|  | ||||
|     content = loader.render_to_string( | ||||
|         'analytics/ad_hoc_query.html', | ||||
|         dict(data=data) | ||||
|     ) | ||||
|  | ||||
|     return content | ||||
|  | ||||
| def dictfetchall(cursor): | ||||
|     "Returns all rows from a cursor as a dict" | ||||
|     desc = cursor.description | ||||
|     return [ | ||||
|         dict(zip([col[0] for col in desc], row)) | ||||
|         for row in cursor.fetchall() | ||||
|     ] | ||||
|  | ||||
|  | ||||
| def get_realm_day_counts(): | ||||
|     query = ''' | ||||
|         select | ||||
|             r.domain, | ||||
|             (now()::date - pub_date::date) age, | ||||
|             count(*) cnt | ||||
|         from zerver_message m | ||||
|         join zerver_userprofile up on up.id = m.sender_id | ||||
|         join zerver_realm r on r.id = up.realm_id | ||||
|         join zerver_client c on c.id = m.sending_client_id | ||||
|         where | ||||
|             (not up.is_bot) | ||||
|         and | ||||
|             pub_date > now()::date - interval '8 day' | ||||
|         and | ||||
|             c.name not in ('zephyr_mirror', 'ZulipMonitoring') | ||||
|         group by | ||||
|             r.domain, | ||||
|             age | ||||
|         order by | ||||
|             r.domain, | ||||
|             age | ||||
|     ''' | ||||
|     cursor = connection.cursor() | ||||
|     cursor.execute(query) | ||||
|     rows = dictfetchall(cursor) | ||||
|     cursor.close() | ||||
|  | ||||
|     counts = defaultdict(dict) | ||||
|     for row in rows: | ||||
|         counts[row['domain']][row['age']] = row['cnt'] | ||||
|  | ||||
|  | ||||
|     result = {} | ||||
|     for domain in counts: | ||||
|         cnts = [counts[domain].get(age, 0) for age in range(8)] | ||||
|         min_cnt = min(cnts) | ||||
|         max_cnt = max(cnts) | ||||
|  | ||||
|         def format_count(cnt): | ||||
|             if cnt == min_cnt: | ||||
|                 good_bad = 'bad' | ||||
|             elif cnt == max_cnt: | ||||
|                 good_bad = 'good' | ||||
|             else: | ||||
|                 good_bad = 'neutral' | ||||
|  | ||||
|             return '<td class="number %s">%s</td>' % (good_bad, cnt) | ||||
|  | ||||
|         cnts = ''.join(map(format_count, cnts)) | ||||
|         result[domain] = dict(cnts=cnts) | ||||
|  | ||||
|     return result | ||||
|  | ||||
| def realm_summary_table(realm_minutes): | ||||
|     query = ''' | ||||
|         SELECT | ||||
|             realm.domain, | ||||
|             coalesce(user_counts.active_user_count, 0) active_user_count, | ||||
|             coalesce(at_risk_counts.at_risk_count, 0) at_risk_count, | ||||
|             ( | ||||
|                 SELECT | ||||
|                     count(*) | ||||
|                 FROM zerver_userprofile up | ||||
|                 WHERE up.realm_id = realm.id | ||||
|                 AND is_active | ||||
|                 AND not is_bot | ||||
|             ) user_profile_count, | ||||
|             ( | ||||
|                 SELECT | ||||
|                     count(*) | ||||
|                 FROM zerver_userprofile up | ||||
|                 WHERE up.realm_id = realm.id | ||||
|                 AND is_active | ||||
|                 AND is_bot | ||||
|             ) bot_count | ||||
|         FROM zerver_realm realm | ||||
|         LEFT OUTER JOIN | ||||
|             ( | ||||
|                 SELECT | ||||
|                     up.realm_id realm_id, | ||||
|                     count(distinct(ua.user_profile_id)) active_user_count | ||||
|                 FROM zerver_useractivity ua | ||||
|                 JOIN zerver_userprofile up | ||||
|                     ON up.id = ua.user_profile_id | ||||
|                 WHERE | ||||
|                     query in ( | ||||
|                         '/json/send_message', | ||||
|                         'send_message_backend', | ||||
|                         '/api/v1/send_message', | ||||
|                         '/json/update_pointer' | ||||
|                     ) | ||||
|                 AND | ||||
|                     last_visit > now() - interval '1 day' | ||||
|                 AND | ||||
|                     not is_bot | ||||
|                 GROUP BY realm_id | ||||
|             ) user_counts | ||||
|             ON user_counts.realm_id = realm.id | ||||
|         LEFT OUTER JOIN | ||||
|             ( | ||||
|                 SELECT | ||||
|                     realm_id, | ||||
|                     count(*) at_risk_count | ||||
|                 FROM ( | ||||
|                     SELECT | ||||
|                         realm.id as realm_id, | ||||
|                         up.email | ||||
|                     FROM zerver_useractivity ua | ||||
|                     JOIN zerver_userprofile up | ||||
|                         ON up.id = ua.user_profile_id | ||||
|                     JOIN zerver_realm realm | ||||
|                         ON realm.id = up.realm_id | ||||
|                     WHERE up.is_active | ||||
|                     AND (not up.is_bot) | ||||
|                     AND | ||||
|                         ua.query in ( | ||||
|                             '/json/send_message', | ||||
|                             'send_message_backend', | ||||
|                            '/api/v1/send_message', | ||||
|                             '/json/update_pointer' | ||||
|                         ) | ||||
|                     GROUP by realm.id, up.email | ||||
|                     HAVING max(last_visit) between | ||||
|                         now() - interval '7 day' and | ||||
|                         now() - interval '1 day' | ||||
|                 ) as at_risk_users | ||||
|                 GROUP BY realm_id | ||||
|             ) at_risk_counts | ||||
|             ON at_risk_counts.realm_id = realm.id | ||||
|         WHERE | ||||
|             realm.domain not in ('customer4.invalid', 'wdaher.com') | ||||
|         AND EXISTS ( | ||||
|                 SELECT * | ||||
|                 FROM zerver_useractivity ua | ||||
|                 JOIN zerver_userprofile up | ||||
|                     ON up.id = ua.user_profile_id | ||||
|                 WHERE | ||||
|                     query in ( | ||||
|                         '/json/send_message', | ||||
|                         '/api/v1/send_message', | ||||
|                         'send_message_backend', | ||||
|                         '/json/update_pointer' | ||||
|                     ) | ||||
|                 AND | ||||
|                     up.realm_id = realm.id | ||||
|                 AND | ||||
|                     last_visit > now() - interval '2 week' | ||||
|         ) | ||||
|         ORDER BY active_user_count DESC, domain ASC | ||||
|         ''' | ||||
|  | ||||
|     cursor = connection.cursor() | ||||
|     cursor.execute(query) | ||||
|     rows = dictfetchall(cursor) | ||||
|     cursor.close() | ||||
|  | ||||
|     # get messages sent per day | ||||
|     counts = get_realm_day_counts() | ||||
|     for row in rows: | ||||
|         try: | ||||
|             row['history'] = counts[row['domain']]['cnts'] | ||||
|         except: | ||||
|             row['history'] = '' | ||||
|  | ||||
|     # augment data with realm_minutes | ||||
|     total_hours = 0 | ||||
|     for row in rows: | ||||
|         domain = row['domain'] | ||||
|         minutes = realm_minutes.get(domain, 0) | ||||
|         hours = minutes / 60.0 | ||||
|         total_hours += hours | ||||
|         row['hours'] = str(int(hours)) | ||||
|         try: | ||||
|             row['hours_per_user'] = '%.1f' % (hours / row['active_user_count'],) | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|     # formatting | ||||
|     for row in rows: | ||||
|         row['domain'] = realm_activity_link(row['domain']) | ||||
|  | ||||
|     # Count active sites | ||||
|     def meets_goal(row): | ||||
|         # The wdaher.com realm doesn't count toward company goals for | ||||
|         # obvious reasons, and customer4.invalid is essentially a dup | ||||
|         # for users.customer4.invalid. | ||||
|         if row['domain'] in ['customer4.invalid', 'wdaher.com']: | ||||
|             return False | ||||
|         return row['active_user_count'] >= 5 | ||||
|  | ||||
|     num_active_sites = len(filter(meets_goal, rows)) | ||||
|  | ||||
|     # create totals | ||||
|     total_active_user_count = 0 | ||||
|     total_user_profile_count = 0 | ||||
|     total_bot_count = 0 | ||||
|     for row in rows: | ||||
|         total_active_user_count += int(row['active_user_count']) | ||||
|         total_user_profile_count += int(row['user_profile_count']) | ||||
|         total_bot_count += int(row['bot_count']) | ||||
|  | ||||
|  | ||||
|     rows.append(dict( | ||||
|         domain='Total', | ||||
|         active_user_count=total_active_user_count, | ||||
|         user_profile_count=total_user_profile_count, | ||||
|         bot_count=total_bot_count, | ||||
|         hours=int(total_hours) | ||||
|     )) | ||||
|  | ||||
|     content = loader.render_to_string( | ||||
|         'analytics/realm_summary_table.html', | ||||
|         dict(rows=rows, num_active_sites=num_active_sites) | ||||
|     ) | ||||
|     return content | ||||
|  | ||||
|  | ||||
| def user_activity_intervals(): | ||||
|     day_end = timestamp_to_datetime(time.time()) | ||||
|     day_start = day_end - timedelta(hours=24) | ||||
|  | ||||
|     output = "Per-user online duration for the last 24 hours:\n" | ||||
|     total_duration = timedelta(0) | ||||
|  | ||||
|     all_intervals = UserActivityInterval.objects.filter( | ||||
|         end__gte=day_start, | ||||
|         start__lte=day_end | ||||
|     ).select_related( | ||||
|         'user_profile', | ||||
|         'user_profile__realm' | ||||
|     ).only( | ||||
|         'start', | ||||
|         'end', | ||||
|         'user_profile__email', | ||||
|         'user_profile__realm__domain' | ||||
|     ).order_by( | ||||
|         'user_profile__realm__domain', | ||||
|         'user_profile__email' | ||||
|     ) | ||||
|  | ||||
|     by_domain = lambda row: row.user_profile.realm.domain | ||||
|     by_email = lambda row: row.user_profile.email | ||||
|  | ||||
|     realm_minutes = {} | ||||
|  | ||||
|     for domain, realm_intervals in itertools.groupby(all_intervals, by_domain): | ||||
|         realm_duration = timedelta(0) | ||||
|         output += '<hr>%s\n' % (domain,) | ||||
|         for email, intervals in itertools.groupby(realm_intervals, by_email): | ||||
|             duration = timedelta(0) | ||||
|             for interval in intervals: | ||||
|                 start = max(day_start, interval.start) | ||||
|                 end = min(day_end, interval.end) | ||||
|                 duration += end - start | ||||
|  | ||||
|             total_duration += duration | ||||
|             realm_duration += duration | ||||
|             output += "  %-*s%s\n" % (37, email, duration, ) | ||||
|  | ||||
|         realm_minutes[domain] = realm_duration.total_seconds() / 60 | ||||
|  | ||||
|     output += "\nTotal Duration:                      %s\n" % (total_duration,) | ||||
|     output += "\nTotal Duration in minutes:           %s\n" % (total_duration.total_seconds() / 60.,) | ||||
|     output += "Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,) | ||||
|     content = mark_safe('<pre>' + output + '</pre>') | ||||
|     return content, realm_minutes | ||||
|  | ||||
| def sent_messages_report(realm): | ||||
|     title = 'Recently sent messages for ' + realm | ||||
|  | ||||
|     cols = [ | ||||
|         'Date', | ||||
|         'Humans', | ||||
|         'Bots' | ||||
|     ] | ||||
|  | ||||
|     query = ''' | ||||
|         select | ||||
|             series.day::date, | ||||
|             humans.cnt, | ||||
|             bots.cnt | ||||
|         from ( | ||||
|             select generate_series( | ||||
|                 (now()::date - interval '2 week'), | ||||
|                 now()::date, | ||||
|                 interval '1 day' | ||||
|             ) as day | ||||
|         ) as series | ||||
|         left join ( | ||||
|             select | ||||
|                 pub_date::date pub_date, | ||||
|                 count(*) cnt | ||||
|             from zerver_message m | ||||
|             join zerver_userprofile up on up.id = m.sender_id | ||||
|             join zerver_realm r on r.id = up.realm_id | ||||
|             where | ||||
|                 r.domain = %s | ||||
|             and | ||||
|                 (not up.is_bot) | ||||
|             and | ||||
|                 pub_date > now() - interval '2 week' | ||||
|             group by | ||||
|                 pub_date::date | ||||
|             order by | ||||
|                 pub_date::date | ||||
|         ) humans on | ||||
|             series.day = humans.pub_date | ||||
|         left join ( | ||||
|             select | ||||
|                 pub_date::date pub_date, | ||||
|                 count(*) cnt | ||||
|             from zerver_message m | ||||
|             join zerver_userprofile up on up.id = m.sender_id | ||||
|             join zerver_realm r on r.id = up.realm_id | ||||
|             where | ||||
|                 r.domain = %s | ||||
|             and | ||||
|                 up.is_bot | ||||
|             and | ||||
|                 pub_date > now() - interval '2 week' | ||||
|             group by | ||||
|                 pub_date::date | ||||
|             order by | ||||
|                 pub_date::date | ||||
|         ) bots on | ||||
|             series.day = bots.pub_date | ||||
|     ''' | ||||
|     cursor = connection.cursor() | ||||
|     cursor.execute(query, [realm, realm]) | ||||
|     rows = cursor.fetchall() | ||||
|     cursor.close() | ||||
|  | ||||
|     return make_table(title, cols, rows) | ||||
|  | ||||
| def ad_hoc_queries(): | ||||
|     def get_page(query, cols, title): | ||||
|         cursor = connection.cursor() | ||||
|         cursor.execute(query) | ||||
|         rows = cursor.fetchall() | ||||
|         rows = map(list, rows) | ||||
|         cursor.close() | ||||
|  | ||||
|         def fix_rows(i, fixup_func): | ||||
|             for row in rows: | ||||
|                 row[i] = fixup_func(row[i]) | ||||
|  | ||||
|         for i, col in enumerate(cols): | ||||
|             if col == 'Domain': | ||||
|                 fix_rows(i, realm_activity_link) | ||||
|             elif col in ['Last time', 'Last visit']: | ||||
|                 fix_rows(i, format_date_for_activity_reports) | ||||
|  | ||||
|         content = make_table(title, cols, rows) | ||||
|  | ||||
|         return dict( | ||||
|             content=content, | ||||
|             title=title | ||||
|         ) | ||||
|  | ||||
|     pages = [] | ||||
|  | ||||
|     ### | ||||
|  | ||||
|     for mobile_type in ['Android', 'ZulipiOS']: | ||||
|         title = '%s usage' % (mobile_type,) | ||||
|  | ||||
|         query = ''' | ||||
|             select | ||||
|                 realm.domain, | ||||
|                 up.id user_id, | ||||
|                 client.name, | ||||
|                 sum(count) as hits, | ||||
|                 max(last_visit) as last_time | ||||
|             from zerver_useractivity ua | ||||
|             join zerver_client client on client.id = ua.client_id | ||||
|             join zerver_userprofile up on up.id = ua.user_profile_id | ||||
|             join zerver_realm realm on realm.id = up.realm_id | ||||
|             where | ||||
|                 client.name like '%s' | ||||
|             group by domain, up.id, client.name | ||||
|             having max(last_visit) > now() - interval '2 week' | ||||
|             order by domain, up.id, client.name | ||||
|         ''' % (mobile_type,) | ||||
|  | ||||
|         cols = [ | ||||
|             'Domain', | ||||
|             'User id', | ||||
|             'Name', | ||||
|             'Hits', | ||||
|             'Last time' | ||||
|         ] | ||||
|  | ||||
|         pages.append(get_page(query, cols, title)) | ||||
|  | ||||
|     ### | ||||
|  | ||||
|     title = 'Desktop users' | ||||
|  | ||||
|     query = ''' | ||||
|         select | ||||
|             realm.domain, | ||||
|             client.name, | ||||
|             sum(count) as hits, | ||||
|             max(last_visit) as last_time | ||||
|         from zerver_useractivity ua | ||||
|         join zerver_client client on client.id = ua.client_id | ||||
|         join zerver_userprofile up on up.id = ua.user_profile_id | ||||
|         join zerver_realm realm on realm.id = up.realm_id | ||||
|         where | ||||
|             client.name like 'desktop%%' | ||||
|         group by domain, client.name | ||||
|         having max(last_visit) > now() - interval '2 week' | ||||
|         order by domain, client.name | ||||
|     ''' | ||||
|  | ||||
|     cols = [ | ||||
|         'Domain', | ||||
|         'Client', | ||||
|         'Hits', | ||||
|         'Last time' | ||||
|     ] | ||||
|  | ||||
|     pages.append(get_page(query, cols, title)) | ||||
|  | ||||
|     ### | ||||
|  | ||||
|     title = 'Integrations by domain' | ||||
|  | ||||
|     query = ''' | ||||
|         select | ||||
|             realm.domain, | ||||
|             case | ||||
|                 when query like '%%external%%' then split_part(query, '/', 5) | ||||
|                 else client.name | ||||
|             end client_name, | ||||
|             sum(count) as hits, | ||||
|             max(last_visit) as last_time | ||||
|         from zerver_useractivity ua | ||||
|         join zerver_client client on client.id = ua.client_id | ||||
|         join zerver_userprofile up on up.id = ua.user_profile_id | ||||
|         join zerver_realm realm on realm.id = up.realm_id | ||||
|         where | ||||
|             (query in ('send_message_backend', '/api/v1/send_message') | ||||
|             and client.name not in ('Android', 'ZulipiOS') | ||||
|             and client.name not like 'test: Zulip%%' | ||||
|             ) | ||||
|         or | ||||
|             query like '%%external%%' | ||||
|         group by domain, client_name | ||||
|         having max(last_visit) > now() - interval '2 week' | ||||
|         order by domain, client_name | ||||
|     ''' | ||||
|  | ||||
|     cols = [ | ||||
|         'Domain', | ||||
|         'Client', | ||||
|         'Hits', | ||||
|         'Last time' | ||||
|     ] | ||||
|  | ||||
|     pages.append(get_page(query, cols, title)) | ||||
|  | ||||
|     ### | ||||
|  | ||||
|     title = 'Integrations by client' | ||||
|  | ||||
|     query = ''' | ||||
|         select | ||||
|             case | ||||
|                 when query like '%%external%%' then split_part(query, '/', 5) | ||||
|                 else client.name | ||||
|             end client_name, | ||||
|             realm.domain, | ||||
|             sum(count) as hits, | ||||
|             max(last_visit) as last_time | ||||
|         from zerver_useractivity ua | ||||
|         join zerver_client client on client.id = ua.client_id | ||||
|         join zerver_userprofile up on up.id = ua.user_profile_id | ||||
|         join zerver_realm realm on realm.id = up.realm_id | ||||
|         where | ||||
|             (query in ('send_message_backend', '/api/v1/send_message') | ||||
|             and client.name not in ('Android', 'ZulipiOS') | ||||
|             and client.name not like 'test: Zulip%%' | ||||
|             ) | ||||
|         or | ||||
|             query like '%%external%%' | ||||
|         group by client_name, domain | ||||
|         having max(last_visit) > now() - interval '2 week' | ||||
|         order by client_name, domain | ||||
|     ''' | ||||
|  | ||||
|     cols = [ | ||||
|         'Client', | ||||
|         'Domain', | ||||
|         'Hits', | ||||
|         'Last time' | ||||
|     ] | ||||
|  | ||||
|     pages.append(get_page(query, cols, title)) | ||||
|  | ||||
|     return pages | ||||
|  | ||||
@zulip_internal
@has_request_variables
def get_activity(request):
    # Render the main /activity dashboard: a "Counts" summary table, a
    # "Durations" table of per-user online time over the last day, and one
    # tab per ad hoc query.
    duration_content, realm_minutes = user_activity_intervals()
    data = [
        ('Counts', realm_summary_table(realm_minutes)),
        ('Durations', duration_content),
    ]
    for page in ad_hoc_queries():
        data.append((page['title'], page['content']))

    return render_to_response(
        'analytics/activity.html',
        dict(data=data, title='Activity', is_home=True),
        context_instance=RequestContext(request)
    )
|  | ||||
def get_user_activity_records_for_realm(realm, is_bot):
    # Fetch UserActivity rows for all active users (or bots, per is_bot) in
    # the given realm domain, sorted by email and then most recent visit
    # first, loading only the columns the activity reports render.
    fields = [
        'user_profile__full_name',
        'user_profile__email',
        'query',
        'client__name',
        'count',
        'last_visit',
    ]

    return (
        UserActivity.objects.filter(
            user_profile__realm__domain=realm,
            user_profile__is_active=True,
            user_profile__is_bot=is_bot,
        )
        .order_by("user_profile__email", "-last_visit")
        .select_related('user_profile', 'client')
        .only(*fields)
    )
|  | ||||
def get_user_activity_records_for_email(email):
    # Fetch all UserActivity rows for a single user (by email), most recent
    # visit first, restricted to the columns the reports render.
    fields = [
        'user_profile__full_name',
        'query',
        'client__name',
        'count',
        'last_visit',
    ]

    return (
        UserActivity.objects.filter(user_profile__email=email)
        .order_by("-last_visit")
        .select_related('user_profile', 'client')
        .only(*fields)
    )
|  | ||||
def raw_user_activity_table(records):
    # Build the "Raw Data" table: one row per UserActivity record, with the
    # last-visit timestamp formatted for display.
    cols = [
        'query',
        'client',
        'count',
        'last_visit',
    ]

    rows = [
        [
            record.query,
            record.client.name,
            record.count,
            format_date_for_activity_reports(record.last_visit),
        ]
        for record in records
    ]

    return make_table('Raw Data', cols, rows)
|  | ||||
| def get_user_activity_summary(records): | ||||
|     summary = {} | ||||
|     def update(action, record): | ||||
|         if action not in summary: | ||||
|             summary[action] = dict( | ||||
|                     count=record.count, | ||||
|                     last_visit=record.last_visit | ||||
|             ) | ||||
|         else: | ||||
|             summary[action]['count'] += record.count | ||||
|             summary[action]['last_visit'] = max( | ||||
|                     summary[action]['last_visit'], | ||||
|                     record.last_visit | ||||
|             ) | ||||
|  | ||||
|     if records: | ||||
|         summary['name'] = records[0].user_profile.full_name | ||||
|  | ||||
|     for record in records: | ||||
|         client = record.client.name | ||||
|         query = record.query | ||||
|  | ||||
|         update('use', record) | ||||
|  | ||||
|         if client == 'API': | ||||
|             m = re.match('/api/.*/external/(.*)', query) | ||||
|             if m: | ||||
|                 client = m.group(1) | ||||
|                 update(client, record) | ||||
|  | ||||
|         if client.startswith('desktop'): | ||||
|             update('desktop', record) | ||||
|         if client == 'website': | ||||
|             update('website', record) | ||||
|         if ('send_message' in query) or re.search('/api/.*/external/.*', query): | ||||
|             update('send', record) | ||||
|         if query in ['/json/update_pointer', '/api/v1/update_pointer']: | ||||
|             update('pointer', record) | ||||
|         update(client, record) | ||||
|  | ||||
|  | ||||
|     return summary | ||||
|  | ||||
def format_date_for_activity_reports(date):
    # Render a datetime in US/Eastern as 'YYYY-MM-DD HH:MM'; missing dates
    # become the empty string so report cells stay blank.
    if not date:
        return ''
    return date.astimezone(eastern_tz).strftime('%Y-%m-%d %H:%M')
|  | ||||
def user_activity_link(email):
    # Build an HTML link to the per-user activity drill-down page.
    url = urlresolvers.reverse('analytics.views.get_user_activity',
                               kwargs=dict(email=email))
    return mark_safe('<a href="%s">%s</a>' % (url, email))
|  | ||||
def realm_activity_link(realm):
    # Build an HTML link to the per-realm activity page.
    url = urlresolvers.reverse('analytics.views.get_realm_activity',
                               kwargs=dict(realm=realm))
    return mark_safe('<a href="%s">%s</a>' % (url, realm))
|  | ||||
def realm_client_table(user_summaries):
    # Flatten per-user activity summaries into one realm-wide "Clients"
    # table, skipping the synthetic summary keys that are not real clients.
    exclude_keys = frozenset([
        'internal',
        'name',
        'use',
        'send',
        'pointer',
        'website',
        'desktop',
    ])

    rows = []
    for email, user_summary in user_summaries.items():
        email_link = user_activity_link(email)
        name = user_summary['name']
        for client, stats in user_summary.items():
            if client in exclude_keys:
                continue
            rows.append([
                format_date_for_activity_reports(stats['last_visit']),
                client,
                name,
                email_link,
                stats['count'],
            ])

    # Most recent activity first; formatted dates sort chronologically.
    rows.sort(key=lambda r: r[0], reverse=True)

    cols = [
        'Last visit',
        'Client',
        'Name',
        'Email',
        'Count',
    ]

    return make_table('Clients', cols, rows)
|  | ||||
def user_activity_summary_table(user_summary):
    # Render one user's activity summary as a table, one row per action
    # key, most recent activity first.  'name' is metadata, not a client.
    rows = []
    for client, stats in user_summary.items():
        if client == 'name':
            continue
        rows.append([
            format_date_for_activity_reports(stats['last_visit']),
            client,
            stats['count'],
        ])

    rows.sort(key=lambda r: r[0], reverse=True)

    cols = [
        'last_visit',
        'client',
        'count',
    ]

    return make_table('User Activity', cols, rows)
|  | ||||
def realm_user_summary_table(all_records, admin_emails):
    # Build the per-realm "Summary" table: one row per user with total
    # messages sent and last-seen times for several activity categories.
    # Returns (user_records, rendered_html) so callers can reuse the
    # per-user summaries (e.g. for realm_client_table).
    user_records = {}

    def by_email(record):
        return record.user_profile.email

    # NOTE(review): itertools.groupby only groups *adjacent* records, so
    # this relies on all_records being sorted by email (the queryset from
    # get_user_activity_records_for_realm orders by user_profile__email).
    for email, records in itertools.groupby(all_records, by_email):
        user_records[email] = get_user_activity_summary(list(records))

    def get_last_visit(user_summary, k):
        if k in user_summary:
            return user_summary[k]['last_visit']
        else:
            return None

    def get_count(user_summary, k):
        if k in user_summary:
            return user_summary[k]['count']
        else:
            return ''

    def is_recent(val):
        # "Recent" means within the last five minutes.
        age = datetime.now(val.tzinfo) - val
        return age.total_seconds() < 5 * 60

    rows = []
    for email, user_summary in user_records.items():
        email_link = user_activity_link(email)
        sent_count = get_count(user_summary, 'send')
        cells = [user_summary['name'], email_link, sent_count]
        row_class = ''
        # Cell order must match the `cols` headers below; by_used_time
        # also depends on cells[3] being the 'use' timestamp.
        for field in ['use', 'send', 'pointer', 'desktop', 'ZulipiOS', 'Android']:
            val = get_last_visit(user_summary, field)
            if field == 'use':
                # CSS row classes: highlight currently-active users and
                # realm administrators.
                if val and is_recent(val):
                    row_class += ' recently_active'
                if email in admin_emails:
                    row_class += ' admin'
            val = format_date_for_activity_reports(val)
            cells.append(val)
        row = dict(cells=cells, row_class=row_class)
        rows.append(row)

    def by_used_time(row):
        # cells[3] is the formatted 'use' timestamp; 'YYYY-MM-DD HH:MM'
        # strings compare chronologically.
        return row['cells'][3]

    rows = sorted(rows, key=by_used_time, reverse=True)

    cols = [
            'Name',
            'Email',
            'Total sent',
            'Heard from',
            'Message sent',
            'Pointer motion',
            'Desktop',
            'ZulipiOS',
            'Android'
    ]

    title = 'Summary'

    content = make_table(title, cols, rows, has_row_class=True)
    return user_records, content
|  | ||||
@zulip_internal
def get_realm_activity(request, realm):
    # Render the per-realm activity page: human and bot summary tables, a
    # realm-wide client table, the sent-message history report, and a link
    # to the realm's external stats graph.
    #
    # Fixes over the previous version: removed the dead `all_records = {}`
    # initialization (it was immediately rebound inside the loop) and
    # replaced the PEP8-discouraged lambda-assigned-to-a-name (E731) with a
    # plain expression.
    data = []
    all_user_records = {}

    try:
        admins = Realm.objects.get(domain=realm).get_admin_users()
    except Realm.DoesNotExist:
        return HttpResponseNotFound("Realm %s does not exist" % (realm,))

    admin_emails = {admin.email for admin in admins}

    for is_bot, page_title in [(False, 'Humans'), (True, 'Bots')]:
        all_records = list(get_user_activity_records_for_realm(realm, is_bot))

        user_records, content = realm_user_summary_table(all_records, admin_emails)
        all_user_records.update(user_records)

        data += [(page_title, content)]

    page_title = 'Clients'
    content = realm_client_table(all_user_records)
    data += [(page_title, content)]

    page_title = 'History'
    content = sent_messages_report(realm)
    data += [(page_title, content)]

    # Graphite stat names use '_' where the realm domain has '.'.
    graphite_name = realm.replace('.', '_')
    realm_link = 'https://stats1.zulip.net:444/render/?from=-7days'
    realm_link += '&target=stats.gauges.staging.users.active.%s.0_16hr' % (graphite_name,)

    title = realm
    return render_to_response(
        'analytics/activity.html',
        dict(data=data, realm_link=realm_link, title=title),
        context_instance=RequestContext(request)
    )
|  | ||||
@zulip_internal
def get_user_activity(request, email):
    # Per-user drill-down page: an aggregated summary tab plus a raw
    # UserActivity rows tab.
    records = get_user_activity_records_for_email(email)

    user_summary = get_user_activity_summary(records)
    data = [
        ('Summary', user_activity_summary_table(user_summary)),
        ('Info', raw_user_activity_table(records)),
    ]

    return render_to_response(
        'analytics/activity.html',
        dict(data=data, title=email),
        context_instance=RequestContext(request)
    )
| @@ -1,137 +0,0 @@ | ||||
| import re | ||||
| from datetime import datetime | ||||
| from html import escape | ||||
| from typing import Any, Dict, List, Optional, Sequence | ||||
|  | ||||
| import pytz | ||||
| from django.conf import settings | ||||
| from django.db.backends.utils import CursorWrapper | ||||
| from django.db.models.query import QuerySet | ||||
| from django.template import loader | ||||
| from django.urls import reverse | ||||
| from markupsafe import Markup as mark_safe | ||||
|  | ||||
| eastern_tz = pytz.timezone("US/Eastern") | ||||
|  | ||||
|  | ||||
# Vestigial guard: the conditional import this protected has been removed,
# leaving a no-op.  NOTE(review): presumably safe to delete outright —
# confirm nothing relied on side effects of the import that used to live here.
if settings.BILLING_ENABLED:
    pass
|  | ||||
|  | ||||
def make_table(
    title: str, cols: Sequence[str], rows: Sequence[Any], has_row_class: bool = False
) -> str:
    """Render a generic activity table via the ad_hoc_query template.

    Unless has_row_class is set, each row is wrapped into the
    dict(cells=..., row_class=...) shape the template expects.
    """
    if not has_row_class:
        rows = [dict(cells=row, row_class=None) for row in rows]

    data = dict(title=title, cols=cols, rows=rows)

    return loader.render_to_string(
        "analytics/ad_hoc_query.html",
        dict(data=data),
    )
|  | ||||
|  | ||||
def dictfetchall(cursor: CursorWrapper) -> List[Dict[str, Any]]:
    "Returns all rows from a cursor as a dict"
    # cursor.description yields one tuple per column, whose first element
    # is the column name; pair those names with each fetched row.
    column_names = [col[0] for col in cursor.description]
    return [dict(zip(column_names, row)) for row in cursor.fetchall()]
|  | ||||
|  | ||||
def format_date_for_activity_reports(date: Optional[datetime]) -> str:
    # Render a datetime in US/Eastern as 'YYYY-MM-DD HH:MM'; missing dates
    # become the empty string so report cells stay blank.
    if not date:
        return ""
    return date.astimezone(eastern_tz).strftime("%Y-%m-%d %H:%M")
|  | ||||
|  | ||||
def user_activity_link(email: str, user_profile_id: int) -> mark_safe:
    # Imported here rather than at module scope to avoid a circular import
    # with the views package.
    from analytics.views.user_activity import get_user_activity

    url = reverse(get_user_activity, kwargs=dict(user_profile_id=user_profile_id))
    return mark_safe(f'<a href="{escape(url)}">{escape(email)}</a>')
|  | ||||
|  | ||||
def realm_activity_link(realm_str: str) -> mark_safe:
    # Imported here rather than at module scope to avoid a circular import
    # with the views package.
    from analytics.views.realm_activity import get_realm_activity

    url = reverse(get_realm_activity, kwargs=dict(realm_str=realm_str))
    return mark_safe(f'<a href="{escape(url)}">{escape(realm_str)}</a>')
|  | ||||
|  | ||||
def realm_stats_link(realm_str: str) -> mark_safe:
    # Imported here rather than at module scope to avoid a circular import
    # with the views package.
    from analytics.views.stats import stats_for_realm

    url = reverse(stats_for_realm, kwargs=dict(realm_str=realm_str))
    return mark_safe(f'<a href="{escape(url)}"><i class="fa fa-pie-chart"></i>{escape(realm_str)}</a>')
|  | ||||
|  | ||||
def remote_installation_stats_link(server_id: int, hostname: str) -> mark_safe:
    # Imported here rather than at module scope to avoid a circular import
    # with the views package.
    from analytics.views.stats import stats_for_remote_installation

    url = reverse(stats_for_remote_installation, kwargs=dict(remote_server_id=server_id))
    return mark_safe(f'<a href="{escape(url)}"><i class="fa fa-pie-chart"></i>{escape(hostname)}</a>')
|  | ||||
|  | ||||
def get_user_activity_summary(records: List[QuerySet]) -> Dict[str, Any]:
    #: The annotation above is clearly overly permissive; records are
    #: really UserActivity rows, and a TypedDict would describe the
    #: returned summary schema better.
    #
    # Aggregates one user's activity records into a dict keyed by "action":
    # real client names plus synthetic keys ('use', 'send', 'pointer',
    # 'desktop', 'website').  Each value carries a running 'count' and the
    # most recent 'last_visit'; 'name' and 'user_profile_id' identify the
    # user.
    summary: Dict[str, Any] = {}

    def update(action: str, record: QuerySet) -> None:
        entry = summary.get(action)
        if entry is None:
            summary[action] = dict(
                count=record.count,
                last_visit=record.last_visit,
            )
        else:
            entry["count"] += record.count
            entry["last_visit"] = max(entry["last_visit"], record.last_visit)

    if records:
        summary["name"] = records[0].user_profile.full_name
        summary["user_profile_id"] = records[0].user_profile.id

    for record in records:
        client = record.client.name
        query = str(record.query)

        # Every record counts toward overall usage.
        update("use", record)

        # API hits on /api/.../external/<service> are also credited to the
        # external service name (client is rebound, so the final
        # update(client, ...) below credits the service as well).
        if client == "API":
            m = re.match("/api/.*/external/(.*)", query)
            if m:
                client = m.group(1)
                update(client, record)

        if client.startswith("desktop"):
            update("desktop", record)
        if client == "website":
            update("website", record)
        if ("send_message" in query) or re.search("/api/.*/external/.*", query):
            update("send", record)
        if query in [
            "/json/update_pointer",
            "/json/users/me/pointer",
            "/api/v1/update_pointer",
            "update_pointer_backend",
        ]:
            update("pointer", record)
        update(client, record)

    return summary
| @@ -1,622 +0,0 @@ | ||||
| import itertools | ||||
| import time | ||||
| from collections import defaultdict | ||||
| from datetime import datetime, timedelta | ||||
| from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.db import connection | ||||
| from django.http import HttpRequest, HttpResponse | ||||
| from django.shortcuts import render | ||||
| from django.template import loader | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from markupsafe import Markup as mark_safe | ||||
| from psycopg2.sql import SQL, Composable, Literal | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS | ||||
| from analytics.views.activity_common import ( | ||||
|     dictfetchall, | ||||
|     format_date_for_activity_reports, | ||||
|     make_table, | ||||
|     realm_activity_link, | ||||
|     realm_stats_link, | ||||
|     remote_installation_stats_link, | ||||
| ) | ||||
| from analytics.views.support import get_plan_name | ||||
| from zerver.decorator import require_server_admin | ||||
| from zerver.lib.request import has_request_variables | ||||
| from zerver.lib.timestamp import timestamp_to_datetime | ||||
| from zerver.models import Realm, UserActivityInterval, UserProfile, get_org_type_display_name | ||||
|  | ||||
| if settings.BILLING_ENABLED: | ||||
|     from corporate.lib.stripe import ( | ||||
|         estimate_annual_recurring_revenue_by_realm, | ||||
|         get_realms_to_default_discount_dict, | ||||
|     ) | ||||
|  | ||||
|  | ||||
def get_realm_day_counts() -> Dict[str, Dict[str, str]]:
    """Return, per realm string_id, a pre-rendered run of <td> cells with
    the human message counts for each of the last 8 days (today first).

    Counts exclude bots and the zephyr_mirror / ZulipMonitoring clients.
    The value dict has a single key, 'cnts', holding the HTML string.
    """
    query = SQL(
        """
        select
            r.string_id,
            (now()::date - date_sent::date) age,
            count(*) cnt
        from zerver_message m
        join zerver_userprofile up on up.id = m.sender_id
        join zerver_realm r on r.id = up.realm_id
        join zerver_client c on c.id = m.sending_client_id
        where
            (not up.is_bot)
        and
            date_sent > now()::date - interval '8 day'
        and
            c.name not in ('zephyr_mirror', 'ZulipMonitoring')
        group by
            r.string_id,
            age
        order by
            r.string_id,
            age
    """
    )
    cursor = connection.cursor()
    cursor.execute(query)
    rows = dictfetchall(cursor)
    cursor.close()

    # counts[string_id][age_in_days] = message count for that day.
    counts: Dict[str, Dict[int, int]] = defaultdict(dict)
    for row in rows:
        counts[row["string_id"]][row["age"]] = row["cnt"]

    result = {}
    for string_id in counts:
        # Index 0 is today (a partial day); min/max highlighting considers
        # only the 7 complete days.
        raw_cnts = [counts[string_id].get(age, 0) for age in range(8)]
        min_cnt = min(raw_cnts[1:])
        max_cnt = max(raw_cnts[1:])

        def format_count(cnt: int, style: Optional[str] = None) -> str:
            # Renders one cell; reads min_cnt/max_cnt of the *current* loop
            # iteration via closure, so it must stay defined inside the loop.
            if style is not None:
                good_bad = style
            elif cnt == min_cnt:
                good_bad = "bad"
            elif cnt == max_cnt:
                good_bad = "good"
            else:
                good_bad = "neutral"

            return f'<td class="number {good_bad}">{cnt}</td>'

        # Today is always styled "neutral" since the day is incomplete.
        cnts = format_count(raw_cnts[0], "neutral") + "".join(map(format_count, raw_cnts[1:]))
        result[string_id] = dict(cnts=cnts)

    return result
|  | ||||
|  | ||||
def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
    """Render the installation-wide realm summary table as HTML.

    realm_minutes maps realm string_id -> total user-minutes online over
    the last day (computed by user_activity_intervals); it is folded into
    the per-realm 'hours' columns below.
    """
    now = timezone_now()

    # One row per realm that has recent human activity (or is on plan_type
    # 3), with DAU/WAU and human/bot counts joined in from the most recent
    # fills of the relevant analytics_realmcount properties.
    # NOTE(review): the '_14day_active_humans' alias name looks stale — the
    # property queried is 'realm_active_humans::day'; confirm the intended
    # window.
    query = SQL(
        """
        SELECT
            realm.string_id,
            realm.date_created,
            realm.plan_type,
            realm.org_type,
            coalesce(wau_table.value, 0) wau_count,
            coalesce(dau_table.value, 0) dau_count,
            coalesce(user_count_table.value, 0) user_profile_count,
            coalesce(bot_count_table.value, 0) bot_count
        FROM
            zerver_realm as realm
            LEFT OUTER JOIN (
                SELECT
                    value _14day_active_humans,
                    realm_id
                from
                    analytics_realmcount
                WHERE
                    property = 'realm_active_humans::day'
                    AND end_time = %(realm_active_humans_end_time)s
            ) as _14day_active_humans_table ON realm.id = _14day_active_humans_table.realm_id
            LEFT OUTER JOIN (
                SELECT
                    value,
                    realm_id
                from
                    analytics_realmcount
                WHERE
                    property = '7day_actives::day'
                    AND end_time = %(seven_day_actives_end_time)s
            ) as wau_table ON realm.id = wau_table.realm_id
            LEFT OUTER JOIN (
                SELECT
                    value,
                    realm_id
                from
                    analytics_realmcount
                WHERE
                    property = '1day_actives::day'
                    AND end_time = %(one_day_actives_end_time)s
            ) as dau_table ON realm.id = dau_table.realm_id
            LEFT OUTER JOIN (
                SELECT
                    value,
                    realm_id
                from
                    analytics_realmcount
                WHERE
                    property = 'active_users_audit:is_bot:day'
                    AND subgroup = 'false'
                    AND end_time = %(active_users_audit_end_time)s
            ) as user_count_table ON realm.id = user_count_table.realm_id
            LEFT OUTER JOIN (
                SELECT
                    value,
                    realm_id
                from
                    analytics_realmcount
                WHERE
                    property = 'active_users_audit:is_bot:day'
                    AND subgroup = 'true'
                    AND end_time = %(active_users_audit_end_time)s
            ) as bot_count_table ON realm.id = bot_count_table.realm_id
        WHERE
            _14day_active_humans IS NOT NULL
            or realm.plan_type = 3
        ORDER BY
            dau_count DESC,
            string_id ASC
    """
    )

    cursor = connection.cursor()
    cursor.execute(
        query,
        {
            "realm_active_humans_end_time": COUNT_STATS[
                "realm_active_humans::day"
            ].last_successful_fill(),
            "seven_day_actives_end_time": COUNT_STATS["7day_actives::day"].last_successful_fill(),
            "one_day_actives_end_time": COUNT_STATS["1day_actives::day"].last_successful_fill(),
            "active_users_audit_end_time": COUNT_STATS[
                "active_users_audit:is_bot:day"
            ].last_successful_fill(),
        },
    )
    rows = dictfetchall(cursor)
    cursor.close()

    # Fetch all the realm administrator users
    realm_owners: Dict[str, List[str]] = defaultdict(list)
    for up in UserProfile.objects.select_related("realm").filter(
        role=UserProfile.ROLE_REALM_OWNER,
        is_active=True,
    ):
        realm_owners[up.realm.string_id].append(up.delivery_email)

    # First pass: derive display fields from date_created and attach the
    # owner email list.
    for row in rows:
        row["date_created_day"] = row["date_created"].strftime("%Y-%m-%d")
        row["age_days"] = int((now - row["date_created"]).total_seconds() / 86400)
        row["is_new"] = row["age_days"] < 12 * 7
        row["realm_owner_emails"] = ", ".join(realm_owners[row["string_id"]])

    # get messages sent per day
    counts = get_realm_day_counts()
    for row in rows:
        try:
            row["history"] = counts[row["string_id"]]["cnts"]
        except Exception:
            # Realms with no recent messages simply get an empty history cell.
            row["history"] = ""

    # estimate annual subscription revenue
    total_arr = 0
    if settings.BILLING_ENABLED:
        estimated_arrs = estimate_annual_recurring_revenue_by_realm()
        realms_to_default_discount = get_realms_to_default_discount_dict()

        for row in rows:
            row["plan_type_string"] = get_plan_name(row["plan_type"])

            string_id = row["string_id"]

            if string_id in estimated_arrs:
                row["arr"] = estimated_arrs[string_id]

            # effective_rate is the percentage of list price actually paid
            # (100 minus the default discount), or "" when not applicable.
            if row["plan_type"] in [Realm.PLAN_TYPE_STANDARD, Realm.PLAN_TYPE_PLUS]:
                row["effective_rate"] = 100 - int(realms_to_default_discount.get(string_id, 0))
            elif row["plan_type"] == Realm.PLAN_TYPE_STANDARD_FREE:
                row["effective_rate"] = 0
            elif (
                row["plan_type"] == Realm.PLAN_TYPE_LIMITED
                and string_id in realms_to_default_discount
            ):
                row["effective_rate"] = 100 - int(realms_to_default_discount[string_id])
            else:
                row["effective_rate"] = ""

        total_arr += sum(estimated_arrs.values())

    for row in rows:
        row["org_type_string"] = get_org_type_display_name(row["org_type"])

    # augment data with realm_minutes
    total_hours = 0.0
    for row in rows:
        string_id = row["string_id"]
        minutes = realm_minutes.get(string_id, 0.0)
        hours = minutes / 60.0
        total_hours += hours
        row["hours"] = str(int(hours))
        try:
            row["hours_per_user"] = "{:.1f}".format(hours / row["dau_count"])
        except Exception:
            # Division by zero for realms with no daily actives; leave the
            # hours_per_user cell unset.
            pass

    # formatting
    for row in rows:
        # Replace the plain string_id with links; do this last since the
        # earlier passes key off the raw string_id.
        row["stats_link"] = realm_stats_link(row["string_id"])
        row["string_id"] = realm_activity_link(row["string_id"])

    # Count active sites
    def meets_goal(row: Dict[str, int]) -> bool:
        return row["dau_count"] >= 5

    num_active_sites = len(list(filter(meets_goal, rows)))

    # create totals
    total_dau_count = 0
    total_user_profile_count = 0
    total_bot_count = 0
    total_wau_count = 0
    for row in rows:
        total_dau_count += int(row["dau_count"])
        total_user_profile_count += int(row["user_profile_count"])
        total_bot_count += int(row["bot_count"])
        total_wau_count += int(row["wau_count"])

    # Synthetic "Total" row rendered above the per-realm rows.
    total_row = dict(
        string_id="Total",
        plan_type_string="",
        org_type_string="",
        effective_rate="",
        arr=total_arr,
        stats_link="",
        date_created_day="",
        realm_owner_emails="",
        dau_count=total_dau_count,
        user_profile_count=total_user_profile_count,
        bot_count=total_bot_count,
        hours=int(total_hours),
        wau_count=total_wau_count,
    )

    rows.insert(0, total_row)

    content = loader.render_to_string(
        "analytics/realm_summary_table.html",
        dict(
            rows=rows,
            num_active_sites=num_active_sites,
            utctime=now.strftime("%Y-%m-%d %H:%MZ"),
            billing_enabled=settings.BILLING_ENABLED,
        ),
    )
    return content
|  | ||||
|  | ||||
def user_activity_intervals() -> Tuple[str, Dict[str, float]]:
    """Build an HTML report of per-user online duration for the last 24 hours.

    Returns:
        A tuple (content, realm_minutes): `content` is HTML-safe ``<pre>``
        markup listing each user's online duration grouped by realm, and
        `realm_minutes` maps each realm's string_id to its aggregate online
        time in minutes.

    Note: the return annotation previously used ``mark_safe`` (a function,
    not a type); ``mark_safe`` returns a ``str`` subclass, so ``str`` is the
    correct annotation and callers are unaffected.
    """
    day_end = timestamp_to_datetime(time.time())
    day_start = day_end - timedelta(hours=24)

    output = "Per-user online duration for the last 24 hours:\n"
    total_duration = timedelta(0)

    all_intervals = (
        UserActivityInterval.objects.filter(
            end__gte=day_start,
            start__lte=day_end,
        )
        .select_related(
            "user_profile",
            "user_profile__realm",
        )
        .only(
            "start",
            "end",
            "user_profile__delivery_email",
            "user_profile__realm__string_id",
        )
        .order_by(
            "user_profile__realm__string_id",
            "user_profile__delivery_email",
        )
    )

    # Named helper functions instead of lambdas assigned to names (PEP 8,
    # flake8 E731); behavior is unchanged.
    def by_string_id(interval: UserActivityInterval) -> str:
        return interval.user_profile.realm.string_id

    def by_email(interval: UserActivityInterval) -> str:
        return interval.user_profile.delivery_email

    realm_minutes: Dict[str, float] = {}

    # itertools.groupby only groups adjacent rows, so it depends on the
    # order_by above: rows arrive sorted by realm, then by email.
    for string_id, realm_intervals in itertools.groupby(all_intervals, by_string_id):
        realm_duration = timedelta(0)
        output += f"<hr>{string_id}\n"
        for email, intervals in itertools.groupby(realm_intervals, by_email):
            duration = timedelta(0)
            for interval in intervals:
                # Clamp each interval to the 24-hour reporting window so
                # partially-overlapping intervals are only counted in part.
                start = max(day_start, interval.start)
                end = min(day_end, interval.end)
                duration += end - start

            total_duration += duration
            realm_duration += duration
            output += f"  {email:<37}{duration}\n"

        realm_minutes[string_id] = realm_duration.total_seconds() / 60

    output += f"\nTotal duration:                      {total_duration}\n"
    output += f"\nTotal duration in minutes:           {total_duration.total_seconds() / 60.}\n"
    output += f"Total duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}"
    content = mark_safe("<pre>" + output + "</pre>")
    return content, realm_minutes
|  | ||||
|  | ||||
def ad_hoc_queries() -> List[Dict[str, str]]:
    """Run a fixed set of raw-SQL usage reports and return one rendered page
    (dict with "title" and "content" HTML) per report.
    """

    def get_page(
        query: Composable, cols: Sequence[str], title: str, totals_columns: Sequence[int] = ()
    ) -> Dict[str, str]:
        """Execute `query` and render its result set as an HTML table.

        Args:
            query: the SQL to run.
            cols: column headers; some header names ("Realm", "Last time",
                "Last visit", "Hostname") trigger per-column cell rewriting.
            title: table title.
            totals_columns: 0-based indexes of numeric columns to sum into a
                "Total" row prepended to the table; empty disables the totals
                row.  The default is an immutable tuple rather than the
                previous mutable ``[]`` (shared-mutable-default pitfall).
        """
        # Context-manager cursor guarantees the cursor is closed even if
        # execute() raises (the previous code leaked it on exception).
        with connection.cursor() as cursor:
            cursor.execute(query)
            # Convert immutable result tuples to lists so fix_rows below can
            # rewrite individual cells in place.
            rows = list(map(list, cursor.fetchall()))

        def fix_rows(
            i: int, fixup_func: Union[Callable[[str], str], Callable[[datetime], str]]
        ) -> None:
            # Rewrite column i of every row in place (linkify realm names,
            # format timestamps).  The annotation previously used mark_safe
            # (a function) as a type; mark_safe returns a str subclass.
            for row in rows:
                row[i] = fixup_func(row[i])

        total_row = []
        for i, col in enumerate(cols):
            if col == "Realm":
                fix_rows(i, realm_activity_link)
            elif col in ["Last time", "Last visit"]:
                fix_rows(i, format_date_for_activity_reports)
            elif col == "Hostname":
                # row[0] is assumed to be the server id for this report.
                for row in rows:
                    row[i] = remote_installation_stats_link(row[0], row[i])
            if len(totals_columns) > 0:
                if i == 0:
                    total_row.append("Total")
                elif i in totals_columns:
                    # Sum non-NULL values only; NULLs come from left joins.
                    total_row.append(str(sum(row[i] for row in rows if row[i] is not None)))
                else:
                    total_row.append("")
        if len(totals_columns) > 0:
            rows.insert(0, total_row)

        content = make_table(title, cols, rows)

        return dict(
            content=content,
            title=title,
        )

    pages = []

    ###

    for mobile_type in ["Android", "ZulipiOS"]:
        title = f"{mobile_type} usage"

        query = SQL(
            """
            select
                realm.string_id,
                up.id user_id,
                client.name,
                sum(count) as hits,
                max(last_visit) as last_time
            from zerver_useractivity ua
            join zerver_client client on client.id = ua.client_id
            join zerver_userprofile up on up.id = ua.user_profile_id
            join zerver_realm realm on realm.id = up.realm_id
            where
                client.name like {mobile_type}
            group by string_id, up.id, client.name
            having max(last_visit) > now() - interval '2 week'
            order by string_id, up.id, client.name
        """
        ).format(
            mobile_type=Literal(mobile_type),
        )

        cols = [
            "Realm",
            "User id",
            "Name",
            "Hits",
            "Last time",
        ]

        pages.append(get_page(query, cols, title))

    ###

    title = "Desktop users"

    query = SQL(
        """
        select
            realm.string_id,
            client.name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            client.name like 'desktop%%'
        group by string_id, client.name
        having max(last_visit) > now() - interval '2 week'
        order by string_id, client.name
    """
    )

    cols = [
        "Realm",
        "Client",
        "Hits",
        "Last time",
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = "Integrations by realm"

    query = SQL(
        """
        select
            realm.string_id,
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by string_id, client_name
        having max(last_visit) > now() - interval '2 week'
        order by string_id, client_name
    """
    )

    cols = [
        "Realm",
        "Client",
        "Hits",
        "Last time",
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = "Integrations by client"

    query = SQL(
        """
        select
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            realm.string_id,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by client_name, string_id
        having max(last_visit) > now() - interval '2 week'
        order by client_name, string_id
    """
    )

    cols = [
        "Client",
        "Realm",
        "Hits",
        "Last time",
    ]

    pages.append(get_page(query, cols, title))

    title = "Remote Zulip servers"

    query = SQL(
        """
        with icount as (
            select
                server_id,
                max(value) as max_value,
                max(end_time) as max_end_time
            from zilencer_remoteinstallationcount
            where
                property='active_users:is_bot:day'
                and subgroup='false'
            group by server_id
            ),
        remote_push_devices as (
            select server_id, count(distinct(user_id)) as push_user_count from zilencer_remotepushdevicetoken
            group by server_id
        )
        select
            rserver.id,
            rserver.hostname,
            rserver.contact_email,
            max_value,
            push_user_count,
            max_end_time
        from zilencer_remotezulipserver rserver
        left join icount on icount.server_id = rserver.id
        left join remote_push_devices on remote_push_devices.server_id = rserver.id
        order by max_value DESC NULLS LAST, push_user_count DESC NULLS LAST
    """
    )

    cols = [
        "ID",
        "Hostname",
        "Contact email",
        "Analytics users",
        "Mobile users",
        "Last update time",
    ]

    pages.append(get_page(query, cols, title, totals_columns=[3, 4]))

    return pages
|  | ||||
|  | ||||
@require_server_admin
@has_request_variables
def get_installation_activity(request: HttpRequest) -> HttpResponse:
    """Render the server-wide /activity page: summary counts, per-user
    online durations, and a series of ad hoc usage reports."""
    duration_content, realm_minutes = user_activity_intervals()
    counts_content: str = realm_summary_table(realm_minutes)

    data = [
        ("Counts", counts_content),
        ("Durations", duration_content),
    ]
    # Append one (title, content) tab per ad hoc report.
    data.extend((page["title"], page["content"]) for page in ad_hoc_queries())

    return render(
        request,
        "analytics/activity.html",
        context=dict(data=data, title="Activity", is_home=True),
    )
| @@ -1,259 +0,0 @@ | ||||
| import itertools | ||||
| from datetime import datetime | ||||
| from typing import Any, Dict, List, Optional, Set, Tuple | ||||
|  | ||||
| from django.db import connection | ||||
| from django.db.models.query import QuerySet | ||||
| from django.http import HttpRequest, HttpResponse, HttpResponseNotFound | ||||
| from django.shortcuts import render | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from psycopg2.sql import SQL | ||||
|  | ||||
| from analytics.views.activity_common import ( | ||||
|     format_date_for_activity_reports, | ||||
|     get_user_activity_summary, | ||||
|     make_table, | ||||
|     user_activity_link, | ||||
| ) | ||||
| from zerver.decorator import require_server_admin | ||||
| from zerver.models import Realm, UserActivity | ||||
|  | ||||
|  | ||||
def get_user_activity_records_for_realm(realm: str, is_bot: bool) -> QuerySet:
    """Fetch UserActivity rows for a realm's active users (humans or bots),
    ordered by delivery email and then most-recent visit first, restricted
    to just the fields the activity reports need."""
    needed_fields = (
        "user_profile__full_name",
        "user_profile__delivery_email",
        "query",
        "client__name",
        "count",
        "last_visit",
    )
    return (
        UserActivity.objects.filter(
            user_profile__realm__string_id=realm,
            user_profile__is_active=True,
            user_profile__is_bot=is_bot,
        )
        .order_by("user_profile__delivery_email", "-last_visit")
        .select_related("user_profile", "client")
        .only(*needed_fields)
    )
|  | ||||
|  | ||||
def realm_user_summary_table(
    all_records: List[QuerySet], admin_emails: Set[str]
) -> Tuple[Dict[str, Any], str]:
    """Summarize per-user activity for one realm and render the "Summary" table.

    Args:
        all_records: UserActivity rows, pre-sorted by delivery email
            (the itertools.groupby call below depends on that ordering).
        admin_emails: delivery emails of the realm's human administrators,
            used to tag their rows with the "admin" CSS class.

    Returns:
        (user_records, content): the per-email activity summary dicts, and
        the rendered HTML table.
    """
    user_records = {}

    def by_email(record: QuerySet) -> str:
        return record.user_profile.delivery_email

    # groupby only groups adjacent records, so this relies on all_records
    # being sorted by email (see get_user_activity_records_for_realm).
    for email, records in itertools.groupby(all_records, by_email):
        user_records[email] = get_user_activity_summary(list(records))

    def get_last_visit(user_summary: Dict[str, Dict[str, datetime]], k: str) -> Optional[datetime]:
        if k in user_summary:
            return user_summary[k]["last_visit"]
        else:
            return None

    def get_count(user_summary: Dict[str, Dict[str, str]], k: str) -> str:
        if k in user_summary:
            return user_summary[k]["count"]
        else:
            return ""

    def is_recent(val: datetime) -> bool:
        # "Recent" means seen within the last five minutes.
        age = timezone_now() - val
        return age.total_seconds() < 5 * 60

    rows = []
    for email, user_summary in user_records.items():
        email_link = user_activity_link(email, user_summary["user_profile_id"])
        sent_count = get_count(user_summary, "send")
        cells = [user_summary["name"], email_link, sent_count]
        row_class = ""
        # One cell per activity category, in the same order as the "Heard
        # from" ... "Android" columns below.
        for field in ["use", "send", "pointer", "desktop", "ZulipiOS", "Android"]:
            visit = get_last_visit(user_summary, field)
            if field == "use":
                # Highlight users seen in the last few minutes, and mark
                # realm administrators with an extra CSS class.
                if visit and is_recent(visit):
                    row_class += " recently_active"
                if email in admin_emails:
                    row_class += " admin"
            val = format_date_for_activity_reports(visit)
            cells.append(val)
        row = dict(cells=cells, row_class=row_class)
        rows.append(row)

    def by_used_time(row: Dict[str, Any]) -> str:
        # cells[3] is the formatted "Heard from" date.  NOTE(review): this
        # sorts the formatted strings, which assumes the date format sorts
        # chronologically — confirm against format_date_for_activity_reports.
        return row["cells"][3]

    rows = sorted(rows, key=by_used_time, reverse=True)

    cols = [
        "Name",
        "Email",
        "Total sent",
        "Heard from",
        "Message sent",
        "Pointer motion",
        "Desktop",
        "ZulipiOS",
        "Android",
    ]

    title = "Summary"

    content = make_table(title, cols, rows, has_row_class=True)
    return user_records, content
|  | ||||
|  | ||||
def realm_client_table(user_summaries: Dict[str, Dict[str, Any]]) -> str:
    """Build the "Clients" table: one row per (user, client) pair with its
    hit count and last-visit time, most recent first."""
    # Summary keys that are metadata or already covered elsewhere in the
    # report, not real client entries.
    exclude_keys = [
        "internal",
        "name",
        "user_profile_id",
        "use",
        "send",
        "pointer",
        "website",
        "desktop",
    ]

    rows = []
    for email, summary in user_summaries.items():
        email_link = user_activity_link(email, summary["user_profile_id"])
        name = summary["name"]
        for client, client_stats in summary.items():
            if client in exclude_keys:
                continue
            rows.append(
                [
                    format_date_for_activity_reports(client_stats["last_visit"]),
                    client,
                    name,
                    email_link,
                    client_stats["count"],
                ]
            )

    # Newest activity first, sorting on the formatted date column.
    rows.sort(key=lambda r: r[0], reverse=True)

    cols = [
        "Last visit",
        "Client",
        "Name",
        "Email",
        "Count",
    ]

    return make_table("Clients", cols, rows)
|  | ||||
|  | ||||
def sent_messages_report(realm: str) -> str:
    """Render a per-day table of human vs. bot message counts for the last
    two weeks in the given realm.

    Args:
        realm: the realm's string_id (bound twice into the query, once for
            the humans subquery and once for the bots subquery).
    """
    title = "Recently sent messages for " + realm

    cols = [
        "Date",
        "Humans",
        "Bots",
    ]

    query = SQL(
        """
        select
            series.day::date,
            humans.cnt,
            bots.cnt
        from (
            select generate_series(
                (now()::date - interval '2 week'),
                now()::date,
                interval '1 day'
            ) as day
        ) as series
        left join (
            select
                date_sent::date date_sent,
                count(*) cnt
            from zerver_message m
            join zerver_userprofile up on up.id = m.sender_id
            join zerver_realm r on r.id = up.realm_id
            where
                r.string_id = %s
            and
                (not up.is_bot)
            and
                date_sent > now() - interval '2 week'
            group by
                date_sent::date
            order by
                date_sent::date
        ) humans on
            series.day = humans.date_sent
        left join (
            select
                date_sent::date date_sent,
                count(*) cnt
            from zerver_message m
            join zerver_userprofile up on up.id = m.sender_id
            join zerver_realm r on r.id = up.realm_id
            where
                r.string_id = %s
            and
                up.is_bot
            and
                date_sent > now() - interval '2 week'
            group by
                date_sent::date
            order by
                date_sent::date
        ) bots on
            series.day = bots.date_sent
    """
    )
    # Context-manager cursor guarantees the cursor is closed even if
    # execute() raises (the previous code leaked it on exception).
    with connection.cursor() as cursor:
        cursor.execute(query, [realm, realm])
        rows = cursor.fetchall()

    return make_table(title, cols, rows)
|  | ||||
|  | ||||
@require_server_admin
def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse:
    """Render the per-realm /activity page: human and bot summary tables,
    a per-client breakdown, and a recent message-volume history."""
    try:
        admins = Realm.objects.get(string_id=realm_str).get_human_admin_users()
    except Realm.DoesNotExist:
        return HttpResponseNotFound()

    admin_emails = {admin.delivery_email for admin in admins}

    data: List[Tuple[str, str]] = []
    all_user_records: Dict[str, Any] = {}

    # One summary tab for humans, one for bots.
    for is_bot, page_title in [(False, "Humans"), (True, "Bots")]:
        records = list(get_user_activity_records_for_realm(realm_str, is_bot))
        user_records, content = realm_user_summary_table(records, admin_emails)
        all_user_records.update(user_records)
        data.append((page_title, content))

    data.append(("Clients", realm_client_table(all_user_records)))
    data.append(("History", sent_messages_report(realm_str)))

    return render(
        request,
        "analytics/activity.html",
        context=dict(data=data, realm_link=None, title=realm_str),
    )
| @@ -1,514 +0,0 @@ | ||||
| import logging | ||||
| from collections import defaultdict | ||||
| from datetime import datetime, timedelta, timezone | ||||
| from typing import Any, Dict, List, Optional, Tuple, Type, Union, cast | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.db.models.query import QuerySet | ||||
| from django.http import HttpRequest, HttpResponse, HttpResponseNotFound | ||||
| from django.shortcuts import render | ||||
| from django.utils import translation | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from django.utils.translation import gettext as _ | ||||
|  | ||||
| from analytics.lib.counts import COUNT_STATS, CountStat | ||||
| from analytics.lib.time_utils import time_range | ||||
| from analytics.models import ( | ||||
|     BaseCount, | ||||
|     InstallationCount, | ||||
|     RealmCount, | ||||
|     StreamCount, | ||||
|     UserCount, | ||||
|     installation_epoch, | ||||
| ) | ||||
| from zerver.decorator import ( | ||||
|     require_non_guest_user, | ||||
|     require_server_admin, | ||||
|     require_server_admin_api, | ||||
|     to_utc_datetime, | ||||
|     zulip_login_required, | ||||
| ) | ||||
| from zerver.lib.exceptions import JsonableError | ||||
| from zerver.lib.i18n import get_and_set_request_language, get_language_translation_data | ||||
| from zerver.lib.request import REQ, has_request_variables | ||||
| from zerver.lib.response import json_success | ||||
| from zerver.lib.timestamp import convert_to_UTC | ||||
| from zerver.lib.validator import to_non_negative_int | ||||
| from zerver.models import Client, Realm, UserProfile, get_realm | ||||
|  | ||||
| if settings.ZILENCER_ENABLED: | ||||
|     from zilencer.models import RemoteInstallationCount, RemoteRealmCount, RemoteZulipServer | ||||
|  | ||||
| MAX_TIME_FOR_FULL_ANALYTICS_GENERATION = timedelta(days=1, minutes=30) | ||||
|  | ||||
|  | ||||
def is_analytics_ready(realm: Realm) -> bool:
    """True once the realm is old enough that a full analytics generation
    pass has had time to complete since the realm was created."""
    realm_age = timezone_now() - realm.date_created
    return realm_age > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION
|  | ||||
|  | ||||
def render_stats(
    request: HttpRequest,
    data_url_suffix: str,
    target_name: str,
    for_installation: bool = False,
    remote: bool = False,
    analytics_ready: bool = True,
) -> HttpResponse:
    """Render the /stats page shell; the charts themselves are fetched by
    the frontend from data URLs derived from data_url_suffix.

    Args:
        request: must carry an authenticated user.
        data_url_suffix: appended to the chart-data endpoints client-side.
        target_name: human-readable name of the realm/installation shown.
        for_installation: whether this is an installation-wide view.
        remote: whether the target lives on a remote Zulip server.
        analytics_ready: if False, the template shows a "not ready" notice.
    """
    assert request.user.is_authenticated

    page_params = dict(
        data_url_suffix=data_url_suffix,
        for_installation=for_installation,
        remote=remote,
    )

    request_language = get_and_set_request_language(
        request,
        request.user.default_language,
        translation.get_language_from_path(request.path_info),
    )
    page_params["translation_data"] = get_language_translation_data(request_language)

    context = dict(
        target_name=target_name,
        page_params=page_params,
        analytics_ready=analytics_ready,
    )
    return render(request, "analytics/stats.html", context=context)
|  | ||||
|  | ||||
@zulip_login_required
def stats(request: HttpRequest) -> HttpResponse:
    """Stats page for the requesting user's own realm; guests are rejected."""
    assert request.user.is_authenticated
    if request.user.is_guest:
        # TODO: Make @zulip_login_required pass the UserProfile so we
        # can use @require_member_or_admin
        raise JsonableError(_("Not allowed for guest users"))

    realm = request.user.realm
    return render_stats(
        request,
        "",
        realm.name or realm.string_id,
        analytics_ready=is_analytics_ready(realm),
    )
|  | ||||
|  | ||||
@require_server_admin
@has_request_variables
def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse:
    """Server-admin view of a single realm's stats page; 404 on unknown realm."""
    try:
        realm = get_realm(realm_str)
    except Realm.DoesNotExist:
        return HttpResponseNotFound()

    display_name = realm.name or realm.string_id
    return render_stats(
        request,
        f"/realm/{realm_str}",
        display_name,
        analytics_ready=is_analytics_ready(realm),
    )
|  | ||||
|  | ||||
@require_server_admin
@has_request_variables
def stats_for_remote_realm(
    request: HttpRequest, remote_server_id: int, remote_realm_id: int
) -> HttpResponse:
    """Server-admin stats view for a realm hosted on a remote Zulip server."""
    assert settings.ZILENCER_ENABLED
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    data_url_suffix = f"/remote/{server.id}/realm/{remote_realm_id}"
    target_name = f"Realm {remote_realm_id} on server {server.hostname}"
    return render_stats(request, data_url_suffix, target_name)
|  | ||||
|  | ||||
@require_server_admin_api
@has_request_variables
def get_chart_data_for_realm(
    request: HttpRequest, user_profile: UserProfile, realm_str: str, **kwargs: Any
) -> HttpResponse:
    """Chart-data endpoint scoped to a specific realm (server admins only)."""
    try:
        realm = get_realm(realm_str)
    except Realm.DoesNotExist:
        raise JsonableError(_("Invalid organization"))

    return get_chart_data(
        request=request,
        user_profile=user_profile,
        realm=realm,
        **kwargs,
    )
|  | ||||
|  | ||||
@require_server_admin_api
@has_request_variables
def get_chart_data_for_remote_realm(
    request: HttpRequest,
    user_profile: UserProfile,
    remote_server_id: int,
    remote_realm_id: int,
    **kwargs: Any,
) -> HttpResponse:
    """Chart-data endpoint for a realm hosted on a remote Zulip server."""
    assert settings.ZILENCER_ENABLED
    remote_server = RemoteZulipServer.objects.get(id=remote_server_id)
    return get_chart_data(
        request=request,
        user_profile=user_profile,
        server=remote_server,
        remote=True,
        remote_realm_id=int(remote_realm_id),
        **kwargs,
    )
|  | ||||
|  | ||||
@require_server_admin
def stats_for_installation(request: HttpRequest) -> HttpResponse:
    """Installation-wide stats page for server administrators."""
    return render_stats(request, "/installation", "installation", for_installation=True)
|  | ||||
|  | ||||
@require_server_admin
def stats_for_remote_installation(request: HttpRequest, remote_server_id: int) -> HttpResponse:
    """Installation-wide stats page for a remote Zulip server."""
    assert settings.ZILENCER_ENABLED
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    # Keyword arguments replace the previous bare positional `True, True`
    # for readability; the call is otherwise identical.
    return render_stats(
        request,
        f"/remote/{server.id}/installation",
        f"remote installation {server.hostname}",
        for_installation=True,
        remote=True,
    )
|  | ||||
|  | ||||
@require_server_admin_api
@has_request_variables
def get_chart_data_for_installation(
    request: HttpRequest, user_profile: UserProfile, chart_name: str = REQ(), **kwargs: Any
) -> HttpResponse:
    """Installation-wide chart-data endpoint (server admins only)."""
    return get_chart_data(
        request=request,
        user_profile=user_profile,
        for_installation=True,
        **kwargs,
    )
|  | ||||
|  | ||||
@require_server_admin_api
@has_request_variables
def get_chart_data_for_remote_installation(
    request: HttpRequest,
    user_profile: UserProfile,
    remote_server_id: int,
    chart_name: str = REQ(),
    **kwargs: Any,
) -> HttpResponse:
    """Installation-wide chart-data endpoint for a remote Zulip server."""
    assert settings.ZILENCER_ENABLED
    remote_server = RemoteZulipServer.objects.get(id=remote_server_id)
    return get_chart_data(
        request=request,
        user_profile=user_profile,
        server=remote_server,
        for_installation=True,
        remote=True,
        **kwargs,
    )
|  | ||||
|  | ||||
@require_non_guest_user
@has_request_variables
def get_chart_data(
    request: HttpRequest,
    user_profile: UserProfile,
    chart_name: str = REQ(),
    min_length: Optional[int] = REQ(converter=to_non_negative_int, default=None),
    start: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    end: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    realm: Optional[Realm] = None,
    for_installation: bool = False,
    remote: bool = False,
    remote_realm_id: Optional[int] = None,
    server: Optional["RemoteZulipServer"] = None,
) -> HttpResponse:
    """Serve the JSON time series backing one chart on the /stats page.

    HTTP parameters: chart_name selects which statistic(s) to return;
    min_length pads the series to at least that many data points;
    start/end optionally bound the time range.  The remaining keyword
    arguments (realm, for_installation, remote, remote_realm_id, server)
    are supplied by the wrapper views that call this function directly,
    not by the HTTP request.
    """
    # Pick the aggregate table based on scope: installation vs. realm,
    # and local server vs. remote (zilencer-hosted) server data.
    TableType = Union[
        Type["RemoteInstallationCount"],
        Type[InstallationCount],
        Type["RemoteRealmCount"],
        Type[RealmCount],
    ]
    if for_installation:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table: TableType = RemoteInstallationCount
            assert server is not None
        else:
            aggregate_table = InstallationCount
    else:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table = RemoteRealmCount
            assert server is not None
            assert remote_realm_id is not None
        else:
            aggregate_table = RealmCount

    # Per-user charts read from UserCount in addition to the aggregate table.
    tables: Union[Tuple[TableType], Tuple[TableType, Type[UserCount]]]

    # Each chart_name fixes: the CountStats to query, the tables to read,
    # how raw subgroup keys map to display labels, how labels are ordered,
    # and whether subgroups with no data still appear (as all-zero series).
    if chart_name == "number_of_humans":
        stats = [
            COUNT_STATS["1day_actives::day"],
            COUNT_STATS["realm_active_humans::day"],
            COUNT_STATS["active_users_audit:is_bot:day"],
        ]
        tables = (aggregate_table,)
        subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = {
            stats[0]: {None: "_1day"},
            stats[1]: {None: "_15day"},
            stats[2]: {"false": "all_time"},
        }
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_over_time":
        stats = [COUNT_STATS["messages_sent:is_bot:hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_message_type":
        stats = [COUNT_STATS["messages_sent:message_type:day"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {
            stats[0]: {
                "public_stream": _("Public streams"),
                "private_stream": _("Private streams"),
                "private_message": _("Private messages"),
                "huddle_message": _("Group private messages"),
            }
        }
        labels_sort_function = lambda data: sort_by_totals(data["everyone"])
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_client":
        stats = [COUNT_STATS["messages_sent:client:day"]]
        tables = (aggregate_table, UserCount)
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {
            stats[0]: {str(id): name for id, name in Client.objects.values_list("id", "name")}
        }
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    elif chart_name == "messages_read_over_time":
        stats = [COUNT_STATS["messages_read::hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {None: "read"}}
        labels_sort_function = None
        include_empty_subgroups = True
    else:
        raise JsonableError(_("Unknown chart name: {}").format(chart_name))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(
            _("Start time is later than end time. Start: {start}, End: {end}").format(
                start=start,
                end=end,
            )
        )

    if realm is None:
        # Note that this value is invalid for Remote tables; be
        # careful not to access it in those code paths.
        realm = user_profile.realm

    if remote:
        # For remote servers, we don't have fillstate data, and thus
        # should simply use the first and last data points for the
        # table.
        assert server is not None
        assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
        aggregate_table_remote = cast(
            Union[Type[RemoteInstallationCount], Type[RemoteRealmCount]], aggregate_table
        )  # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
        if not aggregate_table_remote.objects.filter(server=server).exists():
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator.")
            )
        if start is None:
            first = aggregate_table_remote.objects.filter(server=server).first()
            assert first is not None
            start = first.end_time
        if end is None:
            last = aggregate_table_remote.objects.filter(server=server).last()
            assert last is not None
            end = last.end_time
    else:
        # Otherwise, we can use tables on the current server to
        # determine a nice range, and some additional validation.
        if start is None:
            if for_installation:
                start = installation_epoch()
            else:
                start = realm.date_created
        if end is None:
            end = max(
                stat.last_successful_fill() or datetime.min.replace(tzinfo=timezone.utc)
                for stat in stats
            )

        if start > end and (timezone_now() - start > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION):
            logging.warning(
                "User from realm %s attempted to access /stats, but the computed "
                "start time: %s (creation of realm or installation) is later than the computed "
                "end time: %s (last successful analytics update). Is the "
                "analytics cron job running?",
                realm.string_id,
                start,
                end,
            )
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator.")
            )

    # All stats for one chart must share a frequency, since the response
    # carries a single end_times axis.
    assert len({stat.frequency for stat in stats}) == 1
    end_times = time_range(start, end, stats[0].frequency, min_length)
    data: Dict[str, Any] = {
        "end_times": [int(end_time.timestamp()) for end_time in end_times],
        "frequency": stats[0].frequency,
    }

    # Maps each table to the response key its series are reported under.
    aggregation_level = {
        InstallationCount: "everyone",
        RealmCount: "everyone",
        UserCount: "user",
    }
    if settings.ZILENCER_ENABLED:
        aggregation_level[RemoteInstallationCount] = "everyone"
        aggregation_level[RemoteRealmCount] = "everyone"

    # -1 is a placeholder value, since there is no relevant filtering on InstallationCount
    id_value = {
        InstallationCount: -1,
        RealmCount: realm.id,
        UserCount: user_profile.id,
    }
    if settings.ZILENCER_ENABLED:
        if server is not None:
            id_value[RemoteInstallationCount] = server.id
        # TODO: RemoteRealmCount logic doesn't correctly handle
        # filtering by server_id as well.
        if remote_realm_id is not None:
            id_value[RemoteRealmCount] = remote_realm_id

    for table in tables:
        data[aggregation_level[table]] = {}
        for stat in stats:
            data[aggregation_level[table]].update(
                get_time_series_by_subgroup(
                    stat,
                    table,
                    id_value[table],
                    end_times,
                    subgroup_to_label[stat],
                    include_empty_subgroups,
                )
            )

    if labels_sort_function is not None:
        data["display_order"] = labels_sort_function(data)
    else:
        data["display_order"] = None
    return json_success(request, data=data)
|  | ||||
|  | ||||
def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
    """Order labels by descending series total; ties are broken by label in
    reverse lexicographic order (matching tuple comparison)."""
    ranked = sorted(
        value_arrays.items(),
        key=lambda item: (sum(item[1]), item[0]),
        reverse=True,
    )
    return [label for label, _ in ranked]
|  | ||||
|  | ||||
# For any given user, we want to show a fixed set of clients in the chart,
# regardless of the time aggregation or whether we're looking at realm or
# user data. This fixed set ideally includes the clients most important in
# understanding the realm's traffic and the user's traffic. This function
# tries to rank the clients so that taking the first N elements of the
# sorted list has a reasonable chance of doing so.
def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
    def order_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
        # Same ordering contract as sort_by_totals: descending total,
        # ties broken by label in reverse lexicographic order.
        pairs = sorted(
            ((sum(values), label) for label, values in value_arrays.items()),
            reverse=True,
        )
        return [label for _, label in pairs]

    # Start from the realm-wide ranking, then let a label's user-side rank
    # win whenever it beats the realm-side one; the -0.1 nudge makes the
    # user ranking win exact ties.
    scores: Dict[str, float] = {
        label: position for position, label in enumerate(order_by_totals(data["everyone"]))
    }
    for position, label in enumerate(order_by_totals(data["user"])):
        scores[label] = min(position - 0.1, scores.get(label, position))
    return sorted(scores, key=lambda label: scores[label])
|  | ||||
|  | ||||
def table_filtered_to_id(table: Type[BaseCount], key_id: int) -> QuerySet:
    """Return the rows of the given *Count table for the entity with id key_id.

    key_id is a realm/user/stream/server id depending on the table; for
    InstallationCount there is no per-entity filtering and key_id is ignored
    (callers pass -1).  The settings.ZILENCER_ENABLED short-circuits keep
    the Remote* model names from being evaluated when zilencer is not
    installed.
    """
    if table == RealmCount:
        return RealmCount.objects.filter(realm_id=key_id)
    elif table == UserCount:
        return UserCount.objects.filter(user_id=key_id)
    elif table == StreamCount:
        return StreamCount.objects.filter(stream_id=key_id)
    elif table == InstallationCount:
        return InstallationCount.objects.all()
    elif settings.ZILENCER_ENABLED and table == RemoteInstallationCount:
        return RemoteInstallationCount.objects.filter(server_id=key_id)
    elif settings.ZILENCER_ENABLED and table == RemoteRealmCount:
        return RemoteRealmCount.objects.filter(realm_id=key_id)
    else:
        raise AssertionError(f"Unknown table: {table}")
|  | ||||
|  | ||||
def client_label_map(name: str) -> str:
    """Translate a raw Client name into its display label for the
    messages_sent_by_client chart; unknown names pass through unchanged."""
    exact_names = {
        "website": "Website",
        "ZulipElectron": "Desktop app",
        "ZulipAndroid": "Old Android app",
        "ZulipiOS": "Old iOS app",
        "ZulipMobile": "Mobile app",
        "ZulipPython": "Python API",
        "API: Python": "Python API",
    }
    if name in exact_names:
        return exact_names[name]
    if name.startswith("desktop app"):
        return "Old desktop app"
    # Integration clients like "ZulipGitHubWebhook" become "GitHub webhook".
    if name.startswith("Zulip") and name.endswith("Webhook"):
        return name[len("Zulip") : -len("Webhook")] + " webhook"
    return name
|  | ||||
|  | ||||
def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[int]]:
    """Collapse per-Client time series whose names map to the same display
    label (via client_label_map) into a single element-wise sum.

    Assumes all arrays sharing a mapped label have equal length (they are
    built over the same end_times axis by get_time_series_by_subgroup).
    """
    mapped_arrays: Dict[str, List[int]] = {}
    for label, array in value_arrays.items():
        mapped_label = client_label_map(label)
        if mapped_label in mapped_arrays:
            # Accumulate into the existing series for this display label.
            existing = mapped_arrays[mapped_label]
            for i, value in enumerate(array):
                existing[i] += value
        else:
            # Copy, so later accumulation never mutates the caller's list.
            mapped_arrays[mapped_label] = list(array)
    return mapped_arrays
|  | ||||
|  | ||||
def get_time_series_by_subgroup(
    stat: CountStat,
    table: Type[BaseCount],
    key_id: int,
    end_times: List[datetime],
    subgroup_to_label: Dict[Optional[str], str],
    include_empty_subgroups: bool,
) -> Dict[str, List[int]]:
    """Build {display_label: [value at each end_time]} for one statistic.

    Reads `stat`'s rows for the entity `key_id` from `table`, keyed by
    subgroup.  Subgroups absent from subgroup_to_label are dropped; labeled
    subgroups with no rows appear as all-zero series only when
    include_empty_subgroups is True.
    """
    queryset = (
        table_filtered_to_id(table, key_id)
        .filter(property=stat.property)
        .values_list("subgroup", "end_time", "value")
    )
    # Nested defaultdicts so missing (subgroup, end_time) cells read as 0.
    value_dicts: Dict[Optional[str], Dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
    for subgroup, end_time, value in queryset:
        value_dicts[subgroup][end_time] = value
    value_arrays = {}
    for subgroup, label in subgroup_to_label.items():
        if (subgroup in value_dicts) or include_empty_subgroups:
            value_arrays[label] = [value_dicts[subgroup][end_time] for end_time in end_times]

    if stat == COUNT_STATS["messages_sent:client:day"]:
        # HACK: We rewrite these arrays to collapse the Client objects
        # with similar names into a single sum, and generally give
        # them better names
        return rewrite_client_arrays(value_arrays)
    return value_arrays
| @@ -1,343 +0,0 @@ | ||||
| import urllib | ||||
| from datetime import timedelta | ||||
| from decimal import Decimal | ||||
| from typing import Any, Dict, List, Optional | ||||
| from urllib.parse import urlencode | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.core.exceptions import ValidationError | ||||
| from django.core.validators import URLValidator | ||||
| from django.http import HttpRequest, HttpResponse, HttpResponseRedirect | ||||
| from django.shortcuts import render | ||||
| from django.urls import reverse | ||||
| from django.utils.timesince import timesince | ||||
| from django.utils.timezone import now as timezone_now | ||||
| from django.utils.translation import gettext as _ | ||||
|  | ||||
| from confirmation.models import Confirmation, confirmation_url | ||||
| from confirmation.settings import STATUS_ACTIVE | ||||
| from zerver.decorator import require_server_admin | ||||
| from zerver.forms import check_subdomain_available | ||||
| from zerver.lib.actions import ( | ||||
|     do_change_realm_org_type, | ||||
|     do_change_realm_plan_type, | ||||
|     do_change_realm_subdomain, | ||||
|     do_deactivate_realm, | ||||
|     do_scrub_realm, | ||||
|     do_send_realm_reactivation_email, | ||||
| ) | ||||
| from zerver.lib.exceptions import JsonableError | ||||
| from zerver.lib.realm_icon import realm_icon_url | ||||
| from zerver.lib.request import REQ, has_request_variables | ||||
| from zerver.lib.subdomains import get_subdomain_from_hostname | ||||
| from zerver.lib.validator import check_bool, check_string_in, to_decimal, to_non_negative_int | ||||
| from zerver.models import ( | ||||
|     MultiuseInvite, | ||||
|     PreregistrationUser, | ||||
|     Realm, | ||||
|     UserProfile, | ||||
|     get_org_type_display_name, | ||||
|     get_realm, | ||||
| ) | ||||
| from zerver.views.invite import get_invitee_emails_set | ||||
|  | ||||
| if settings.BILLING_ENABLED: | ||||
|     from corporate.lib.stripe import approve_sponsorship as do_approve_sponsorship | ||||
|     from corporate.lib.stripe import ( | ||||
|         attach_discount_to_realm, | ||||
|         downgrade_at_the_end_of_billing_cycle, | ||||
|         downgrade_now_without_creating_additional_invoices, | ||||
|         get_discount_for_realm, | ||||
|         get_latest_seat_count, | ||||
|         make_end_of_cycle_updates_if_needed, | ||||
|         update_billing_method_of_current_plan, | ||||
|         update_sponsorship_status, | ||||
|         void_all_open_invoices, | ||||
|     ) | ||||
|     from corporate.models import get_current_plan_by_realm, get_customer_by_realm | ||||
|  | ||||
|  | ||||
def get_plan_name(plan_type: int) -> str:
    """Map a Realm.PLAN_TYPE_* constant to its human-readable name.

    Raises KeyError for unrecognized plan types.
    """
    plan_names = {
        Realm.PLAN_TYPE_SELF_HOSTED: "self-hosted",
        Realm.PLAN_TYPE_LIMITED: "limited",
        Realm.PLAN_TYPE_STANDARD: "standard",
        Realm.PLAN_TYPE_STANDARD_FREE: "open source",
        Realm.PLAN_TYPE_PLUS: "plus",
    }
    return plan_names[plan_type]
|  | ||||
|  | ||||
def get_confirmations(
    types: List[int], object_ids: List[int], hostname: Optional[str] = None
) -> List[Dict[str, Any]]:
    """Collect display data for Confirmation links of the given types whose
    object_id is in object_ids, limited to links sent in the last 30 days.

    Returns one dict per confirmation with the target object, clickable URL,
    type, link-usage status, and remaining validity.

    NOTE(review): the `hostname` parameter is accepted but never used in
    this body — confirm whether callers still need to pass it.
    """
    lowest_datetime = timezone_now() - timedelta(days=30)
    confirmations = Confirmation.objects.filter(
        type__in=types, object_id__in=object_ids, date_sent__gte=lowest_datetime
    )
    confirmation_dicts = []
    for confirmation in confirmations:
        realm = confirmation.realm
        content_object = confirmation.content_object

        type = confirmation.type
        expiry_date = confirmation.expiry_date

        assert content_object is not None
        # Targets with a `status` field record whether the link was used.
        if hasattr(content_object, "status"):
            if content_object.status == STATUS_ACTIVE:
                link_status = "Link has been clicked"
            else:
                link_status = "Link has never been clicked"
        else:
            link_status = ""

        now = timezone_now()
        if expiry_date is None:
            expires_in = "Never"
        elif now < expiry_date:
            expires_in = timesince(now, expiry_date)
        else:
            expires_in = "Expired"

        url = confirmation_url(confirmation.confirmation_key, realm, type)
        confirmation_dicts.append(
            {
                "object": confirmation.content_object,
                "url": url,
                "type": type,
                "link_status": link_status,
                "expires_in": expires_in,
            }
        )
    return confirmation_dicts
|  | ||||
|  | ||||
# Downgrade strategies accepted by the /support POST handler below.
VALID_DOWNGRADE_METHODS = [
    "downgrade_at_billing_cycle_end",
    "downgrade_now_without_additional_licenses",
    "downgrade_now_void_open_invoices",
]

# Realm status values accepted by the /support POST handler.
VALID_STATUS_VALUES = [
    "active",
    "deactivated",
]

# Billing methods accepted by the /support POST handler.
VALID_BILLING_METHODS = [
    "send_invoice",
    "charge_automatically",
]
|  | ||||
|  | ||||
@require_server_admin
@has_request_variables
def support(
    request: HttpRequest,
    realm_id: Optional[int] = REQ(default=None, converter=to_non_negative_int),
    plan_type: Optional[int] = REQ(default=None, converter=to_non_negative_int),
    discount: Optional[Decimal] = REQ(default=None, converter=to_decimal),
    new_subdomain: Optional[str] = REQ(default=None),
    status: Optional[str] = REQ(default=None, str_validator=check_string_in(VALID_STATUS_VALUES)),
    billing_method: Optional[str] = REQ(
        default=None, str_validator=check_string_in(VALID_BILLING_METHODS)
    ),
    sponsorship_pending: Optional[bool] = REQ(default=None, json_validator=check_bool),
    approve_sponsorship: Optional[bool] = REQ(default=None, json_validator=check_bool),
    downgrade_method: Optional[str] = REQ(
        default=None, str_validator=check_string_in(VALID_DOWNGRADE_METHODS)
    ),
    scrub_realm: Optional[bool] = REQ(default=None, json_validator=check_bool),
    query: Optional[str] = REQ("q", default=None),
    org_type: Optional[int] = REQ(default=None, converter=to_non_negative_int),
) -> HttpResponse:
    """Server-admin /support page.

    GET with ?q= searches for users and realms by email, subdomain, URL, or
    full name and renders their details.  POST (only when billing is
    enabled) applies exactly one administrative change to the realm given
    by realm_id: plan type, org type, discount, subdomain, active status,
    billing method, sponsorship state, a downgrade, or a data scrub.
    """
    context: Dict[str, Any] = {}

    # A success message can be carried across a redirect (see the subdomain
    # change below) via the session.
    if "success_message" in request.session:
        context["success_message"] = request.session["success_message"]
        del request.session["success_message"]

    if settings.BILLING_ENABLED and request.method == "POST":
        # We check that request.POST only has two keys in it: The
        # realm_id and a field to change.
        keys = set(request.POST.keys())
        if "csrfmiddlewaretoken" in keys:
            keys.remove("csrfmiddlewaretoken")
        if len(keys) != 2:
            raise JsonableError(_("Invalid parameters"))

        realm = Realm.objects.get(id=realm_id)

        acting_user = request.user
        assert isinstance(acting_user, UserProfile)
        # At most one of the mutation parameters is non-None per POST (we
        # verified only one extra key above); the elif chain applies it.
        if plan_type is not None:
            current_plan_type = realm.plan_type
            do_change_realm_plan_type(realm, plan_type, acting_user=acting_user)
            msg = f"Plan type of {realm.string_id} changed from {get_plan_name(current_plan_type)} to {get_plan_name(plan_type)} "
            context["success_message"] = msg
        elif org_type is not None:
            current_realm_type = realm.org_type
            do_change_realm_org_type(realm, org_type, acting_user=acting_user)
            msg = f"Org type of {realm.string_id} changed from {get_org_type_display_name(current_realm_type)} to {get_org_type_display_name(org_type)} "
            context["success_message"] = msg
        elif discount is not None:
            current_discount = get_discount_for_realm(realm) or 0
            attach_discount_to_realm(realm, discount, acting_user=acting_user)
            context[
                "success_message"
            ] = f"Discount of {realm.string_id} changed to {discount}% from {current_discount}%."
        elif new_subdomain is not None:
            old_subdomain = realm.string_id
            try:
                check_subdomain_available(new_subdomain)
            except ValidationError as error:
                context["error_message"] = error.message
            else:
                do_change_realm_subdomain(realm, new_subdomain, acting_user=acting_user)
                # Redirect so the page re-queries under the new subdomain;
                # the success message travels via the session (see above).
                request.session[
                    "success_message"
                ] = f"Subdomain changed from {old_subdomain} to {new_subdomain}"
                return HttpResponseRedirect(
                    reverse("support") + "?" + urlencode({"q": new_subdomain})
                )
        elif status is not None:
            if status == "active":
                do_send_realm_reactivation_email(realm, acting_user=acting_user)
                context[
                    "success_message"
                ] = f"Realm reactivation email sent to admins of {realm.string_id}."
            elif status == "deactivated":
                do_deactivate_realm(realm, acting_user=acting_user)
                context["success_message"] = f"{realm.string_id} deactivated."
        elif billing_method is not None:
            if billing_method == "send_invoice":
                update_billing_method_of_current_plan(
                    realm, charge_automatically=False, acting_user=acting_user
                )
                context[
                    "success_message"
                ] = f"Billing method of {realm.string_id} updated to pay by invoice."
            elif billing_method == "charge_automatically":
                update_billing_method_of_current_plan(
                    realm, charge_automatically=True, acting_user=acting_user
                )
                context[
                    "success_message"
                ] = f"Billing method of {realm.string_id} updated to charge automatically."
        elif sponsorship_pending is not None:
            if sponsorship_pending:
                update_sponsorship_status(realm, True, acting_user=acting_user)
                context["success_message"] = f"{realm.string_id} marked as pending sponsorship."
            else:
                update_sponsorship_status(realm, False, acting_user=acting_user)
                context["success_message"] = f"{realm.string_id} is no longer pending sponsorship."
        elif approve_sponsorship:
            do_approve_sponsorship(realm, acting_user=acting_user)
            context["success_message"] = f"Sponsorship approved for {realm.string_id}"
        elif downgrade_method is not None:
            if downgrade_method == "downgrade_at_billing_cycle_end":
                downgrade_at_the_end_of_billing_cycle(realm)
                context[
                    "success_message"
                ] = f"{realm.string_id} marked for downgrade at the end of billing cycle"
            elif downgrade_method == "downgrade_now_without_additional_licenses":
                downgrade_now_without_creating_additional_invoices(realm)
                context[
                    "success_message"
                ] = f"{realm.string_id} downgraded without creating additional invoices"
            elif downgrade_method == "downgrade_now_void_open_invoices":
                downgrade_now_without_creating_additional_invoices(realm)
                voided_invoices_count = void_all_open_invoices(realm)
                context[
                    "success_message"
                ] = f"{realm.string_id} downgraded and voided {voided_invoices_count} open invoices"
        elif scrub_realm:
            do_scrub_realm(realm, acting_user=acting_user)
            context["success_message"] = f"{realm.string_id} scrubbed."

    if query:
        # The query is a comma/newline-separated list of search terms.
        key_words = get_invitee_emails_set(query)

        users = set(UserProfile.objects.filter(delivery_email__in=key_words))
        realms = set(Realm.objects.filter(string_id__in=key_words))

        for key_word in key_words:
            try:
                # Terms that parse as URLs are resolved to a realm via their
                # subdomain; everything else is tried as a user full name.
                URLValidator()(key_word)
                parse_result = urllib.parse.urlparse(key_word)
                hostname = parse_result.hostname
                assert hostname is not None
                if parse_result.port:
                    hostname = f"{hostname}:{parse_result.port}"
                subdomain = get_subdomain_from_hostname(hostname)
                try:
                    realms.add(get_realm(subdomain))
                except Realm.DoesNotExist:
                    pass
            except ValidationError:
                users.update(UserProfile.objects.filter(full_name__iexact=key_word))

        # Annotate each realm with billing data for the template.
        for realm in realms:
            realm.customer = get_customer_by_realm(realm)

            current_plan = get_current_plan_by_realm(realm)
            if current_plan is not None:
                new_plan, last_ledger_entry = make_end_of_cycle_updates_if_needed(
                    current_plan, timezone_now()
                )
                if last_ledger_entry is not None:
                    if new_plan is not None:
                        realm.current_plan = new_plan
                    else:
                        realm.current_plan = current_plan
                    realm.current_plan.licenses = last_ledger_entry.licenses
                    realm.current_plan.licenses_used = get_latest_seat_count(realm)

        # full_names can have , in them
        users.update(UserProfile.objects.filter(full_name__iexact=query))

        context["users"] = users
        context["realms"] = realms

        confirmations: List[Dict[str, Any]] = []

        preregistration_users = PreregistrationUser.objects.filter(email__in=key_words)
        confirmations += get_confirmations(
            [Confirmation.USER_REGISTRATION, Confirmation.INVITATION, Confirmation.REALM_CREATION],
            preregistration_users,
            hostname=request.get_host(),
        )

        multiuse_invites = MultiuseInvite.objects.filter(realm__in=realms)
        confirmations += get_confirmations([Confirmation.MULTIUSE_INVITE], multiuse_invites)

        confirmations += get_confirmations(
            [Confirmation.REALM_REACTIVATION], [realm.id for realm in realms]
        )

        context["confirmations"] = confirmations

    # Template helpers: render comma-separated owner/admin email lists.
    def get_realm_owner_emails_as_string(realm: Realm) -> str:
        return ", ".join(
            realm.get_human_owner_users()
            .order_by("delivery_email")
            .values_list("delivery_email", flat=True)
        )

    def get_realm_admin_emails_as_string(realm: Realm) -> str:
        return ", ".join(
            realm.get_human_admin_users(include_realm_owners=False)
            .order_by("delivery_email")
            .values_list("delivery_email", flat=True)
        )

    context["get_realm_owner_emails_as_string"] = get_realm_owner_emails_as_string
    context["get_realm_admin_emails_as_string"] = get_realm_admin_emails_as_string
    context["get_discount_for_realm"] = get_discount_for_realm
    context["get_org_type_display_name"] = get_org_type_display_name
    context["realm_icon_url"] = realm_icon_url
    context["Confirmation"] = Confirmation
    context["sorted_realm_types"] = sorted(
        Realm.ORG_TYPES.values(), key=lambda d: d["display_order"]
    )

    return render(request, "analytics/support.html", context=context)
| @@ -1,104 +0,0 @@ | ||||
| from typing import Any, Dict, List, Tuple | ||||
|  | ||||
| from django.conf import settings | ||||
| from django.db.models.query import QuerySet | ||||
| from django.http import HttpRequest, HttpResponse | ||||
| from django.shortcuts import render | ||||
|  | ||||
| from analytics.views.activity_common import ( | ||||
|     format_date_for_activity_reports, | ||||
|     get_user_activity_summary, | ||||
|     make_table, | ||||
| ) | ||||
| from zerver.decorator import require_server_admin | ||||
| from zerver.models import UserActivity, UserProfile, get_user_profile_by_id | ||||
|  | ||||
| if settings.BILLING_ENABLED: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| def get_user_activity_records(user_profile: UserProfile) -> List[QuerySet]: | ||||
|     fields = [ | ||||
|         "user_profile__full_name", | ||||
|         "query", | ||||
|         "client__name", | ||||
|         "count", | ||||
|         "last_visit", | ||||
|     ] | ||||
|  | ||||
|     records = UserActivity.objects.filter( | ||||
|         user_profile=user_profile, | ||||
|     ) | ||||
|     records = records.order_by("-last_visit") | ||||
|     records = records.select_related("user_profile", "client").only(*fields) | ||||
|     return records | ||||
|  | ||||
|  | ||||
| def raw_user_activity_table(records: List[QuerySet]) -> str: | ||||
|     cols = [ | ||||
|         "query", | ||||
|         "client", | ||||
|         "count", | ||||
|         "last_visit", | ||||
|     ] | ||||
|  | ||||
|     def row(record: QuerySet) -> List[Any]: | ||||
|         return [ | ||||
|             record.query, | ||||
|             record.client.name, | ||||
|             record.count, | ||||
|             format_date_for_activity_reports(record.last_visit), | ||||
|         ] | ||||
|  | ||||
|     rows = list(map(row, records)) | ||||
|     title = "Raw data" | ||||
|     return make_table(title, cols, rows) | ||||
|  | ||||
|  | ||||
| def user_activity_summary_table(user_summary: Dict[str, Dict[str, Any]]) -> str: | ||||
|     rows = [] | ||||
|     for k, v in user_summary.items(): | ||||
|         if k == "name" or k == "user_profile_id": | ||||
|             continue | ||||
|         client = k | ||||
|         count = v["count"] | ||||
|         last_visit = v["last_visit"] | ||||
|         row = [ | ||||
|             format_date_for_activity_reports(last_visit), | ||||
|             client, | ||||
|             count, | ||||
|         ] | ||||
|         rows.append(row) | ||||
|  | ||||
|     rows = sorted(rows, key=lambda r: r[0], reverse=True) | ||||
|  | ||||
|     cols = [ | ||||
|         "last_visit", | ||||
|         "client", | ||||
|         "count", | ||||
|     ] | ||||
|  | ||||
|     title = "User activity" | ||||
|     return make_table(title, cols, rows) | ||||
|  | ||||
|  | ||||
| @require_server_admin | ||||
| def get_user_activity(request: HttpRequest, user_profile_id: int) -> HttpResponse: | ||||
|     user_profile = get_user_profile_by_id(user_profile_id) | ||||
|     records = get_user_activity_records(user_profile) | ||||
|  | ||||
|     data: List[Tuple[str, str]] = [] | ||||
|     user_summary = get_user_activity_summary(records) | ||||
|     content = user_activity_summary_table(user_summary) | ||||
|  | ||||
|     data += [("Summary", content)] | ||||
|  | ||||
|     content = raw_user_activity_table(records) | ||||
|     data += [("Info", content)] | ||||
|  | ||||
|     title = user_profile.delivery_email | ||||
|     return render( | ||||
|         request, | ||||
|         "analytics/activity.html", | ||||
|         context=dict(data=data, title=title), | ||||
|     ) | ||||
							
								
								
									
										10
									
								
								api/MANIFEST.in
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										10
									
								
								api/MANIFEST.in
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,10 @@ | ||||
| recursive-include integrations * | ||||
| include README.md | ||||
| include examples/zuliprc | ||||
| include examples/send-message | ||||
| include examples/subscribe | ||||
| include examples/get-public-streams | ||||
| include examples/unsubscribe | ||||
| include examples/list-members | ||||
| include examples/list-subscriptions | ||||
| include examples/print-messages | ||||
							
								
								
									
										106
									
								
								api/README.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										106
									
								
								api/README.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,106 @@ | ||||
| #### Dependencies | ||||
|  | ||||
| The [Zulip API](https://zulip.com/api) Python bindings require the | ||||
| following Python libraries: | ||||
|  | ||||
| * simplejson | ||||
| * requests (version >= 0.12.1) | ||||
|  | ||||
|  | ||||
| #### Installing | ||||
|  | ||||
| This package uses distutils, so you can just run: | ||||
|  | ||||
|     python setup.py install | ||||
|  | ||||
| #### Using the API | ||||
|  | ||||
| For now, the only fully supported API operation is sending a message. | ||||
| The other API queries work, but are under active development, so | ||||
| please make sure we know you're using them so that we can notify you | ||||
| as we make any changes to them. | ||||
|  | ||||
| The easiest way to use these API bindings is to base your tools off | ||||
| of the example tools under examples/ in this distribution. | ||||
|  | ||||
| If you place your API key in the config file `~/.zuliprc` the Python | ||||
| API bindings will automatically read it in. The format of the config | ||||
| file is as follows: | ||||
|  | ||||
|     [api] | ||||
|     key=<api key from the web interface> | ||||
|     email=<your email address> | ||||
|  | ||||
| If you are using Zulip Enterprise, you should also add | ||||
|  | ||||
|     site=<your Zulip Enterprise server's URI> | ||||
|  | ||||
| Alternatively, you may explicitly use "--user" and "--api-key" in our | ||||
| examples, which is especially useful if you are running several bots | ||||
| which share a home directory.  There is also a "--site" option for | ||||
| setting the Zulip Enterprise server on the command line. | ||||
|  | ||||
| You can obtain your Zulip API key, create bots, and manage bots all | ||||
| from your Zulip [settings page](https://zulip.com/#settings). | ||||
|  | ||||
| A typical simple bot sending API messages will look as follows: | ||||
|  | ||||
| At the top of the file: | ||||
|  | ||||
|     # Make sure the Zulip API distribution's root directory is in sys.path, then: | ||||
|     import zulip | ||||
|     zulip_client = zulip.Client(email="your-bot@example.com", client="MyTestClient/0.1") | ||||
|  | ||||
| When you want to send a message: | ||||
|  | ||||
|     message = { | ||||
|       "type": "stream", | ||||
|       "to": ["support"], | ||||
|       "subject": "your subject", | ||||
|       "content": "your content", | ||||
|     } | ||||
|     zulip_client.send_message(message) | ||||
|  | ||||
| Additional examples: | ||||
|  | ||||
|     client.send_message({'type': 'stream', 'content': 'Zulip rules!', | ||||
|                          'subject': 'feedback', 'to': ['support']}) | ||||
|     client.send_message({'type': 'private', 'content': 'Zulip rules!', | ||||
|                          'to': ['user1@example.com', 'user2@example.com']}) | ||||
|  | ||||
| send_message() returns a dict guaranteed to contain the following | ||||
| keys: msg, result.  For successful calls, result will be "success" and | ||||
| msg will be the empty string.  On error, result will be "error" and | ||||
| msg will describe what went wrong. | ||||
|  | ||||
| #### Logging | ||||
| The Zulip API comes with a ZulipStream class which can be used with the | ||||
| logging module: | ||||
|  | ||||
| ``` | ||||
| import zulip | ||||
| import logging | ||||
| stream = zulip.ZulipStream(type="stream", to=["support"], subject="your subject") | ||||
| logger = logging.getLogger("your_logger") | ||||
| logger.addHandler(logging.StreamHandler(stream)) | ||||
| logger.setLevel(logging.DEBUG) | ||||
| logger.info("This is an INFO test.") | ||||
| logger.debug("This is a DEBUG test.") | ||||
| logger.warn("This is a WARN test.") | ||||
| logger.error("This is a ERROR test.") | ||||
| ``` | ||||
|  | ||||
| #### Sending messages | ||||
|  | ||||
| You can use the included `zulip-send` script to send messages via the | ||||
| API directly from existing scripts. | ||||
|  | ||||
|     zulip-send hamlet@example.com cordelia@example.com -m \ | ||||
|         "Conscience doth make cowards of us all." | ||||
|  | ||||
| Alternatively, if you don't want to use your ~/.zuliprc file: | ||||
|  | ||||
|     zulip-send --user shakespeare-bot@example.com \ | ||||
|         --api-key a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 \ | ||||
|         hamlet@example.com cordelia@example.com -m \ | ||||
|         "Conscience doth make cowards of us all." | ||||
							
								
								
									
										126
									
								
								api/bin/zulip-send
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										126
									
								
								api/bin/zulip-send
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,126 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
| # zulip-send -- Sends a message to the specified recipients. | ||||
|  | ||||
| # Copyright © 2012 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
| import sys | ||||
| import os | ||||
| import optparse | ||||
| import logging | ||||
|  | ||||
|  | ||||
| logging.basicConfig() | ||||
|  | ||||
| log = logging.getLogger('zulip-send') | ||||
|  | ||||
| def do_send_message(client, message_data ): | ||||
|     '''Sends a message and optionally prints status about the same.''' | ||||
|  | ||||
|     if message_data['type'] == 'stream': | ||||
|         log.info('Sending message to stream "%s", subject "%s"... ' % \ | ||||
|             (message_data['to'], message_data['subject'])) | ||||
|     else: | ||||
|         log.info('Sending message to %s... ' % message_data['to']) | ||||
|     response = client.send_message(message_data) | ||||
|     if response['result'] == 'success': | ||||
|         log.info('Message sent.') | ||||
|         return True | ||||
|     else: | ||||
|         log.error(response['msg']) | ||||
|         return False | ||||
|  | ||||
| def main(argv=None): | ||||
|     if argv is None: | ||||
|         argv = sys.argv | ||||
|  | ||||
|     usage = """%prog [options] [recipient...] | ||||
|  | ||||
|     Sends a message specified recipients. | ||||
|  | ||||
|     Examples: %prog --stream denmark --subject castle -m "Something is rotten in the state of Denmark." | ||||
|               %prog hamlet@example.com cordelia@example.com -m "Conscience doth make cowards of us all." | ||||
|  | ||||
|     These examples assume you have a proper '~/.zuliprc'. You may also set your credentials with the | ||||
|     '--user' and '--api-key' arguments. | ||||
|     """ | ||||
|  | ||||
|     sys.path.append(os.path.join(os.path.dirname(__file__), '..')) | ||||
|  | ||||
|     import zulip | ||||
|  | ||||
|     parser = optparse.OptionParser(usage=usage) | ||||
|  | ||||
|     # Grab parser options from the API common set | ||||
|     parser.add_option_group(zulip.generate_option_group(parser)) | ||||
|  | ||||
|     parser.add_option('-m', '--message', | ||||
|                       help='Specifies the message to send, prevents interactive prompting.') | ||||
|  | ||||
|     group = optparse.OptionGroup(parser, 'Stream parameters') | ||||
|     group.add_option('-s', '--stream', | ||||
|                       dest='stream', | ||||
|                       action='store', | ||||
|                       help='Allows the user to specify a stream for the message.') | ||||
|     group.add_option('-S', '--subject', | ||||
|                       dest='subject', | ||||
|                       action='store', | ||||
|                       help='Allows the user to specify a subject for the message.') | ||||
|     parser.add_option_group(group) | ||||
|  | ||||
|  | ||||
|     (options, recipients) = parser.parse_args(argv[1:]) | ||||
|  | ||||
|     if options.verbose: | ||||
|         logging.getLogger().setLevel(logging.INFO) | ||||
|     # Sanity check user data | ||||
|     if len(recipients) != 0 and (options.stream or options.subject): | ||||
|         parser.error('You cannot specify both a username and a stream/subject.') | ||||
|     if len(recipients) == 0 and (bool(options.stream) != bool(options.subject)): | ||||
|         parser.error('Stream messages must have a subject') | ||||
|     if len(recipients) == 0 and not (options.stream and options.subject): | ||||
|         parser.error('You must specify a stream/subject or at least one recipient.') | ||||
|  | ||||
|     client = zulip.init_from_options(options) | ||||
|  | ||||
|     if not options.message: | ||||
|         options.message = sys.stdin.read() | ||||
|  | ||||
|     if options.stream: | ||||
|         message_data = { | ||||
|             'type': 'stream', | ||||
|             'content': options.message, | ||||
|             'subject': options.subject, | ||||
|             'to': options.stream, | ||||
|         } | ||||
|     else: | ||||
|         message_data = { | ||||
|             'type': 'private', | ||||
|             'content': options.message, | ||||
|             'to': recipients, | ||||
|         } | ||||
|  | ||||
|     if not do_send_message(client, message_data): | ||||
|         return 1 | ||||
|  | ||||
|  | ||||
| if __name__ == '__main__': | ||||
|     sys.exit(main()) | ||||
							
								
								
									
										54
									
								
								api/examples/create-user
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										54
									
								
								api/examples/create-user
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,54 @@ | ||||
| #!/usr/bin/python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| # Copyright © 2012-2014 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
| import sys | ||||
| from os import path | ||||
| import optparse | ||||
|  | ||||
| usage = """create-user --new-email=<email address> --new-password=<password> --new-full-name=<full name> --new-short-name=<short name> [options] | ||||
|  | ||||
| Create a user. You must be a realm admin to use this API, and the user | ||||
| will be created in your realm. | ||||
|  | ||||
| Example: create-user --site=http://localhost:9991 --user=rwbarton@zulip.com --new-email=jarthur@zulip.com --new-password=random17 --new-full-name 'J. Arthur Random' --new-short-name='jarthur' | ||||
| """ | ||||
|  | ||||
| sys.path.append(path.join(path.dirname(__file__), '..')) | ||||
| import zulip | ||||
|  | ||||
| parser = optparse.OptionParser(usage=usage) | ||||
| parser.add_option_group(zulip.generate_option_group(parser)) | ||||
| parser.add_option('--new-email') | ||||
| parser.add_option('--new-password') | ||||
| parser.add_option('--new-full-name') | ||||
| parser.add_option('--new-short-name') | ||||
| (options, args) = parser.parse_args() | ||||
|  | ||||
| client = zulip.init_from_options(options) | ||||
|  | ||||
| print client.create_user({ | ||||
|         'email': options.new_email, | ||||
|         'password': options.new_password, | ||||
|         'full_name': options.new_full_name, | ||||
|         'short_name': options.new_short_name | ||||
|         }) | ||||
							
								
								
									
										56
									
								
								api/examples/edit-message
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										56
									
								
								api/examples/edit-message
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,56 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| # Copyright © 2012 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
| import sys | ||||
| import os | ||||
| import optparse | ||||
|  | ||||
| usage = """edit-message [options] --message=<msg_id> --subject=<new subject> --content=<new content> --user=<sender's email address> --api-key=<sender's api key> | ||||
|  | ||||
| Edits a message that you sent | ||||
|  | ||||
| Example: edit-message --message-id="348135" --subject="my subject" --content="test message" --user=othello-bot@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 | ||||
|  | ||||
| You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc | ||||
| """ | ||||
|  | ||||
| sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) | ||||
| import zulip | ||||
|  | ||||
| parser = optparse.OptionParser(usage=usage) | ||||
| parser.add_option('--message-id', default="") | ||||
| parser.add_option('--subject', default="") | ||||
| parser.add_option('--content',   default="") | ||||
| parser.add_option_group(zulip.generate_option_group(parser)) | ||||
| (options, args) = parser.parse_args() | ||||
|  | ||||
| client = zulip.init_from_options(options) | ||||
|  | ||||
| message_data = { | ||||
|     "message_id": options.message_id, | ||||
| } | ||||
| if options.subject != "": | ||||
|     message_data["subject"] = options.subject | ||||
| if options.content != "": | ||||
|     message_data["content"] = options.content | ||||
| print client.update_message(message_data) | ||||
							
								
								
									
										46
									
								
								api/examples/get-public-streams
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										46
									
								
								api/examples/get-public-streams
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,46 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| # Copyright © 2012 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
| import sys | ||||
| import os | ||||
| import optparse | ||||
|  | ||||
| usage = """get-public-streams --user=<bot's email address> --api-key=<bot's api key> [options] | ||||
|  | ||||
| Prints out all the public streams in the realm. | ||||
|  | ||||
| Example: get-public-streams --user=othello-bot@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 | ||||
|  | ||||
| You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc | ||||
| """ | ||||
|  | ||||
| sys.path.append(os.path.join(os.path.dirname(__file__), '..')) | ||||
| import zulip | ||||
|  | ||||
| parser = optparse.OptionParser(usage=usage) | ||||
| parser.add_option_group(zulip.generate_option_group(parser)) | ||||
| (options, args) = parser.parse_args() | ||||
|  | ||||
| client = zulip.init_from_options(options) | ||||
|  | ||||
| print client.get_streams(include_public=True, include_subscribed=False) | ||||
							
								
								
									
										45
									
								
								api/examples/list-members
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										45
									
								
								api/examples/list-members
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,45 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| # Copyright © 2014 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
| import sys | ||||
| import os | ||||
| import optparse | ||||
|  | ||||
| usage = """list-members --user=<bot's email address> --api-key=<bot's api key> [options] | ||||
|  | ||||
| List the names and e-mail addresses of the people in your realm. | ||||
|  | ||||
| You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc | ||||
| """ | ||||
|  | ||||
| sys.path.append(os.path.join(os.path.dirname(__file__), '..')) | ||||
| import zulip | ||||
|  | ||||
| parser = optparse.OptionParser(usage=usage) | ||||
| parser.add_option_group(zulip.generate_option_group(parser)) | ||||
| (options, args) = parser.parse_args() | ||||
|  | ||||
| client = zulip.init_from_options(options) | ||||
|  | ||||
| for user in client.get_members()["members"]: | ||||
|     print user["full_name"], user["email"] | ||||
							
								
								
									
										45
									
								
								api/examples/list-subscriptions
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										45
									
								
								api/examples/list-subscriptions
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,45 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| # Copyright © 2012 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
| import sys | ||||
| import os | ||||
| import optparse | ||||
|  | ||||
| usage = """list-subscriptions --user=<bot's email address> --api-key=<bot's api key> [options] | ||||
|  | ||||
| Prints out a list of the user's subscriptions. | ||||
|  | ||||
| Example: list-subscriptions --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 | ||||
|  | ||||
| You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc | ||||
| """ | ||||
| sys.path.append(os.path.join(os.path.dirname(__file__), '..')) | ||||
| import zulip | ||||
|  | ||||
| parser = optparse.OptionParser(usage=usage) | ||||
| parser.add_option_group(zulip.generate_option_group(parser)) | ||||
| (options, args) = parser.parse_args() | ||||
|  | ||||
| client = zulip.init_from_options(options) | ||||
|  | ||||
| print client.list_subscriptions() | ||||
							
								
								
									
										49
									
								
								api/examples/print-messages
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										49
									
								
								api/examples/print-messages
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,49 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| # Copyright © 2012 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
| import sys | ||||
| import os | ||||
| import optparse | ||||
|  | ||||
| usage = """print-messages --user=<bot's email address> --api-key=<bot's api key> [options] | ||||
|  | ||||
| Prints out each message received by the indicated bot or user. | ||||
|  | ||||
| Example: print-messages --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 | ||||
|  | ||||
| You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc | ||||
| """ | ||||
| sys.path.append(os.path.join(os.path.dirname(__file__), '..')) | ||||
| import zulip | ||||
|  | ||||
| parser = optparse.OptionParser(usage=usage) | ||||
| parser.add_option_group(zulip.generate_option_group(parser)) | ||||
| (options, args) = parser.parse_args() | ||||
|  | ||||
| client = zulip.init_from_options(options) | ||||
|  | ||||
| def print_message(message): | ||||
|     print message | ||||
|  | ||||
| # This is a blocking call, and will continuously poll for new messages | ||||
| client.call_on_each_message(print_message) | ||||
							
								
								
									
										45
									
								
								api/examples/print-next-message
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										45
									
								
								api/examples/print-next-message
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,45 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| # Copyright © 2012 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
| import sys | ||||
| import os | ||||
| import optparse | ||||
|  | ||||
| usage = """print-next-message --user=<bot's email address> --api-key=<bot's api key> [options] | ||||
|  | ||||
| Prints out the next message received by the user. | ||||
|  | ||||
| Example: print-next-messages --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 | ||||
|  | ||||
| You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc | ||||
| """ | ||||
| sys.path.append(os.path.join(os.path.dirname(__file__), '..')) | ||||
| import zulip | ||||
|  | ||||
| parser = optparse.OptionParser(usage=usage) | ||||
| parser.add_option_group(zulip.generate_option_group(parser)) | ||||
| (options, args) = parser.parse_args() | ||||
|  | ||||
| client = zulip.init_from_options(options) | ||||
|  | ||||
| print client.get_messages({}) | ||||
							
								
								
									
										57
									
								
								api/examples/send-message
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										57
									
								
								api/examples/send-message
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,57 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| # Copyright © 2012 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
| import sys | ||||
| import os | ||||
| import optparse | ||||
| sys.path.append(os.path.join(os.path.dirname(__file__), '..')) | ||||
| import zulip | ||||
|  | ||||
| usage = """send-message --user=<bot's email address> --api-key=<bot's api key> [options] <recipients> | ||||
|  | ||||
| Sends a test message to the specified recipients. | ||||
|  | ||||
| Example: send-message --user=your-bot@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --type=stream commits --subject="my subject" --message="test message" | ||||
| Example: send-message --user=your-bot@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 user1@example.com user2@example.com | ||||
|  | ||||
| You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc | ||||
| """ | ||||
| parser = optparse.OptionParser(usage=usage) | ||||
| parser.add_option('--subject', default="test") | ||||
| parser.add_option('--message', default="test message") | ||||
| parser.add_option('--type',   default='private') | ||||
| parser.add_option_group(zulip.generate_option_group(parser)) | ||||
| (options, args) = parser.parse_args() | ||||
|  | ||||
| if len(args) == 0: | ||||
|     parser.error("You must specify recipients") | ||||
|  | ||||
| client = zulip.init_from_options(options) | ||||
|  | ||||
| message_data = { | ||||
|     "type": options.type, | ||||
|     "content": options.message, | ||||
|     "subject": options.subject, | ||||
|     "to": args, | ||||
| } | ||||
| print client.send_message(message_data) | ||||
							
								
								
									
										52
									
								
								api/examples/subscribe
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										52
									
								
								api/examples/subscribe
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,52 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| # Copyright © 2012 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
| import sys | ||||
| import os | ||||
| import optparse | ||||
|  | ||||
| usage = """subscribe --user=<bot's email address> --api-key=<bot's api key> [options] --streams=<streams> | ||||
|  | ||||
| Ensures the user is subscribed to the listed streams. | ||||
|  | ||||
| Examples: subscribe --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --streams=foo | ||||
|           subscribe --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --streams='foo bar' | ||||
|  | ||||
| You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc | ||||
| """ | ||||
| sys.path.append(os.path.join(os.path.dirname(__file__), '..')) | ||||
| import zulip | ||||
|  | ||||
| parser = optparse.OptionParser(usage=usage) | ||||
| parser.add_option_group(zulip.generate_option_group(parser)) | ||||
| parser.add_option('--streams', default='') | ||||
| (options, args) = parser.parse_args() | ||||
|  | ||||
| client = zulip.init_from_options(options) | ||||
|  | ||||
| if options.streams == "": | ||||
|     print >>sys.stderr, "Usage:", parser.usage | ||||
|     sys.exit(1) | ||||
|  | ||||
| print client.add_subscriptions([{"name": stream_name} for stream_name in | ||||
|                                 options.streams.split()]) | ||||
							
								
								
									
										51
									
								
								api/examples/unsubscribe
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										51
									
								
								api/examples/unsubscribe
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,51 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
|  | ||||
| # Copyright © 2012 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
| import sys | ||||
| import os | ||||
| import optparse | ||||
|  | ||||
| usage = """unsubscribe  --user=<bot's email address> --api-key=<bot's api key> [options] --streams=<streams> | ||||
|  | ||||
| Ensures the user is not subscribed to the listed streams. | ||||
|  | ||||
| Examples: unsubscribe --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --streams=foo | ||||
|           unsubscribe --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --streams='foo bar' | ||||
|  | ||||
| You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc | ||||
| """ | ||||
| sys.path.append(os.path.join(os.path.dirname(__file__), '..')) | ||||
| import zulip | ||||
|  | ||||
| parser = optparse.OptionParser(usage=usage) | ||||
| parser.add_option_group(zulip.generate_option_group(parser)) | ||||
| parser.add_option('--streams', default='') | ||||
| (options, args) = parser.parse_args() | ||||
|  | ||||
| client = zulip.init_from_options(options) | ||||
|  | ||||
| if options.streams == "": | ||||
|     print >>sys.stderr, "Usage:", parser.usage | ||||
|     sys.exit(1) | ||||
|  | ||||
| print client.remove_subscriptions(options.streams.split()) | ||||
							
								
								
									
										4
									
								
								api/examples/zuliprc
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										4
									
								
								api/examples/zuliprc
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,4 @@ | ||||
| ; Save this file as ~/.zuliprc | ||||
| [api] | ||||
| key=<your bot's api key from the web interface> | ||||
| email=<your bot's email address> | ||||
							
								
								
									
										57
									
								
								api/integrations/asana/zulip_asana_config.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										57
									
								
								api/integrations/asana/zulip_asana_config.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,57 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
| # | ||||
| # Copyright © 2014 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
|  | ||||
| ### REQUIRED CONFIGURATION ### | ||||
|  | ||||
| # Change these values to your Asana credentials. | ||||
| ASANA_API_KEY = "0123456789abcdef0123456789abcdef" | ||||
|  | ||||
| # Change these values to the credentials for your Asana bot. | ||||
| ZULIP_USER = "asana-bot@example.com" | ||||
| ZULIP_API_KEY = "0123456789abcdef0123456789abcdef" | ||||
|  | ||||
| # The Zulip stream that will receive Asana task updates. | ||||
| ZULIP_STREAM_NAME = "asana" | ||||
|  | ||||
|  | ||||
| ### OPTIONAL CONFIGURATION ### | ||||
|  | ||||
| # Set to None for logging to stdout when testing, and to a file for | ||||
| # logging when deployed. | ||||
| #LOG_FILE = "/var/tmp/zulip_asana.log" | ||||
| LOG_FILE = None | ||||
|  | ||||
| # This file is used to resume this mirror in case the script shuts down. | ||||
| # It is required and needs to be writeable. | ||||
| RESUME_FILE = "/var/tmp/zulip_asana.state" | ||||
|  | ||||
| # When initially started, how many hours of messages to include. | ||||
| ASANA_INITIAL_HISTORY_HOURS = 1 | ||||
|  | ||||
| # If you're using Zulip Enterprise, set this to your Zulip Enterprise server | ||||
| ZULIP_SITE = "https://api.zulip.com" | ||||
|  | ||||
| # If properly installed, the Zulip API should be in your import | ||||
| # path, but if not, set a custom path below | ||||
| ZULIP_API_PATH = None | ||||
							
								
								
									
										277
									
								
								api/integrations/asana/zulip_asana_mirror
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										277
									
								
								api/integrations/asana/zulip_asana_mirror
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,277 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
| # | ||||
| # Asana integration for Zulip | ||||
| # | ||||
| # Copyright © 2014 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
| import base64 | ||||
| from datetime import datetime, timedelta | ||||
| import dateutil.parser | ||||
| import dateutil.tz | ||||
| import json | ||||
| import logging | ||||
| import os | ||||
| import time | ||||
| import urllib2 | ||||
|  | ||||
| import sys | ||||
| sys.path.insert(0, os.path.dirname(__file__)) | ||||
| import zulip_asana_config as config | ||||
| VERSION = "0.9" | ||||
|  | ||||
| if config.ZULIP_API_PATH is not None: | ||||
|     sys.path.append(config.ZULIP_API_PATH) | ||||
| import zulip | ||||
|  | ||||
| if config.LOG_FILE: | ||||
|     logging.basicConfig(filename=config.LOG_FILE, level=logging.WARNING) | ||||
| else: | ||||
|     logging.basicConfig(level=logging.INFO) | ||||
|  | ||||
| client = zulip.Client(email=config.ZULIP_USER, api_key=config.ZULIP_API_KEY, | ||||
|                       site=config.ZULIP_SITE, client="ZulipAsana/" + VERSION) | ||||
|  | ||||
| def fetch_from_asana(path): | ||||
|     """ | ||||
|     Request a resource through the Asana API, authenticating using | ||||
|     HTTP basic auth. | ||||
|     """ | ||||
|     auth = base64.encodestring('%s:' % (config.ASANA_API_KEY,)) | ||||
|     headers = {"Authorization": "Basic %s" % auth} | ||||
|  | ||||
|     url = "https://app.asana.com/api/1.0" + path | ||||
|     request = urllib2.Request(url, None, headers) | ||||
|     result = urllib2.urlopen(request) | ||||
|  | ||||
|     return json.load(result) | ||||
|  | ||||
| def send_zulip(topic, content): | ||||
|     """ | ||||
|     Send a message to Zulip using the configured stream and bot credentials. | ||||
|     """ | ||||
|     message = {"type": "stream", | ||||
|                "sender": config.ZULIP_USER, | ||||
|                "to": config.ZULIP_STREAM_NAME, | ||||
|                "subject": topic, | ||||
|                "content": content, | ||||
|                } | ||||
|     return client.send_message(message) | ||||
|  | ||||
| def datestring_to_datetime(datestring): | ||||
|     """ | ||||
|     Given an ISO 8601 datestring, return the corresponding datetime object. | ||||
|     """ | ||||
|     return dateutil.parser.parse(datestring).replace( | ||||
|         tzinfo=dateutil.tz.gettz('Z')) | ||||
|  | ||||
| class TaskDict(dict): | ||||
|     """ | ||||
|     A helper class to turn a dictionary with task information into an | ||||
|     object where each of the keys is an attribute for easy access. | ||||
|     """ | ||||
|     def __getattr__(self, field): | ||||
|         return self.get(field) | ||||
|  | ||||
| def format_topic(task, projects): | ||||
|     """ | ||||
|     Return a string that will be the Zulip message topic for this task. | ||||
|     """ | ||||
|     # Tasks can be associated with multiple projects, but in practice they seem | ||||
|     # to mostly be associated with one. | ||||
|     project_name = projects[task.projects[0]["id"]] | ||||
|     return "%s: %s" % (project_name, task.name) | ||||
|  | ||||
| def format_assignee(task, users): | ||||
|     """ | ||||
|     Return a string describing the task's assignee. | ||||
|     """ | ||||
|     if task.assignee: | ||||
|         assignee_name = users[task.assignee["id"]] | ||||
|         assignee_info = "**Assigned to**: %s (%s)" % ( | ||||
|             assignee_name, task.assignee_status) | ||||
|     else: | ||||
|         assignee_info = "**Status**: Unassigned" | ||||
|  | ||||
|     return assignee_info | ||||
|  | ||||
| def format_due_date(task): | ||||
|     """ | ||||
|     Return a string describing the task's due date. | ||||
|     """ | ||||
|     if task.due_on: | ||||
|         due_date_info = "**Due on**: %s" % (task.due_on,) | ||||
|     else: | ||||
|         due_date_info = "**Due date**: None" | ||||
|     return due_date_info | ||||
|  | ||||
| def format_task_creation_event(task, projects, users): | ||||
|     """ | ||||
|     Format the topic and content for a newly-created task. | ||||
|     """ | ||||
|     topic = format_topic(task, projects) | ||||
|     assignee_info = format_assignee(task, users) | ||||
|     due_date_info = format_due_date(task) | ||||
|  | ||||
|     content = """Task **%s** created: | ||||
|  | ||||
| ~~~ quote | ||||
| %s | ||||
| ~~~ | ||||
|  | ||||
| %s | ||||
| %s | ||||
| """ % (task.name, task.notes, assignee_info, due_date_info) | ||||
|     return topic, content | ||||
|  | ||||
| def format_task_completion_event(task, projects, users): | ||||
|     """ | ||||
|     Format the topic and content for a completed task. | ||||
|     """ | ||||
|     topic = format_topic(task, projects) | ||||
|     assignee_info = format_assignee(task, users) | ||||
|     due_date_info = format_due_date(task) | ||||
|  | ||||
|     content = """Task **%s** completed. :white_check_mark: | ||||
|  | ||||
| %s | ||||
| %s | ||||
| """ % (task.name, assignee_info, due_date_info) | ||||
|     return topic, content | ||||
|  | ||||
| def since(): | ||||
|     """ | ||||
|     Return a newness threshold for task events to be processed. | ||||
|     """ | ||||
|     # If we have a record of the last event processed and it is recent, use it, | ||||
|     # else process everything from ASANA_INITIAL_HISTORY_HOURS ago. | ||||
|     def default_since(): | ||||
|         return datetime.utcnow() - timedelta( | ||||
|             hours=config.ASANA_INITIAL_HISTORY_HOURS) | ||||
|  | ||||
|     if os.path.exists(config.RESUME_FILE): | ||||
|         try: | ||||
|             with open(config.RESUME_FILE, "r") as f: | ||||
|                 datestring = f.readline().strip() | ||||
|                 timestamp = float(datestring) | ||||
|                 max_timestamp_processed = datetime.fromtimestamp(timestamp) | ||||
|                 logging.info("Reading from resume file: " + datestring) | ||||
|         except (ValueError,IOError) as e: | ||||
|             logging.warn("Could not open resume file: %s" % ( | ||||
|                     e.message or e.strerror,)) | ||||
|             max_timestamp_processed = default_since() | ||||
|     else: | ||||
|         logging.info("No resume file, processing an initial history.") | ||||
|         max_timestamp_processed = default_since() | ||||
|  | ||||
|     # Even if we can read a timestamp from RESUME_FILE, if it is old don't use | ||||
|     # it. | ||||
|     return max(max_timestamp_processed, default_since()) | ||||
|  | ||||
| def process_new_events(): | ||||
|     """ | ||||
|     Forward new Asana task events to Zulip. | ||||
|     """ | ||||
|     # In task queries, Asana only exposes IDs for projects and users, so we need | ||||
|     # to look up the mappings. | ||||
|     projects = dict((elt["id"], elt["name"]) for elt in \ | ||||
|                         fetch_from_asana("/projects")["data"]) | ||||
|     users = dict((elt["id"], elt["name"]) for elt in \ | ||||
|                      fetch_from_asana("/users")["data"]) | ||||
|  | ||||
|     cutoff = since() | ||||
|     max_timestamp_processed = cutoff | ||||
|     time_operations = (("created_at", format_task_creation_event), | ||||
|                        ("completed_at", format_task_completion_event)) | ||||
|     task_fields = ["assignee", "assignee_status", "created_at", "completed_at", | ||||
|                    "modified_at", "due_on", "name", "notes", "projects"] | ||||
|  | ||||
|     # First, gather all of the tasks that need processing. We'll | ||||
|     # process them in order. | ||||
|     new_events = [] | ||||
|  | ||||
|     for project_id in projects: | ||||
|         project_url = "/projects/%d/tasks?opt_fields=%s" % ( | ||||
|             project_id, ",".join(task_fields)) | ||||
|         tasks = fetch_from_asana(project_url)["data"] | ||||
|  | ||||
|         for task in tasks: | ||||
|             task = TaskDict(task) | ||||
|  | ||||
|             for time_field, operation in time_operations: | ||||
|                 if task[time_field]: | ||||
|                     operation_time = datestring_to_datetime(task[time_field]) | ||||
|                     if operation_time > cutoff: | ||||
|                         new_events.append((operation_time, time_field, operation, task)) | ||||
|  | ||||
|     new_events.sort() | ||||
|     now = datetime.utcnow() | ||||
|  | ||||
|     for operation_time, time_field, operation, task in new_events: | ||||
|         # Unfortunately, creating an Asana task is not an atomic operation. If | ||||
|         # the task was just created, or is missing basic information, it is | ||||
|         # probably because the task is still being filled out -- wait until the | ||||
|         # next round to process it. | ||||
|         if (time_field == "created_at") and \ | ||||
|                 (now - operation_time < timedelta(seconds=30)): | ||||
|             # The task was just created, give the user some time to fill out | ||||
|             # more information. | ||||
|             return | ||||
|  | ||||
|         if (time_field == "created_at") and (not task.name) and \ | ||||
|                 (now - operation_time < timedelta(seconds=60)): | ||||
|             # If this new task hasn't had a name for a full 30 seconds, assume | ||||
|             # you don't plan on giving it one. | ||||
|             return | ||||
|  | ||||
|         topic, content = operation(task, projects, users) | ||||
|         logging.info("Sending Zulip for " + topic) | ||||
|         result = send_zulip(topic, content) | ||||
|  | ||||
|         # If the Zulip wasn't sent successfully, don't update the | ||||
|         # max timestamp processed so the task has another change to | ||||
|         # be forwarded. Exit, giving temporary issues time to | ||||
|         # resolve. | ||||
|         if not result.get("result"): | ||||
|             logging.warn("Malformed result, exiting:") | ||||
|             logging.warn(result) | ||||
|             return | ||||
|  | ||||
|         if result["result"] != "success": | ||||
|             logging.warn(result["msg"]) | ||||
|             return | ||||
|  | ||||
|         if operation_time > max_timestamp_processed: | ||||
|             max_timestamp_processed = operation_time | ||||
|  | ||||
|     if max_timestamp_processed > cutoff: | ||||
|         max_datestring = max_timestamp_processed.strftime("%s.%f") | ||||
|         logging.info("Updating resume file: " + max_datestring) | ||||
|         open(config.RESUME_FILE, 'w').write(max_datestring) | ||||
|  | ||||
| while True: | ||||
|     try: | ||||
|         process_new_events() | ||||
|         time.sleep(5) | ||||
|     except KeyboardInterrupt: | ||||
|         logging.info("Shutting down...") | ||||
|         logging.info("Set LOG_FILE to log to a file instead of stdout.") | ||||
|         break | ||||
							
								
								
									
										53
									
								
								api/integrations/basecamp/zulip_basecamp_config.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										53
									
								
								api/integrations/basecamp/zulip_basecamp_config.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,53 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
| # | ||||
| # Copyright © 2014 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
|  | ||||
|  | ||||
| # Change these values to configure authentication for basecamp account | ||||
| BASECAMP_ACCOUNT_ID = "12345678" | ||||
| BASECAMP_USERNAME = "foo@example.com" | ||||
| BASECAMP_PASSWORD = "p455w0rd" | ||||
|  | ||||
| # This script will mirror this many hours of history on the first run. | ||||
| # On subsequent runs this value is ignored. | ||||
| BASECAMP_INITIAL_HISTORY_HOURS = 0 | ||||
|  | ||||
| # Change these values to configure Zulip authentication for the plugin | ||||
| ZULIP_USER = "basecamp-bot@example.com" | ||||
| ZULIP_API_KEY = "0123456789abcdef0123456789abcdef" | ||||
| ZULIP_STREAM_NAME = "basecamp" | ||||
|  | ||||
| ## If properly installed, the Zulip API should be in your import | ||||
| ## path, but if not, set a custom path below | ||||
| ZULIP_API_PATH = None | ||||
|  | ||||
| # If you're using Zulip Enterprise, set this to your Zulip Enterprise server | ||||
| ZULIP_SITE = "https://api.zulip.com" | ||||
|  | ||||
| # If you wish to log to a file rather than stdout/stderr, | ||||
| # please fill this out your desired path | ||||
| LOG_FILE = None | ||||
|  | ||||
| # This file is used to resume this mirror in case the script shuts down. | ||||
| # It is required and needs to be writeable. | ||||
| RESUME_FILE = "/var/tmp/zulip_basecamp.state" | ||||
							
								
								
									
										180
									
								
								api/integrations/basecamp/zulip_basecamp_mirror
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										180
									
								
								api/integrations/basecamp/zulip_basecamp_mirror
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,180 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
| # | ||||
| # Zulip mirror of Basecamp activity | ||||
| # Copyright © 2014 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
| # | ||||
| # The "basecamp-mirror.py" script is run continuously, possibly on a work computer | ||||
| # or preferably on a server. | ||||
| # You may need to install the python-requests library. | ||||
|  | ||||
| import requests | ||||
| import logging | ||||
| import time | ||||
| import re | ||||
| import sys | ||||
| import os | ||||
| from datetime import datetime, timedelta | ||||
| from HTMLParser import HTMLParser | ||||
|  | ||||
| sys.path.insert(0, os.path.dirname(__file__)) | ||||
| import zulip_basecamp_config as config | ||||
| VERSION = "0.9" | ||||
|  | ||||
| if config.ZULIP_API_PATH is not None: | ||||
|     sys.path.append(config.ZULIP_API_PATH) | ||||
| import zulip | ||||
|  | ||||
|  | ||||
| client = zulip.Client( | ||||
|     email=config.ZULIP_USER, | ||||
|     site=config.ZULIP_SITE, | ||||
|     api_key=config.ZULIP_API_KEY, | ||||
|     client="ZulipBasecamp/" + VERSION) | ||||
| user_agent = "Basecamp To Zulip Mirroring script (support@zulip.com)" | ||||
| htmlParser = HTMLParser() | ||||
|  | ||||
| # find some form of JSON loader/dumper, with a preference order for speed. | ||||
| json_implementations = ['ujson', 'cjson', 'simplejson', 'json'] | ||||
|  | ||||
| while len(json_implementations): | ||||
|     try: | ||||
|         json = __import__(json_implementations.pop(0)) | ||||
|         break | ||||
|     except ImportError: | ||||
|         continue | ||||
|  | ||||
| # void function that checks the permissions of the files this script needs. | ||||
| def check_permissions(): | ||||
|     # check that the log file can be written | ||||
|     if config.LOG_FILE: | ||||
|         try: | ||||
|             open(config.LOG_FILE, "w") | ||||
|         except IOError as e: | ||||
|             sys.stderr("Could not open up log for writing:") | ||||
|             sys.stderr(e) | ||||
|     # check that the resume file can be written (this creates if it doesn't exist) | ||||
|     try: | ||||
|         open(config.RESUME_FILE, "a+") | ||||
|     except IOError as e: | ||||
|         sys.stderr("Could not open up the file %s for reading and writing" % (config.RESUME_FILE,)) | ||||
|         sys.stderr(e) | ||||
|  | ||||
| # builds the message dict for sending a message with the Zulip API | ||||
| def build_message(event): | ||||
|     if not (event.has_key('bucket') and event.has_key('creator') and event.has_key('html_url')): | ||||
|         logging.error("Perhaps the Basecamp API changed behavior? " | ||||
|                       "This event doesn't have the expected format:\n%s" %(event,)) | ||||
|         return None | ||||
|     # adjust the topic length to be bounded to 60 characters | ||||
|     topic = event['bucket']['name'] | ||||
|     if len(topic) > 60: | ||||
|         topic = topic[0:57] + "..." | ||||
|     # get the action and target values | ||||
|     action = htmlParser.unescape(re.sub(r"<[^<>]+>", "", event.get('action', ''))) | ||||
|     target = htmlParser.unescape(event.get('target', '')) | ||||
|     # Some events have "excerpts", which we blockquote | ||||
|     excerpt = htmlParser.unescape(event.get('excerpt','')) | ||||
|     if excerpt.strip() == "": | ||||
|         message = '**%s** %s [%s](%s).' % (event['creator']['name'], action, target, event['html_url']) | ||||
|     else: | ||||
|         message = '**%s** %s [%s](%s).\n> %s' % (event['creator']['name'], action, target, event['html_url'], excerpt) | ||||
|     # assemble the message data dict | ||||
|     message_data = { | ||||
|         "type": "stream", | ||||
|         "to": config.ZULIP_STREAM_NAME, | ||||
|         "subject": topic, | ||||
|         "content": message, | ||||
|     } | ||||
|     return message_data | ||||
|  | ||||
| # the main run loop for this mirror script | ||||
| def run_mirror(): | ||||
|     # we should have the right (write) permissions on the resume file, as seen | ||||
|     # in check_permissions, but it may still be empty or corrupted | ||||
|     try: | ||||
|         with open(config.RESUME_FILE) as f: | ||||
|             since = f.read() | ||||
|         since = re.search(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}-\d{2}:\d{2}", since) | ||||
|         assert since, "resume file does not meet expected format" | ||||
|         since = since.string | ||||
|     except (AssertionError,IOError) as e: | ||||
|         logging.warn("Could not open resume file: %s" % (e.message or e.strerror,)) | ||||
|         since = (datetime.utcnow() - timedelta(hours=config.BASECAMP_INITIAL_HISTORY_HOURS)).isoformat() + "-00:00" | ||||
|     try: | ||||
|         # we use an exponential backoff approach when we get 429 (Too Many Requests). | ||||
|         sleepInterval = 1 | ||||
|         while 1: | ||||
|             time.sleep(sleepInterval) | ||||
|             response = requests.get("https://basecamp.com/%s/api/v1/events.json" % (config.BASECAMP_ACCOUNT_ID), | ||||
|                                     params={'since': since}, | ||||
|                                     auth=(config.BASECAMP_USERNAME, config.BASECAMP_PASSWORD), | ||||
|                                     headers = {"User-Agent": user_agent}) | ||||
|             if response.status_code == 200: | ||||
|                 sleepInterval = 1 | ||||
|                 events = json.loads(response.text) | ||||
|                 if len(events): | ||||
|                     logging.info("Got event(s): %s" % (response.text,)) | ||||
|             if response.status_code >= 500: | ||||
|                 logging.error(response.status_code) | ||||
|                 continue | ||||
|             if response.status_code == 429: | ||||
|                 # exponential backoff | ||||
|                 sleepInterval *= 2 | ||||
|                 logging.error(response.status_code) | ||||
|                 continue | ||||
|             if response.status_code == 400: | ||||
|                 logging.error("Something went wrong. Basecamp must be unhappy for this reason: %s" % (response.text,)) | ||||
|                 sys.exit(-1) | ||||
|             if response.status_code == 401: | ||||
|                 logging.error("Bad authorization from Basecamp. Please check your Basecamp login credentials") | ||||
|                 sys.exit(-1) | ||||
|             if len(events): | ||||
|                 since = events[0]['created_at'] | ||||
|             for event in reversed(events): | ||||
|                 message_data = build_message(event) | ||||
|                 if not message_data: | ||||
|                     continue | ||||
|                 zulip_api_result = client.send_message(message_data) | ||||
|                 if zulip_api_result['result'] == "success": | ||||
|                     logging.info("sent zulip with id: %s" % (zulip_api_result['id'],)) | ||||
|                 else: | ||||
|                     logging.warn("%s %s" % (zulip_api_result['result'], zulip_api_result['msg'])) | ||||
|                 # update 'since' each time in case we get KeyboardInterrupted | ||||
|                 since = event['created_at'] | ||||
|                 # avoid hitting rate-limit | ||||
|                 time.sleep(0.2) | ||||
|  | ||||
|     except KeyboardInterrupt: | ||||
|         logging.info("Shutting down, please hold") | ||||
|         open("events.last", 'w').write(since) | ||||
|         logging.info("Done!") | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     if not isinstance(config.RESUME_FILE, basestring): | ||||
|         sys.stderr("RESUME_FILE path not given; refusing to continue") | ||||
|     check_permissions() | ||||
|     if config.LOG_FILE: | ||||
|         logging.basicConfig(filename=config.LOG_FILE, level=logging.INFO) | ||||
|     else: | ||||
|         logging.basicConfig(level=logging.INFO) | ||||
|     run_mirror() | ||||
							
								
								
									
										62
									
								
								api/integrations/codebase/zulip_codebase_config.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										62
									
								
								api/integrations/codebase/zulip_codebase_config.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,62 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
| # | ||||
| # Copyright © 2014 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
|  | ||||
|  | ||||
| # Change these values to configure authentication for your codebase account | ||||
| # Note that this is the Codebase API Username, found in the Settings page | ||||
| # for your account | ||||
| CODEBASE_API_USERNAME = "foo@example.com" | ||||
| CODEBASE_API_KEY = "1234561234567abcdef" | ||||
|  | ||||
| # The URL of your codebase setup | ||||
| CODEBASE_ROOT_URL = "https://YOUR_COMPANY.codebasehq.com" | ||||
|  | ||||
| # When initially started, how many hours of messages to include. | ||||
| # Note that the Codebase API only returns the 20 latest events, | ||||
| # if you have more than 20 events that fit within this window, | ||||
| # earlier ones may be lost | ||||
| CODEBASE_INITIAL_HISTORY_HOURS = 12 | ||||
|  | ||||
| # Change these values to configure Zulip authentication for the plugin | ||||
| ZULIP_USER = "codebase-bot@example.com" | ||||
| ZULIP_API_KEY = "0123456789abcdef0123456789abcdef" | ||||
|  | ||||
| # The streams to send commit information and ticket information to | ||||
| ZULIP_COMMITS_STREAM_NAME = "codebase" | ||||
| ZULIP_TICKETS_STREAM_NAME = "tickets" | ||||
|  | ||||
| # If properly installed, the Zulip API should be in your import | ||||
| # path, but if not, set a custom path below | ||||
| ZULIP_API_PATH = None | ||||
|  | ||||
| # If you're using Zulip Enterprise, set this to your Zulip Enterprise server | ||||
| ZULIP_SITE = "https://api.zulip.com" | ||||
|  | ||||
| # If you wish to log to a file rather than stdout/stderr, | ||||
| # please fill this out your desired path | ||||
| LOG_FILE = None | ||||
|  | ||||
| # This file is used to resume this mirror in case the script shuts down. | ||||
| # It is required and needs to be writeable. | ||||
| RESUME_FILE = "/var/tmp/zulip_codebase.state" | ||||
							
								
								
									
										318
									
								
								api/integrations/codebase/zulip_codebase_mirror
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										318
									
								
								api/integrations/codebase/zulip_codebase_mirror
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,318 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
| # | ||||
| # Zulip mirror of Codebase HQ activity | ||||
| # Copyright © 2014 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
| # | ||||
| # The "codebase-mirror.py" script is run continuously, possibly on a work computer | ||||
| # or preferably on a server. | ||||
| # | ||||
| # When restarted, it will attempt to pick up where it left off. | ||||
| # | ||||
| # You may need to install the python-requests library, as well as python-dateutil | ||||
|  | ||||
| import requests | ||||
| import logging | ||||
| import time | ||||
| import sys | ||||
| import os | ||||
|  | ||||
| from datetime import datetime, timedelta | ||||
|  | ||||
| import dateutil.parser | ||||
|  | ||||
| sys.path.insert(0, os.path.dirname(__file__)) | ||||
| import zulip_codebase_config as config | ||||
| VERSION = "0.9" | ||||
|  | ||||
| if config.ZULIP_API_PATH is not None: | ||||
|     sys.path.append(config.ZULIP_API_PATH) | ||||
| import zulip | ||||
|  | ||||
| client = zulip.Client( | ||||
|     email=config.ZULIP_USER, | ||||
|     site=config.ZULIP_SITE, | ||||
|     api_key=config.ZULIP_API_KEY, | ||||
|     client="ZulipCodebase/" + VERSION) | ||||
| user_agent = "Codebase To Zulip Mirroring script (support@zulip.com)" | ||||
|  | ||||
| # find some form of JSON loader/dumper, with a preference order for speed. | ||||
| json_implementations = ['ujson', 'cjson', 'simplejson', 'json'] | ||||
|  | ||||
| while len(json_implementations): | ||||
|     try: | ||||
|         json = __import__(json_implementations.pop(0)) | ||||
|         break | ||||
|     except ImportError: | ||||
|         continue | ||||
|  | ||||
| def make_api_call(path): | ||||
|     response = requests.get("https://api3.codebasehq.com/%s" % (path,), | ||||
|                              auth=(config.CODEBASE_API_USERNAME, config.CODEBASE_API_KEY), | ||||
|                              params={'raw': True}, | ||||
|                              headers = {"User-Agent": user_agent, | ||||
|                                         "Content-Type": "application/json", | ||||
|                                         "Accept": "application/json"}) | ||||
|     if response.status_code == 200: | ||||
|         return json.loads(response.text) | ||||
|  | ||||
|     if response.status_code >= 500: | ||||
|         logging.error(response.status_code) | ||||
|         return None | ||||
|     if response.status_code == 403: | ||||
|         logging.error("Bad authorization from Codebase. Please check your credentials") | ||||
|         sys.exit(-1) | ||||
|     else: | ||||
|         logging.warn("Found non-success response status code: %s %s" % (response.status_code, response.text)) | ||||
|         return None | ||||
|  | ||||
| def make_url(path): | ||||
|     return "%s/%s" % (config.CODEBASE_ROOT_URL, path) | ||||
|  | ||||
| def handle_event(event): | ||||
|     event = event['event'] | ||||
|     event_type = event['type'] | ||||
|     actor_name = event['actor_name'] | ||||
|  | ||||
|     raw_props = event.get('raw_properties', {}) | ||||
|  | ||||
|     project_link = raw_props.get('project_permalink') | ||||
|  | ||||
|     subject = None | ||||
|     content = None | ||||
|     if event_type == 'repository_creation': | ||||
|         stream = config.ZULIP_COMMITS_STREAM_NAME | ||||
|  | ||||
|         project_name = raw_props.get('name') | ||||
|         project_repo_type = raw_props.get('scm_type') | ||||
|  | ||||
|         url = make_url("projects/%s" % project_link) | ||||
|         scm = "of type %s" % (project_repo_type,) if project_repo_type else "" | ||||
|  | ||||
|  | ||||
|         subject = "Repository %s Created" % (project_name,) | ||||
|         content = "%s created a new repository %s [%s](%s)" % (actor_name, scm, project_name, url) | ||||
|     elif event_type == 'push': | ||||
|         stream = config.ZULIP_COMMITS_STREAM_NAME | ||||
|  | ||||
|         num_commits = raw_props.get('commits_count') | ||||
|         branch = raw_props.get('ref_name') | ||||
|         project = raw_props.get('project_name') | ||||
|         repo_link = raw_props.get('repository_permalink') | ||||
|         deleted_ref = raw_props.get('deleted_ref') | ||||
|         new_ref = raw_props.get('new_ref') | ||||
|  | ||||
|         subject = "Push to %s on %s" % (branch, project) | ||||
|  | ||||
|         if deleted_ref: | ||||
|             content = "%s deleted branch %s from %s" % (actor_name, branch, project) | ||||
|         else: | ||||
|             if new_ref: | ||||
|                 branch = "new branch %s" % (branch, ) | ||||
|             content = "%s pushed %s commit(s) to %s in project %s:\n\n" % \ | ||||
|                         (actor_name, num_commits, branch, project) | ||||
|             for commit in raw_props.get('commits'): | ||||
|                 ref = commit.get('ref') | ||||
|                 url = make_url("projects/%s/repositories/%s/commit/%s" % (project_link, repo_link, ref)) | ||||
|                 message = commit.get('message') | ||||
|                 content += "* [%s](%s): %s\n" % (ref, url, message) | ||||
|     elif event_type == 'ticketing_ticket': | ||||
|         stream = config.ZULIP_TICKETS_STREAM_NAME | ||||
|  | ||||
|         num = raw_props.get('number') | ||||
|         name = raw_props.get('subject') | ||||
|         assignee = raw_props.get('assignee') | ||||
|         priority = raw_props.get('priority') | ||||
|         url = make_url("projects/%s/tickets/%s" % (project_link, num)) | ||||
|  | ||||
|         if assignee is None: | ||||
|             assignee = "no one" | ||||
|         subject = "#%s: %s" % (num, name) | ||||
|         content = """%s created a new ticket [#%s](%s) priority **%s** assigned to %s:\n\n~~~ quote\n %s""" % \ | ||||
|                     (actor_name, num, url, priority, assignee, name) | ||||
|     elif event_type == 'ticketing_note': | ||||
|         stream = config.ZULIP_TICKETS_STREAM_NAME | ||||
|  | ||||
|         num = raw_props.get('number') | ||||
|         name = raw_props.get('subject') | ||||
|         body = raw_props.get('content') | ||||
|         changes = raw_props.get('changes') | ||||
|  | ||||
|  | ||||
|         url = make_url("projects/%s/tickets/%s" % (project_link, num)) | ||||
|         subject = "#%s: %s" % (num, name) | ||||
|  | ||||
|         content = "" | ||||
|         if body is not None and len(body) > 0: | ||||
|             content = "%s added a comment to ticket [#%s](%s):\n\n~~~ quote\n%s\n\n" % (actor_name, num, url, body) | ||||
|  | ||||
|         if 'status_id' in changes: | ||||
|             status_change = changes.get('status_id') | ||||
|             content += "Status changed from **%s** to **%s**\n\n" % (status_change[0], status_change[1]) | ||||
|     elif event_type == 'ticketing_milestone': | ||||
|         stream = config.ZULIP_TICKETS_STREAM_NAME | ||||
|  | ||||
|         name = raw_props.get('name') | ||||
|         identifier = raw_props.get('identifier') | ||||
|         url = make_url("projects/%s/milestone/%s" % (project_link, identifier)) | ||||
|  | ||||
|         subject = name | ||||
|         content = "%s created a new milestone [%s](%s)" % (actor_name, name, url) | ||||
|     elif event_type == 'comment': | ||||
|         stream = config.ZULIP_COMMITS_STREAM_NAME | ||||
|  | ||||
|         comment = raw_props.get('content') | ||||
|         commit = raw_props.get('commit_ref') | ||||
|  | ||||
|         # If there's a commit id, it's a comment to a commit | ||||
|         if commit: | ||||
|             repo_link = raw_props.get('repository_permalink') | ||||
|  | ||||
|             url = make_url('projects/%s/repositories/%s/commit/%s' % (project_link, repo_link, commit)) | ||||
|  | ||||
|             subject = "%s commented on %s" % (actor_name, commit) | ||||
|             content = "%s commented on [%s](%s):\n\n~~~ quote\n%s" % (actor_name, commit, url, comment) | ||||
|         else: | ||||
|             # Otherwise, this is a Discussion item, and handle it | ||||
|             subj = raw_props.get("subject") | ||||
|             category = raw_props.get("category") | ||||
|             comment_content = raw_props.get("content") | ||||
|  | ||||
|             subject = "Discussion: %s" % (subj,) | ||||
|  | ||||
|             if category: | ||||
|                 content = "%s started a new discussion in %s:\n\n~~~ quote\n%s\n~~~" % (actor_name, category, comment_content) | ||||
|             else: | ||||
|                 content = "%s posted:\n\n~~~ quote\n%s\n~~~" % (actor_name, comment_content) | ||||
|  | ||||
|     elif event_type == 'deployment': | ||||
|         stream = config.ZULIP_COMMITS_STREAM_NAME | ||||
|  | ||||
|         start_ref = raw_props.get('start_ref') | ||||
|         end_ref = raw_props.get('end_ref') | ||||
|         environment = raw_props.get('environment') | ||||
|         servers = raw_props.get('servers') | ||||
|         repo_link = raw_props.get('repository_permalink') | ||||
|  | ||||
|         start_ref_url = make_url("projects/%s/repositories/%s/commit/%s" % (project_link, repo_link, start_ref)) | ||||
|         end_ref_url = make_url("projects/%s/repositories/%s/commit/%s" % (project_link, repo_link, end_ref)) | ||||
|         between_url = make_url("projects/%s/repositories/%s/compare/%s...%s" % (project_link, repo_link, start_ref, end_ref)) | ||||
|  | ||||
|         subject = "Deployment to %s" % (environment,) | ||||
|  | ||||
|         content = "%s deployed [%s](%s) [through](%s) [%s](%s) to the **%s** environment." % \ | ||||
|                     (actor_name, start_ref, start_ref_url, between_url, end_ref, end_ref_url, environment) | ||||
|         if servers is not None: | ||||
|             content += "\n\nServers deployed to: %s" % (", ".join(["`%s`" % (server,) for server in servers])) | ||||
|  | ||||
|     elif event_type == 'named_tree': | ||||
|         # Docs say named_tree type used for new/deleting branches and tags, | ||||
|         # but experimental testing showed that they were all sent as 'push' events | ||||
|         pass | ||||
|     elif event_type == 'wiki_page': | ||||
|         logging.warn("Wiki page notifications not yet implemented") | ||||
|     elif event_type == 'sprint_creation': | ||||
|         logging.warn("Sprint notifications not yet implemented") | ||||
|     elif event_type == 'sprint_ended': | ||||
|         logging.warn("Sprint notifications not yet implemented") | ||||
|     else: | ||||
|         logging.info("Unknown event type %s, ignoring!" % (event_type,)) | ||||
|  | ||||
|     if subject and content: | ||||
|         if len(subject) > 60: | ||||
|             subject = subject[:57].rstrip() + '...' | ||||
|  | ||||
|         res = client.send_message({"type": "stream", | ||||
|                                    "to": stream, | ||||
|                                    "subject": subject, | ||||
|                                    "content": content}) | ||||
|         if res['result'] == 'success': | ||||
|             logging.info("Successfully sent Zulip with id: %s" % (res['id'])) | ||||
|         else: | ||||
|             logging.warn("Failed to send Zulip: %s %s" % (res['result'], res['msg'])) | ||||
|  | ||||
|  | ||||
| # the main run loop for this mirror script | ||||
| def run_mirror(): | ||||
|     # we should have the right (write) permissions on the resume file, as seen | ||||
|     # in check_permissions, but it may still be empty or corrupted | ||||
|     def default_since(): | ||||
|         return datetime.utcnow() - timedelta(hours=config.CODEBASE_INITIAL_HISTORY_HOURS) | ||||
|  | ||||
|     try: | ||||
|         with open(config.RESUME_FILE) as f: | ||||
|             timestamp = f.read() | ||||
|         if timestamp == '': | ||||
|             since = default_since() | ||||
|         else: | ||||
|             timestamp = int(timestamp, 10) | ||||
|             since = datetime.fromtimestamp(timestamp) | ||||
|     except (ValueError,IOError) as e: | ||||
|         logging.warn("Could not open resume file: %s" % (e.message or e.strerror,)) | ||||
|         since = default_since() | ||||
|  | ||||
|     try: | ||||
|         sleepInterval = 1 | ||||
|         while 1: | ||||
|             events = make_api_call("activity")[::-1] | ||||
|             if events is not None: | ||||
|                 sleepInterval = 1 | ||||
|                 for event in events: | ||||
|                     timestamp = event.get('event', {}).get('timestamp', '') | ||||
|                     event_date = dateutil.parser.parse(timestamp).replace(tzinfo=None) | ||||
|                     if event_date > since: | ||||
|                         handle_event(event) | ||||
|                         since = event_date | ||||
|             else: | ||||
|                 # back off a bit | ||||
|                 if sleepInterval < 22: | ||||
|                     sleepInterval += 4 | ||||
|             time.sleep(sleepInterval) | ||||
|  | ||||
|     except KeyboardInterrupt: | ||||
|         open(config.RESUME_FILE, 'w').write(since.strftime("%s")); | ||||
|         logging.info("Shutting down Codebase mirror") | ||||
|  | ||||
| # void function that checks the permissions of the files this script needs. | ||||
| def check_permissions(): | ||||
|     # check that the log file can be written | ||||
|     if config.LOG_FILE: | ||||
|         try: | ||||
|             open(config.LOG_FILE, "w") | ||||
|         except IOError as e: | ||||
|             sys.stderr("Could not open up log for writing:") | ||||
|             sys.stderr(e) | ||||
|     # check that the resume file can be written (this creates if it doesn't exist) | ||||
|     try: | ||||
|         open(config.RESUME_FILE, "a+") | ||||
|     except IOError as e: | ||||
|         sys.stderr("Could not open up the file %s for reading and writing" % (config.RESUME_FILE,)) | ||||
|         sys.stderr(e) | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     if not isinstance(config.RESUME_FILE, basestring): | ||||
|         sys.stderr("RESUME_FILE path not given; refusing to continue") | ||||
|     check_permissions() | ||||
|     if config.LOG_FILE: | ||||
|         logging.basicConfig(filename=config.LOG_FILE, level=logging.WARNING) | ||||
|     else: | ||||
|         logging.basicConfig(level=logging.WARNING) | ||||
|     run_mirror() | ||||
							
								
								
									
										130
									
								
								api/integrations/git/post-receive
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										130
									
								
								api/integrations/git/post-receive
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,130 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
| # | ||||
| # Zulip notification post-receive hook. | ||||
| # Copyright © 2012-2014 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
| # | ||||
| # The "post-receive" script is run after receive-pack has accepted a pack | ||||
| # and the repository has been updated.  It is passed arguments in through | ||||
| # stdin in the form | ||||
| #  <oldrev> <newrev> <refname> | ||||
| # For example: | ||||
| #  aa453216d1b3e49e7f6f98441fa56946ddcd6a20 68f7abf4e6f922807889f52bc043ecd31b79f814 refs/heads/master | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import subprocess | ||||
| import os.path | ||||
|  | ||||
| sys.path.insert(0, os.path.dirname(__file__)) | ||||
| import zulip_git_config as config | ||||
| VERSION = "0.9" | ||||
|  | ||||
| if config.ZULIP_API_PATH is not None: | ||||
|     sys.path.append(config.ZULIP_API_PATH) | ||||
|  | ||||
| import zulip | ||||
| client = zulip.Client( | ||||
|     email=config.ZULIP_USER, | ||||
|     site=config.ZULIP_SITE, | ||||
|     api_key=config.ZULIP_API_KEY, | ||||
|     client="ZulipGit/" + VERSION) | ||||
|  | ||||
# check_output is backported from subprocess.py in Python 2.7
def check_output(*popenargs, **kwargs):
    """Run a command and return its captured stdout.

    Mirrors Python 2.7's subprocess.check_output: arguments are passed
    straight through to subprocess.Popen, except that stdout is always
    captured here.

    Raises:
        ValueError: if the caller tries to pass its own ``stdout``.
        subprocess.CalledProcessError: if the command exits nonzero
            (the captured output is attached to the exception).
    """
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
    output, unused_err = process.communicate()
    retcode = process.poll()
    if retcode:
        cmd = kwargs.get("args")
        if cmd is None:
            cmd = popenargs[0]
        raise subprocess.CalledProcessError(retcode, cmd, output=output)
    return output

# Only install the backport when the stdlib doesn't already provide it
# (Python < 2.7); never clobber the native implementation.
if not hasattr(subprocess, 'check_output'):
    subprocess.check_output = check_output
|  | ||||
def git_repository_name():
    """Return the repository's name.

    For a bare repository this is the current directory's name with a
    conventional ".git" suffix removed; for a working tree it is the
    name of the directory containing the ".git" checkout directory.
    """
    output = subprocess.check_output(["git", "rev-parse", "--is-bare-repository"])
    # subprocess.check_output returns bytes on Python 3; normalize so the
    # comparison below works on both Python 2 and 3.
    if not isinstance(output, str):
        output = output.decode("utf-8")
    if output.strip() == "true":
        name = os.path.basename(os.getcwd())
        # Strip ".git" only when actually present -- a bare repository's
        # directory is not required to use that suffix, and the old
        # unconditional slice chopped 4 characters off any name.
        if name.endswith(".git"):
            name = name[:-len(".git")]
        return name
    else:
        return os.path.basename(os.path.dirname(os.getcwd()))
|  | ||||
def git_commit_range(oldrev, newrev):
    """Return one formatted line per commit in oldrev..newrev, oldest first."""
    log_cmd = ["git", "log", "--reverse",
               "--pretty=%aE %H %s", "%s..%s" % (oldrev, newrev)]
    formatted = []
    for entry in subprocess.check_output(log_cmd).splitlines():
        # Each entry is "<author email> <sha> <subject>".
        author_email, commit_id, subject = entry.split(None, 2)
        if hasattr(config, "format_commit_message"):
            # Let the site-local config customize the rendering.
            formatted.append(config.format_commit_message(author_email, subject, commit_id))
        else:
            formatted.append('!avatar(%s) %s\n' % (author_email, subject))
    return ''.join(formatted)
|  | ||||
def send_bot_message(oldrev, newrev, refname):
    """Compose and send the Zulip notification for one updated ref."""
    repo_name = git_repository_name()
    branch = refname.replace('refs/heads/', '')
    destination = config.commit_notice_destination(repo_name, branch, newrev)
    if destination is None:
        # Don't forward the notice anywhere
        return

    zero_rev = '0' * 40  # git's all-zeros rev marks branch creation/deletion
    new_head = newrev[:12]
    old_head = oldrev[:12]

    branch_created = oldrev == zero_rev
    branch_deleted = newrev == zero_rev

    if branch_created or branch_deleted:
        # New branch pushed or old branch removed
        added = removed = ''
    else:
        added = git_commit_range(oldrev, newrev)
        removed = git_commit_range(newrev, oldrev)

    if branch_created:
        message = '`%s` was pushed to new branch `%s`' % (new_head, branch)
    elif branch_deleted:
        message = 'branch `%s` was removed (was `%s`)' % (branch, old_head)
    elif removed:
        # Commits reachable from the old tip but not the new one were
        # dropped: the branch history was rewritten.
        message = '`%s` was pushed to `%s`, **REMOVING**:\n\n%s' % (new_head, branch, removed)
        if added:
            message += '\n**and adding**:\n\n' + added
        message += '\n**A HISTORY REWRITE HAS OCCURRED!**'
        message += '\n@everyone: Please check your local branches to deal with this.'
    elif added:
        message = '`%s` was deployed to `%s` with:\n\n%s' % (new_head, branch, added)
    else:
        message = '`%s` was pushed to `%s`... but nothing changed?' % (new_head, branch)

    client.send_message({
        "type": "stream",
        "to": destination["stream"],
        "subject": destination["subject"],
        "content": message,
    })
|  | ||||
# receive-pack feeds one line per updated ref: "<oldrev> <newrev> <refname>".
for push_line in sys.stdin:
    oldrev, newrev, refname = push_line.strip().split()
    send_bot_message(oldrev, newrev, refname)
							
								
								
									
										65
									
								
								api/integrations/git/zulip_git_config.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										65
									
								
								api/integrations/git/zulip_git_config.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,65 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
| # | ||||
| # Copyright © 2014 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
|  | ||||
|  | ||||
# Change these values to configure authentication for the plugin.
# ZULIP_USER is the bot's email address and ZULIP_API_KEY its API key
# (both are shown on the bot's settings page).
ZULIP_USER = "git-bot@example.com"
ZULIP_API_KEY = "0123456789abcdef0123456789abcdef"
|  | ||||
# commit_notice_destination() lets you customize where commit notices
# are sent to with the full power of a Python function.
#
# It takes the following arguments:
# * repo   = the name of the git repository
# * branch = the name of the branch that was pushed to
# * commit = the commit id
#
# Returns a dictionary encoding the stream and subject to send the
# notification to (or None to send no notification).
#
# The default code below will send every commit pushed to "master" to
# * stream "commits"
# * topic "master"
# And similarly for branch "test-post-receive" (for use when testing).
def commit_notice_destination(repo, branch, commit):
    notified_branches = ("master", "test-post-receive")
    if branch not in notified_branches:
        # Return None for cases where you don't want a notice sent
        return None
    return {"stream": "commits",
            "subject": u"%s" % (branch,)}
|  | ||||
# Modify this function to change how commits are displayed; the most
# common customization is to include a link to the commit in your
# graphical repository viewer, e.g.
#
# return '!avatar(%s) [%s](https://example.com/commits/%s)\n' % (author, subject, commit_id)
def format_commit_message(author, subject, commit_id):
    # commit_id is unused by default but is available for building links.
    return '!avatar({0}) {1}\n'.format(author, subject)
|  | ||||
## If properly installed, the Zulip API should be in your import
## path, but if not, set a custom path below (the post-receive hook
## appends it to sys.path before importing zulip).
ZULIP_API_PATH = None

# If you're using Zulip Enterprise, set this to your Zulip Enterprise server
ZULIP_SITE = "https://api.zulip.com"
							
								
								
									
										170
									
								
								api/integrations/hg/zulip-changegroup.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										170
									
								
								api/integrations/hg/zulip-changegroup.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,170 @@ | ||||
| #!/usr/bin/env python | ||||
| # -*- coding: utf-8 -*- | ||||
| # | ||||
| # Zulip hook for Mercurial changeset pushes. | ||||
| # Copyright © 2012-2014 Zulip, Inc. | ||||
| # | ||||
| # Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| # of this software and associated documentation files (the "Software"), to deal | ||||
| # in the Software without restriction, including without limitation the rights | ||||
| # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| # copies of the Software, and to permit persons to whom the Software is | ||||
| # furnished to do so, subject to the following conditions: | ||||
| # | ||||
| # The above copyright notice and this permission notice shall be included in | ||||
| # all copies or substantial portions of the Software. | ||||
| # | ||||
| # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | ||||
| # THE SOFTWARE. | ||||
| # | ||||
| # | ||||
| # This hook is called when changesets are pushed to the master repository (ie | ||||
| # `hg push`). See https://zulip.com/integrations for installation instructions. | ||||
|  | ||||
import zulip

# Client version string reported to the Zulip server.
VERSION = "0.9"
|  | ||||
def format_summary_line(web_url, user, base, tip, branch, node):
    """
    Format the first line of the message, which contains summary
    information about the changeset and links to the changelog if a
    web URL has been configured:

    Jane Doe <jane@example.com> pushed 1 commit to master (170:e494a5be3393):
    """
    num_commits = tip - base
    suffix = "s" if num_commits > 1 else ""
    commit_phrase = "{0} commit{1}".format(num_commits, suffix)

    if web_url:
        # Link the commit count to the hgweb shortlog for this push.
        summary_url = "{0}/shortlog/{1}?revcount={2}".format(
            web_url.rstrip("/"), tip - 1, num_commits)
        commit_phrase = "[{0}]({1})".format(commit_phrase, summary_url)

    return u"**{user}** pushed {commits} to **{branch}** (`{tip}:{node}`):\n\n".format(
        user=user, commits=commit_phrase, branch=branch, tip=tip,
        node=node[:12])
|  | ||||
def format_commit_lines(web_url, repo, base, tip):
    """
    Format the per-commit information for the message, including the one-line
    commit summary and a link to the diff if a web URL has been configured:
    """
    bullet_lines = []
    for rev in range(base, tip):
        ctx = repo.changectx(repo.changelog.node(rev))
        # First line of the commit description only.
        headline = ctx.description().split("\n")[0]

        if web_url:
            diff_url = "{0}/rev/{1}".format(web_url.rstrip("/"), ctx)
            bullet_lines.append("* [{0}]({1})".format(headline, diff_url))
        else:
            bullet_lines.append("* {0}".format(headline))

    return "\n".join(bullet_lines)
|  | ||||
def send_zulip(email, api_key, site, stream, subject, content):
    """
    Send a message to Zulip using the provided credentials, which should be for
    a bot in most cases.
    """
    bot = zulip.Client(email=email, api_key=api_key,
                       site=site,
                       client="ZulipMercurial/" + VERSION)
    bot.send_message({
        "type": "stream",
        "to": stream,
        "subject": subject,
        "content": content,
    })
|  | ||||
def get_config(ui, item):
    """Return the [zulip] hgrc setting `item`, or None if unset."""
    # configlist returns everything in lists; unwrap the first entry.
    values = ui.configlist('zulip', item)
    if not values:
        return None
    return values[0]
|  | ||||
def hook(ui, repo, **kwargs):
    """
    Invoked by configuring a [hook] entry in .hg/hgrc.

    Expects Mercurial's changegroup-hook kwargs: "hooktype" and "node"
    (the first changeset added by the push).  Settings are read from
    the [zulip] section of the repository's hgrc via get_config():
    email, api_key, site, stream, branches, ignore_branches, web_url.
    """
    hooktype = kwargs["hooktype"]
    node = kwargs["node"]

    ui.debug("Zulip: received {hooktype} event\n".format(hooktype=hooktype))

    # Only changegroup events are supported.
    # NOTE(review): exit() terminates the hook process with a status
    # code; Mercurial hooks can also signal failure via return value --
    # confirm this behavior is intended.
    if hooktype != "changegroup":
        ui.warn("Zulip: {hooktype} not supported\n".format(hooktype=hooktype))
        exit(1)

    ctx = repo.changectx(node)
    branch = ctx.branch()

    # If `branches` isn't specified, notify on all branches.
    branch_whitelist = get_config(ui, "branches")
    branch_blacklist = get_config(ui, "ignore_branches")

    if branch_whitelist:
        # Only send notifications on branches we are watching.
        watched_branches = [b.lower().strip() for b in branch_whitelist.split(",")]
        if branch.lower() not in watched_branches:
            ui.debug("Zulip: ignoring event for {branch}\n".format(branch=branch))
            exit(0)

    if branch_blacklist:
        # Don't send notifications for branches we've ignored.
        ignored_branches = [b.lower().strip() for b in branch_blacklist.split(",")]
        if branch.lower() in ignored_branches:
            ui.debug("Zulip: ignoring event for {branch}\n".format(branch=branch))
            exit(0)

    # The first and final commits in the changeset.
    # `node` is the first changeset of the pushed group; everything from
    # its revision number up to the repository tip belongs to this push.
    base = repo[node].rev()
    tip = len(repo)

    email = get_config(ui, "email")
    api_key = get_config(ui, "api_key")
    site = get_config(ui, "site")

    if not (email and api_key):
        ui.warn("Zulip: missing email or api_key configurations\n")
        ui.warn("in the [zulip] section of your .hg/hgrc.\n")
        exit(1)

    stream = get_config(ui, "stream")
    # Give a default stream if one isn't provided.
    if not stream:
        stream = "commits"

    web_url = get_config(ui, "web_url")
    user = ctx.user()
    # Summary line plus one bullet per commit in the pushed range.
    content = format_summary_line(web_url, user, base, tip, branch, node)
    content += format_commit_lines(web_url, repo, base, tip)

    # Messages are grouped by branch name as the topic.
    subject = branch

    ui.debug("Sending to Zulip:\n")
    ui.debug(content + "\n")

    send_zulip(email, api_key, site, stream, subject, content)
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user