mirror of
				https://github.com/zulip/zulip.git
				synced 2025-10-31 20:13:46 +00:00 
			
		
		
		
	Compare commits
	
		
			160 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 01902fa648 | ||
|  | cbb9ea6b49 | ||
|  | d163143f12 | ||
|  | c21c8dcd95 | ||
|  | 82d2960ad1 | ||
|  | fa07539016 | ||
|  | 6d0c39fd7e | ||
|  | 2e2004b6c3 | ||
|  | 620e98860e | ||
|  | 83380b4296 | ||
|  | e88aac5105 | ||
|  | 6046ea8014 | ||
|  | ba8ee93fae | ||
|  | e682ea189a | ||
|  | 148ea9fe48 | ||
|  | 31a34836d3 | ||
|  | 309266376e | ||
|  | ef194171f7 | ||
|  | 66fa35f5ac | ||
|  | 2b95f54593 | ||
|  | d41f06e8a9 | ||
|  | d119e97755 | ||
|  | 5ea0d1d1e8 | ||
|  | fd66cfd93c | ||
|  | e76bab19a7 | ||
|  | 13532917ca | ||
|  | b5c9a006f0 | ||
|  | a2edd58b82 | ||
|  | d22cb7d01f | ||
|  | 76ce370181 | ||
|  | 64856d858e | ||
|  | c9796ba7f7 | ||
|  | b21117954d | ||
|  | 59f5ca713f | ||
|  | 67da8e8431 | ||
|  | b79fbf9239 | ||
|  | f1f937e4ea | ||
|  | 68628149db | ||
|  | f247721a2d | ||
|  | e3d6b4f210 | ||
|  | ea8e6149da | ||
|  | 376cd88a83 | ||
|  | bfd92260fd | ||
|  | 217431d0c4 | ||
|  | 30cc6798b3 | ||
|  | 677ad69555 | ||
|  | 95118d860d | ||
|  | b8888c801b | ||
|  | 7a9251a3e1 | ||
|  | 64ec413940 | ||
|  | 147c3998de | ||
|  | 79fc9c3281 | ||
|  | a33d7f0400 | ||
|  | 2471f6ad83 | ||
|  | 19d1ca3a1d | ||
|  | 9fcbc3a49b | ||
|  | 1413fda773 | ||
|  | 494e596be8 | ||
|  | 4cc25f8e84 | ||
|  | 19ab295172 | ||
|  | 31f02cd926 | ||
|  | 266c7c83e0 | ||
|  | dd198fd06e | ||
|  | 10e8928b0f | ||
|  | bc81275d3c | ||
|  | 6c8c3cd3dc | ||
|  | 1783515794 | ||
|  | 21026d984b | ||
|  | 66fe724c8a | ||
|  | 282d6edf2e | ||
|  | 785a7ec9e7 | ||
|  | c44d9f9b1b | ||
|  | 0d5d3c4912 | ||
|  | ef793590c1 | ||
|  | 3032ba15cf | ||
|  | 96a2ddffe7 | ||
|  | 2794362214 | ||
|  | 9b3e1e2c97 | ||
|  | ae44fdd7cc | ||
|  | b45cce61e7 | ||
|  | 2e923a0eb5 | ||
|  | f538f34d95 | ||
|  | 5d2befdc54 | ||
|  | cc8b83b261 | ||
|  | ac8f4aaa93 | ||
|  | 843c148c59 | ||
|  | d39bcf2264 | ||
|  | ce64a6b163 | ||
|  | 7875196783 | ||
|  | 56c1ad1a3d | ||
|  | d9aa4161f8 | ||
|  | 728155afee | ||
|  | 660501c782 | ||
|  | ad974c3ae3 | ||
|  | bc4029deae | ||
|  | 218ca61dd0 | ||
|  | 3419908f39 | ||
|  | af67990f14 | ||
|  | e6cf30fc22 | ||
|  | e2ccbe7c80 | ||
|  | 8b31387670 | ||
|  | 501eb09716 | ||
|  | 280d9db26d | ||
|  | cee6227f53 | ||
|  | cae803e8a9 | ||
|  | ba598366e9 | ||
|  | d452ad31e0 | ||
|  | aed813f44c | ||
|  | 71dae1b92a | ||
|  | 629ec1aa8b | ||
|  | 87d60a1fff | ||
|  | 98eef54e4f | ||
|  | 235ba339d0 | ||
|  | e5320cc1f6 | ||
|  | 1d72ea2fd5 | ||
|  | c7948a7960 | ||
|  | 04bb26be3a | ||
|  | 7f45ca9b22 | ||
|  | 1bedb965e9 | ||
|  | bc752188e7 | ||
|  | b0ea81fe16 | ||
|  | 358ab821c4 | ||
|  | 97322dd195 | ||
|  | 1ba48a04da | ||
|  | e8377b605f | ||
|  | 830f1e9f3f | ||
|  | 037b87b580 | ||
|  | 82a6e77301 | ||
|  | 9efb90510c | ||
|  | b255c8b8a6 | ||
|  | 03e8e8be9d | ||
|  | 2932d9cd28 | ||
|  | 0baa205ad3 | ||
|  | a8d8500c46 | ||
|  | aa19f43f0b | ||
|  | 0974b0130d | ||
|  | 8a1d2bb5b6 | ||
|  | a38976f25d | ||
|  | fccfc02981 | ||
|  | 929847ae2d | ||
|  | a3338f3735 | ||
|  | f377ef6dd7 | ||
|  | 4c9997a523 | ||
|  | 2470fba95c | ||
|  | 2a6145f7fb | ||
|  | 7036fea97b | ||
|  | 05a42fb8df | ||
|  | cd0b14ce2f | ||
|  | a1fc8fb079 | ||
|  | e147ee2087 | ||
|  | 61180020c1 | ||
|  | 2a473c57f4 | ||
|  | c0980e3e9e | ||
|  | 035d4c57be | ||
|  | fcbd24e72c | ||
|  | 29babba85a | ||
|  | 49ff894d6a | ||
|  | f3e75b6b5f | ||
|  | 6b9f37dc8f | ||
|  | cd926b8aae | 
							
								
								
									
										6
									
								
								.browserslistrc
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										6
									
								
								.browserslistrc
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,6 @@ | |||||||
|  | > 0.2% | ||||||
|  | > 0.2% in US | ||||||
|  | last 2 versions | ||||||
|  | Firefox ESR | ||||||
|  | not dead | ||||||
|  | Chrome 26  # similar to PhantomJS | ||||||
							
								
								
									
										151
									
								
								.circleci/config.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										151
									
								
								.circleci/config.yml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,151 @@ | |||||||
|  | # See https://zulip.readthedocs.io/en/latest/testing/continuous-integration.html for | ||||||
|  | #   high-level documentation on our CircleCI setup. | ||||||
|  | # See CircleCI upstream's docs on this config format: | ||||||
|  | #   https://circleci.com/docs/2.0/language-python/ | ||||||
|  | # | ||||||
|  | version: 2.0 | ||||||
|  | aliases: | ||||||
|  |   - &create_cache_directories | ||||||
|  |     run: | ||||||
|  |       name: create cache directories | ||||||
|  |       command: | | ||||||
|  |           dirs=(/srv/zulip-{npm,venv}-cache) | ||||||
|  |           sudo mkdir -p "${dirs[@]}" | ||||||
|  |           sudo chown -R circleci "${dirs[@]}" | ||||||
|  |  | ||||||
|  |   - &restore_cache_package_json | ||||||
|  |     restore_cache: | ||||||
|  |       keys: | ||||||
|  |       - v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }} | ||||||
|  |  | ||||||
|  |   - &restore_cache_requirements | ||||||
|  |     restore_cache: | ||||||
|  |       keys: | ||||||
|  |       - v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor.txt" }}-{{ checksum "requirements/dev.txt" }} | ||||||
|  |  | ||||||
|  |   - &install_dependencies | ||||||
|  |     run: | ||||||
|  |       name: install dependencies | ||||||
|  |       command: | | ||||||
|  |         sudo apt-get update | ||||||
|  |         # Install moreutils so we can use `ts` and `mispipe` in the following. | ||||||
|  |         sudo apt-get install -y moreutils | ||||||
|  |  | ||||||
|  |         # CircleCI sets the following in Git config at clone time: | ||||||
|  |         #   url.ssh://git@github.com.insteadOf https://github.com | ||||||
|  |         # This breaks the Git clones in the NVM `install.sh` we run | ||||||
|  |         # in `install-node`. | ||||||
|  |         # TODO: figure out why that breaks, and whether we want it. | ||||||
|  |         #   (Is it an optimization?) | ||||||
|  |         rm -f /home/circleci/.gitconfig | ||||||
|  |  | ||||||
|  |         # This is the main setup job for the test suite | ||||||
|  |         mispipe "tools/ci/setup-backend" ts | ||||||
|  |  | ||||||
|  |         # Cleaning caches is mostly unnecessary in Circle, because | ||||||
|  |         # most builds don't get to write to the cache. | ||||||
|  |         # mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0" ts | ||||||
|  |  | ||||||
|  |   - &save_cache_package_json | ||||||
|  |     save_cache: | ||||||
|  |       paths: | ||||||
|  |         - /srv/zulip-npm-cache | ||||||
|  |       key: v1-npm-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }}-{{ checksum "yarn.lock" }} | ||||||
|  |  | ||||||
|  |   - &save_cache_requirements | ||||||
|  |     save_cache: | ||||||
|  |       paths: | ||||||
|  |         - /srv/zulip-venv-cache | ||||||
|  |       key: v1-venv-base.{{ .Environment.CIRCLE_JOB }}-{{ checksum "requirements/thumbor.txt" }}-{{ checksum "requirements/dev.txt" }} | ||||||
|  |     # TODO: in Travis we also cache ~/zulip-emoji-cache, ~/node, ~/misc | ||||||
|  |  | ||||||
|  |   - &run_backend_tests | ||||||
|  |     run: | ||||||
|  |       name: run backend tests | ||||||
|  |       command: | | ||||||
|  |         . /srv/zulip-py3-venv/bin/activate | ||||||
|  |         mispipe ./tools/ci/backend ts | ||||||
|  |  | ||||||
|  |   - &run_frontend_tests | ||||||
|  |     run: | ||||||
|  |       name: run frontend tests | ||||||
|  |       command: | | ||||||
|  |         . /srv/zulip-py3-venv/bin/activate | ||||||
|  |         mispipe ./tools/ci/frontend ts | ||||||
|  |  | ||||||
|  |   - &upload_coverage_report | ||||||
|  |     run: | ||||||
|  |      name: upload coverage report | ||||||
|  |      command: | | ||||||
|  |        . /srv/zulip-py3-venv/bin/activate | ||||||
|  |        pip install codecov && codecov \ | ||||||
|  |          || echo "Error in uploading coverage reports to codecov.io." | ||||||
|  |  | ||||||
|  | jobs: | ||||||
|  |   "xenial-backend-frontend-python3.5": | ||||||
|  |     docker: | ||||||
|  |       # This is built from tools/circleci/images/xenial/Dockerfile . | ||||||
|  |       # Xenial ships with Python 3.5. | ||||||
|  |       - image: gregprice/circleci:xenial-python-4.test | ||||||
|  |  | ||||||
|  |     working_directory: ~/zulip | ||||||
|  |  | ||||||
|  |     steps: | ||||||
|  |       - checkout | ||||||
|  |  | ||||||
|  |       - *create_cache_directories | ||||||
|  |       - *restore_cache_package_json | ||||||
|  |       - *restore_cache_requirements | ||||||
|  |       - *install_dependencies | ||||||
|  |       - *save_cache_package_json | ||||||
|  |       - *save_cache_requirements | ||||||
|  |       - *run_backend_tests | ||||||
|  |       - *run_frontend_tests | ||||||
|  |       # We only need to upload coverage reports on whichever platform | ||||||
|  |       # runs the frontend tests. | ||||||
|  |       - *upload_coverage_report | ||||||
|  |  | ||||||
|  |       - store_artifacts: | ||||||
|  |           path: ./var/casper/ | ||||||
|  |           destination: casper | ||||||
|  |  | ||||||
|  |       - store_artifacts:     | ||||||
|  |           path: ../../../tmp/zulip-test-event-log/ | ||||||
|  |           destination: test-reports | ||||||
|  |  | ||||||
|  |       - store_test_results: | ||||||
|  |             path: ./var/xunit-test-results/casper/ | ||||||
|  |  | ||||||
|  |   "bionic-backend-python3.6": | ||||||
|  |     docker: | ||||||
|  |       # This is built from tools/circleci/images/bionic/Dockerfile . | ||||||
|  |       # Bionic ships with Python 3.6. | ||||||
|  |       - image: gregprice/circleci:bionic-python-1.test | ||||||
|  |  | ||||||
|  |     working_directory: ~/zulip | ||||||
|  |  | ||||||
|  |     steps: | ||||||
|  |       - checkout | ||||||
|  |  | ||||||
|  |       - *create_cache_directories | ||||||
|  |  | ||||||
|  |       - run: | ||||||
|  |           name: do Bionic hack | ||||||
|  |           command: | | ||||||
|  |               # Temporary hack till `sudo service redis-server start` gets fixes in Bionic. See | ||||||
|  |               # https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI | ||||||
|  |               sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf | ||||||
|  |  | ||||||
|  |       - *restore_cache_package_json | ||||||
|  |       - *restore_cache_requirements | ||||||
|  |       - *install_dependencies | ||||||
|  |       - *save_cache_package_json | ||||||
|  |       - *save_cache_requirements | ||||||
|  |       - *run_backend_tests | ||||||
|  |  | ||||||
|  | workflows: | ||||||
|  |   version: 2 | ||||||
|  |   build: | ||||||
|  |     jobs: | ||||||
|  |       - "xenial-backend-frontend-python3.5" | ||||||
|  |       - "bionic-backend-python3.6" | ||||||
| @@ -1,27 +0,0 @@ | |||||||
| te |  | ||||||
| ans |  | ||||||
| pullrequest |  | ||||||
| ist |  | ||||||
| cros |  | ||||||
| wit |  | ||||||
| nwe |  | ||||||
| circularly |  | ||||||
| ned |  | ||||||
| ba |  | ||||||
| ressemble |  | ||||||
| ser |  | ||||||
| sur |  | ||||||
| hel |  | ||||||
| fpr |  | ||||||
| alls |  | ||||||
| nd |  | ||||||
| ot |  | ||||||
| womens |  | ||||||
| vise |  | ||||||
| falsy |  | ||||||
| ro |  | ||||||
| derails |  | ||||||
| forin |  | ||||||
| uper |  | ||||||
| slac |  | ||||||
| couldn |  | ||||||
| @@ -3,22 +3,23 @@ root = true | |||||||
| [*] | [*] | ||||||
| end_of_line = lf | end_of_line = lf | ||||||
| charset = utf-8 | charset = utf-8 | ||||||
| indent_size = 4 |  | ||||||
| indent_style = space |  | ||||||
| insert_final_newline = true |  | ||||||
| trim_trailing_whitespace = true | trim_trailing_whitespace = true | ||||||
|  | insert_final_newline = true | ||||||
|  |  | ||||||
| binary_next_line = true  # for shfmt | [*.{sh,py,pyi,js,ts,json,yml,xml,css,md,markdown,handlebars,html}] | ||||||
| switch_case_indent = true  # for shfmt | indent_style = space | ||||||
|  | indent_size = 4 | ||||||
|  |  | ||||||
| [{*.{js,json,ts},check-openapi}] | [*.py] | ||||||
| max_line_length = 100 |  | ||||||
|  |  | ||||||
| [*.{py,pyi}] |  | ||||||
| max_line_length = 110 | max_line_length = 110 | ||||||
|  |  | ||||||
| [*.{md,svg,rb,pp,yaml,yml}] | [*.{js,ts}] | ||||||
|  | max_line_length = 100 | ||||||
|  |  | ||||||
|  | [*.{svg,rb,pp,pl}] | ||||||
|  | indent_style = space | ||||||
| indent_size = 2 | indent_size = 2 | ||||||
|  |  | ||||||
| [package.json] | [*.cfg] | ||||||
| indent_size = 2 | indent_style = space | ||||||
|  | indent_size = 8 | ||||||
|   | |||||||
| @@ -4,12 +4,7 @@ | |||||||
|  |  | ||||||
| /docs/_build | /docs/_build | ||||||
| /static/generated | /static/generated | ||||||
|  | /static/third | ||||||
| /static/webpack-bundles | /static/webpack-bundles | ||||||
| /var/* | /var | ||||||
| !/var/puppeteer |  | ||||||
| /var/puppeteer/* |  | ||||||
| !/var/puppeteer/test_credentials.d.ts |  | ||||||
| /web/generated |  | ||||||
| /web/third |  | ||||||
| /zulip-current-venv |  | ||||||
| /zulip-py3-venv | /zulip-py3-venv | ||||||
|   | |||||||
							
								
								
									
										634
									
								
								.eslintrc.json
									
									
									
									
									
								
							
							
						
						
									
										634
									
								
								.eslintrc.json
									
									
									
									
									
								
							| @@ -1,273 +1,495 @@ | |||||||
| { | { | ||||||
|     "root": true, |  | ||||||
|     "env": { |     "env": { | ||||||
|         "es2020": true, |         "node": true, | ||||||
|         "node": true |         "es6": true | ||||||
|     }, |     }, | ||||||
|     "extends": [ |  | ||||||
|         "eslint:recommended", |  | ||||||
|         "plugin:import/errors", |  | ||||||
|         "plugin:import/warnings", |  | ||||||
|         "plugin:no-jquery/recommended", |  | ||||||
|         "plugin:no-jquery/deprecated", |  | ||||||
|         "plugin:unicorn/recommended", |  | ||||||
|         "prettier" |  | ||||||
|     ], |  | ||||||
|     "parser": "@babel/eslint-parser", |  | ||||||
|     "parserOptions": { |     "parserOptions": { | ||||||
|         "requireConfigFile": false, |         "ecmaVersion": 2019, | ||||||
|         "warnOnUnsupportedTypeScriptVersion": false, |         "warnOnUnsupportedTypeScriptVersion": false, | ||||||
|         "sourceType": "unambiguous" |         "sourceType": "module" | ||||||
|     }, |     }, | ||||||
|     "plugins": ["formatjs", "no-jquery"], |     "globals": { | ||||||
|     "settings": { |         "$": false, | ||||||
|         "formatjs": { |         "ClipboardJS": false, | ||||||
|             "additionalFunctionNames": ["$t", "$t_html"] |         "Dict": false, | ||||||
|         }, |         "FetchStatus": false, | ||||||
|         "no-jquery": { |         "Filter": false, | ||||||
|             "collectionReturningPlugins": { |         "Handlebars": false, | ||||||
|                 "expectOne": "always" |         "LightboxCanvas": false, | ||||||
|             }, |         "MessageListData": false, | ||||||
|             "variablePattern": "^\\$(?!t$|t_html$)." |         "MessageListView": false, | ||||||
|         } |         "Plotly": false, | ||||||
|  |         "SockJS": false, | ||||||
|  |         "Socket": false, | ||||||
|  |         "Sortable": false, | ||||||
|  |         "WinChan": false, | ||||||
|  |         "XDate": false, | ||||||
|  |         "_": false, | ||||||
|  |         "activity": false, | ||||||
|  |         "admin": false, | ||||||
|  |         "alert_words": false, | ||||||
|  |         "alert_words_ui": false, | ||||||
|  |         "attachments_ui": false, | ||||||
|  |         "avatar": false, | ||||||
|  |         "billing": false, | ||||||
|  |         "blueslip": false, | ||||||
|  |         "bot_data": false, | ||||||
|  |         "bridge": false, | ||||||
|  |         "buddy_data": false, | ||||||
|  |         "buddy_list": false, | ||||||
|  |         "channel": false, | ||||||
|  |         "click_handlers": false, | ||||||
|  |         "color_data": false, | ||||||
|  |         "colorspace": false, | ||||||
|  |         "common": false, | ||||||
|  |         "components": false, | ||||||
|  |         "compose": false, | ||||||
|  |         "compose_actions": false, | ||||||
|  |         "compose_fade": false, | ||||||
|  |         "compose_pm_pill": false, | ||||||
|  |         "compose_state": false, | ||||||
|  |         "compose_ui": false, | ||||||
|  |         "composebox_typeahead": false, | ||||||
|  |         "condense": false, | ||||||
|  |         "confirm_dialog": false, | ||||||
|  |         "copy_and_paste": false, | ||||||
|  |         "csrf_token": false, | ||||||
|  |         "current_msg_list": true, | ||||||
|  |         "drafts": false, | ||||||
|  |         "echo": false, | ||||||
|  |         "emoji": false, | ||||||
|  |         "emoji_codes": false, | ||||||
|  |         "emoji_picker": false, | ||||||
|  |         "favicon": false, | ||||||
|  |         "feature_flags": false, | ||||||
|  |         "feedback_widget": false, | ||||||
|  |         "fenced_code": false, | ||||||
|  |         "flatpickr": false, | ||||||
|  |         "floating_recipient_bar": false, | ||||||
|  |         "gear_menu": false, | ||||||
|  |         "hash_util": false, | ||||||
|  |         "hashchange": false, | ||||||
|  |         "helpers": false, | ||||||
|  |         "history": false, | ||||||
|  |         "home_msg_list": false, | ||||||
|  |         "hotspots": false, | ||||||
|  |         "i18n": false, | ||||||
|  |         "info_overlay": false, | ||||||
|  |         "input_pill": false, | ||||||
|  |         "invite": false, | ||||||
|  |         "jQuery": false, | ||||||
|  |         "katex": false, | ||||||
|  |         "keydown_util": false, | ||||||
|  |         "lightbox": false, | ||||||
|  |         "list_cursor": false, | ||||||
|  |         "list_render": false, | ||||||
|  |         "list_util": false, | ||||||
|  |         "loading": false, | ||||||
|  |         "localStorage": false, | ||||||
|  |         "local_message": false, | ||||||
|  |         "localstorage": false, | ||||||
|  |         "location": false, | ||||||
|  |         "markdown": false, | ||||||
|  |         "marked": false, | ||||||
|  |         "md5": false, | ||||||
|  |         "message_edit": false, | ||||||
|  |         "message_events": false, | ||||||
|  |         "message_fetch": false, | ||||||
|  |         "message_flags": false, | ||||||
|  |         "message_list": false, | ||||||
|  |         "message_live_update": false, | ||||||
|  |         "message_scroll": false, | ||||||
|  |         "message_store": false, | ||||||
|  |         "message_util": false, | ||||||
|  |         "message_viewport": false, | ||||||
|  |         "moment": false, | ||||||
|  |         "muting": false, | ||||||
|  |         "muting_ui": false, | ||||||
|  |         "narrow": false, | ||||||
|  |         "narrow_state": false, | ||||||
|  |         "navigate": false, | ||||||
|  |         "night_mode": false, | ||||||
|  |         "notifications": false, | ||||||
|  |         "overlays": false, | ||||||
|  |         "padded_widget": false, | ||||||
|  |         "page_params": false, | ||||||
|  |         "panels": false, | ||||||
|  |         "people": false, | ||||||
|  |         "pm_conversations": false, | ||||||
|  |         "pm_list": false, | ||||||
|  |         "pointer": false, | ||||||
|  |         "popovers": false, | ||||||
|  |         "presence": false, | ||||||
|  |         "pygments_data": false, | ||||||
|  |         "reactions": false, | ||||||
|  |         "realm_icon": false, | ||||||
|  |         "realm_logo": false, | ||||||
|  |         "realm_night_logo": false, | ||||||
|  |         "recent_senders": false, | ||||||
|  |         "reload": false, | ||||||
|  |         "reload_state": false, | ||||||
|  |         "reminder": false, | ||||||
|  |         "resize": false, | ||||||
|  |         "rows": false, | ||||||
|  |         "rtl": false, | ||||||
|  |         "run_test": false, | ||||||
|  |         "schema": false, | ||||||
|  |         "scroll_bar": false, | ||||||
|  |         "scroll_util": false, | ||||||
|  |         "search": false, | ||||||
|  |         "search_pill": false, | ||||||
|  |         "search_pill_widget": false, | ||||||
|  |         "search_suggestion": false, | ||||||
|  |         "search_util": false, | ||||||
|  |         "sent_messages": false, | ||||||
|  |         "server_events": false, | ||||||
|  |         "server_events_dispatch": false, | ||||||
|  |         "settings": false, | ||||||
|  |         "settings_account": false, | ||||||
|  |         "settings_bots": false, | ||||||
|  |         "settings_display": false, | ||||||
|  |         "settings_emoji": false, | ||||||
|  |         "settings_exports": false, | ||||||
|  |         "settings_linkifiers": false, | ||||||
|  |         "settings_invites": false, | ||||||
|  |         "settings_muting": false, | ||||||
|  |         "settings_notifications": false, | ||||||
|  |         "settings_org": false, | ||||||
|  |         "settings_panel_menu": false, | ||||||
|  |         "settings_profile_fields": false, | ||||||
|  |         "settings_sections": false, | ||||||
|  |         "settings_streams": false, | ||||||
|  |         "settings_toggle": false, | ||||||
|  |         "settings_ui": false, | ||||||
|  |         "settings_user_groups": false, | ||||||
|  |         "settings_users": false, | ||||||
|  |         "starred_messages": false, | ||||||
|  |         "stream_color": false, | ||||||
|  |         "stream_create": false, | ||||||
|  |         "stream_data": false, | ||||||
|  |         "stream_edit": false, | ||||||
|  |         "stream_events": false, | ||||||
|  |         "stream_list": false, | ||||||
|  |         "stream_muting": false, | ||||||
|  |         "stream_popover": false, | ||||||
|  |         "stream_sort": false, | ||||||
|  |         "stream_ui_updates": false, | ||||||
|  |         "StripeCheckout": false, | ||||||
|  |         "submessage": false, | ||||||
|  |         "subs": false, | ||||||
|  |         "tab_bar": false, | ||||||
|  |         "templates": false, | ||||||
|  |         "tictactoe_widget": false, | ||||||
|  |         "timerender": false, | ||||||
|  |         "todo_widget": false, | ||||||
|  |         "top_left_corner": false, | ||||||
|  |         "topic_data": false, | ||||||
|  |         "topic_generator": false, | ||||||
|  |         "topic_list": false, | ||||||
|  |         "topic_zoom": false, | ||||||
|  |         "transmit": false, | ||||||
|  |         "tutorial": false, | ||||||
|  |         "typeahead_helper": false, | ||||||
|  |         "typing": false, | ||||||
|  |         "typing_data": false, | ||||||
|  |         "typing_events": false, | ||||||
|  |         "ui": false, | ||||||
|  |         "ui_init": false, | ||||||
|  |         "ui_report": false, | ||||||
|  |         "ui_util": false, | ||||||
|  |         "unread": false, | ||||||
|  |         "unread_ops": false, | ||||||
|  |         "unread_ui": false, | ||||||
|  |         "upgrade": false, | ||||||
|  |         "upload": false, | ||||||
|  |         "upload_widget": false, | ||||||
|  |         "user_events": false, | ||||||
|  |         "user_groups": false, | ||||||
|  |         "user_pill": false, | ||||||
|  |         "user_search": false, | ||||||
|  |         "user_status": false, | ||||||
|  |         "user_status_ui": false, | ||||||
|  |         "util": false, | ||||||
|  |         "poll_widget": false, | ||||||
|  |         "widgetize": false, | ||||||
|  |         "zcommand": false, | ||||||
|  |         "zform": false, | ||||||
|  |         "zxcvbn": false | ||||||
|     }, |     }, | ||||||
|     "reportUnusedDisableDirectives": true, |     "plugins": [ | ||||||
|  |         "eslint-plugin-empty-returns" | ||||||
|  |     ], | ||||||
|     "rules": { |     "rules": { | ||||||
|         "array-callback-return": "error", |         "array-callback-return": "error", | ||||||
|         "arrow-body-style": "error", |         "array-bracket-spacing": "error", | ||||||
|  |         "arrow-spacing": [ "error", { "before": true, "after": true } ], | ||||||
|         "block-scoped-var": "error", |         "block-scoped-var": "error", | ||||||
|         "consistent-return": "error", |         "brace-style": [ "error", "1tbs", { "allowSingleLine": true } ], | ||||||
|         "curly": "error", |         "camelcase": "off", | ||||||
|         "dot-notation": "error", |         "comma-dangle": [ "error", | ||||||
|         "eqeqeq": "error", |             { | ||||||
|         "formatjs/enforce-default-message": ["error", "literal"], |                 "arrays": "always-multiline", | ||||||
|         "formatjs/enforce-placeholders": [ |                 "objects": "always-multiline", | ||||||
|             "error", |                 "imports": "always-multiline", | ||||||
|             {"ignoreList": ["b", "code", "em", "i", "kbd", "p", "strong"]} |                 "exports": "always-multiline", | ||||||
|  |                 "functions": "never" | ||||||
|  |             } | ||||||
|         ], |         ], | ||||||
|         "formatjs/no-id": "error", |         "comma-spacing": [ "error", | ||||||
|  |             { | ||||||
|  |                 "before": false, | ||||||
|  |                 "after": true | ||||||
|  |             } | ||||||
|  |         ], | ||||||
|  |         "complexity": [ "off", 4 ], | ||||||
|  |         "curly": "error", | ||||||
|  |         "dot-notation": [ "error", { "allowKeywords": true } ], | ||||||
|  |         "empty-returns/main": "error", | ||||||
|  |         "eol-last": [ "error", "always" ], | ||||||
|  |         "eqeqeq": "error", | ||||||
|  |         "func-style": [ "off", "expression" ], | ||||||
|         "guard-for-in": "error", |         "guard-for-in": "error", | ||||||
|         "import/extensions": "error", |         "indent": ["error", 4, { | ||||||
|         "import/first": "error", |             "ArrayExpression": "first", | ||||||
|         "import/newline-after-import": "error", |             "ObjectExpression": "first", | ||||||
|         "import/no-self-import": "error", |             "SwitchCase": 0, | ||||||
|         "import/no-unresolved": "off", |             "CallExpression": {"arguments": "first"}, | ||||||
|         "import/no-useless-path-segments": "error", |             "FunctionExpression": {"parameters": "first"}, | ||||||
|         "import/order": ["error", {"alphabetize": {"order": "asc"}, "newlines-between": "always"}], |             "FunctionDeclaration": {"parameters": "first"} | ||||||
|         "import/unambiguous": "error", |         }], | ||||||
|         "lines-around-directive": "error", |         "key-spacing": [ "error", | ||||||
|         "new-cap": "error", |             { | ||||||
|  |                 "beforeColon": false, | ||||||
|  |                 "afterColon": true | ||||||
|  |             } | ||||||
|  |         ], | ||||||
|  |         "keyword-spacing": [ "error", | ||||||
|  |             { | ||||||
|  |                 "before": true, | ||||||
|  |                 "after": true, | ||||||
|  |                 "overrides": { | ||||||
|  |                     "return": { "after": true }, | ||||||
|  |                     "throw": { "after": true }, | ||||||
|  |                     "case": { "after": true } | ||||||
|  |                 } | ||||||
|  |             } | ||||||
|  |         ], | ||||||
|  |         "max-depth": [ "off", 4 ], | ||||||
|  |         "max-len": [ "error", 100, 2, | ||||||
|  |             { | ||||||
|  |                 "ignoreUrls": true, | ||||||
|  |                 "ignoreComments": false, | ||||||
|  |                 "ignoreRegExpLiterals": true, | ||||||
|  |                 "ignoreStrings": true, | ||||||
|  |                 "ignoreTemplateLiterals": true | ||||||
|  |             } | ||||||
|  |         ], | ||||||
|  |         "max-params": [ "off", 3 ], | ||||||
|  |         "max-statements": [ "off", 10 ], | ||||||
|  |         "new-cap": [ "error", | ||||||
|  |             { | ||||||
|  |                 "newIsCap": true, | ||||||
|  |                 "capIsNew": false | ||||||
|  |             } | ||||||
|  |         ], | ||||||
|  |         "new-parens": "error", | ||||||
|  |         "newline-per-chained-call": "off", | ||||||
|         "no-alert": "error", |         "no-alert": "error", | ||||||
|         "no-array-constructor": "error", |         "no-array-constructor": "error", | ||||||
|         "no-bitwise": "error", |         "no-bitwise": "error", | ||||||
|         "no-caller": "error", |         "no-caller": "error", | ||||||
|  |         "no-case-declarations": "error", | ||||||
|         "no-catch-shadow": "error", |         "no-catch-shadow": "error", | ||||||
|         "no-constant-condition": ["error", {"checkLoops": false}], |         "no-console": "off", | ||||||
|  |         "no-const-assign": "error", | ||||||
|  |         "no-control-regex": "error", | ||||||
|  |         "no-debugger": "error", | ||||||
|  |         "no-delete-var": "error", | ||||||
|         "no-div-regex": "error", |         "no-div-regex": "error", | ||||||
|  |         "no-dupe-class-members": "error", | ||||||
|  |         "no-dupe-keys": "error", | ||||||
|         "no-duplicate-imports": "error", |         "no-duplicate-imports": "error", | ||||||
|         "no-else-return": "error", |         "no-else-return": "error", | ||||||
|  |         "no-empty": "error", | ||||||
|  |         "no-empty-character-class": "error", | ||||||
|         "no-eq-null": "error", |         "no-eq-null": "error", | ||||||
|         "no-eval": "error", |         "no-eval": "error", | ||||||
|         "no-implicit-coercion": "error", |         "no-ex-assign": "error", | ||||||
|  |         "no-extra-parens": ["error", "all"], | ||||||
|  |         "no-extra-semi": "error", | ||||||
|  |         "no-fallthrough": "error", | ||||||
|  |         "no-floating-decimal": "error", | ||||||
|  |         "no-func-assign": "error", | ||||||
|         "no-implied-eval": "error", |         "no-implied-eval": "error", | ||||||
|         "no-inner-declarations": "off", |  | ||||||
|         "no-iterator": "error", |         "no-iterator": "error", | ||||||
|         "no-jquery/no-constructor-attributes": "error", |  | ||||||
|         "no-jquery/no-parse-html-literal": "error", |  | ||||||
|         "no-label-var": "error", |         "no-label-var": "error", | ||||||
|         "no-labels": "error", |         "no-labels": "error", | ||||||
|         "no-loop-func": "error", |         "no-loop-func": "error", | ||||||
|  |         "no-mixed-requires": [ "off", false ], | ||||||
|         "no-multi-str": "error", |         "no-multi-str": "error", | ||||||
|         "no-native-reassign": "error", |         "no-native-reassign": "error", | ||||||
|  |         "no-nested-ternary": "off", | ||||||
|         "no-new-func": "error", |         "no-new-func": "error", | ||||||
|         "no-new-object": "error", |         "no-new-object": "error", | ||||||
|         "no-new-wrappers": "error", |         "no-new-wrappers": "error", | ||||||
|  |         "no-obj-calls": "error", | ||||||
|  |         "no-octal": "error", | ||||||
|         "no-octal-escape": "error", |         "no-octal-escape": "error", | ||||||
|  |         "no-param-reassign": "off", | ||||||
|         "no-plusplus": "error", |         "no-plusplus": "error", | ||||||
|         "no-proto": "error", |         "no-proto": "error", | ||||||
|  |         "no-redeclare": "error", | ||||||
|  |         "no-regex-spaces": "error", | ||||||
|  |         "no-restricted-syntax": "off", | ||||||
|         "no-return-assign": "error", |         "no-return-assign": "error", | ||||||
|         "no-script-url": "error", |         "no-script-url": "error", | ||||||
|         "no-self-compare": "error", |         "no-self-compare": "error", | ||||||
|  |         "no-shadow": "off", | ||||||
|         "no-sync": "error", |         "no-sync": "error", | ||||||
|         "no-throw-literal": "error", |         "no-ternary": "off", | ||||||
|  |         "no-trailing-spaces": "error", | ||||||
|  |         "no-undef": "error", | ||||||
|         "no-undef-init": "error", |         "no-undef-init": "error", | ||||||
|         "no-unneeded-ternary": ["error", {"defaultAssignment": false}], |         "no-underscore-dangle": "off", | ||||||
|  |         "no-unneeded-ternary": [ "error", { "defaultAssignment": false } ], | ||||||
|  |         "no-unreachable": "error", | ||||||
|         "no-unused-expressions": "error", |         "no-unused-expressions": "error", | ||||||
|         "no-unused-vars": ["error", {"ignoreRestSiblings": true}], |         "no-unused-vars": [ "error", | ||||||
|         "no-use-before-define": ["error", {"functions": false}], |             { | ||||||
|         "no-useless-concat": "error", |                 "vars": "local", | ||||||
|  |                 "args": "after-used", | ||||||
|  |                 "varsIgnorePattern": "print_elapsed_time|check_duplicate_ids" | ||||||
|  |             } | ||||||
|  |         ], | ||||||
|  |         "no-use-before-define": "error", | ||||||
|         "no-useless-constructor": "error", |         "no-useless-constructor": "error", | ||||||
|  |         // The Zulip codebase complies partially with the "no-useless-escape" | ||||||
|  |         // rule; only regex expressions haven't been updated yet. | ||||||
|  |         // Updated regex expressions are currently being tested in casper | ||||||
|  |         // files and will decide about a potential future enforcement of this rule. | ||||||
|  |         "no-useless-escape": "off", | ||||||
|         "no-var": "error", |         "no-var": "error", | ||||||
|         "object-shorthand": ["error", "always", {"avoidExplicitReturnArrows": true}], |         "space-unary-ops": "error", | ||||||
|         "one-var": ["error", "never"], |         "no-whitespace-before-property": "error", | ||||||
|         "prefer-arrow-callback": "error", |         "no-with": "error", | ||||||
|         "prefer-const": ["error", {"ignoreReadBeforeAssign": true}], |         "one-var": [ "error", "never" ], | ||||||
|  |         "padded-blocks": "off", | ||||||
|  |         "prefer-const": [ "error", | ||||||
|  |             { | ||||||
|  |                 "destructuring": "any", | ||||||
|  |                 "ignoreReadBeforeAssign": true | ||||||
|  |             } | ||||||
|  |         ], | ||||||
|  |         "quote-props": [ "error", "as-needed", | ||||||
|  |             { | ||||||
|  |                 "keywords": false, | ||||||
|  |                 "unnecessary": true, | ||||||
|  |                 "numbers": false | ||||||
|  |             } | ||||||
|  |         ], | ||||||
|  |         "quotes": [ "off", "single" ], | ||||||
|         "radix": "error", |         "radix": "error", | ||||||
|         "sort-imports": ["error", {"ignoreDeclarationSort": true}], |         "semi": "error", | ||||||
|         "spaced-comment": ["error", "always", {"markers": ["/"]}], |         "semi-spacing": ["error", {"before": false, "after": true}], | ||||||
|         "strict": "error", |         "sort-imports": "error", | ||||||
|         "unicorn/consistent-function-scoping": "off", |         "space-before-blocks": "error", | ||||||
|         "unicorn/explicit-length-check": "off", |         "space-before-function-paren": [ "error", | ||||||
|         "unicorn/filename-case": "off", |             { | ||||||
|         "unicorn/no-await-expression-member": "off", |                 "anonymous": "always", | ||||||
|         "unicorn/no-negated-condition": "off", |                 "named": "never", | ||||||
|         "unicorn/no-null": "off", |                 "asyncArrow": "always" | ||||||
|         "unicorn/no-process-exit": "off", |             } | ||||||
|         "unicorn/no-useless-undefined": "off", |         ], | ||||||
|         "unicorn/numeric-separators-style": "off", |         "space-in-parens": "error", | ||||||
|         "unicorn/prefer-module": "off", |         "space-infix-ops": "error", | ||||||
|         "unicorn/prefer-node-protocol": "off", |         "spaced-comment": "off", | ||||||
|         "unicorn/prefer-ternary": "off", |         "strict": "off", | ||||||
|         "unicorn/prefer-top-level-await": "off", |         "template-curly-spacing": "error", | ||||||
|         "unicorn/prevent-abbreviations": "off", |         "unnecessary-strict": "off", | ||||||
|         "unicorn/switch-case-braces": "off", |         "use-isnan": "error", | ||||||
|         "valid-typeof": ["error", {"requireStringLiterals": true}], |         "valid-typeof": [ "error", { "requireStringLiterals": true } ], | ||||||
|  |         "wrap-iife": [ "error", "outside", { "functionPrototypeMethods": false } ], | ||||||
|  |         "wrap-regex": "off", | ||||||
|         "yoda": "error" |         "yoda": "error" | ||||||
|     }, |     }, | ||||||
|     "overrides": [ |     "overrides": [ | ||||||
|         { |         { | ||||||
|             "files": ["web/tests/**"], |             "files": [ | ||||||
|  |                 "frontend_tests/casper_tests/*.js", | ||||||
|  |                 "frontend_tests/casper_lib/*.js" | ||||||
|  |             ], | ||||||
|             "rules": { |             "rules": { | ||||||
|                 "no-jquery/no-selector-prop": "off" |                 "no-var": "off" // PhantomJS doesn’t support let, const | ||||||
|             } |  | ||||||
|         }, |  | ||||||
|         { |  | ||||||
|             "files": ["web/e2e-tests/**"], |  | ||||||
|             "globals": { |  | ||||||
|                 "zulip_test": false |  | ||||||
|             } |  | ||||||
|         }, |  | ||||||
|         { |  | ||||||
|             "files": ["web/src/**"], |  | ||||||
|             "globals": { |  | ||||||
|                 "StripeCheckout": false |  | ||||||
|             } |             } | ||||||
|         }, |         }, | ||||||
|         { |         { | ||||||
|             "files": ["**/*.ts"], |             "files": ["**/*.ts"], | ||||||
|             "extends": [ |             "parser": "@typescript-eslint/parser", | ||||||
|                 "plugin:@typescript-eslint/recommended", |  | ||||||
|                 "plugin:@typescript-eslint/recommended-requiring-type-checking", |  | ||||||
|                 "plugin:@typescript-eslint/strict", |  | ||||||
|                 "plugin:import/typescript" |  | ||||||
|             ], |  | ||||||
|             "parserOptions": { |             "parserOptions": { | ||||||
|                 "project": "tsconfig.json" |                 "project": "tsconfig.json" | ||||||
|             }, |             }, | ||||||
|             "settings": { |             "plugins": ["@typescript-eslint"], | ||||||
|                 "import/resolver": { |  | ||||||
|                     "node": { |  | ||||||
|                         "extensions": [".ts", ".d.ts", ".js"] // https://github.com/import-js/eslint-plugin-import/issues/2267 |  | ||||||
|                     } |  | ||||||
|                 } |  | ||||||
|             }, |  | ||||||
|             "globals": { |  | ||||||
|                 "JQuery": false |  | ||||||
|             }, |  | ||||||
|             "rules": { |             "rules": { | ||||||
|                 // Disable base rule to avoid conflict |                 // Disable base rule to avoid conflict | ||||||
|                 "no-duplicate-imports": "off", |                 "empty-returns/main": "off", | ||||||
|                 "no-use-before-define": "off", |                 "indent": "off", | ||||||
|  |                 "func-call-spacing": "off", | ||||||
|  |                 "no-magic-numbers": "off", | ||||||
|  |                 "semi": "off", | ||||||
|  |                 "no-unused-vars": "off", | ||||||
|  |                 "no-useless-constructor": "off", | ||||||
|  |  | ||||||
|                 "@typescript-eslint/consistent-type-definitions": ["error", "type"], |                 "@typescript-eslint/adjacent-overload-signatures": "error", | ||||||
|                 "@typescript-eslint/consistent-type-imports": "error", |                 "@typescript-eslint/array-type": "error", | ||||||
|                 "@typescript-eslint/explicit-function-return-type": [ |                 "@typescript-eslint/await-thenable": "error", | ||||||
|                     "error", |                 "@typescript-eslint/ban-types": "error", | ||||||
|                     {"allowExpressions": true} |                 "@typescript-eslint/ban-ts-ignore": "off", | ||||||
|                 ], |                 "@typescript-eslint/camelcase": "off", | ||||||
|  |                 "@typescript-eslint/class-name-casing": "error", | ||||||
|  |                 "@typescript-eslint/consistent-type-assertions": "error", | ||||||
|  |                 "@typescript-eslint/explicit-function-return-type": ["error", { "allowExpressions": true }], | ||||||
|  |                 "@typescript-eslint/explicit-member-accessibility": "off", | ||||||
|  |                 "@typescript-eslint/func-call-spacing": "error", | ||||||
|  |                 "@typescript-eslint/generic-type-naming": "off", | ||||||
|  |                 "@typescript-eslint/indent": "error", | ||||||
|  |                 "@typescript-eslint/interface-name-prefix": "off", | ||||||
|  |                 "@typescript-eslint/member-delimiter-style": "error", | ||||||
|  |                 "@typescript-eslint/member-naming": ["error", { "private": "^_" } ], | ||||||
|                 "@typescript-eslint/member-ordering": "error", |                 "@typescript-eslint/member-ordering": "error", | ||||||
|                 "@typescript-eslint/no-duplicate-imports": "error", |                 "@typescript-eslint/no-array-constructor": "error", | ||||||
|  |                 "@typescript-eslint/no-empty-interface": "error", | ||||||
|  |                 "@typescript-eslint/no-explicit-any": "off", | ||||||
|  |                 "@typescript-eslint/no-extraneous-class": "error", | ||||||
|  |                 "@typescript-eslint/no-for-in-array": "off", | ||||||
|  |                 "@typescript-eslint/no-inferrable-types": "error", | ||||||
|  |                 "@typescript-eslint/no-magic-numbers": "off", | ||||||
|  |                 "@typescript-eslint/no-misused-new": "error", | ||||||
|  |                 "@typescript-eslint/no-namespace": "error", | ||||||
|                 "@typescript-eslint/no-non-null-assertion": "off", |                 "@typescript-eslint/no-non-null-assertion": "off", | ||||||
|                 "@typescript-eslint/no-parameter-properties": "error", |                 "@typescript-eslint/no-parameter-properties": "error", | ||||||
|                 "@typescript-eslint/no-unnecessary-condition": "off", |                 "@typescript-eslint/no-require-imports": "off", | ||||||
|  |                 "@typescript-eslint/no-this-alias": "off", | ||||||
|  |                 "@typescript-eslint/no-type-alias": "off", | ||||||
|                 "@typescript-eslint/no-unnecessary-qualifier": "error", |                 "@typescript-eslint/no-unnecessary-qualifier": "error", | ||||||
|                 "@typescript-eslint/no-unsafe-argument": "off", |                 "@typescript-eslint/no-unnecessary-type-assertion": "error", | ||||||
|                 "@typescript-eslint/no-unsafe-assignment": "off", |                 "@typescript-eslint/no-unused-vars": ["error", { "varsIgnorePattern": "^_" } ], | ||||||
|                 "@typescript-eslint/no-unsafe-call": "off", |                 "@typescript-eslint/no-use-before-define": "error", | ||||||
|                 "@typescript-eslint/no-unsafe-member-access": "off", |                 "@typescript-eslint/no-useless-constructor": "error", | ||||||
|                 "@typescript-eslint/no-unsafe-return": "off", |                 "@typescript-eslint/no-var-requires": "off", | ||||||
|                 "@typescript-eslint/no-unused-vars": ["error", {"ignoreRestSiblings": true}], |                 "@typescript-eslint/prefer-for-of": "off", | ||||||
|                 "@typescript-eslint/no-use-before-define": ["error", {"functions": false}], |                 "@typescript-eslint/prefer-function-type": "off", | ||||||
|  |                 "@typescript-eslint/prefer-includes": "error", | ||||||
|  |                 "@typescript-eslint/prefer-interface": "off", | ||||||
|  |                 "@typescript-eslint/prefer-namespace-keyword": "error", | ||||||
|  |                 "@typescript-eslint/prefer-regexp-exec": "error", | ||||||
|  |                 "@typescript-eslint/prefer-string-starts-ends-with": "error", | ||||||
|                 "@typescript-eslint/promise-function-async": "error", |                 "@typescript-eslint/promise-function-async": "error", | ||||||
|                 "import/no-cycle": "error", |                 "@typescript-eslint/restrict-plus-operands": "off", | ||||||
|                 "no-undef": "error" |                 "@typescript-eslint/semi": "error", | ||||||
|             } |                 "@typescript-eslint/triple-slash-reference": "error", | ||||||
|         }, |                 "@typescript-eslint/type-annotation-spacing": "error", | ||||||
|         { |                 "@typescript-eslint/unbound-method": "off", | ||||||
|             "files": ["**/*.d.ts"], |                 "@typescript-eslint/unified-signatures": "error" | ||||||
|             "rules": { |  | ||||||
|                 "import/unambiguous": "off" |  | ||||||
|             } |  | ||||||
|         }, |  | ||||||
|         { |  | ||||||
|             "files": ["web/e2e-tests/**", "web/tests/**"], |  | ||||||
|             "globals": { |  | ||||||
|                 "CSS": false, |  | ||||||
|                 "document": false, |  | ||||||
|                 "navigator": false, |  | ||||||
|                 "window": false |  | ||||||
|             }, |  | ||||||
|             "rules": { |  | ||||||
|                 "formatjs/no-id": "off", |  | ||||||
|                 "new-cap": "off", |  | ||||||
|                 "no-sync": "off", |  | ||||||
|                 "unicorn/prefer-prototype-methods": "off" |  | ||||||
|             } |  | ||||||
|         }, |  | ||||||
|         { |  | ||||||
|             "files": ["web/debug-require.js"], |  | ||||||
|             "env": { |  | ||||||
|                 "browser": true, |  | ||||||
|                 "es2020": false |  | ||||||
|             }, |  | ||||||
|             "rules": { |  | ||||||
|                 // Don’t require ES features that PhantomJS doesn’t support |  | ||||||
|                 // TODO: Toggle these settings now that we don't use PhantomJS |  | ||||||
|                 "no-var": "off", |  | ||||||
|                 "object-shorthand": "off", |  | ||||||
|                 "prefer-arrow-callback": "off" |  | ||||||
|             } |  | ||||||
|         }, |  | ||||||
|         { |  | ||||||
|             "files": ["web/shared/**", "web/src/**", "web/third/**"], |  | ||||||
|             "env": { |  | ||||||
|                 "browser": true, |  | ||||||
|                 "node": false |  | ||||||
|             }, |  | ||||||
|             "globals": { |  | ||||||
|                 "ZULIP_VERSION": false |  | ||||||
|             }, |  | ||||||
|             "rules": { |  | ||||||
|                 "no-console": "error" |  | ||||||
|             }, |  | ||||||
|             "settings": { |  | ||||||
|                 "import/resolver": { |  | ||||||
|                     "webpack": { |  | ||||||
|                         "config": "./web/webpack.config.ts" |  | ||||||
|                     } |  | ||||||
|                 } |  | ||||||
|             } |  | ||||||
|         }, |  | ||||||
|         { |  | ||||||
|             "files": ["web/shared/**"], |  | ||||||
|             "env": { |  | ||||||
|                 "browser": false, |  | ||||||
|                 "shared-node-browser": true |  | ||||||
|             }, |  | ||||||
|             "rules": { |  | ||||||
|                 "import/no-restricted-paths": [ |  | ||||||
|                     "error", |  | ||||||
|                     { |  | ||||||
|                         "zones": [ |  | ||||||
|                             { |  | ||||||
|                                 "target": "./web/shared", |  | ||||||
|                                 "from": ".", |  | ||||||
|                                 "except": ["./node_modules", "./web/shared"] |  | ||||||
|                             } |  | ||||||
|                         ] |  | ||||||
|                     } |  | ||||||
|                 ], |  | ||||||
|                 "unicorn/prefer-string-replace-all": "off" |  | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
|     ] |     ] | ||||||
|   | |||||||
							
								
								
									
										19
									
								
								.gitattributes
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										19
									
								
								.gitattributes
									
									
									
									
										vendored
									
									
								
							| @@ -1,19 +1,4 @@ | |||||||
| # DIFFS: Noise suppression. |  | ||||||
| # |  | ||||||
| # Suppress noisy generated files in diffs. |  | ||||||
| # (When you actually want to see these diffs, use `git diff -a`.) |  | ||||||
|  |  | ||||||
| # Large test fixtures: |  | ||||||
| corporate/tests/stripe_fixtures/*.json -diff |  | ||||||
|  |  | ||||||
|  |  | ||||||
| # FORMATTING |  | ||||||
|  |  | ||||||
| # Maintain LF (Unix-style) newlines in text files. |  | ||||||
| *   text=auto eol=lf | *   text=auto eol=lf | ||||||
|  |  | ||||||
| # Make sure various media files never get somehow auto-detected as text |  | ||||||
| # and then newline-converted. |  | ||||||
| *.gif binary | *.gif binary | ||||||
| *.jpg binary | *.jpg binary | ||||||
| *.jpeg binary | *.jpeg binary | ||||||
| @@ -26,7 +11,3 @@ corporate/tests/stripe_fixtures/*.json -diff | |||||||
| *.otf binary | *.otf binary | ||||||
| *.tif binary | *.tif binary | ||||||
| *.ogg binary | *.ogg binary | ||||||
| *.bson binary |  | ||||||
| *.bmp binary |  | ||||||
| *.mp3 binary |  | ||||||
| *.pdf binary |  | ||||||
|   | |||||||
							
								
								
									
										3
									
								
								.github/FUNDING.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										3
									
								
								.github/FUNDING.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,3 +0,0 @@ | |||||||
| github: zulip |  | ||||||
| patreon: zulip |  | ||||||
| open_collective: zulip |  | ||||||
							
								
								
									
										10
									
								
								.github/ISSUE_TEMPLATE/1_discussed_on_czo.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										10
									
								
								.github/ISSUE_TEMPLATE/1_discussed_on_czo.md
									
									
									
									
										vendored
									
									
								
							| @@ -1,10 +0,0 @@ | |||||||
| --- |  | ||||||
| name: Issue discussed in the Zulip development community |  | ||||||
| about: Bug report, feature or improvement already discussed on chat.zulip.org. |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| <!-- Issue description --> |  | ||||||
|  |  | ||||||
| <!-- Link to a message in the chat.zulip.org discussion. Message links will still work even if the topic is renamed or resolved. Link back to this issue from the chat.zulip.org thread. --> |  | ||||||
|  |  | ||||||
| CZO thread |  | ||||||
							
								
								
									
										17
									
								
								.github/ISSUE_TEMPLATE/2_bug_report.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										17
									
								
								.github/ISSUE_TEMPLATE/2_bug_report.md
									
									
									
									
										vendored
									
									
								
							| @@ -1,17 +0,0 @@ | |||||||
| --- |  | ||||||
| name: Bug report |  | ||||||
| about: A concrete bug report with steps to reproduce the behavior. (See also "Possible bug" below.) |  | ||||||
| labels: ["bug"] |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| <!-- Describe what you were expecting to see, what you saw instead, and steps to take in order to reproduce the buggy behavior. Screenshots can be helpful. --> |  | ||||||
|  |  | ||||||
| <!-- Check the box for the version of Zulip you are using (see https://zulip.com/help/view-zulip-version).--> |  | ||||||
|  |  | ||||||
| **Zulip Server and web app version:** |  | ||||||
|  |  | ||||||
| - [ ] Zulip Cloud (`*.zulipchat.com`) |  | ||||||
| - [ ] Zulip Server 7.0+ |  | ||||||
| - [ ] Zulip Server 6.0+ |  | ||||||
| - [ ] Zulip Server 5.0 or older |  | ||||||
| - [ ] Other or not sure |  | ||||||
							
								
								
									
										6
									
								
								.github/ISSUE_TEMPLATE/3_feature_request.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										6
									
								
								.github/ISSUE_TEMPLATE/3_feature_request.md
									
									
									
									
										vendored
									
									
								
							| @@ -1,6 +0,0 @@ | |||||||
| --- |  | ||||||
| name: Feature or improvement request |  | ||||||
| about: A specific proposal for a new feature of improvement. (See also "Feature suggestion or feedback" below.) |  | ||||||
| --- |  | ||||||
|  |  | ||||||
| <!-- Describe the proposal, including how it would help you or your organization. --> |  | ||||||
							
								
								
									
										14
									
								
								.github/ISSUE_TEMPLATE/config.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										14
									
								
								.github/ISSUE_TEMPLATE/config.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,14 +0,0 @@ | |||||||
| blank_issues_enabled: true |  | ||||||
| contact_links: |  | ||||||
|   - name: Possible bug |  | ||||||
|     url: https://zulip.readthedocs.io/en/latest/contributing/reporting-bugs.html |  | ||||||
|     about: Report unexpected behavior that may be a bug. |  | ||||||
|   - name: Feature suggestion or feedback |  | ||||||
|     url: https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html |  | ||||||
|     about: Start a discussion about your idea for improving Zulip. |  | ||||||
|   - name: Issue with running or upgrading a Zulip server |  | ||||||
|     url: https://zulip.readthedocs.io/en/latest/production/troubleshooting.html |  | ||||||
|     about: We provide free, interactive support for the vast majority of questions about running a Zulip server. |  | ||||||
|   - name: Other support requests and sales questions |  | ||||||
|     url: https://zulip.com/help/contact-support |  | ||||||
|     about: Contact us — we're happy to help! |  | ||||||
							
								
								
									
										45
									
								
								.github/pull_request_template.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										45
									
								
								.github/pull_request_template.md
									
									
									
									
										vendored
									
									
								
							| @@ -1,43 +1,14 @@ | |||||||
| <!-- Describe your pull request here.--> | <!-- What's this PR for?  (Just a link to an issue is fine.) --> | ||||||
|  |  | ||||||
| Fixes: <!-- Issue link, or clear description.--> |  | ||||||
|  |  | ||||||
| <!-- If the PR makes UI changes, always include one or more still screenshots to demonstrate your changes. If it seems helpful, add a screen capture of the new functionality as well. | **Testing Plan:** <!-- How have you tested? --> | ||||||
|  |  | ||||||
| Tooling tips: https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html |  | ||||||
| --> |  | ||||||
|  |  | ||||||
| **Screenshots and screen captures:** | **GIFs or Screenshots:** <!-- If a UI change.  See: | ||||||
|  |   https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html | ||||||
|  |   --> | ||||||
|  |  | ||||||
| <details> |  | ||||||
| <summary>Self-review checklist</summary> |  | ||||||
|  |  | ||||||
| <!-- Prior to submitting a PR, follow our step-by-step guide to review your own code: | <!-- Also be sure to make clear, coherent commits: | ||||||
| https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code --> |   https://zulip.readthedocs.io/en/latest/contributing/version-control.html | ||||||
|  |   --> | ||||||
| <!-- Once you create the PR, check off all the steps below that you have completed. |  | ||||||
| If any of these steps are not relevant or you have not completed, leave them unchecked.--> |  | ||||||
|  |  | ||||||
| - [ ] [Self-reviewed](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code) the changes for clarity and maintainability |  | ||||||
|       (variable names, code reuse, readability, etc.). |  | ||||||
|  |  | ||||||
| Communicate decisions, questions, and potential concerns. |  | ||||||
|  |  | ||||||
| - [ ] Explains differences from previous plans (e.g., issue description). |  | ||||||
| - [ ] Highlights technical choices and bugs encountered. |  | ||||||
| - [ ] Calls out remaining decisions and concerns. |  | ||||||
| - [ ] Automated tests verify logic where appropriate. |  | ||||||
|  |  | ||||||
| Individual commits are ready for review (see [commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html)). |  | ||||||
|  |  | ||||||
| - [ ] Each commit is a coherent idea. |  | ||||||
| - [ ] Commit message(s) explain reasoning and motivation for changes. |  | ||||||
|  |  | ||||||
| Completed manual review and testing of the following: |  | ||||||
|  |  | ||||||
| - [ ] Visual appearance of the changes. |  | ||||||
| - [ ] Responsiveness and internationalization. |  | ||||||
| - [ ] Strings and tooltips. |  | ||||||
| - [ ] End-to-end functionality of buttons, interactions and flows. |  | ||||||
| - [ ] Corner cases, error conditions, and easily imagined bugs. |  | ||||||
| </details> |  | ||||||
|   | |||||||
							
								
								
									
										40
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										40
									
								
								.github/workflows/codeql-analysis.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,40 +0,0 @@ | |||||||
| name: "Code scanning" |  | ||||||
|  |  | ||||||
| on: |  | ||||||
|   push: |  | ||||||
|     branches: ["*.x", chat.zulip.org, main] |  | ||||||
|     tags: ["*"] |  | ||||||
|   pull_request: |  | ||||||
|     branches: ["*.x", chat.zulip.org, main] |  | ||||||
|   workflow_dispatch: |  | ||||||
|  |  | ||||||
| concurrency: |  | ||||||
|   group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}" |  | ||||||
|   cancel-in-progress: true |  | ||||||
|  |  | ||||||
| permissions: |  | ||||||
|   contents: read |  | ||||||
|  |  | ||||||
| jobs: |  | ||||||
|   CodeQL: |  | ||||||
|     permissions: |  | ||||||
|       actions: read # for github/codeql-action/init to get workflow details |  | ||||||
|       contents: read # for actions/checkout to fetch code |  | ||||||
|       security-events: write # for github/codeql-action/analyze to upload SARIF results |  | ||||||
|     if: ${{!github.event.repository.private}} |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|  |  | ||||||
|     steps: |  | ||||||
|       - name: Check out repository |  | ||||||
|         uses: actions/checkout@v3 |  | ||||||
|  |  | ||||||
|       # Initializes the CodeQL tools for scanning. |  | ||||||
|       - name: Initialize CodeQL |  | ||||||
|         uses: github/codeql-action/init@v2 |  | ||||||
|  |  | ||||||
|         # Override language selection by uncommenting this and choosing your languages |  | ||||||
|         # with: |  | ||||||
|         #   languages: go, javascript, csharp, python, cpp, java |  | ||||||
|  |  | ||||||
|       - name: Perform CodeQL Analysis |  | ||||||
|         uses: github/codeql-action/analyze@v2 |  | ||||||
							
								
								
									
										315
									
								
								.github/workflows/production-suite.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										315
									
								
								.github/workflows/production-suite.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,315 +0,0 @@ | |||||||
| name: Zulip production suite |  | ||||||
|  |  | ||||||
| on: |  | ||||||
|   push: |  | ||||||
|     branches: ["*.x", chat.zulip.org, main] |  | ||||||
|     tags: ["*"] |  | ||||||
|   pull_request: |  | ||||||
|     paths: |  | ||||||
|       - .github/workflows/production-suite.yml |  | ||||||
|       - "**/migrations/**" |  | ||||||
|       - manage.py |  | ||||||
|       - pnpm-lock.yaml |  | ||||||
|       - puppet/** |  | ||||||
|       - requirements/** |  | ||||||
|       - scripts/** |  | ||||||
|       - tools/** |  | ||||||
|       - web/babel.config.js |  | ||||||
|       - web/postcss.config.js |  | ||||||
|       - web/third/** |  | ||||||
|       - web/webpack.config.ts |  | ||||||
|       - zerver/worker/queue_processors.py |  | ||||||
|       - zerver/lib/push_notifications.py |  | ||||||
|       - zerver/decorator.py |  | ||||||
|       - zproject/** |  | ||||||
|   workflow_dispatch: |  | ||||||
|  |  | ||||||
| concurrency: |  | ||||||
|   group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}" |  | ||||||
|   cancel-in-progress: true |  | ||||||
|  |  | ||||||
| defaults: |  | ||||||
|   run: |  | ||||||
|     shell: bash |  | ||||||
|  |  | ||||||
| permissions: |  | ||||||
|   contents: read |  | ||||||
|  |  | ||||||
| jobs: |  | ||||||
|   production_build: |  | ||||||
|     # This job builds a release tarball from the current commit, which |  | ||||||
|     # will be used for all of the following install/upgrade tests. |  | ||||||
|     name: Ubuntu 20.04 production build |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|  |  | ||||||
|     # Docker images are built from 'tools/ci/Dockerfile'; the comments at |  | ||||||
|     # the top explain how to build and upload these images. |  | ||||||
|     # Ubuntu 20.04 ships with Python 3.8.10. |  | ||||||
|     container: zulip/ci:focal |  | ||||||
|  |  | ||||||
|     steps: |  | ||||||
|       - name: Add required permissions |  | ||||||
|         run: | |  | ||||||
|           # The checkout actions doesn't clone to ~/zulip or allow |  | ||||||
|           # us to use the path option to clone outside the current |  | ||||||
|           # /__w/zulip/zulip directory. Since this directory is owned |  | ||||||
|           # by root we need to change it's ownership to allow the |  | ||||||
|           # github user to clone the code here. |  | ||||||
|           # Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE |  | ||||||
|           # which is /home/runner/work/. |  | ||||||
|           sudo chown -R github . |  | ||||||
|  |  | ||||||
|           # This is the GitHub Actions specific cache directory the |  | ||||||
|           # the current github user must be able to access for the |  | ||||||
|           # cache action to work. It is owned by root currently. |  | ||||||
|           sudo chmod -R 0777 /__w/_temp/ |  | ||||||
|  |  | ||||||
|       - uses: actions/checkout@v3 |  | ||||||
|  |  | ||||||
|       - name: Create cache directories |  | ||||||
|         run: | |  | ||||||
|           dirs=(/srv/zulip-{venv,emoji}-cache) |  | ||||||
|           sudo mkdir -p "${dirs[@]}" |  | ||||||
|           sudo chown -R github "${dirs[@]}" |  | ||||||
|  |  | ||||||
|       - name: Restore pnpm store |  | ||||||
|         uses: actions/cache@v3 |  | ||||||
|         with: |  | ||||||
|           path: ~/.local/share/pnpm/store |  | ||||||
|           key: v1-pnpm-store-focal-${{ hashFiles('pnpm-lock.yaml') }} |  | ||||||
|  |  | ||||||
|       - name: Restore python cache |  | ||||||
|         uses: actions/cache@v3 |  | ||||||
|         with: |  | ||||||
|           path: /srv/zulip-venv-cache |  | ||||||
|           key: v1-venv-focal-${{ hashFiles('requirements/dev.txt') }} |  | ||||||
|           restore-keys: v1-venv-focal |  | ||||||
|  |  | ||||||
|       - name: Restore emoji cache |  | ||||||
|         uses: actions/cache@v3 |  | ||||||
|         with: |  | ||||||
|           path: /srv/zulip-emoji-cache |  | ||||||
|           key: v1-emoji-focal-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }} |  | ||||||
|           restore-keys: v1-emoji-focal |  | ||||||
|  |  | ||||||
|       - name: Build production tarball |  | ||||||
|         run: ./tools/ci/production-build |  | ||||||
|  |  | ||||||
|       - name: Upload production build artifacts for install jobs |  | ||||||
|         uses: actions/upload-artifact@v3 |  | ||||||
|         with: |  | ||||||
|           name: production-tarball |  | ||||||
|           path: /tmp/production-build |  | ||||||
|           retention-days: 1 |  | ||||||
|  |  | ||||||
|       - name: Generate failure report string |  | ||||||
|         id: failure_report_string |  | ||||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} |  | ||||||
|         run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT |  | ||||||
|  |  | ||||||
|       - name: Report status to CZO |  | ||||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} |  | ||||||
|         uses: zulip/github-actions-zulip/send-message@v1 |  | ||||||
|         with: |  | ||||||
|           api-key: ${{ secrets.ZULIP_BOT_KEY }} |  | ||||||
|           email: "github-actions-bot@chat.zulip.org" |  | ||||||
|           organization-url: "https://chat.zulip.org" |  | ||||||
|           to: "automated testing" |  | ||||||
|           topic: ${{ steps.failure_report_string.outputs.topic }} |  | ||||||
|           type: "stream" |  | ||||||
|           content: ${{ steps.failure_report_string.outputs.content }} |  | ||||||
|  |  | ||||||
|   production_install: |  | ||||||
|     # This job installs the server release tarball built above on a |  | ||||||
|     # range of platforms, and does some basic health checks on the |  | ||||||
|     # resulting installer Zulip server. |  | ||||||
|     strategy: |  | ||||||
|       fail-fast: false |  | ||||||
|       matrix: |  | ||||||
|         include: |  | ||||||
|           # Docker images are built from 'tools/ci/Dockerfile'; the comments at |  | ||||||
|           # the top explain how to build and upload these images. |  | ||||||
|           - docker_image: zulip/ci:focal |  | ||||||
|             name: Ubuntu 20.04 production install and PostgreSQL upgrade with pgroonga |  | ||||||
|             os: focal |  | ||||||
|             extra-args: "" |  | ||||||
|  |  | ||||||
|           - docker_image: zulip/ci:jammy |  | ||||||
|             name: Ubuntu 22.04 production install |  | ||||||
|             os: jammy |  | ||||||
|             extra-args: "" |  | ||||||
|  |  | ||||||
|           - docker_image: zulip/ci:bullseye |  | ||||||
|             name: Debian 11 production install with custom db name and user |  | ||||||
|             os: bullseye |  | ||||||
|             extra-args: --test-custom-db |  | ||||||
|  |  | ||||||
|           - docker_image: zulip/ci:bookworm |  | ||||||
|             name: Debian 12 production install |  | ||||||
|             os: bookworm |  | ||||||
|             extra-args: "" |  | ||||||
|  |  | ||||||
|     name: ${{ matrix.name  }} |  | ||||||
|     container: |  | ||||||
|       image: ${{ matrix.docker_image }} |  | ||||||
|       options: --init |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     needs: production_build |  | ||||||
|  |  | ||||||
|     steps: |  | ||||||
|       - name: Download built production tarball |  | ||||||
|         uses: actions/download-artifact@v3 |  | ||||||
|         with: |  | ||||||
|           name: production-tarball |  | ||||||
|           path: /tmp |  | ||||||
|  |  | ||||||
|       - name: Add required permissions and setup |  | ||||||
|         run: | |  | ||||||
|           # This is the GitHub Actions specific cache directory the |  | ||||||
|           # the current github user must be able to access for the |  | ||||||
|           # cache action to work. It is owned by root currently. |  | ||||||
|           sudo chmod -R 0777 /__w/_temp/ |  | ||||||
|  |  | ||||||
|           # Since actions/download-artifact@v2 loses all the permissions |  | ||||||
|           # of the tarball uploaded by the upload artifact fix those. |  | ||||||
|           chmod +x /tmp/production-upgrade-pg |  | ||||||
|           chmod +x /tmp/production-pgroonga |  | ||||||
|           chmod +x /tmp/production-install |  | ||||||
|           chmod +x /tmp/production-verify |  | ||||||
|           chmod +x /tmp/generate-failure-message |  | ||||||
|  |  | ||||||
|       - name: Create cache directories |  | ||||||
|         run: | |  | ||||||
|           dirs=(/srv/zulip-{venv,emoji}-cache) |  | ||||||
|           sudo mkdir -p "${dirs[@]}" |  | ||||||
|           sudo chown -R github "${dirs[@]}" |  | ||||||
|  |  | ||||||
|       - name: Restore pnpm store |  | ||||||
|         uses: actions/cache@v3 |  | ||||||
|         with: |  | ||||||
|           path: ~/.local/share/pnpm/store |  | ||||||
|           key: v1-pnpm-store-${{ matrix.os }}-${{ hashFiles('/tmp/pnpm-lock.yaml') }} |  | ||||||
|  |  | ||||||
|       - name: Install production |  | ||||||
|         run: sudo /tmp/production-install ${{ matrix.extra-args }} |  | ||||||
|  |  | ||||||
|       - name: Verify install |  | ||||||
|         run: sudo /tmp/production-verify ${{ matrix.extra-args }} |  | ||||||
|  |  | ||||||
|       - name: Install pgroonga |  | ||||||
|         if: ${{ matrix.os == 'focal' }} |  | ||||||
|         run: sudo /tmp/production-pgroonga |  | ||||||
|  |  | ||||||
|       - name: Verify install after installing pgroonga |  | ||||||
|         if: ${{ matrix.os == 'focal' }} |  | ||||||
|         run: sudo /tmp/production-verify ${{ matrix.extra-args }} |  | ||||||
|  |  | ||||||
|       - name: Upgrade postgresql |  | ||||||
|         if: ${{ matrix.os == 'focal' }} |  | ||||||
|         run: sudo /tmp/production-upgrade-pg |  | ||||||
|  |  | ||||||
|       - name: Verify install after upgrading postgresql |  | ||||||
|         if: ${{ matrix.os == 'focal' }} |  | ||||||
|         run: sudo /tmp/production-verify ${{ matrix.extra-args }} |  | ||||||
|  |  | ||||||
|       - name: Generate failure report string |  | ||||||
|         id: failure_report_string |  | ||||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} |  | ||||||
|         run: /tmp/generate-failure-message >> $GITHUB_OUTPUT |  | ||||||
|  |  | ||||||
|       - name: Report status to CZO |  | ||||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} |  | ||||||
|         uses: zulip/github-actions-zulip/send-message@v1 |  | ||||||
|         with: |  | ||||||
|           api-key: ${{ secrets.ZULIP_BOT_KEY }} |  | ||||||
|           email: "github-actions-bot@chat.zulip.org" |  | ||||||
|           organization-url: "https://chat.zulip.org" |  | ||||||
|           to: "automated testing" |  | ||||||
|           topic: ${{ steps.failure_report_string.outputs.topic }} |  | ||||||
|           type: "stream" |  | ||||||
|           content: ${{ steps.failure_report_string.outputs.content }} |  | ||||||
|  |  | ||||||
|   production_upgrade: |  | ||||||
|     # The production upgrade job starts with a container with a |  | ||||||
|     # previous Zulip release installed, and attempts to upgrade it to |  | ||||||
|     # the release tarball built for the current commit being tested. |  | ||||||
|     # |  | ||||||
|     # This is intended to catch bugs that result in the upgrade |  | ||||||
|     # process failing. |  | ||||||
|     strategy: |  | ||||||
|       fail-fast: false |  | ||||||
|       matrix: |  | ||||||
|         include: |  | ||||||
|           # Docker images are built from 'tools/ci/Dockerfile.prod'; the comments at |  | ||||||
|           # the top explain how to build and upload these images. |  | ||||||
|           - docker_image: zulip/ci:focal-3.2 |  | ||||||
|             name: 3.2 Version Upgrade |  | ||||||
|             os: focal |  | ||||||
|           - docker_image: zulip/ci:bullseye-4.2 |  | ||||||
|             name: 4.2 Version Upgrade |  | ||||||
|             os: bullseye |  | ||||||
|           - docker_image: zulip/ci:bullseye-5.0 |  | ||||||
|             name: 5.0 Version Upgrade |  | ||||||
|             os: bullseye |  | ||||||
|           - docker_image: zulip/ci:bullseye-6.0 |  | ||||||
|             name: 6.0 Version Upgrade |  | ||||||
|             os: bullseye |  | ||||||
|  |  | ||||||
|     name: ${{ matrix.name  }} |  | ||||||
|     container: |  | ||||||
|       image: ${{ matrix.docker_image }} |  | ||||||
|       options: --init |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     needs: production_build |  | ||||||
|  |  | ||||||
|     steps: |  | ||||||
|       - name: Download built production tarball |  | ||||||
|         uses: actions/download-artifact@v3 |  | ||||||
|         with: |  | ||||||
|           name: production-tarball |  | ||||||
|           path: /tmp |  | ||||||
|  |  | ||||||
|       - name: Add required permissions and setup |  | ||||||
|         run: | |  | ||||||
|           # This is the GitHub Actions specific cache directory the |  | ||||||
|           # the current github user must be able to access for the |  | ||||||
|           # cache action to work. It is owned by root currently. |  | ||||||
|           sudo chmod -R 0777 /__w/_temp/ |  | ||||||
|  |  | ||||||
|           # Since actions/download-artifact@v2 loses all the permissions |  | ||||||
|           # of the tarball uploaded by the upload artifact fix those. |  | ||||||
|           chmod +x /tmp/production-upgrade |  | ||||||
|           chmod +x /tmp/production-verify |  | ||||||
|           chmod +x /tmp/generate-failure-message |  | ||||||
|  |  | ||||||
|       - name: Create cache directories |  | ||||||
|         run: | |  | ||||||
|           dirs=(/srv/zulip-{venv,emoji}-cache) |  | ||||||
|           sudo mkdir -p "${dirs[@]}" |  | ||||||
|           sudo chown -R github "${dirs[@]}" |  | ||||||
|  |  | ||||||
|       - name: Upgrade production |  | ||||||
|         run: sudo /tmp/production-upgrade |  | ||||||
|  |  | ||||||
|         # TODO: We should be running production-verify here, but it |  | ||||||
|         # doesn't pass yet. |  | ||||||
|         # |  | ||||||
|         # - name: Verify install |  | ||||||
|         #   run: sudo /tmp/production-verify |  | ||||||
|  |  | ||||||
|       - name: Generate failure report string |  | ||||||
|         id: failure_report_string |  | ||||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} |  | ||||||
|         run: /tmp/generate-failure-message >> $GITHUB_OUTPUT |  | ||||||
|  |  | ||||||
|       - name: Report status to CZO |  | ||||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} |  | ||||||
|         uses: zulip/github-actions-zulip/send-message@v1 |  | ||||||
|         with: |  | ||||||
|           api-key: ${{ secrets.ZULIP_BOT_KEY }} |  | ||||||
|           email: "github-actions-bot@chat.zulip.org" |  | ||||||
|           organization-url: "https://chat.zulip.org" |  | ||||||
|           to: "automated testing" |  | ||||||
|           topic: ${{ steps.failure_report_string.outputs.topic }} |  | ||||||
|           type: "stream" |  | ||||||
|           content: ${{ steps.failure_report_string.outputs.content }} |  | ||||||
							
								
								
									
										27
									
								
								.github/workflows/update-oneclick-apps.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										27
									
								
								.github/workflows/update-oneclick-apps.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,27 +0,0 @@ | |||||||
| name: Update one click apps |  | ||||||
| on: |  | ||||||
|   release: |  | ||||||
|     types: [published] |  | ||||||
| permissions: |  | ||||||
|   contents: read |  | ||||||
|  |  | ||||||
| jobs: |  | ||||||
|   update-digitalocean-oneclick-app: |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v3 |  | ||||||
|       - name: Update DigitalOcean one click app |  | ||||||
|         env: |  | ||||||
|           DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }} |  | ||||||
|           ZULIP_API_KEY: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_API_KEY }} |  | ||||||
|           ZULIP_EMAIL: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_EMAIL }} |  | ||||||
|           ZULIP_SITE: https://chat.zulip.org |  | ||||||
|           ONE_CLICK_ACTION_STREAM: kandra ops |  | ||||||
|           PYTHON_DIGITALOCEAN_REQUEST_TIMEOUT_SEC: 30 |  | ||||||
|           RELEASE_VERSION: ${{ github.event.release.tag_name }} |  | ||||||
|         run: | |  | ||||||
|           export PATH="$HOME/.local/bin:$PATH" |  | ||||||
|           git clone https://github.com/zulip/marketplace-partners |  | ||||||
|           pip3 install python-digitalocean zulip fab-classic PyNaCl |  | ||||||
|           echo $PATH |  | ||||||
|           python3 tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py |  | ||||||
							
								
								
									
										263
									
								
								.github/workflows/zulip-ci.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										263
									
								
								.github/workflows/zulip-ci.yml
									
									
									
									
										vendored
									
									
								
							| @@ -1,263 +0,0 @@ | |||||||
| # NOTE: Everything test in this file should be in `tools/test-all`.  If there's a |  | ||||||
| # reason not to run it there, it should be there as a comment |  | ||||||
| # explaining why. |  | ||||||
|  |  | ||||||
| name: Zulip CI |  | ||||||
|  |  | ||||||
| on: |  | ||||||
|   push: |  | ||||||
|     branches: ["*.x", chat.zulip.org, main] |  | ||||||
|     tags: ["*"] |  | ||||||
|   pull_request: |  | ||||||
|   workflow_dispatch: |  | ||||||
|  |  | ||||||
| concurrency: |  | ||||||
|   group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}" |  | ||||||
|   cancel-in-progress: true |  | ||||||
|  |  | ||||||
| defaults: |  | ||||||
|   run: |  | ||||||
|     shell: bash |  | ||||||
|  |  | ||||||
| permissions: |  | ||||||
|   contents: read |  | ||||||
|  |  | ||||||
| jobs: |  | ||||||
|   tests: |  | ||||||
|     strategy: |  | ||||||
|       fail-fast: false |  | ||||||
|       matrix: |  | ||||||
|         include: |  | ||||||
|           # Base images are built using `tools/ci/Dockerfile.prod.template`. |  | ||||||
|           # The comments at the top explain how to build and upload these images. |  | ||||||
|           # Ubuntu 20.04 ships with Python 3.8.10. |  | ||||||
|           - docker_image: zulip/ci:focal |  | ||||||
|             name: Ubuntu 20.04 (Python 3.8, backend + frontend) |  | ||||||
|             os: focal |  | ||||||
|             include_documentation_tests: false |  | ||||||
|             include_frontend_tests: true |  | ||||||
|           # Debian 11 ships with Python 3.9.2. |  | ||||||
|           - docker_image: zulip/ci:bullseye |  | ||||||
|             name: Debian 11 (Python 3.9, backend + documentation) |  | ||||||
|             os: bullseye |  | ||||||
|             include_documentation_tests: true |  | ||||||
|             include_frontend_tests: false |  | ||||||
|           # Ubuntu 22.04 ships with Python 3.10.4. |  | ||||||
|           - docker_image: zulip/ci:jammy |  | ||||||
|             name: Ubuntu 22.04 (Python 3.10, backend) |  | ||||||
|             os: jammy |  | ||||||
|             include_documentation_tests: false |  | ||||||
|             include_frontend_tests: false |  | ||||||
|           # Debian 12 ships with Python 3.11.2. |  | ||||||
|           - docker_image: zulip/ci:bookworm |  | ||||||
|             name: Debian 12 (Python 3.11, backend) |  | ||||||
|             os: bookworm |  | ||||||
|             include_documentation_tests: false |  | ||||||
|             include_frontend_tests: false |  | ||||||
|  |  | ||||||
|     runs-on: ubuntu-latest |  | ||||||
|     name: ${{ matrix.name }} |  | ||||||
|     container: ${{ matrix.docker_image }} |  | ||||||
|     env: |  | ||||||
|       # GitHub Actions sets HOME to /github/home which causes |  | ||||||
|       # problem later in provision and frontend test that runs |  | ||||||
|       # tools/setup/postgresql-init-dev-db because of the .pgpass |  | ||||||
|       # location. PostgreSQL (psql) expects .pgpass to be at |  | ||||||
|       # /home/github/.pgpass and setting home to `/home/github/` |  | ||||||
|       # ensures it written there because we write it to ~/.pgpass. |  | ||||||
|       HOME: /home/github/ |  | ||||||
|  |  | ||||||
|     steps: |  | ||||||
|       - uses: actions/checkout@v3 |  | ||||||
|  |  | ||||||
|       - name: Create cache directories |  | ||||||
|         run: | |  | ||||||
|           dirs=(/srv/zulip-{venv,emoji}-cache) |  | ||||||
|           sudo mkdir -p "${dirs[@]}" |  | ||||||
|           sudo chown -R github "${dirs[@]}" |  | ||||||
|  |  | ||||||
|       - name: Restore pnpm store |  | ||||||
|         uses: actions/cache@v3 |  | ||||||
|         with: |  | ||||||
|           path: ~/.local/share/pnpm/store |  | ||||||
|           key: v1-pnpm-store-${{ matrix.os }}-${{ hashFiles('pnpm-lock.yaml') }} |  | ||||||
|  |  | ||||||
|       - name: Restore python cache |  | ||||||
|         uses: actions/cache@v3 |  | ||||||
|         with: |  | ||||||
|           path: /srv/zulip-venv-cache |  | ||||||
|           key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/dev.txt') }} |  | ||||||
|           restore-keys: v1-venv-${{ matrix.os }} |  | ||||||
|  |  | ||||||
|       - name: Restore emoji cache |  | ||||||
|         uses: actions/cache@v3 |  | ||||||
|         with: |  | ||||||
|           path: /srv/zulip-emoji-cache |  | ||||||
|           key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json', 'tools/setup/emoji/build_emoji', 'tools/setup/emoji/emoji_setup_utils.py', 'tools/setup/emoji/emoji_names.py', 'package.json') }} |  | ||||||
|           restore-keys: v1-emoji-${{ matrix.os }} |  | ||||||
|  |  | ||||||
|       - name: Install dependencies |  | ||||||
|         run: | |  | ||||||
|           # This is the main setup job for the test suite |  | ||||||
|           ./tools/ci/setup-backend --skip-dev-db-build |  | ||||||
|  |  | ||||||
|           # Cleaning caches is mostly unnecessary in GitHub Actions, because |  | ||||||
|           # most builds don't get to write to the cache. |  | ||||||
|           # scripts/lib/clean_unused_caches.py --verbose --threshold 0 |  | ||||||
|  |  | ||||||
|       - name: Run tools test |  | ||||||
|         run: | |  | ||||||
|           source tools/ci/activate-venv |  | ||||||
|           ./tools/test-tools |  | ||||||
|  |  | ||||||
|       - name: Run Codespell lint |  | ||||||
|         run: | |  | ||||||
|           source tools/ci/activate-venv |  | ||||||
|           ./tools/run-codespell |  | ||||||
|  |  | ||||||
|       - name: Run backend lint |  | ||||||
|         run: | |  | ||||||
|           source tools/ci/activate-venv |  | ||||||
|           echo "Test suite is running under $(python --version)." |  | ||||||
|           ./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky |  | ||||||
|  |  | ||||||
|       - name: Run frontend lint |  | ||||||
|         if: ${{ matrix.include_frontend_tests }} |  | ||||||
|         run: | |  | ||||||
|           source tools/ci/activate-venv |  | ||||||
|           ./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky |  | ||||||
|  |  | ||||||
|       - name: Run backend tests |  | ||||||
|         run: | |  | ||||||
|           source tools/ci/activate-venv |  | ||||||
|           ./tools/test-backend --coverage --xml-report --no-html-report --include-webhooks --no-cov-cleanup --ban-console-output |  | ||||||
|  |  | ||||||
|       - name: Run mypy |  | ||||||
|         run: | |  | ||||||
|           source tools/ci/activate-venv |  | ||||||
|           # We run mypy after the backend tests so we get output from the |  | ||||||
|           # backend tests, which tend to uncover more serious problems, first. |  | ||||||
|           ./tools/run-mypy --version |  | ||||||
|           ./tools/run-mypy |  | ||||||
|  |  | ||||||
|       - name: Run miscellaneous tests |  | ||||||
|         run: | |  | ||||||
|           source tools/ci/activate-venv |  | ||||||
|  |  | ||||||
|           # Currently our compiled requirements files will differ for different |  | ||||||
|           # Python versions, so we will run test-locked-requirements only on the |  | ||||||
|           # platform with the oldest one. |  | ||||||
|           # ./tools/test-locked-requirements |  | ||||||
|           # ./tools/test-run-dev  # https://github.com/zulip/zulip/pull/14233 |  | ||||||
|           # |  | ||||||
|           # This test has been persistently flaky at like 1% frequency, is slow, |  | ||||||
|           # and is for a very specific single feature, so we don't run it by default: |  | ||||||
|           # ./tools/test-queue-worker-reload |  | ||||||
|  |  | ||||||
|           ./tools/test-migrations |  | ||||||
|           ./tools/setup/optimize-svg --check |  | ||||||
|           ./tools/setup/generate_integration_bots_avatars.py --check-missing |  | ||||||
|           ./tools/ci/check-executables |  | ||||||
|  |  | ||||||
|           # Ban check-database-compatibility from transitively |  | ||||||
|           # relying on static/generated, because it might not be |  | ||||||
|           # up-to-date at that point in upgrade-zulip-stage-2. |  | ||||||
|           chmod 000 static/generated web/generated |  | ||||||
|           ./scripts/lib/check-database-compatibility |  | ||||||
|           chmod 755 static/generated web/generated |  | ||||||
|  |  | ||||||
|       - name: Run documentation and api tests |  | ||||||
|         if: ${{ matrix.include_documentation_tests }} |  | ||||||
|         run: | |  | ||||||
|           source tools/ci/activate-venv |  | ||||||
|           # In CI, we only test links we control in test-documentation to avoid flakes |  | ||||||
|           ./tools/test-documentation --skip-external-links |  | ||||||
|           ./tools/test-help-documentation --skip-external-links |  | ||||||
|           ./tools/test-api |  | ||||||
|  |  | ||||||
|       - name: Run node tests |  | ||||||
|         if: ${{ matrix.include_frontend_tests }} |  | ||||||
|         run: | |  | ||||||
|           source tools/ci/activate-venv |  | ||||||
|           # Run the node tests first, since they're fast and deterministic |  | ||||||
|           ./tools/test-js-with-node --coverage --parallel=1 |  | ||||||
|  |  | ||||||
|       - name: Check schemas |  | ||||||
|         if: ${{ matrix.include_frontend_tests }} |  | ||||||
|         run: | |  | ||||||
|           source tools/ci/activate-venv |  | ||||||
|           # Check that various schemas are consistent. (is fast) |  | ||||||
|           ./tools/check-schemas |  | ||||||
|  |  | ||||||
|       - name: Check capitalization of strings |  | ||||||
|         if: ${{ matrix.include_frontend_tests }} |  | ||||||
|         run: | |  | ||||||
|           source tools/ci/activate-venv |  | ||||||
|           ./manage.py makemessages --locale en |  | ||||||
|           PYTHONWARNINGS=ignore ./tools/check-capitalization --no-generate |  | ||||||
|           PYTHONWARNINGS=ignore ./tools/check-frontend-i18n --no-generate |  | ||||||
|  |  | ||||||
|       - name: Run puppeteer tests |  | ||||||
|         if: ${{ matrix.include_frontend_tests }} |  | ||||||
|         run: | |  | ||||||
|           source tools/ci/activate-venv |  | ||||||
|           ./tools/test-js-with-puppeteer |  | ||||||
|  |  | ||||||
|       - name: Check pnpm dedupe |  | ||||||
|         if: ${{ matrix.include_frontend_tests }} |  | ||||||
|         run: pnpm dedupe --check |  | ||||||
|  |  | ||||||
|       - name: Check for untracked files |  | ||||||
|         run: | |  | ||||||
|           source tools/ci/activate-venv |  | ||||||
|           # This final check looks for untracked files that may have been |  | ||||||
|           # created by test-backend or provision. |  | ||||||
|           untracked="$(git ls-files --exclude-standard --others)" |  | ||||||
|           if [ -n "$untracked" ]; then |  | ||||||
|               printf >&2 "Error: untracked files:\n%s\n" "$untracked" |  | ||||||
|               exit 1 |  | ||||||
|           fi |  | ||||||
|  |  | ||||||
|       - name: Test locked requirements |  | ||||||
|         if: ${{ matrix.os == 'focal' }} |  | ||||||
|         run: | |  | ||||||
|           . /srv/zulip-py3-venv/bin/activate && \ |  | ||||||
|           ./tools/test-locked-requirements |  | ||||||
|  |  | ||||||
|       - name: Upload coverage reports |  | ||||||
|  |  | ||||||
|         # Only upload coverage when both frontend and backend |  | ||||||
|         # tests are run. |  | ||||||
|         if: ${{ matrix.include_frontend_tests }} |  | ||||||
|         uses: codecov/codecov-action@v3 |  | ||||||
|         with: |  | ||||||
|           files: var/coverage.xml,var/node-coverage/lcov.info |  | ||||||
|  |  | ||||||
|       - name: Store Puppeteer artifacts |  | ||||||
|         # Upload these on failure, as well |  | ||||||
|         if: ${{ always() && matrix.include_frontend_tests }} |  | ||||||
|         uses: actions/upload-artifact@v3 |  | ||||||
|         with: |  | ||||||
|           name: puppeteer |  | ||||||
|           path: ./var/puppeteer |  | ||||||
|           retention-days: 60 |  | ||||||
|  |  | ||||||
|       - name: Check development database build |  | ||||||
|         run: ./tools/ci/setup-backend |  | ||||||
|  |  | ||||||
|       - name: Generate failure report string |  | ||||||
|         id: failure_report_string |  | ||||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} |  | ||||||
|         run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT |  | ||||||
|  |  | ||||||
|       - name: Report status to CZO |  | ||||||
|         if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }} |  | ||||||
|         uses: zulip/github-actions-zulip/send-message@v1 |  | ||||||
|         with: |  | ||||||
|           api-key: ${{ secrets.ZULIP_BOT_KEY }} |  | ||||||
|           email: "github-actions-bot@chat.zulip.org" |  | ||||||
|           organization-url: "https://chat.zulip.org" |  | ||||||
|           to: "automated testing" |  | ||||||
|           topic: ${{ steps.failure_report_string.outputs.topic }} |  | ||||||
|           type: "stream" |  | ||||||
|           content: ${{ steps.failure_report_string.outputs.content }} |  | ||||||
							
								
								
									
										22
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										22
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -27,13 +27,12 @@ | |||||||
| package-lock.json | package-lock.json | ||||||
|  |  | ||||||
| /.vagrant | /.vagrant | ||||||
| /var/* | /var | ||||||
| !/var/puppeteer |  | ||||||
| /var/puppeteer/* |  | ||||||
| !/var/puppeteer/test_credentials.d.ts |  | ||||||
|  |  | ||||||
| /.dmypy.json | /.dmypy.json | ||||||
| /.ruff_cache |  | ||||||
|  | # Dockerfiles generated for CircleCI | ||||||
|  | /tools/circleci/images | ||||||
|  |  | ||||||
| # Generated i18n data | # Generated i18n data | ||||||
| /locale/en | /locale/en | ||||||
| @@ -44,11 +43,11 @@ package-lock.json | |||||||
| # Static build | # Static build | ||||||
| *.mo | *.mo | ||||||
| npm-debug.log | npm-debug.log | ||||||
| /.pnpm-store |  | ||||||
| /node_modules | /node_modules | ||||||
| /prod-static | /prod-static | ||||||
| /staticfiles.json | /staticfiles.json | ||||||
| /webpack-stats-production.json | /webpack-stats-production.json | ||||||
|  | /yarn-error.log | ||||||
| zulip-git-version | zulip-git-version | ||||||
|  |  | ||||||
| # Test / analysis tools | # Test / analysis tools | ||||||
| @@ -71,18 +70,13 @@ zulip.kdev4 | |||||||
| *.kate-swp | *.kate-swp | ||||||
| *.sublime-project | *.sublime-project | ||||||
| *.sublime-workspace | *.sublime-workspace | ||||||
|  | .vscode/ | ||||||
| *.DS_Store | *.DS_Store | ||||||
| # VS Code. Avoid checking in .vscode in general, while still specifying | # .cache/ is generated by VSCode's test runner | ||||||
| # recommended extensions for working with this repository. |  | ||||||
| /.vscode/**/* |  | ||||||
| !/.vscode/extensions.json |  | ||||||
| # .cache/ is generated by VS Code test runner |  | ||||||
| .cache/ | .cache/ | ||||||
| .eslintcache | .eslintcache | ||||||
|  |  | ||||||
| # Core dump files |  | ||||||
| core |  | ||||||
|  |  | ||||||
| ## Miscellaneous | ## Miscellaneous | ||||||
| # (Ideally this section is empty.) | # (Ideally this section is empty.) | ||||||
|  | zthumbor/thumbor_local_settings.py | ||||||
| .transifexrc | .transifexrc | ||||||
|   | |||||||
							
								
								
									
										8
									
								
								.gitlint
									
									
									
									
									
								
							
							
						
						
									
										8
									
								
								.gitlint
									
									
									
									
									
								
							| @@ -1,13 +1,13 @@ | |||||||
| [general] | [general] | ||||||
| ignore=title-trailing-punctuation, body-min-length, body-is-missing | ignore=title-trailing-punctuation, body-min-length, body-is-missing, title-imperative-mood | ||||||
|  |  | ||||||
| extra-path=tools/lib/gitlint_rules.py | extra-path=tools/lib/gitlint-rules.py | ||||||
|  |  | ||||||
| [title-match-regex] | [title-match-regex-allow-exception] | ||||||
| regex=^(.+:\ )?[A-Z].+\.$ | regex=^(.+:\ )?[A-Z].+\.$ | ||||||
|  |  | ||||||
| [title-max-length] | [title-max-length] | ||||||
| line-length=72 | line-length=76 | ||||||
|  |  | ||||||
| [body-max-line-length] | [body-max-line-length] | ||||||
| line-length=76 | line-length=76 | ||||||
|   | |||||||
							
								
								
									
										10
									
								
								.isort.cfg
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										10
									
								
								.isort.cfg
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,10 @@ | |||||||
|  | [settings] | ||||||
|  | line_length = 79 | ||||||
|  | multi_line_output = 2 | ||||||
|  | balanced_wrapping = true | ||||||
|  | known_third_party = django, ujson, sqlalchemy | ||||||
|  | known_first_party = zerver, zproject, version, confirmation, zilencer, analytics, frontend_tests, scripts, corporate | ||||||
|  | sections = FUTURE, STDLIB, THIRDPARTY, FIRSTPARTY, LOCALFOLDER | ||||||
|  | lines_after_imports = 1 | ||||||
|  | # See the comment related to ioloop_logging for why this is skipped. | ||||||
|  | skip = zerver/management/commands/runtornado.py | ||||||
							
								
								
									
										131
									
								
								.mailmap
									
									
									
									
									
								
							
							
						
						
									
										131
									
								
								.mailmap
									
									
									
									
									
								
							| @@ -1,131 +0,0 @@ | |||||||
| # This file teaches `git log` and friends the canonical names |  | ||||||
| # and email addresses to use for our contributors. |  | ||||||
| # |  | ||||||
| # For details on the format, see: |  | ||||||
| #   https://git.github.io/htmldocs/gitmailmap.html |  | ||||||
| # |  | ||||||
| # Handy commands for examining or adding to this file: |  | ||||||
| # |  | ||||||
| #     # shows all names/emails after mapping, sorted: |  | ||||||
| #   $ git shortlog -es | sort -k2 |  | ||||||
| # |  | ||||||
| #     # shows raw names/emails, filtered by mapped name: |  | ||||||
| #   $ git log --format='%an %ae' --author=$NAME | uniq -c |  | ||||||
|  |  | ||||||
| acrefoot <acrefoot@zulip.com> <acrefoot@humbughq.com> |  | ||||||
| acrefoot <acrefoot@zulip.com> <acrefoot@dropbox.com> |  | ||||||
| acrefoot <acrefoot@zulip.com> <acrefoot@alum.mit.edu> |  | ||||||
| Adam Benesh <Adam.Benesh@gmail.com> <Adam-Daniel.Benesh@t-systems.com> |  | ||||||
| Adam Benesh <Adam.Benesh@gmail.com> |  | ||||||
| Adarsh Tiwari <xoldyckk@gmail.com> |  | ||||||
| Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net> |  | ||||||
| Alex Vandiver <alexmv@zulip.com> <github@chmrr.net> |  | ||||||
| Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com> |  | ||||||
| Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@zulip.com> |  | ||||||
| Alya Abbott <alya@zulip.com> <2090066+alya@users.noreply.github.com> |  | ||||||
| Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in> |  | ||||||
| Aman Agrawal <amanagr@zulip.com> |  | ||||||
| Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com> |  | ||||||
| Anders Kaseorg <anders@zulip.com> <andersk@mit.edu> |  | ||||||
| Aryan Shridhar <aryanshridhar7@gmail.com> <53977614+aryanshridhar@users.noreply.github.com> |  | ||||||
| Aryan Shridhar <aryanshridhar7@gmail.com> |  | ||||||
| aparna-bhatt <aparnabhatt2001@gmail.com> <86338542+aparna-bhatt@users.noreply.github.com> |  | ||||||
| Ashwat Kumar Singh <ashwat.kumarsingh.met20@itbhu.ac.in> |  | ||||||
| Austin Riba <austin@zulip.com> <austin@m51.io> |  | ||||||
| BIKI DAS <bikid475@gmail.com> |  | ||||||
| Brijmohan Siyag <brijsiyag@gmail.com> |  | ||||||
| Brock Whittaker <brock@zulipchat.com> <bjwhitta@asu.edu> |  | ||||||
| Brock Whittaker <brock@zulipchat.com> <brockwhittaker@Brocks-MacBook.local> |  | ||||||
| Brock Whittaker <brock@zulipchat.com> <brock@zulipchat.org> |  | ||||||
| Chris Bobbe <cbobbe@zulip.com> <cbobbe@zulipchat.com> |  | ||||||
| Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com> |  | ||||||
| Danny Su <contact@dannysu.com> <opensource@emailengine.org> |  | ||||||
| Dinesh <chdinesh1089@gmail.com> |  | ||||||
| Dinesh <chdinesh1089@gmail.com> <chdinesh1089> |  | ||||||
| Eeshan Garg <eeshan@zulip.com> <jerryguitarist@gmail.com> |  | ||||||
| Eric Smith <erwsmith@gmail.com> <99841919+erwsmith@users.noreply.github.com> |  | ||||||
| Evy Kassirer <evy.kassirer@gmail.com> |  | ||||||
| Evy Kassirer <evy.kassirer@gmail.com> <evykassirer@users.noreply.github.com> |  | ||||||
| Ganesh Pawar <pawarg256@gmail.com> <58626718+ganpa3@users.noreply.github.com> |  | ||||||
| Greg Price <greg@zulip.com> <gnprice@gmail.com> |  | ||||||
| Greg Price <greg@zulip.com> <greg@zulipchat.com> |  | ||||||
| Greg Price <greg@zulip.com> <price@mit.edu> |  | ||||||
| Hardik Dharmani <Ddharmani99@gmail.com> <ddharmani99@gmail.com> |  | ||||||
| Hemant Umre <hemantumre12@gmail.com> <87542880+HemantUmre12@users.noreply.github.com> |  | ||||||
| Jai soni <jai_s@me.iitr.ac.in> |  | ||||||
| Jai soni <jai_s@me.iitr.ac.in> <76561593+jai2201@users.noreply.github.com> |  | ||||||
| Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com> |  | ||||||
| Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com> |  | ||||||
| Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com> |  | ||||||
| Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com> |  | ||||||
| Julia Bichler <julia.bichler@tum.de> <74348920+juliaBichler01@users.noreply.github.com> |  | ||||||
| Karl Stolley <karl@zulip.com> <karl@stolley.dev> |  | ||||||
| Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com> |  | ||||||
| Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com> |  | ||||||
| Kevin Scott <kevin.scott.98@gmail.com> |  | ||||||
| Lalit Kumar Singh <lalitkumarsingh3716@gmail.com> |  | ||||||
| Lauryn Menard <lauryn@zulip.com> <lauryn.menard@gmail.com> |  | ||||||
| Lauryn Menard <lauryn@zulip.com> <63245456+laurynmm@users.noreply.github.com> |  | ||||||
| Mateusz Mandera <mateusz.mandera@zulip.com> <mateusz.mandera@protonmail.com> |  | ||||||
| Matt Keller <matt@zulip.com> |  | ||||||
| Matt Keller <matt@zulip.com> <m@cognusion.com> |  | ||||||
| m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in> |  | ||||||
| m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in> <pururshottam.tiwari.cd.cse19@itbhu.ac.in> |  | ||||||
| Noble Mittal <noblemittal@outlook.com> <62551163+beingnoble03@users.noreply.github.com> |  | ||||||
| nzai <nzaih18@gmail.com> <70953556+nzaih1999@users.noreply.github.com> |  | ||||||
| Palash Baderia <palash.baderia@outlook.com> |  | ||||||
| Palash Baderia <palash.baderia@outlook.com> <66828942+palashb01@users.noreply.github.com> |  | ||||||
| Palash Raghuwanshi <singhpalash0@gmail.com> |  | ||||||
| Parth <mittalparth22@gmail.com> |  | ||||||
| Priyam Seth <sethpriyam1@gmail.com> <b19188@students.iitmandi.ac.in> |  | ||||||
| Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com> |  | ||||||
| Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com> |  | ||||||
| Rein Zustand (rht) <rhtbot@protonmail.com> |  | ||||||
| Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu> |  | ||||||
| Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com> |  | ||||||
| Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com> |  | ||||||
| Rishabh Maheshwari <b20063@students.iitmandi.ac.in> |  | ||||||
| Rixant Rokaha <rixantrokaha@gmail.com> |  | ||||||
| Rixant Rokaha <rixantrokaha@gmail.com> <rishantrokaha@gmail.com> |  | ||||||
| Rixant Rokaha <rixantrokaha@gmail.com> <rrokaha@caldwell.edu> |  | ||||||
| Sahil Batra <sahil@zulip.com> <35494118+sahil839@users.noreply.github.com> |  | ||||||
| Sahil Batra <sahil@zulip.com> <sahilbatra839@gmail.com> |  | ||||||
| Satyam Bansal <sbansal1999@gmail.com> |  | ||||||
| Sayam Samal <samal.sayam@gmail.com> |  | ||||||
| Scott Feeney <scott@oceanbase.org> <scott@humbughq.com> |  | ||||||
| Scott Feeney <scott@oceanbase.org> <scott@zulip.com> |  | ||||||
| Shlok Patel <shlokcpatel2001@gmail.com> |  | ||||||
| Somesh Ranjan <somesh.ranjan.met20@itbhu.ac.in> <77766761+somesh202@users.noreply.github.com> |  | ||||||
| Steve Howell <showell@zulip.com> <showell30@yahoo.com> |  | ||||||
| Steve Howell <showell@zulip.com> <showell@yahoo.com> |  | ||||||
| Steve Howell <showell@zulip.com> <showell@zulipchat.com> |  | ||||||
| Steve Howell <showell@zulip.com> <steve@humbughq.com> |  | ||||||
| Steve Howell <showell@zulip.com> <steve@zulip.com> |  | ||||||
| strifel <info@strifel.de> |  | ||||||
| Tim Abbott <tabbott@zulip.com> |  | ||||||
| Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com> |  | ||||||
| Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com> |  | ||||||
| Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu> |  | ||||||
| Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com> |  | ||||||
| Ujjawal Modi <umodi2003@gmail.com> <99073049+Ujjawal3@users.noreply.github.com> |  | ||||||
| Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com> |  | ||||||
| Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com> |  | ||||||
| Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com> |  | ||||||
| umkay <ukhan@zulipchat.com> <umaimah.k@gmail.com> |  | ||||||
| umkay <ukhan@zulipchat.com> <umkay@users.noreply.github.com> |  | ||||||
| Waseem Daher <wdaher@zulip.com> <wdaher@humbughq.com> |  | ||||||
| Waseem Daher <wdaher@zulip.com> <wdaher@dropbox.com> |  | ||||||
| Yash RE <33805964+YashRE42@users.noreply.github.com> <YashRE42@github.com> |  | ||||||
| Yash RE <33805964+YashRE42@users.noreply.github.com> |  | ||||||
| Yogesh Sirsat <yogeshsirsat56@gmail.com> |  | ||||||
| Yogesh Sirsat <yogeshsirsat56@gmail.com> <41695888+yogesh-sirsat@users.noreply.github.com> |  | ||||||
| Zeeshan Equbal <equbalzeeshan@gmail.com> <54993043+zee-bit@users.noreply.github.com> |  | ||||||
| Zeeshan Equbal <equbalzeeshan@gmail.com> |  | ||||||
| Zev Benjamin <zev@zulip.com> <zev@dropbox.com> |  | ||||||
| Zev Benjamin <zev@zulip.com> <zev@humbughq.com> |  | ||||||
| Zev Benjamin <zev@zulip.com> <zev@mit.edu> |  | ||||||
| Zixuan James Li <p359101898@gmail.com> |  | ||||||
| Zixuan James Li <p359101898@gmail.com> <39874143+PIG208@users.noreply.github.com> |  | ||||||
| Zixuan James Li <p359101898@gmail.com> <359101898@qq.com> |  | ||||||
| Joseph Ho <josephho678@gmail.com> |  | ||||||
| Joseph Ho <josephho678@gmail.com> <62449508+Joelute@users.noreply.github.com> |  | ||||||
| @@ -1,11 +0,0 @@ | |||||||
| pnpm-lock.yaml |  | ||||||
| /api_docs/**/*.md |  | ||||||
| /corporate/tests/stripe_fixtures |  | ||||||
| /help/**/*.md |  | ||||||
| /locale |  | ||||||
| /templates/**/*.md |  | ||||||
| /tools/setup/emoji/emoji_map.json |  | ||||||
| /web/third |  | ||||||
| /zerver/tests/fixtures |  | ||||||
| /zerver/webhooks/*/doc.md |  | ||||||
| /zerver/webhooks/*/fixtures |  | ||||||
| @@ -1,15 +0,0 @@ | |||||||
| { |  | ||||||
|   "source_directories": ["."], |  | ||||||
|   "taint_models_path": [ |  | ||||||
|       "stubs/taint", |  | ||||||
|       "zulip-py3-venv/lib/pyre_check/taint/" |  | ||||||
|   ], |  | ||||||
|   "search_path": [ |  | ||||||
|       "stubs/", |  | ||||||
|       "zulip-py3-venv/lib/pyre_check/stubs/" |  | ||||||
|   ], |  | ||||||
|   "typeshed": "zulip-py3-venv/lib/pyre_check/typeshed/", |  | ||||||
|   "exclude": [ |  | ||||||
|       "/srv/zulip/zulip-py3-venv/.*" |  | ||||||
|   ] |  | ||||||
| } |  | ||||||
| @@ -1,15 +0,0 @@ | |||||||
| # https://docs.readthedocs.io/en/stable/config-file/v2.html |  | ||||||
| version: 2 |  | ||||||
|  |  | ||||||
| build: |  | ||||||
|   os: ubuntu-22.04 |  | ||||||
|   tools: |  | ||||||
|     python: "3.10" |  | ||||||
|  |  | ||||||
| sphinx: |  | ||||||
|   configuration: docs/conf.py |  | ||||||
|   fail_on_warning: true |  | ||||||
|  |  | ||||||
| python: |  | ||||||
|   install: |  | ||||||
|     - requirements: requirements/docs.txt |  | ||||||
| @@ -1 +0,0 @@ | |||||||
| sonar.inclusions=**/*.py,**/*.html |  | ||||||
							
								
								
									
										67
									
								
								.stylelintrc
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										67
									
								
								.stylelintrc
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,67 @@ | |||||||
|  | { | ||||||
|  |     "rules": { | ||||||
|  |         # Stylistic rules for CSS. | ||||||
|  |         "function-comma-space-after": "always", | ||||||
|  |         "function-comma-space-before": "never", | ||||||
|  |         "function-max-empty-lines": 0, | ||||||
|  |         "function-whitespace-after": "always", | ||||||
|  |  | ||||||
|  |         "value-keyword-case": "lower", | ||||||
|  |         "value-list-comma-newline-after": "always-multi-line", | ||||||
|  |         "value-list-comma-space-after": "always-single-line", | ||||||
|  |         "value-list-comma-space-before": "never", | ||||||
|  |         "value-list-max-empty-lines": 0, | ||||||
|  |  | ||||||
|  |         "unit-case": "lower", | ||||||
|  |         "property-case": "lower", | ||||||
|  |         "color-hex-case": "lower", | ||||||
|  |  | ||||||
|  |         "declaration-bang-space-before": "always", | ||||||
|  |         "declaration-colon-newline-after": "always-multi-line", | ||||||
|  |         "declaration-colon-space-after": "always-single-line", | ||||||
|  |         "declaration-colon-space-before": "never", | ||||||
|  |         "declaration-block-semicolon-newline-after": "always", | ||||||
|  |         "declaration-block-semicolon-space-before": "never", | ||||||
|  |         "declaration-block-trailing-semicolon": "always", | ||||||
|  |  | ||||||
|  |         "block-closing-brace-empty-line-before": "never", | ||||||
|  |         "block-closing-brace-newline-after": "always", | ||||||
|  |         "block-closing-brace-newline-before": "always", | ||||||
|  |         "block-opening-brace-newline-after": "always", | ||||||
|  |         "block-opening-brace-space-before": "always", | ||||||
|  |  | ||||||
|  |         "selector-attribute-brackets-space-inside": "never", | ||||||
|  |         "selector-attribute-operator-space-after": "never", | ||||||
|  |         "selector-attribute-operator-space-before": "never", | ||||||
|  |         "selector-combinator-space-after": "always", | ||||||
|  |         "selector-combinator-space-before": "always", | ||||||
|  |         "selector-descendant-combinator-no-non-space": true, | ||||||
|  |         "selector-pseudo-class-parentheses-space-inside": "never", | ||||||
|  |         "selector-pseudo-element-case": "lower", | ||||||
|  |         "selector-pseudo-element-colon-notation": "double", | ||||||
|  |         "selector-type-case": "lower", | ||||||
|  |         "selector-list-comma-newline-after": "always", | ||||||
|  |         "selector-list-comma-space-before": "never", | ||||||
|  |  | ||||||
|  |         "media-feature-colon-space-after": "always", | ||||||
|  |         "media-feature-colon-space-before": "never", | ||||||
|  |         "media-feature-name-case": "lower", | ||||||
|  |         "media-feature-parentheses-space-inside": "never", | ||||||
|  |         "media-feature-range-operator-space-after": "always", | ||||||
|  |         "media-feature-range-operator-space-before": "always", | ||||||
|  |         "media-query-list-comma-newline-after": "always", | ||||||
|  |         "media-query-list-comma-space-before": "never", | ||||||
|  |  | ||||||
|  |         "at-rule-name-case": "lower", | ||||||
|  |         "at-rule-name-space-after": "always", | ||||||
|  |         "at-rule-semicolon-newline-after": "always", | ||||||
|  |         "at-rule-semicolon-space-before": "never", | ||||||
|  |  | ||||||
|  |         "comment-whitespace-inside": "always", | ||||||
|  |         "indentation": 4, | ||||||
|  |          | ||||||
|  |         # Limit language features | ||||||
|  |         "color-no-hex": true, | ||||||
|  |         "color-named": "never", | ||||||
|  |     } | ||||||
|  | } | ||||||
							
								
								
									
										66
									
								
								.travis.yml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										66
									
								
								.travis.yml
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,66 @@ | |||||||
|  | # See https://zulip.readthedocs.io/en/latest/testing/continuous-integration.html for | ||||||
|  | # high-level documentation on our Travis CI setup. | ||||||
|  | dist: xenial | ||||||
|  | install: | ||||||
|  |   # Disable sometimes-broken sources.list in Travis base images | ||||||
|  |   - sudo rm -vf /etc/apt/sources.list.d/* | ||||||
|  |   - sudo apt-get update | ||||||
|  |  | ||||||
|  |   # Disable Travis CI's built-in NVM installation | ||||||
|  |   - mispipe "mv ~/.nvm ~/.travis-nvm-disabled" ts | ||||||
|  |  | ||||||
|  |   # Install codecov, the library for the code coverage reporting tool we use | ||||||
|  |   # With a retry to minimize impact of transient networking errors. | ||||||
|  |   - mispipe "pip install codecov" ts || mispipe "pip install codecov" ts | ||||||
|  |  | ||||||
|  |   # This is the main setup job for the test suite | ||||||
|  |   - mispipe "tools/ci/setup-$TEST_SUITE" ts | ||||||
|  |  | ||||||
|  |   # Clean any caches that are not in use to avoid our cache | ||||||
|  |   # becoming huge. | ||||||
|  |   - mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0" ts | ||||||
|  |  | ||||||
|  | script: | ||||||
|  |   # We unset GEM_PATH here as a hack to work around Travis CI having | ||||||
|  |   # broken running their system puppet with Ruby.  See | ||||||
|  |   # https://travis-ci.org/zulip/zulip/jobs/240120991 for an example traceback. | ||||||
|  |   - unset GEM_PATH | ||||||
|  |   - mispipe "./tools/ci/$TEST_SUITE" ts | ||||||
|  | cache: | ||||||
|  |   yarn: true | ||||||
|  |   apt: false | ||||||
|  |   directories: | ||||||
|  |     - $HOME/zulip-venv-cache | ||||||
|  |     - $HOME/zulip-npm-cache | ||||||
|  |     - $HOME/zulip-emoji-cache | ||||||
|  |     - $HOME/node | ||||||
|  |     - $HOME/misc | ||||||
|  | env: | ||||||
|  |   global: | ||||||
|  |     - BOTO_CONFIG=/nonexistent | ||||||
|  | language: python | ||||||
|  | # Our test suites generally run on Python 3.5, the version in | ||||||
|  | # Ubuntu 16.04 xenial, which is the oldest OS release we support. | ||||||
|  | matrix: | ||||||
|  |   include: | ||||||
|  |     # Travis will actually run the jobs in the order they're listed here; | ||||||
|  |     # that doesn't seem to be documented, but it's what we see empirically. | ||||||
|  |     # We only get 4 jobs running at a time, so we try to make the first few | ||||||
|  |     # the most likely to break. | ||||||
|  |     - python: "3.5" | ||||||
|  |       env: TEST_SUITE=production | ||||||
|  |     # Other suites moved to CircleCI -- see .circleci/. | ||||||
|  | sudo: required | ||||||
|  | addons: | ||||||
|  |   artifacts: | ||||||
|  |     paths: | ||||||
|  |       # Casper debugging data (screenshots, etc.) is super useful for | ||||||
|  |       # debugging test flakes. | ||||||
|  |       - $(ls var/casper/* | tr "\n" ":") | ||||||
|  |       - $(ls /tmp/zulip-test-event-log/* | tr "\n" ":") | ||||||
|  |   postgresql: "9.5" | ||||||
|  |   apt: | ||||||
|  |     packages: | ||||||
|  |       - moreutils | ||||||
|  | after_success: | ||||||
|  |   - codecov | ||||||
							
								
								
									
										27
									
								
								.tx/config
									
									
									
									
									
								
							
							
						
						
									
										27
									
								
								.tx/config
									
									
									
									
									
								
							| @@ -1,39 +1,32 @@ | |||||||
| # Migrated from transifex-client format with `tx migrate` |  | ||||||
| # |  | ||||||
| # See https://developers.transifex.com/docs/using-the-client which hints at |  | ||||||
| # this format, but in general, the headings are in the format of: |  | ||||||
| # |  | ||||||
| # [o:<org>:p:<project>:r:<resource>] |  | ||||||
|  |  | ||||||
| [main] | [main] | ||||||
| host = https://www.transifex.com | host = https://www.transifex.com | ||||||
| lang_map = zh-Hans: zh_Hans, zh-Hant: zh_Hant | lang_map = zh-Hans: zh_Hans, zh-Hant: zh_Hant | ||||||
|  |  | ||||||
| [o:zulip:p:zulip:r:djangopo] | [zulip.djangopo] | ||||||
| file_filter = locale/<lang>/LC_MESSAGES/django.po | file_filter = locale/<lang>/LC_MESSAGES/django.po | ||||||
| source_file = locale/en/LC_MESSAGES/django.po | source_file = locale/en/LC_MESSAGES/django.po | ||||||
| source_lang = en | source_lang = en | ||||||
| type = PO | type = PO | ||||||
|  |  | ||||||
| [o:zulip:p:zulip:r:mobile] | [zulip.translationsjson] | ||||||
|  | file_filter = locale/<lang>/translations.json | ||||||
|  | source_file = locale/en/translations.json | ||||||
|  | source_lang = en | ||||||
|  | type = KEYVALUEJSON | ||||||
|  |  | ||||||
|  | [zulip.mobile] | ||||||
| file_filter = locale/<lang>/mobile.json | file_filter = locale/<lang>/mobile.json | ||||||
| source_file = locale/en/mobile.json | source_file = locale/en/mobile.json | ||||||
| source_lang = en | source_lang = en | ||||||
| type = KEYVALUEJSON | type = KEYVALUEJSON | ||||||
|  |  | ||||||
| [o:zulip:p:zulip:r:translationsjson] | [zulip-test.djangopo] | ||||||
| file_filter = locale/<lang>/translations.json |  | ||||||
| source_file = locale/en/translations.json |  | ||||||
| source_lang = en |  | ||||||
| type = KEYVALUEJSON |  | ||||||
|  |  | ||||||
| [o:zulip:p:zulip-test:r:djangopo] |  | ||||||
| file_filter = locale/<lang>/LC_MESSAGES/django.po | file_filter = locale/<lang>/LC_MESSAGES/django.po | ||||||
| source_file = locale/en/LC_MESSAGES/django.po | source_file = locale/en/LC_MESSAGES/django.po | ||||||
| source_lang = en | source_lang = en | ||||||
| type = PO | type = PO | ||||||
|  |  | ||||||
| [o:zulip:p:zulip-test:r:translationsjson] | [zulip-test.translationsjson] | ||||||
| file_filter = locale/<lang>/translations.json | file_filter = locale/<lang>/translations.json | ||||||
| source_file = locale/en/translations.json | source_file = locale/en/translations.json | ||||||
| source_lang = en | source_lang = en | ||||||
|   | |||||||
							
								
								
									
										23
									
								
								.vscode/extensions.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										23
									
								
								.vscode/extensions.json
									
									
									
									
										vendored
									
									
								
							| @@ -1,23 +0,0 @@ | |||||||
| { |  | ||||||
|     // Recommended VS Code extensions for zulip/zulip. |  | ||||||
|     // |  | ||||||
|     // VS Code prompts a user to install the recommended extensions |  | ||||||
|     // when a workspace is opened for the first time.  The user can |  | ||||||
|     // also review the list with the 'Extensions: Show Recommended |  | ||||||
|     // Extensions' command.  See |  | ||||||
|     // https://code.visualstudio.com/docs/editor/extension-marketplace#_workspace-recommended-extensions |  | ||||||
|     // for more information. |  | ||||||
|     // |  | ||||||
|     // Extension identifier format: ${publisher}.${name}. |  | ||||||
|     // Example: vscode.csharp |  | ||||||
|  |  | ||||||
|     "recommendations": [ |  | ||||||
|         "42crunch.vscode-openapi", |  | ||||||
|         "dbaeumer.vscode-eslint", |  | ||||||
|         "esbenp.prettier-vscode", |  | ||||||
|         "ms-vscode-remote.vscode-remote-extensionpack" |  | ||||||
|     ], |  | ||||||
|  |  | ||||||
|     // Extensions recommended by VS Code which are not recommended for users of zulip/zulip. |  | ||||||
|     "unwantedRecommendations": [] |  | ||||||
| } |  | ||||||
| @@ -14,46 +14,46 @@ This isn't an exhaustive list of things that you can't do. Rather, take it | |||||||
| in the spirit in which it's intended --- a guide to make it easier to enrich | in the spirit in which it's intended --- a guide to make it easier to enrich | ||||||
| all of us and the technical communities in which we participate. | all of us and the technical communities in which we participate. | ||||||
|  |  | ||||||
| ## Expected behavior | ## Expected Behavior | ||||||
|  |  | ||||||
| The following behaviors are expected and requested of all community members: | The following behaviors are expected and requested of all community members: | ||||||
|  |  | ||||||
| - Participate. In doing so, you contribute to the health and longevity of | * Participate. In doing so, you contribute to the health and longevity of | ||||||
|   the community. |   the community. | ||||||
| - Exercise consideration and respect in your speech and actions. | * Exercise consideration and respect in your speech and actions. | ||||||
| - Attempt collaboration before conflict. Assume good faith. | * Attempt collaboration before conflict. Assume good faith. | ||||||
| - Refrain from demeaning, discriminatory, or harassing behavior and speech. | * Refrain from demeaning, discriminatory, or harassing behavior and speech. | ||||||
| - Take action or alert community leaders if you notice a dangerous | * Take action or alert community leaders if you notice a dangerous | ||||||
|   situation, someone in distress, or violations of this code, even if they |   situation, someone in distress, or violations of this code, even if they | ||||||
|   seem inconsequential. |   seem inconsequential. | ||||||
| - Community event venues may be shared with members of the public; be | * Community event venues may be shared with members of the public; be | ||||||
|   respectful to all patrons of these locations. |   respectful to all patrons of these locations. | ||||||
|  |  | ||||||
| ## Unacceptable behavior | ## Unacceptable Behavior | ||||||
|  |  | ||||||
| The following behaviors are considered harassment and are unacceptable | The following behaviors are considered harassment and are unacceptable | ||||||
| within the Zulip community: | within the Zulip community: | ||||||
|  |  | ||||||
| - Jokes or derogatory language that singles out members of any race, | * Jokes or derogatory language that singles out members of any race, | ||||||
|   ethnicity, culture, national origin, color, immigration status, social and |   ethnicity, culture, national origin, color, immigration status, social and | ||||||
|   economic class, educational level, language proficiency, sex, sexual |   economic class, educational level, language proficiency, sex, sexual | ||||||
|   orientation, gender identity and expression, age, size, family status, |   orientation, gender identity and expression, age, size, family status, | ||||||
|   political belief, religion, and mental and physical ability. |   political belief, religion, and mental and physical ability. | ||||||
| - Violence, threats of violence, or violent language directed against | * Violence, threats of violence, or violent language directed against | ||||||
|   another person. |   another person. | ||||||
| - Disseminating or threatening to disseminate another person's personal | * Disseminating or threatening to disseminate another person's personal | ||||||
|   information. |   information. | ||||||
| - Personal insults of any sort. | * Personal insults of any sort. | ||||||
| - Posting or displaying sexually explicit or violent material. | * Posting or displaying sexually explicit or violent material. | ||||||
| - Inappropriate photography or recording. | * Inappropriate photography or recording. | ||||||
| - Deliberate intimidation, stalking, or following (online or in person). | * Deliberate intimidation, stalking, or following (online or in person). | ||||||
| - Unwelcome sexual attention. This includes sexualized comments or jokes, | * Unwelcome sexual attention. This includes sexualized comments or jokes, | ||||||
|   inappropriate touching or groping, and unwelcomed sexual advances. |   inappropriate touching or groping, and unwelcomed sexual advances. | ||||||
| - Sustained disruption of community events, including talks and | * Sustained disruption of community events, including talks and | ||||||
|   presentations. |   presentations. | ||||||
| - Advocating for, or encouraging, any of the behaviors above. | * Advocating for, or encouraging, any of the behaviors above. | ||||||
|  |  | ||||||
| ## Reporting and enforcement | ## Reporting and Enforcement | ||||||
|  |  | ||||||
| Harassment and other code of conduct violations reduce the value of the | Harassment and other code of conduct violations reduce the value of the | ||||||
| community for everyone. If someone makes you or anyone else feel unsafe or | community for everyone. If someone makes you or anyone else feel unsafe or | ||||||
| @@ -95,78 +95,11 @@ behavior occurring outside the scope of community activities when such | |||||||
| behavior has the potential to adversely affect the safety and well-being of | behavior has the potential to adversely affect the safety and well-being of | ||||||
| community members. | community members. | ||||||
|  |  | ||||||
| ## License and attribution | ## License and Attribution | ||||||
|  |  | ||||||
| This Code of Conduct is adapted from the | This Code of Conduct is adapted from the | ||||||
|  | [Citizen Code of Conduct](http://citizencodeofconduct.org/) and the | ||||||
| [Django Code of Conduct](https://www.djangoproject.com/conduct/), and is | [Django Code of Conduct](https://www.djangoproject.com/conduct/), and is | ||||||
| under a | under a | ||||||
| [Creative Commons BY-SA](https://creativecommons.org/licenses/by-sa/4.0/) | [Creative Commons BY-SA](http://creativecommons.org/licenses/by-sa/4.0/) | ||||||
| license. | license. | ||||||
|  |  | ||||||
| ## Moderating the Zulip community |  | ||||||
|  |  | ||||||
| Anyone can help moderate the Zulip community by helping make sure that folks are |  | ||||||
| aware of the [community guidelines](https://zulip.com/development-community/) |  | ||||||
| and this Code of Conduct, and that we maintain a positive and respectful |  | ||||||
| atmosphere. |  | ||||||
|  |  | ||||||
| Here are some guidelines for you how can help: |  | ||||||
|  |  | ||||||
| - Be friendly! Welcoming folks, thanking them for their feedback, ideas and effort, |  | ||||||
|   and just trying to keep the atmosphere warm make the whole community function |  | ||||||
|   more smoothly. New participants who feel accepted, listened to and respected |  | ||||||
|   are likely to treat others the same way. |  | ||||||
|  |  | ||||||
| - Be familiar with the [community |  | ||||||
|   guidelines](https://zulip.com/development-community/), and cite them liberally |  | ||||||
|   when a user violates them. Be polite but firm. Some examples: |  | ||||||
|  |  | ||||||
|   - @user please note that there is no need to @-mention @\_**Tim Abbott** when |  | ||||||
|     you ask a question. As noted in the [guidelines for this |  | ||||||
|     community](https://zulip.com/development-community/): |  | ||||||
|  |  | ||||||
|     > Use @-mentions sparingly… there is generally no need to @-mention a |  | ||||||
|     > core contributor unless you need their timely attention. |  | ||||||
|  |  | ||||||
|   - @user, please keep in mind the following [community |  | ||||||
|     guideline](https://zulip.com/development-community/): |  | ||||||
|  |  | ||||||
|     > Don’t ask the same question in multiple places. Moderators read every |  | ||||||
|     > public stream, and make sure every question gets a reply. |  | ||||||
|  |  | ||||||
|     I’ve gone ahead and moved the other copy of this message to this thread. |  | ||||||
|  |  | ||||||
|   - If asked a question in a PM that is better discussed in a public stream: |  | ||||||
|     > Hi @user! Please start by reviewing |  | ||||||
|     > https://zulip.com/development-community/#community-norms to learn how to |  | ||||||
|     > get help in this community. |  | ||||||
|  |  | ||||||
| - Users sometimes think chat.zulip.org is a testing instance. When this happens, |  | ||||||
|   kindly direct them to use the **#test here** stream. |  | ||||||
|  |  | ||||||
| - If you see a message that’s posted in the wrong place, go ahead and move it if |  | ||||||
|   you have permissions to do so, even if you don’t plan to respond to it. |  | ||||||
|   Leaving the “Send automated notice to new topic” option enabled helps make it |  | ||||||
|   clear what happened to the person who sent the message. |  | ||||||
|  |  | ||||||
|   If you are responding to a message that's been moved, mention the user in your |  | ||||||
|   reply, so that the mention serves as a notification of the new location for |  | ||||||
|   their conversation. |  | ||||||
|  |  | ||||||
| - If a user is posting spam, please report it to an administrator. They will: |  | ||||||
|  |  | ||||||
|   - Change the user's name to `<name> (spammer)` and deactivate them. |  | ||||||
|   - Delete any spam messages they posted in public streams. |  | ||||||
|  |  | ||||||
| - We care very much about maintaining a respectful tone in our community. If you |  | ||||||
|   see someone being mean or rude, point out that their tone is inappropriate, |  | ||||||
|   and ask them to communicate their perspective in a respectful way in the |  | ||||||
|   future. If you don’t feel comfortable doing so yourself, feel free to ask a |  | ||||||
|   member of Zulip's core team to take care of the situation. |  | ||||||
|  |  | ||||||
| - Try to assume the best intentions from others (given the range of |  | ||||||
|   possibilities presented by their visible behavior), and stick with a friendly |  | ||||||
|   and positive tone even when someone‘s behavior is poor or disrespectful. |  | ||||||
|   Everyone has bad days and stressful situations that can result in them |  | ||||||
|   behaving not their best, and while we should be firm about our community |  | ||||||
|   rules, we should also enforce them with kindness. |  | ||||||
|   | |||||||
							
								
								
									
										557
									
								
								CONTRIBUTING.md
									
									
									
									
									
								
							
							
						
						
									
										557
									
								
								CONTRIBUTING.md
									
									
									
									
									
								
							| @@ -1,36 +1,23 @@ | |||||||
| # Contributing guide | # Contributing to Zulip | ||||||
|  |  | ||||||
| Welcome to the Zulip community! | Welcome to the Zulip community! | ||||||
|  |  | ||||||
| ## Zulip development community | ## Community | ||||||
|  |  | ||||||
| The primary communication forum for the Zulip community is the Zulip | The | ||||||
| server hosted at [chat.zulip.org](https://chat.zulip.org/): | [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html) | ||||||
|  | is the primary communication forum for the Zulip community. It is a good | ||||||
|  | place to start whether you have a question, are a new contributor, are a new | ||||||
|  | user, or anything else. Make sure to read the | ||||||
|  | [community norms](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html#community-norms) | ||||||
|  | before posting. The Zulip community is also governed by a | ||||||
|  | [code of conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html). | ||||||
|  |  | ||||||
| - **Users** and **administrators** of Zulip organizations stop by to | You can subscribe to zulip-devel-announce@googlegroups.com or our | ||||||
|   ask questions, offer feedback, and participate in product design | [Twitter](https://twitter.com/zulip) account for a lower traffic (~1 | ||||||
|   discussions. | email/month) way to hear about things like mentorship opportunities with Google | ||||||
| - **Contributors to the project**, including the **core Zulip | Code-in, in-person sprints at conferences, and other opportunities to | ||||||
|   development team**, discuss ongoing and future projects, brainstorm | contribute. | ||||||
|   ideas, and generally help each other out. |  | ||||||
|  |  | ||||||
| Everyone is welcome to [sign up](https://chat.zulip.org/) and |  | ||||||
| participate — we love hearing from our users! Public streams in the |  | ||||||
| community receive thousands of messages a week. We recommend signing |  | ||||||
| up using the special invite links for |  | ||||||
| [users](https://chat.zulip.org/join/t5crtoe62bpcxyisiyglmtvb/), |  | ||||||
| [self-hosters](https://chat.zulip.org/join/wnhv3jzm6afa4raenedanfno/) |  | ||||||
| and |  | ||||||
| [contributors](https://chat.zulip.org/join/npzwak7vpmaknrhxthna3c7p/) |  | ||||||
| to get a curated list of initial stream subscriptions. |  | ||||||
|  |  | ||||||
| To learn how to get started participating in the community, including [community |  | ||||||
| norms](https://zulip.com/development-community/#community-norms) and [where to |  | ||||||
| post](https://zulip.com/development-community/#where-do-i-send-my-message), |  | ||||||
| check out our [Zulip development community |  | ||||||
| guide](https://zulip.com/development-community/). The Zulip community is |  | ||||||
| governed by a [code of |  | ||||||
| conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html). |  | ||||||
|  |  | ||||||
| ## Ways to contribute | ## Ways to contribute | ||||||
|  |  | ||||||
| @@ -38,283 +25,199 @@ To make a code or documentation contribution, read our | |||||||
| [step-by-step guide](#your-first-codebase-contribution) to getting | [step-by-step guide](#your-first-codebase-contribution) to getting | ||||||
| started with the Zulip codebase. A small sample of the type of work that | started with the Zulip codebase. A small sample of the type of work that | ||||||
| needs doing: | needs doing: | ||||||
|  | * Bug squashing and feature development on our Python/Django | ||||||
| - Bug squashing and feature development on our Python/Django |  | ||||||
|   [backend](https://github.com/zulip/zulip), web |   [backend](https://github.com/zulip/zulip), web | ||||||
|   [frontend](https://github.com/zulip/zulip), React Native |   [frontend](https://github.com/zulip/zulip), React Native | ||||||
|   [mobile app](https://github.com/zulip/zulip-mobile), or Electron |   [mobile app](https://github.com/zulip/zulip-mobile), or Electron | ||||||
|   [desktop app](https://github.com/zulip/zulip-desktop). |   [desktop app](https://github.com/zulip/zulip-desktop). | ||||||
| - Building out our | * Building out our | ||||||
|   [Python API and bots](https://github.com/zulip/python-zulip-api) framework. |   [Python API and bots](https://github.com/zulip/python-zulip-api) framework. | ||||||
| - [Writing an integration](https://zulip.com/api/integrations-overview). | * [Writing an integration](https://zulipchat.com/api/integrations-overview). | ||||||
| - Improving our [user](https://zulip.com/help/) or | * Improving our [user](https://zulipchat.com/help/) or | ||||||
|   [developer](https://zulip.readthedocs.io/en/latest/) documentation. |   [developer](https://zulip.readthedocs.io/en/latest/) documentation. | ||||||
| - [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html) | * [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html) | ||||||
|   and manually testing pull requests. |   and manually testing pull requests. | ||||||
|  |  | ||||||
| **Non-code contributions**: Some of the most valuable ways to contribute | **Non-code contributions**: Some of the most valuable ways to contribute | ||||||
| don't require touching the codebase at all. For example, you can: | don't require touching the codebase at all. We list a few of them below: | ||||||
|  |  | ||||||
| - Report issues, including both [feature | * [Reporting issues](#reporting-issues), including both feature requests and | ||||||
|   requests](https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html) |   bug reports. | ||||||
|   and [bug | * [Giving feedback](#user-feedback) if you are evaluating or using Zulip. | ||||||
|   reports](https://zulip.readthedocs.io/en/latest/contributing/reporting-bugs.html). | * [Translating](https://zulip.readthedocs.io/en/latest/translating/translating.html) | ||||||
| - [Give feedback](#user-feedback) if you are evaluating or using Zulip. |   Zulip. | ||||||
| - [Participate | * [Outreach](#zulip-outreach): Star us on GitHub, upvote us | ||||||
|   thoughtfully](https://zulip.readthedocs.io/en/latest/contributing/design-discussions.html) |   on product comparison sites, or write for [the Zulip blog](http://blog.zulip.org/). | ||||||
|   in design discussions. |  | ||||||
| - [Sponsor Zulip](https://github.com/sponsors/zulip) through the GitHub sponsors program. |  | ||||||
| - [Translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) |  | ||||||
|   Zulip into your language. |  | ||||||
| - [Stay connected](#stay-connected) with Zulip, and [help others |  | ||||||
|   find us](#help-others-find-zulip). |  | ||||||
|  |  | ||||||
| ## Your first codebase contribution | ## Your first (codebase) contribution | ||||||
|  |  | ||||||
| This section has a step by step guide to starting as a Zulip codebase | This section has a step by step guide to starting as a Zulip codebase | ||||||
| contributor. It's long, but don't worry about doing all the steps perfectly; | contributor. It's long, but don't worry about doing all the steps perfectly; | ||||||
| no one gets it right the first time, and there are a lot of people available | no one gets it right the first time, and there are a lot of people available | ||||||
| to help. | to help. | ||||||
|  | * First, make an account on the | ||||||
| - First, make an account on the |   [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html), | ||||||
|   [Zulip community server](https://zulip.com/development-community/), |   paying special attention to the community norms. If you'd like, introduce | ||||||
|   paying special attention to the |   yourself in | ||||||
|   [community norms](https://zulip.com/development-community/#community-norms). |  | ||||||
|   If you'd like, introduce yourself in |  | ||||||
|   [#new members](https://chat.zulip.org/#narrow/stream/95-new-members), using |   [#new members](https://chat.zulip.org/#narrow/stream/95-new-members), using | ||||||
|   your name as the topic. Bonus: tell us about your first impressions of |   your name as the topic. Bonus: tell us about your first impressions of | ||||||
|   Zulip, and anything that felt confusing/broken or interesting/helpful as you |   Zulip, and anything that felt confusing/broken as you started using the | ||||||
|   started using the product. |   product. | ||||||
| - Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor). | * Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor). | ||||||
| - [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html), | * [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html), | ||||||
|   getting help in |   getting help in | ||||||
|   [#provision help](https://chat.zulip.org/#narrow/stream/21-provision-help) |   [#development help](https://chat.zulip.org/#narrow/stream/49-development-help) | ||||||
|   if you run into any troubles. |   if you run into any troubles. | ||||||
| - Familiarize yourself with [using the development environment](https://zulip.readthedocs.io/en/latest/development/using.html). | * Read the | ||||||
| - Go through the [new application feature |   [Zulip guide to Git](https://zulip.readthedocs.io/en/latest/git/index.html) | ||||||
|   tutorial](https://zulip.readthedocs.io/en/latest/tutorials/new-feature-tutorial.html) to get familiar with |   and do the Git tutorial (coming soon) if you are unfamiliar with | ||||||
|   how the Zulip codebase is organized and how to find code in it. |   Git, getting help in | ||||||
| - Read the [Zulip guide to |   [#git help](https://chat.zulip.org/#narrow/stream/44-git-help) if | ||||||
|   Git](https://zulip.readthedocs.io/en/latest/git/index.html) if you |   you run into any troubles.  Be sure to check out the | ||||||
|   are unfamiliar with Git or Zulip's rebase-based Git workflow, |   [extremely useful Zulip-specific tools page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html). | ||||||
|   getting help in [#git | * Sign the | ||||||
|   help](https://chat.zulip.org/#narrow/stream/44-git-help) if you run |   [Dropbox Contributor License Agreement](https://opensource.dropbox.com/cla/). | ||||||
|   into any troubles. Even Git experts should read the [Zulip-specific |  | ||||||
|   Git tools |  | ||||||
|   page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html). |  | ||||||
|  |  | ||||||
| ### Where to look for an issue | ### Picking an issue | ||||||
|  |  | ||||||
| Now you're ready to pick your first issue! Zulip has several repositories you | Now, you're ready to pick your first issue! There are hundreds of open issues | ||||||
| can check out, depending on your interests. There are hundreds of open issues in | in the main codebase alone. This section will help you find an issue to work | ||||||
| the [main Zulip server and web app | on. | ||||||
| repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) |  | ||||||
| alone. |  | ||||||
|  |  | ||||||
| You can look through issues tagged with the "help wanted" label, which is used | * If you're interested in | ||||||
| to indicate the issues that are ready for contributions. Some repositories also |   [mobile](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue), | ||||||
| use the "good first issue" label to tag issues that are especially approachable |   [desktop](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue), | ||||||
| for new contributors. |   or | ||||||
|  |   [bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue) | ||||||
| - [Server and web app](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) |   development, check the respective links for open issues, or post in | ||||||
| - [Mobile apps](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) |   [#mobile](https://chat.zulip.org/#narrow/stream/48-mobile), | ||||||
| - [Desktop app](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) |   [#desktop](https://chat.zulip.org/#narrow/stream/16-desktop), or | ||||||
| - [Terminal app](https://github.com/zulip/zulip-terminal/issues?q=is%3Aopen+is%3Aissue+label%3A"help+wanted") |   [#integration](https://chat.zulip.org/#narrow/stream/127-integrations). | ||||||
| - [Python API bindings and bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | * For the main server and web repository, we recommend browsing | ||||||
|  |   recently opened issues to look for issues you are confident you can | ||||||
| ### Picking an issue to work on |   fix correctly in a way that clearly communicates why your changes | ||||||
|  |   are the correct fix.  Our GitHub workflow bot, zulipbot, limits | ||||||
| There's a lot to learn while making your first pull request, so start small! |   users who have 0 commits merged to claiming a single issue labeled | ||||||
| Many first contributions have fewer than 10 lines of changes (not counting |   with "good first issue" or "help wanted". | ||||||
| changes to tests). | * We also partition all of our issues in the main repo into areas like | ||||||
|  |  | ||||||
| We recommend the following process for finding an issue to work on: |  | ||||||
|  |  | ||||||
| 1. Read the description of an issue tagged with the "help wanted" label and make |  | ||||||
|    sure you understand it. |  | ||||||
| 2. If it seems promising, poke around the product |  | ||||||
|    (on [chat.zulip.org](https://chat.zulip.org) or in the development |  | ||||||
|    environment) until you know how the piece being |  | ||||||
|    described fits into the bigger picture. If after some exploration the |  | ||||||
|    description seems confusing or ambiguous, post a question on the GitHub |  | ||||||
|    issue, as others may benefit from the clarification as well. |  | ||||||
| 3. When you find an issue you like, try to get started working on it. See if you |  | ||||||
|    can find the part of the code you'll need to modify (`git grep` is your |  | ||||||
|    friend!) and get some idea of how you'll approach the problem. |  | ||||||
| 4. If you feel lost, that's OK! Go through these steps again with another issue. |  | ||||||
|    There's plenty to work on, and the exploration you do will help you learn |  | ||||||
|    more about the project. |  | ||||||
|  |  | ||||||
| Note that you are _not_ claiming an issue while you are iterating through steps |  | ||||||
| 1-4. _Before you claim an issue_, you should be confident that you will be able to |  | ||||||
| tackle it effectively. |  | ||||||
|  |  | ||||||
| Additional tips for the [main server and web app |  | ||||||
| repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22): |  | ||||||
|  |  | ||||||
| - We especially recommend browsing recently opened issues, as there are more |  | ||||||
|   likely to be easy ones for you to find. |  | ||||||
| - All issues are partitioned into areas like |  | ||||||
|   admin, compose, emoji, hotkeys, i18n, onboarding, search, etc. Look |   admin, compose, emoji, hotkeys, i18n, onboarding, search, etc. Look | ||||||
|   through our [list of labels](https://github.com/zulip/zulip/labels), and |   through our [list of labels](https://github.com/zulip/zulip/labels), and | ||||||
|   click on some of the `area:` labels to see all the issues related to your |   click on some of the `area:` labels to see all the issues related to your | ||||||
|   areas of interest. |   areas of interest. | ||||||
| - Avoid issues with the "difficult" label unless you | * If the lists of issues are overwhelming, post in | ||||||
|   understand why it is difficult and are highly confident you can resolve the |   [#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with a | ||||||
|   issue correctly and completely. |   bit about your background and interests, and we'll help you out. The most | ||||||
|  |   important thing to say is whether you're looking for a backend (Python), | ||||||
|  |   frontend (JavaScript and TypeScript), mobile (React Native), desktop (Electron), | ||||||
|  |   documentation (English) or visual design (JavaScript/TypeScript + CSS) issue, and a | ||||||
|  |   bit about your programming experience and available time. | ||||||
|  |  | ||||||
| ### Claiming an issue | We also welcome suggestions of features that you feel would be valuable or | ||||||
|  | changes that you feel would make Zulip a better open source project. If you | ||||||
|  | have a new feature you'd like to add, we recommend you start by posting in | ||||||
|  | [#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with the | ||||||
|  | feature idea and the problem that you're hoping to solve. | ||||||
|  |  | ||||||
| #### In the main server/web app repository and Zulip Terminal repository | Other notes: | ||||||
|  | * For a first pull request, it's better to aim for a smaller contribution | ||||||
| The Zulip server/web app repository |   than a bigger one. Many first contributions have fewer than 10 lines of | ||||||
| ([`zulip/zulip`](https://github.com/zulip/zulip/)) and the Zulip Terminal |   changes (not counting changes to tests). | ||||||
| repository ([`zulip/zulip-terminal`](https://github.com/zulip/zulip-terminal/)) | * The full list of issues explicitly looking for a contributor can be | ||||||
| are set up with a GitHub workflow bot called |   found with the | ||||||
| [Zulipbot](https://github.com/zulip/zulipbot), which manages issues and pull |   [good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) | ||||||
| requests in order to create a better workflow for Zulip contributors. |   and | ||||||
|  |   [help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||||
| To claim an issue in these repositories, simply post a comment that says |   labels.  Avoid issues with the "difficult" label unless you | ||||||
| `@zulipbot claim` to the issue thread. If the issue is tagged with a [help |   understand why it is difficult and are confident you can resolve the | ||||||
| wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) |   issue correctly and completely.  Issues without one of these labels | ||||||
| label, Zulipbot will immediately assign the issue to you. |   are fair game if Tim has written a clear technical design proposal | ||||||
|  |   in the issue, or it is a bug that you can reproduce and you are | ||||||
| Note that new contributors can only claim one issue until their first pull request is |   confident you can fix the issue correctly. | ||||||
| merged. This is to encourage folks to finish ongoing work before starting | * For most new contributors, there's a lot to learn while making your first | ||||||
| something new. If you would like to pick up a new issue while waiting for review |   pull request. It's OK if it takes you a while; that's normal! You'll be | ||||||
| on an almost-ready pull request, you can post a comment to this effect on the |   able to work a lot faster as you build experience. | ||||||
| issue you're interested in. |  | ||||||
|  |  | ||||||
| #### In other Zulip repositories |  | ||||||
|  |  | ||||||
| There is no bot for other Zulip repositories |  | ||||||
| ([`zulip/zulip-mobile`](https://github.com/zulip/zulip-mobile/), etc.). If |  | ||||||
| you are interested in claiming an issue in one of these repositories, simply |  | ||||||
| post a comment on the issue thread saying that you'd like to work on it. There |  | ||||||
| is no need to @-mention the issue creator in your comment. |  | ||||||
|  |  | ||||||
| Please follow the same guidelines as described above: find an issue labeled |  | ||||||
| "help wanted", and only pick up one issue at a time to start with. |  | ||||||
|  |  | ||||||
| ### Working on an issue | ### Working on an issue | ||||||
|  |  | ||||||
| You're encouraged to ask questions on how to best implement or debug your | To work on an issue, claim it by adding a comment with `@zulipbot claim` to | ||||||
| changes -- the Zulip maintainers are excited to answer questions to help you | the issue thread. [Zulipbot](https://github.com/zulip/zulipbot) is a GitHub | ||||||
| stay unblocked and working efficiently. You can ask questions in the [Zulip | workflow bot; it will assign you to the issue and label the issue as "in | ||||||
| development community](https://zulip.com/development-community/), or on the | progress". Some additional notes: | ||||||
| GitHub issue or pull request. |  | ||||||
|  |  | ||||||
| To get early feedback on any UI changes, we encourage you to post screenshots of | * You can only claim issues with the | ||||||
| your work in the [#design |   [good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) | ||||||
| stream](https://chat.zulip.org/#narrow/stream/101-design) in the [Zulip |   or | ||||||
| development community](https://zulip.com/development-community/) |   [help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22) | ||||||
|  |   labels. Zulipbot will give you an error if you try to claim an issue | ||||||
|  |   without one of those labels. | ||||||
|  | * You're encouraged to ask questions on how to best implement or debug your | ||||||
|  |   changes -- the Zulip maintainers are excited to answer questions to help | ||||||
|  |   you stay unblocked and working efficiently. You can ask questions on | ||||||
|  |   chat.zulip.org, or on the GitHub issue or pull request. | ||||||
|  | * We encourage early pull requests for work in progress. Prefix the title of | ||||||
|  |   work in progress pull requests with `[WIP]`, and remove the prefix when | ||||||
|  |   you think it might be mergeable and want it to be reviewed. | ||||||
|  | * After updating a PR, add a comment to the GitHub thread mentioning that it | ||||||
|  |   is ready for another review. GitHub only notifies maintainers of the | ||||||
|  |   changes when you post a comment, so if you don't, your PR will likely be | ||||||
|  |   neglected by accident! | ||||||
|  |  | ||||||
| For more advice, see [What makes a great Zulip | ### And beyond | ||||||
| contributor?](#what-makes-a-great-zulip-contributor) below. It's OK if your |  | ||||||
| first issue takes you a while; that's normal! You'll be able to work a lot |  | ||||||
| faster as you build experience. |  | ||||||
|  |  | ||||||
| ### Submitting a pull request | A great place to look for a second issue is to look for issues with the same | ||||||
|  |  | ||||||
| See the [pull request review |  | ||||||
| process](https://zulip.readthedocs.io/en/latest/contributing/review-process.html) |  | ||||||
| guide for detailed instructions on how to submit a pull request, and information |  | ||||||
| on the stages of review your PR will go through. |  | ||||||
|  |  | ||||||
| ### Beyond the first issue |  | ||||||
|  |  | ||||||
| To find a second issue to work on, we recommend looking through issues with the same |  | ||||||
| `area:` label as the last issue you resolved. You'll be able to reuse the | `area:` label as the last issue you resolved. You'll be able to reuse the | ||||||
| work you did learning how that part of the codebase works. Also, the path to | work you did learning how that part of the codebase works. Also, the path to | ||||||
| becoming a core developer often involves taking ownership of one of these area | becoming a core developer often involves taking ownership of one of these area | ||||||
| labels. | labels. | ||||||
|  |  | ||||||
| ### Common questions |  | ||||||
|  |  | ||||||
| - **What if somebody is already working on the issue I want do claim?** There |  | ||||||
|   are lots of issue to work on! If somebody else is actively working on the |  | ||||||
|   issue, you can find a different one, or help with |  | ||||||
|   reviewing their work. |  | ||||||
| - **What if somebody else claims an issue while I'm figuring out whether or not to |  | ||||||
|   work on it?** No worries! You can contribute by providing feedback on |  | ||||||
|   their pull request. If you've made good progress in understanding part of the |  | ||||||
|   codebase, you can also find another "help wanted" issue in the same area to |  | ||||||
|   work on. |  | ||||||
| - **What if there is already a pull request for the issue I want to work on?** |  | ||||||
|   Start by reviewing the existing work. If you agree with the approach, you can |  | ||||||
|   use the existing pull request (PR) as a starting point for your contribution. If |  | ||||||
|   you think a different approach is needed, you can post a new PR, with a comment that clearly |  | ||||||
|   explains _why_ you decided to start from scratch. |  | ||||||
| - **Can I come up with my own feature idea and work on it?** We welcome |  | ||||||
|   suggestions of features or other improvements that you feel would be valuable. If you |  | ||||||
|   have a new feature you'd like to add, you can start a conversation [in our |  | ||||||
|   development community](https://zulip.com/development-community/#where-do-i-send-my-message) |  | ||||||
|   explaining the feature idea and the problem that you're hoping to solve. |  | ||||||
| - **I'm waiting for the next round of review on my PR. Can I pick up |  | ||||||
|   another issue in the meantime?** Someone's first Zulip PR often |  | ||||||
|   requires quite a bit of iteration, so please [make sure your pull |  | ||||||
|   request is reviewable][reviewable-pull-requests] and go through at |  | ||||||
|   least one round of feedback from others before picking up a second |  | ||||||
|   issue. After that, sure! If |  | ||||||
|   [Zulipbot](https://github.com/zulip/zulipbot) does not allow you to |  | ||||||
|   claim an issue, you can post a comment describing the status of your |  | ||||||
|   other work on the issue you're interested in, and asking for the |  | ||||||
|   issue to be assigned to you. Note that addressing feedback on |  | ||||||
|   in-progress PRs should always take priority over starting a new PR. |  | ||||||
| - **I think my PR is done, but it hasn't been merged yet. What's going on?** |  | ||||||
|   1. **Double-check that you have addressed all the feedback**, including any comments |  | ||||||
|      on [Git commit |  | ||||||
|      discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html). |  | ||||||
|   2. If all the feedback has been addressed, did you [leave a |  | ||||||
|      comment](https://zulip.readthedocs.io/en/latest/contributing/review-process.html#how-to-help-move-the-review-process-forward) |  | ||||||
|      explaining that you have done so and **requesting another review**? If not, |  | ||||||
|      it may not be clear to project maintainers or reviewers that your PR is |  | ||||||
|      ready for another look. |  | ||||||
|   3. There may be a pause between initial rounds of review for your PR and final |  | ||||||
|      review by project maintainers. This is normal, and we encourage you to **work |  | ||||||
|      on other issues** while you wait. |  | ||||||
|   4. If you think the PR is ready and haven't seen any updates for a couple |  | ||||||
|      of weeks, it can be helpful to **leave another comment**. Summarize the |  | ||||||
|      overall state of the review process and your work, and indicate that you |  | ||||||
|      are waiting for a review. |  | ||||||
|   5. Finally, **Zulip project maintainers are people too**! They may be busy |  | ||||||
|      with other work, and sometimes they might even take a vacation. ;) It can |  | ||||||
|      occasionally take a few weeks for a PR in the final stages of the review |  | ||||||
|      process to be merged. |  | ||||||
|  |  | ||||||
| [reviewable-pull-requests]: https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html |  | ||||||
|  |  | ||||||
| ## What makes a great Zulip contributor? | ## What makes a great Zulip contributor? | ||||||
|  |  | ||||||
| Zulip has a lot of experience working with new contributors. In our | Zulip runs a lot of [internship programs](#internship-programs), so we have | ||||||
| experience, these are the best predictors of success: | a lot of experience with new contributors. In our experience, these are the | ||||||
|  | best predictors of success: | ||||||
|  |  | ||||||
| - [Asking great questions][great-questions]. It's very hard to answer a general | * Posting good questions. This generally means explaining your current | ||||||
|   question like, "How do I do this issue?" When asking for help, explain your |   understanding, saying what you've done or tried so far, and including | ||||||
|   current understanding, including what you've done or tried so far and where |   tracebacks or other error messages if appropriate. | ||||||
|   you got stuck. Post tracebacks or other error messages if appropriate. For | * Learning and practicing | ||||||
|   more advice, check out [our guide][great-questions]! |   [Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/version-control.html#commit-discipline). | ||||||
| - Learning and practicing | * Submitting carefully tested code. This generally means checking your work | ||||||
|   [Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html). |   through a combination of automated tests and manually clicking around the | ||||||
| - Submitting carefully tested code. See our [detailed guide on how to review |   UI trying to find bugs in your work. See | ||||||
|   code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code) |   [things to look for](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#things-to-look-for) | ||||||
|   (yours or someone else's). |   for additional ideas. | ||||||
| - Posting | * Posting | ||||||
|   [screenshots or GIFs](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html) |   [screenshots or GIFs](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html) | ||||||
|   for frontend changes. |   for frontend changes. | ||||||
| - Working to [make your pull requests easy to | * Being responsive to feedback on pull requests. This means incorporating or | ||||||
|   review](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html). |  | ||||||
| - Clearly describing what you have implemented and why. For example, if your |  | ||||||
|   implementation differs from the issue description in some way or is a partial |  | ||||||
|   step towards the requirements described in the issue, be sure to call |  | ||||||
|   out those differences. |  | ||||||
| - Being responsive to feedback on pull requests. This means incorporating or |  | ||||||
|   responding to all suggested changes, and leaving a note if you won't be |   responding to all suggested changes, and leaving a note if you won't be | ||||||
|   able to address things within a few days. |   able to address things within a few days. | ||||||
| - Being helpful and friendly on the [Zulip community | * Being helpful and friendly on chat.zulip.org. | ||||||
|   server](https://zulip.com/development-community/). |  | ||||||
|  |  | ||||||
| [great-questions]: https://zulip.readthedocs.io/en/latest/contributing/asking-great-questions.html | These are also the main criteria we use to select interns for all of our | ||||||
|  | internship programs. | ||||||
|  |  | ||||||
|  | ## Reporting issues | ||||||
|  |  | ||||||
|  | If you find an easily reproducible bug and/or are experienced in reporting | ||||||
|  | bugs, feel free to just open an issue on the relevant project on GitHub. | ||||||
|  |  | ||||||
|  | If you have a feature request or are not yet sure what the underlying bug | ||||||
|  | is, the best place to post issues is | ||||||
|  | [#issues](https://chat.zulip.org/#narrow/stream/9-issues) (or | ||||||
|  | [#mobile](https://chat.zulip.org/#narrow/stream/48-mobile) or | ||||||
|  | [#desktop](https://chat.zulip.org/#narrow/stream/16-desktop)) on the | ||||||
|  | [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html). | ||||||
|  | This allows us to interactively figure out what is going on, let you know if | ||||||
|  | a similar issue has already been opened, and collect any other information | ||||||
|  | we need. Choose a 2-4 word topic that describes the issue, explain the issue | ||||||
|  | and how to reproduce it if known, your browser/OS if relevant, and a | ||||||
|  | [screenshot or screenGIF](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html) | ||||||
|  | if appropriate. | ||||||
|  |  | ||||||
|  | **Reporting security issues**. Please do not report security issues | ||||||
|  |   publicly, including on public streams on chat.zulip.org. You can email | ||||||
|  |   zulip-security@googlegroups.com. We create a CVE for every security issue. | ||||||
|  |  | ||||||
| ## User feedback | ## User feedback | ||||||
|  |  | ||||||
| @@ -324,67 +227,125 @@ hear about your experience with the product. If you're not sure what to | |||||||
| write, here are some questions we're always very curious to know the answer | write, here are some questions we're always very curious to know the answer | ||||||
| to: | to: | ||||||
|  |  | ||||||
| - Evaluation: What is the process by which your organization chose or will | * Evaluation: What is the process by which your organization chose or will | ||||||
|   choose a group chat product? |   choose a group chat product? | ||||||
| - Pros and cons: What are the pros and cons of Zulip for your organization, | * Pros and cons: What are the pros and cons of Zulip for your organization, | ||||||
|   and the pros and cons of other products you are evaluating? |   and the pros and cons of other products you are evaluating? | ||||||
| - Features: What are the features that are most important for your | * Features: What are the features that are most important for your | ||||||
|   organization? In the best-case scenario, what would your chat solution do |   organization? In the best case scenario, what would your chat solution do | ||||||
|   for you? |   for you? | ||||||
| - Onboarding: If you remember it, what was your impression during your first | * Onboarding: If you remember it, what was your impression during your first | ||||||
|   few minutes of using Zulip? What did you notice, and how did you feel? Was |   few minutes of using Zulip? What did you notice, and how did you feel? Was | ||||||
|   there anything that stood out to you as confusing, or broken, or great? |   there anything that stood out to you as confusing, or broken, or great? | ||||||
| - Organization: What does your organization do? How big is the organization? | * Organization: What does your organization do? How big is the organization? | ||||||
|   A link to your organization's website? |   A link to your organization's website? | ||||||
|  |  | ||||||
| You can contact us in the [#feedback stream of the Zulip development | ## Internship programs | ||||||
| community](https://chat.zulip.org/#narrow/stream/137-feedback) or |  | ||||||
| by emailing [support@zulip.com](mailto:support@zulip.com). |  | ||||||
|  |  | ||||||
| ## Outreach programs | Zulip runs internship programs with | ||||||
|  | [Outreachy](https://www.outreachy.org/), | ||||||
|  | [Google Summer of Code (GSoC)](https://developers.google.com/open-source/gsoc/) | ||||||
|  | [1], and the | ||||||
|  | [MIT Externship program](https://alum.mit.edu/students/NetworkwithAlumni/ExternshipProgram), | ||||||
|  | and has in the past taken summer interns from Harvard, MIT, and | ||||||
|  | Stanford. | ||||||
|  |  | ||||||
| Zulip regularly participates in [Google Summer of Code | While each third-party program has its own rules and requirements, the | ||||||
| (GSoC)](https://developers.google.com/open-source/gsoc/) and | Zulip community's approaches all of these programs with these ideas in | ||||||
| [Outreachy](https://www.outreachy.org/). We have been a GSoC mentoring | mind: | ||||||
| organization since 2016, and we accept 15-20 GSoC participants each summer. In | * We try to make the application process as valuable for the applicant as | ||||||
| the past, we’ve also participated in [Google |   possible. Expect high quality code reviews, a supportive community, and | ||||||
| Code-In](https://developers.google.com/open-source/gci/), and hosted summer |   publicly viewable patches you can link to from your resume, regardless of | ||||||
| interns from Harvard, MIT, and Stanford. |   whether you are selected. | ||||||
|  | * To apply, you'll have to submit at least one pull request to a Zulip | ||||||
|  |   repository.  Most students accepted to one of our programs have | ||||||
|  |   several merged pull requests (including at least one larger PR) by | ||||||
|  |   the time of the application deadline. | ||||||
|  | * The main criteria we use is quality of your best contributions, and | ||||||
|  |   the bullets listed at | ||||||
|  |   [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor). | ||||||
|  |   Because we focus on evaluating your best work, it doesn't hurt your | ||||||
|  |   application to makes mistakes in your first few PRs as long as your | ||||||
|  |   work improves. | ||||||
|  |  | ||||||
| Check out our [outreach programs | Zulip also participates in | ||||||
| overview](https://zulip.readthedocs.io/en/latest/outreach/overview.html) to learn | [Google Code-In](https://developers.google.com/open-source/gci/). Our | ||||||
| more about participating in an outreach program with Zulip. Most of our program | selection criteria for Finalists and Grand Prize Winners is the same as our | ||||||
| participants end up sticking around the project long-term, and many have become | selection criteria for interns above. | ||||||
| core team members, maintaining important parts of the project. We hope you |  | ||||||
| apply! |  | ||||||
|  |  | ||||||
| ## Stay connected | Most of our interns end up sticking around the project long-term, and many | ||||||
|  | quickly become core team members. We hope you apply! | ||||||
|  |  | ||||||
| Even if you are not logging into the development community on a regular basis, | ### Google Summer of Code | ||||||
| you can still stay connected with the project. |  | ||||||
|  |  | ||||||
| - Follow us [on Twitter](https://twitter.com/zulip). | GSoC is by far the largest of our internship programs (14 students in | ||||||
| - Subscribe to [our blog](https://blog.zulip.org/). | 2017; 11 in 2018; 17 in 2019).  While we don't control how many slots | ||||||
| - Join or follow the project [on LinkedIn](https://www.linkedin.com/company/zulip-project/). | Google allocates to Zulip, we hope to mentor a similar number of | ||||||
|  | students in future summers. | ||||||
|  |  | ||||||
| ## Help others find Zulip | If you're reading this well before the application deadline and want | ||||||
|  | to make your application strong, we recommend getting involved in the | ||||||
|  | community and fixing issues in Zulip now. Having good contributions | ||||||
|  | and building a reputation for doing good work is best way to have a | ||||||
|  | strong application.  About half of Zulip's GSoC students for Summer | ||||||
|  | 2017 had made significant contributions to the project by February | ||||||
|  | 2017, and about half had not.  Our | ||||||
|  | [GSoC project ideas page][gsoc-guide] has lots more details on how | ||||||
|  | Zulip does GSoC, as well as project ideas (though the project idea | ||||||
|  | list is maintained only during the GSoC application period, so if | ||||||
|  | you're looking at some other time of year, the project list is likely | ||||||
|  | out-of-date). | ||||||
|  |  | ||||||
| Here are some ways you can help others find Zulip: | We also have in some past years run a Zulip Summer of Code (ZSoC) | ||||||
|  | program for students who we didn't have enough slots to accept for | ||||||
|  | GSoC but were able to find funding for.  Student expectations are the | ||||||
|  | same as with GSoC, and it has no separate application process; your | ||||||
|  | GSoC application is your ZSoC application.  If we'd like to select you | ||||||
|  | for ZSoC, we'll contact you when the GSoC results are announced. | ||||||
|  |  | ||||||
| - Star us on GitHub. There are four main repositories: | [gsoc-guide]: https://zulip.readthedocs.io/en/latest/overview/gsoc-ideas.html | ||||||
|  | [gsoc-faq]: https://developers.google.com/open-source/gsoc/faq | ||||||
|  |  | ||||||
|  | [1] Formally, [GSoC isn't an internship][gsoc-faq], but it is similar | ||||||
|  | enough that we're treating it as such for the purposes of this | ||||||
|  | documentation. | ||||||
|  |  | ||||||
|  | ## Zulip Outreach | ||||||
|  |  | ||||||
|  | **Upvoting Zulip**. Upvotes and reviews make a big difference in the public | ||||||
|  | perception of projects like Zulip. We've collected a few sites below | ||||||
|  | where we know Zulip has been discussed. Doing everything in the following | ||||||
|  | list typically takes about 15 minutes. | ||||||
|  | * Star us on GitHub. There are four main repositories: | ||||||
|   [server/web](https://github.com/zulip/zulip), |   [server/web](https://github.com/zulip/zulip), | ||||||
|   [mobile](https://github.com/zulip/zulip-mobile), |   [mobile](https://github.com/zulip/zulip-mobile), | ||||||
|   [desktop](https://github.com/zulip/zulip-desktop), and |   [desktop](https://github.com/zulip/zulip-desktop), and | ||||||
|   [Python API](https://github.com/zulip/python-zulip-api). |   [Python API](https://github.com/zulip/python-zulip-api). | ||||||
|  | * [Follow us](https://twitter.com/zulip) on Twitter. | ||||||
|  |  | ||||||
| - "Like" and retweet [our tweets](https://twitter.com/zulip). | For both of the following, you'll need to make an account on the site if you | ||||||
|  | don't already have one. | ||||||
|  |  | ||||||
| - Upvote and post feedback on Zulip on comparison websites. A couple specific | * [Like Zulip](https://alternativeto.net/software/zulip-chat-server/) on | ||||||
|   ones to highlight: |   AlternativeTo. We recommend upvoting a couple of other products you like | ||||||
|  |   as well, both to give back to their community, and since single-upvote | ||||||
|  |   accounts are generally given less weight. You can also | ||||||
|  |   [upvote Zulip](https://alternativeto.net/software/slack/) on their page | ||||||
|  |   for Slack. | ||||||
|  | * [Add Zulip to your stack](https://stackshare.io/zulip) on StackShare, star | ||||||
|  |   it, and upvote the reasons why people like Zulip that you find most | ||||||
|  |   compelling. Again, we recommend adding a few other products that you like | ||||||
|  |   as well. | ||||||
|  |  | ||||||
|   - [AlternativeTo](https://alternativeto.net/software/zulip-chat-server/). You can also | We have a doc with more detailed instructions and a few other sites, if you | ||||||
|     [upvote Zulip](https://alternativeto.net/software/slack/) on their page | have been using Zulip for a while and want to contribute more. | ||||||
|     for Slack. |  | ||||||
|   - [Add Zulip to your stack](https://stackshare.io/zulip) on StackShare, star | **Blog posts**. Writing a blog post about your experiences with Zulip, or | ||||||
|     it, and upvote the reasons why people like Zulip that you find most | about a technical aspect of Zulip can be a great way to spread the word | ||||||
|     compelling. | about Zulip. | ||||||
|  |  | ||||||
|  | We also occasionally [publish](http://blog.zulip.org/) longer form | ||||||
|  | articles related to Zulip. Our posts typically get tens of thousands | ||||||
|  | of views, and we always have good ideas for blog posts that we can | ||||||
|  | outline but don't have time to write. If you are an experienced writer | ||||||
|  | or copyeditor, send us a portfolio; we'd love to talk! | ||||||
|   | |||||||
| @@ -1,25 +1,23 @@ | |||||||
| # This is a multiarch Dockerfile.  See https://docs.docker.com/desktop/multi-arch/ | # To build run `docker build -f Dockerfile-postgresql .` from the root of the | ||||||
| # | # zulip repo. | ||||||
| # To set up the first time: |  | ||||||
| #     docker buildx create --name multiarch --use |  | ||||||
| # |  | ||||||
| # To build: |  | ||||||
| #     docker buildx build --platform linux/amd64,linux/arm64 \ |  | ||||||
| #       -f ./Dockerfile-postgresql -t zulip/zulip-postgresql:14 --push . |  | ||||||
|  |  | ||||||
| # Currently the PostgreSQL images do not support automatic upgrading of | # Currently the postgres images do not support automatic upgrading of | ||||||
| # the on-disk data in volumes. So the base image can not currently be upgraded | # the on-disk data in volumes. So the base image can not currently be upgraded | ||||||
| # without users needing a manual pgdump and restore. | # without users needing a manual pgdump and restore. | ||||||
|  |  | ||||||
| # https://hub.docker.com/r/groonga/pgroonga/tags | # Install hunspell, zulip stop words, and run zulip database | ||||||
| ARG PGROONGA_VERSION=latest |  | ||||||
| ARG POSTGRESQL_VERSION=14 |  | ||||||
| FROM groonga/pgroonga:$PGROONGA_VERSION-alpine-$POSTGRESQL_VERSION-slim |  | ||||||
|  |  | ||||||
| # Install hunspell, Zulip stop words, and run Zulip database |  | ||||||
| # init. | # init. | ||||||
| RUN apk add -U --no-cache hunspell-en | FROM postgres:10 | ||||||
| RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix | COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/share/postgresql/$PG_MAJOR/tsearch_data/zulip_english.stop | ||||||
| COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/local/share/postgresql/tsearch_data/zulip_english.stop |  | ||||||
| COPY scripts/setup/create-db.sql /docker-entrypoint-initdb.d/zulip-create-db.sql | COPY scripts/setup/create-db.sql /docker-entrypoint-initdb.d/zulip-create-db.sql | ||||||
| COPY scripts/setup/create-pgroonga.sql /docker-entrypoint-initdb.d/zulip-create-pgroonga.sql | COPY scripts/setup/create-pgroonga.sql /docker-entrypoint-initdb.d/zulip-create-pgroonga.sql | ||||||
|  | COPY scripts/setup/pgroonga-debian.asc /tmp | ||||||
|  | RUN apt-key add /tmp/pgroonga-debian.asc \ | ||||||
|  |     && echo "deb http://packages.groonga.org/debian/ stretch main" > /etc/apt/sources.list.d/zulip.list \ | ||||||
|  |     && apt-get update \ | ||||||
|  |     && DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \ | ||||||
|  |        hunspell-en-us \ | ||||||
|  |        postgresql-${PG_MAJOR}-pgroonga \ | ||||||
|  |     && ln -sf /var/cache/postgresql/dicts/en_us.dict "/usr/share/postgresql/$PG_MAJOR/tsearch_data/en_us.dict" \ | ||||||
|  |     && ln -sf /var/cache/postgresql/dicts/en_us.affix "/usr/share/postgresql/$PG_MAJOR/tsearch_data/en_us.affix" \ | ||||||
|  |     && rm -rf /var/lib/apt/lists/* | ||||||
|   | |||||||
							
								
								
									
										1
									
								
								LICENSE
									
									
									
									
									
								
							
							
						
						
									
										1
									
								
								LICENSE
									
									
									
									
									
								
							| @@ -1,3 +1,4 @@ | |||||||
|  | Copyright 2011-2018 Dropbox, Inc., Kandra Labs, Inc., and contributors | ||||||
|  |  | ||||||
|                                  Apache License |                                  Apache License | ||||||
|                            Version 2.0, January 2004 |                            Version 2.0, January 2004 | ||||||
|   | |||||||
							
								
								
									
										2
									
								
								NOTICE
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								NOTICE
									
									
									
									
									
								
							| @@ -1,5 +1,3 @@ | |||||||
| Copyright 2012–2015 Dropbox, Inc., 2015–2021 Kandra Labs, Inc., and contributors |  | ||||||
|  |  | ||||||
| Licensed under the Apache License, Version 2.0 (the "License"); | Licensed under the Apache License, Version 2.0 (the "License"); | ||||||
| you may not use this project except in compliance with the License. | you may not use this project except in compliance with the License. | ||||||
| You may obtain a copy of the License at | You may obtain a copy of the License at | ||||||
|   | |||||||
							
								
								
									
										121
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										121
									
								
								README.md
									
									
									
									
									
								
							| @@ -1,83 +1,82 @@ | |||||||
| # Zulip overview | # Zulip overview | ||||||
|  |  | ||||||
| [Zulip](https://zulip.com) is an open-source team collaboration tool with unique | Zulip is a powerful, open source group chat application that combines the | ||||||
| [topic-based threading][why-zulip] that combines the best of email and chat to | immediacy of real-time chat with the productivity benefits of threaded | ||||||
| make remote work productive and delightful. Fortune 500 companies, [leading open | conversations. Zulip is used by open source projects, Fortune 500 companies, | ||||||
| source projects][rust-case-study], and thousands of other organizations use | large standards bodies, and others who need a real-time chat system that | ||||||
| Zulip every day. Zulip is the only [modern team chat app][features] that is | allows users to easily process hundreds or thousands of messages a day. With | ||||||
| designed for both live and asynchronous conversations. | over 500 contributors merging over 500 commits a month, Zulip is also the | ||||||
|  | largest and fastest growing open source group chat project. | ||||||
|  |  | ||||||
| Zulip is built by a distributed community of developers from all around the | [](https://circleci.com/gh/zulip/zulip/tree/master) | ||||||
| world, with 74+ people who have each contributed 100+ commits. With | [](https://codecov.io/gh/zulip/zulip/branch/master) | ||||||
| over 1000 contributors merging over 500 commits a month, Zulip is the |  | ||||||
| largest and fastest growing open source team chat project. |  | ||||||
|  |  | ||||||
| Come find us on the [development community chat](https://zulip.com/development-community/)! |  | ||||||
|  |  | ||||||
| [](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amain) |  | ||||||
| [](https://codecov.io/gh/zulip/zulip) |  | ||||||
| [][mypy-coverage] | [][mypy-coverage] | ||||||
| [](https://github.com/charliermarsh/ruff) |  | ||||||
| [](https://github.com/psf/black) |  | ||||||
| [](https://github.com/prettier/prettier) |  | ||||||
| [](https://github.com/zulip/zulip/releases/latest) | [](https://github.com/zulip/zulip/releases/latest) | ||||||
| [](https://zulip.readthedocs.io/en/latest/) | [](https://zulip.readthedocs.io/en/latest/) | ||||||
| [](https://chat.zulip.org) | [](https://chat.zulip.org) | ||||||
| [](https://twitter.com/zulip) | [](https://twitter.com/zulip) | ||||||
| [](https://github.com/sponsors/zulip) |  | ||||||
|  |  | ||||||
| [mypy-coverage]: https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/ | [mypy-coverage]: https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/ | ||||||
| [why-zulip]: https://zulip.com/why-zulip/ |  | ||||||
| [rust-case-study]: https://zulip.com/case-studies/rust/ |  | ||||||
| [features]: https://zulip.com/features/ |  | ||||||
|  |  | ||||||
| ## Getting started | ## Getting started | ||||||
|  |  | ||||||
| - **Contributing code**. Check out our [guide for new | Click on the appropriate link below. If nothing seems to apply, | ||||||
|   contributors](https://zulip.readthedocs.io/en/latest/contributing/contributing.html) | join us on the | ||||||
|   to get started. We have invested in making Zulip’s code highly | [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html) | ||||||
|   readable, thoughtfully tested, and easy to modify. Beyond that, we | and tell us what's up! | ||||||
|   have written an extraordinary 150K words of documentation for Zulip |  | ||||||
|   contributors. |  | ||||||
|  |  | ||||||
| - **Contributing non-code**. [Report an | You might be interested in: | ||||||
|   issue](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#reporting-issues), |  | ||||||
|   [translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) |  | ||||||
|   Zulip into your language, or [give us |  | ||||||
|   feedback](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#user-feedback). |  | ||||||
|   We'd love to hear from you, whether you've been using Zulip for years, or are just |  | ||||||
|   trying it out for the first time. |  | ||||||
|  |  | ||||||
| - **Checking Zulip out**. The best way to see Zulip in action is to drop by the | * **Contributing code**. Check out our | ||||||
|   [Zulip community server](https://zulip.com/development-community/). We also |   [guide for new contributors](https://zulip.readthedocs.io/en/latest/overview/contributing.html) | ||||||
|   recommend reading about Zulip's [unique |   to get started. Zulip prides itself on maintaining a clean and | ||||||
|   approach](https://zulip.com/why-zulip/) to organizing conversations. |   well-tested codebase, and a stock of hundreds of | ||||||
|  |   [beginner-friendly issues][beginner-friendly]. | ||||||
|  |  | ||||||
| - **Running a Zulip server**. Self-host Zulip directly on Ubuntu or Debian | * **Contributing non-code**. | ||||||
|   Linux, in [Docker](https://github.com/zulip/docker-zulip), or with prebuilt |   [Report an issue](https://zulip.readthedocs.io/en/latest/overview/contributing.html#reporting-issue), | ||||||
|   images for [Digital Ocean](https://marketplace.digitalocean.com/apps/zulip) and |   [translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) Zulip | ||||||
|   [Render](https://render.com/docs/deploy-zulip). |   into your language, | ||||||
|   Learn more about [self-hosting Zulip](https://zulip.com/self-hosting/). |   [write](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach) | ||||||
|  |   for the Zulip blog, or | ||||||
|  |   [give us feedback](https://zulip.readthedocs.io/en/latest/overview/contributing.html#user-feedback). We | ||||||
|  |   would love to hear from you, even if you're just trying the product out. | ||||||
|  |  | ||||||
| - **Using Zulip without setting up a server**. Learn about [Zulip | * **Supporting Zulip**. Advocate for your organization to use Zulip, write a | ||||||
|   Cloud](https://zulip.com/plans/) hosting options. Zulip sponsors free [Zulip |   review in the mobile app stores, or | ||||||
|   Cloud Standard](https://zulip.com/plans/) for hundreds of worthy |   [upvote Zulip](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach) on | ||||||
|   organizations, including [fellow open-source |   product comparison sites. | ||||||
|   projects](https://zulip.com/for/open-source/). |  | ||||||
|  |  | ||||||
| - **Participating in [outreach | * **Checking Zulip out**. The best way to see Zulip in action is to drop by | ||||||
|   programs](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#outreach-programs)** |   the | ||||||
|   like [Google Summer of Code](https://developers.google.com/open-source/gsoc/) |   [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html). We | ||||||
|   and [Outreachy](https://www.outreachy.org/). |   also recommend reading Zulip for | ||||||
|  |   [open source](https://zulipchat.com/for/open-source/), Zulip for | ||||||
|  |   [companies](https://zulipchat.com/for/companies/), or Zulip for | ||||||
|  |   [working groups and part time communities](https://zulipchat.com/for/working-groups-and-communities/). | ||||||
|  |  | ||||||
| - **Supporting Zulip**. Advocate for your organization to use Zulip, become a | * **Running a Zulip server**. Use a preconfigured [Digital Ocean droplet](https://marketplace.digitalocean.com/apps/zulip), | ||||||
|   [sponsor](https://github.com/sponsors/zulip), write a review in the mobile app |   [install Zulip](https://zulip.readthedocs.io/en/stable/production/install.html) | ||||||
|   stores, or [help others find |   directly, or use Zulip's | ||||||
|   Zulip](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#help-others-find-zulip). |   experimental [Docker image](https://zulip.readthedocs.io/en/latest/production/deployment.html#zulip-in-docker). | ||||||
|  |   Commercial support is available; see <https://zulipchat.com/plans> for details. | ||||||
|  |  | ||||||
| You may also be interested in reading our [blog](https://blog.zulip.org/), and | * **Using Zulip without setting up a server**. <https://zulipchat.com> offers | ||||||
| following us on [Twitter](https://twitter.com/zulip) and |   free and commercial hosting. | ||||||
| [LinkedIn](https://www.linkedin.com/company/zulip-project/). |  | ||||||
|  |  | ||||||
|  | * **Applying for a Zulip internship**. Zulip runs internship programs with | ||||||
|  |   [Outreachy](https://www.outreachy.org/), | ||||||
|  |   [Google Summer of Code](https://developers.google.com/open-source/gsoc/), | ||||||
|  |   and the | ||||||
|  |   [MIT Externship program](https://alum.mit.edu/students/NetworkwithAlumni/ExternshipProgram). Zulip | ||||||
|  |   also participates in | ||||||
|  |   [Google Code-In](https://developers.google.com/open-source/gci/). More | ||||||
|  |   information is available | ||||||
|  |   [here](https://zulip.readthedocs.io/en/latest/overview/contributing.html#internship-programs). | ||||||
|  |  | ||||||
|  | You may also be interested in reading our [blog](http://blog.zulip.org/) or | ||||||
|  | following us on [twitter](https://twitter.com/zulip). | ||||||
| Zulip is distributed under the | Zulip is distributed under the | ||||||
| [Apache 2.0](https://github.com/zulip/zulip/blob/main/LICENSE) license. | [Apache 2.0](https://github.com/zulip/zulip/blob/master/LICENSE) license. | ||||||
|  |  | ||||||
|  | [beginner-friendly]: https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22 | ||||||
|   | |||||||
							
								
								
									
										37
									
								
								SECURITY.md
									
									
									
									
									
								
							
							
						
						
									
										37
									
								
								SECURITY.md
									
									
									
									
									
								
							| @@ -1,37 +0,0 @@ | |||||||
| # Security policy |  | ||||||
|  |  | ||||||
| ## Reporting a vulnerability |  | ||||||
|  |  | ||||||
| We love responsible reports of (potential) security issues in Zulip, |  | ||||||
| whether in the latest release or our development branch. |  | ||||||
|  |  | ||||||
| Our security contact is security@zulip.com. Reporters should expect a |  | ||||||
| response within 24 hours. |  | ||||||
|  |  | ||||||
| Please include details on the issue and how you'd like to be credited |  | ||||||
| in our release notes when we publish the fix. |  | ||||||
|  |  | ||||||
| Our [security model][security-model] document may be a helpful |  | ||||||
| resource. |  | ||||||
|  |  | ||||||
| ## Security announcements |  | ||||||
|  |  | ||||||
| We send security announcements to our [announcement mailing |  | ||||||
| list](https://groups.google.com/g/zulip-announce). If you are running |  | ||||||
| Zulip in production, you should subscribe, by clicking "Join group" at |  | ||||||
| the top of that page. |  | ||||||
|  |  | ||||||
| ## Supported versions |  | ||||||
|  |  | ||||||
| Zulip provides security support for the latest major release, in the |  | ||||||
| form of minor security/maintenance releases. |  | ||||||
|  |  | ||||||
| We work hard to make [upgrades][upgrades] reliable, so that there's no |  | ||||||
| reason to run older major releases. |  | ||||||
|  |  | ||||||
| See also our documentation on the [Zulip release |  | ||||||
| lifecycle][release-lifecycle]. |  | ||||||
|  |  | ||||||
| [security-model]: https://zulip.readthedocs.io/en/latest/production/security-model.html |  | ||||||
| [upgrades]: https://zulip.readthedocs.io/en/latest/production/upgrade-or-modify.html#upgrading-to-a-release |  | ||||||
| [release-lifecycle]: https://zulip.readthedocs.io/en/latest/overview/release-lifecycle.html |  | ||||||
							
								
								
									
										155
									
								
								Vagrantfile
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										155
									
								
								Vagrantfile
									
									
									
									
										vendored
									
									
								
							| @@ -1,8 +1,53 @@ | |||||||
| # -*- mode: ruby -*- | # -*- mode: ruby -*- | ||||||
|  |  | ||||||
| Vagrant.require_version ">= 2.2.6" | VAGRANTFILE_API_VERSION = "2" | ||||||
|  |  | ||||||
|  | def command?(name) | ||||||
|  |   `which #{name} > /dev/null 2>&1` | ||||||
|  |   $?.success? | ||||||
|  | end | ||||||
|  |  | ||||||
|  | if Vagrant::VERSION == "1.8.7" then | ||||||
|  |     path = `which curl` | ||||||
|  |     if path.include?('/opt/vagrant/embedded/bin/curl') then | ||||||
|  |         puts "In Vagrant 1.8.7, curl is broken. Please use Vagrant 2.0.2 "\ | ||||||
|  |              "or run 'sudo rm -f /opt/vagrant/embedded/bin/curl' to fix the "\ | ||||||
|  |              "issue before provisioning. See "\ | ||||||
|  |              "https://github.com/mitchellh/vagrant/issues/7997 "\ | ||||||
|  |              "for reference." | ||||||
|  |         exit | ||||||
|  |     end | ||||||
|  | end | ||||||
|  |  | ||||||
|  | # Workaround: Vagrant removed the atlas.hashicorp.com to | ||||||
|  | # vagrantcloud.com redirect in February 2018. The value of | ||||||
|  | # DEFAULT_SERVER_URL in Vagrant versions less than 1.9.3 is | ||||||
|  | # atlas.hashicorp.com, which means that removal broke the fetching and | ||||||
|  | # updating of boxes (since the old URL doesn't work).  See | ||||||
|  | # https://github.com/hashicorp/vagrant/issues/9442 | ||||||
|  | if Vagrant::DEFAULT_SERVER_URL == "atlas.hashicorp.com" | ||||||
|  |   Vagrant::DEFAULT_SERVER_URL.replace('https://vagrantcloud.com') | ||||||
|  | end | ||||||
|  |  | ||||||
|  | # Monkey patch https://github.com/hashicorp/vagrant/pull/10879 so we | ||||||
|  | # can fall back to another provider if docker is not installed. | ||||||
|  | begin | ||||||
|  |   require Vagrant.source_root.join("plugins", "providers", "docker", "provider") | ||||||
|  | rescue LoadError | ||||||
|  | else | ||||||
|  |   VagrantPlugins::DockerProvider::Provider.class_eval do | ||||||
|  |     method(:usable?).owner == singleton_class or def self.usable?(raise_error=false) | ||||||
|  |       VagrantPlugins::DockerProvider::Driver.new.execute("docker", "version") | ||||||
|  |       true | ||||||
|  |     rescue Vagrant::Errors::CommandUnavailable, VagrantPlugins::DockerProvider::Errors::ExecuteError | ||||||
|  |       raise if raise_error | ||||||
|  |       return false | ||||||
|  |     end | ||||||
|  |   end | ||||||
|  | end | ||||||
|  |  | ||||||
|  | Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| | ||||||
|  |  | ||||||
| Vagrant.configure("2") do |config| |  | ||||||
|   # The Zulip development environment runs on 9991 on the guest. |   # The Zulip development environment runs on 9991 on the guest. | ||||||
|   host_port = 9991 |   host_port = 9991 | ||||||
|   http_proxy = https_proxy = no_proxy = nil |   http_proxy = https_proxy = no_proxy = nil | ||||||
| @@ -13,20 +58,17 @@ Vagrant.configure("2") do |config| | |||||||
|   vm_memory = "2048" |   vm_memory = "2048" | ||||||
|  |  | ||||||
|   ubuntu_mirror = "" |   ubuntu_mirror = "" | ||||||
|   vboxadd_version = nil |  | ||||||
|  |  | ||||||
|   config.vm.box = "bento/ubuntu-20.04" |  | ||||||
|  |  | ||||||
|   config.vm.synced_folder ".", "/vagrant", disabled: true |   config.vm.synced_folder ".", "/vagrant", disabled: true | ||||||
|   config.vm.synced_folder ".", "/srv/zulip", docker_consistency: "z" |   config.vm.synced_folder ".", "/srv/zulip" | ||||||
|  |  | ||||||
|   vagrant_config_file = ENV["HOME"] + "/.zulip-vagrant-config" |   vagrant_config_file = ENV['HOME'] + "/.zulip-vagrant-config" | ||||||
|   if File.file?(vagrant_config_file) |   if File.file?(vagrant_config_file) | ||||||
|     IO.foreach(vagrant_config_file) do |line| |     IO.foreach(vagrant_config_file) do |line| | ||||||
|       line.chomp! |       line.chomp! | ||||||
|       key, value = line.split(nil, 2) |       key, value = line.split(nil, 2) | ||||||
|       case key |       case key | ||||||
|       when /^([#;]|$)/ # ignore comments |       when /^([#;]|$)/; # ignore comments | ||||||
|       when "HTTP_PROXY"; http_proxy = value |       when "HTTP_PROXY"; http_proxy = value | ||||||
|       when "HTTPS_PROXY"; https_proxy = value |       when "HTTPS_PROXY"; https_proxy = value | ||||||
|       when "NO_PROXY"; no_proxy = value |       when "NO_PROXY"; no_proxy = value | ||||||
| @@ -35,7 +77,6 @@ Vagrant.configure("2") do |config| | |||||||
|       when "GUEST_CPUS"; vm_num_cpus = value |       when "GUEST_CPUS"; vm_num_cpus = value | ||||||
|       when "GUEST_MEMORY_MB"; vm_memory = value |       when "GUEST_MEMORY_MB"; vm_memory = value | ||||||
|       when "UBUNTU_MIRROR"; ubuntu_mirror = value |       when "UBUNTU_MIRROR"; ubuntu_mirror = value | ||||||
|       when "VBOXADD_VERSION"; vboxadd_version = value |  | ||||||
|       end |       end | ||||||
|     end |     end | ||||||
|   end |   end | ||||||
| @@ -53,9 +94,9 @@ Vagrant.configure("2") do |config| | |||||||
|   elsif !http_proxy.nil? or !https_proxy.nil? |   elsif !http_proxy.nil? or !https_proxy.nil? | ||||||
|     # This prints twice due to https://github.com/hashicorp/vagrant/issues/7504 |     # This prints twice due to https://github.com/hashicorp/vagrant/issues/7504 | ||||||
|     # We haven't figured out a workaround. |     # We haven't figured out a workaround. | ||||||
|     puts "You have specified value for proxy in ~/.zulip-vagrant-config file but did not " \ |     puts 'You have specified value for proxy in ~/.zulip-vagrant-config file but did not ' \ | ||||||
|          "install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install " \ |          'install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install ' \ | ||||||
|          "vagrant-proxyconf` in a terminal.  This error will appear twice." |          'vagrant-proxyconf` in a terminal.  This error will appear twice.' | ||||||
|     exit |     exit | ||||||
|   end |   end | ||||||
|  |  | ||||||
| @@ -63,7 +104,6 @@ Vagrant.configure("2") do |config| | |||||||
|   config.vm.network "forwarded_port", guest: 9994, host: host_port + 3, host_ip: host_ip_addr |   config.vm.network "forwarded_port", guest: 9994, host: host_port + 3, host_ip: host_ip_addr | ||||||
|   # Specify Docker provider before VirtualBox provider so it's preferred. |   # Specify Docker provider before VirtualBox provider so it's preferred. | ||||||
|   config.vm.provider "docker" do |d, override| |   config.vm.provider "docker" do |d, override| | ||||||
|     override.vm.box = nil |  | ||||||
|     d.build_dir = File.join(__dir__, "tools", "setup", "dev-vagrant-docker") |     d.build_dir = File.join(__dir__, "tools", "setup", "dev-vagrant-docker") | ||||||
|     d.build_args = ["--build-arg", "VAGRANT_UID=#{Process.uid}"] |     d.build_args = ["--build-arg", "VAGRANT_UID=#{Process.uid}"] | ||||||
|     if !ubuntu_mirror.empty? |     if !ubuntu_mirror.empty? | ||||||
| @@ -74,35 +114,82 @@ Vagrant.configure("2") do |config| | |||||||
|   end |   end | ||||||
|  |  | ||||||
|   config.vm.provider "virtualbox" do |vb, override| |   config.vm.provider "virtualbox" do |vb, override| | ||||||
|  |     override.vm.box = "ubuntu/bionic64" | ||||||
|  |     # An unnecessary log file gets generated when running vagrant up for the | ||||||
|  |     # first time with the Ubuntu Bionic box. This looks like it is being | ||||||
|  |     # caused upstream by the base box containing a Vagrantfile with a similar | ||||||
|  |     # line to the one below. | ||||||
|  |     # see https://github.com/hashicorp/vagrant/issues/9425 | ||||||
|  |     vb.customize [ "modifyvm", :id, "--uartmode1", "disconnected" ] | ||||||
|     # It's possible we can get away with just 1.5GB; more testing needed |     # It's possible we can get away with just 1.5GB; more testing needed | ||||||
|     vb.memory = vm_memory |     vb.memory = vm_memory | ||||||
|     vb.cpus = vm_num_cpus |     vb.cpus = vm_num_cpus | ||||||
|  |  | ||||||
|     if !vboxadd_version.nil? |  | ||||||
|       override.vbguest.installer = Class.new(VagrantVbguest::Installers::Ubuntu) do |  | ||||||
|         define_method(:host_version) do |reload = false| |  | ||||||
|           VagrantVbguest::Version(vboxadd_version) |  | ||||||
|         end |  | ||||||
|       end |  | ||||||
|       override.vbguest.allow_downgrade = true |  | ||||||
|       override.vbguest.iso_path = "https://download.virtualbox.org/virtualbox/#{vboxadd_version}/VBoxGuestAdditions_#{vboxadd_version}.iso" |  | ||||||
|     end |  | ||||||
|   end |   end | ||||||
|  |  | ||||||
|   config.vm.provider "hyperv" do |h, override| | $provision_script = <<SCRIPT | ||||||
|     h.memory = vm_memory | set -x | ||||||
|     h.maxmemory = vm_memory | set -e | ||||||
|     h.cpus = vm_num_cpus | set -o pipefail | ||||||
|   end |  | ||||||
|  |  | ||||||
|   config.vm.provider "parallels" do |prl, override| | # Code should go here, rather than tools/provision, only if it is | ||||||
|     prl.memory = vm_memory | # something that we don't want to happen when running provision in a | ||||||
|     prl.cpus = vm_num_cpus | # development environment not using Vagrant. | ||||||
|   end |  | ||||||
|  | # Set the Ubuntu mirror | ||||||
|  | [ ! '#{ubuntu_mirror}' ] || sudo sed -i 's|http://\\(\\w*\\.\\)*archive\\.ubuntu\\.com/ubuntu/\\? |#{ubuntu_mirror} |' /etc/apt/sources.list | ||||||
|  |  | ||||||
|  | # Set the MOTD on the system to have Zulip instructions | ||||||
|  | sudo ln -nsf /srv/zulip/tools/setup/dev-motd /etc/update-motd.d/99-zulip-dev | ||||||
|  | sudo rm -f /etc/update-motd.d/10-help-text | ||||||
|  | sudo dpkg --purge landscape-client landscape-common ubuntu-release-upgrader-core update-manager-core update-notifier-common ubuntu-server | ||||||
|  | sudo dpkg-divert --add --rename /etc/default/motd-news | ||||||
|  | sudo sh -c 'echo ENABLED=0 > /etc/default/motd-news' | ||||||
|  |  | ||||||
|  | # If the host is running SELinux remount the /sys/fs/selinux directory as read only, | ||||||
|  | # needed for apt-get to work. | ||||||
|  | if [ -d "/sys/fs/selinux" ]; then | ||||||
|  |     sudo mount -o remount,ro /sys/fs/selinux | ||||||
|  | fi | ||||||
|  |  | ||||||
|  | # Set default locale, this prevents errors if the user has another locale set. | ||||||
|  | if ! grep -q 'LC_ALL=en_US.UTF-8' /etc/default/locale; then | ||||||
|  |     echo "LC_ALL=en_US.UTF-8" | sudo tee -a /etc/default/locale | ||||||
|  | fi | ||||||
|  |  | ||||||
|  | # Set an environment variable, so that we won't print the virtualenv | ||||||
|  | # shell warning (it'll be wrong, since the shell is dying anyway) | ||||||
|  | export SKIP_VENV_SHELL_WARNING=1 | ||||||
|  |  | ||||||
|  | # End `set -x`, so that the end of provision doesn't look like an error | ||||||
|  | # message after a successful run. | ||||||
|  | set +x | ||||||
|  |  | ||||||
|  | # Check if the zulip directory is writable | ||||||
|  | if [ ! -w /srv/zulip ]; then | ||||||
|  |     echo "The vagrant user is unable to write to the zulip directory." | ||||||
|  |     echo "To fix this, run the following commands on the host machine:" | ||||||
|  |     # sudo is required since our uid is not 1000 | ||||||
|  |     echo '    vagrant halt -f' | ||||||
|  |     echo '    rm -rf /PATH/TO/ZULIP/CLONE/.vagrant' | ||||||
|  |     echo '    sudo chown -R 1000:$(id -g) /PATH/TO/ZULIP/CLONE' | ||||||
|  |     echo "Replace /PATH/TO/ZULIP/CLONE with the path to where zulip code is cloned." | ||||||
|  |     echo "You can resume setting up your vagrant environment by running:" | ||||||
|  |     echo "    vagrant up" | ||||||
|  |     exit 1 | ||||||
|  | fi | ||||||
|  | # Provision the development environment | ||||||
|  | ln -nsf /srv/zulip ~/zulip | ||||||
|  | /srv/zulip/tools/provision | ||||||
|  |  | ||||||
|  | # Run any custom provision hooks the user has configured | ||||||
|  | if [ -f /srv/zulip/tools/custom_provision ]; then | ||||||
|  |     chmod +x /srv/zulip/tools/custom_provision | ||||||
|  |     /srv/zulip/tools/custom_provision | ||||||
|  | fi | ||||||
|  | SCRIPT | ||||||
|  |  | ||||||
|   config.vm.provision "shell", |   config.vm.provision "shell", | ||||||
|     # We want provision to be run with the permissions of the vagrant user. |     # We want provision to be run with the permissions of the vagrant user. | ||||||
|     privileged: false, |     privileged: false, | ||||||
|     path: "tools/setup/vagrant-provision", |     inline: $provision_script | ||||||
|     env: { "UBUNTU_MIRROR" => ubuntu_mirror } |  | ||||||
| end | end | ||||||
|   | |||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -4,19 +4,11 @@ from typing import List | |||||||
|  |  | ||||||
| from analytics.lib.counts import CountStat | from analytics.lib.counts import CountStat | ||||||
|  |  | ||||||
|  | def generate_time_series_data(days: int=100, business_hours_base: float=10, | ||||||
| def generate_time_series_data( |                               non_business_hours_base: float=10, growth: float=1, | ||||||
|     days: int = 100, |                               autocorrelation: float=0, spikiness: float=1, | ||||||
|     business_hours_base: float = 10, |                               holiday_rate: float=0, frequency: str=CountStat.DAY, | ||||||
|     non_business_hours_base: float = 10, |                               partial_sum: bool=False, random_seed: int=26) -> List[int]: | ||||||
|     growth: float = 1, |  | ||||||
|     autocorrelation: float = 0, |  | ||||||
|     spikiness: float = 1, |  | ||||||
|     holiday_rate: float = 0, |  | ||||||
|     frequency: str = CountStat.DAY, |  | ||||||
|     partial_sum: bool = False, |  | ||||||
|     random_seed: int = 26, |  | ||||||
| ) -> List[int]: |  | ||||||
|     """ |     """ | ||||||
|     Generate semi-realistic looking time series data for testing analytics graphs. |     Generate semi-realistic looking time series data for testing analytics graphs. | ||||||
|  |  | ||||||
| @@ -37,43 +29,35 @@ def generate_time_series_data( | |||||||
|     random_seed -- Seed for random number generator. |     random_seed -- Seed for random number generator. | ||||||
|     """ |     """ | ||||||
|     if frequency == CountStat.HOUR: |     if frequency == CountStat.HOUR: | ||||||
|         length = days * 24 |         length = days*24 | ||||||
|         seasonality = [non_business_hours_base] * 24 * 7 |         seasonality = [non_business_hours_base] * 24 * 7 | ||||||
|         for day in range(5): |         for day in range(5): | ||||||
|             for hour in range(8): |             for hour in range(8): | ||||||
|                 seasonality[24 * day + hour] = business_hours_base |                 seasonality[24*day + hour] = business_hours_base | ||||||
|         holidays = [] |         holidays  = [] | ||||||
|         for i in range(days): |         for i in range(days): | ||||||
|             holidays.extend([random() < holiday_rate] * 24) |             holidays.extend([random() < holiday_rate] * 24) | ||||||
|     elif frequency == CountStat.DAY: |     elif frequency == CountStat.DAY: | ||||||
|         length = days |         length = days | ||||||
|         seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [ |         seasonality = [8*business_hours_base + 16*non_business_hours_base] * 5 + \ | ||||||
|             24 * non_business_hours_base |                       [24*non_business_hours_base] * 2 | ||||||
|         ] * 2 |  | ||||||
|         holidays = [random() < holiday_rate for i in range(days)] |         holidays = [random() < holiday_rate for i in range(days)] | ||||||
|     else: |     else: | ||||||
|         raise AssertionError(f"Unknown frequency: {frequency}") |         raise AssertionError("Unknown frequency: %s" % (frequency,)) | ||||||
|     if length < 2: |     if length < 2: | ||||||
|         raise AssertionError( |         raise AssertionError("Must be generating at least 2 data points. " | ||||||
|             f"Must be generating at least 2 data points. Currently generating {length}" |                              "Currently generating %s" % (length,)) | ||||||
|         ) |     growth_base = growth ** (1. / (length-1)) | ||||||
|     growth_base = growth ** (1.0 / (length - 1)) |     values_no_noise = [seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)] | ||||||
|     values_no_noise = [ |  | ||||||
|         seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length) |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     seed(random_seed) |     seed(random_seed) | ||||||
|     noise_scalars = [gauss(0, 1)] |     noise_scalars = [gauss(0, 1)] | ||||||
|     for i in range(1, length): |     for i in range(1, length): | ||||||
|         noise_scalars.append( |         noise_scalars.append(noise_scalars[-1]*autocorrelation + gauss(0, 1)*(1-autocorrelation)) | ||||||
|             noise_scalars[-1] * autocorrelation + gauss(0, 1) * (1 - autocorrelation) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     values = [ |     values = [0 if holiday else int(v + sqrt(v)*noise_scalar*spikiness) | ||||||
|         0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness) |               for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)] | ||||||
|         for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays) |  | ||||||
|     ] |  | ||||||
|     if partial_sum: |     if partial_sum: | ||||||
|         for i in range(1, length): |         for i in range(1, length): | ||||||
|             values[i] = values[i - 1] + values[i] |             values[i] = values[i-1] + values[i] | ||||||
|     return [max(v, 0) for v in values] |     return [max(v, 0) for v in values] | ||||||
|   | |||||||
| @@ -4,14 +4,12 @@ from typing import List, Optional | |||||||
| from analytics.lib.counts import CountStat | from analytics.lib.counts import CountStat | ||||||
| from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC | from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC | ||||||
|  |  | ||||||
|  |  | ||||||
| # If min_length is None, returns end_times from ceiling(start) to floor(end), inclusive. | # If min_length is None, returns end_times from ceiling(start) to floor(end), inclusive. | ||||||
| # If min_length is greater than 0, pads the list to the left. | # If min_length is greater than 0, pads the list to the left. | ||||||
| # So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22], | # So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22], | ||||||
| # and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22] | # and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22] | ||||||
| def time_range( | def time_range(start: datetime, end: datetime, frequency: str, | ||||||
|     start: datetime, end: datetime, frequency: str, min_length: Optional[int] |                min_length: Optional[int]) -> List[datetime]: | ||||||
| ) -> List[datetime]: |  | ||||||
|     verify_UTC(start) |     verify_UTC(start) | ||||||
|     verify_UTC(end) |     verify_UTC(end) | ||||||
|     if frequency == CountStat.HOUR: |     if frequency == CountStat.HOUR: | ||||||
| @@ -21,11 +19,11 @@ def time_range( | |||||||
|         end = floor_to_day(end) |         end = floor_to_day(end) | ||||||
|         step = timedelta(days=1) |         step = timedelta(days=1) | ||||||
|     else: |     else: | ||||||
|         raise AssertionError(f"Unknown frequency: {frequency}") |         raise AssertionError("Unknown frequency: %s" % (frequency,)) | ||||||
|  |  | ||||||
|     times = [] |     times = [] | ||||||
|     if min_length is not None: |     if min_length is not None: | ||||||
|         start = min(start, end - (min_length - 1) * step) |         start = min(start, end - (min_length-1)*step) | ||||||
|     current = end |     current = end | ||||||
|     while current >= start: |     while current >= start: | ||||||
|         times.append(current) |         times.append(current) | ||||||
|   | |||||||
							
								
								
									
										81
									
								
								analytics/management/commands/analyze_mit.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										81
									
								
								analytics/management/commands/analyze_mit.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,81 @@ | |||||||
|  | import datetime | ||||||
|  | import logging | ||||||
|  | import time | ||||||
|  | from typing import Any, Dict | ||||||
|  |  | ||||||
|  | from django.core.management.base import BaseCommand, CommandParser | ||||||
|  |  | ||||||
|  | from zerver.lib.timestamp import timestamp_to_datetime | ||||||
|  | from zerver.models import Message, Recipient | ||||||
|  |  | ||||||
|  | def compute_stats(log_level: int) -> None: | ||||||
|  |     logger = logging.getLogger() | ||||||
|  |     logger.setLevel(log_level) | ||||||
|  |  | ||||||
|  |     one_week_ago = timestamp_to_datetime(time.time()) - datetime.timedelta(weeks=1) | ||||||
|  |     mit_query = Message.objects.filter(sender__realm__string_id="zephyr", | ||||||
|  |                                        recipient__type=Recipient.STREAM, | ||||||
|  |                                        date_sent__gt=one_week_ago) | ||||||
|  |     for bot_sender_start in ["imap.", "rcmd.", "sys."]: | ||||||
|  |         mit_query = mit_query.exclude(sender__email__startswith=(bot_sender_start)) | ||||||
|  |     # Filtering for "/" covers tabbott/extra@ and all the daemon/foo bots. | ||||||
|  |     mit_query = mit_query.exclude(sender__email__contains=("/")) | ||||||
|  |     mit_query = mit_query.exclude(sender__email__contains=("aim.com")) | ||||||
|  |     mit_query = mit_query.exclude( | ||||||
|  |         sender__email__in=["rss@mit.edu", "bash@mit.edu", "apache@mit.edu", | ||||||
|  |                            "bitcoin@mit.edu", "lp@mit.edu", "clocks@mit.edu", | ||||||
|  |                            "root@mit.edu", "nagios@mit.edu", | ||||||
|  |                            "www-data|local-realm@mit.edu"]) | ||||||
|  |     user_counts = {}  # type: Dict[str, Dict[str, int]] | ||||||
|  |     for m in mit_query.select_related("sending_client", "sender"): | ||||||
|  |         email = m.sender.email | ||||||
|  |         user_counts.setdefault(email, {}) | ||||||
|  |         user_counts[email].setdefault(m.sending_client.name, 0) | ||||||
|  |         user_counts[email][m.sending_client.name] += 1 | ||||||
|  |  | ||||||
|  |     total_counts = {}  # type: Dict[str, int] | ||||||
|  |     total_user_counts = {}  # type: Dict[str, int] | ||||||
|  |     for email, counts in user_counts.items(): | ||||||
|  |         total_user_counts.setdefault(email, 0) | ||||||
|  |         for client_name, count in counts.items(): | ||||||
|  |             total_counts.setdefault(client_name, 0) | ||||||
|  |             total_counts[client_name] += count | ||||||
|  |             total_user_counts[email] += count | ||||||
|  |  | ||||||
|  |     logging.debug("%40s | %10s | %s" % ("User", "Messages", "Percentage Zulip")) | ||||||
|  |     top_percents = {}  # type: Dict[int, float] | ||||||
|  |     for size in [10, 25, 50, 100, 200, len(total_user_counts.keys())]: | ||||||
|  |         top_percents[size] = 0.0 | ||||||
|  |     for i, email in enumerate(sorted(total_user_counts.keys(), | ||||||
|  |                                      key=lambda x: -total_user_counts[x])): | ||||||
|  |         percent_zulip = round(100 - (user_counts[email].get("zephyr_mirror", 0)) * 100. / | ||||||
|  |                               total_user_counts[email], 1) | ||||||
|  |         for size in top_percents.keys(): | ||||||
|  |             top_percents.setdefault(size, 0) | ||||||
|  |             if i < size: | ||||||
|  |                 top_percents[size] += (percent_zulip * 1.0 / size) | ||||||
|  |  | ||||||
|  |         logging.debug("%40s | %10s | %s%%" % (email, total_user_counts[email], | ||||||
|  |                                               percent_zulip)) | ||||||
|  |  | ||||||
|  |     logging.info("") | ||||||
|  |     for size in sorted(top_percents.keys()): | ||||||
|  |         logging.info("Top %6s | %s%%" % (size, round(top_percents[size], 1))) | ||||||
|  |  | ||||||
|  |     grand_total = sum(total_counts.values()) | ||||||
|  |     print(grand_total) | ||||||
|  |     logging.info("%15s | %s" % ("Client", "Percentage")) | ||||||
|  |     for client in total_counts.keys(): | ||||||
|  |         logging.info("%15s | %s%%" % (client, round(100. * total_counts[client] / grand_total, 1))) | ||||||
|  |  | ||||||
|  | class Command(BaseCommand): | ||||||
|  |     help = "Compute statistics on MIT Zephyr usage." | ||||||
|  |  | ||||||
|  |     def add_arguments(self, parser: CommandParser) -> None: | ||||||
|  |         parser.add_argument('--verbose', default=False, action='store_true') | ||||||
|  |  | ||||||
|  |     def handle(self, *args: Any, **options: Any) -> None: | ||||||
|  |         level = logging.INFO | ||||||
|  |         if options["verbose"]: | ||||||
|  |             level = logging.DEBUG | ||||||
|  |         compute_stats(level) | ||||||
							
								
								
									
										56
									
								
								analytics/management/commands/analyze_user_activity.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										56
									
								
								analytics/management/commands/analyze_user_activity.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,56 @@ | |||||||
|  | import datetime | ||||||
|  | from typing import Any, Dict | ||||||
|  |  | ||||||
|  | from django.core.management.base import BaseCommand, CommandParser | ||||||
|  | from django.utils.timezone import utc | ||||||
|  |  | ||||||
|  | from zerver.lib.statistics import seconds_usage_between | ||||||
|  | from zerver.models import UserProfile | ||||||
|  |  | ||||||
|  | def analyze_activity(options: Dict[str, Any]) -> None: | ||||||
|  |     day_start = datetime.datetime.strptime(options["date"], "%Y-%m-%d").replace(tzinfo=utc) | ||||||
|  |     day_end = day_start + datetime.timedelta(days=options["duration"]) | ||||||
|  |  | ||||||
|  |     user_profile_query = UserProfile.objects.all() | ||||||
|  |     if options["realm"]: | ||||||
|  |         user_profile_query = user_profile_query.filter(realm__string_id=options["realm"]) | ||||||
|  |  | ||||||
|  |     print("Per-user online duration:\n") | ||||||
|  |     total_duration = datetime.timedelta(0) | ||||||
|  |     for user_profile in user_profile_query: | ||||||
|  |         duration = seconds_usage_between(user_profile, day_start, day_end) | ||||||
|  |  | ||||||
|  |         if duration == datetime.timedelta(0): | ||||||
|  |             continue | ||||||
|  |  | ||||||
|  |         total_duration += duration | ||||||
|  |         print("%-*s%s" % (37, user_profile.email, duration,)) | ||||||
|  |  | ||||||
|  |     print("\nTotal Duration:                      %s" % (total_duration,)) | ||||||
|  |     print("\nTotal Duration in minutes:           %s" % (total_duration.total_seconds() / 60.,)) | ||||||
|  |     print("Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,)) | ||||||
|  |  | ||||||
|  | class Command(BaseCommand): | ||||||
|  |     help = """Report analytics of user activity on a per-user and realm basis. | ||||||
|  |  | ||||||
|  | This command aggregates user activity data that is collected by each user using Zulip. It attempts | ||||||
|  | to approximate how much each user has been using Zulip per day, measured by recording each 15 minute | ||||||
|  | period where some activity has occurred (mouse move or keyboard activity). | ||||||
|  |  | ||||||
|  | It will correctly not count server-initiated reloads in the activity statistics. | ||||||
|  |  | ||||||
|  | The duration flag can be used to control how many days to show usage duration for | ||||||
|  |  | ||||||
|  | Usage: ./manage.py analyze_user_activity [--realm=zulip] [--date=2013-09-10] [--duration=1] | ||||||
|  |  | ||||||
|  | By default, if no date is selected 2013-09-10 is used. If no realm is provided, information | ||||||
|  | is shown for all realms""" | ||||||
|  |  | ||||||
|  |     def add_arguments(self, parser: CommandParser) -> None: | ||||||
|  |         parser.add_argument('--realm', action='store') | ||||||
|  |         parser.add_argument('--date', action='store', default="2013-09-06") | ||||||
|  |         parser.add_argument('--duration', action='store', default=1, type=int, | ||||||
|  |                             help="How many days to show usage information for") | ||||||
|  |  | ||||||
|  |     def handle(self, *args: Any, **options: Any) -> None: | ||||||
|  |         analyze_activity(options) | ||||||
| @@ -1,24 +1,26 @@ | |||||||
| import os |  | ||||||
| import time |  | ||||||
| from datetime import timedelta | from datetime import timedelta | ||||||
| from typing import Any, Dict |  | ||||||
|  |  | ||||||
| from django.core.management.base import BaseCommand | from django.core.management.base import BaseCommand | ||||||
| from django.utils.timezone import now as timezone_now | from django.utils.timezone import now as timezone_now | ||||||
|  |  | ||||||
|  | from analytics.models import installation_epoch, \ | ||||||
|  |     last_successful_fill | ||||||
| from analytics.lib.counts import COUNT_STATS, CountStat | from analytics.lib.counts import COUNT_STATS, CountStat | ||||||
| from analytics.models import installation_epoch | from zerver.lib.timestamp import floor_to_hour, floor_to_day, verify_UTC, \ | ||||||
| from zerver.lib.timestamp import TimeZoneNotUTCError, floor_to_day, floor_to_hour, verify_UTC |     TimezoneNotUTCException | ||||||
| from zerver.models import Realm | from zerver.models import Realm | ||||||
|  |  | ||||||
|  | import os | ||||||
|  | import time | ||||||
|  | from typing import Any, Dict | ||||||
|  |  | ||||||
| states = { | states = { | ||||||
|     0: "OK", |     0: "OK", | ||||||
|     1: "WARNING", |     1: "WARNING", | ||||||
|     2: "CRITICAL", |     2: "CRITICAL", | ||||||
|     3: "UNKNOWN", |     3: "UNKNOWN" | ||||||
| } | } | ||||||
|  |  | ||||||
|  |  | ||||||
| class Command(BaseCommand): | class Command(BaseCommand): | ||||||
|     help = """Checks FillState table. |     help = """Checks FillState table. | ||||||
|  |  | ||||||
| @@ -26,30 +28,31 @@ class Command(BaseCommand): | |||||||
|  |  | ||||||
|     def handle(self, *args: Any, **options: Any) -> None: |     def handle(self, *args: Any, **options: Any) -> None: | ||||||
|         fill_state = self.get_fill_state() |         fill_state = self.get_fill_state() | ||||||
|         status = fill_state["status"] |         status = fill_state['status'] | ||||||
|         message = fill_state["message"] |         message = fill_state['message'] | ||||||
|  |  | ||||||
|         state_file_path = "/var/lib/nagios_state/check-analytics-state" |         state_file_path = "/var/lib/nagios_state/check-analytics-state" | ||||||
|         state_file_tmp = state_file_path + "-tmp" |         state_file_tmp = state_file_path + "-tmp" | ||||||
|  |  | ||||||
|         with open(state_file_tmp, "w") as f: |         with open(state_file_tmp, "w") as f: | ||||||
|             f.write(f"{int(time.time())}|{status}|{states[status]}|{message}\n") |             f.write("%s|%s|%s|%s\n" % ( | ||||||
|  |                 int(time.time()), status, states[status], message)) | ||||||
|         os.rename(state_file_tmp, state_file_path) |         os.rename(state_file_tmp, state_file_path) | ||||||
|  |  | ||||||
|     def get_fill_state(self) -> Dict[str, Any]: |     def get_fill_state(self) -> Dict[str, Any]: | ||||||
|         if not Realm.objects.exists(): |         if not Realm.objects.exists(): | ||||||
|             return {"status": 0, "message": "No realms exist, so not checking FillState."} |             return {'status': 0, 'message': 'No realms exist, so not checking FillState.'} | ||||||
|  |  | ||||||
|         warning_unfilled_properties = [] |         warning_unfilled_properties = [] | ||||||
|         critical_unfilled_properties = [] |         critical_unfilled_properties = [] | ||||||
|         for property, stat in COUNT_STATS.items(): |         for property, stat in COUNT_STATS.items(): | ||||||
|             last_fill = stat.last_successful_fill() |             last_fill = last_successful_fill(property) | ||||||
|             if last_fill is None: |             if last_fill is None: | ||||||
|                 last_fill = installation_epoch() |                 last_fill = installation_epoch() | ||||||
|             try: |             try: | ||||||
|                 verify_UTC(last_fill) |                 verify_UTC(last_fill) | ||||||
|             except TimeZoneNotUTCError: |             except TimezoneNotUTCException: | ||||||
|                 return {"status": 2, "message": f"FillState not in UTC for {property}"} |                 return {'status': 2, 'message': 'FillState not in UTC for %s' % (property,)} | ||||||
|  |  | ||||||
|             if stat.frequency == CountStat.DAY: |             if stat.frequency == CountStat.DAY: | ||||||
|                 floor_function = floor_to_day |                 floor_function = floor_to_day | ||||||
| @@ -61,10 +64,8 @@ class Command(BaseCommand): | |||||||
|                 critical_threshold = timedelta(minutes=150) |                 critical_threshold = timedelta(minutes=150) | ||||||
|  |  | ||||||
|             if floor_function(last_fill) != last_fill: |             if floor_function(last_fill) != last_fill: | ||||||
|                 return { |                 return {'status': 2, 'message': 'FillState not on %s boundary for %s' % | ||||||
|                     "status": 2, |                         (stat.frequency, property)} | ||||||
|                     "message": f"FillState not on {stat.frequency} boundary for {property}", |  | ||||||
|                 } |  | ||||||
|  |  | ||||||
|             time_to_last_fill = timezone_now() - last_fill |             time_to_last_fill = timezone_now() - last_fill | ||||||
|             if time_to_last_fill > critical_threshold: |             if time_to_last_fill > critical_threshold: | ||||||
| @@ -73,18 +74,9 @@ class Command(BaseCommand): | |||||||
|                 warning_unfilled_properties.append(property) |                 warning_unfilled_properties.append(property) | ||||||
|  |  | ||||||
|         if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0: |         if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0: | ||||||
|             return {"status": 0, "message": "FillState looks fine."} |             return {'status': 0, 'message': 'FillState looks fine.'} | ||||||
|         if len(critical_unfilled_properties) == 0: |         if len(critical_unfilled_properties) == 0: | ||||||
|             return { |             return {'status': 1, 'message': 'Missed filling %s once.' % | ||||||
|                 "status": 1, |                     (', '.join(warning_unfilled_properties),)} | ||||||
|                 "message": "Missed filling {} once.".format( |         return {'status': 2, 'message': 'Missed filling %s once. Missed filling %s at least twice.' % | ||||||
|                     ", ".join(warning_unfilled_properties), |                 (', '.join(warning_unfilled_properties), ', '.join(critical_unfilled_properties))} | ||||||
|                 ), |  | ||||||
|             } |  | ||||||
|         return { |  | ||||||
|             "status": 2, |  | ||||||
|             "message": "Missed filling {} once. Missed filling {} at least twice.".format( |  | ||||||
|                 ", ".join(warning_unfilled_properties), |  | ||||||
|                 ", ".join(critical_unfilled_properties), |  | ||||||
|             ), |  | ||||||
|         } |  | ||||||
|   | |||||||
| @@ -5,17 +5,16 @@ from django.core.management.base import BaseCommand, CommandError | |||||||
|  |  | ||||||
| from analytics.lib.counts import do_drop_all_analytics_tables | from analytics.lib.counts import do_drop_all_analytics_tables | ||||||
|  |  | ||||||
|  |  | ||||||
| class Command(BaseCommand): | class Command(BaseCommand): | ||||||
|     help = """Clear analytics tables.""" |     help = """Clear analytics tables.""" | ||||||
|  |  | ||||||
|     def add_arguments(self, parser: ArgumentParser) -> None: |     def add_arguments(self, parser: ArgumentParser) -> None: | ||||||
|         parser.add_argument("--force", action="store_true", help="Clear analytics tables.") |         parser.add_argument('--force', | ||||||
|  |                             action='store_true', | ||||||
|  |                             help="Clear analytics tables.") | ||||||
|  |  | ||||||
|     def handle(self, *args: Any, **options: Any) -> None: |     def handle(self, *args: Any, **options: Any) -> None: | ||||||
|         if options["force"]: |         if options['force']: | ||||||
|             do_drop_all_analytics_tables() |             do_drop_all_analytics_tables() | ||||||
|         else: |         else: | ||||||
|             raise CommandError( |             raise CommandError("Would delete all data from analytics tables (!); use --force to do so.") | ||||||
|                 "Would delete all data from analytics tables (!); use --force to do so." |  | ||||||
|             ) |  | ||||||
|   | |||||||
| @@ -5,19 +5,22 @@ from django.core.management.base import BaseCommand, CommandError | |||||||
|  |  | ||||||
| from analytics.lib.counts import COUNT_STATS, do_drop_single_stat | from analytics.lib.counts import COUNT_STATS, do_drop_single_stat | ||||||
|  |  | ||||||
|  |  | ||||||
| class Command(BaseCommand): | class Command(BaseCommand): | ||||||
|     help = """Clear analytics tables.""" |     help = """Clear analytics tables.""" | ||||||
|  |  | ||||||
|     def add_arguments(self, parser: ArgumentParser) -> None: |     def add_arguments(self, parser: ArgumentParser) -> None: | ||||||
|         parser.add_argument("--force", action="store_true", help="Actually do it.") |         parser.add_argument('--force', | ||||||
|         parser.add_argument("--property", help="The property of the stat to be cleared.") |                             action='store_true', | ||||||
|  |                             help="Actually do it.") | ||||||
|  |         parser.add_argument('--property', | ||||||
|  |                             type=str, | ||||||
|  |                             help="The property of the stat to be cleared.") | ||||||
|  |  | ||||||
|     def handle(self, *args: Any, **options: Any) -> None: |     def handle(self, *args: Any, **options: Any) -> None: | ||||||
|         property = options["property"] |         property = options['property'] | ||||||
|         if property not in COUNT_STATS: |         if property not in COUNT_STATS: | ||||||
|             raise CommandError(f"Invalid property: {property}") |             raise CommandError("Invalid property: %s" % (property,)) | ||||||
|         if not options["force"]: |         if not options['force']: | ||||||
|             raise CommandError("No action taken. Use --force.") |             raise CommandError("No action taken. Use --force.") | ||||||
|  |  | ||||||
|         do_drop_single_stat(property) |         do_drop_single_stat(property) | ||||||
|   | |||||||
							
								
								
									
										73
									
								
								analytics/management/commands/client_activity.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										73
									
								
								analytics/management/commands/client_activity.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,73 @@ | |||||||
|  | import datetime | ||||||
|  | from argparse import ArgumentParser | ||||||
|  | from typing import Any, Optional | ||||||
|  |  | ||||||
|  | from django.db.models import Count, QuerySet | ||||||
|  | from django.utils.timezone import now as timezone_now | ||||||
|  |  | ||||||
|  | from zerver.lib.management import ZulipBaseCommand | ||||||
|  | from zerver.models import UserActivity | ||||||
|  |  | ||||||
|  | class Command(ZulipBaseCommand): | ||||||
|  |     help = """Report rough client activity globally, for a realm, or for a user | ||||||
|  |  | ||||||
|  | Usage examples: | ||||||
|  |  | ||||||
|  | ./manage.py client_activity --target server | ||||||
|  | ./manage.py client_activity --target realm --realm zulip | ||||||
|  | ./manage.py client_activity --target user --user hamlet@zulip.com --realm zulip""" | ||||||
|  |  | ||||||
|  |     def add_arguments(self, parser: ArgumentParser) -> None: | ||||||
|  |         parser.add_argument('--target', dest='target', required=True, type=str, | ||||||
|  |                             help="'server' will calculate client activity of the entire server. " | ||||||
|  |                                  "'realm' will calculate client activity of realm. " | ||||||
|  |                                  "'user' will calculate client activity of the user.") | ||||||
|  |         parser.add_argument('--user', dest='user', type=str, | ||||||
|  |                             help="The email address of the user you want to calculate activity.") | ||||||
|  |         self.add_realm_args(parser) | ||||||
|  |  | ||||||
|  |     def compute_activity(self, user_activity_objects: QuerySet) -> None: | ||||||
|  |         # Report data from the past week. | ||||||
|  |         # | ||||||
|  |         # This is a rough report of client activity because we inconsistently | ||||||
|  |         # register activity from various clients; think of it as telling you | ||||||
|  |         # approximately how many people from a group have used a particular | ||||||
|  |         # client recently. For example, this might be useful to get a sense of | ||||||
|  |         # how popular different versions of a desktop client are. | ||||||
|  |         # | ||||||
|  |         # Importantly, this does NOT tell you anything about the relative | ||||||
|  |         # volumes of requests from clients. | ||||||
|  |         threshold = timezone_now() - datetime.timedelta(days=7) | ||||||
|  |         client_counts = user_activity_objects.filter( | ||||||
|  |             last_visit__gt=threshold).values("client__name").annotate( | ||||||
|  |             count=Count('client__name')) | ||||||
|  |  | ||||||
|  |         total = 0 | ||||||
|  |         counts = [] | ||||||
|  |         for client_type in client_counts: | ||||||
|  |             count = client_type["count"] | ||||||
|  |             client = client_type["client__name"] | ||||||
|  |             total += count | ||||||
|  |             counts.append((count, client)) | ||||||
|  |  | ||||||
|  |         counts.sort() | ||||||
|  |  | ||||||
|  |         for count in counts: | ||||||
|  |             print("%25s %15d" % (count[1], count[0])) | ||||||
|  |         print("Total:", total) | ||||||
|  |  | ||||||
|  |     def handle(self, *args: Any, **options: Optional[str]) -> None: | ||||||
|  |         realm = self.get_realm(options) | ||||||
|  |         if options["user"] is None: | ||||||
|  |             if options["target"] == "server" and realm is None: | ||||||
|  |                 # Report global activity. | ||||||
|  |                 self.compute_activity(UserActivity.objects.all()) | ||||||
|  |             elif options["target"] == "realm" and realm is not None: | ||||||
|  |                 self.compute_activity(UserActivity.objects.filter(user_profile__realm=realm)) | ||||||
|  |             else: | ||||||
|  |                 self.print_help("./manage.py", "client_activity") | ||||||
|  |         elif options["target"] == "user": | ||||||
|  |             user_profile = self.get_user(options["user"], realm) | ||||||
|  |             self.compute_activity(UserActivity.objects.filter(user_profile=user_profile)) | ||||||
|  |         else: | ||||||
|  |             self.print_help("./manage.py", "client_activity") | ||||||
| @@ -1,40 +1,21 @@ | |||||||
| import os |  | ||||||
| from datetime import timedelta | from datetime import timedelta | ||||||
| from typing import Any, Dict, List, Mapping, Type, Union | from typing import Any, Dict, List, Mapping, Optional, Type | ||||||
|  | import mock | ||||||
|  |  | ||||||
| from django.core.files.uploadedfile import UploadedFile |  | ||||||
| from django.core.management.base import BaseCommand | from django.core.management.base import BaseCommand | ||||||
| from django.utils.timezone import now as timezone_now | from django.utils.timezone import now as timezone_now | ||||||
|  |  | ||||||
| from analytics.lib.counts import COUNT_STATS, CountStat, do_drop_all_analytics_tables | from analytics.lib.counts import COUNT_STATS, \ | ||||||
|  |     CountStat, do_drop_all_analytics_tables | ||||||
| from analytics.lib.fixtures import generate_time_series_data | from analytics.lib.fixtures import generate_time_series_data | ||||||
| from analytics.lib.time_utils import time_range | from analytics.lib.time_utils import time_range | ||||||
| from analytics.models import ( | from analytics.models import BaseCount, FillState, RealmCount, UserCount, \ | ||||||
|     BaseCount, |     StreamCount, InstallationCount | ||||||
|     FillState, | from zerver.lib.actions import do_change_is_admin, STREAM_ASSIGNMENT_COLORS | ||||||
|     InstallationCount, |  | ||||||
|     RealmCount, |  | ||||||
|     StreamCount, |  | ||||||
|     UserCount, |  | ||||||
| ) |  | ||||||
| from zerver.actions.create_realm import do_create_realm |  | ||||||
| from zerver.actions.users import do_change_user_role |  | ||||||
| from zerver.lib.create_user import create_user | from zerver.lib.create_user import create_user | ||||||
| from zerver.lib.storage import static_path |  | ||||||
| from zerver.lib.stream_color import STREAM_ASSIGNMENT_COLORS |  | ||||||
| from zerver.lib.timestamp import floor_to_day | from zerver.lib.timestamp import floor_to_day | ||||||
| from zerver.lib.upload import upload_message_attachment_from_request | from zerver.models import Realm, Stream, Client, \ | ||||||
| from zerver.models import ( |     Recipient, Subscription | ||||||
|     Client, |  | ||||||
|     Realm, |  | ||||||
|     RealmAuditLog, |  | ||||||
|     Recipient, |  | ||||||
|     Stream, |  | ||||||
|     Subscription, |  | ||||||
|     UserGroup, |  | ||||||
|     UserProfile, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Command(BaseCommand): | class Command(BaseCommand): | ||||||
|     help = """Populates analytics tables with randomly generated data.""" |     help = """Populates analytics tables with randomly generated data.""" | ||||||
| @@ -42,30 +23,16 @@ class Command(BaseCommand): | |||||||
|     DAYS_OF_DATA = 100 |     DAYS_OF_DATA = 100 | ||||||
|     random_seed = 26 |     random_seed = 26 | ||||||
|  |  | ||||||
|     def generate_fixture_data( |     def generate_fixture_data(self, stat: CountStat, business_hours_base: float, | ||||||
|         self, |                               non_business_hours_base: float, growth: float, | ||||||
|         stat: CountStat, |                               autocorrelation: float, spikiness: float, | ||||||
|         business_hours_base: float, |                               holiday_rate: float=0, partial_sum: bool=False) -> List[int]: | ||||||
|         non_business_hours_base: float, |  | ||||||
|         growth: float, |  | ||||||
|         autocorrelation: float, |  | ||||||
|         spikiness: float, |  | ||||||
|         holiday_rate: float = 0, |  | ||||||
|         partial_sum: bool = False, |  | ||||||
|     ) -> List[int]: |  | ||||||
|         self.random_seed += 1 |         self.random_seed += 1 | ||||||
|         return generate_time_series_data( |         return generate_time_series_data( | ||||||
|             days=self.DAYS_OF_DATA, |             days=self.DAYS_OF_DATA, business_hours_base=business_hours_base, | ||||||
|             business_hours_base=business_hours_base, |             non_business_hours_base=non_business_hours_base, growth=growth, | ||||||
|             non_business_hours_base=non_business_hours_base, |             autocorrelation=autocorrelation, spikiness=spikiness, holiday_rate=holiday_rate, | ||||||
|             growth=growth, |             frequency=stat.frequency, partial_sum=partial_sum, random_seed=self.random_seed) | ||||||
|             autocorrelation=autocorrelation, |  | ||||||
|             spikiness=spikiness, |  | ||||||
|             holiday_rate=holiday_rate, |  | ||||||
|             frequency=stat.frequency, |  | ||||||
|             partial_sum=partial_sum, |  | ||||||
|             random_seed=self.random_seed, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def handle(self, *args: Any, **options: Any) -> None: |     def handle(self, *args: Any, **options: Any) -> None: | ||||||
|         # TODO: This should arguably only delete the objects |         # TODO: This should arguably only delete the objects | ||||||
| @@ -73,7 +40,7 @@ class Command(BaseCommand): | |||||||
|         do_drop_all_analytics_tables() |         do_drop_all_analytics_tables() | ||||||
|  |  | ||||||
|         # This also deletes any objects with this realm as a foreign key |         # This also deletes any objects with this realm as a foreign key | ||||||
|         Realm.objects.filter(string_id="analytics").delete() |         Realm.objects.filter(string_id='analytics').delete() | ||||||
|  |  | ||||||
|         # Because we just deleted a bunch of objects in the database |         # Because we just deleted a bunch of objects in the database | ||||||
|         # directly (rather than deleting individual objects in Django, |         # directly (rather than deleting individual objects in Django, | ||||||
| @@ -82,267 +49,168 @@ class Command(BaseCommand): | |||||||
|         # memcached in order to ensure deleted objects aren't still |         # memcached in order to ensure deleted objects aren't still | ||||||
|         # present in the memcached cache. |         # present in the memcached cache. | ||||||
|         from zerver.apps import flush_cache |         from zerver.apps import flush_cache | ||||||
|  |  | ||||||
|         flush_cache(None) |         flush_cache(None) | ||||||
|  |  | ||||||
|         installation_time = timezone_now() - timedelta(days=self.DAYS_OF_DATA) |         installation_time = timezone_now() - timedelta(days=self.DAYS_OF_DATA) | ||||||
|         last_end_time = floor_to_day(timezone_now()) |         last_end_time = floor_to_day(timezone_now()) | ||||||
|         realm = do_create_realm( |         realm = Realm.objects.create( | ||||||
|             string_id="analytics", name="Analytics", date_created=installation_time |             string_id='analytics', name='Analytics', date_created=installation_time) | ||||||
|         ) |         with mock.patch("zerver.lib.create_user.timezone_now", return_value=installation_time): | ||||||
|  |             shylock = create_user('shylock@analytics.ds', 'Shylock', realm, | ||||||
|         shylock = create_user( |                                   full_name='Shylock', short_name='shylock', | ||||||
|             "shylock@analytics.ds", |                                   is_realm_admin=True) | ||||||
|             "Shylock", |         do_change_is_admin(shylock, True) | ||||||
|             realm, |  | ||||||
|             full_name="Shylock", |  | ||||||
|             role=UserProfile.ROLE_REALM_OWNER, |  | ||||||
|             force_date_joined=installation_time, |  | ||||||
|         ) |  | ||||||
|         do_change_user_role(shylock, UserProfile.ROLE_REALM_OWNER, acting_user=None) |  | ||||||
|  |  | ||||||
|         # Create guest user for set_guest_users_statistic. |  | ||||||
|         create_user( |  | ||||||
|             "bassanio@analytics.ds", |  | ||||||
|             "Bassanio", |  | ||||||
|             realm, |  | ||||||
|             full_name="Bassanio", |  | ||||||
|             role=UserProfile.ROLE_GUEST, |  | ||||||
|             force_date_joined=installation_time, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         administrators_user_group = UserGroup.objects.get( |  | ||||||
|             name=UserGroup.ADMINISTRATORS_GROUP_NAME, realm=realm, is_system_group=True |  | ||||||
|         ) |  | ||||||
|         stream = Stream.objects.create( |         stream = Stream.objects.create( | ||||||
|             name="all", |             name='all', realm=realm, date_created=installation_time) | ||||||
|             realm=realm, |  | ||||||
|             date_created=installation_time, |  | ||||||
|             can_remove_subscribers_group=administrators_user_group, |  | ||||||
|         ) |  | ||||||
|         recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM) |         recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM) | ||||||
|         stream.recipient = recipient |         stream.recipient = recipient | ||||||
|         stream.save(update_fields=["recipient"]) |         stream.save(update_fields=["recipient"]) | ||||||
|  |  | ||||||
|         # Subscribe shylock to the stream to avoid invariant failures. |         # Subscribe shylock to the stream to avoid invariant failures. | ||||||
|         Subscription.objects.create( |         # TODO: This should use subscribe_users_to_streams from populate_db. | ||||||
|             recipient=recipient, |         subs = [ | ||||||
|             user_profile=shylock, |             Subscription(recipient=recipient, | ||||||
|             is_user_active=shylock.is_active, |                          user_profile=shylock, | ||||||
|             color=STREAM_ASSIGNMENT_COLORS[0], |                          color=STREAM_ASSIGNMENT_COLORS[0]), | ||||||
|         ) |         ] | ||||||
|         RealmAuditLog.objects.create( |         Subscription.objects.bulk_create(subs) | ||||||
|             realm=realm, |  | ||||||
|             modified_user=shylock, |  | ||||||
|             modified_stream=stream, |  | ||||||
|             event_last_message_id=0, |  | ||||||
|             event_type=RealmAuditLog.SUBSCRIPTION_CREATED, |  | ||||||
|             event_time=installation_time, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         # Create an attachment in the database for set_storage_space_used_statistic. |         def insert_fixture_data(stat: CountStat, | ||||||
|         IMAGE_FILE_PATH = static_path("images/test-images/checkbox.png") |                                 fixture_data: Mapping[Optional[str], List[int]], | ||||||
|         file_info = os.stat(IMAGE_FILE_PATH) |                                 table: Type[BaseCount]) -> None: | ||||||
|         file_size = file_info.st_size |             end_times = time_range(last_end_time, last_end_time, stat.frequency, | ||||||
|         with open(IMAGE_FILE_PATH, "rb") as fp: |                                    len(list(fixture_data.values())[0])) | ||||||
|             upload_message_attachment_from_request(UploadedFile(fp), shylock, file_size) |  | ||||||
|  |  | ||||||
|         FixtureData = Mapping[Union[str, int, None], List[int]] |  | ||||||
|  |  | ||||||
|         def insert_fixture_data( |  | ||||||
|             stat: CountStat, |  | ||||||
|             fixture_data: FixtureData, |  | ||||||
|             table: Type[BaseCount], |  | ||||||
|         ) -> None: |  | ||||||
|             end_times = time_range( |  | ||||||
|                 last_end_time, last_end_time, stat.frequency, len(list(fixture_data.values())[0]) |  | ||||||
|             ) |  | ||||||
|             if table == InstallationCount: |             if table == InstallationCount: | ||||||
|                 id_args: Dict[str, Any] = {} |                 id_args = {}  # type: Dict[str, Any] | ||||||
|             if table == RealmCount: |             if table == RealmCount: | ||||||
|                 id_args = {"realm": realm} |                 id_args = {'realm': realm} | ||||||
|             if table == UserCount: |             if table == UserCount: | ||||||
|                 id_args = {"realm": realm, "user": shylock} |                 id_args = {'realm': realm, 'user': shylock} | ||||||
|             if table == StreamCount: |             if table == StreamCount: | ||||||
|                 id_args = {"stream": stream, "realm": realm} |                 id_args = {'stream': stream, 'realm': realm} | ||||||
|  |  | ||||||
|             for subgroup, values in fixture_data.items(): |             for subgroup, values in fixture_data.items(): | ||||||
|                 table.objects.bulk_create( |                 table.objects.bulk_create([ | ||||||
|                     table( |                     table(property=stat.property, subgroup=subgroup, end_time=end_time, | ||||||
|                         property=stat.property, |                           value=value, **id_args) | ||||||
|                         subgroup=subgroup, |                     for end_time, value in zip(end_times, values) if value != 0]) | ||||||
|                         end_time=end_time, |  | ||||||
|                         value=value, |  | ||||||
|                         **id_args, |  | ||||||
|                     ) |  | ||||||
|                     for end_time, value in zip(end_times, values) |  | ||||||
|                     if value != 0 |  | ||||||
|                 ) |  | ||||||
|  |  | ||||||
|         stat = COUNT_STATS["1day_actives::day"] |         stat = COUNT_STATS['1day_actives::day'] | ||||||
|         realm_data: FixtureData = { |  | ||||||
|             None: self.generate_fixture_data(stat, 0.08, 0.02, 3, 0.3, 6, partial_sum=True), |  | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, realm_data, RealmCount) |  | ||||||
|         installation_data: FixtureData = { |  | ||||||
|             None: self.generate_fixture_data(stat, 0.8, 0.2, 4, 0.3, 6, partial_sum=True), |  | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, installation_data, InstallationCount) |  | ||||||
|         FillState.objects.create( |  | ||||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         stat = COUNT_STATS["7day_actives::day"] |  | ||||||
|         realm_data = { |         realm_data = { | ||||||
|             None: self.generate_fixture_data(stat, 0.2, 0.07, 3, 0.3, 6, partial_sum=True), |             None: self.generate_fixture_data(stat, .08, .02, 3, .3, 6, partial_sum=True), | ||||||
|  |         }  # type: Mapping[Optional[str], List[int]] | ||||||
|  |         insert_fixture_data(stat, realm_data, RealmCount) | ||||||
|  |         installation_data = { | ||||||
|  |             None: self.generate_fixture_data(stat, .8, .2, 4, .3, 6, partial_sum=True), | ||||||
|  |         }  # type: Mapping[Optional[str], List[int]] | ||||||
|  |         insert_fixture_data(stat, installation_data, InstallationCount) | ||||||
|  |         FillState.objects.create(property=stat.property, end_time=last_end_time, | ||||||
|  |                                  state=FillState.DONE) | ||||||
|  |  | ||||||
|  |         stat = COUNT_STATS['realm_active_humans::day'] | ||||||
|  |         realm_data = { | ||||||
|  |             None: self.generate_fixture_data(stat, .1, .03, 3, .5, 3, partial_sum=True), | ||||||
|         } |         } | ||||||
|         insert_fixture_data(stat, realm_data, RealmCount) |         insert_fixture_data(stat, realm_data, RealmCount) | ||||||
|         installation_data = { |         installation_data = { | ||||||
|             None: self.generate_fixture_data(stat, 2, 0.7, 4, 0.3, 6, partial_sum=True), |             None: self.generate_fixture_data(stat, 1, .3, 4, .5, 3, partial_sum=True), | ||||||
|         } |         } | ||||||
|         insert_fixture_data(stat, installation_data, InstallationCount) |         insert_fixture_data(stat, installation_data, InstallationCount) | ||||||
|         FillState.objects.create( |         FillState.objects.create(property=stat.property, end_time=last_end_time, | ||||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE |                                  state=FillState.DONE) | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         stat = COUNT_STATS["realm_active_humans::day"] |         stat = COUNT_STATS['active_users_audit:is_bot:day'] | ||||||
|         realm_data = { |         realm_data = { | ||||||
|             None: self.generate_fixture_data(stat, 0.8, 0.08, 3, 0.5, 3, partial_sum=True), |             'false': self.generate_fixture_data(stat, .1, .03, 3.5, .8, 2, partial_sum=True), | ||||||
|         } |         } | ||||||
|         insert_fixture_data(stat, realm_data, RealmCount) |         insert_fixture_data(stat, realm_data, RealmCount) | ||||||
|         installation_data = { |         installation_data = { | ||||||
|             None: self.generate_fixture_data(stat, 1, 0.3, 4, 0.5, 3, partial_sum=True), |             'false': self.generate_fixture_data(stat, 1, .3, 6, .8, 2, partial_sum=True), | ||||||
|         } |         } | ||||||
|         insert_fixture_data(stat, installation_data, InstallationCount) |         insert_fixture_data(stat, installation_data, InstallationCount) | ||||||
|         FillState.objects.create( |         FillState.objects.create(property=stat.property, end_time=last_end_time, | ||||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE |                                  state=FillState.DONE) | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] |         stat = COUNT_STATS['messages_sent:is_bot:hour'] | ||||||
|         realm_data = { |         user_data = {'false': self.generate_fixture_data( | ||||||
|             "false": self.generate_fixture_data(stat, 1, 0.2, 3.5, 0.8, 2, partial_sum=True), |             stat, 2, 1, 1.5, .6, 8, holiday_rate=.1)}  # type: Mapping[Optional[str], List[int]] | ||||||
|             "true": self.generate_fixture_data(stat, 0.3, 0.05, 3, 0.3, 2, partial_sum=True), |  | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, realm_data, RealmCount) |  | ||||||
|         installation_data = { |  | ||||||
|             "false": self.generate_fixture_data(stat, 3, 1, 4, 0.8, 2, partial_sum=True), |  | ||||||
|             "true": self.generate_fixture_data(stat, 1, 0.4, 4, 0.8, 2, partial_sum=True), |  | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, installation_data, InstallationCount) |  | ||||||
|         FillState.objects.create( |  | ||||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         stat = COUNT_STATS["messages_sent:is_bot:hour"] |  | ||||||
|         user_data: FixtureData = { |  | ||||||
|             "false": self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8, holiday_rate=0.1), |  | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, user_data, UserCount) |         insert_fixture_data(stat, user_data, UserCount) | ||||||
|         realm_data = { |         realm_data = {'false': self.generate_fixture_data(stat, 35, 15, 6, .6, 4), | ||||||
|             "false": self.generate_fixture_data(stat, 35, 15, 6, 0.6, 4), |                       'true': self.generate_fixture_data(stat, 15, 15, 3, .4, 2)} | ||||||
|             "true": self.generate_fixture_data(stat, 15, 15, 3, 0.4, 2), |  | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, realm_data, RealmCount) |         insert_fixture_data(stat, realm_data, RealmCount) | ||||||
|         installation_data = { |         installation_data = {'false': self.generate_fixture_data(stat, 350, 150, 6, .6, 4), | ||||||
|             "false": self.generate_fixture_data(stat, 350, 150, 6, 0.6, 4), |                              'true': self.generate_fixture_data(stat, 150, 150, 3, .4, 2)} | ||||||
|             "true": self.generate_fixture_data(stat, 150, 150, 3, 0.4, 2), |  | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, installation_data, InstallationCount) |         insert_fixture_data(stat, installation_data, InstallationCount) | ||||||
|         FillState.objects.create( |         FillState.objects.create(property=stat.property, end_time=last_end_time, | ||||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE |                                  state=FillState.DONE) | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         stat = COUNT_STATS["messages_sent:message_type:day"] |         stat = COUNT_STATS['messages_sent:message_type:day'] | ||||||
|         user_data = { |         user_data = { | ||||||
|             "public_stream": self.generate_fixture_data(stat, 1.5, 1, 3, 0.6, 8), |             'public_stream': self.generate_fixture_data(stat, 1.5, 1, 3, .6, 8), | ||||||
|             "private_message": self.generate_fixture_data(stat, 0.5, 0.3, 1, 0.6, 8), |             'private_message': self.generate_fixture_data(stat, .5, .3, 1, .6, 8), | ||||||
|             "huddle_message": self.generate_fixture_data(stat, 0.2, 0.2, 2, 0.6, 8), |             'huddle_message': self.generate_fixture_data(stat, .2, .2, 2, .6, 8)} | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, user_data, UserCount) |         insert_fixture_data(stat, user_data, UserCount) | ||||||
|         realm_data = { |         realm_data = { | ||||||
|             "public_stream": self.generate_fixture_data(stat, 30, 8, 5, 0.6, 4), |             'public_stream': self.generate_fixture_data(stat, 30, 8, 5, .6, 4), | ||||||
|             "private_stream": self.generate_fixture_data(stat, 7, 7, 5, 0.6, 4), |             'private_stream': self.generate_fixture_data(stat, 7, 7, 5, .6, 4), | ||||||
|             "private_message": self.generate_fixture_data(stat, 13, 5, 5, 0.6, 4), |             'private_message': self.generate_fixture_data(stat, 13, 5, 5, .6, 4), | ||||||
|             "huddle_message": self.generate_fixture_data(stat, 6, 3, 3, 0.6, 4), |             'huddle_message': self.generate_fixture_data(stat, 6, 3, 3, .6, 4)} | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, realm_data, RealmCount) |         insert_fixture_data(stat, realm_data, RealmCount) | ||||||
|         installation_data = { |         installation_data = { | ||||||
|             "public_stream": self.generate_fixture_data(stat, 300, 80, 5, 0.6, 4), |             'public_stream': self.generate_fixture_data(stat, 300, 80, 5, .6, 4), | ||||||
|             "private_stream": self.generate_fixture_data(stat, 70, 70, 5, 0.6, 4), |             'private_stream': self.generate_fixture_data(stat, 70, 70, 5, .6, 4), | ||||||
|             "private_message": self.generate_fixture_data(stat, 130, 50, 5, 0.6, 4), |             'private_message': self.generate_fixture_data(stat, 130, 50, 5, .6, 4), | ||||||
|             "huddle_message": self.generate_fixture_data(stat, 60, 30, 3, 0.6, 4), |             'huddle_message': self.generate_fixture_data(stat, 60, 30, 3, .6, 4)} | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, installation_data, InstallationCount) |         insert_fixture_data(stat, installation_data, InstallationCount) | ||||||
|         FillState.objects.create( |         FillState.objects.create(property=stat.property, end_time=last_end_time, | ||||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE |                                  state=FillState.DONE) | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         website, created = Client.objects.get_or_create(name="website") |         website, created = Client.objects.get_or_create(name='website') | ||||||
|         old_desktop, created = Client.objects.get_or_create(name="desktop app Linux 0.3.7") |         old_desktop, created = Client.objects.get_or_create(name='desktop app Linux 0.3.7') | ||||||
|         android, created = Client.objects.get_or_create(name="ZulipAndroid") |         android, created = Client.objects.get_or_create(name='ZulipAndroid') | ||||||
|         iOS, created = Client.objects.get_or_create(name="ZulipiOS") |         iOS, created = Client.objects.get_or_create(name='ZulipiOS') | ||||||
|         react_native, created = Client.objects.get_or_create(name="ZulipMobile") |         react_native, created = Client.objects.get_or_create(name='ZulipMobile') | ||||||
|         API, created = Client.objects.get_or_create(name="API: Python") |         API, created = Client.objects.get_or_create(name='API: Python') | ||||||
|         zephyr_mirror, created = Client.objects.get_or_create(name="zephyr_mirror") |         zephyr_mirror, created = Client.objects.get_or_create(name='zephyr_mirror') | ||||||
|         unused, created = Client.objects.get_or_create(name="unused") |         unused, created = Client.objects.get_or_create(name='unused') | ||||||
|         long_webhook, created = Client.objects.get_or_create(name="ZulipLooooooooooongNameWebhook") |         long_webhook, created = Client.objects.get_or_create(name='ZulipLooooooooooongNameWebhook') | ||||||
|  |  | ||||||
|         stat = COUNT_STATS["messages_sent:client:day"] |         stat = COUNT_STATS['messages_sent:client:day'] | ||||||
|         user_data = { |         user_data = { | ||||||
|             website.id: self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8), |             website.id: self.generate_fixture_data(stat, 2, 1, 1.5, .6, 8), | ||||||
|             zephyr_mirror.id: self.generate_fixture_data(stat, 0, 0.3, 1.5, 0.6, 8), |             zephyr_mirror.id: self.generate_fixture_data(stat, 0, .3, 1.5, .6, 8)} | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, user_data, UserCount) |         insert_fixture_data(stat, user_data, UserCount) | ||||||
|         realm_data = { |         realm_data = { | ||||||
|             website.id: self.generate_fixture_data(stat, 30, 20, 5, 0.6, 3), |             website.id: self.generate_fixture_data(stat, 30, 20, 5, .6, 3), | ||||||
|             old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, 0.6, 3), |             old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, .6, 3), | ||||||
|             android.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3), |             android.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3), | ||||||
|             iOS.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3), |             iOS.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3), | ||||||
|             react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3), |             react_native.id: self.generate_fixture_data(stat, 5, 5, 10, .6, 3), | ||||||
|             API.id: self.generate_fixture_data(stat, 5, 5, 5, 0.6, 3), |             API.id: self.generate_fixture_data(stat, 5, 5, 5, .6, 3), | ||||||
|             zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, 0.6, 3), |             zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, .6, 3), | ||||||
|             unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0), |             unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0), | ||||||
|             long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3), |             long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3)} | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, realm_data, RealmCount) |         insert_fixture_data(stat, realm_data, RealmCount) | ||||||
|         installation_data = { |         installation_data = { | ||||||
|             website.id: self.generate_fixture_data(stat, 300, 200, 5, 0.6, 3), |             website.id: self.generate_fixture_data(stat, 300, 200, 5, .6, 3), | ||||||
|             old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, 0.6, 3), |             old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, .6, 3), | ||||||
|             android.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3), |             android.id: self.generate_fixture_data(stat, 50, 50, 2, .6, 3), | ||||||
|             iOS.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3), |             iOS.id: self.generate_fixture_data(stat, 50, 50, 2, .6, 3), | ||||||
|             react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3), |             react_native.id: self.generate_fixture_data(stat, 5, 5, 10, .6, 3), | ||||||
|             API.id: self.generate_fixture_data(stat, 50, 50, 5, 0.6, 3), |             API.id: self.generate_fixture_data(stat, 50, 50, 5, .6, 3), | ||||||
|             zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, 0.6, 3), |             zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, .6, 3), | ||||||
|             unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0), |             unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0), | ||||||
|             long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3), |             long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, .6, 3)} | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, installation_data, InstallationCount) |         insert_fixture_data(stat, installation_data, InstallationCount) | ||||||
|         FillState.objects.create( |         FillState.objects.create(property=stat.property, end_time=last_end_time, | ||||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE |                                  state=FillState.DONE) | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         stat = COUNT_STATS["messages_in_stream:is_bot:day"] |         stat = COUNT_STATS['messages_in_stream:is_bot:day'] | ||||||
|         realm_data = { |         realm_data = {'false': self.generate_fixture_data(stat, 30, 5, 6, .6, 4), | ||||||
|             "false": self.generate_fixture_data(stat, 30, 5, 6, 0.6, 4), |                       'true': self.generate_fixture_data(stat, 20, 2, 3, .2, 3)} | ||||||
|             "true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3), |  | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, realm_data, RealmCount) |         insert_fixture_data(stat, realm_data, RealmCount) | ||||||
|         stream_data: Mapping[Union[int, str, None], List[int]] = { |         stream_data = {'false': self.generate_fixture_data(stat, 10, 7, 5, .6, 4), | ||||||
|             "false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4), |                        'true': self.generate_fixture_data(stat, 5, 3, 2, .4, 2)}  # type: Mapping[Optional[str], List[int]] | ||||||
|             "true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2), |  | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, stream_data, StreamCount) |         insert_fixture_data(stat, stream_data, StreamCount) | ||||||
|         FillState.objects.create( |         FillState.objects.create(property=stat.property, end_time=last_end_time, | ||||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE |                                  state=FillState.DONE) | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         stat = COUNT_STATS["messages_read::hour"] |  | ||||||
|         user_data = { |  | ||||||
|             None: self.generate_fixture_data(stat, 7, 3, 2, 0.6, 8, holiday_rate=0.1), |  | ||||||
|         } |  | ||||||
|         insert_fixture_data(stat, user_data, UserCount) |  | ||||||
|         realm_data = {None: self.generate_fixture_data(stat, 50, 35, 6, 0.6, 4)} |  | ||||||
|         insert_fixture_data(stat, realm_data, RealmCount) |  | ||||||
|         FillState.objects.create( |  | ||||||
|             property=stat.property, end_time=last_end_time, state=FillState.DONE |  | ||||||
|         ) |  | ||||||
|   | |||||||
							
								
								
									
										151
									
								
								analytics/management/commands/realm_stats.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										151
									
								
								analytics/management/commands/realm_stats.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,151 @@ | |||||||
|  | import datetime | ||||||
|  | from argparse import ArgumentParser | ||||||
|  | from typing import Any, List | ||||||
|  |  | ||||||
|  | from django.core.management.base import BaseCommand, CommandError | ||||||
|  | from django.db.models import Count | ||||||
|  | from django.utils.timezone import now as timezone_now | ||||||
|  |  | ||||||
|  | from zerver.models import Message, Realm, Recipient, Stream, \ | ||||||
|  |     Subscription, UserActivity, UserMessage, UserProfile, get_realm | ||||||
|  |  | ||||||
|  | MOBILE_CLIENT_LIST = ["Android", "ios"] | ||||||
|  | HUMAN_CLIENT_LIST = MOBILE_CLIENT_LIST + ["website"] | ||||||
|  |  | ||||||
|  | human_messages = Message.objects.filter(sending_client__name__in=HUMAN_CLIENT_LIST) | ||||||
|  |  | ||||||
|  | class Command(BaseCommand): | ||||||
|  |     help = "Generate statistics on realm activity." | ||||||
|  |  | ||||||
|  |     def add_arguments(self, parser: ArgumentParser) -> None: | ||||||
|  |         parser.add_argument('realms', metavar='<realm>', type=str, nargs='*', | ||||||
|  |                             help="realm to generate statistics for") | ||||||
|  |  | ||||||
|  |     def active_users(self, realm: Realm) -> List[UserProfile]: | ||||||
|  |         # Has been active (on the website, for now) in the last 7 days. | ||||||
|  |         activity_cutoff = timezone_now() - datetime.timedelta(days=7) | ||||||
|  |         return [activity.user_profile for activity in ( | ||||||
|  |             UserActivity.objects.filter(user_profile__realm=realm, | ||||||
|  |                                         user_profile__is_active=True, | ||||||
|  |                                         last_visit__gt=activity_cutoff, | ||||||
|  |                                         query="/json/users/me/pointer", | ||||||
|  |                                         client__name="website"))] | ||||||
|  |  | ||||||
|  |     def messages_sent_by(self, user: UserProfile, days_ago: int) -> int: | ||||||
|  |         sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago) | ||||||
|  |         return human_messages.filter(sender=user, date_sent__gt=sent_time_cutoff).count() | ||||||
|  |  | ||||||
|  |     def total_messages(self, realm: Realm, days_ago: int) -> int: | ||||||
|  |         sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago) | ||||||
|  |         return Message.objects.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).count() | ||||||
|  |  | ||||||
|  |     def human_messages(self, realm: Realm, days_ago: int) -> int: | ||||||
|  |         sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago) | ||||||
|  |         return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).count() | ||||||
|  |  | ||||||
|  |     def api_messages(self, realm: Realm, days_ago: int) -> int: | ||||||
|  |         return (self.total_messages(realm, days_ago) - self.human_messages(realm, days_ago)) | ||||||
|  |  | ||||||
|  |     def stream_messages(self, realm: Realm, days_ago: int) -> int: | ||||||
|  |         sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago) | ||||||
|  |         return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff, | ||||||
|  |                                      recipient__type=Recipient.STREAM).count() | ||||||
|  |  | ||||||
|  |     def private_messages(self, realm: Realm, days_ago: int) -> int: | ||||||
|  |         sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago) | ||||||
|  |         return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).exclude( | ||||||
|  |             recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.HUDDLE).count() | ||||||
|  |  | ||||||
|  |     def group_private_messages(self, realm: Realm, days_ago: int) -> int: | ||||||
|  |         sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago) | ||||||
|  |         return human_messages.filter(sender__realm=realm, date_sent__gt=sent_time_cutoff).exclude( | ||||||
|  |             recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.PERSONAL).count() | ||||||
|  |  | ||||||
|  |     def report_percentage(self, numerator: float, denominator: float, text: str) -> None: | ||||||
|  |         if not denominator: | ||||||
|  |             fraction = 0.0 | ||||||
|  |         else: | ||||||
|  |             fraction = numerator / float(denominator) | ||||||
|  |         print("%.2f%% of" % (fraction * 100,), text) | ||||||
|  |  | ||||||
|  |     def handle(self, *args: Any, **options: Any) -> None: | ||||||
|  |         if options['realms']: | ||||||
|  |             try: | ||||||
|  |                 realms = [get_realm(string_id) for string_id in options['realms']] | ||||||
|  |             except Realm.DoesNotExist as e: | ||||||
|  |                 raise CommandError(e) | ||||||
|  |         else: | ||||||
|  |             realms = Realm.objects.all() | ||||||
|  |  | ||||||
|  |         for realm in realms: | ||||||
|  |             print(realm.string_id) | ||||||
|  |  | ||||||
|  |             user_profiles = UserProfile.objects.filter(realm=realm, is_active=True) | ||||||
|  |             active_users = self.active_users(realm) | ||||||
|  |             num_active = len(active_users) | ||||||
|  |  | ||||||
|  |             print("%d active users (%d total)" % (num_active, len(user_profiles))) | ||||||
|  |             streams = Stream.objects.filter(realm=realm).extra( | ||||||
|  |                 tables=['zerver_subscription', 'zerver_recipient'], | ||||||
|  |                 where=['zerver_subscription.recipient_id = zerver_recipient.id', | ||||||
|  |                        'zerver_recipient.type = 2', | ||||||
|  |                        'zerver_recipient.type_id = zerver_stream.id', | ||||||
|  |                        'zerver_subscription.active = true']).annotate(count=Count("name")) | ||||||
|  |             print("%d streams" % (streams.count(),)) | ||||||
|  |  | ||||||
|  |             for days_ago in (1, 7, 30): | ||||||
|  |                 print("In last %d days, users sent:" % (days_ago,)) | ||||||
|  |                 sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles] | ||||||
|  |                 for quantity in sorted(sender_quantities, reverse=True): | ||||||
|  |                     print(quantity, end=' ') | ||||||
|  |                 print("") | ||||||
|  |  | ||||||
|  |                 print("%d stream messages" % (self.stream_messages(realm, days_ago),)) | ||||||
|  |                 print("%d one-on-one private messages" % (self.private_messages(realm, days_ago),)) | ||||||
|  |                 print("%d messages sent via the API" % (self.api_messages(realm, days_ago),)) | ||||||
|  |                 print("%d group private messages" % (self.group_private_messages(realm, days_ago),)) | ||||||
|  |  | ||||||
|  |             num_notifications_enabled = len([x for x in active_users if x.enable_desktop_notifications]) | ||||||
|  |             self.report_percentage(num_notifications_enabled, num_active, | ||||||
|  |                                    "active users have desktop notifications enabled") | ||||||
|  |  | ||||||
|  |             num_enter_sends = len([x for x in active_users if x.enter_sends]) | ||||||
|  |             self.report_percentage(num_enter_sends, num_active, | ||||||
|  |                                    "active users have enter-sends") | ||||||
|  |  | ||||||
|  |             all_message_count = human_messages.filter(sender__realm=realm).count() | ||||||
|  |             multi_paragraph_message_count = human_messages.filter( | ||||||
|  |                 sender__realm=realm, content__contains="\n\n").count() | ||||||
|  |             self.report_percentage(multi_paragraph_message_count, all_message_count, | ||||||
|  |                                    "all messages are multi-paragraph") | ||||||
|  |  | ||||||
|  |             # Starred messages | ||||||
|  |             starrers = UserMessage.objects.filter(user_profile__in=user_profiles, | ||||||
|  |                                                   flags=UserMessage.flags.starred).values( | ||||||
|  |                 "user_profile").annotate(count=Count("user_profile")) | ||||||
|  |             print("%d users have starred %d messages" % ( | ||||||
|  |                 len(starrers), sum([elt["count"] for elt in starrers]))) | ||||||
|  |  | ||||||
|  |             active_user_subs = Subscription.objects.filter( | ||||||
|  |                 user_profile__in=user_profiles, active=True) | ||||||
|  |  | ||||||
|  |             # Streams not in home view | ||||||
|  |             non_home_view = active_user_subs.filter(is_muted=True).values( | ||||||
|  |                 "user_profile").annotate(count=Count("user_profile")) | ||||||
|  |             print("%d users have %d streams not in home view" % ( | ||||||
|  |                 len(non_home_view), sum([elt["count"] for elt in non_home_view]))) | ||||||
|  |  | ||||||
|  |             # Code block markup | ||||||
|  |             markup_messages = human_messages.filter( | ||||||
|  |                 sender__realm=realm, content__contains="~~~").values( | ||||||
|  |                 "sender").annotate(count=Count("sender")) | ||||||
|  |             print("%d users have used code block markup on %s messages" % ( | ||||||
|  |                 len(markup_messages), sum([elt["count"] for elt in markup_messages]))) | ||||||
|  |  | ||||||
|  |             # Notifications for stream messages | ||||||
|  |             notifications = active_user_subs.filter(desktop_notifications=True).values( | ||||||
|  |                 "user_profile").annotate(count=Count("user_profile")) | ||||||
|  |             print("%d users receive desktop notifications for %d streams" % ( | ||||||
|  |                 len(notifications), sum([elt["count"] for elt in notifications]))) | ||||||
|  |  | ||||||
|  |             print("") | ||||||
							
								
								
									
										56
									
								
								analytics/management/commands/stream_stats.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										56
									
								
								analytics/management/commands/stream_stats.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,56 @@ | |||||||
|  | from argparse import ArgumentParser | ||||||
|  | from typing import Any | ||||||
|  |  | ||||||
|  | from django.core.management.base import BaseCommand, CommandError | ||||||
|  | from django.db.models import Q | ||||||
|  |  | ||||||
|  | from zerver.models import Message, Realm, \ | ||||||
|  |     Recipient, Stream, Subscription, get_realm | ||||||
|  |  | ||||||
|  | class Command(BaseCommand): | ||||||
|  |     help = "Generate statistics on the streams for a realm." | ||||||
|  |  | ||||||
|  |     def add_arguments(self, parser: ArgumentParser) -> None: | ||||||
|  |         parser.add_argument('realms', metavar='<realm>', type=str, nargs='*', | ||||||
|  |                             help="realm to generate statistics for") | ||||||
|  |  | ||||||
|  |     def handle(self, *args: Any, **options: str) -> None: | ||||||
|  |         if options['realms']: | ||||||
|  |             try: | ||||||
|  |                 realms = [get_realm(string_id) for string_id in options['realms']] | ||||||
|  |             except Realm.DoesNotExist as e: | ||||||
|  |                 raise CommandError(e) | ||||||
|  |         else: | ||||||
|  |             realms = Realm.objects.all() | ||||||
|  |  | ||||||
|  |         for realm in realms: | ||||||
|  |             streams = Stream.objects.filter(realm=realm).exclude(Q(name__istartswith="tutorial-")) | ||||||
|  |             # private stream count | ||||||
|  |             private_count = 0 | ||||||
|  |             # public stream count | ||||||
|  |             public_count = 0 | ||||||
|  |             for stream in streams: | ||||||
|  |                 if stream.invite_only: | ||||||
|  |                     private_count += 1 | ||||||
|  |                 else: | ||||||
|  |                     public_count += 1 | ||||||
|  |             print("------------") | ||||||
|  |             print(realm.string_id, end=' ') | ||||||
|  |             print("%10s %d public streams and" % ("(", public_count), end=' ') | ||||||
|  |             print("%d private streams )" % (private_count,)) | ||||||
|  |             print("------------") | ||||||
|  |             print("%25s %15s %10s %12s" % ("stream", "subscribers", "messages", "type")) | ||||||
|  |  | ||||||
|  |             for stream in streams: | ||||||
|  |                 if stream.invite_only: | ||||||
|  |                     stream_type = 'private' | ||||||
|  |                 else: | ||||||
|  |                     stream_type = 'public' | ||||||
|  |                 print("%25s" % (stream.name,), end=' ') | ||||||
|  |                 recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id) | ||||||
|  |                 print("%10d" % (len(Subscription.objects.filter(recipient=recipient, | ||||||
|  |                                                                 active=True)),), end=' ') | ||||||
|  |                 num_messages = len(Message.objects.filter(recipient=recipient)) | ||||||
|  |                 print("%12d" % (num_messages,), end=' ') | ||||||
|  |                 print("%15s" % (stream_type,)) | ||||||
|  |             print("") | ||||||
| @@ -1,13 +1,13 @@ | |||||||
| import os | import os | ||||||
| import time | import time | ||||||
| from argparse import ArgumentParser | from argparse import ArgumentParser | ||||||
| from datetime import timezone |  | ||||||
| from typing import Any, Dict | from typing import Any, Dict | ||||||
|  |  | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from django.core.management.base import BaseCommand | from django.core.management.base import BaseCommand | ||||||
| from django.utils.dateparse import parse_datetime | from django.utils.dateparse import parse_datetime | ||||||
| from django.utils.timezone import now as timezone_now | from django.utils.timezone import now as timezone_now | ||||||
|  | from django.utils.timezone import utc as timezone_utc | ||||||
|  |  | ||||||
| from analytics.lib.counts import COUNT_STATS, logger, process_count_stat | from analytics.lib.counts import COUNT_STATS, logger, process_count_stat | ||||||
| from scripts.lib.zulip_tools import ENDC, WARNING | from scripts.lib.zulip_tools import ENDC, WARNING | ||||||
| @@ -15,36 +15,34 @@ from zerver.lib.remote_server import send_analytics_to_remote_server | |||||||
| from zerver.lib.timestamp import floor_to_hour | from zerver.lib.timestamp import floor_to_hour | ||||||
| from zerver.models import Realm | from zerver.models import Realm | ||||||
|  |  | ||||||
|  |  | ||||||
| class Command(BaseCommand): | class Command(BaseCommand): | ||||||
|     help = """Fills Analytics tables. |     help = """Fills Analytics tables. | ||||||
|  |  | ||||||
|     Run as a cron job that runs every hour.""" |     Run as a cron job that runs every hour.""" | ||||||
|  |  | ||||||
|     def add_arguments(self, parser: ArgumentParser) -> None: |     def add_arguments(self, parser: ArgumentParser) -> None: | ||||||
|         parser.add_argument( |         parser.add_argument('--time', '-t', | ||||||
|             "--time", |                             type=str, | ||||||
|             "-t", |                             help='Update stat tables from current state to' | ||||||
|             help="Update stat tables from current state to " |                                  '--time. Defaults to the current time.', | ||||||
|             "--time. Defaults to the current time.", |                             default=timezone_now().isoformat()) | ||||||
|             default=timezone_now().isoformat(), |         parser.add_argument('--utc', | ||||||
|         ) |                             action='store_true', | ||||||
|         parser.add_argument("--utc", action="store_true", help="Interpret --time in UTC.") |                             help="Interpret --time in UTC.", | ||||||
|         parser.add_argument( |                             default=False) | ||||||
|             "--stat", "-s", help="CountStat to process. If omitted, all stats are processed." |         parser.add_argument('--stat', '-s', | ||||||
|         ) |                             type=str, | ||||||
|         parser.add_argument( |                             help="CountStat to process. If omitted, all stats are processed.") | ||||||
|             "--verbose", action="store_true", help="Print timing information to stdout." |         parser.add_argument('--verbose', | ||||||
|         ) |                             action='store_true', | ||||||
|  |                             help="Print timing information to stdout.", | ||||||
|  |                             default=False) | ||||||
|  |  | ||||||
|     def handle(self, *args: Any, **options: Any) -> None: |     def handle(self, *args: Any, **options: Any) -> None: | ||||||
|         try: |         try: | ||||||
|             os.mkdir(settings.ANALYTICS_LOCK_DIR) |             os.mkdir(settings.ANALYTICS_LOCK_DIR) | ||||||
|         except OSError: |         except OSError: | ||||||
|             print( |             print(WARNING + "Analytics lock %s is unavailable; exiting... " + ENDC) | ||||||
|                 f"{WARNING}Analytics lock {settings.ANALYTICS_LOCK_DIR} is unavailable;" |  | ||||||
|                 f" exiting.{ENDC}" |  | ||||||
|             ) |  | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         try: |         try: | ||||||
| @@ -59,38 +57,34 @@ class Command(BaseCommand): | |||||||
|             logger.info("No realms, stopping update_analytics_counts") |             logger.info("No realms, stopping update_analytics_counts") | ||||||
|             return |             return | ||||||
|  |  | ||||||
|         fill_to_time = parse_datetime(options["time"]) |         fill_to_time = parse_datetime(options['time']) | ||||||
|         assert fill_to_time is not None |         if options['utc']: | ||||||
|         if options["utc"]: |             fill_to_time = fill_to_time.replace(tzinfo=timezone_utc) | ||||||
|             fill_to_time = fill_to_time.replace(tzinfo=timezone.utc) |  | ||||||
|         if fill_to_time.tzinfo is None: |         if fill_to_time.tzinfo is None: | ||||||
|             raise ValueError( |             raise ValueError("--time must be timezone aware. Maybe you meant to use the --utc option?") | ||||||
|                 "--time must be time-zone-aware. Maybe you meant to use the --utc option?" |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc)) |         fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone_utc)) | ||||||
|  |  | ||||||
|         if options["stat"] is not None: |         if options['stat'] is not None: | ||||||
|             stats = [COUNT_STATS[options["stat"]]] |             stats = [COUNT_STATS[options['stat']]] | ||||||
|         else: |         else: | ||||||
|             stats = list(COUNT_STATS.values()) |             stats = list(COUNT_STATS.values()) | ||||||
|  |  | ||||||
|         logger.info("Starting updating analytics counts through %s", fill_to_time) |         logger.info("Starting updating analytics counts through %s" % (fill_to_time,)) | ||||||
|         if options["verbose"]: |         if options['verbose']: | ||||||
|             start = time.time() |             start = time.time() | ||||||
|             last = start |             last = start | ||||||
|  |  | ||||||
|         for stat in stats: |         for stat in stats: | ||||||
|             process_count_stat(stat, fill_to_time) |             process_count_stat(stat, fill_to_time) | ||||||
|             if options["verbose"]: |             if options['verbose']: | ||||||
|                 print(f"Updated {stat.property} in {time.time() - last:.3f}s") |                 print("Updated %s in %.3fs" % (stat.property, time.time() - last)) | ||||||
|                 last = time.time() |                 last = time.time() | ||||||
|  |  | ||||||
|         if options["verbose"]: |         if options['verbose']: | ||||||
|             print( |             print("Finished updating analytics counts through %s in %.3fs" % | ||||||
|                 f"Finished updating analytics counts through {fill_to_time} in {time.time() - start:.3f}s" |                   (fill_to_time, time.time() - start)) | ||||||
|             ) |         logger.info("Finished updating analytics counts through %s" % (fill_to_time,)) | ||||||
|         logger.info("Finished updating analytics counts through %s", fill_to_time) |  | ||||||
|  |  | ||||||
|         if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS: |         if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS: | ||||||
|             send_analytics_to_remote_server() |             send_analytics_to_remote_server() | ||||||
|   | |||||||
							
								
								
									
										41
									
								
								analytics/management/commands/user_stats.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										41
									
								
								analytics/management/commands/user_stats.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,41 @@ | |||||||
|  | import datetime | ||||||
|  | from argparse import ArgumentParser | ||||||
|  | from typing import Any | ||||||
|  |  | ||||||
|  | from django.core.management.base import BaseCommand, CommandError | ||||||
|  | from django.utils.timezone import now as timezone_now | ||||||
|  |  | ||||||
|  | from zerver.models import Message, Realm, Stream, UserProfile, get_realm | ||||||
|  |  | ||||||
|  | class Command(BaseCommand): | ||||||
|  |     help = "Generate statistics on user activity." | ||||||
|  |  | ||||||
|  |     def add_arguments(self, parser: ArgumentParser) -> None: | ||||||
|  |         parser.add_argument('realms', metavar='<realm>', type=str, nargs='*', | ||||||
|  |                             help="realm to generate statistics for") | ||||||
|  |  | ||||||
|  |     def messages_sent_by(self, user: UserProfile, week: int) -> int: | ||||||
|  |         start = timezone_now() - datetime.timedelta(days=(week + 1)*7) | ||||||
|  |         end = timezone_now() - datetime.timedelta(days=week*7) | ||||||
|  |         return Message.objects.filter(sender=user, date_sent__gt=start, date_sent__lte=end).count() | ||||||
|  |  | ||||||
|  |     def handle(self, *args: Any, **options: Any) -> None: | ||||||
|  |         if options['realms']: | ||||||
|  |             try: | ||||||
|  |                 realms = [get_realm(string_id) for string_id in options['realms']] | ||||||
|  |             except Realm.DoesNotExist as e: | ||||||
|  |                 raise CommandError(e) | ||||||
|  |         else: | ||||||
|  |             realms = Realm.objects.all() | ||||||
|  |  | ||||||
|  |         for realm in realms: | ||||||
|  |             print(realm.string_id) | ||||||
|  |             user_profiles = UserProfile.objects.filter(realm=realm, is_active=True) | ||||||
|  |             print("%d users" % (len(user_profiles),)) | ||||||
|  |             print("%d streams" % (len(Stream.objects.filter(realm=realm)),)) | ||||||
|  |  | ||||||
|  |             for user_profile in user_profiles: | ||||||
|  |                 print("%35s" % (user_profile.email,), end=' ') | ||||||
|  |                 for week in range(10): | ||||||
|  |                     print("%5d" % (self.messages_sent_by(user_profile, week),), end=' ') | ||||||
|  |                 print("") | ||||||
| @@ -1,208 +1,110 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
| import django.db.models.deletion | import django.db.models.deletion | ||||||
| from django.conf import settings | from django.conf import settings | ||||||
| from django.db import migrations, models | from django.db import migrations, models | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|     dependencies = [ |     dependencies = [ | ||||||
|         ("zerver", "0030_realm_org_type"), |         ('zerver', '0030_realm_org_type'), | ||||||
|         migrations.swappable_dependency(settings.AUTH_USER_MODEL), |         migrations.swappable_dependency(settings.AUTH_USER_MODEL), | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|         migrations.CreateModel( |         migrations.CreateModel( | ||||||
|             name="Anomaly", |             name='Anomaly', | ||||||
|             fields=[ |             fields=[ | ||||||
|                 ( |                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), | ||||||
|                     "id", |                 ('info', models.CharField(max_length=1000)), | ||||||
|                     models.AutoField( |  | ||||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ("info", models.CharField(max_length=1000)), |  | ||||||
|             ], |             ], | ||||||
|             bases=(models.Model,), |             bases=(models.Model,), | ||||||
|         ), |         ), | ||||||
|         migrations.CreateModel( |         migrations.CreateModel( | ||||||
|             name="HuddleCount", |             name='HuddleCount', | ||||||
|             fields=[ |             fields=[ | ||||||
|                 ( |                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), | ||||||
|                     "id", |                 ('huddle', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Recipient')), | ||||||
|                     models.AutoField( |                 ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), | ||||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True |                 ('property', models.CharField(max_length=40)), | ||||||
|                     ), |                 ('end_time', models.DateTimeField()), | ||||||
|                 ), |                 ('interval', models.CharField(max_length=20)), | ||||||
|                 ( |                 ('value', models.BigIntegerField()), | ||||||
|                     "huddle", |                 ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)), | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient" |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "user", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ("property", models.CharField(max_length=40)), |  | ||||||
|                 ("end_time", models.DateTimeField()), |  | ||||||
|                 ("interval", models.CharField(max_length=20)), |  | ||||||
|                 ("value", models.BigIntegerField()), |  | ||||||
|                 ( |  | ||||||
|                     "anomaly", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, |  | ||||||
|                         to="analytics.Anomaly", |  | ||||||
|                         null=True, |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|             ], |             ], | ||||||
|             bases=(models.Model,), |             bases=(models.Model,), | ||||||
|         ), |         ), | ||||||
|         migrations.CreateModel( |         migrations.CreateModel( | ||||||
|             name="InstallationCount", |             name='InstallationCount', | ||||||
|             fields=[ |             fields=[ | ||||||
|                 ( |                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), | ||||||
|                     "id", |                 ('property', models.CharField(max_length=40)), | ||||||
|                     models.AutoField( |                 ('end_time', models.DateTimeField()), | ||||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True |                 ('interval', models.CharField(max_length=20)), | ||||||
|                     ), |                 ('value', models.BigIntegerField()), | ||||||
|                 ), |                 ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)), | ||||||
|                 ("property", models.CharField(max_length=40)), |  | ||||||
|                 ("end_time", models.DateTimeField()), |  | ||||||
|                 ("interval", models.CharField(max_length=20)), |  | ||||||
|                 ("value", models.BigIntegerField()), |  | ||||||
|                 ( |  | ||||||
|                     "anomaly", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, |  | ||||||
|                         to="analytics.Anomaly", |  | ||||||
|                         null=True, |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|             ], |             ], | ||||||
|             bases=(models.Model,), |             bases=(models.Model,), | ||||||
|         ), |         ), | ||||||
|         migrations.CreateModel( |         migrations.CreateModel( | ||||||
|             name="RealmCount", |             name='RealmCount', | ||||||
|             fields=[ |             fields=[ | ||||||
|                 ( |                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), | ||||||
|                     "id", |                 ('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')), | ||||||
|                     models.AutoField( |                 ('property', models.CharField(max_length=40)), | ||||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True |                 ('end_time', models.DateTimeField()), | ||||||
|                     ), |                 ('interval', models.CharField(max_length=20)), | ||||||
|                 ), |                 ('value', models.BigIntegerField()), | ||||||
|                 ( |                 ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)), | ||||||
|                     "realm", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm" |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ("property", models.CharField(max_length=40)), |  | ||||||
|                 ("end_time", models.DateTimeField()), |  | ||||||
|                 ("interval", models.CharField(max_length=20)), |  | ||||||
|                 ("value", models.BigIntegerField()), |  | ||||||
|                 ( |  | ||||||
|                     "anomaly", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, |  | ||||||
|                         to="analytics.Anomaly", |  | ||||||
|                         null=True, |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|             ], |             ], | ||||||
|             bases=(models.Model,), |             bases=(models.Model,), | ||||||
|         ), |         ), | ||||||
|         migrations.CreateModel( |         migrations.CreateModel( | ||||||
|             name="StreamCount", |             name='StreamCount', | ||||||
|             fields=[ |             fields=[ | ||||||
|                 ( |                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), | ||||||
|                     "id", |                 ('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')), | ||||||
|                     models.AutoField( |                 ('stream', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Stream')), | ||||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True |                 ('property', models.CharField(max_length=40)), | ||||||
|                     ), |                 ('end_time', models.DateTimeField()), | ||||||
|                 ), |                 ('interval', models.CharField(max_length=20)), | ||||||
|                 ( |                 ('value', models.BigIntegerField()), | ||||||
|                     "realm", |                 ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)), | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm" |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "stream", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Stream" |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ("property", models.CharField(max_length=40)), |  | ||||||
|                 ("end_time", models.DateTimeField()), |  | ||||||
|                 ("interval", models.CharField(max_length=20)), |  | ||||||
|                 ("value", models.BigIntegerField()), |  | ||||||
|                 ( |  | ||||||
|                     "anomaly", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, |  | ||||||
|                         to="analytics.Anomaly", |  | ||||||
|                         null=True, |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|             ], |             ], | ||||||
|             bases=(models.Model,), |             bases=(models.Model,), | ||||||
|         ), |         ), | ||||||
|         migrations.CreateModel( |         migrations.CreateModel( | ||||||
|             name="UserCount", |             name='UserCount', | ||||||
|             fields=[ |             fields=[ | ||||||
|                 ( |                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), | ||||||
|                     "id", |                 ('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')), | ||||||
|                     models.AutoField( |                 ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), | ||||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True |                 ('property', models.CharField(max_length=40)), | ||||||
|                     ), |                 ('end_time', models.DateTimeField()), | ||||||
|                 ), |                 ('interval', models.CharField(max_length=20)), | ||||||
|                 ( |                 ('value', models.BigIntegerField()), | ||||||
|                     "realm", |                 ('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)), | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm" |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ( |  | ||||||
|                     "user", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|                 ("property", models.CharField(max_length=40)), |  | ||||||
|                 ("end_time", models.DateTimeField()), |  | ||||||
|                 ("interval", models.CharField(max_length=20)), |  | ||||||
|                 ("value", models.BigIntegerField()), |  | ||||||
|                 ( |  | ||||||
|                     "anomaly", |  | ||||||
|                     models.ForeignKey( |  | ||||||
|                         on_delete=django.db.models.deletion.CASCADE, |  | ||||||
|                         to="analytics.Anomaly", |  | ||||||
|                         null=True, |  | ||||||
|                     ), |  | ||||||
|                 ), |  | ||||||
|             ], |             ], | ||||||
|             bases=(models.Model,), |             bases=(models.Model,), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="usercount", |             name='usercount', | ||||||
|             unique_together={("user", "property", "end_time", "interval")}, |             unique_together=set([('user', 'property', 'end_time', 'interval')]), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="streamcount", |             name='streamcount', | ||||||
|             unique_together={("stream", "property", "end_time", "interval")}, |             unique_together=set([('stream', 'property', 'end_time', 'interval')]), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="realmcount", |             name='realmcount', | ||||||
|             unique_together={("realm", "property", "end_time", "interval")}, |             unique_together=set([('realm', 'property', 'end_time', 'interval')]), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="installationcount", |             name='installationcount', | ||||||
|             unique_together={("property", "end_time", "interval")}, |             unique_together=set([('property', 'end_time', 'interval')]), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="huddlecount", |             name='huddlecount', | ||||||
|             unique_together={("huddle", "property", "end_time", "interval")}, |             unique_together=set([('huddle', 'property', 'end_time', 'interval')]), | ||||||
|         ), |         ), | ||||||
|     ] |     ] | ||||||
|   | |||||||
| @@ -1,29 +1,30 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
| from django.db import migrations | from django.db import migrations | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|     dependencies = [ |     dependencies = [ | ||||||
|         ("analytics", "0001_initial"), |         ('analytics', '0001_initial'), | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="huddlecount", |             name='huddlecount', | ||||||
|             unique_together=set(), |             unique_together=set([]), | ||||||
|         ), |         ), | ||||||
|         migrations.RemoveField( |         migrations.RemoveField( | ||||||
|             model_name="huddlecount", |             model_name='huddlecount', | ||||||
|             name="anomaly", |             name='anomaly', | ||||||
|         ), |         ), | ||||||
|         migrations.RemoveField( |         migrations.RemoveField( | ||||||
|             model_name="huddlecount", |             model_name='huddlecount', | ||||||
|             name="huddle", |             name='huddle', | ||||||
|         ), |         ), | ||||||
|         migrations.RemoveField( |         migrations.RemoveField( | ||||||
|             model_name="huddlecount", |             model_name='huddlecount', | ||||||
|             name="user", |             name='user', | ||||||
|         ), |         ), | ||||||
|         migrations.DeleteModel( |         migrations.DeleteModel( | ||||||
|             name="HuddleCount", |             name='HuddleCount', | ||||||
|         ), |         ), | ||||||
|     ] |     ] | ||||||
|   | |||||||
| @@ -1,25 +1,21 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
| from django.db import migrations, models | from django.db import migrations, models | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|     dependencies = [ |     dependencies = [ | ||||||
|         ("analytics", "0002_remove_huddlecount"), |         ('analytics', '0002_remove_huddlecount'), | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|         migrations.CreateModel( |         migrations.CreateModel( | ||||||
|             name="FillState", |             name='FillState', | ||||||
|             fields=[ |             fields=[ | ||||||
|                 ( |                 ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), | ||||||
|                     "id", |                 ('property', models.CharField(unique=True, max_length=40)), | ||||||
|                     models.AutoField( |                 ('end_time', models.DateTimeField()), | ||||||
|                         verbose_name="ID", serialize=False, auto_created=True, primary_key=True |                 ('state', models.PositiveSmallIntegerField()), | ||||||
|                     ), |                 ('last_modified', models.DateTimeField(auto_now=True)), | ||||||
|                 ), |  | ||||||
|                 ("property", models.CharField(unique=True, max_length=40)), |  | ||||||
|                 ("end_time", models.DateTimeField()), |  | ||||||
|                 ("state", models.PositiveSmallIntegerField()), |  | ||||||
|                 ("last_modified", models.DateTimeField(auto_now=True)), |  | ||||||
|             ], |             ], | ||||||
|             bases=(models.Model,), |             bases=(models.Model,), | ||||||
|         ), |         ), | ||||||
|   | |||||||
| @@ -1,30 +1,31 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
| from django.db import migrations, models | from django.db import migrations, models | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|     dependencies = [ |     dependencies = [ | ||||||
|         ("analytics", "0003_fillstate"), |         ('analytics', '0003_fillstate'), | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|         migrations.AddField( |         migrations.AddField( | ||||||
|             model_name="installationcount", |             model_name='installationcount', | ||||||
|             name="subgroup", |             name='subgroup', | ||||||
|             field=models.CharField(max_length=16, null=True), |             field=models.CharField(max_length=16, null=True), | ||||||
|         ), |         ), | ||||||
|         migrations.AddField( |         migrations.AddField( | ||||||
|             model_name="realmcount", |             model_name='realmcount', | ||||||
|             name="subgroup", |             name='subgroup', | ||||||
|             field=models.CharField(max_length=16, null=True), |             field=models.CharField(max_length=16, null=True), | ||||||
|         ), |         ), | ||||||
|         migrations.AddField( |         migrations.AddField( | ||||||
|             model_name="streamcount", |             model_name='streamcount', | ||||||
|             name="subgroup", |             name='subgroup', | ||||||
|             field=models.CharField(max_length=16, null=True), |             field=models.CharField(max_length=16, null=True), | ||||||
|         ), |         ), | ||||||
|         migrations.AddField( |         migrations.AddField( | ||||||
|             model_name="usercount", |             model_name='usercount', | ||||||
|             name="subgroup", |             name='subgroup', | ||||||
|             field=models.CharField(max_length=16, null=True), |             field=models.CharField(max_length=16, null=True), | ||||||
|         ), |         ), | ||||||
|     ] |     ] | ||||||
|   | |||||||
| @@ -1,50 +1,51 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
| from django.db import migrations, models | from django.db import migrations, models | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|     dependencies = [ |     dependencies = [ | ||||||
|         ("analytics", "0004_add_subgroup"), |         ('analytics', '0004_add_subgroup'), | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|         migrations.AlterField( |         migrations.AlterField( | ||||||
|             model_name="installationcount", |             model_name='installationcount', | ||||||
|             name="interval", |             name='interval', | ||||||
|             field=models.CharField(max_length=8), |             field=models.CharField(max_length=8), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |         migrations.AlterField( | ||||||
|             model_name="installationcount", |             model_name='installationcount', | ||||||
|             name="property", |             name='property', | ||||||
|             field=models.CharField(max_length=32), |             field=models.CharField(max_length=32), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |         migrations.AlterField( | ||||||
|             model_name="realmcount", |             model_name='realmcount', | ||||||
|             name="interval", |             name='interval', | ||||||
|             field=models.CharField(max_length=8), |             field=models.CharField(max_length=8), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |         migrations.AlterField( | ||||||
|             model_name="realmcount", |             model_name='realmcount', | ||||||
|             name="property", |             name='property', | ||||||
|             field=models.CharField(max_length=32), |             field=models.CharField(max_length=32), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |         migrations.AlterField( | ||||||
|             model_name="streamcount", |             model_name='streamcount', | ||||||
|             name="interval", |             name='interval', | ||||||
|             field=models.CharField(max_length=8), |             field=models.CharField(max_length=8), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |         migrations.AlterField( | ||||||
|             model_name="streamcount", |             model_name='streamcount', | ||||||
|             name="property", |             name='property', | ||||||
|             field=models.CharField(max_length=32), |             field=models.CharField(max_length=32), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |         migrations.AlterField( | ||||||
|             model_name="usercount", |             model_name='usercount', | ||||||
|             name="interval", |             name='interval', | ||||||
|             field=models.CharField(max_length=8), |             field=models.CharField(max_length=8), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |         migrations.AlterField( | ||||||
|             model_name="usercount", |             model_name='usercount', | ||||||
|             name="property", |             name='property', | ||||||
|             field=models.CharField(max_length=32), |             field=models.CharField(max_length=32), | ||||||
|         ), |         ), | ||||||
|     ] |     ] | ||||||
|   | |||||||
| @@ -1,26 +1,27 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
| from django.db import migrations | from django.db import migrations | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|     dependencies = [ |     dependencies = [ | ||||||
|         ("analytics", "0005_alter_field_size"), |         ('analytics', '0005_alter_field_size'), | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="installationcount", |             name='installationcount', | ||||||
|             unique_together={("property", "subgroup", "end_time", "interval")}, |             unique_together=set([('property', 'subgroup', 'end_time', 'interval')]), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="realmcount", |             name='realmcount', | ||||||
|             unique_together={("realm", "property", "subgroup", "end_time", "interval")}, |             unique_together=set([('realm', 'property', 'subgroup', 'end_time', 'interval')]), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="streamcount", |             name='streamcount', | ||||||
|             unique_together={("stream", "property", "subgroup", "end_time", "interval")}, |             unique_together=set([('stream', 'property', 'subgroup', 'end_time', 'interval')]), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="usercount", |             name='usercount', | ||||||
|             unique_together={("user", "property", "subgroup", "end_time", "interval")}, |             unique_together=set([('user', 'property', 'subgroup', 'end_time', 'interval')]), | ||||||
|         ), |         ), | ||||||
|     ] |     ] | ||||||
|   | |||||||
| @@ -1,43 +1,44 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
| # Generated by Django 1.10.4 on 2017-01-16 20:50 | # Generated by Django 1.10.4 on 2017-01-16 20:50 | ||||||
| from django.db import migrations | from django.db import migrations | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|     dependencies = [ |     dependencies = [ | ||||||
|         ("analytics", "0006_add_subgroup_to_unique_constraints"), |         ('analytics', '0006_add_subgroup_to_unique_constraints'), | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="installationcount", |             name='installationcount', | ||||||
|             unique_together={("property", "subgroup", "end_time")}, |             unique_together=set([('property', 'subgroup', 'end_time')]), | ||||||
|         ), |         ), | ||||||
|         migrations.RemoveField( |         migrations.RemoveField( | ||||||
|             model_name="installationcount", |             model_name='installationcount', | ||||||
|             name="interval", |             name='interval', | ||||||
|         ), |         ), | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="realmcount", |             name='realmcount', | ||||||
|             unique_together={("realm", "property", "subgroup", "end_time")}, |             unique_together=set([('realm', 'property', 'subgroup', 'end_time')]), | ||||||
|         ), |         ), | ||||||
|         migrations.RemoveField( |         migrations.RemoveField( | ||||||
|             model_name="realmcount", |             model_name='realmcount', | ||||||
|             name="interval", |             name='interval', | ||||||
|         ), |         ), | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="streamcount", |             name='streamcount', | ||||||
|             unique_together={("stream", "property", "subgroup", "end_time")}, |             unique_together=set([('stream', 'property', 'subgroup', 'end_time')]), | ||||||
|         ), |         ), | ||||||
|         migrations.RemoveField( |         migrations.RemoveField( | ||||||
|             model_name="streamcount", |             model_name='streamcount', | ||||||
|             name="interval", |             name='interval', | ||||||
|         ), |         ), | ||||||
|         migrations.AlterUniqueTogether( |         migrations.AlterUniqueTogether( | ||||||
|             name="usercount", |             name='usercount', | ||||||
|             unique_together={("user", "property", "subgroup", "end_time")}, |             unique_together=set([('user', 'property', 'subgroup', 'end_time')]), | ||||||
|         ), |         ), | ||||||
|         migrations.RemoveField( |         migrations.RemoveField( | ||||||
|             model_name="usercount", |             model_name='usercount', | ||||||
|             name="interval", |             name='interval', | ||||||
|         ), |         ), | ||||||
|     ] |     ] | ||||||
|   | |||||||
| @@ -1,24 +1,25 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
| # Generated by Django 1.10.5 on 2017-02-01 22:28 | # Generated by Django 1.10.5 on 2017-02-01 22:28 | ||||||
| from django.db import migrations | from django.db import migrations | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|     dependencies = [ |     dependencies = [ | ||||||
|         ("zerver", "0050_userprofile_avatar_version"), |         ('zerver', '0050_userprofile_avatar_version'), | ||||||
|         ("analytics", "0007_remove_interval"), |         ('analytics', '0007_remove_interval'), | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|         migrations.AlterIndexTogether( |         migrations.AlterIndexTogether( | ||||||
|             name="realmcount", |             name='realmcount', | ||||||
|             index_together={("property", "end_time")}, |             index_together=set([('property', 'end_time')]), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterIndexTogether( |         migrations.AlterIndexTogether( | ||||||
|             name="streamcount", |             name='streamcount', | ||||||
|             index_together={("property", "realm", "end_time")}, |             index_together=set([('property', 'realm', 'end_time')]), | ||||||
|         ), |         ), | ||||||
|         migrations.AlterIndexTogether( |         migrations.AlterIndexTogether( | ||||||
|             name="usercount", |             name='usercount', | ||||||
|             index_together={("property", "realm", "end_time")}, |             index_together=set([('property', 'realm', 'end_time')]), | ||||||
|         ), |         ), | ||||||
|     ] |     ] | ||||||
|   | |||||||
| @@ -1,28 +1,26 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
| from django.db import migrations | from django.db import migrations | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor | ||||||
| from django.db.migrations.state import StateApps | from django.db.migrations.state import StateApps | ||||||
|  |  | ||||||
|  | def delete_messages_sent_to_stream_stat(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: | ||||||
|  |     UserCount = apps.get_model('analytics', 'UserCount') | ||||||
|  |     StreamCount = apps.get_model('analytics', 'StreamCount') | ||||||
|  |     RealmCount = apps.get_model('analytics', 'RealmCount') | ||||||
|  |     InstallationCount = apps.get_model('analytics', 'InstallationCount') | ||||||
|  |     FillState = apps.get_model('analytics', 'FillState') | ||||||
|  |  | ||||||
| def delete_messages_sent_to_stream_stat( |     property = 'messages_sent_to_stream:is_bot' | ||||||
|     apps: StateApps, schema_editor: BaseDatabaseSchemaEditor |  | ||||||
| ) -> None: |  | ||||||
|     UserCount = apps.get_model("analytics", "UserCount") |  | ||||||
|     StreamCount = apps.get_model("analytics", "StreamCount") |  | ||||||
|     RealmCount = apps.get_model("analytics", "RealmCount") |  | ||||||
|     InstallationCount = apps.get_model("analytics", "InstallationCount") |  | ||||||
|     FillState = apps.get_model("analytics", "FillState") |  | ||||||
|  |  | ||||||
|     property = "messages_sent_to_stream:is_bot" |  | ||||||
|     UserCount.objects.filter(property=property).delete() |     UserCount.objects.filter(property=property).delete() | ||||||
|     StreamCount.objects.filter(property=property).delete() |     StreamCount.objects.filter(property=property).delete() | ||||||
|     RealmCount.objects.filter(property=property).delete() |     RealmCount.objects.filter(property=property).delete() | ||||||
|     InstallationCount.objects.filter(property=property).delete() |     InstallationCount.objects.filter(property=property).delete() | ||||||
|     FillState.objects.filter(property=property).delete() |     FillState.objects.filter(property=property).delete() | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|     dependencies = [ |     dependencies = [ | ||||||
|         ("analytics", "0008_add_count_indexes"), |         ('analytics', '0008_add_count_indexes'), | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|   | |||||||
| @@ -1,27 +1,25 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
| from django.db import migrations | from django.db import migrations | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor | ||||||
| from django.db.migrations.state import StateApps | from django.db.migrations.state import StateApps | ||||||
|  |  | ||||||
|  | def clear_message_sent_by_message_type_values(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: | ||||||
|  |     UserCount = apps.get_model('analytics', 'UserCount') | ||||||
|  |     StreamCount = apps.get_model('analytics', 'StreamCount') | ||||||
|  |     RealmCount = apps.get_model('analytics', 'RealmCount') | ||||||
|  |     InstallationCount = apps.get_model('analytics', 'InstallationCount') | ||||||
|  |     FillState = apps.get_model('analytics', 'FillState') | ||||||
|  |  | ||||||
| def clear_message_sent_by_message_type_values( |     property = 'messages_sent:message_type:day' | ||||||
|     apps: StateApps, schema_editor: BaseDatabaseSchemaEditor |  | ||||||
| ) -> None: |  | ||||||
|     UserCount = apps.get_model("analytics", "UserCount") |  | ||||||
|     StreamCount = apps.get_model("analytics", "StreamCount") |  | ||||||
|     RealmCount = apps.get_model("analytics", "RealmCount") |  | ||||||
|     InstallationCount = apps.get_model("analytics", "InstallationCount") |  | ||||||
|     FillState = apps.get_model("analytics", "FillState") |  | ||||||
|  |  | ||||||
|     property = "messages_sent:message_type:day" |  | ||||||
|     UserCount.objects.filter(property=property).delete() |     UserCount.objects.filter(property=property).delete() | ||||||
|     StreamCount.objects.filter(property=property).delete() |     StreamCount.objects.filter(property=property).delete() | ||||||
|     RealmCount.objects.filter(property=property).delete() |     RealmCount.objects.filter(property=property).delete() | ||||||
|     InstallationCount.objects.filter(property=property).delete() |     InstallationCount.objects.filter(property=property).delete() | ||||||
|     FillState.objects.filter(property=property).delete() |     FillState.objects.filter(property=property).delete() | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|     dependencies = [("analytics", "0009_remove_messages_to_stream_stat")] |  | ||||||
|  |     dependencies = [('analytics', '0009_remove_messages_to_stream_stat')] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|         migrations.RunPython(clear_message_sent_by_message_type_values), |         migrations.RunPython(clear_message_sent_by_message_type_values), | ||||||
|   | |||||||
| @@ -1,14 +1,14 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
| from django.db import migrations | from django.db import migrations | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor | from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor | ||||||
| from django.db.migrations.state import StateApps | from django.db.migrations.state import StateApps | ||||||
|  |  | ||||||
|  | def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: | ||||||
| def clear_analytics_tables(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None: |     UserCount = apps.get_model('analytics', 'UserCount') | ||||||
|     UserCount = apps.get_model("analytics", "UserCount") |     StreamCount = apps.get_model('analytics', 'StreamCount') | ||||||
|     StreamCount = apps.get_model("analytics", "StreamCount") |     RealmCount = apps.get_model('analytics', 'RealmCount') | ||||||
|     RealmCount = apps.get_model("analytics", "RealmCount") |     InstallationCount = apps.get_model('analytics', 'InstallationCount') | ||||||
|     InstallationCount = apps.get_model("analytics", "InstallationCount") |     FillState = apps.get_model('analytics', 'FillState') | ||||||
|     FillState = apps.get_model("analytics", "FillState") |  | ||||||
|  |  | ||||||
|     UserCount.objects.all().delete() |     UserCount.objects.all().delete() | ||||||
|     StreamCount.objects.all().delete() |     StreamCount.objects.all().delete() | ||||||
| @@ -16,10 +16,10 @@ def clear_analytics_tables(apps: StateApps, schema_editor: BaseDatabaseSchemaEdi | |||||||
|     InstallationCount.objects.all().delete() |     InstallationCount.objects.all().delete() | ||||||
|     FillState.objects.all().delete() |     FillState.objects.all().delete() | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|     dependencies = [ |     dependencies = [ | ||||||
|         ("analytics", "0010_clear_messages_sent_values"), |         ('analytics', '0010_clear_messages_sent_values'), | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|   | |||||||
| @@ -1,41 +1,36 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
| # Generated by Django 1.11.6 on 2018-01-29 08:14 | # Generated by Django 1.11.6 on 2018-01-29 08:14 | ||||||
|  | from __future__ import unicode_literals | ||||||
|  |  | ||||||
| import django.db.models.deletion |  | ||||||
| from django.db import migrations, models | from django.db import migrations, models | ||||||
|  | import django.db.models.deletion | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|     dependencies = [ |     dependencies = [ | ||||||
|         ("analytics", "0011_clear_analytics_tables"), |         ('analytics', '0011_clear_analytics_tables'), | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|         migrations.AlterField( |         migrations.AlterField( | ||||||
|             model_name="installationcount", |             model_name='installationcount', | ||||||
|             name="anomaly", |             name='anomaly', | ||||||
|             field=models.ForeignKey( |             field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'), | ||||||
|                 null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly" |  | ||||||
|             ), |  | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |         migrations.AlterField( | ||||||
|             model_name="realmcount", |             model_name='realmcount', | ||||||
|             name="anomaly", |             name='anomaly', | ||||||
|             field=models.ForeignKey( |             field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'), | ||||||
|                 null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly" |  | ||||||
|             ), |  | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |         migrations.AlterField( | ||||||
|             model_name="streamcount", |             model_name='streamcount', | ||||||
|             name="anomaly", |             name='anomaly', | ||||||
|             field=models.ForeignKey( |             field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'), | ||||||
|                 null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly" |  | ||||||
|             ), |  | ||||||
|         ), |         ), | ||||||
|         migrations.AlterField( |         migrations.AlterField( | ||||||
|             model_name="usercount", |             model_name='usercount', | ||||||
|             name="anomaly", |             name='anomaly', | ||||||
|             field=models.ForeignKey( |             field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='analytics.Anomaly'), | ||||||
|                 null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly" |  | ||||||
|             ), |  | ||||||
|         ), |         ), | ||||||
|     ] |     ] | ||||||
|   | |||||||
| @@ -1,31 +1,34 @@ | |||||||
|  | # -*- coding: utf-8 -*- | ||||||
| # Generated by Django 1.11.18 on 2019-02-02 02:47 | # Generated by Django 1.11.18 on 2019-02-02 02:47 | ||||||
|  | from __future__ import unicode_literals | ||||||
|  |  | ||||||
| from django.db import migrations | from django.db import migrations | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): | class Migration(migrations.Migration): | ||||||
|  |  | ||||||
|     dependencies = [ |     dependencies = [ | ||||||
|         ("analytics", "0012_add_on_delete"), |         ('analytics', '0012_add_on_delete'), | ||||||
|     ] |     ] | ||||||
|  |  | ||||||
|     operations = [ |     operations = [ | ||||||
|         migrations.RemoveField( |         migrations.RemoveField( | ||||||
|             model_name="installationcount", |             model_name='installationcount', | ||||||
|             name="anomaly", |             name='anomaly', | ||||||
|         ), |         ), | ||||||
|         migrations.RemoveField( |         migrations.RemoveField( | ||||||
|             model_name="realmcount", |             model_name='realmcount', | ||||||
|             name="anomaly", |             name='anomaly', | ||||||
|         ), |         ), | ||||||
|         migrations.RemoveField( |         migrations.RemoveField( | ||||||
|             model_name="streamcount", |             model_name='streamcount', | ||||||
|             name="anomaly", |             name='anomaly', | ||||||
|         ), |         ), | ||||||
|         migrations.RemoveField( |         migrations.RemoveField( | ||||||
|             model_name="usercount", |             model_name='usercount', | ||||||
|             name="anomaly", |             name='anomaly', | ||||||
|         ), |         ), | ||||||
|         migrations.DeleteModel( |         migrations.DeleteModel( | ||||||
|             name="Anomaly", |             name='Anomaly', | ||||||
|         ), |         ), | ||||||
|     ] |     ] | ||||||
|   | |||||||
| @@ -1,16 +0,0 @@ | |||||||
| # Generated by Django 1.11.26 on 2020-01-27 04:32 |  | ||||||
|  |  | ||||||
| from django.db import migrations |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|     dependencies = [ |  | ||||||
|         ("analytics", "0013_remove_anomaly"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.RemoveField( |  | ||||||
|             model_name="fillstate", |  | ||||||
|             name="last_modified", |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @@ -1,64 +0,0 @@ | |||||||
| from django.db import migrations |  | ||||||
| from django.db.backends.base.schema import BaseDatabaseSchemaEditor |  | ||||||
| from django.db.migrations.state import StateApps |  | ||||||
| from django.db.models import Count, Sum |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def clear_duplicate_counts(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None: |  | ||||||
|     """This is a preparatory migration for our Analytics tables. |  | ||||||
|  |  | ||||||
|     The backstory is that Django's unique_together indexes do not properly |  | ||||||
|     handle the subgroup=None corner case (allowing duplicate rows that have a |  | ||||||
|     subgroup of None), which meant that in race conditions, rather than updating |  | ||||||
|     an existing row for the property/(realm, stream, user)/time with subgroup=None, Django would |  | ||||||
|     create a duplicate row. |  | ||||||
|  |  | ||||||
|     In the next migration, we'll add a proper constraint to fix this bug, but |  | ||||||
|     we need to fix any existing problematic rows before we can add that constraint. |  | ||||||
|  |  | ||||||
|     We fix this in an appropriate fashion for each type of CountStat object; mainly |  | ||||||
|     this means deleting the extra rows, but for LoggingCountStat objects, we need to |  | ||||||
|     additionally combine the sums. |  | ||||||
|     """ |  | ||||||
|     count_tables = dict( |  | ||||||
|         realm=apps.get_model("analytics", "RealmCount"), |  | ||||||
|         user=apps.get_model("analytics", "UserCount"), |  | ||||||
|         stream=apps.get_model("analytics", "StreamCount"), |  | ||||||
|         installation=apps.get_model("analytics", "InstallationCount"), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     for name, count_table in count_tables.items(): |  | ||||||
|         value = [name, "property", "end_time"] |  | ||||||
|         if name == "installation": |  | ||||||
|             value = ["property", "end_time"] |  | ||||||
|         counts = ( |  | ||||||
|             count_table.objects.filter(subgroup=None) |  | ||||||
|             .values(*value) |  | ||||||
|             .annotate(Count("id"), Sum("value")) |  | ||||||
|             .filter(id__count__gt=1) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         for count in counts: |  | ||||||
|             count.pop("id__count") |  | ||||||
|             total_value = count.pop("value__sum") |  | ||||||
|             duplicate_counts = list(count_table.objects.filter(**count)) |  | ||||||
|             first_count = duplicate_counts[0] |  | ||||||
|             if count["property"] in ["invites_sent::day", "active_users_log:is_bot:day"]: |  | ||||||
|                 # For LoggingCountStat objects, the right fix is to combine the totals; |  | ||||||
|                 # for other CountStat objects, we expect the duplicates to have the same value. |  | ||||||
|                 # And so all we need to do is delete them. |  | ||||||
|                 first_count.value = total_value |  | ||||||
|                 first_count.save() |  | ||||||
|             to_cleanup = duplicate_counts[1:] |  | ||||||
|             for duplicate_count in to_cleanup: |  | ||||||
|                 duplicate_count.delete() |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|     dependencies = [ |  | ||||||
|         ("analytics", "0014_remove_fillstate_last_modified"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.RunPython(clear_duplicate_counts, reverse_code=migrations.RunPython.noop), |  | ||||||
|     ] |  | ||||||
| @@ -1,92 +0,0 @@ | |||||||
| # Generated by Django 2.2.10 on 2020-02-29 19:40 |  | ||||||
|  |  | ||||||
| from django.db import migrations, models |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class Migration(migrations.Migration): |  | ||||||
|     dependencies = [ |  | ||||||
|         ("analytics", "0015_clear_duplicate_counts"), |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     operations = [ |  | ||||||
|         migrations.AlterUniqueTogether( |  | ||||||
|             name="installationcount", |  | ||||||
|             unique_together=set(), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterUniqueTogether( |  | ||||||
|             name="realmcount", |  | ||||||
|             unique_together=set(), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterUniqueTogether( |  | ||||||
|             name="streamcount", |  | ||||||
|             unique_together=set(), |  | ||||||
|         ), |  | ||||||
|         migrations.AlterUniqueTogether( |  | ||||||
|             name="usercount", |  | ||||||
|             unique_together=set(), |  | ||||||
|         ), |  | ||||||
|         migrations.AddConstraint( |  | ||||||
|             model_name="installationcount", |  | ||||||
|             constraint=models.UniqueConstraint( |  | ||||||
|                 condition=models.Q(subgroup__isnull=False), |  | ||||||
|                 fields=("property", "subgroup", "end_time"), |  | ||||||
|                 name="unique_installation_count", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddConstraint( |  | ||||||
|             model_name="installationcount", |  | ||||||
|             constraint=models.UniqueConstraint( |  | ||||||
|                 condition=models.Q(subgroup__isnull=True), |  | ||||||
|                 fields=("property", "end_time"), |  | ||||||
|                 name="unique_installation_count_null_subgroup", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddConstraint( |  | ||||||
|             model_name="realmcount", |  | ||||||
|             constraint=models.UniqueConstraint( |  | ||||||
|                 condition=models.Q(subgroup__isnull=False), |  | ||||||
|                 fields=("realm", "property", "subgroup", "end_time"), |  | ||||||
|                 name="unique_realm_count", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddConstraint( |  | ||||||
|             model_name="realmcount", |  | ||||||
|             constraint=models.UniqueConstraint( |  | ||||||
|                 condition=models.Q(subgroup__isnull=True), |  | ||||||
|                 fields=("realm", "property", "end_time"), |  | ||||||
|                 name="unique_realm_count_null_subgroup", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddConstraint( |  | ||||||
|             model_name="streamcount", |  | ||||||
|             constraint=models.UniqueConstraint( |  | ||||||
|                 condition=models.Q(subgroup__isnull=False), |  | ||||||
|                 fields=("stream", "property", "subgroup", "end_time"), |  | ||||||
|                 name="unique_stream_count", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddConstraint( |  | ||||||
|             model_name="streamcount", |  | ||||||
|             constraint=models.UniqueConstraint( |  | ||||||
|                 condition=models.Q(subgroup__isnull=True), |  | ||||||
|                 fields=("stream", "property", "end_time"), |  | ||||||
|                 name="unique_stream_count_null_subgroup", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddConstraint( |  | ||||||
|             model_name="usercount", |  | ||||||
|             constraint=models.UniqueConstraint( |  | ||||||
|                 condition=models.Q(subgroup__isnull=False), |  | ||||||
|                 fields=("user", "property", "subgroup", "end_time"), |  | ||||||
|                 name="unique_user_count", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|         migrations.AddConstraint( |  | ||||||
|             model_name="usercount", |  | ||||||
|             constraint=models.UniqueConstraint( |  | ||||||
|                 condition=models.Q(subgroup__isnull=True), |  | ||||||
|                 fields=("user", "property", "end_time"), |  | ||||||
|                 name="unique_user_count_null_subgroup", |  | ||||||
|             ), |  | ||||||
|         ), |  | ||||||
|     ] |  | ||||||
| @@ -1,137 +1,92 @@ | |||||||
| import datetime | import datetime | ||||||
|  | from typing import Optional | ||||||
|  |  | ||||||
| from django.db import models | from django.db import models | ||||||
| from django.db.models import Q, UniqueConstraint |  | ||||||
|  |  | ||||||
| from zerver.lib.timestamp import floor_to_day | from zerver.lib.timestamp import floor_to_day | ||||||
| from zerver.models import Realm, Stream, UserProfile | from zerver.models import Realm, Stream, UserProfile | ||||||
|  |  | ||||||
|  |  | ||||||
| class FillState(models.Model): | class FillState(models.Model): | ||||||
|     property = models.CharField(max_length=40, unique=True) |     property = models.CharField(max_length=40, unique=True)  # type: str | ||||||
|     end_time = models.DateTimeField() |     end_time = models.DateTimeField()  # type: datetime.datetime | ||||||
|  |  | ||||||
|     # Valid states are {DONE, STARTED} |     # Valid states are {DONE, STARTED} | ||||||
|     DONE = 1 |     DONE = 1 | ||||||
|     STARTED = 2 |     STARTED = 2 | ||||||
|     state = models.PositiveSmallIntegerField() |     state = models.PositiveSmallIntegerField()  # type: int | ||||||
|  |  | ||||||
|  |     last_modified = models.DateTimeField(auto_now=True)  # type: datetime.datetime | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |     def __str__(self) -> str: | ||||||
|         return f"{self.property} {self.end_time} {self.state}" |         return "<FillState: %s %s %s>" % (self.property, self.end_time, self.state) | ||||||
|  |  | ||||||
|  |  | ||||||
| # The earliest/starting end_time in FillState | # The earliest/starting end_time in FillState | ||||||
| # We assume there is at least one realm | # We assume there is at least one realm | ||||||
| def installation_epoch() -> datetime.datetime: | def installation_epoch() -> datetime.datetime: | ||||||
|     earliest_realm_creation = Realm.objects.aggregate(models.Min("date_created"))[ |     earliest_realm_creation = Realm.objects.aggregate(models.Min('date_created'))['date_created__min'] | ||||||
|         "date_created__min" |  | ||||||
|     ] |  | ||||||
|     return floor_to_day(earliest_realm_creation) |     return floor_to_day(earliest_realm_creation) | ||||||
|  |  | ||||||
|  | def last_successful_fill(property: str) -> Optional[datetime.datetime]: | ||||||
|  |     fillstate = FillState.objects.filter(property=property).first() | ||||||
|  |     if fillstate is None: | ||||||
|  |         return None | ||||||
|  |     if fillstate.state == FillState.DONE: | ||||||
|  |         return fillstate.end_time | ||||||
|  |     return fillstate.end_time - datetime.timedelta(hours=1) | ||||||
|  |  | ||||||
| class BaseCount(models.Model): | class BaseCount(models.Model): | ||||||
|     # Note: When inheriting from BaseCount, you may want to rearrange |     # Note: When inheriting from BaseCount, you may want to rearrange | ||||||
|     # the order of the columns in the migration to make sure they |     # the order of the columns in the migration to make sure they | ||||||
|     # match how you'd like the table to be arranged. |     # match how you'd like the table to be arranged. | ||||||
|     property = models.CharField(max_length=32) |     property = models.CharField(max_length=32)  # type: str | ||||||
|     subgroup = models.CharField(max_length=16, null=True) |     subgroup = models.CharField(max_length=16, null=True)  # type: Optional[str] | ||||||
|     end_time = models.DateTimeField() |     end_time = models.DateTimeField()  # type: datetime.datetime | ||||||
|     value = models.BigIntegerField() |     value = models.BigIntegerField()  # type: int | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         abstract = True |         abstract = True | ||||||
|  |  | ||||||
|  |  | ||||||
| class InstallationCount(BaseCount): | class InstallationCount(BaseCount): | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         # Handles invalid duplicate InstallationCount data |         unique_together = ("property", "subgroup", "end_time") | ||||||
|         constraints = [ |  | ||||||
|             UniqueConstraint( |  | ||||||
|                 fields=["property", "subgroup", "end_time"], |  | ||||||
|                 condition=Q(subgroup__isnull=False), |  | ||||||
|                 name="unique_installation_count", |  | ||||||
|             ), |  | ||||||
|             UniqueConstraint( |  | ||||||
|                 fields=["property", "end_time"], |  | ||||||
|                 condition=Q(subgroup__isnull=True), |  | ||||||
|                 name="unique_installation_count_null_subgroup", |  | ||||||
|             ), |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |     def __str__(self) -> str: | ||||||
|         return f"{self.property} {self.subgroup} {self.value}" |         return "<InstallationCount: %s %s %s>" % (self.property, self.subgroup, self.value) | ||||||
|  |  | ||||||
|  |  | ||||||
| class RealmCount(BaseCount): | class RealmCount(BaseCount): | ||||||
|     realm = models.ForeignKey(Realm, on_delete=models.CASCADE) |     realm = models.ForeignKey(Realm, on_delete=models.CASCADE) | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         # Handles invalid duplicate RealmCount data |         unique_together = ("realm", "property", "subgroup", "end_time") | ||||||
|         constraints = [ |  | ||||||
|             UniqueConstraint( |  | ||||||
|                 fields=["realm", "property", "subgroup", "end_time"], |  | ||||||
|                 condition=Q(subgroup__isnull=False), |  | ||||||
|                 name="unique_realm_count", |  | ||||||
|             ), |  | ||||||
|             UniqueConstraint( |  | ||||||
|                 fields=["realm", "property", "end_time"], |  | ||||||
|                 condition=Q(subgroup__isnull=True), |  | ||||||
|                 name="unique_realm_count_null_subgroup", |  | ||||||
|             ), |  | ||||||
|         ] |  | ||||||
|         index_together = ["property", "end_time"] |         index_together = ["property", "end_time"] | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |     def __str__(self) -> str: | ||||||
|         return f"{self.realm!r} {self.property} {self.subgroup} {self.value}" |         return "<RealmCount: %s %s %s %s>" % (self.realm, self.property, self.subgroup, self.value) | ||||||
|  |  | ||||||
|  |  | ||||||
| class UserCount(BaseCount): | class UserCount(BaseCount): | ||||||
|     user = models.ForeignKey(UserProfile, on_delete=models.CASCADE) |     user = models.ForeignKey(UserProfile, on_delete=models.CASCADE) | ||||||
|     realm = models.ForeignKey(Realm, on_delete=models.CASCADE) |     realm = models.ForeignKey(Realm, on_delete=models.CASCADE) | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         # Handles invalid duplicate UserCount data |         unique_together = ("user", "property", "subgroup", "end_time") | ||||||
|         constraints = [ |  | ||||||
|             UniqueConstraint( |  | ||||||
|                 fields=["user", "property", "subgroup", "end_time"], |  | ||||||
|                 condition=Q(subgroup__isnull=False), |  | ||||||
|                 name="unique_user_count", |  | ||||||
|             ), |  | ||||||
|             UniqueConstraint( |  | ||||||
|                 fields=["user", "property", "end_time"], |  | ||||||
|                 condition=Q(subgroup__isnull=True), |  | ||||||
|                 name="unique_user_count_null_subgroup", |  | ||||||
|             ), |  | ||||||
|         ] |  | ||||||
|         # This index dramatically improves the performance of |         # This index dramatically improves the performance of | ||||||
|         # aggregating from users to realms |         # aggregating from users to realms | ||||||
|         index_together = ["property", "realm", "end_time"] |         index_together = ["property", "realm", "end_time"] | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |     def __str__(self) -> str: | ||||||
|         return f"{self.user!r} {self.property} {self.subgroup} {self.value}" |         return "<UserCount: %s %s %s %s>" % (self.user, self.property, self.subgroup, self.value) | ||||||
|  |  | ||||||
|  |  | ||||||
| class StreamCount(BaseCount): | class StreamCount(BaseCount): | ||||||
|     stream = models.ForeignKey(Stream, on_delete=models.CASCADE) |     stream = models.ForeignKey(Stream, on_delete=models.CASCADE) | ||||||
|     realm = models.ForeignKey(Realm, on_delete=models.CASCADE) |     realm = models.ForeignKey(Realm, on_delete=models.CASCADE) | ||||||
|  |  | ||||||
|     class Meta: |     class Meta: | ||||||
|         # Handles invalid duplicate StreamCount data |         unique_together = ("stream", "property", "subgroup", "end_time") | ||||||
|         constraints = [ |  | ||||||
|             UniqueConstraint( |  | ||||||
|                 fields=["stream", "property", "subgroup", "end_time"], |  | ||||||
|                 condition=Q(subgroup__isnull=False), |  | ||||||
|                 name="unique_stream_count", |  | ||||||
|             ), |  | ||||||
|             UniqueConstraint( |  | ||||||
|                 fields=["stream", "property", "end_time"], |  | ||||||
|                 condition=Q(subgroup__isnull=True), |  | ||||||
|                 name="unique_stream_count_null_subgroup", |  | ||||||
|             ), |  | ||||||
|         ] |  | ||||||
|         # This index dramatically improves the performance of |         # This index dramatically improves the performance of | ||||||
|         # aggregating from streams to realms |         # aggregating from streams to realms | ||||||
|         index_together = ["property", "realm", "end_time"] |         index_together = ["property", "realm", "end_time"] | ||||||
|  |  | ||||||
|     def __str__(self) -> str: |     def __str__(self) -> str: | ||||||
|         return f"{self.stream!r} {self.property} {self.subgroup} {self.value} {self.id}" |         return "<StreamCount: %s %s %s %s %s>" % ( | ||||||
|  |             self.stream, self.property, self.subgroup, self.value, self.id) | ||||||
|   | |||||||
| @@ -1,48 +0,0 @@ | |||||||
| from unittest import mock |  | ||||||
|  |  | ||||||
| from django.utils.timezone import now as timezone_now |  | ||||||
|  |  | ||||||
| from zerver.lib.test_classes import ZulipTestCase |  | ||||||
| from zerver.models import Client, UserActivity, UserProfile, flush_per_request_caches |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class ActivityTest(ZulipTestCase): |  | ||||||
|     @mock.patch("stripe.Customer.list", return_value=[]) |  | ||||||
|     def test_activity(self, unused_mock: mock.Mock) -> None: |  | ||||||
|         self.login("hamlet") |  | ||||||
|         client, _ = Client.objects.get_or_create(name="website") |  | ||||||
|         query = "/json/messages/flags" |  | ||||||
|         last_visit = timezone_now() |  | ||||||
|         count = 150 |  | ||||||
|         for activity_user_profile in UserProfile.objects.all(): |  | ||||||
|             UserActivity.objects.get_or_create( |  | ||||||
|                 user_profile=activity_user_profile, |  | ||||||
|                 client=client, |  | ||||||
|                 query=query, |  | ||||||
|                 count=count, |  | ||||||
|                 last_visit=last_visit, |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         # Fails when not staff |  | ||||||
|         result = self.client_get("/activity") |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|  |  | ||||||
|         user_profile = self.example_user("hamlet") |  | ||||||
|         user_profile.is_staff = True |  | ||||||
|         user_profile.save(update_fields=["is_staff"]) |  | ||||||
|  |  | ||||||
|         flush_per_request_caches() |  | ||||||
|         with self.assert_database_query_count(18): |  | ||||||
|             result = self.client_get("/activity") |  | ||||||
|             self.assertEqual(result.status_code, 200) |  | ||||||
|  |  | ||||||
|         flush_per_request_caches() |  | ||||||
|         with self.assert_database_query_count(8): |  | ||||||
|             result = self.client_get("/realm_activity/zulip/") |  | ||||||
|             self.assertEqual(result.status_code, 200) |  | ||||||
|  |  | ||||||
|         iago = self.example_user("iago") |  | ||||||
|         flush_per_request_caches() |  | ||||||
|         with self.assert_database_query_count(5): |  | ||||||
|             result = self.client_get(f"/user_activity/{iago.id}/") |  | ||||||
|             self.assertEqual(result.status_code, 200) |  | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -2,39 +2,28 @@ from analytics.lib.counts import CountStat | |||||||
| from analytics.lib.fixtures import generate_time_series_data | from analytics.lib.fixtures import generate_time_series_data | ||||||
| from zerver.lib.test_classes import ZulipTestCase | from zerver.lib.test_classes import ZulipTestCase | ||||||
|  |  | ||||||
|  |  | ||||||
| # A very light test suite; the code being tested is not run in production. | # A very light test suite; the code being tested is not run in production. | ||||||
| class TestFixtures(ZulipTestCase): | class TestFixtures(ZulipTestCase): | ||||||
|     def test_deterministic_settings(self) -> None: |     def test_deterministic_settings(self) -> None: | ||||||
|         # test basic business_hour / non_business_hour calculation |         # test basic business_hour / non_business_hour calculation | ||||||
|         # test we get an array of the right length with frequency=CountStat.DAY |         # test we get an array of the right length with frequency=CountStat.DAY | ||||||
|         data = generate_time_series_data( |         data = generate_time_series_data( | ||||||
|             days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0 |             days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0) | ||||||
|         ) |  | ||||||
|         self.assertEqual(data, [400, 400, 400, 400, 400, 360, 360]) |         self.assertEqual(data, [400, 400, 400, 400, 400, 360, 360]) | ||||||
|  |  | ||||||
|         data = generate_time_series_data( |         data = generate_time_series_data( | ||||||
|             days=1, |             days=1, business_hours_base=2000, non_business_hours_base=1500, | ||||||
|             business_hours_base=2000, |             growth=2, spikiness=0, frequency=CountStat.HOUR) | ||||||
|             non_business_hours_base=1500, |  | ||||||
|             growth=2, |  | ||||||
|             spikiness=0, |  | ||||||
|             frequency=CountStat.HOUR, |  | ||||||
|         ) |  | ||||||
|         # test we get an array of the right length with frequency=CountStat.HOUR |         # test we get an array of the right length with frequency=CountStat.HOUR | ||||||
|         self.assert_length(data, 24) |         self.assertEqual(len(data), 24) | ||||||
|         # test that growth doesn't affect the first data point |         # test that growth doesn't affect the first data point | ||||||
|         self.assertEqual(data[0], 2000) |         self.assertEqual(data[0], 2000) | ||||||
|         # test that the last data point is growth times what it otherwise would be |         # test that the last data point is growth times what it otherwise would be | ||||||
|         self.assertEqual(data[-1], 1500 * 2) |         self.assertEqual(data[-1], 1500*2) | ||||||
|  |  | ||||||
|         # test autocorrelation == 1, since that's the easiest value to test |         # test autocorrelation == 1, since that's the easiest value to test | ||||||
|         data = generate_time_series_data( |         data = generate_time_series_data( | ||||||
|             days=1, |             days=1, business_hours_base=2000, non_business_hours_base=2000, | ||||||
|             business_hours_base=2000, |             autocorrelation=1, frequency=CountStat.HOUR) | ||||||
|             non_business_hours_base=2000, |  | ||||||
|             autocorrelation=1, |  | ||||||
|             frequency=CountStat.HOUR, |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(data[0], data[1]) |         self.assertEqual(data[0], data[1]) | ||||||
|         self.assertEqual(data[0], data[-1]) |         self.assertEqual(data[0], data[-1]) | ||||||
|   | |||||||
| @@ -1,629 +0,0 @@ | |||||||
| from datetime import datetime, timedelta, timezone |  | ||||||
| from typing import List, Optional |  | ||||||
|  |  | ||||||
| from django.utils.timezone import now as timezone_now |  | ||||||
|  |  | ||||||
| from analytics.lib.counts import COUNT_STATS, CountStat |  | ||||||
| from analytics.lib.time_utils import time_range |  | ||||||
| from analytics.models import FillState, RealmCount, UserCount |  | ||||||
| from analytics.views.stats import rewrite_client_arrays, sort_by_totals, sort_client_labels |  | ||||||
| from zerver.lib.test_classes import ZulipTestCase |  | ||||||
| from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, datetime_to_timestamp |  | ||||||
| from zerver.models import Client, get_realm |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestStatsEndpoint(ZulipTestCase): |  | ||||||
|     def test_stats(self) -> None: |  | ||||||
|         self.user = self.example_user("hamlet") |  | ||||||
|         self.login_user(self.user) |  | ||||||
|         result = self.client_get("/stats") |  | ||||||
|         self.assertEqual(result.status_code, 200) |  | ||||||
|         # Check that we get something back |  | ||||||
|         self.assert_in_response("Zulip analytics for", result) |  | ||||||
|  |  | ||||||
|     def test_guest_user_cant_access_stats(self) -> None: |  | ||||||
|         self.user = self.example_user("polonius") |  | ||||||
|         self.login_user(self.user) |  | ||||||
|         result = self.client_get("/stats") |  | ||||||
|         self.assert_json_error(result, "Not allowed for guest users", 400) |  | ||||||
|  |  | ||||||
|         result = self.client_get("/json/analytics/chart_data") |  | ||||||
|         self.assert_json_error(result, "Not allowed for guest users", 400) |  | ||||||
|  |  | ||||||
|     def test_stats_for_realm(self) -> None: |  | ||||||
|         user = self.example_user("hamlet") |  | ||||||
|         self.login_user(user) |  | ||||||
|  |  | ||||||
|         result = self.client_get("/stats/realm/zulip/") |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|  |  | ||||||
|         result = self.client_get("/stats/realm/not_existing_realm/") |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|  |  | ||||||
|         user = self.example_user("hamlet") |  | ||||||
|         user.is_staff = True |  | ||||||
|         user.save(update_fields=["is_staff"]) |  | ||||||
|  |  | ||||||
|         result = self.client_get("/stats/realm/not_existing_realm/") |  | ||||||
|         self.assertEqual(result.status_code, 404) |  | ||||||
|  |  | ||||||
|         result = self.client_get("/stats/realm/zulip/") |  | ||||||
|         self.assertEqual(result.status_code, 200) |  | ||||||
|         self.assert_in_response("Zulip analytics for", result) |  | ||||||
|  |  | ||||||
|     def test_stats_for_installation(self) -> None: |  | ||||||
|         user = self.example_user("hamlet") |  | ||||||
|         self.login_user(user) |  | ||||||
|  |  | ||||||
|         result = self.client_get("/stats/installation") |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|  |  | ||||||
|         user = self.example_user("hamlet") |  | ||||||
|         user.is_staff = True |  | ||||||
|         user.save(update_fields=["is_staff"]) |  | ||||||
|  |  | ||||||
|         result = self.client_get("/stats/installation") |  | ||||||
|         self.assertEqual(result.status_code, 200) |  | ||||||
|         self.assert_in_response("Zulip analytics for", result) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestGetChartData(ZulipTestCase): |  | ||||||
|     def setUp(self) -> None: |  | ||||||
|         super().setUp() |  | ||||||
|         self.realm = get_realm("zulip") |  | ||||||
|         self.user = self.example_user("hamlet") |  | ||||||
|         self.login_user(self.user) |  | ||||||
|         self.end_times_hour = [ |  | ||||||
|             ceiling_to_hour(self.realm.date_created) + timedelta(hours=i) for i in range(4) |  | ||||||
|         ] |  | ||||||
|         self.end_times_day = [ |  | ||||||
|             ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(4) |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|     def data(self, i: int) -> List[int]: |  | ||||||
|         return [0, 0, i, 0] |  | ||||||
|  |  | ||||||
|     def insert_data( |  | ||||||
|         self, stat: CountStat, realm_subgroups: List[Optional[str]], user_subgroups: List[str] |  | ||||||
|     ) -> None: |  | ||||||
|         if stat.frequency == CountStat.HOUR: |  | ||||||
|             insert_time = self.end_times_hour[2] |  | ||||||
|             fill_time = self.end_times_hour[-1] |  | ||||||
|         if stat.frequency == CountStat.DAY: |  | ||||||
|             insert_time = self.end_times_day[2] |  | ||||||
|             fill_time = self.end_times_day[-1] |  | ||||||
|  |  | ||||||
|         RealmCount.objects.bulk_create( |  | ||||||
|             RealmCount( |  | ||||||
|                 property=stat.property, |  | ||||||
|                 subgroup=subgroup, |  | ||||||
|                 end_time=insert_time, |  | ||||||
|                 value=100 + i, |  | ||||||
|                 realm=self.realm, |  | ||||||
|             ) |  | ||||||
|             for i, subgroup in enumerate(realm_subgroups) |  | ||||||
|         ) |  | ||||||
|         UserCount.objects.bulk_create( |  | ||||||
|             UserCount( |  | ||||||
|                 property=stat.property, |  | ||||||
|                 subgroup=subgroup, |  | ||||||
|                 end_time=insert_time, |  | ||||||
|                 value=200 + i, |  | ||||||
|                 realm=self.realm, |  | ||||||
|                 user=self.user, |  | ||||||
|             ) |  | ||||||
|             for i, subgroup in enumerate(user_subgroups) |  | ||||||
|         ) |  | ||||||
|         FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE) |  | ||||||
|  |  | ||||||
|     def test_number_of_humans(self) -> None: |  | ||||||
|         stat = COUNT_STATS["realm_active_humans::day"] |  | ||||||
|         self.insert_data(stat, [None], []) |  | ||||||
|         stat = COUNT_STATS["1day_actives::day"] |  | ||||||
|         self.insert_data(stat, [None], []) |  | ||||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] |  | ||||||
|         self.insert_data(stat, ["false"], []) |  | ||||||
|         result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"}) |  | ||||||
|         data = self.assert_json_success(result) |  | ||||||
|         self.assertEqual( |  | ||||||
|             data, |  | ||||||
|             { |  | ||||||
|                 "msg": "", |  | ||||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], |  | ||||||
|                 "frequency": CountStat.DAY, |  | ||||||
|                 "everyone": { |  | ||||||
|                     "_1day": self.data(100), |  | ||||||
|                     "_15day": self.data(100), |  | ||||||
|                     "all_time": self.data(100), |  | ||||||
|                 }, |  | ||||||
|                 "display_order": None, |  | ||||||
|                 "result": "success", |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_messages_sent_over_time(self) -> None: |  | ||||||
|         stat = COUNT_STATS["messages_sent:is_bot:hour"] |  | ||||||
|         self.insert_data(stat, ["true", "false"], ["false"]) |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} |  | ||||||
|         ) |  | ||||||
|         data = self.assert_json_success(result) |  | ||||||
|         self.assertEqual( |  | ||||||
|             data, |  | ||||||
|             { |  | ||||||
|                 "msg": "", |  | ||||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour], |  | ||||||
|                 "frequency": CountStat.HOUR, |  | ||||||
|                 "everyone": {"bot": self.data(100), "human": self.data(101)}, |  | ||||||
|                 "user": {"bot": self.data(0), "human": self.data(200)}, |  | ||||||
|                 "display_order": None, |  | ||||||
|                 "result": "success", |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_messages_sent_by_message_type(self) -> None: |  | ||||||
|         stat = COUNT_STATS["messages_sent:message_type:day"] |  | ||||||
|         self.insert_data( |  | ||||||
|             stat, ["public_stream", "private_message"], ["public_stream", "private_stream"] |  | ||||||
|         ) |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"} |  | ||||||
|         ) |  | ||||||
|         data = self.assert_json_success(result) |  | ||||||
|         self.assertEqual( |  | ||||||
|             data, |  | ||||||
|             { |  | ||||||
|                 "msg": "", |  | ||||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], |  | ||||||
|                 "frequency": CountStat.DAY, |  | ||||||
|                 "everyone": { |  | ||||||
|                     "Public streams": self.data(100), |  | ||||||
|                     "Private streams": self.data(0), |  | ||||||
|                     "Direct messages": self.data(101), |  | ||||||
|                     "Group direct messages": self.data(0), |  | ||||||
|                 }, |  | ||||||
|                 "user": { |  | ||||||
|                     "Public streams": self.data(200), |  | ||||||
|                     "Private streams": self.data(201), |  | ||||||
|                     "Direct messages": self.data(0), |  | ||||||
|                     "Group direct messages": self.data(0), |  | ||||||
|                 }, |  | ||||||
|                 "display_order": [ |  | ||||||
|                     "Direct messages", |  | ||||||
|                     "Public streams", |  | ||||||
|                     "Private streams", |  | ||||||
|                     "Group direct messages", |  | ||||||
|                 ], |  | ||||||
|                 "result": "success", |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_messages_sent_by_client(self) -> None: |  | ||||||
|         stat = COUNT_STATS["messages_sent:client:day"] |  | ||||||
|         client1 = Client.objects.create(name="client 1") |  | ||||||
|         client2 = Client.objects.create(name="client 2") |  | ||||||
|         client3 = Client.objects.create(name="client 3") |  | ||||||
|         client4 = Client.objects.create(name="client 4") |  | ||||||
|         self.insert_data( |  | ||||||
|             stat, |  | ||||||
|             [str(client4.id), str(client3.id), str(client2.id)], |  | ||||||
|             [str(client3.id), str(client1.id)], |  | ||||||
|         ) |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"} |  | ||||||
|         ) |  | ||||||
|         data = self.assert_json_success(result) |  | ||||||
|         self.assertEqual( |  | ||||||
|             data, |  | ||||||
|             { |  | ||||||
|                 "msg": "", |  | ||||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day], |  | ||||||
|                 "frequency": CountStat.DAY, |  | ||||||
|                 "everyone": { |  | ||||||
|                     "client 4": self.data(100), |  | ||||||
|                     "client 3": self.data(101), |  | ||||||
|                     "client 2": self.data(102), |  | ||||||
|                 }, |  | ||||||
|                 "user": {"client 3": self.data(200), "client 1": self.data(201)}, |  | ||||||
|                 "display_order": ["client 1", "client 2", "client 3", "client 4"], |  | ||||||
|                 "result": "success", |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_messages_read_over_time(self) -> None: |  | ||||||
|         stat = COUNT_STATS["messages_read::hour"] |  | ||||||
|         self.insert_data(stat, [None], []) |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "messages_read_over_time"} |  | ||||||
|         ) |  | ||||||
|         data = self.assert_json_success(result) |  | ||||||
|         self.assertEqual( |  | ||||||
|             data, |  | ||||||
|             { |  | ||||||
|                 "msg": "", |  | ||||||
|                 "end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour], |  | ||||||
|                 "frequency": CountStat.HOUR, |  | ||||||
|                 "everyone": {"read": self.data(100)}, |  | ||||||
|                 "user": {"read": self.data(0)}, |  | ||||||
|                 "display_order": None, |  | ||||||
|                 "result": "success", |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_include_empty_subgroups(self) -> None: |  | ||||||
|         FillState.objects.create( |  | ||||||
|             property="realm_active_humans::day", |  | ||||||
|             end_time=self.end_times_day[0], |  | ||||||
|             state=FillState.DONE, |  | ||||||
|         ) |  | ||||||
|         result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"}) |  | ||||||
|         data = self.assert_json_success(result) |  | ||||||
|         self.assertEqual(data["everyone"], {"_1day": [0], "_15day": [0], "all_time": [0]}) |  | ||||||
|         self.assertFalse("user" in data) |  | ||||||
|  |  | ||||||
|         FillState.objects.create( |  | ||||||
|             property="messages_sent:is_bot:hour", |  | ||||||
|             end_time=self.end_times_hour[0], |  | ||||||
|             state=FillState.DONE, |  | ||||||
|         ) |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} |  | ||||||
|         ) |  | ||||||
|         data = self.assert_json_success(result) |  | ||||||
|         self.assertEqual(data["everyone"], {"human": [0], "bot": [0]}) |  | ||||||
|         self.assertEqual(data["user"], {"human": [0], "bot": [0]}) |  | ||||||
|  |  | ||||||
|         FillState.objects.create( |  | ||||||
|             property="messages_sent:message_type:day", |  | ||||||
|             end_time=self.end_times_day[0], |  | ||||||
|             state=FillState.DONE, |  | ||||||
|         ) |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"} |  | ||||||
|         ) |  | ||||||
|         data = self.assert_json_success(result) |  | ||||||
|         self.assertEqual( |  | ||||||
|             data["everyone"], |  | ||||||
|             { |  | ||||||
|                 "Public streams": [0], |  | ||||||
|                 "Private streams": [0], |  | ||||||
|                 "Direct messages": [0], |  | ||||||
|                 "Group direct messages": [0], |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|         self.assertEqual( |  | ||||||
|             data["user"], |  | ||||||
|             { |  | ||||||
|                 "Public streams": [0], |  | ||||||
|                 "Private streams": [0], |  | ||||||
|                 "Direct messages": [0], |  | ||||||
|                 "Group direct messages": [0], |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         FillState.objects.create( |  | ||||||
|             property="messages_sent:client:day", |  | ||||||
|             end_time=self.end_times_day[0], |  | ||||||
|             state=FillState.DONE, |  | ||||||
|         ) |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"} |  | ||||||
|         ) |  | ||||||
|         data = self.assert_json_success(result) |  | ||||||
|         self.assertEqual(data["everyone"], {}) |  | ||||||
|         self.assertEqual(data["user"], {}) |  | ||||||
|  |  | ||||||
|     def test_start_and_end(self) -> None: |  | ||||||
|         stat = COUNT_STATS["realm_active_humans::day"] |  | ||||||
|         self.insert_data(stat, [None], []) |  | ||||||
|         stat = COUNT_STATS["1day_actives::day"] |  | ||||||
|         self.insert_data(stat, [None], []) |  | ||||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] |  | ||||||
|         self.insert_data(stat, ["false"], []) |  | ||||||
|         end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day] |  | ||||||
|  |  | ||||||
|         # valid start and end |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", |  | ||||||
|             { |  | ||||||
|                 "chart_name": "number_of_humans", |  | ||||||
|                 "start": end_time_timestamps[1], |  | ||||||
|                 "end": end_time_timestamps[2], |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|         data = self.assert_json_success(result) |  | ||||||
|         self.assertEqual(data["end_times"], end_time_timestamps[1:3]) |  | ||||||
|         self.assertEqual( |  | ||||||
|             data["everyone"], {"_1day": [0, 100], "_15day": [0, 100], "all_time": [0, 100]} |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         # start later then end |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", |  | ||||||
|             { |  | ||||||
|                 "chart_name": "number_of_humans", |  | ||||||
|                 "start": end_time_timestamps[2], |  | ||||||
|                 "end": end_time_timestamps[1], |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|         self.assert_json_error_contains(result, "Start time is later than") |  | ||||||
|  |  | ||||||
|     def test_min_length(self) -> None: |  | ||||||
|         stat = COUNT_STATS["realm_active_humans::day"] |  | ||||||
|         self.insert_data(stat, [None], []) |  | ||||||
|         stat = COUNT_STATS["1day_actives::day"] |  | ||||||
|         self.insert_data(stat, [None], []) |  | ||||||
|         stat = COUNT_STATS["active_users_audit:is_bot:day"] |  | ||||||
|         self.insert_data(stat, ["false"], []) |  | ||||||
|         # test min_length is too short to change anything |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 2} |  | ||||||
|         ) |  | ||||||
|         data = self.assert_json_success(result) |  | ||||||
|         self.assertEqual( |  | ||||||
|             data["end_times"], [datetime_to_timestamp(dt) for dt in self.end_times_day] |  | ||||||
|         ) |  | ||||||
|         self.assertEqual( |  | ||||||
|             data["everyone"], |  | ||||||
|             {"_1day": self.data(100), "_15day": self.data(100), "all_time": self.data(100)}, |  | ||||||
|         ) |  | ||||||
|         # test min_length larger than filled data |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 5} |  | ||||||
|         ) |  | ||||||
|         data = self.assert_json_success(result) |  | ||||||
|         end_times = [ |  | ||||||
|             ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4) |  | ||||||
|         ] |  | ||||||
|         self.assertEqual(data["end_times"], [datetime_to_timestamp(dt) for dt in end_times]) |  | ||||||
|         self.assertEqual( |  | ||||||
|             data["everyone"], |  | ||||||
|             { |  | ||||||
|                 "_1day": [0, *self.data(100)], |  | ||||||
|                 "_15day": [0, *self.data(100)], |  | ||||||
|                 "all_time": [0, *self.data(100)], |  | ||||||
|             }, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
    def test_non_existent_chart(self) -> None:
        """An unrecognized chart_name should produce a JSON error response."""
        result = self.client_get("/json/analytics/chart_data", {"chart_name": "does_not_exist"})
        self.assert_json_error_contains(result, "Unknown chart name")
|  |  | ||||||
|     def test_analytics_not_running(self) -> None: |  | ||||||
|         realm = get_realm("zulip") |  | ||||||
|  |  | ||||||
|         self.assertEqual(FillState.objects.count(), 0) |  | ||||||
|  |  | ||||||
|         realm.date_created = timezone_now() - timedelta(days=3) |  | ||||||
|         realm.save(update_fields=["date_created"]) |  | ||||||
|         with self.assertLogs(level="WARNING") as m: |  | ||||||
|             result = self.client_get( |  | ||||||
|                 "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} |  | ||||||
|             ) |  | ||||||
|             self.assertEqual( |  | ||||||
|                 m.output, |  | ||||||
|                 [ |  | ||||||
|                     f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?" |  | ||||||
|                 ], |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         self.assert_json_error_contains(result, "No analytics data available") |  | ||||||
|  |  | ||||||
|         realm.date_created = timezone_now() - timedelta(days=1, hours=2) |  | ||||||
|         realm.save(update_fields=["date_created"]) |  | ||||||
|         with self.assertLogs(level="WARNING") as m: |  | ||||||
|             result = self.client_get( |  | ||||||
|                 "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} |  | ||||||
|             ) |  | ||||||
|             self.assertEqual( |  | ||||||
|                 m.output, |  | ||||||
|                 [ |  | ||||||
|                     f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?" |  | ||||||
|                 ], |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         self.assert_json_error_contains(result, "No analytics data available") |  | ||||||
|  |  | ||||||
|         realm.date_created = timezone_now() - timedelta(days=1, minutes=10) |  | ||||||
|         realm.save(update_fields=["date_created"]) |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} |  | ||||||
|         ) |  | ||||||
|         self.assert_json_success(result) |  | ||||||
|  |  | ||||||
|         realm.date_created = timezone_now() - timedelta(hours=10) |  | ||||||
|         realm.save(update_fields=["date_created"]) |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} |  | ||||||
|         ) |  | ||||||
|         self.assert_json_success(result) |  | ||||||
|  |  | ||||||
|         end_time = timezone_now() - timedelta(days=5) |  | ||||||
|         fill_state = FillState.objects.create( |  | ||||||
|             property="messages_sent:is_bot:hour", end_time=end_time, state=FillState.DONE |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         realm.date_created = timezone_now() - timedelta(days=3) |  | ||||||
|         realm.save(update_fields=["date_created"]) |  | ||||||
|         with self.assertLogs(level="WARNING") as m: |  | ||||||
|             result = self.client_get( |  | ||||||
|                 "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} |  | ||||||
|             ) |  | ||||||
|             self.assertEqual( |  | ||||||
|                 m.output, |  | ||||||
|                 [ |  | ||||||
|                     f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?" |  | ||||||
|                 ], |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         self.assert_json_error_contains(result, "No analytics data available") |  | ||||||
|  |  | ||||||
|         realm.date_created = timezone_now() - timedelta(days=1, minutes=10) |  | ||||||
|         realm.save(update_fields=["date_created"]) |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} |  | ||||||
|         ) |  | ||||||
|         self.assert_json_success(result) |  | ||||||
|  |  | ||||||
|         end_time = timezone_now() - timedelta(days=2) |  | ||||||
|         fill_state.end_time = end_time |  | ||||||
|         fill_state.save(update_fields=["end_time"]) |  | ||||||
|  |  | ||||||
|         realm.date_created = timezone_now() - timedelta(days=3) |  | ||||||
|         realm.save(update_fields=["date_created"]) |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} |  | ||||||
|         ) |  | ||||||
|         self.assert_json_success(result) |  | ||||||
|  |  | ||||||
|         realm.date_created = timezone_now() - timedelta(days=1, hours=2) |  | ||||||
|         realm.save(update_fields=["date_created"]) |  | ||||||
|         with self.assertLogs(level="WARNING") as m: |  | ||||||
|             result = self.client_get( |  | ||||||
|                 "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} |  | ||||||
|             ) |  | ||||||
|             self.assertEqual( |  | ||||||
|                 m.output, |  | ||||||
|                 [ |  | ||||||
|                     f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?" |  | ||||||
|                 ], |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         self.assert_json_error_contains(result, "No analytics data available") |  | ||||||
|  |  | ||||||
|         realm.date_created = timezone_now() - timedelta(days=1, minutes=10) |  | ||||||
|         realm.save(update_fields=["date_created"]) |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"} |  | ||||||
|         ) |  | ||||||
|         self.assert_json_success(result) |  | ||||||
|  |  | ||||||
|     def test_get_chart_data_for_realm(self) -> None: |  | ||||||
|         user = self.example_user("hamlet") |  | ||||||
|         self.login_user(user) |  | ||||||
|  |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"} |  | ||||||
|         ) |  | ||||||
|         self.assert_json_error(result, "Must be an server administrator", 400) |  | ||||||
|  |  | ||||||
|         user = self.example_user("hamlet") |  | ||||||
|         user.is_staff = True |  | ||||||
|         user.save(update_fields=["is_staff"]) |  | ||||||
|         stat = COUNT_STATS["realm_active_humans::day"] |  | ||||||
|         self.insert_data(stat, [None], []) |  | ||||||
|  |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data/realm/not_existing_realm", |  | ||||||
|             {"chart_name": "number_of_humans"}, |  | ||||||
|         ) |  | ||||||
|         self.assert_json_error(result, "Invalid organization", 400) |  | ||||||
|  |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"} |  | ||||||
|         ) |  | ||||||
|         self.assert_json_success(result) |  | ||||||
|  |  | ||||||
|     def test_get_chart_data_for_installation(self) -> None: |  | ||||||
|         user = self.example_user("hamlet") |  | ||||||
|         self.login_user(user) |  | ||||||
|  |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"} |  | ||||||
|         ) |  | ||||||
|         self.assert_json_error(result, "Must be an server administrator", 400) |  | ||||||
|  |  | ||||||
|         user = self.example_user("hamlet") |  | ||||||
|         user.is_staff = True |  | ||||||
|         user.save(update_fields=["is_staff"]) |  | ||||||
|         stat = COUNT_STATS["realm_active_humans::day"] |  | ||||||
|         self.insert_data(stat, [None], []) |  | ||||||
|  |  | ||||||
|         result = self.client_get( |  | ||||||
|             "/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"} |  | ||||||
|         ) |  | ||||||
|         self.assert_json_success(result) |  | ||||||
|  |  | ||||||
|  |  | ||||||
class TestGetChartDataHelpers(ZulipTestCase):
    def test_sort_by_totals(self) -> None:
        # Keys come back ordered by descending series total; the empty
        # series totals zero and sorts last.
        no_values: List[int] = []
        value_arrays = {"c": [0, 1], "a": [9], "b": [1, 1, 1], "d": no_values}
        self.assertEqual(sort_by_totals(value_arrays), ["a", "b", "c", "d"])

    def test_sort_client_labels(self) -> None:
        # Labels from both the realm-wide and per-user series are merged
        # into a single ordering.
        chart_data = {
            "everyone": {"a": [16], "c": [15], "b": [14], "e": [13], "d": [12], "h": [11]},
            "user": {"a": [6], "b": [5], "d": [4], "e": [3], "f": [2], "g": [1]},
        }
        self.assertEqual(
            sort_client_labels(chart_data), ["a", "b", "c", "d", "e", "f", "g", "h"]
        )
|  |  | ||||||
|  |  | ||||||
class TestTimeRange(ZulipTestCase):
    def test_time_range(self) -> None:
        """Spot-check time_range() bucket generation and min_length padding."""
        one_hour = timedelta(hours=1)
        one_day = timedelta(days=1)

        unaligned = datetime(2016, 3, 14, 22, 59, tzinfo=timezone.utc)
        hour_boundary = datetime(2016, 3, 14, 22, tzinfo=timezone.utc)
        day_boundary = datetime(2016, 3, 14, tzinfo=timezone.utc)

        # start == end off-boundary yields no buckets.
        self.assertEqual(time_range(unaligned, unaligned, CountStat.HOUR, None), [])
        self.assertEqual(time_range(unaligned, unaligned, CountStat.DAY, None), [])

        # start == end on a boundary, min_length 0, yields that single bucket.
        self.assertEqual(
            time_range(hour_boundary, hour_boundary, CountStat.HOUR, 0), [hour_boundary]
        )
        self.assertEqual(
            time_range(day_boundary, day_boundary, CountStat.DAY, 0), [day_boundary]
        )

        # Adjacent boundaries yield both endpoints.
        self.assertEqual(
            time_range(hour_boundary, hour_boundary + one_hour, CountStat.HOUR, None),
            [hour_boundary, hour_boundary + one_hour],
        )
        self.assertEqual(
            time_range(day_boundary, day_boundary + one_day, CountStat.DAY, None),
            [day_boundary, day_boundary + one_day],
        )

        # min_length pads with earlier buckets until the length is reached.
        self.assertEqual(
            time_range(hour_boundary, hour_boundary + one_hour, CountStat.HOUR, 4),
            [
                hour_boundary - 2 * one_hour,
                hour_boundary - one_hour,
                hour_boundary,
                hour_boundary + one_hour,
            ],
        )
        self.assertEqual(
            time_range(day_boundary, day_boundary + one_day, CountStat.DAY, 4),
            [
                day_boundary - 2 * one_day,
                day_boundary - one_day,
                day_boundary,
                day_boundary + one_day,
            ],
        )
|  |  | ||||||
|  |  | ||||||
class TestMapArrays(ZulipTestCase):
    def test_map_arrays(self) -> None:
        """Raw client names are mapped to display labels; series sharing a
        label are summed element-wise, and unknown names pass through."""
        raw_counts = {
            "desktop app 1.0": [1, 2, 3],
            "desktop app 2.0": [10, 12, 13],
            "desktop app 3.0": [21, 22, 23],
            "website": [1, 2, 3],
            "ZulipiOS": [1, 2, 3],
            "ZulipElectron": [2, 5, 7],
            "ZulipMobile": [1, 5, 7],
            "ZulipPython": [1, 2, 3],
            "API: Python": [1, 2, 3],
            "SomethingRandom": [4, 5, 6],
            "ZulipGitHubWebhook": [7, 7, 9],
            "ZulipAndroid": [64, 63, 65],
            "ZulipTerminal": [9, 10, 11],
        }
        mapped = rewrite_client_arrays(raw_counts)
        expected = {
            # "desktop app 1.0/2.0/3.0" collapse into one summed series.
            "Old desktop app": [32, 36, 39],
            "Old iOS app": [1, 2, 3],
            "Desktop app": [2, 5, 7],
            "Mobile app": [1, 5, 7],
            "Web app": [1, 2, 3],
            # "ZulipPython" and "API: Python" are summed together.
            "Python API": [2, 4, 6],
            # Unrecognized client names are left untouched.
            "SomethingRandom": [4, 5, 6],
            "GitHub webhook": [7, 7, 9],
            "Old Android app": [64, 63, 65],
            "Terminal app": [9, 10, 11],
        }
        self.assertEqual(mapped, expected)
| @@ -1,733 +0,0 @@ | |||||||
| from datetime import datetime, timedelta, timezone |  | ||||||
| from typing import TYPE_CHECKING, Optional |  | ||||||
| from unittest import mock |  | ||||||
|  |  | ||||||
| import orjson |  | ||||||
| from django.utils.timezone import now as timezone_now |  | ||||||
|  |  | ||||||
| from corporate.lib.stripe import add_months, update_sponsorship_status |  | ||||||
| from corporate.models import Customer, CustomerPlan, LicenseLedger, get_customer_by_realm |  | ||||||
| from zerver.actions.invites import do_create_multiuse_invite_link |  | ||||||
| from zerver.actions.realm_settings import do_change_realm_org_type, do_send_realm_reactivation_email |  | ||||||
| from zerver.actions.user_settings import do_change_user_setting |  | ||||||
| from zerver.lib.test_classes import ZulipTestCase |  | ||||||
| from zerver.lib.test_helpers import reset_email_visibility_to_everyone_in_zulip_realm |  | ||||||
| from zerver.models import ( |  | ||||||
|     MultiuseInvite, |  | ||||||
|     PreregistrationUser, |  | ||||||
|     Realm, |  | ||||||
|     UserMessage, |  | ||||||
|     UserProfile, |  | ||||||
|     get_org_type_display_name, |  | ||||||
|     get_realm, |  | ||||||
| ) |  | ||||||
|  |  | ||||||
| if TYPE_CHECKING: |  | ||||||
|     from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse |  | ||||||
|  |  | ||||||
|  |  | ||||||
| class TestSupportEndpoint(ZulipTestCase): |  | ||||||
|     def test_search(self) -> None: |  | ||||||
|         reset_email_visibility_to_everyone_in_zulip_realm() |  | ||||||
|         lear_user = self.lear_user("king") |  | ||||||
|         lear_user.is_staff = True |  | ||||||
|         lear_user.save(update_fields=["is_staff"]) |  | ||||||
|         lear_realm = get_realm("lear") |  | ||||||
|  |  | ||||||
|         def assert_user_details_in_html_response( |  | ||||||
|             html_response: "TestHttpResponse", full_name: str, email: str, role: str |  | ||||||
|         ) -> None: |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 [ |  | ||||||
|                     '<span class="label">user</span>\n', |  | ||||||
|                     f"<h3>{full_name}</h3>", |  | ||||||
|                     f"<b>Email</b>: {email}", |  | ||||||
|                     "<b>Is active</b>: True<br />", |  | ||||||
|                     f"<b>Role</b>: {role}<br />", |  | ||||||
|                 ], |  | ||||||
|                 html_response, |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         def create_invitation( |  | ||||||
|             stream: str, invitee_email: str, realm: Optional[Realm] = None |  | ||||||
|         ) -> None: |  | ||||||
|             invite_expires_in_minutes = 10 * 24 * 60 |  | ||||||
|             self.client_post( |  | ||||||
|                 "/json/invites", |  | ||||||
|                 { |  | ||||||
|                     "invitee_emails": [invitee_email], |  | ||||||
|                     "stream_ids": orjson.dumps([self.get_stream_id(stream, realm)]).decode(), |  | ||||||
|                     "invite_expires_in_minutes": invite_expires_in_minutes, |  | ||||||
|                     "invite_as": PreregistrationUser.INVITE_AS["MEMBER"], |  | ||||||
|                 }, |  | ||||||
|                 subdomain=realm.string_id if realm is not None else "zulip", |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         def check_hamlet_user_query_result(result: "TestHttpResponse") -> None: |  | ||||||
|             assert_user_details_in_html_response( |  | ||||||
|                 result, "King Hamlet", self.example_email("hamlet"), "Member" |  | ||||||
|             ) |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 [ |  | ||||||
|                     f"<b>Admins</b>: {self.example_email('iago')}\n", |  | ||||||
|                     f"<b>Owners</b>: {self.example_email('desdemona')}\n", |  | ||||||
|                     'class="copy-button" data-copytext="{}">'.format(self.example_email("iago")), |  | ||||||
|                     'class="copy-button" data-copytext="{}">'.format( |  | ||||||
|                         self.example_email("desdemona") |  | ||||||
|                     ), |  | ||||||
|                 ], |  | ||||||
|                 result, |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         def check_lear_user_query_result(result: "TestHttpResponse") -> None: |  | ||||||
|             assert_user_details_in_html_response( |  | ||||||
|                 result, lear_user.full_name, lear_user.email, "Member" |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         def check_othello_user_query_result(result: "TestHttpResponse") -> None: |  | ||||||
|             assert_user_details_in_html_response( |  | ||||||
|                 result, "Othello, the Moor of Venice", self.example_email("othello"), "Member" |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         def check_polonius_user_query_result(result: "TestHttpResponse") -> None: |  | ||||||
|             assert_user_details_in_html_response( |  | ||||||
|                 result, "Polonius", self.example_email("polonius"), "Guest" |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         def check_zulip_realm_query_result(result: "TestHttpResponse") -> None: |  | ||||||
|             zulip_realm = get_realm("zulip") |  | ||||||
|             first_human_user = zulip_realm.get_first_human_user() |  | ||||||
|             assert first_human_user is not None |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 [ |  | ||||||
|                     f"<b>First human user</b>: {first_human_user.delivery_email}\n", |  | ||||||
|                     f'<input type="hidden" name="realm_id" value="{zulip_realm.id}"', |  | ||||||
|                     "Zulip Dev</h3>", |  | ||||||
|                     '<option value="1" selected>Self-hosted</option>', |  | ||||||
|                     '<option value="2" >Limited</option>', |  | ||||||
|                     'input type="number" name="discount" value="None"', |  | ||||||
|                     '<option value="active" selected>Active</option>', |  | ||||||
|                     '<option value="deactivated" >Deactivated</option>', |  | ||||||
|                     f'<option value="{zulip_realm.org_type}" selected>', |  | ||||||
|                     'scrub-realm-button">', |  | ||||||
|                     'data-string-id="zulip"', |  | ||||||
|                 ], |  | ||||||
|                 result, |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         def check_lear_realm_query_result(result: "TestHttpResponse") -> None: |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 [ |  | ||||||
|                     f'<input type="hidden" name="realm_id" value="{lear_realm.id}"', |  | ||||||
|                     "Lear & Co.</h3>", |  | ||||||
|                     '<option value="1" selected>Self-hosted</option>', |  | ||||||
|                     '<option value="2" >Limited</option>', |  | ||||||
|                     'input type="number" name="discount" value="None"', |  | ||||||
|                     '<option value="active" selected>Active</option>', |  | ||||||
|                     '<option value="deactivated" >Deactivated</option>', |  | ||||||
|                     'scrub-realm-button">', |  | ||||||
|                     'data-string-id="lear"', |  | ||||||
|                     "<b>Name</b>: Zulip Cloud Standard", |  | ||||||
|                     "<b>Status</b>: Active", |  | ||||||
|                     "<b>Billing schedule</b>: Annual", |  | ||||||
|                     "<b>Licenses</b>: 2/10 (Manual)", |  | ||||||
|                     "<b>Price per license</b>: $80.0", |  | ||||||
|                     "<b>Next invoice date</b>: 02 January 2017", |  | ||||||
|                     '<option value="send_invoice" selected>', |  | ||||||
|                     '<option value="charge_automatically" >', |  | ||||||
|                 ], |  | ||||||
|                 result, |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         def check_preregistration_user_query_result( |  | ||||||
|             result: "TestHttpResponse", email: str, invite: bool = False |  | ||||||
|         ) -> None: |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 [ |  | ||||||
|                     '<span class="label">preregistration user</span>\n', |  | ||||||
|                     f"<b>Email</b>: {email}", |  | ||||||
|                 ], |  | ||||||
|                 result, |  | ||||||
|             ) |  | ||||||
|             if invite: |  | ||||||
|                 self.assert_in_success_response(['<span class="label">invite</span>'], result) |  | ||||||
|                 self.assert_in_success_response( |  | ||||||
|                     [ |  | ||||||
|                         "<b>Expires in</b>: 1\xa0week, 3\xa0days", |  | ||||||
|                         "<b>Status</b>: Link has not been used", |  | ||||||
|                     ], |  | ||||||
|                     result, |  | ||||||
|                 ) |  | ||||||
|                 self.assert_in_success_response([], result) |  | ||||||
|             else: |  | ||||||
|                 self.assert_not_in_success_response(['<span class="label">invite</span>'], result) |  | ||||||
|                 self.assert_in_success_response( |  | ||||||
|                     [ |  | ||||||
|                         "<b>Expires in</b>: 1\xa0day", |  | ||||||
|                         "<b>Status</b>: Link has not been used", |  | ||||||
|                     ], |  | ||||||
|                     result, |  | ||||||
|                 ) |  | ||||||
|  |  | ||||||
|         def check_realm_creation_query_result(result: "TestHttpResponse", email: str) -> None: |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 [ |  | ||||||
|                     '<span class="label">preregistration user</span>\n', |  | ||||||
|                     '<span class="label">realm creation</span>\n', |  | ||||||
|                     "<b>Link</b>: http://testserver/accounts/do_confirm/", |  | ||||||
|                     "<b>Expires in</b>: 1\xa0day", |  | ||||||
|                 ], |  | ||||||
|                 result, |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         def check_multiuse_invite_link_query_result(result: "TestHttpResponse") -> None: |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 [ |  | ||||||
|                     '<span class="label">multiuse invite</span>\n', |  | ||||||
|                     "<b>Link</b>: http://zulip.testserver/join/", |  | ||||||
|                     "<b>Expires in</b>: 1\xa0week, 3\xa0days", |  | ||||||
|                 ], |  | ||||||
|                 result, |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         def check_realm_reactivation_link_query_result(result: "TestHttpResponse") -> None: |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 [ |  | ||||||
|                     '<span class="label">realm reactivation</span>\n', |  | ||||||
|                     "<b>Link</b>: http://zulip.testserver/reactivate/", |  | ||||||
|                     "<b>Expires in</b>: 1\xa0day", |  | ||||||
|                 ], |  | ||||||
|                 result, |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         def get_check_query_result( |  | ||||||
|             query: str, count: int, subdomain: str = "zulip" |  | ||||||
|         ) -> "TestHttpResponse": |  | ||||||
|             result = self.client_get("/activity/support", {"q": query}, subdomain=subdomain) |  | ||||||
|             self.assertEqual(result.content.decode().count("support-query-result"), count) |  | ||||||
|             return result |  | ||||||
|  |  | ||||||
|         self.login("cordelia") |  | ||||||
|  |  | ||||||
|         result = self.client_get("/activity/support") |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|         self.assertEqual(result["Location"], "/login/") |  | ||||||
|  |  | ||||||
|         self.login("iago") |  | ||||||
|  |  | ||||||
|         do_change_user_setting( |  | ||||||
|             self.example_user("hamlet"), |  | ||||||
|             "email_address_visibility", |  | ||||||
|             UserProfile.EMAIL_ADDRESS_VISIBILITY_NOBODY, |  | ||||||
|             acting_user=None, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         customer = Customer.objects.create(realm=lear_realm, stripe_customer_id="cus_123") |  | ||||||
|         now = datetime(2016, 1, 2, tzinfo=timezone.utc) |  | ||||||
|         plan = CustomerPlan.objects.create( |  | ||||||
|             customer=customer, |  | ||||||
|             billing_cycle_anchor=now, |  | ||||||
|             billing_schedule=CustomerPlan.ANNUAL, |  | ||||||
|             tier=CustomerPlan.STANDARD, |  | ||||||
|             price_per_license=8000, |  | ||||||
|             next_invoice_date=add_months(now, 12), |  | ||||||
|         ) |  | ||||||
|         LicenseLedger.objects.create( |  | ||||||
|             licenses=10, |  | ||||||
|             licenses_at_next_renewal=10, |  | ||||||
|             event_time=timezone_now(), |  | ||||||
|             is_renewal=True, |  | ||||||
|             plan=plan, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         result = self.client_get("/activity/support") |  | ||||||
|         self.assert_in_success_response( |  | ||||||
|             ['<input type="text" name="q" class="input-xxlarge search-query"'], result |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         result = get_check_query_result(self.example_email("hamlet"), 1) |  | ||||||
|         check_hamlet_user_query_result(result) |  | ||||||
|         check_zulip_realm_query_result(result) |  | ||||||
|  |  | ||||||
|         # Search should be case-insensitive: |  | ||||||
|         assert self.example_email("hamlet") != self.example_email("hamlet").upper() |  | ||||||
|         result = get_check_query_result(self.example_email("hamlet").upper(), 1) |  | ||||||
|         check_hamlet_user_query_result(result) |  | ||||||
|         check_zulip_realm_query_result(result) |  | ||||||
|  |  | ||||||
|         result = get_check_query_result(lear_user.email, 1) |  | ||||||
|         check_lear_user_query_result(result) |  | ||||||
|         check_lear_realm_query_result(result) |  | ||||||
|  |  | ||||||
|         result = get_check_query_result(self.example_email("polonius"), 1) |  | ||||||
|         check_polonius_user_query_result(result) |  | ||||||
|         check_zulip_realm_query_result(result) |  | ||||||
|  |  | ||||||
|         result = get_check_query_result("lear", 1) |  | ||||||
|         check_lear_realm_query_result(result) |  | ||||||
|  |  | ||||||
|         result = get_check_query_result("http://lear.testserver", 1) |  | ||||||
|         check_lear_realm_query_result(result) |  | ||||||
|  |  | ||||||
|         with self.settings(REALM_HOSTS={"zulip": "localhost"}): |  | ||||||
|             result = get_check_query_result("http://localhost", 1) |  | ||||||
|             check_zulip_realm_query_result(result) |  | ||||||
|  |  | ||||||
|         result = get_check_query_result("hamlet@zulip.com, lear", 2) |  | ||||||
|         check_hamlet_user_query_result(result) |  | ||||||
|         check_zulip_realm_query_result(result) |  | ||||||
|         check_lear_realm_query_result(result) |  | ||||||
|  |  | ||||||
|         result = get_check_query_result("King hamlet,lear", 2) |  | ||||||
|         check_hamlet_user_query_result(result) |  | ||||||
|         check_zulip_realm_query_result(result) |  | ||||||
|         check_lear_realm_query_result(result) |  | ||||||
|  |  | ||||||
|         result = get_check_query_result("Othello, the Moor of Venice", 1) |  | ||||||
|         check_othello_user_query_result(result) |  | ||||||
|         check_zulip_realm_query_result(result) |  | ||||||
|  |  | ||||||
|         result = get_check_query_result("lear, Hamlet <hamlet@zulip.com>", 2) |  | ||||||
|         check_hamlet_user_query_result(result) |  | ||||||
|         check_zulip_realm_query_result(result) |  | ||||||
|         check_lear_realm_query_result(result) |  | ||||||
|  |  | ||||||
|         with mock.patch( |  | ||||||
|             "analytics.views.support.timezone_now", |  | ||||||
|             return_value=timezone_now() - timedelta(minutes=50), |  | ||||||
|         ): |  | ||||||
|             self.client_post("/accounts/home/", {"email": self.nonreg_email("test")}) |  | ||||||
|             self.login("iago") |  | ||||||
|             result = get_check_query_result(self.nonreg_email("test"), 1) |  | ||||||
|             check_preregistration_user_query_result(result, self.nonreg_email("test")) |  | ||||||
|             check_zulip_realm_query_result(result) |  | ||||||
|  |  | ||||||
|             create_invitation("Denmark", self.nonreg_email("test1")) |  | ||||||
|             result = get_check_query_result(self.nonreg_email("test1"), 1) |  | ||||||
|             check_preregistration_user_query_result(result, self.nonreg_email("test1"), invite=True) |  | ||||||
|             check_zulip_realm_query_result(result) |  | ||||||
|  |  | ||||||
|             email = self.nonreg_email("alice") |  | ||||||
|             self.submit_realm_creation_form( |  | ||||||
|                 email, realm_subdomain="zuliptest", realm_name="Zulip test" |  | ||||||
|             ) |  | ||||||
|             result = get_check_query_result(email, 1) |  | ||||||
|             check_realm_creation_query_result(result, email) |  | ||||||
|  |  | ||||||
|             invite_expires_in_minutes = 10 * 24 * 60 |  | ||||||
|             do_create_multiuse_invite_link( |  | ||||||
|                 self.example_user("hamlet"), |  | ||||||
|                 invited_as=1, |  | ||||||
|                 invite_expires_in_minutes=invite_expires_in_minutes, |  | ||||||
|             ) |  | ||||||
|             result = get_check_query_result("zulip", 2) |  | ||||||
|             check_multiuse_invite_link_query_result(result) |  | ||||||
|             check_zulip_realm_query_result(result) |  | ||||||
|             MultiuseInvite.objects.all().delete() |  | ||||||
|  |  | ||||||
|             do_send_realm_reactivation_email(get_realm("zulip"), acting_user=None) |  | ||||||
|             result = get_check_query_result("zulip", 2) |  | ||||||
|             check_realm_reactivation_link_query_result(result) |  | ||||||
|             check_zulip_realm_query_result(result) |  | ||||||
|  |  | ||||||
|             lear_nonreg_email = "newguy@lear.org" |  | ||||||
|             self.client_post("/accounts/home/", {"email": lear_nonreg_email}, subdomain="lear") |  | ||||||
|             result = get_check_query_result(lear_nonreg_email, 1) |  | ||||||
|             check_preregistration_user_query_result(result, lear_nonreg_email) |  | ||||||
|             check_lear_realm_query_result(result) |  | ||||||
|  |  | ||||||
|             self.login_user(lear_user) |  | ||||||
|             create_invitation("general", "newguy2@lear.org", lear_realm) |  | ||||||
|             result = get_check_query_result("newguy2@lear.org", 1, lear_realm.string_id) |  | ||||||
|             check_preregistration_user_query_result(result, "newguy2@lear.org", invite=True) |  | ||||||
|             check_lear_realm_query_result(result) |  | ||||||
|  |  | ||||||
|     def test_get_org_type_display_name(self) -> None: |  | ||||||
|         self.assertEqual(get_org_type_display_name(Realm.ORG_TYPES["business"]["id"]), "Business") |  | ||||||
|         self.assertEqual(get_org_type_display_name(883), "") |  | ||||||
|  |  | ||||||
|     def test_unspecified_org_type_correctly_displayed(self) -> None: |  | ||||||
|         """ |  | ||||||
|         Unspecified org type is special in that it is marked to not be shown |  | ||||||
|         on the registration page (because organitions are not meant to be able to choose it), |  | ||||||
|         but should be correctly shown at the /support/ endpoint. |  | ||||||
|         """ |  | ||||||
|         realm = get_realm("zulip") |  | ||||||
|  |  | ||||||
|         do_change_realm_org_type(realm, 0, acting_user=None) |  | ||||||
|         self.assertEqual(realm.org_type, 0) |  | ||||||
|  |  | ||||||
|         self.login("iago") |  | ||||||
|  |  | ||||||
|         result = self.client_get("/activity/support", {"q": "zulip"}, subdomain="zulip") |  | ||||||
|         self.assert_in_success_response( |  | ||||||
|             [ |  | ||||||
|                 f'<input type="hidden" name="realm_id" value="{realm.id}"', |  | ||||||
|                 '<option value="0" selected>', |  | ||||||
|             ], |  | ||||||
|             result, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     @mock.patch("analytics.views.support.update_billing_method_of_current_plan") |  | ||||||
|     def test_change_billing_method(self, m: mock.Mock) -> None: |  | ||||||
|         cordelia = self.example_user("cordelia") |  | ||||||
|         self.login_user(cordelia) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"} |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|         self.assertEqual(result["Location"], "/login/") |  | ||||||
|  |  | ||||||
|         iago = self.example_user("iago") |  | ||||||
|         self.login_user(iago) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", |  | ||||||
|             {"realm_id": f"{iago.realm_id}", "billing_method": "charge_automatically"}, |  | ||||||
|         ) |  | ||||||
|         m.assert_called_once_with(get_realm("zulip"), charge_automatically=True, acting_user=iago) |  | ||||||
|         self.assert_in_success_response( |  | ||||||
|             ["Billing method of zulip updated to charge automatically"], result |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         m.reset_mock() |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{iago.realm_id}", "billing_method": "send_invoice"} |  | ||||||
|         ) |  | ||||||
|         m.assert_called_once_with(get_realm("zulip"), charge_automatically=False, acting_user=iago) |  | ||||||
|         self.assert_in_success_response( |  | ||||||
|             ["Billing method of zulip updated to pay by invoice"], result |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_change_realm_plan_type(self) -> None: |  | ||||||
|         cordelia = self.example_user("cordelia") |  | ||||||
|         self.login_user(cordelia) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"} |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|         self.assertEqual(result["Location"], "/login/") |  | ||||||
|  |  | ||||||
|         iago = self.example_user("iago") |  | ||||||
|         self.login_user(iago) |  | ||||||
|  |  | ||||||
|         with mock.patch("analytics.views.support.do_change_realm_plan_type") as m: |  | ||||||
|             result = self.client_post( |  | ||||||
|                 "/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "2"} |  | ||||||
|             ) |  | ||||||
|             m.assert_called_once_with(get_realm("zulip"), 2, acting_user=iago) |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 ["Plan type of zulip changed from self-hosted to limited"], result |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         with mock.patch("analytics.views.support.do_change_realm_plan_type") as m: |  | ||||||
|             result = self.client_post( |  | ||||||
|                 "/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "10"} |  | ||||||
|             ) |  | ||||||
|             m.assert_called_once_with(get_realm("zulip"), 10, acting_user=iago) |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 ["Plan type of zulip changed from self-hosted to plus"], result |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|     def test_change_org_type(self) -> None: |  | ||||||
|         cordelia = self.example_user("cordelia") |  | ||||||
|         self.login_user(cordelia) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{cordelia.realm_id}", "org_type": "70"} |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|         self.assertEqual(result["Location"], "/login/") |  | ||||||
|  |  | ||||||
|         iago = self.example_user("iago") |  | ||||||
|         self.login_user(iago) |  | ||||||
|  |  | ||||||
|         with mock.patch("analytics.views.support.do_change_realm_org_type") as m: |  | ||||||
|             result = self.client_post( |  | ||||||
|                 "/activity/support", {"realm_id": f"{iago.realm_id}", "org_type": "70"} |  | ||||||
|             ) |  | ||||||
|             m.assert_called_once_with(get_realm("zulip"), 70, acting_user=iago) |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 ["Org type of zulip changed from Business to Government"], result |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|     def test_attach_discount(self) -> None: |  | ||||||
|         cordelia = self.example_user("cordelia") |  | ||||||
|         lear_realm = get_realm("lear") |  | ||||||
|         self.login_user(cordelia) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"} |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|         self.assertEqual(result["Location"], "/login/") |  | ||||||
|  |  | ||||||
|         iago = self.example_user("iago") |  | ||||||
|         self.login("iago") |  | ||||||
|  |  | ||||||
|         with mock.patch("analytics.views.support.attach_discount_to_realm") as m: |  | ||||||
|             result = self.client_post( |  | ||||||
|                 "/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"} |  | ||||||
|             ) |  | ||||||
|             m.assert_called_once_with(get_realm("lear"), 25, acting_user=iago) |  | ||||||
|             self.assert_in_success_response(["Discount of lear changed to 25% from 0%"], result) |  | ||||||
|  |  | ||||||
|     def test_change_sponsorship_status(self) -> None: |  | ||||||
|         lear_realm = get_realm("lear") |  | ||||||
|         self.assertIsNone(get_customer_by_realm(lear_realm)) |  | ||||||
|  |  | ||||||
|         cordelia = self.example_user("cordelia") |  | ||||||
|         self.login_user(cordelia) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"} |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|         self.assertEqual(result["Location"], "/login/") |  | ||||||
|  |  | ||||||
|         iago = self.example_user("iago") |  | ||||||
|         self.login_user(iago) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "true"} |  | ||||||
|         ) |  | ||||||
|         self.assert_in_success_response(["lear marked as pending sponsorship."], result) |  | ||||||
|         customer = get_customer_by_realm(lear_realm) |  | ||||||
|         assert customer is not None |  | ||||||
|         self.assertTrue(customer.sponsorship_pending) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "sponsorship_pending": "false"} |  | ||||||
|         ) |  | ||||||
|         self.assert_in_success_response(["lear is no longer pending sponsorship."], result) |  | ||||||
|         customer = get_customer_by_realm(lear_realm) |  | ||||||
|         assert customer is not None |  | ||||||
|         self.assertFalse(customer.sponsorship_pending) |  | ||||||
|  |  | ||||||
|     def test_approve_sponsorship(self) -> None: |  | ||||||
|         lear_realm = get_realm("lear") |  | ||||||
|         update_sponsorship_status(lear_realm, True, acting_user=None) |  | ||||||
|         king_user = self.lear_user("king") |  | ||||||
|         king_user.role = UserProfile.ROLE_REALM_OWNER |  | ||||||
|         king_user.save() |  | ||||||
|  |  | ||||||
|         cordelia = self.example_user("cordelia") |  | ||||||
|         self.login_user(cordelia) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", |  | ||||||
|             {"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"}, |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|         self.assertEqual(result["Location"], "/login/") |  | ||||||
|  |  | ||||||
|         iago = self.example_user("iago") |  | ||||||
|         self.login_user(iago) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", |  | ||||||
|             {"realm_id": f"{lear_realm.id}", "approve_sponsorship": "true"}, |  | ||||||
|         ) |  | ||||||
|         self.assert_in_success_response(["Sponsorship approved for lear"], result) |  | ||||||
|         lear_realm.refresh_from_db() |  | ||||||
|         self.assertEqual(lear_realm.plan_type, Realm.PLAN_TYPE_STANDARD_FREE) |  | ||||||
|         customer = get_customer_by_realm(lear_realm) |  | ||||||
|         assert customer is not None |  | ||||||
|         self.assertFalse(customer.sponsorship_pending) |  | ||||||
|         messages = UserMessage.objects.filter(user_profile=king_user) |  | ||||||
|         self.assertIn( |  | ||||||
|             "request for sponsored hosting has been approved", messages[0].message.content |  | ||||||
|         ) |  | ||||||
|         self.assert_length(messages, 1) |  | ||||||
|  |  | ||||||
|     def test_activate_or_deactivate_realm(self) -> None: |  | ||||||
|         cordelia = self.example_user("cordelia") |  | ||||||
|         lear_realm = get_realm("lear") |  | ||||||
|         self.login_user(cordelia) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"} |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|         self.assertEqual(result["Location"], "/login/") |  | ||||||
|  |  | ||||||
|         self.login("iago") |  | ||||||
|  |  | ||||||
|         with mock.patch("analytics.views.support.do_deactivate_realm") as m: |  | ||||||
|             result = self.client_post( |  | ||||||
|                 "/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"} |  | ||||||
|             ) |  | ||||||
|             m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago")) |  | ||||||
|             self.assert_in_success_response(["lear deactivated"], result) |  | ||||||
|  |  | ||||||
|         with mock.patch("analytics.views.support.do_send_realm_reactivation_email") as m: |  | ||||||
|             result = self.client_post( |  | ||||||
|                 "/activity/support", {"realm_id": f"{lear_realm.id}", "status": "active"} |  | ||||||
|             ) |  | ||||||
|             m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago")) |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 ["Realm reactivation email sent to admins of lear"], result |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|     def test_change_subdomain(self) -> None: |  | ||||||
|         cordelia = self.example_user("cordelia") |  | ||||||
|         lear_realm = get_realm("lear") |  | ||||||
|         self.login_user(cordelia) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new_name"} |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|         self.assertEqual(result["Location"], "/login/") |  | ||||||
|         self.login("iago") |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"} |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|         self.assertEqual(result["Location"], "/activity/support?q=new-name") |  | ||||||
|         realm_id = lear_realm.id |  | ||||||
|         lear_realm = get_realm("new-name") |  | ||||||
|         self.assertEqual(lear_realm.id, realm_id) |  | ||||||
|         self.assertTrue(Realm.objects.filter(string_id="lear").exists()) |  | ||||||
|         self.assertTrue(Realm.objects.filter(string_id="lear")[0].deactivated) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "new-name"} |  | ||||||
|         ) |  | ||||||
|         self.assert_in_success_response( |  | ||||||
|             ["Subdomain unavailable. Please choose a different one."], result |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "zulip"} |  | ||||||
|         ) |  | ||||||
|         self.assert_in_success_response( |  | ||||||
|             ["Subdomain unavailable. Please choose a different one."], result |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "new_subdomain": "lear"} |  | ||||||
|         ) |  | ||||||
|         self.assert_in_success_response( |  | ||||||
|             ["Subdomain unavailable. Please choose a different one."], result |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def test_downgrade_realm(self) -> None: |  | ||||||
|         cordelia = self.example_user("cordelia") |  | ||||||
|         self.login_user(cordelia) |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"} |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|         self.assertEqual(result["Location"], "/login/") |  | ||||||
|  |  | ||||||
|         iago = self.example_user("iago") |  | ||||||
|         self.login_user(iago) |  | ||||||
|  |  | ||||||
|         with mock.patch("analytics.views.support.downgrade_at_the_end_of_billing_cycle") as m: |  | ||||||
|             result = self.client_post( |  | ||||||
|                 "/activity/support", |  | ||||||
|                 { |  | ||||||
|                     "realm_id": f"{iago.realm_id}", |  | ||||||
|                     "modify_plan": "downgrade_at_billing_cycle_end", |  | ||||||
|                 }, |  | ||||||
|             ) |  | ||||||
|             m.assert_called_once_with(get_realm("zulip")) |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 ["zulip marked for downgrade at the end of billing cycle"], result |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         with mock.patch( |  | ||||||
|             "analytics.views.support.downgrade_now_without_creating_additional_invoices" |  | ||||||
|         ) as m: |  | ||||||
|             result = self.client_post( |  | ||||||
|                 "/activity/support", |  | ||||||
|                 { |  | ||||||
|                     "realm_id": f"{iago.realm_id}", |  | ||||||
|                     "modify_plan": "downgrade_now_without_additional_licenses", |  | ||||||
|                 }, |  | ||||||
|             ) |  | ||||||
|             m.assert_called_once_with(get_realm("zulip")) |  | ||||||
|             self.assert_in_success_response( |  | ||||||
|                 ["zulip downgraded without creating additional invoices"], result |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         with mock.patch( |  | ||||||
|             "analytics.views.support.downgrade_now_without_creating_additional_invoices" |  | ||||||
|         ) as m1: |  | ||||||
|             with mock.patch("analytics.views.support.void_all_open_invoices", return_value=1) as m2: |  | ||||||
|                 result = self.client_post( |  | ||||||
|                     "/activity/support", |  | ||||||
|                     { |  | ||||||
|                         "realm_id": f"{iago.realm_id}", |  | ||||||
|                         "modify_plan": "downgrade_now_void_open_invoices", |  | ||||||
|                     }, |  | ||||||
|                 ) |  | ||||||
|                 m1.assert_called_once_with(get_realm("zulip")) |  | ||||||
|                 m2.assert_called_once_with(get_realm("zulip")) |  | ||||||
|                 self.assert_in_success_response( |  | ||||||
|                     ["zulip downgraded and voided 1 open invoices"], result |  | ||||||
|                 ) |  | ||||||
|  |  | ||||||
|         with mock.patch("analytics.views.support.switch_realm_from_standard_to_plus_plan") as m: |  | ||||||
|             result = self.client_post( |  | ||||||
|                 "/activity/support", |  | ||||||
|                 { |  | ||||||
|                     "realm_id": f"{iago.realm_id}", |  | ||||||
|                     "modify_plan": "upgrade_to_plus", |  | ||||||
|                 }, |  | ||||||
|             ) |  | ||||||
|             m.assert_called_once_with(get_realm("zulip")) |  | ||||||
|             self.assert_in_success_response(["zulip upgraded to Plus"], result) |  | ||||||
|  |  | ||||||
|     def test_scrub_realm(self) -> None: |  | ||||||
|         cordelia = self.example_user("cordelia") |  | ||||||
|         lear_realm = get_realm("lear") |  | ||||||
|         self.login_user(cordelia) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"} |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|         self.assertEqual(result["Location"], "/login/") |  | ||||||
|  |  | ||||||
|         self.login("iago") |  | ||||||
|  |  | ||||||
|         with mock.patch("analytics.views.support.do_scrub_realm") as m: |  | ||||||
|             result = self.client_post( |  | ||||||
|                 "/activity/support", {"realm_id": f"{lear_realm.id}", "scrub_realm": "true"} |  | ||||||
|             ) |  | ||||||
|             m.assert_called_once_with(lear_realm, acting_user=self.example_user("iago")) |  | ||||||
|             self.assert_in_success_response(["lear scrubbed"], result) |  | ||||||
|  |  | ||||||
|         with mock.patch("analytics.views.support.do_scrub_realm") as m: |  | ||||||
|             result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}"}) |  | ||||||
|             self.assert_json_error(result, "Invalid parameters") |  | ||||||
|             m.assert_not_called() |  | ||||||
|  |  | ||||||
|     def test_delete_user(self) -> None: |  | ||||||
|         cordelia = self.example_user("cordelia") |  | ||||||
|         hamlet = self.example_user("hamlet") |  | ||||||
|         hamlet_email = hamlet.delivery_email |  | ||||||
|         realm = get_realm("zulip") |  | ||||||
|         self.login_user(cordelia) |  | ||||||
|  |  | ||||||
|         result = self.client_post( |  | ||||||
|             "/activity/support", {"realm_id": f"{realm.id}", "delete_user_by_id": hamlet.id} |  | ||||||
|         ) |  | ||||||
|         self.assertEqual(result.status_code, 302) |  | ||||||
|         self.assertEqual(result["Location"], "/login/") |  | ||||||
|  |  | ||||||
|         self.login("iago") |  | ||||||
|  |  | ||||||
|         with mock.patch("analytics.views.support.do_delete_user_preserving_messages") as m: |  | ||||||
|             result = self.client_post( |  | ||||||
|                 "/activity/support", |  | ||||||
|                 {"realm_id": f"{realm.id}", "delete_user_by_id": hamlet.id}, |  | ||||||
|             ) |  | ||||||
|             m.assert_called_once_with(hamlet) |  | ||||||
|             self.assert_in_success_response([f"{hamlet_email} in zulip deleted"], result) |  | ||||||
							
								
								
									
										618
									
								
								analytics/tests/test_views.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										618
									
								
								analytics/tests/test_views.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,618 @@ | |||||||
|  | from datetime import datetime, timedelta | ||||||
|  | from typing import List, Optional | ||||||
|  |  | ||||||
|  | import mock | ||||||
|  | from django.utils.timezone import utc | ||||||
|  | from django.http import HttpResponse | ||||||
|  | import ujson | ||||||
|  |  | ||||||
|  | from analytics.lib.counts import COUNT_STATS, CountStat | ||||||
|  | from analytics.lib.time_utils import time_range | ||||||
|  | from analytics.models import FillState, \ | ||||||
|  |     RealmCount, UserCount, last_successful_fill | ||||||
|  | from analytics.views import rewrite_client_arrays, \ | ||||||
|  |     sort_by_totals, sort_client_labels | ||||||
|  | from zerver.lib.test_classes import ZulipTestCase | ||||||
|  | from zerver.lib.timestamp import ceiling_to_day, \ | ||||||
|  |     ceiling_to_hour, datetime_to_timestamp | ||||||
|  | from zerver.lib.actions import do_create_multiuse_invite_link, \ | ||||||
|  |     do_send_realm_reactivation_email | ||||||
|  | from zerver.models import Client, get_realm, MultiuseInvite | ||||||
|  |  | ||||||
class TestStatsEndpoint(ZulipTestCase):
    """Access control and basic rendering of the /stats pages."""

    def test_stats(self) -> None:
        # An ordinary member can load their own realm's stats page.
        self.user = self.example_user('hamlet')
        self.login(self.user.email)
        response = self.client_get('/stats')
        self.assertEqual(response.status_code, 200)
        # Check that we get something back.
        self.assert_in_response("Zulip analytics for", response)

    def test_guest_user_cant_access_stats(self) -> None:
        # Guests are rejected from both the page and the JSON endpoint.
        self.user = self.example_user('polonius')
        self.login(self.user.email)
        for url in ['/stats', '/json/analytics/chart_data']:
            response = self.client_get(url)
            self.assert_json_error(response, "Not allowed for guest users", 400)

    def test_stats_for_realm(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login(user_profile.email)

        # Non-staff users are redirected away from per-realm stats.
        response = self.client_get('/stats/realm/zulip/')
        self.assertEqual(response.status_code, 302)

        user_profile = self.example_user('hamlet')
        user_profile.is_staff = True
        user_profile.save(update_fields=['is_staff'])

        # A nonexistent realm redirects even for staff.
        response = self.client_get('/stats/realm/not_existing_realm/')
        self.assertEqual(response.status_code, 302)

        response = self.client_get('/stats/realm/zulip/')
        self.assertEqual(response.status_code, 200)
        self.assert_in_response("Zulip analytics for", response)

    def test_stats_for_installation(self) -> None:
        user_profile = self.example_user('hamlet')
        self.login(user_profile.email)

        # Installation-wide stats are staff-only.
        response = self.client_get('/stats/installation')
        self.assertEqual(response.status_code, 302)

        user_profile = self.example_user('hamlet')
        user_profile.is_staff = True
        user_profile.save(update_fields=['is_staff'])

        response = self.client_get('/stats/installation')
        self.assertEqual(response.status_code, 200)
        self.assert_in_response("Zulip analytics for", response)
|  |  | ||||||
|  | class TestGetChartData(ZulipTestCase): | ||||||
|  |     def setUp(self) -> None: | ||||||
|  |         super().setUp() | ||||||
|  |         self.realm = get_realm('zulip') | ||||||
|  |         self.user = self.example_user('hamlet') | ||||||
|  |         self.login(self.user.email) | ||||||
|  |         self.end_times_hour = [ceiling_to_hour(self.realm.date_created) + timedelta(hours=i) | ||||||
|  |                                for i in range(4)] | ||||||
|  |         self.end_times_day = [ceiling_to_day(self.realm.date_created) + timedelta(days=i) | ||||||
|  |                               for i in range(4)] | ||||||
|  |  | ||||||
|  |     def data(self, i: int) -> List[int]: | ||||||
|  |         return [0, 0, i, 0] | ||||||
|  |  | ||||||
|  |     def insert_data(self, stat: CountStat, realm_subgroups: List[Optional[str]], | ||||||
|  |                     user_subgroups: List[str]) -> None: | ||||||
|  |         if stat.frequency == CountStat.HOUR: | ||||||
|  |             insert_time = self.end_times_hour[2] | ||||||
|  |             fill_time = self.end_times_hour[-1] | ||||||
|  |         if stat.frequency == CountStat.DAY: | ||||||
|  |             insert_time = self.end_times_day[2] | ||||||
|  |             fill_time = self.end_times_day[-1] | ||||||
|  |  | ||||||
|  |         RealmCount.objects.bulk_create([ | ||||||
|  |             RealmCount(property=stat.property, subgroup=subgroup, end_time=insert_time, | ||||||
|  |                        value=100+i, realm=self.realm) | ||||||
|  |             for i, subgroup in enumerate(realm_subgroups)]) | ||||||
|  |         UserCount.objects.bulk_create([ | ||||||
|  |             UserCount(property=stat.property, subgroup=subgroup, end_time=insert_time, | ||||||
|  |                       value=200+i, realm=self.realm, user=self.user) | ||||||
|  |             for i, subgroup in enumerate(user_subgroups)]) | ||||||
|  |         FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE) | ||||||
|  |  | ||||||
|  |     def test_number_of_humans(self) -> None: | ||||||
|  |         stat = COUNT_STATS['realm_active_humans::day'] | ||||||
|  |         self.insert_data(stat, [None], []) | ||||||
|  |         stat = COUNT_STATS['1day_actives::day'] | ||||||
|  |         self.insert_data(stat, [None], []) | ||||||
|  |         stat = COUNT_STATS['active_users_audit:is_bot:day'] | ||||||
|  |         self.insert_data(stat, ['false'], []) | ||||||
|  |         result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                  {'chart_name': 'number_of_humans'}) | ||||||
|  |         self.assert_json_success(result) | ||||||
|  |         data = result.json() | ||||||
|  |         self.assertEqual(data, { | ||||||
|  |             'msg': '', | ||||||
|  |             'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||||
|  |             'frequency': CountStat.DAY, | ||||||
|  |             'everyone': {'_1day': self.data(100), '_15day': self.data(100), 'all_time': self.data(100)}, | ||||||
|  |             'display_order': None, | ||||||
|  |             'result': 'success', | ||||||
|  |         }) | ||||||
|  |  | ||||||
|  |     def test_messages_sent_over_time(self) -> None: | ||||||
|  |         stat = COUNT_STATS['messages_sent:is_bot:hour'] | ||||||
|  |         self.insert_data(stat, ['true', 'false'], ['false']) | ||||||
|  |         result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                  {'chart_name': 'messages_sent_over_time'}) | ||||||
|  |         self.assert_json_success(result) | ||||||
|  |         data = result.json() | ||||||
|  |         self.assertEqual(data, { | ||||||
|  |             'msg': '', | ||||||
|  |             'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_hour], | ||||||
|  |             'frequency': CountStat.HOUR, | ||||||
|  |             'everyone': {'bot': self.data(100), 'human': self.data(101)}, | ||||||
|  |             'user': {'bot': self.data(0), 'human': self.data(200)}, | ||||||
|  |             'display_order': None, | ||||||
|  |             'result': 'success', | ||||||
|  |         }) | ||||||
|  |  | ||||||
|  |     def test_messages_sent_by_message_type(self) -> None: | ||||||
|  |         stat = COUNT_STATS['messages_sent:message_type:day'] | ||||||
|  |         self.insert_data(stat, ['public_stream', 'private_message'], | ||||||
|  |                          ['public_stream', 'private_stream']) | ||||||
|  |         result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                  {'chart_name': 'messages_sent_by_message_type'}) | ||||||
|  |         self.assert_json_success(result) | ||||||
|  |         data = result.json() | ||||||
|  |         self.assertEqual(data, { | ||||||
|  |             'msg': '', | ||||||
|  |             'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||||
|  |             'frequency': CountStat.DAY, | ||||||
|  |             'everyone': {'Public streams': self.data(100), 'Private streams': self.data(0), | ||||||
|  |                          'Private messages': self.data(101), 'Group private messages': self.data(0)}, | ||||||
|  |             'user': {'Public streams': self.data(200), 'Private streams': self.data(201), | ||||||
|  |                      'Private messages': self.data(0), 'Group private messages': self.data(0)}, | ||||||
|  |             'display_order': ['Private messages', 'Public streams', 'Private streams', 'Group private messages'], | ||||||
|  |             'result': 'success', | ||||||
|  |         }) | ||||||
|  |  | ||||||
|  |     def test_messages_sent_by_client(self) -> None: | ||||||
|  |         stat = COUNT_STATS['messages_sent:client:day'] | ||||||
|  |         client1 = Client.objects.create(name='client 1') | ||||||
|  |         client2 = Client.objects.create(name='client 2') | ||||||
|  |         client3 = Client.objects.create(name='client 3') | ||||||
|  |         client4 = Client.objects.create(name='client 4') | ||||||
|  |         self.insert_data(stat, [client4.id, client3.id, client2.id], | ||||||
|  |                          [client3.id, client1.id]) | ||||||
|  |         result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                  {'chart_name': 'messages_sent_by_client'}) | ||||||
|  |         self.assert_json_success(result) | ||||||
|  |         data = result.json() | ||||||
|  |         self.assertEqual(data, { | ||||||
|  |             'msg': '', | ||||||
|  |             'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day], | ||||||
|  |             'frequency': CountStat.DAY, | ||||||
|  |             'everyone': {'client 4': self.data(100), 'client 3': self.data(101), | ||||||
|  |                          'client 2': self.data(102)}, | ||||||
|  |             'user': {'client 3': self.data(200), 'client 1': self.data(201)}, | ||||||
|  |             'display_order': ['client 1', 'client 2', 'client 3', 'client 4'], | ||||||
|  |             'result': 'success', | ||||||
|  |         }) | ||||||
|  |  | ||||||
|  |     def test_include_empty_subgroups(self) -> None: | ||||||
|  |         FillState.objects.create( | ||||||
|  |             property='realm_active_humans::day', end_time=self.end_times_day[0], | ||||||
|  |             state=FillState.DONE) | ||||||
|  |         result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                  {'chart_name': 'number_of_humans'}) | ||||||
|  |         self.assert_json_success(result) | ||||||
|  |         data = result.json() | ||||||
|  |         self.assertEqual(data['everyone'], {"_1day": [0], "_15day": [0], "all_time": [0]}) | ||||||
|  |         self.assertFalse('user' in data) | ||||||
|  |  | ||||||
|  |         FillState.objects.create( | ||||||
|  |             property='messages_sent:is_bot:hour', end_time=self.end_times_hour[0], | ||||||
|  |             state=FillState.DONE) | ||||||
|  |         result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                  {'chart_name': 'messages_sent_over_time'}) | ||||||
|  |         self.assert_json_success(result) | ||||||
|  |         data = result.json() | ||||||
|  |         self.assertEqual(data['everyone'], {'human': [0], 'bot': [0]}) | ||||||
|  |         self.assertEqual(data['user'], {'human': [0], 'bot': [0]}) | ||||||
|  |  | ||||||
|  |         FillState.objects.create( | ||||||
|  |             property='messages_sent:message_type:day', end_time=self.end_times_day[0], | ||||||
|  |             state=FillState.DONE) | ||||||
|  |         result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                  {'chart_name': 'messages_sent_by_message_type'}) | ||||||
|  |         self.assert_json_success(result) | ||||||
|  |         data = result.json() | ||||||
|  |         self.assertEqual(data['everyone'], { | ||||||
|  |             'Public streams': [0], 'Private streams': [0], | ||||||
|  |             'Private messages': [0], 'Group private messages': [0]}) | ||||||
|  |         self.assertEqual(data['user'], { | ||||||
|  |             'Public streams': [0], 'Private streams': [0], | ||||||
|  |             'Private messages': [0], 'Group private messages': [0]}) | ||||||
|  |  | ||||||
|  |         FillState.objects.create( | ||||||
|  |             property='messages_sent:client:day', end_time=self.end_times_day[0], | ||||||
|  |             state=FillState.DONE) | ||||||
|  |         result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                  {'chart_name': 'messages_sent_by_client'}) | ||||||
|  |         self.assert_json_success(result) | ||||||
|  |         data = result.json() | ||||||
|  |         self.assertEqual(data['everyone'], {}) | ||||||
|  |         self.assertEqual(data['user'], {}) | ||||||
|  |  | ||||||
|  |     def test_start_and_end(self) -> None: | ||||||
|  |         stat = COUNT_STATS['realm_active_humans::day'] | ||||||
|  |         self.insert_data(stat, [None], []) | ||||||
|  |         stat = COUNT_STATS['1day_actives::day'] | ||||||
|  |         self.insert_data(stat, [None], []) | ||||||
|  |         stat = COUNT_STATS['active_users_audit:is_bot:day'] | ||||||
|  |         self.insert_data(stat, ['false'], []) | ||||||
|  |         end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day] | ||||||
|  |  | ||||||
|  |         # valid start and end | ||||||
|  |         result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                  {'chart_name': 'number_of_humans', | ||||||
|  |                                   'start': end_time_timestamps[1], | ||||||
|  |                                   'end': end_time_timestamps[2]}) | ||||||
|  |         self.assert_json_success(result) | ||||||
|  |         data = result.json() | ||||||
|  |         self.assertEqual(data['end_times'], end_time_timestamps[1:3]) | ||||||
|  |         self.assertEqual(data['everyone'], {'_1day': [0, 100], '_15day': [0, 100], 'all_time': [0, 100]}) | ||||||
|  |  | ||||||
|  |         # start later then end | ||||||
|  |         result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                  {'chart_name': 'number_of_humans', | ||||||
|  |                                   'start': end_time_timestamps[2], | ||||||
|  |                                   'end': end_time_timestamps[1]}) | ||||||
|  |         self.assert_json_error_contains(result, 'Start time is later than') | ||||||
|  |  | ||||||
|  |     def test_min_length(self) -> None: | ||||||
|  |         stat = COUNT_STATS['realm_active_humans::day'] | ||||||
|  |         self.insert_data(stat, [None], []) | ||||||
|  |         stat = COUNT_STATS['1day_actives::day'] | ||||||
|  |         self.insert_data(stat, [None], []) | ||||||
|  |         stat = COUNT_STATS['active_users_audit:is_bot:day'] | ||||||
|  |         self.insert_data(stat, ['false'], []) | ||||||
|  |         # test min_length is too short to change anything | ||||||
|  |         result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                  {'chart_name': 'number_of_humans', | ||||||
|  |                                   'min_length': 2}) | ||||||
|  |         self.assert_json_success(result) | ||||||
|  |         data = result.json() | ||||||
|  |         self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in self.end_times_day]) | ||||||
|  |         self.assertEqual(data['everyone'], {'_1day': self.data(100), '_15day': self.data(100), 'all_time': self.data(100)}) | ||||||
|  |         # test min_length larger than filled data | ||||||
|  |         result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                  {'chart_name': 'number_of_humans', | ||||||
|  |                                   'min_length': 5}) | ||||||
|  |         self.assert_json_success(result) | ||||||
|  |         data = result.json() | ||||||
|  |         end_times = [ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)] | ||||||
|  |         self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in end_times]) | ||||||
|  |         self.assertEqual(data['everyone'], {'_1day': [0]+self.data(100), '_15day': [0]+self.data(100), 'all_time': [0]+self.data(100)}) | ||||||
|  |  | ||||||
|  |     def test_non_existent_chart(self) -> None: | ||||||
|  |         result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                  {'chart_name': 'does_not_exist'}) | ||||||
|  |         self.assert_json_error_contains(result, 'Unknown chart name') | ||||||
|  |  | ||||||
|  |     def test_analytics_not_running(self) -> None: | ||||||
|  |         # try to get data for a valid chart, but before we've put anything in the database | ||||||
|  |         # (e.g. before update_analytics_counts has been run) | ||||||
|  |         with mock.patch('logging.warning'): | ||||||
|  |             result = self.client_get('/json/analytics/chart_data', | ||||||
|  |                                      {'chart_name': 'number_of_humans'}) | ||||||
|  |         self.assert_json_error_contains(result, 'No analytics data available') | ||||||
|  |  | ||||||
|  |     def test_get_chart_data_for_realm(self) -> None: | ||||||
|  |         user_profile = self.example_user('hamlet') | ||||||
|  |         self.login(user_profile.email) | ||||||
|  |  | ||||||
|  |         result = self.client_get('/json/analytics/chart_data/realm/zulip/', | ||||||
|  |                                  {'chart_name': 'number_of_humans'}) | ||||||
|  |         self.assert_json_error(result, "Must be an server administrator", 400) | ||||||
|  |  | ||||||
|  |         user_profile = self.example_user('hamlet') | ||||||
|  |         user_profile.is_staff = True | ||||||
|  |         user_profile.save(update_fields=['is_staff']) | ||||||
|  |         stat = COUNT_STATS['realm_active_humans::day'] | ||||||
|  |         self.insert_data(stat, [None], []) | ||||||
|  |  | ||||||
|  |         result = self.client_get('/json/analytics/chart_data/realm/not_existing_realm', | ||||||
|  |                                  {'chart_name': 'number_of_humans'}) | ||||||
|  |         self.assert_json_error(result, 'Invalid organization', 400) | ||||||
|  |  | ||||||
|  |         result = self.client_get('/json/analytics/chart_data/realm/zulip', | ||||||
|  |                                  {'chart_name': 'number_of_humans'}) | ||||||
|  |         self.assert_json_success(result) | ||||||
|  |  | ||||||
|  |     def test_get_chart_data_for_installation(self) -> None: | ||||||
|  |         user_profile = self.example_user('hamlet') | ||||||
|  |         self.login(user_profile.email) | ||||||
|  |  | ||||||
|  |         result = self.client_get('/json/analytics/chart_data/installation', | ||||||
|  |                                  {'chart_name': 'number_of_humans'}) | ||||||
|  |         self.assert_json_error(result, "Must be an server administrator", 400) | ||||||
|  |  | ||||||
|  |         user_profile = self.example_user('hamlet') | ||||||
|  |         user_profile.is_staff = True | ||||||
|  |         user_profile.save(update_fields=['is_staff']) | ||||||
|  |         stat = COUNT_STATS['realm_active_humans::day'] | ||||||
|  |         self.insert_data(stat, [None], []) | ||||||
|  |  | ||||||
|  |         result = self.client_get('/json/analytics/chart_data/installation', | ||||||
|  |                                  {'chart_name': 'number_of_humans'}) | ||||||
|  |         self.assert_json_success(result) | ||||||
|  |  | ||||||
|  | class TestSupportEndpoint(ZulipTestCase): | ||||||
|  |     def test_search(self) -> None: | ||||||
|  |         def check_hamlet_user_query_result(result: HttpResponse) -> None: | ||||||
|  |             self.assert_in_success_response(['<span class="label">user</span>\n', '<h3>King Hamlet</h3>', | ||||||
|  |                                              '<b>Email</b>: hamlet@zulip.com', '<b>Is active</b>: True<br>', | ||||||
|  |                                              '<b>Admins</b>: iago@zulip.com\n', | ||||||
|  |                                              'class="copy-button" data-copytext="iago@zulip.com"' | ||||||
|  |                                              ], result) | ||||||
|  |  | ||||||
|  |         def check_zulip_realm_query_result(result: HttpResponse) -> None: | ||||||
|  |             zulip_realm = get_realm("zulip") | ||||||
|  |             self.assert_in_success_response(['<input type="hidden" name="realm_id" value="%s"' % (zulip_realm.id,), | ||||||
|  |                                              'Zulip Dev</h3>', | ||||||
|  |                                              '<option value="1" selected>Self Hosted</option>', | ||||||
|  |                                              '<option value="2" >Limited</option>', | ||||||
|  |                                              'input type="number" name="discount" value="None"', | ||||||
|  |                                              '<option value="active" selected>Active</option>', | ||||||
|  |                                              '<option value="deactivated" >Deactivated</option>', | ||||||
|  |                                              'scrub-realm-button">', | ||||||
|  |                                              'data-string-id="zulip"'], result) | ||||||
|  |  | ||||||
|  |         def check_lear_realm_query_result(result: HttpResponse) -> None: | ||||||
|  |             lear_realm = get_realm("lear") | ||||||
|  |             self.assert_in_success_response(['<input type="hidden" name="realm_id" value="%s"' % (lear_realm.id,), | ||||||
|  |                                              'Lear & Co.</h3>', | ||||||
|  |                                              '<option value="1" selected>Self Hosted</option>', | ||||||
|  |                                              '<option value="2" >Limited</option>', | ||||||
|  |                                              'input type="number" name="discount" value="None"', | ||||||
|  |                                              '<option value="active" selected>Active</option>', | ||||||
|  |                                              '<option value="deactivated" >Deactivated</option>', | ||||||
|  |                                              'scrub-realm-button">', | ||||||
|  |                                              'data-string-id="lear"'], result) | ||||||
|  |  | ||||||
|  |         def check_preregistration_user_query_result(result: HttpResponse, email: str, invite: Optional[bool]=False) -> None: | ||||||
|  |             self.assert_in_success_response(['<span class="label">preregistration user</span>\n', | ||||||
|  |                                              '<b>Email</b>: {}'.format(email), | ||||||
|  |                                              ], result) | ||||||
|  |             if invite: | ||||||
|  |                 self.assert_in_success_response(['<span class="label">invite</span>'], result) | ||||||
|  |                 self.assert_in_success_response(['<b>Expires in</b>: 1\xa0week, 3', | ||||||
|  |                                                  '<b>Status</b>: Link has never been clicked'], result) | ||||||
|  |                 self.assert_in_success_response([], result) | ||||||
|  |             else: | ||||||
|  |                 self.assert_not_in_success_response(['<span class="label">invite</span>'], result) | ||||||
|  |                 self.assert_in_success_response(['<b>Expires in</b>: 1\xa0day', | ||||||
|  |                                                  '<b>Status</b>: Link has never been clicked'], result) | ||||||
|  |  | ||||||
|  |         def check_realm_creation_query_result(result: HttpResponse, email: str) -> None: | ||||||
|  |             self.assert_in_success_response(['<span class="label">preregistration user</span>\n', | ||||||
|  |                                              '<span class="label">realm creation</span>\n', | ||||||
|  |                                              '<b>Link</b>: http://zulip.testserver/accounts/do_confirm/', | ||||||
|  |                                              '<b>Expires in</b>: 1\xa0day<br>\n' | ||||||
|  |                                              ], result) | ||||||
|  |  | ||||||
|  |         def check_multiuse_invite_link_query_result(result: HttpResponse) -> None: | ||||||
|  |             self.assert_in_success_response(['<span class="label">multiuse invite</span>\n', | ||||||
|  |                                              '<b>Link</b>: http://zulip.testserver/join/', | ||||||
|  |                                              '<b>Expires in</b>: 1\xa0week, 3' | ||||||
|  |                                              ], result) | ||||||
|  |  | ||||||
|  |         def check_realm_reactivation_link_query_result(result: HttpResponse) -> None: | ||||||
|  |             self.assert_in_success_response(['<span class="label">realm reactivation</span>\n', | ||||||
|  |                                              '<b>Link</b>: http://zulip.testserver/reactivate/', | ||||||
|  |                                              '<b>Expires in</b>: 1\xa0day' | ||||||
|  |                                              ], result) | ||||||
|  |  | ||||||
|  |         cordelia_email = self.example_email("cordelia") | ||||||
|  |         self.login(cordelia_email) | ||||||
|  |  | ||||||
|  |         result = self.client_get("/activity/support") | ||||||
|  |         self.assertEqual(result.status_code, 302) | ||||||
|  |         self.assertEqual(result["Location"], "/login/") | ||||||
|  |  | ||||||
|  |         iago_email = self.example_email("iago") | ||||||
|  |         self.login(iago_email) | ||||||
|  |  | ||||||
|  |         result = self.client_get("/activity/support") | ||||||
|  |         self.assert_in_success_response(['<input type="text" name="q" class="input-xxlarge search-query"'], result) | ||||||
|  |  | ||||||
|  |         result = self.client_get("/activity/support", {"q": "hamlet@zulip.com"}) | ||||||
|  |         check_hamlet_user_query_result(result) | ||||||
|  |         check_zulip_realm_query_result(result) | ||||||
|  |  | ||||||
|  |         result = self.client_get("/activity/support", {"q": "lear"}) | ||||||
|  |         check_lear_realm_query_result(result) | ||||||
|  |  | ||||||
|  |         result = self.client_get("/activity/support", {"q": "http://lear.testserver"}) | ||||||
|  |         check_lear_realm_query_result(result) | ||||||
|  |  | ||||||
|  |         with self.settings(REALM_HOSTS={'zulip': 'localhost'}): | ||||||
|  |             result = self.client_get("/activity/support", {"q": "http://localhost"}) | ||||||
|  |             check_zulip_realm_query_result(result) | ||||||
|  |  | ||||||
|  |         result = self.client_get("/activity/support", {"q": "hamlet@zulip.com, lear"}) | ||||||
|  |         check_hamlet_user_query_result(result) | ||||||
|  |         check_zulip_realm_query_result(result) | ||||||
|  |         check_lear_realm_query_result(result) | ||||||
|  |  | ||||||
|  |         result = self.client_get("/activity/support", {"q": "lear, Hamlet <hamlet@zulip.com>"}) | ||||||
|  |         check_hamlet_user_query_result(result) | ||||||
|  |         check_zulip_realm_query_result(result) | ||||||
|  |         check_lear_realm_query_result(result) | ||||||
|  |  | ||||||
|  |         self.client_post('/accounts/home/', {'email': self.nonreg_email("test")}) | ||||||
|  |         self.login(iago_email) | ||||||
|  |         result = self.client_get("/activity/support", {"q": self.nonreg_email("test")}) | ||||||
|  |         check_preregistration_user_query_result(result, self.nonreg_email("test")) | ||||||
|  |         check_zulip_realm_query_result(result) | ||||||
|  |  | ||||||
|  |         stream_ids = [self.get_stream_id("Denmark")] | ||||||
|  |         invitee_emails = [self.nonreg_email("test1")] | ||||||
|  |         self.client_post("/json/invites", {"invitee_emails": invitee_emails, | ||||||
|  |                          "stream_ids": ujson.dumps(stream_ids), "invite_as": 1}) | ||||||
|  |         result = self.client_get("/activity/support", {"q": self.nonreg_email("test1")}) | ||||||
|  |         check_preregistration_user_query_result(result, self.nonreg_email("test1"), invite=True) | ||||||
|  |         check_zulip_realm_query_result(result) | ||||||
|  |  | ||||||
|  |         email = self.nonreg_email('alice') | ||||||
|  |         self.client_post('/new/', {'email': email}) | ||||||
|  |         result = self.client_get("/activity/support", {"q": email}) | ||||||
|  |         check_realm_creation_query_result(result, email) | ||||||
|  |  | ||||||
|  |         do_create_multiuse_invite_link(self.example_user("hamlet"), invited_as=1) | ||||||
|  |         result = self.client_get("/activity/support", {"q": "zulip"}) | ||||||
|  |         check_multiuse_invite_link_query_result(result) | ||||||
|  |         check_zulip_realm_query_result(result) | ||||||
|  |         MultiuseInvite.objects.all().delete() | ||||||
|  |  | ||||||
|  |         do_send_realm_reactivation_email(get_realm("zulip")) | ||||||
|  |         result = self.client_get("/activity/support", {"q": "zulip"}) | ||||||
|  |         check_realm_reactivation_link_query_result(result) | ||||||
|  |         check_zulip_realm_query_result(result) | ||||||
|  |  | ||||||
|  |     def test_change_plan_type(self) -> None: | ||||||
|  |         cordelia = self.example_user("cordelia") | ||||||
|  |         self.login(cordelia.email) | ||||||
|  |  | ||||||
|  |         result = self.client_post("/activity/support", {"realm_id": "%s" % (cordelia.realm_id,), "plan_type": "2"}) | ||||||
|  |         self.assertEqual(result.status_code, 302) | ||||||
|  |         self.assertEqual(result["Location"], "/login/") | ||||||
|  |  | ||||||
|  |         iago = self.example_user("iago") | ||||||
|  |         self.login(iago.email) | ||||||
|  |  | ||||||
|  |         with mock.patch("analytics.views.do_change_plan_type") as m: | ||||||
|  |             result = self.client_post("/activity/support", {"realm_id": "%s" % (iago.realm_id,), "plan_type": "2"}) | ||||||
|  |             m.assert_called_once_with(get_realm("zulip"), 2) | ||||||
|  |             self.assert_in_success_response(["Plan type of Zulip Dev changed from self hosted to limited"], result) | ||||||
|  |  | ||||||
|  |     def test_attach_discount(self) -> None: | ||||||
|  |         lear_realm = get_realm("lear") | ||||||
|  |         cordelia_email = self.example_email("cordelia") | ||||||
|  |         self.login(cordelia_email) | ||||||
|  |  | ||||||
|  |         result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "discount": "25"}) | ||||||
|  |         self.assertEqual(result.status_code, 302) | ||||||
|  |         self.assertEqual(result["Location"], "/login/") | ||||||
|  |  | ||||||
|  |         iago_email = self.example_email("iago") | ||||||
|  |         self.login(iago_email) | ||||||
|  |  | ||||||
|  |         with mock.patch("analytics.views.attach_discount_to_realm") as m: | ||||||
|  |             result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "discount": "25"}) | ||||||
|  |             m.assert_called_once_with(get_realm("lear"), 25) | ||||||
|  |             self.assert_in_success_response(["Discount of Lear & Co. changed to 25 from None"], result) | ||||||
|  |  | ||||||
|  |     def test_activate_or_deactivate_realm(self) -> None: | ||||||
|  |         lear_realm = get_realm("lear") | ||||||
|  |         cordelia_email = self.example_email("cordelia") | ||||||
|  |         self.login(cordelia_email) | ||||||
|  |  | ||||||
|  |         result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "status": "deactivated"}) | ||||||
|  |         self.assertEqual(result.status_code, 302) | ||||||
|  |         self.assertEqual(result["Location"], "/login/") | ||||||
|  |  | ||||||
|  |         iago_email = self.example_email("iago") | ||||||
|  |         self.login(iago_email) | ||||||
|  |  | ||||||
|  |         with mock.patch("analytics.views.do_deactivate_realm") as m: | ||||||
|  |             result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "status": "deactivated"}) | ||||||
|  |             m.assert_called_once_with(lear_realm, self.example_user("iago")) | ||||||
|  |             self.assert_in_success_response(["Lear & Co. deactivated"], result) | ||||||
|  |  | ||||||
|  |         with mock.patch("analytics.views.do_send_realm_reactivation_email") as m: | ||||||
|  |             result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "status": "active"}) | ||||||
|  |             m.assert_called_once_with(lear_realm) | ||||||
|  |             self.assert_in_success_response(["Realm reactivation email sent to admins of Lear"], result) | ||||||
|  |  | ||||||
|  |     def test_scrub_realm(self) -> None: | ||||||
|  |         lear_realm = get_realm("lear") | ||||||
|  |         cordelia_email = self.example_email("cordelia") | ||||||
|  |         self.login(cordelia_email) | ||||||
|  |  | ||||||
|  |         result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "discount": "25"}) | ||||||
|  |         self.assertEqual(result.status_code, 302) | ||||||
|  |         self.assertEqual(result["Location"], "/login/") | ||||||
|  |  | ||||||
|  |         iago_email = self.example_email("iago") | ||||||
|  |         self.login(iago_email) | ||||||
|  |  | ||||||
|  |         with mock.patch("analytics.views.do_scrub_realm") as m: | ||||||
|  |             result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,), "scrub_realm": "scrub_realm"}) | ||||||
|  |             m.assert_called_once_with(lear_realm) | ||||||
|  |             self.assert_in_success_response(["Lear & Co. scrubbed"], result) | ||||||
|  |  | ||||||
|  |         with mock.patch("analytics.views.do_scrub_realm") as m: | ||||||
|  |             result = self.client_post("/activity/support", {"realm_id": "%s" % (lear_realm.id,)}) | ||||||
|  |             m.assert_not_called() | ||||||
|  |  | ||||||
|  | class TestGetChartDataHelpers(ZulipTestCase): | ||||||
|  |     # last_successful_fill is in analytics/models.py, but get_chart_data is | ||||||
|  |     # the only function that uses it at the moment | ||||||
|  |     def test_last_successful_fill(self) -> None: | ||||||
|  |         self.assertIsNone(last_successful_fill('non-existant')) | ||||||
|  |         a_time = datetime(2016, 3, 14, 19).replace(tzinfo=utc) | ||||||
|  |         one_hour_before = datetime(2016, 3, 14, 18).replace(tzinfo=utc) | ||||||
|  |         fillstate = FillState.objects.create(property='property', end_time=a_time, | ||||||
|  |                                              state=FillState.DONE) | ||||||
|  |         self.assertEqual(last_successful_fill('property'), a_time) | ||||||
|  |         fillstate.state = FillState.STARTED | ||||||
|  |         fillstate.save() | ||||||
|  |         self.assertEqual(last_successful_fill('property'), one_hour_before) | ||||||
|  |  | ||||||
|  |     def test_sort_by_totals(self) -> None: | ||||||
|  |         empty = []  # type: List[int] | ||||||
|  |         value_arrays = {'c': [0, 1], 'a': [9], 'b': [1, 1, 1], 'd': empty} | ||||||
|  |         self.assertEqual(sort_by_totals(value_arrays), ['a', 'b', 'c', 'd']) | ||||||
|  |  | ||||||
|  |     def test_sort_client_labels(self) -> None: | ||||||
|  |         data = {'everyone': {'a': [16], 'c': [15], 'b': [14], 'e': [13], 'd': [12], 'h': [11]}, | ||||||
|  |                 'user': {'a': [6], 'b': [5], 'd': [4], 'e': [3], 'f': [2], 'g': [1]}} | ||||||
|  |         self.assertEqual(sort_client_labels(data), ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']) | ||||||
|  |  | ||||||
|  | class TestTimeRange(ZulipTestCase): | ||||||
|  |     def test_time_range(self) -> None: | ||||||
|  |         HOUR = timedelta(hours=1) | ||||||
|  |         DAY = timedelta(days=1) | ||||||
|  |  | ||||||
|  |         a_time = datetime(2016, 3, 14, 22, 59).replace(tzinfo=utc) | ||||||
|  |         floor_hour = datetime(2016, 3, 14, 22).replace(tzinfo=utc) | ||||||
|  |         floor_day = datetime(2016, 3, 14).replace(tzinfo=utc) | ||||||
|  |  | ||||||
|  |         # test start == end | ||||||
|  |         self.assertEqual(time_range(a_time, a_time, CountStat.HOUR, None), []) | ||||||
|  |         self.assertEqual(time_range(a_time, a_time, CountStat.DAY, None), []) | ||||||
|  |         # test start == end == boundary, and min_length == 0 | ||||||
|  |         self.assertEqual(time_range(floor_hour, floor_hour, CountStat.HOUR, 0), [floor_hour]) | ||||||
|  |         self.assertEqual(time_range(floor_day, floor_day, CountStat.DAY, 0), [floor_day]) | ||||||
|  |         # test start and end on different boundaries | ||||||
|  |         self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, None), | ||||||
|  |                          [floor_hour, floor_hour+HOUR]) | ||||||
|  |         self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, None), | ||||||
|  |                          [floor_day, floor_day+DAY]) | ||||||
|  |         # test min_length | ||||||
|  |         self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, 4), | ||||||
|  |                          [floor_hour-2*HOUR, floor_hour-HOUR, floor_hour, floor_hour+HOUR]) | ||||||
|  |         self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, 4), | ||||||
|  |                          [floor_day-2*DAY, floor_day-DAY, floor_day, floor_day+DAY]) | ||||||
|  |  | ||||||
|  | class TestMapArrays(ZulipTestCase): | ||||||
|  |     def test_map_arrays(self) -> None: | ||||||
|  |         a = {'desktop app 1.0': [1, 2, 3], | ||||||
|  |              'desktop app 2.0': [10, 12, 13], | ||||||
|  |              'desktop app 3.0': [21, 22, 23], | ||||||
|  |              'website': [1, 2, 3], | ||||||
|  |              'ZulipiOS': [1, 2, 3], | ||||||
|  |              'ZulipElectron': [2, 5, 7], | ||||||
|  |              'ZulipMobile': [1, 5, 7], | ||||||
|  |              'ZulipPython': [1, 2, 3], | ||||||
|  |              'API: Python': [1, 2, 3], | ||||||
|  |              'SomethingRandom': [4, 5, 6], | ||||||
|  |              'ZulipGitHubWebhook': [7, 7, 9], | ||||||
|  |              'ZulipAndroid': [64, 63, 65]} | ||||||
|  |         result = rewrite_client_arrays(a) | ||||||
|  |         self.assertEqual(result, | ||||||
|  |                          {'Old desktop app': [32, 36, 39], | ||||||
|  |                           'Old iOS app': [1, 2, 3], | ||||||
|  |                           'Desktop app': [2, 5, 7], | ||||||
|  |                           'Mobile app': [1, 5, 7], | ||||||
|  |                           'Website': [1, 2, 3], | ||||||
|  |                           'Python API': [2, 4, 6], | ||||||
|  |                           'SomethingRandom': [4, 5, 6], | ||||||
|  |                           'GitHub webhook': [7, 7, 9], | ||||||
|  |                           'Old Android app': [64, 63, 65]}) | ||||||
| @@ -1,41 +1,33 @@ | |||||||
| from typing import List, Union | from django.conf.urls import include, url | ||||||
|  |  | ||||||
| from django.conf.urls import include | import analytics.views | ||||||
| from django.urls import path | from zerver.lib.rest import rest_dispatch | ||||||
| from django.urls.resolvers import URLPattern, URLResolver |  | ||||||
|  |  | ||||||
| from analytics.views.installation_activity import get_installation_activity | i18n_urlpatterns = [ | ||||||
| from analytics.views.realm_activity import get_realm_activity |  | ||||||
| from analytics.views.stats import ( |  | ||||||
|     get_chart_data, |  | ||||||
|     get_chart_data_for_installation, |  | ||||||
|     get_chart_data_for_realm, |  | ||||||
|     get_chart_data_for_remote_installation, |  | ||||||
|     get_chart_data_for_remote_realm, |  | ||||||
|     stats, |  | ||||||
|     stats_for_installation, |  | ||||||
|     stats_for_realm, |  | ||||||
|     stats_for_remote_installation, |  | ||||||
|     stats_for_remote_realm, |  | ||||||
| ) |  | ||||||
| from analytics.views.support import support |  | ||||||
| from analytics.views.user_activity import get_user_activity |  | ||||||
| from zerver.lib.rest import rest_path |  | ||||||
|  |  | ||||||
| i18n_urlpatterns: List[Union[URLPattern, URLResolver]] = [ |  | ||||||
|     # Server admin (user_profile.is_staff) visible stats pages |     # Server admin (user_profile.is_staff) visible stats pages | ||||||
|     path("activity", get_installation_activity), |     url(r'^activity$', analytics.views.get_activity, | ||||||
|     path("activity/support", support, name="support"), |         name='analytics.views.get_activity'), | ||||||
|     path("realm_activity/<realm_str>/", get_realm_activity), |     url(r'^activity/support$', analytics.views.support, | ||||||
|     path("user_activity/<user_profile_id>/", get_user_activity), |         name='analytics.views.support'), | ||||||
|     path("stats/realm/<realm_str>/", stats_for_realm), |     url(r'^realm_activity/(?P<realm_str>[\S]+)/$', analytics.views.get_realm_activity, | ||||||
|     path("stats/installation", stats_for_installation), |         name='analytics.views.get_realm_activity'), | ||||||
|     path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation), |     url(r'^user_activity/(?P<email>[\S]+)/$', analytics.views.get_user_activity, | ||||||
|     path( |         name='analytics.views.get_user_activity'), | ||||||
|         "stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/", stats_for_remote_realm |  | ||||||
|     ), |     url(r'^stats/realm/(?P<realm_str>[\S]+)/$', analytics.views.stats_for_realm, | ||||||
|  |         name='analytics.views.stats_for_realm'), | ||||||
|  |     url(r'^stats/installation$', analytics.views.stats_for_installation, | ||||||
|  |         name='analytics.views.stats_for_installation'), | ||||||
|  |     url(r'^stats/remote/(?P<remote_server_id>[\S]+)/installation$', | ||||||
|  |         analytics.views.stats_for_remote_installation, | ||||||
|  |         name='analytics.views.stats_for_remote_installation'), | ||||||
|  |     url(r'^stats/remote/(?P<remote_server_id>[\S]+)/realm/(?P<remote_realm_id>[\S]+)/$', | ||||||
|  |         analytics.views.stats_for_remote_realm, | ||||||
|  |         name='analytics.views.stats_for_remote_realm'), | ||||||
|  |  | ||||||
|     # User-visible stats page |     # User-visible stats page | ||||||
|     path("stats", stats, name="stats"), |     url(r'^stats$', analytics.views.stats, | ||||||
|  |         name='analytics.views.stats'), | ||||||
| ] | ] | ||||||
|  |  | ||||||
| # These endpoints are a part of the API (V1), which uses: | # These endpoints are a part of the API (V1), which uses: | ||||||
| @@ -48,22 +40,22 @@ i18n_urlpatterns: List[Union[URLPattern, URLResolver]] = [ | |||||||
| # All of these paths are accessed by either a /json or /api prefix | # All of these paths are accessed by either a /json or /api prefix | ||||||
| v1_api_and_json_patterns = [ | v1_api_and_json_patterns = [ | ||||||
|     # get data for the graphs at /stats |     # get data for the graphs at /stats | ||||||
|     rest_path("analytics/chart_data", GET=get_chart_data), |     url(r'^analytics/chart_data$', rest_dispatch, | ||||||
|     rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm), |         {'GET': 'analytics.views.get_chart_data'}), | ||||||
|     rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation), |     url(r'^analytics/chart_data/realm/(?P<realm_str>[\S]+)$', rest_dispatch, | ||||||
|     rest_path( |         {'GET': 'analytics.views.get_chart_data_for_realm'}), | ||||||
|         "analytics/chart_data/remote/<int:remote_server_id>/installation", |     url(r'^analytics/chart_data/installation$', rest_dispatch, | ||||||
|         GET=get_chart_data_for_remote_installation, |         {'GET': 'analytics.views.get_chart_data_for_installation'}), | ||||||
|     ), |     url(r'^analytics/chart_data/remote/(?P<remote_server_id>[\S]+)/installation$', rest_dispatch, | ||||||
|     rest_path( |         {'GET': 'analytics.views.get_chart_data_for_remote_installation'}), | ||||||
|         "analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>", |     url(r'^analytics/chart_data/remote/(?P<remote_server_id>[\S]+)/realm/(?P<remote_realm_id>[\S]+)$', | ||||||
|         GET=get_chart_data_for_remote_realm, |         rest_dispatch, | ||||||
|     ), |         {'GET': 'analytics.views.get_chart_data_for_remote_realm'}), | ||||||
| ] | ] | ||||||
|  |  | ||||||
| i18n_urlpatterns += [ | i18n_urlpatterns += [ | ||||||
|     path("api/v1/", include(v1_api_and_json_patterns)), |     url(r'^api/v1/', include(v1_api_and_json_patterns)), | ||||||
|     path("json/", include(v1_api_and_json_patterns)), |     url(r'^json/', include(v1_api_and_json_patterns)), | ||||||
| ] | ] | ||||||
|  |  | ||||||
| urlpatterns = i18n_urlpatterns | urlpatterns = i18n_urlpatterns | ||||||
|   | |||||||
							
								
								
									
										1484
									
								
								analytics/views.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1484
									
								
								analytics/views.py
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,154 +0,0 @@ | |||||||
| import re |  | ||||||
| import sys |  | ||||||
| from datetime import datetime |  | ||||||
| from typing import Any, Collection, Dict, List, Optional, Sequence |  | ||||||
| from urllib.parse import urlencode |  | ||||||
|  |  | ||||||
| from django.conf import settings |  | ||||||
| from django.db.backends.utils import CursorWrapper |  | ||||||
| from django.template import loader |  | ||||||
| from django.urls import reverse |  | ||||||
| from markupsafe import Markup |  | ||||||
|  |  | ||||||
| from zerver.lib.url_encoding import append_url_query_string |  | ||||||
| from zerver.models import UserActivity, get_realm |  | ||||||
|  |  | ||||||
| if sys.version_info < (3, 9):  # nocoverage |  | ||||||
|     from backports import zoneinfo |  | ||||||
| else:  # nocoverage |  | ||||||
|     import zoneinfo |  | ||||||
|  |  | ||||||
| eastern_tz = zoneinfo.ZoneInfo("America/New_York") |  | ||||||
|  |  | ||||||
|  |  | ||||||
| if settings.BILLING_ENABLED: |  | ||||||
|     pass |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def make_table( |  | ||||||
|     title: str, cols: Sequence[str], rows: Sequence[Any], has_row_class: bool = False |  | ||||||
| ) -> str: |  | ||||||
|     if not has_row_class: |  | ||||||
|  |  | ||||||
|         def fix_row(row: Any) -> Dict[str, Any]: |  | ||||||
|             return dict(cells=row, row_class=None) |  | ||||||
|  |  | ||||||
|         rows = list(map(fix_row, rows)) |  | ||||||
|  |  | ||||||
|     data = dict(title=title, cols=cols, rows=rows) |  | ||||||
|  |  | ||||||
|     content = loader.render_to_string( |  | ||||||
|         "analytics/ad_hoc_query.html", |  | ||||||
|         dict(data=data), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     return content |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def dictfetchall(cursor: CursorWrapper) -> List[Dict[str, Any]]: |  | ||||||
|     """Returns all rows from a cursor as a dict""" |  | ||||||
|     desc = cursor.description |  | ||||||
|     return [dict(zip((col[0] for col in desc), row)) for row in cursor.fetchall()] |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def format_date_for_activity_reports(date: Optional[datetime]) -> str: |  | ||||||
|     if date: |  | ||||||
|         return date.astimezone(eastern_tz).strftime("%Y-%m-%d %H:%M") |  | ||||||
|     else: |  | ||||||
|         return "" |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def user_activity_link(email: str, user_profile_id: int) -> Markup: |  | ||||||
|     from analytics.views.user_activity import get_user_activity |  | ||||||
|  |  | ||||||
|     url = reverse(get_user_activity, kwargs=dict(user_profile_id=user_profile_id)) |  | ||||||
|     return Markup('<a href="{url}">{email}</a>').format(url=url, email=email) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def realm_activity_link(realm_str: str) -> Markup: |  | ||||||
|     from analytics.views.realm_activity import get_realm_activity |  | ||||||
|  |  | ||||||
|     url = reverse(get_realm_activity, kwargs=dict(realm_str=realm_str)) |  | ||||||
|     return Markup('<a href="{url}">{realm_str}</a>').format(url=url, realm_str=realm_str) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def realm_stats_link(realm_str: str) -> Markup: |  | ||||||
|     from analytics.views.stats import stats_for_realm |  | ||||||
|  |  | ||||||
|     url = reverse(stats_for_realm, kwargs=dict(realm_str=realm_str)) |  | ||||||
|     return Markup('<a href="{url}"><i class="fa fa-pie-chart"></i></a>').format(url=url) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def realm_support_link(realm_str: str) -> Markup: |  | ||||||
|     support_url = reverse("support") |  | ||||||
|     query = urlencode({"q": realm_str}) |  | ||||||
|     url = append_url_query_string(support_url, query) |  | ||||||
|     return Markup('<a href="{url}">{realm_str}</a>').format(url=url, realm_str=realm_str) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def realm_url_link(realm_str: str) -> Markup: |  | ||||||
|     url = get_realm(realm_str).uri |  | ||||||
|     return Markup('<a href="{url}"><i class="fa fa-home"></i></a>').format(url=url) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def remote_installation_stats_link(server_id: int, hostname: str) -> Markup: |  | ||||||
|     from analytics.views.stats import stats_for_remote_installation |  | ||||||
|  |  | ||||||
|     url = reverse(stats_for_remote_installation, kwargs=dict(remote_server_id=server_id)) |  | ||||||
|     return Markup('<a href="{url}"><i class="fa fa-pie-chart"></i>{hostname}</a>').format( |  | ||||||
|         url=url, hostname=hostname |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_user_activity_summary(records: Collection[UserActivity]) -> Dict[str, Any]: |  | ||||||
|     #: The type annotation used above is clearly overly permissive. |  | ||||||
|     #: We should perhaps use TypedDict to clearly lay out the schema |  | ||||||
|     #: for the user activity summary. |  | ||||||
|     summary: Dict[str, Any] = {} |  | ||||||
|  |  | ||||||
|     def update(action: str, record: UserActivity) -> None: |  | ||||||
|         if action not in summary: |  | ||||||
|             summary[action] = dict( |  | ||||||
|                 count=record.count, |  | ||||||
|                 last_visit=record.last_visit, |  | ||||||
|             ) |  | ||||||
|         else: |  | ||||||
|             summary[action]["count"] += record.count |  | ||||||
|             summary[action]["last_visit"] = max( |  | ||||||
|                 summary[action]["last_visit"], |  | ||||||
|                 record.last_visit, |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|     if records: |  | ||||||
|         first_record = next(iter(records)) |  | ||||||
|         summary["name"] = first_record.user_profile.full_name |  | ||||||
|         summary["user_profile_id"] = first_record.user_profile.id |  | ||||||
|  |  | ||||||
|     for record in records: |  | ||||||
|         client = record.client.name |  | ||||||
|         query = str(record.query) |  | ||||||
|  |  | ||||||
|         update("use", record) |  | ||||||
|  |  | ||||||
|         if client == "API": |  | ||||||
|             m = re.match("/api/.*/external/(.*)", query) |  | ||||||
|             if m: |  | ||||||
|                 client = m.group(1) |  | ||||||
|                 update(client, record) |  | ||||||
|  |  | ||||||
|         if client.startswith("desktop"): |  | ||||||
|             update("desktop", record) |  | ||||||
|         if client == "website": |  | ||||||
|             update("website", record) |  | ||||||
|         if ("send_message" in query) or re.search("/api/.*/external/.*", query): |  | ||||||
|             update("send", record) |  | ||||||
|         if query in [ |  | ||||||
|             "/json/update_pointer", |  | ||||||
|             "/json/users/me/pointer", |  | ||||||
|             "/api/v1/update_pointer", |  | ||||||
|             "update_pointer_backend", |  | ||||||
|         ]: |  | ||||||
|             update("pointer", record) |  | ||||||
|         update(client, record) |  | ||||||
|  |  | ||||||
|     return summary |  | ||||||
| @@ -1,620 +0,0 @@ | |||||||
| import itertools |  | ||||||
| import time |  | ||||||
| from collections import defaultdict |  | ||||||
| from contextlib import suppress |  | ||||||
| from datetime import datetime, timedelta |  | ||||||
| from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union |  | ||||||
|  |  | ||||||
| from django.conf import settings |  | ||||||
| from django.db import connection |  | ||||||
| from django.http import HttpRequest, HttpResponse |  | ||||||
| from django.shortcuts import render |  | ||||||
| from django.template import loader |  | ||||||
| from django.utils.timezone import now as timezone_now |  | ||||||
| from markupsafe import Markup |  | ||||||
| from psycopg2.sql import SQL, Composable, Literal |  | ||||||
|  |  | ||||||
| from analytics.lib.counts import COUNT_STATS |  | ||||||
| from analytics.views.activity_common import ( |  | ||||||
|     dictfetchall, |  | ||||||
|     format_date_for_activity_reports, |  | ||||||
|     make_table, |  | ||||||
|     realm_activity_link, |  | ||||||
|     realm_stats_link, |  | ||||||
|     realm_support_link, |  | ||||||
|     realm_url_link, |  | ||||||
|     remote_installation_stats_link, |  | ||||||
| ) |  | ||||||
| from analytics.views.support import get_plan_name |  | ||||||
| from zerver.decorator import require_server_admin |  | ||||||
| from zerver.lib.request import has_request_variables |  | ||||||
| from zerver.lib.timestamp import timestamp_to_datetime |  | ||||||
| from zerver.models import Realm, UserActivityInterval, get_org_type_display_name |  | ||||||
|  |  | ||||||
| if settings.BILLING_ENABLED: |  | ||||||
|     from corporate.lib.stripe import ( |  | ||||||
|         estimate_annual_recurring_revenue_by_realm, |  | ||||||
|         get_realms_to_default_discount_dict, |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_realm_day_counts() -> Dict[str, Dict[str, Markup]]: |  | ||||||
|     query = SQL( |  | ||||||
|         """ |  | ||||||
|         select |  | ||||||
|             r.string_id, |  | ||||||
|             (now()::date - date_sent::date) age, |  | ||||||
|             count(*) cnt |  | ||||||
|         from zerver_message m |  | ||||||
|         join zerver_userprofile up on up.id = m.sender_id |  | ||||||
|         join zerver_realm r on r.id = up.realm_id |  | ||||||
|         join zerver_client c on c.id = m.sending_client_id |  | ||||||
|         where |  | ||||||
|             (not up.is_bot) |  | ||||||
|         and |  | ||||||
|             date_sent > now()::date - interval '8 day' |  | ||||||
|         and |  | ||||||
|             c.name not in ('zephyr_mirror', 'ZulipMonitoring') |  | ||||||
|         group by |  | ||||||
|             r.string_id, |  | ||||||
|             age |  | ||||||
|         order by |  | ||||||
|             r.string_id, |  | ||||||
|             age |  | ||||||
|     """ |  | ||||||
|     ) |  | ||||||
|     cursor = connection.cursor() |  | ||||||
|     cursor.execute(query) |  | ||||||
|     rows = dictfetchall(cursor) |  | ||||||
|     cursor.close() |  | ||||||
|  |  | ||||||
|     counts: Dict[str, Dict[int, int]] = defaultdict(dict) |  | ||||||
|     for row in rows: |  | ||||||
|         counts[row["string_id"]][row["age"]] = row["cnt"] |  | ||||||
|  |  | ||||||
|     result = {} |  | ||||||
|     for string_id in counts: |  | ||||||
|         raw_cnts = [counts[string_id].get(age, 0) for age in range(8)] |  | ||||||
|         min_cnt = min(raw_cnts[1:]) |  | ||||||
|         max_cnt = max(raw_cnts[1:]) |  | ||||||
|  |  | ||||||
|         def format_count(cnt: int, style: Optional[str] = None) -> Markup: |  | ||||||
|             if style is not None: |  | ||||||
|                 good_bad = style |  | ||||||
|             elif cnt == min_cnt: |  | ||||||
|                 good_bad = "bad" |  | ||||||
|             elif cnt == max_cnt: |  | ||||||
|                 good_bad = "good" |  | ||||||
|             else: |  | ||||||
|                 good_bad = "neutral" |  | ||||||
|  |  | ||||||
|             return Markup('<td class="number {good_bad}">{cnt}</td>').format( |  | ||||||
|                 good_bad=good_bad, cnt=cnt |  | ||||||
|             ) |  | ||||||
|  |  | ||||||
|         cnts = format_count(raw_cnts[0], "neutral") + Markup().join(map(format_count, raw_cnts[1:])) |  | ||||||
|         result[string_id] = dict(cnts=cnts) |  | ||||||
|  |  | ||||||
|     return result |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def realm_summary_table(realm_minutes: Dict[str, float]) -> str: |  | ||||||
|     now = timezone_now() |  | ||||||
|  |  | ||||||
|     query = SQL( |  | ||||||
|         """ |  | ||||||
|         SELECT |  | ||||||
|             realm.string_id, |  | ||||||
|             realm.date_created, |  | ||||||
|             realm.plan_type, |  | ||||||
|             realm.org_type, |  | ||||||
|             coalesce(wau_table.value, 0) wau_count, |  | ||||||
|             coalesce(dau_table.value, 0) dau_count, |  | ||||||
|             coalesce(user_count_table.value, 0) user_profile_count, |  | ||||||
|             coalesce(bot_count_table.value, 0) bot_count |  | ||||||
|         FROM |  | ||||||
|             zerver_realm as realm |  | ||||||
|             LEFT OUTER JOIN ( |  | ||||||
|                 SELECT |  | ||||||
|                     value _14day_active_humans, |  | ||||||
|                     realm_id |  | ||||||
|                 from |  | ||||||
|                     analytics_realmcount |  | ||||||
|                 WHERE |  | ||||||
|                     property = 'realm_active_humans::day' |  | ||||||
|                     AND end_time = %(realm_active_humans_end_time)s |  | ||||||
|             ) as _14day_active_humans_table ON realm.id = _14day_active_humans_table.realm_id |  | ||||||
|             LEFT OUTER JOIN ( |  | ||||||
|                 SELECT |  | ||||||
|                     value, |  | ||||||
|                     realm_id |  | ||||||
|                 from |  | ||||||
|                     analytics_realmcount |  | ||||||
|                 WHERE |  | ||||||
|                     property = '7day_actives::day' |  | ||||||
|                     AND end_time = %(seven_day_actives_end_time)s |  | ||||||
|             ) as wau_table ON realm.id = wau_table.realm_id |  | ||||||
|             LEFT OUTER JOIN ( |  | ||||||
|                 SELECT |  | ||||||
|                     value, |  | ||||||
|                     realm_id |  | ||||||
|                 from |  | ||||||
|                     analytics_realmcount |  | ||||||
|                 WHERE |  | ||||||
|                     property = '1day_actives::day' |  | ||||||
|                     AND end_time = %(one_day_actives_end_time)s |  | ||||||
|             ) as dau_table ON realm.id = dau_table.realm_id |  | ||||||
|             LEFT OUTER JOIN ( |  | ||||||
|                 SELECT |  | ||||||
|                     value, |  | ||||||
|                     realm_id |  | ||||||
|                 from |  | ||||||
|                     analytics_realmcount |  | ||||||
|                 WHERE |  | ||||||
|                     property = 'active_users_audit:is_bot:day' |  | ||||||
|                     AND subgroup = 'false' |  | ||||||
|                     AND end_time = %(active_users_audit_end_time)s |  | ||||||
|             ) as user_count_table ON realm.id = user_count_table.realm_id |  | ||||||
|             LEFT OUTER JOIN ( |  | ||||||
|                 SELECT |  | ||||||
|                     value, |  | ||||||
|                     realm_id |  | ||||||
|                 from |  | ||||||
|                     analytics_realmcount |  | ||||||
|                 WHERE |  | ||||||
|                     property = 'active_users_audit:is_bot:day' |  | ||||||
|                     AND subgroup = 'true' |  | ||||||
|                     AND end_time = %(active_users_audit_end_time)s |  | ||||||
|             ) as bot_count_table ON realm.id = bot_count_table.realm_id |  | ||||||
|         WHERE |  | ||||||
|             _14day_active_humans IS NOT NULL |  | ||||||
|             or realm.plan_type = 3 |  | ||||||
|         ORDER BY |  | ||||||
|             dau_count DESC, |  | ||||||
|             string_id ASC |  | ||||||
|     """ |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     cursor = connection.cursor() |  | ||||||
|     cursor.execute( |  | ||||||
|         query, |  | ||||||
|         { |  | ||||||
|             "realm_active_humans_end_time": COUNT_STATS[ |  | ||||||
|                 "realm_active_humans::day" |  | ||||||
|             ].last_successful_fill(), |  | ||||||
|             "seven_day_actives_end_time": COUNT_STATS["7day_actives::day"].last_successful_fill(), |  | ||||||
|             "one_day_actives_end_time": COUNT_STATS["1day_actives::day"].last_successful_fill(), |  | ||||||
|             "active_users_audit_end_time": COUNT_STATS[ |  | ||||||
|                 "active_users_audit:is_bot:day" |  | ||||||
|             ].last_successful_fill(), |  | ||||||
|         }, |  | ||||||
|     ) |  | ||||||
|     rows = dictfetchall(cursor) |  | ||||||
|     cursor.close() |  | ||||||
|  |  | ||||||
|     for row in rows: |  | ||||||
|         row["date_created_day"] = row["date_created"].strftime("%Y-%m-%d") |  | ||||||
|         row["age_days"] = int((now - row["date_created"]).total_seconds() / 86400) |  | ||||||
|         row["is_new"] = row["age_days"] < 12 * 7 |  | ||||||
|  |  | ||||||
|     # get messages sent per day |  | ||||||
|     counts = get_realm_day_counts() |  | ||||||
|     for row in rows: |  | ||||||
|         try: |  | ||||||
|             row["history"] = counts[row["string_id"]]["cnts"] |  | ||||||
|         except Exception: |  | ||||||
|             row["history"] = "" |  | ||||||
|  |  | ||||||
|     # estimate annual subscription revenue |  | ||||||
|     total_arr = 0 |  | ||||||
|     if settings.BILLING_ENABLED: |  | ||||||
|         estimated_arrs = estimate_annual_recurring_revenue_by_realm() |  | ||||||
|         realms_to_default_discount = get_realms_to_default_discount_dict() |  | ||||||
|  |  | ||||||
|         for row in rows: |  | ||||||
|             row["plan_type_string"] = get_plan_name(row["plan_type"]) |  | ||||||
|  |  | ||||||
|             string_id = row["string_id"] |  | ||||||
|  |  | ||||||
|             if string_id in estimated_arrs: |  | ||||||
|                 row["arr"] = estimated_arrs[string_id] |  | ||||||
|  |  | ||||||
|             if row["plan_type"] in [Realm.PLAN_TYPE_STANDARD, Realm.PLAN_TYPE_PLUS]: |  | ||||||
|                 row["effective_rate"] = 100 - int(realms_to_default_discount.get(string_id, 0)) |  | ||||||
|             elif row["plan_type"] == Realm.PLAN_TYPE_STANDARD_FREE: |  | ||||||
|                 row["effective_rate"] = 0 |  | ||||||
|             elif ( |  | ||||||
|                 row["plan_type"] == Realm.PLAN_TYPE_LIMITED |  | ||||||
|                 and string_id in realms_to_default_discount |  | ||||||
|             ): |  | ||||||
|                 row["effective_rate"] = 100 - int(realms_to_default_discount[string_id]) |  | ||||||
|             else: |  | ||||||
|                 row["effective_rate"] = "" |  | ||||||
|  |  | ||||||
|         total_arr += sum(estimated_arrs.values()) |  | ||||||
|  |  | ||||||
|     for row in rows: |  | ||||||
|         row["org_type_string"] = get_org_type_display_name(row["org_type"]) |  | ||||||
|  |  | ||||||
|     # augment data with realm_minutes |  | ||||||
|     total_hours = 0.0 |  | ||||||
|     for row in rows: |  | ||||||
|         string_id = row["string_id"] |  | ||||||
|         minutes = realm_minutes.get(string_id, 0.0) |  | ||||||
|         hours = minutes / 60.0 |  | ||||||
|         total_hours += hours |  | ||||||
|         row["hours"] = str(int(hours)) |  | ||||||
|         with suppress(Exception): |  | ||||||
|             row["hours_per_user"] = "{:.1f}".format(hours / row["dau_count"]) |  | ||||||
|  |  | ||||||
|     # formatting |  | ||||||
|     for row in rows: |  | ||||||
|         row["realm_url"] = realm_url_link(row["string_id"]) |  | ||||||
|         row["stats_link"] = realm_stats_link(row["string_id"]) |  | ||||||
|         row["support_link"] = realm_support_link(row["string_id"]) |  | ||||||
|         row["string_id"] = realm_activity_link(row["string_id"]) |  | ||||||
|  |  | ||||||
|     # Count active sites |  | ||||||
|     def meets_goal(row: Dict[str, int]) -> bool: |  | ||||||
|         return row["dau_count"] >= 5 |  | ||||||
|  |  | ||||||
|     num_active_sites = len(list(filter(meets_goal, rows))) |  | ||||||
|  |  | ||||||
|     # create totals |  | ||||||
|     total_dau_count = 0 |  | ||||||
|     total_user_profile_count = 0 |  | ||||||
|     total_bot_count = 0 |  | ||||||
|     total_wau_count = 0 |  | ||||||
|     for row in rows: |  | ||||||
|         total_dau_count += int(row["dau_count"]) |  | ||||||
|         total_user_profile_count += int(row["user_profile_count"]) |  | ||||||
|         total_bot_count += int(row["bot_count"]) |  | ||||||
|         total_wau_count += int(row["wau_count"]) |  | ||||||
|  |  | ||||||
|     total_row = dict( |  | ||||||
|         string_id="Total", |  | ||||||
|         plan_type_string="", |  | ||||||
|         org_type_string="", |  | ||||||
|         effective_rate="", |  | ||||||
|         arr=total_arr, |  | ||||||
|         realm_url="", |  | ||||||
|         stats_link="", |  | ||||||
|         support_link="", |  | ||||||
|         date_created_day="", |  | ||||||
|         dau_count=total_dau_count, |  | ||||||
|         user_profile_count=total_user_profile_count, |  | ||||||
|         bot_count=total_bot_count, |  | ||||||
|         hours=int(total_hours), |  | ||||||
|         wau_count=total_wau_count, |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     rows.insert(0, total_row) |  | ||||||
|  |  | ||||||
|     content = loader.render_to_string( |  | ||||||
|         "analytics/realm_summary_table.html", |  | ||||||
|         dict( |  | ||||||
|             rows=rows, |  | ||||||
|             num_active_sites=num_active_sites, |  | ||||||
|             utctime=now.strftime("%Y-%m-%d %H:%M %Z"), |  | ||||||
|             billing_enabled=settings.BILLING_ENABLED, |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
|     return content |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def user_activity_intervals() -> Tuple[Markup, Dict[str, float]]: |  | ||||||
|     day_end = timestamp_to_datetime(time.time()) |  | ||||||
|     day_start = day_end - timedelta(hours=24) |  | ||||||
|  |  | ||||||
|     output = Markup() |  | ||||||
|     output += "Per-user online duration for the last 24 hours:\n" |  | ||||||
|     total_duration = timedelta(0) |  | ||||||
|  |  | ||||||
|     all_intervals = ( |  | ||||||
|         UserActivityInterval.objects.filter( |  | ||||||
|             end__gte=day_start, |  | ||||||
|             start__lte=day_end, |  | ||||||
|         ) |  | ||||||
|         .select_related( |  | ||||||
|             "user_profile", |  | ||||||
|             "user_profile__realm", |  | ||||||
|         ) |  | ||||||
|         .only( |  | ||||||
|             "start", |  | ||||||
|             "end", |  | ||||||
|             "user_profile__delivery_email", |  | ||||||
|             "user_profile__realm__string_id", |  | ||||||
|         ) |  | ||||||
|         .order_by( |  | ||||||
|             "user_profile__realm__string_id", |  | ||||||
|             "user_profile__delivery_email", |  | ||||||
|         ) |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     by_string_id = lambda row: row.user_profile.realm.string_id |  | ||||||
|     by_email = lambda row: row.user_profile.delivery_email |  | ||||||
|  |  | ||||||
|     realm_minutes = {} |  | ||||||
|  |  | ||||||
|     for string_id, realm_intervals in itertools.groupby(all_intervals, by_string_id): |  | ||||||
|         realm_duration = timedelta(0) |  | ||||||
|         output += Markup("<hr>") + f"{string_id}\n" |  | ||||||
|         for email, intervals in itertools.groupby(realm_intervals, by_email): |  | ||||||
|             duration = timedelta(0) |  | ||||||
|             for interval in intervals: |  | ||||||
|                 start = max(day_start, interval.start) |  | ||||||
|                 end = min(day_end, interval.end) |  | ||||||
|                 duration += end - start |  | ||||||
|  |  | ||||||
|             total_duration += duration |  | ||||||
|             realm_duration += duration |  | ||||||
|             output += f"  {email:<37}{duration}\n" |  | ||||||
|  |  | ||||||
|         realm_minutes[string_id] = realm_duration.total_seconds() / 60 |  | ||||||
|  |  | ||||||
|     output += f"\nTotal duration:                      {total_duration}\n" |  | ||||||
|     output += f"\nTotal duration in minutes:           {total_duration.total_seconds() / 60.}\n" |  | ||||||
|     output += f"Total duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}" |  | ||||||
|     content = Markup("<pre>{}</pre>").format(output) |  | ||||||
|     return content, realm_minutes |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def ad_hoc_queries() -> List[Dict[str, str]]: |  | ||||||
|     def get_page( |  | ||||||
|         query: Composable, cols: Sequence[str], title: str, totals_columns: Sequence[int] = [] |  | ||||||
|     ) -> Dict[str, str]: |  | ||||||
|         cursor = connection.cursor() |  | ||||||
|         cursor.execute(query) |  | ||||||
|         rows = cursor.fetchall() |  | ||||||
|         rows = list(map(list, rows)) |  | ||||||
|         cursor.close() |  | ||||||
|  |  | ||||||
|         def fix_rows( |  | ||||||
|             i: int, fixup_func: Union[Callable[[str], Markup], Callable[[datetime], str]] |  | ||||||
|         ) -> None: |  | ||||||
|             for row in rows: |  | ||||||
|                 row[i] = fixup_func(row[i]) |  | ||||||
|  |  | ||||||
|         total_row = [] |  | ||||||
|         for i, col in enumerate(cols): |  | ||||||
|             if col == "Realm": |  | ||||||
|                 fix_rows(i, realm_activity_link) |  | ||||||
|             elif col in ["Last time", "Last visit"]: |  | ||||||
|                 fix_rows(i, format_date_for_activity_reports) |  | ||||||
|             elif col == "Hostname": |  | ||||||
|                 for row in rows: |  | ||||||
|                     row[i] = remote_installation_stats_link(row[0], row[i]) |  | ||||||
|             if len(totals_columns) > 0: |  | ||||||
|                 if i == 0: |  | ||||||
|                     total_row.append("Total") |  | ||||||
|                 elif i in totals_columns: |  | ||||||
|                     total_row.append(str(sum(row[i] for row in rows if row[i] is not None))) |  | ||||||
|                 else: |  | ||||||
|                     total_row.append("") |  | ||||||
|         if len(totals_columns) > 0: |  | ||||||
|             rows.insert(0, total_row) |  | ||||||
|  |  | ||||||
|         content = make_table(title, cols, rows) |  | ||||||
|  |  | ||||||
|         return dict( |  | ||||||
|             content=content, |  | ||||||
|             title=title, |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     pages = [] |  | ||||||
|  |  | ||||||
|     ### |  | ||||||
|  |  | ||||||
|     for mobile_type in ["Android", "ZulipiOS"]: |  | ||||||
|         title = f"{mobile_type} usage" |  | ||||||
|  |  | ||||||
|         query: Composable = SQL( |  | ||||||
|             """ |  | ||||||
|             select |  | ||||||
|                 realm.string_id, |  | ||||||
|                 up.id user_id, |  | ||||||
|                 client.name, |  | ||||||
|                 sum(count) as hits, |  | ||||||
|                 max(last_visit) as last_time |  | ||||||
|             from zerver_useractivity ua |  | ||||||
|             join zerver_client client on client.id = ua.client_id |  | ||||||
|             join zerver_userprofile up on up.id = ua.user_profile_id |  | ||||||
|             join zerver_realm realm on realm.id = up.realm_id |  | ||||||
|             where |  | ||||||
|                 client.name like {mobile_type} |  | ||||||
|             group by string_id, up.id, client.name |  | ||||||
|             having max(last_visit) > now() - interval '2 week' |  | ||||||
|             order by string_id, up.id, client.name |  | ||||||
|         """ |  | ||||||
|         ).format( |  | ||||||
|             mobile_type=Literal(mobile_type), |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         cols = [ |  | ||||||
|             "Realm", |  | ||||||
|             "User id", |  | ||||||
|             "Name", |  | ||||||
|             "Hits", |  | ||||||
|             "Last time", |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|         pages.append(get_page(query, cols, title)) |  | ||||||
|  |  | ||||||
|     ### |  | ||||||
|  |  | ||||||
|     title = "Desktop users" |  | ||||||
|  |  | ||||||
|     query = SQL( |  | ||||||
|         """ |  | ||||||
|         select |  | ||||||
|             realm.string_id, |  | ||||||
|             client.name, |  | ||||||
|             sum(count) as hits, |  | ||||||
|             max(last_visit) as last_time |  | ||||||
|         from zerver_useractivity ua |  | ||||||
|         join zerver_client client on client.id = ua.client_id |  | ||||||
|         join zerver_userprofile up on up.id = ua.user_profile_id |  | ||||||
|         join zerver_realm realm on realm.id = up.realm_id |  | ||||||
|         where |  | ||||||
|             client.name like 'desktop%%' |  | ||||||
|         group by string_id, client.name |  | ||||||
|         having max(last_visit) > now() - interval '2 week' |  | ||||||
|         order by string_id, client.name |  | ||||||
|     """ |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     cols = [ |  | ||||||
|         "Realm", |  | ||||||
|         "Client", |  | ||||||
|         "Hits", |  | ||||||
|         "Last time", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     pages.append(get_page(query, cols, title)) |  | ||||||
|  |  | ||||||
|     ### |  | ||||||
|  |  | ||||||
|     title = "Integrations by realm" |  | ||||||
|  |  | ||||||
|     query = SQL( |  | ||||||
|         """ |  | ||||||
|         select |  | ||||||
|             realm.string_id, |  | ||||||
|             case |  | ||||||
|                 when query like '%%external%%' then split_part(query, '/', 5) |  | ||||||
|                 else client.name |  | ||||||
|             end client_name, |  | ||||||
|             sum(count) as hits, |  | ||||||
|             max(last_visit) as last_time |  | ||||||
|         from zerver_useractivity ua |  | ||||||
|         join zerver_client client on client.id = ua.client_id |  | ||||||
|         join zerver_userprofile up on up.id = ua.user_profile_id |  | ||||||
|         join zerver_realm realm on realm.id = up.realm_id |  | ||||||
|         where |  | ||||||
|             (query in ('send_message_backend', '/api/v1/send_message') |  | ||||||
|             and client.name not in ('Android', 'ZulipiOS') |  | ||||||
|             and client.name not like 'test: Zulip%%' |  | ||||||
|             ) |  | ||||||
|         or |  | ||||||
|             query like '%%external%%' |  | ||||||
|         group by string_id, client_name |  | ||||||
|         having max(last_visit) > now() - interval '2 week' |  | ||||||
|         order by string_id, client_name |  | ||||||
|     """ |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     cols = [ |  | ||||||
|         "Realm", |  | ||||||
|         "Client", |  | ||||||
|         "Hits", |  | ||||||
|         "Last time", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     pages.append(get_page(query, cols, title)) |  | ||||||
|  |  | ||||||
|     ### |  | ||||||
|  |  | ||||||
|     title = "Integrations by client" |  | ||||||
|  |  | ||||||
|     query = SQL( |  | ||||||
|         """ |  | ||||||
|         select |  | ||||||
|             case |  | ||||||
|                 when query like '%%external%%' then split_part(query, '/', 5) |  | ||||||
|                 else client.name |  | ||||||
|             end client_name, |  | ||||||
|             realm.string_id, |  | ||||||
|             sum(count) as hits, |  | ||||||
|             max(last_visit) as last_time |  | ||||||
|         from zerver_useractivity ua |  | ||||||
|         join zerver_client client on client.id = ua.client_id |  | ||||||
|         join zerver_userprofile up on up.id = ua.user_profile_id |  | ||||||
|         join zerver_realm realm on realm.id = up.realm_id |  | ||||||
|         where |  | ||||||
|             (query in ('send_message_backend', '/api/v1/send_message') |  | ||||||
|             and client.name not in ('Android', 'ZulipiOS') |  | ||||||
|             and client.name not like 'test: Zulip%%' |  | ||||||
|             ) |  | ||||||
|         or |  | ||||||
|             query like '%%external%%' |  | ||||||
|         group by client_name, string_id |  | ||||||
|         having max(last_visit) > now() - interval '2 week' |  | ||||||
|         order by client_name, string_id |  | ||||||
|     """ |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     cols = [ |  | ||||||
|         "Client", |  | ||||||
|         "Realm", |  | ||||||
|         "Hits", |  | ||||||
|         "Last time", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     pages.append(get_page(query, cols, title)) |  | ||||||
|  |  | ||||||
|     title = "Remote Zulip servers" |  | ||||||
|  |  | ||||||
|     query = SQL( |  | ||||||
|         """ |  | ||||||
|         with icount as ( |  | ||||||
|             select |  | ||||||
|                 server_id, |  | ||||||
|                 max(value) as max_value, |  | ||||||
|                 max(end_time) as max_end_time |  | ||||||
|             from zilencer_remoteinstallationcount |  | ||||||
|             where |  | ||||||
|                 property='active_users:is_bot:day' |  | ||||||
|                 and subgroup='false' |  | ||||||
|             group by server_id |  | ||||||
|             ), |  | ||||||
|         remote_push_devices as ( |  | ||||||
|             select server_id, count(distinct(user_id)) as push_user_count from zilencer_remotepushdevicetoken |  | ||||||
|             group by server_id |  | ||||||
|         ) |  | ||||||
|         select |  | ||||||
|             rserver.id, |  | ||||||
|             rserver.hostname, |  | ||||||
|             rserver.contact_email, |  | ||||||
|             max_value, |  | ||||||
|             push_user_count, |  | ||||||
|             max_end_time |  | ||||||
|         from zilencer_remotezulipserver rserver |  | ||||||
|         left join icount on icount.server_id = rserver.id |  | ||||||
|         left join remote_push_devices on remote_push_devices.server_id = rserver.id |  | ||||||
|         order by max_value DESC NULLS LAST, push_user_count DESC NULLS LAST |  | ||||||
|     """ |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     cols = [ |  | ||||||
|         "ID", |  | ||||||
|         "Hostname", |  | ||||||
|         "Contact email", |  | ||||||
|         "Analytics users", |  | ||||||
|         "Mobile users", |  | ||||||
|         "Last update time", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     pages.append(get_page(query, cols, title, totals_columns=[3, 4])) |  | ||||||
|  |  | ||||||
|     return pages |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @require_server_admin |  | ||||||
| @has_request_variables |  | ||||||
| def get_installation_activity(request: HttpRequest) -> HttpResponse: |  | ||||||
|     duration_content, realm_minutes = user_activity_intervals() |  | ||||||
|     counts_content: str = realm_summary_table(realm_minutes) |  | ||||||
|     data = [ |  | ||||||
|         ("Counts", counts_content), |  | ||||||
|         ("Durations", duration_content), |  | ||||||
|     ] |  | ||||||
|     for page in ad_hoc_queries(): |  | ||||||
|         data.append((page["title"], page["content"])) |  | ||||||
|  |  | ||||||
|     title = "Activity" |  | ||||||
|  |  | ||||||
|     return render( |  | ||||||
|         request, |  | ||||||
|         "analytics/activity.html", |  | ||||||
|         context=dict(data=data, title=title, is_home=True), |  | ||||||
|     ) |  | ||||||
| @@ -1,262 +0,0 @@ | |||||||
| import itertools |  | ||||||
| from datetime import datetime |  | ||||||
| from typing import Any, Dict, List, Optional, Set, Tuple |  | ||||||
|  |  | ||||||
| from django.db import connection |  | ||||||
| from django.db.models import QuerySet |  | ||||||
| from django.http import HttpRequest, HttpResponse, HttpResponseNotFound |  | ||||||
| from django.shortcuts import render |  | ||||||
| from django.utils.timezone import now as timezone_now |  | ||||||
| from psycopg2.sql import SQL |  | ||||||
|  |  | ||||||
| from analytics.views.activity_common import ( |  | ||||||
|     format_date_for_activity_reports, |  | ||||||
|     get_user_activity_summary, |  | ||||||
|     make_table, |  | ||||||
|     realm_stats_link, |  | ||||||
|     user_activity_link, |  | ||||||
| ) |  | ||||||
| from zerver.decorator import require_server_admin |  | ||||||
| from zerver.models import Realm, UserActivity |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_user_activity_records_for_realm(realm: str, is_bot: bool) -> QuerySet[UserActivity]: |  | ||||||
|     fields = [ |  | ||||||
|         "user_profile__full_name", |  | ||||||
|         "user_profile__delivery_email", |  | ||||||
|         "query", |  | ||||||
|         "client__name", |  | ||||||
|         "count", |  | ||||||
|         "last_visit", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     records = UserActivity.objects.filter( |  | ||||||
|         user_profile__realm__string_id=realm, |  | ||||||
|         user_profile__is_active=True, |  | ||||||
|         user_profile__is_bot=is_bot, |  | ||||||
|     ) |  | ||||||
|     records = records.order_by("user_profile__delivery_email", "-last_visit") |  | ||||||
|     records = records.select_related("user_profile", "client").only(*fields) |  | ||||||
|     return records |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def realm_user_summary_table( |  | ||||||
|     all_records: QuerySet[UserActivity], admin_emails: Set[str] |  | ||||||
| ) -> Tuple[Dict[str, Any], str]: |  | ||||||
|     user_records = {} |  | ||||||
|  |  | ||||||
|     def by_email(record: UserActivity) -> str: |  | ||||||
|         return record.user_profile.delivery_email |  | ||||||
|  |  | ||||||
|     for email, records in itertools.groupby(all_records, by_email): |  | ||||||
|         user_records[email] = get_user_activity_summary(list(records)) |  | ||||||
|  |  | ||||||
|     def get_last_visit(user_summary: Dict[str, Dict[str, datetime]], k: str) -> Optional[datetime]: |  | ||||||
|         if k in user_summary: |  | ||||||
|             return user_summary[k]["last_visit"] |  | ||||||
|         else: |  | ||||||
|             return None |  | ||||||
|  |  | ||||||
|     def get_count(user_summary: Dict[str, Dict[str, str]], k: str) -> str: |  | ||||||
|         if k in user_summary: |  | ||||||
|             return user_summary[k]["count"] |  | ||||||
|         else: |  | ||||||
|             return "" |  | ||||||
|  |  | ||||||
|     def is_recent(val: datetime) -> bool: |  | ||||||
|         age = timezone_now() - val |  | ||||||
|         return age.total_seconds() < 5 * 60 |  | ||||||
|  |  | ||||||
|     rows = [] |  | ||||||
|     for email, user_summary in user_records.items(): |  | ||||||
|         email_link = user_activity_link(email, user_summary["user_profile_id"]) |  | ||||||
|         sent_count = get_count(user_summary, "send") |  | ||||||
|         cells = [user_summary["name"], email_link, sent_count] |  | ||||||
|         row_class = "" |  | ||||||
|         for field in ["use", "send", "pointer", "desktop", "ZulipiOS", "Android"]: |  | ||||||
|             visit = get_last_visit(user_summary, field) |  | ||||||
|             if field == "use": |  | ||||||
|                 if visit and is_recent(visit): |  | ||||||
|                     row_class += " recently_active" |  | ||||||
|                 if email in admin_emails: |  | ||||||
|                     row_class += " admin" |  | ||||||
|             val = format_date_for_activity_reports(visit) |  | ||||||
|             cells.append(val) |  | ||||||
|         row = dict(cells=cells, row_class=row_class) |  | ||||||
|         rows.append(row) |  | ||||||
|  |  | ||||||
|     def by_used_time(row: Dict[str, Any]) -> str: |  | ||||||
|         return row["cells"][3] |  | ||||||
|  |  | ||||||
|     rows = sorted(rows, key=by_used_time, reverse=True) |  | ||||||
|  |  | ||||||
|     cols = [ |  | ||||||
|         "Name", |  | ||||||
|         "Email", |  | ||||||
|         "Total sent", |  | ||||||
|         "Heard from", |  | ||||||
|         "Message sent", |  | ||||||
|         "Pointer motion", |  | ||||||
|         "Desktop", |  | ||||||
|         "ZulipiOS", |  | ||||||
|         "Android", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     title = "Summary" |  | ||||||
|  |  | ||||||
|     content = make_table(title, cols, rows, has_row_class=True) |  | ||||||
|     return user_records, content |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def realm_client_table(user_summaries: Dict[str, Dict[str, Any]]) -> str: |  | ||||||
|     exclude_keys = [ |  | ||||||
|         "internal", |  | ||||||
|         "name", |  | ||||||
|         "user_profile_id", |  | ||||||
|         "use", |  | ||||||
|         "send", |  | ||||||
|         "pointer", |  | ||||||
|         "website", |  | ||||||
|         "desktop", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     rows = [] |  | ||||||
|     for email, user_summary in user_summaries.items(): |  | ||||||
|         email_link = user_activity_link(email, user_summary["user_profile_id"]) |  | ||||||
|         name = user_summary["name"] |  | ||||||
|         for k, v in user_summary.items(): |  | ||||||
|             if k in exclude_keys: |  | ||||||
|                 continue |  | ||||||
|             client = k |  | ||||||
|             count = v["count"] |  | ||||||
|             last_visit = v["last_visit"] |  | ||||||
|             row = [ |  | ||||||
|                 format_date_for_activity_reports(last_visit), |  | ||||||
|                 client, |  | ||||||
|                 name, |  | ||||||
|                 email_link, |  | ||||||
|                 count, |  | ||||||
|             ] |  | ||||||
|             rows.append(row) |  | ||||||
|  |  | ||||||
|     rows = sorted(rows, key=lambda r: r[0], reverse=True) |  | ||||||
|  |  | ||||||
|     cols = [ |  | ||||||
|         "Last visit", |  | ||||||
|         "Client", |  | ||||||
|         "Name", |  | ||||||
|         "Email", |  | ||||||
|         "Count", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     title = "Clients" |  | ||||||
|  |  | ||||||
|     return make_table(title, cols, rows) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def sent_messages_report(realm: str) -> str: |  | ||||||
|     title = "Recently sent messages for " + realm |  | ||||||
|  |  | ||||||
|     cols = [ |  | ||||||
|         "Date", |  | ||||||
|         "Humans", |  | ||||||
|         "Bots", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     query = SQL( |  | ||||||
|         """ |  | ||||||
|         select |  | ||||||
|             series.day::date, |  | ||||||
|             humans.cnt, |  | ||||||
|             bots.cnt |  | ||||||
|         from ( |  | ||||||
|             select generate_series( |  | ||||||
|                 (now()::date - interval '2 week'), |  | ||||||
|                 now()::date, |  | ||||||
|                 interval '1 day' |  | ||||||
|             ) as day |  | ||||||
|         ) as series |  | ||||||
|         left join ( |  | ||||||
|             select |  | ||||||
|                 date_sent::date date_sent, |  | ||||||
|                 count(*) cnt |  | ||||||
|             from zerver_message m |  | ||||||
|             join zerver_userprofile up on up.id = m.sender_id |  | ||||||
|             join zerver_realm r on r.id = up.realm_id |  | ||||||
|             where |  | ||||||
|                 r.string_id = %s |  | ||||||
|             and |  | ||||||
|                 (not up.is_bot) |  | ||||||
|             and |  | ||||||
|                 date_sent > now() - interval '2 week' |  | ||||||
|             group by |  | ||||||
|                 date_sent::date |  | ||||||
|             order by |  | ||||||
|                 date_sent::date |  | ||||||
|         ) humans on |  | ||||||
|             series.day = humans.date_sent |  | ||||||
|         left join ( |  | ||||||
|             select |  | ||||||
|                 date_sent::date date_sent, |  | ||||||
|                 count(*) cnt |  | ||||||
|             from zerver_message m |  | ||||||
|             join zerver_userprofile up on up.id = m.sender_id |  | ||||||
|             join zerver_realm r on r.id = up.realm_id |  | ||||||
|             where |  | ||||||
|                 r.string_id = %s |  | ||||||
|             and |  | ||||||
|                 up.is_bot |  | ||||||
|             and |  | ||||||
|                 date_sent > now() - interval '2 week' |  | ||||||
|             group by |  | ||||||
|                 date_sent::date |  | ||||||
|             order by |  | ||||||
|                 date_sent::date |  | ||||||
|         ) bots on |  | ||||||
|             series.day = bots.date_sent |  | ||||||
|     """ |  | ||||||
|     ) |  | ||||||
|     cursor = connection.cursor() |  | ||||||
|     cursor.execute(query, [realm, realm]) |  | ||||||
|     rows = cursor.fetchall() |  | ||||||
|     cursor.close() |  | ||||||
|  |  | ||||||
|     return make_table(title, cols, rows) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @require_server_admin |  | ||||||
| def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse: |  | ||||||
|     data: List[Tuple[str, str]] = [] |  | ||||||
|     all_user_records: Dict[str, Any] = {} |  | ||||||
|  |  | ||||||
|     try: |  | ||||||
|         admins = Realm.objects.get(string_id=realm_str).get_human_admin_users() |  | ||||||
|     except Realm.DoesNotExist: |  | ||||||
|         return HttpResponseNotFound() |  | ||||||
|  |  | ||||||
|     admin_emails = {admin.delivery_email for admin in admins} |  | ||||||
|  |  | ||||||
|     for is_bot, page_title in [(False, "Humans"), (True, "Bots")]: |  | ||||||
|         all_records = get_user_activity_records_for_realm(realm_str, is_bot) |  | ||||||
|  |  | ||||||
|         user_records, content = realm_user_summary_table(all_records, admin_emails) |  | ||||||
|         all_user_records.update(user_records) |  | ||||||
|  |  | ||||||
|         data += [(page_title, content)] |  | ||||||
|  |  | ||||||
|     page_title = "Clients" |  | ||||||
|     content = realm_client_table(all_user_records) |  | ||||||
|     data += [(page_title, content)] |  | ||||||
|  |  | ||||||
|     page_title = "History" |  | ||||||
|     content = sent_messages_report(realm_str) |  | ||||||
|     data += [(page_title, content)] |  | ||||||
|  |  | ||||||
|     title = realm_str |  | ||||||
|     realm_stats = realm_stats_link(realm_str) |  | ||||||
|  |  | ||||||
|     return render( |  | ||||||
|         request, |  | ||||||
|         "analytics/activity.html", |  | ||||||
|         context=dict(data=data, realm_stats_link=realm_stats, title=title), |  | ||||||
|     ) |  | ||||||
| @@ -1,542 +0,0 @@ | |||||||
| import logging |  | ||||||
| from collections import defaultdict |  | ||||||
| from datetime import datetime, timedelta, timezone |  | ||||||
| from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar, Union, cast |  | ||||||
|  |  | ||||||
| from django.conf import settings |  | ||||||
| from django.db.models import QuerySet |  | ||||||
| from django.http import HttpRequest, HttpResponse, HttpResponseNotFound |  | ||||||
| from django.shortcuts import render |  | ||||||
| from django.utils import translation |  | ||||||
| from django.utils.timezone import now as timezone_now |  | ||||||
| from django.utils.translation import gettext as _ |  | ||||||
|  |  | ||||||
| from analytics.lib.counts import COUNT_STATS, CountStat |  | ||||||
| from analytics.lib.time_utils import time_range |  | ||||||
| from analytics.models import ( |  | ||||||
|     BaseCount, |  | ||||||
|     InstallationCount, |  | ||||||
|     RealmCount, |  | ||||||
|     StreamCount, |  | ||||||
|     UserCount, |  | ||||||
|     installation_epoch, |  | ||||||
| ) |  | ||||||
| from zerver.decorator import ( |  | ||||||
|     require_non_guest_user, |  | ||||||
|     require_server_admin, |  | ||||||
|     require_server_admin_api, |  | ||||||
|     to_utc_datetime, |  | ||||||
|     zulip_login_required, |  | ||||||
| ) |  | ||||||
| from zerver.lib.exceptions import JsonableError |  | ||||||
| from zerver.lib.i18n import get_and_set_request_language, get_language_translation_data |  | ||||||
| from zerver.lib.request import REQ, has_request_variables |  | ||||||
| from zerver.lib.response import json_success |  | ||||||
| from zerver.lib.timestamp import convert_to_UTC |  | ||||||
| from zerver.lib.validator import to_non_negative_int |  | ||||||
| from zerver.models import Client, Realm, UserProfile, get_realm |  | ||||||
|  |  | ||||||
| if settings.ZILENCER_ENABLED: |  | ||||||
|     from zilencer.models import RemoteInstallationCount, RemoteRealmCount, RemoteZulipServer |  | ||||||
|  |  | ||||||
| MAX_TIME_FOR_FULL_ANALYTICS_GENERATION = timedelta(days=1, minutes=30) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def is_analytics_ready(realm: Realm) -> bool: |  | ||||||
|     return (timezone_now() - realm.date_created) > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def render_stats( |  | ||||||
|     request: HttpRequest, |  | ||||||
|     data_url_suffix: str, |  | ||||||
|     realm: Optional[Realm], |  | ||||||
|     *, |  | ||||||
|     title: Optional[str] = None, |  | ||||||
|     for_installation: bool = False, |  | ||||||
|     remote: bool = False, |  | ||||||
|     analytics_ready: bool = True, |  | ||||||
| ) -> HttpResponse: |  | ||||||
|     assert request.user.is_authenticated |  | ||||||
|  |  | ||||||
|     if realm is not None: |  | ||||||
|         # Same query to get guest user count as in get_seat_count in corporate/lib/stripe.py. |  | ||||||
|         guest_users = UserProfile.objects.filter( |  | ||||||
|             realm=realm, is_active=True, is_bot=False, role=UserProfile.ROLE_GUEST |  | ||||||
|         ).count() |  | ||||||
|         space_used = realm.currently_used_upload_space_bytes() |  | ||||||
|         if title: |  | ||||||
|             pass |  | ||||||
|         else: |  | ||||||
|             title = realm.name or realm.string_id |  | ||||||
|     else: |  | ||||||
|         assert title |  | ||||||
|         guest_users = None |  | ||||||
|         space_used = None |  | ||||||
|  |  | ||||||
|     page_params = dict( |  | ||||||
|         data_url_suffix=data_url_suffix, |  | ||||||
|         for_installation=for_installation, |  | ||||||
|         remote=remote, |  | ||||||
|         upload_space_used=space_used, |  | ||||||
|         guest_users=guest_users, |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     request_language = get_and_set_request_language( |  | ||||||
|         request, |  | ||||||
|         request.user.default_language, |  | ||||||
|         translation.get_language_from_path(request.path_info), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     page_params["translation_data"] = get_language_translation_data(request_language) |  | ||||||
|  |  | ||||||
|     return render( |  | ||||||
|         request, |  | ||||||
|         "analytics/stats.html", |  | ||||||
|         context=dict( |  | ||||||
|             target_name=title, |  | ||||||
|             page_params=page_params, |  | ||||||
|             analytics_ready=analytics_ready, |  | ||||||
|         ), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @zulip_login_required |  | ||||||
| def stats(request: HttpRequest) -> HttpResponse: |  | ||||||
|     assert request.user.is_authenticated |  | ||||||
|     realm = request.user.realm |  | ||||||
|     if request.user.is_guest: |  | ||||||
|         # TODO: Make @zulip_login_required pass the UserProfile so we |  | ||||||
|         # can use @require_member_or_admin |  | ||||||
|         raise JsonableError(_("Not allowed for guest users")) |  | ||||||
|     return render_stats(request, "", realm, analytics_ready=is_analytics_ready(realm)) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @require_server_admin |  | ||||||
| @has_request_variables |  | ||||||
| def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse: |  | ||||||
|     try: |  | ||||||
|         realm = get_realm(realm_str) |  | ||||||
|     except Realm.DoesNotExist: |  | ||||||
|         return HttpResponseNotFound() |  | ||||||
|  |  | ||||||
|     return render_stats( |  | ||||||
|         request, |  | ||||||
|         f"/realm/{realm_str}", |  | ||||||
|         realm, |  | ||||||
|         analytics_ready=is_analytics_ready(realm), |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @require_server_admin |  | ||||||
| @has_request_variables |  | ||||||
| def stats_for_remote_realm( |  | ||||||
|     request: HttpRequest, remote_server_id: int, remote_realm_id: int |  | ||||||
| ) -> HttpResponse: |  | ||||||
|     assert settings.ZILENCER_ENABLED |  | ||||||
|     server = RemoteZulipServer.objects.get(id=remote_server_id) |  | ||||||
|     return render_stats( |  | ||||||
|         request, |  | ||||||
|         f"/remote/{server.id}/realm/{remote_realm_id}", |  | ||||||
|         None, |  | ||||||
|         title=f"Realm {remote_realm_id} on server {server.hostname}", |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @require_server_admin_api |  | ||||||
| @has_request_variables |  | ||||||
| def get_chart_data_for_realm( |  | ||||||
|     request: HttpRequest, /, user_profile: UserProfile, realm_str: str, **kwargs: Any |  | ||||||
| ) -> HttpResponse: |  | ||||||
|     try: |  | ||||||
|         realm = get_realm(realm_str) |  | ||||||
|     except Realm.DoesNotExist: |  | ||||||
|         raise JsonableError(_("Invalid organization")) |  | ||||||
|  |  | ||||||
|     return get_chart_data(request, user_profile, realm=realm, **kwargs) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @require_server_admin_api |  | ||||||
| @has_request_variables |  | ||||||
| def get_chart_data_for_remote_realm( |  | ||||||
|     request: HttpRequest, |  | ||||||
|     /, |  | ||||||
|     user_profile: UserProfile, |  | ||||||
|     remote_server_id: int, |  | ||||||
|     remote_realm_id: int, |  | ||||||
|     **kwargs: Any, |  | ||||||
| ) -> HttpResponse: |  | ||||||
|     assert settings.ZILENCER_ENABLED |  | ||||||
|     server = RemoteZulipServer.objects.get(id=remote_server_id) |  | ||||||
|     return get_chart_data( |  | ||||||
|         request, |  | ||||||
|         user_profile, |  | ||||||
|         server=server, |  | ||||||
|         remote=True, |  | ||||||
|         remote_realm_id=int(remote_realm_id), |  | ||||||
|         **kwargs, |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @require_server_admin |  | ||||||
| def stats_for_installation(request: HttpRequest) -> HttpResponse: |  | ||||||
|     assert request.user.is_authenticated |  | ||||||
|     return render_stats(request, "/installation", None, title="installation", for_installation=True) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @require_server_admin |  | ||||||
| def stats_for_remote_installation(request: HttpRequest, remote_server_id: int) -> HttpResponse: |  | ||||||
|     assert settings.ZILENCER_ENABLED |  | ||||||
|     server = RemoteZulipServer.objects.get(id=remote_server_id) |  | ||||||
|     return render_stats( |  | ||||||
|         request, |  | ||||||
|         f"/remote/{server.id}/installation", |  | ||||||
|         None, |  | ||||||
|         title=f"remote installation {server.hostname}", |  | ||||||
|         for_installation=True, |  | ||||||
|         remote=True, |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @require_server_admin_api |  | ||||||
| @has_request_variables |  | ||||||
| def get_chart_data_for_installation( |  | ||||||
|     request: HttpRequest, /, user_profile: UserProfile, chart_name: str = REQ(), **kwargs: Any |  | ||||||
| ) -> HttpResponse: |  | ||||||
|     return get_chart_data(request, user_profile, for_installation=True, **kwargs) |  | ||||||
|  |  | ||||||
|  |  | ||||||
@require_server_admin_api
@has_request_variables
def get_chart_data_for_remote_installation(
    request: HttpRequest,
    /,
    user_profile: UserProfile,
    remote_server_id: int,
    chart_name: str = REQ(),
    **kwargs: Any,
) -> HttpResponse:
    """Fetch installation-wide chart data for a remote Zulip server (Zilencer only)."""
    assert settings.ZILENCER_ENABLED
    remote_server = RemoteZulipServer.objects.get(id=remote_server_id)
    return get_chart_data(
        request,
        user_profile,
        for_installation=True,
        remote=True,
        server=remote_server,
        **kwargs,
    )
|  |  | ||||||
|  |  | ||||||
@require_non_guest_user
@has_request_variables
def get_chart_data(
    request: HttpRequest,
    user_profile: UserProfile,
    chart_name: str = REQ(),
    min_length: Optional[int] = REQ(converter=to_non_negative_int, default=None),
    start: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    end: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    realm: Optional[Realm] = None,
    for_installation: bool = False,
    remote: bool = False,
    remote_realm_id: Optional[int] = None,
    server: Optional["RemoteZulipServer"] = None,
) -> HttpResponse:
    """Return JSON time-series data for one named chart on the /stats page.

    The response maps each aggregation level ("everyone", "user") to
    {label: [value per end_time]} series, plus "end_times", "frequency",
    and "display_order" keys.

    Parameters beyond the REQ()-extracted ones are supplied by the wrapper
    views above: `for_installation` selects installation-level aggregates,
    `remote` selects the Remote* tables (Zilencer), and `server` /
    `remote_realm_id` identify the remote data to filter by.

    Raises JsonableError for an unknown chart name, an inverted time
    range, or when no analytics data is available.
    """
    # All four candidate aggregate tables share the subgroup/end_time/value
    # schema consumed by get_time_series_by_subgroup below.
    TableType = Union[
        Type["RemoteInstallationCount"],
        Type[InstallationCount],
        Type["RemoteRealmCount"],
        Type[RealmCount],
    ]
    # Choose the aggregate table from (for_installation, remote); the remote
    # variants additionally require a server (and realm id) to filter by.
    if for_installation:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table: TableType = RemoteInstallationCount
            assert server is not None
        else:
            aggregate_table = InstallationCount
    else:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table = RemoteRealmCount
            assert server is not None
            assert remote_realm_id is not None
        else:
            aggregate_table = RealmCount

    tables: Union[Tuple[TableType], Tuple[TableType, Type[UserCount]]]

    # Dispatch on chart_name: each branch fixes the CountStats to query, the
    # tables (aggregate-only, or aggregate plus per-user), how subgroup keys
    # map to display labels, how labels are ordered, and whether subgroups
    # with no data still get an (all-zero) series.
    if chart_name == "number_of_humans":
        stats = [
            COUNT_STATS["1day_actives::day"],
            COUNT_STATS["realm_active_humans::day"],
            COUNT_STATS["active_users_audit:is_bot:day"],
        ]
        tables = (aggregate_table,)
        # "false" here is the is_bot subgroup key, i.e. human users only.
        subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = {
            stats[0]: {None: "_1day"},
            stats[1]: {None: "_15day"},
            stats[2]: {"false": "all_time"},
        }
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_over_time":
        stats = [COUNT_STATS["messages_sent:is_bot:hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_message_type":
        stats = [COUNT_STATS["messages_sent:message_type:day"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {
            stats[0]: {
                "public_stream": _("Public streams"),
                "private_stream": _("Private streams"),
                "private_message": _("Direct messages"),
                "huddle_message": _("Group direct messages"),
            }
        }
        labels_sort_function = lambda data: sort_by_totals(data["everyone"])
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_client":
        stats = [COUNT_STATS["messages_sent:client:day"]]
        tables = (aggregate_table, UserCount)
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {
            stats[0]: {str(id): name for id, name in Client.objects.values_list("id", "name")}
        }
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    elif chart_name == "messages_read_over_time":
        stats = [COUNT_STATS["messages_read::hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {None: "read"}}
        labels_sort_function = None
        include_empty_subgroups = True
    else:
        raise JsonableError(_("Unknown chart name: {}").format(chart_name))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(
            _("Start time is later than end time. Start: {start}, End: {end}").format(
                start=start,
                end=end,
            )
        )

    if realm is None:
        # Note that this value is invalid for Remote tables; be
        # careful not to access it in those code paths.
        realm = user_profile.realm

    if remote:
        # For remote servers, we don't have fillstate data, and thus
        # should simply use the first and last data points for the
        # table.
        assert server is not None
        assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
        aggregate_table_remote = cast(
            Union[Type[RemoteInstallationCount], Type[RemoteRealmCount]], aggregate_table
        )  # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
        if not aggregate_table_remote.objects.filter(server=server).exists():
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator.")
            )
        if start is None:
            first = aggregate_table_remote.objects.filter(server=server).first()
            assert first is not None
            start = first.end_time
        if end is None:
            last = aggregate_table_remote.objects.filter(server=server).last()
            assert last is not None
            end = last.end_time
    else:
        # Otherwise, we can use tables on the current server to
        # determine a nice range, and some additional validation.
        if start is None:
            if for_installation:
                start = installation_epoch()
            else:
                start = realm.date_created
        if end is None:
            end = max(
                stat.last_successful_fill() or datetime.min.replace(tzinfo=timezone.utc)
                for stat in stats
            )

        if start > end and (timezone_now() - start > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION):
            logging.warning(
                "User from realm %s attempted to access /stats, but the computed "
                "start time: %s (creation of realm or installation) is later than the computed "
                "end time: %s (last successful analytics update). Is the "
                "analytics cron job running?",
                realm.string_id,
                start,
                end,
            )
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator.")
            )

    # All stats for one chart must share a frequency so end_times is valid
    # for every series; min_length pads the range at the front if requested.
    assert len({stat.frequency for stat in stats}) == 1
    end_times = time_range(start, end, stats[0].frequency, min_length)
    data: Dict[str, Any] = {
        "end_times": [int(end_time.timestamp()) for end_time in end_times],
        "frequency": stats[0].frequency,
    }

    # Maps each count table to the response key its series are stored under.
    aggregation_level = {
        InstallationCount: "everyone",
        RealmCount: "everyone",
        UserCount: "user",
    }
    if settings.ZILENCER_ENABLED:
        aggregation_level[RemoteInstallationCount] = "everyone"
        aggregation_level[RemoteRealmCount] = "everyone"

    # -1 is a placeholder value, since there is no relevant filtering on InstallationCount
    id_value = {
        InstallationCount: -1,
        RealmCount: realm.id,
        UserCount: user_profile.id,
    }
    if settings.ZILENCER_ENABLED:
        if server is not None:
            id_value[RemoteInstallationCount] = server.id
        # TODO: RemoteRealmCount logic doesn't correctly handle
        # filtering by server_id as well.
        if remote_realm_id is not None:
            id_value[RemoteRealmCount] = remote_realm_id

    # Collect one {label: series} mapping per table/aggregation level.
    for table in tables:
        data[aggregation_level[table]] = {}
        for stat in stats:
            data[aggregation_level[table]].update(
                get_time_series_by_subgroup(
                    stat,
                    table,
                    id_value[table],
                    end_times,
                    subgroup_to_label[stat],
                    include_empty_subgroups,
                )
            )

    if labels_sort_function is not None:
        data["display_order"] = labels_sort_function(data)
    else:
        data["display_order"] = None
    return json_success(request, data=data)
|  |  | ||||||
|  |  | ||||||
def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
    """Order labels by the sum of their series, largest total first."""
    return [
        label
        for total, label in sorted(
            ((sum(series), label) for label, series in value_arrays.items()),
            reverse=True,
        )
    ]
|  |  | ||||||
|  |  | ||||||
def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
    """Rank client labels for display in the messages-sent-by-client chart.

    For any given user, we want to show a fixed set of clients in the
    chart, regardless of the time aggregation or whether we're looking at
    realm or user data. This fixed set ideally includes the clients most
    important in understanding the realm's traffic and the user's traffic,
    so we combine both rankings: each label's sort key is its realm-wide
    position, nudged ahead (by 0.1) when the user's own ranking places it
    at least as high. Taking the first N elements of the result then has a
    reasonable chance of covering both.
    """
    realm_order = sort_by_totals(data["everyone"])
    user_order = sort_by_totals(data["user"])
    label_sort_values: Dict[str, float] = {}
    for position, label in enumerate(realm_order):
        label_sort_values[label] = position
    for position, label in enumerate(user_order):
        label_sort_values[label] = min(position - 0.1, label_sort_values.get(label, position))
    return sorted(label_sort_values, key=lambda label: label_sort_values[label])
|  |  | ||||||
|  |  | ||||||
| CountT = TypeVar("CountT", bound=BaseCount) |  | ||||||
|  |  | ||||||
|  |  | ||||||
def table_filtered_to_id(table: Type[CountT], key_id: int) -> QuerySet[CountT]:
    """Return the rows of `table` scoped to the object identified by `key_id`.

    The filter column depends on which *Count model `table` is; raises
    AssertionError for a table this function doesn't know about.
    """
    if table == UserCount:
        return table.objects.filter(user_id=key_id)
    if table == StreamCount:
        return table.objects.filter(stream_id=key_id)
    if table == RealmCount:
        return table.objects.filter(realm_id=key_id)
    if table == InstallationCount:
        # There is only one installation, so key_id is irrelevant here.
        return table.objects.all()
    if settings.ZILENCER_ENABLED:
        if table == RemoteInstallationCount:
            return table.objects.filter(server_id=key_id)
        if table == RemoteRealmCount:
            return table.objects.filter(realm_id=key_id)
    raise AssertionError(f"Unknown table: {table}")
|  |  | ||||||
|  |  | ||||||
def client_label_map(name: str) -> str:
    """Translate an internal Client name into the label shown on stats charts.

    Names with no known mapping are returned unchanged.
    """
    exact_names = {
        "website": "Web app",
        "ZulipElectron": "Desktop app",
        "ZulipTerminal": "Terminal app",
        "ZulipAndroid": "Old Android app",
        "ZulipiOS": "Old iOS app",
        "ZulipMobile": "Mobile app",
        "ZulipPython": "Python API",
        "API: Python": "Python API",
    }
    if name in exact_names:
        return exact_names[name]
    if name.startswith("desktop app"):
        return "Old desktop app"
    # "ZulipFooWebhook" -> "Foo webhook"
    if name.startswith("Zulip") and name.endswith("Webhook"):
        return name[len("Zulip") : -len("Webhook")] + " webhook"
    return name
|  |  | ||||||
|  |  | ||||||
def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[int]]:
    """Merge per-Client series whose display labels coincide.

    Client names are rewritten via client_label_map; series whose rewritten
    labels collide are summed element-wise. Input arrays are not mutated.
    """
    merged: Dict[str, List[int]] = {}
    for raw_label, series in value_arrays.items():
        display_label = client_label_map(raw_label)
        if display_label in merged:
            accumulated = merged[display_label]
            for index, value in enumerate(series):
                accumulated[index] += value
        else:
            merged[display_label] = list(series)
    return merged
|  |  | ||||||
|  |  | ||||||
def get_time_series_by_subgroup(
    stat: CountStat,
    table: Type[BaseCount],
    key_id: int,
    end_times: List[datetime],
    subgroup_to_label: Dict[Optional[str], str],
    include_empty_subgroups: bool,
) -> Dict[str, List[int]]:
    """Build {label: [value per end_time]} series for one stat from `table`.

    Missing (subgroup, end_time) data points are filled with 0. Subgroups
    with no data at all are included (as all-zero series) only when
    include_empty_subgroups is True.
    """
    rows = (
        table_filtered_to_id(table, key_id)
        .filter(property=stat.property)
        .values_list("subgroup", "end_time", "value")
    )
    value_dicts: Dict[Optional[str], Dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
    for subgroup, end_time, value in rows:
        value_dicts[subgroup][end_time] = value

    value_arrays = {
        label: [value_dicts[subgroup][end_time] for end_time in end_times]
        for subgroup, label in subgroup_to_label.items()
        if include_empty_subgroups or subgroup in value_dicts
    }

    if stat == COUNT_STATS["messages_sent:client:day"]:
        # HACK: We rewrite these arrays to collapse the Client objects
        # with similar names into a single sum, and generally give
        # them better names
        return rewrite_client_arrays(value_arrays)
    return value_arrays
| @@ -1,408 +0,0 @@ | |||||||
| import urllib |  | ||||||
| from contextlib import suppress |  | ||||||
| from dataclasses import dataclass |  | ||||||
| from datetime import timedelta |  | ||||||
| from decimal import Decimal |  | ||||||
| from typing import Any, Dict, Iterable, List, Optional |  | ||||||
| from urllib.parse import urlencode |  | ||||||
|  |  | ||||||
| from django.conf import settings |  | ||||||
| from django.core.exceptions import ValidationError |  | ||||||
| from django.core.validators import URLValidator |  | ||||||
| from django.db.models import Q |  | ||||||
| from django.http import HttpRequest, HttpResponse, HttpResponseRedirect |  | ||||||
| from django.shortcuts import render |  | ||||||
| from django.urls import reverse |  | ||||||
| from django.utils.timesince import timesince |  | ||||||
| from django.utils.timezone import now as timezone_now |  | ||||||
| from django.utils.translation import gettext as _ |  | ||||||
|  |  | ||||||
| from confirmation.models import Confirmation, confirmation_url |  | ||||||
| from confirmation.settings import STATUS_USED |  | ||||||
| from zerver.actions.create_realm import do_change_realm_subdomain |  | ||||||
| from zerver.actions.realm_settings import ( |  | ||||||
|     do_change_realm_org_type, |  | ||||||
|     do_change_realm_plan_type, |  | ||||||
|     do_deactivate_realm, |  | ||||||
|     do_scrub_realm, |  | ||||||
|     do_send_realm_reactivation_email, |  | ||||||
| ) |  | ||||||
| from zerver.actions.users import do_delete_user_preserving_messages |  | ||||||
| from zerver.decorator import require_server_admin |  | ||||||
| from zerver.forms import check_subdomain_available |  | ||||||
| from zerver.lib.exceptions import JsonableError |  | ||||||
| from zerver.lib.realm_icon import realm_icon_url |  | ||||||
| from zerver.lib.request import REQ, has_request_variables |  | ||||||
| from zerver.lib.subdomains import get_subdomain_from_hostname |  | ||||||
| from zerver.lib.validator import check_bool, check_string_in, to_decimal, to_non_negative_int |  | ||||||
| from zerver.models import ( |  | ||||||
|     MultiuseInvite, |  | ||||||
|     PreregistrationRealm, |  | ||||||
|     PreregistrationUser, |  | ||||||
|     Realm, |  | ||||||
|     RealmReactivationStatus, |  | ||||||
|     UserProfile, |  | ||||||
|     get_org_type_display_name, |  | ||||||
|     get_realm, |  | ||||||
|     get_user_profile_by_id, |  | ||||||
| ) |  | ||||||
| from zerver.views.invite import get_invitee_emails_set |  | ||||||
|  |  | ||||||
| if settings.BILLING_ENABLED: |  | ||||||
|     from corporate.lib.stripe import approve_sponsorship as do_approve_sponsorship |  | ||||||
|     from corporate.lib.stripe import ( |  | ||||||
|         attach_discount_to_realm, |  | ||||||
|         downgrade_at_the_end_of_billing_cycle, |  | ||||||
|         downgrade_now_without_creating_additional_invoices, |  | ||||||
|         get_discount_for_realm, |  | ||||||
|         get_latest_seat_count, |  | ||||||
|         make_end_of_cycle_updates_if_needed, |  | ||||||
|         switch_realm_from_standard_to_plus_plan, |  | ||||||
|         update_billing_method_of_current_plan, |  | ||||||
|         update_sponsorship_status, |  | ||||||
|         void_all_open_invoices, |  | ||||||
|     ) |  | ||||||
|     from corporate.models import ( |  | ||||||
|         Customer, |  | ||||||
|         CustomerPlan, |  | ||||||
|         get_current_plan_by_realm, |  | ||||||
|         get_customer_by_realm, |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|  |  | ||||||
def get_plan_name(plan_type: int) -> str:
    """Return the human-readable name for a Realm.PLAN_TYPE_* constant.

    Raises KeyError for an unrecognized plan type.
    """
    plan_names = {
        Realm.PLAN_TYPE_SELF_HOSTED: "self-hosted",
        Realm.PLAN_TYPE_LIMITED: "limited",
        Realm.PLAN_TYPE_STANDARD: "standard",
        Realm.PLAN_TYPE_STANDARD_FREE: "open source",
        Realm.PLAN_TYPE_PLUS: "plus",
    }
    return plan_names[plan_type]
|  |  | ||||||
|  |  | ||||||
def get_confirmations(
    types: List[int], object_ids: Iterable[int], hostname: Optional[str] = None
) -> List[Dict[str, Any]]:
    """Summarize Confirmation links sent in the last 30 days for the given objects.

    Each dict describes one confirmation: the linked object, its URL,
    its type, whether the link has been used (when the object tracks
    that), and a human-readable expiry.
    """
    lowest_datetime = timezone_now() - timedelta(days=30)
    matching_confirmations = Confirmation.objects.filter(
        type__in=types, object_id__in=object_ids, date_sent__gte=lowest_datetime
    )
    confirmation_dicts = []
    for confirmation in matching_confirmations:
        content_object = confirmation.content_object
        assert content_object is not None

        # Only some content objects track whether their link was consumed.
        if hasattr(content_object, "status"):
            if content_object.status == STATUS_USED:
                link_status = "Link has been used"
            else:
                link_status = "Link has not been used"
        else:
            link_status = ""

        now = timezone_now()
        expiry_date = confirmation.expiry_date
        if expiry_date is None:
            expires_in = "Never"
        elif now < expiry_date:
            expires_in = timesince(now, expiry_date)
        else:
            expires_in = "Expired"

        url = confirmation_url(
            confirmation.confirmation_key, confirmation.realm, confirmation.type
        )
        confirmation_dicts.append(
            {
                "object": content_object,
                "url": url,
                "type": confirmation.type,
                "link_status": link_status,
                "expires_in": expires_in,
            }
        )
    return confirmation_dicts
|  |  | ||||||
|  |  | ||||||
# Accepted values for the `modify_plan` parameter of the /support endpoint.
VALID_MODIFY_PLAN_METHODS = [
    "downgrade_at_billing_cycle_end",
    "downgrade_now_without_additional_licenses",
    "downgrade_now_void_open_invoices",
    "upgrade_to_plus",
]

# Accepted values for the `status` parameter (realm activation state).
VALID_STATUS_VALUES = [
    "active",
    "deactivated",
]

# Accepted values for the `billing_method` parameter.
VALID_BILLING_METHODS = [
    "send_invoice",
    "charge_automatically",
]
|  |  | ||||||
|  |  | ||||||
@dataclass
class PlanData:
    """Billing summary for a realm, assembled for display on the support page."""

    # The realm's Customer record, if one exists.
    customer: Optional["Customer"] = None
    # The realm's current CustomerPlan, if any.
    current_plan: Optional["CustomerPlan"] = None
    # NOTE(review): presumably the license count on the current plan and the
    # number currently in use — the code that fills these is outside this
    # view; confirm against the caller.
    licenses: Optional[int] = None
    licenses_used: Optional[int] = None
|  |  | ||||||
|  |  | ||||||
| @require_server_admin |  | ||||||
| @has_request_variables |  | ||||||
| def support( |  | ||||||
|     request: HttpRequest, |  | ||||||
|     realm_id: Optional[int] = REQ(default=None, converter=to_non_negative_int), |  | ||||||
|     plan_type: Optional[int] = REQ(default=None, converter=to_non_negative_int), |  | ||||||
|     discount: Optional[Decimal] = REQ(default=None, converter=to_decimal), |  | ||||||
|     new_subdomain: Optional[str] = REQ(default=None), |  | ||||||
|     status: Optional[str] = REQ(default=None, str_validator=check_string_in(VALID_STATUS_VALUES)), |  | ||||||
|     billing_method: Optional[str] = REQ( |  | ||||||
|         default=None, str_validator=check_string_in(VALID_BILLING_METHODS) |  | ||||||
|     ), |  | ||||||
|     sponsorship_pending: Optional[bool] = REQ(default=None, json_validator=check_bool), |  | ||||||
|     approve_sponsorship: bool = REQ(default=False, json_validator=check_bool), |  | ||||||
|     modify_plan: Optional[str] = REQ( |  | ||||||
|         default=None, str_validator=check_string_in(VALID_MODIFY_PLAN_METHODS) |  | ||||||
|     ), |  | ||||||
|     scrub_realm: bool = REQ(default=False, json_validator=check_bool), |  | ||||||
|     delete_user_by_id: Optional[int] = REQ(default=None, converter=to_non_negative_int), |  | ||||||
|     query: Optional[str] = REQ("q", default=None), |  | ||||||
|     org_type: Optional[int] = REQ(default=None, converter=to_non_negative_int), |  | ||||||
| ) -> HttpResponse: |  | ||||||
|     context: Dict[str, Any] = {} |  | ||||||
|  |  | ||||||
|     if "success_message" in request.session: |  | ||||||
|         context["success_message"] = request.session["success_message"] |  | ||||||
|         del request.session["success_message"] |  | ||||||
|  |  | ||||||
|     if settings.BILLING_ENABLED and request.method == "POST": |  | ||||||
|         # We check that request.POST only has two keys in it: The |  | ||||||
|         # realm_id and a field to change. |  | ||||||
|         keys = set(request.POST.keys()) |  | ||||||
|         if "csrfmiddlewaretoken" in keys: |  | ||||||
|             keys.remove("csrfmiddlewaretoken") |  | ||||||
|         if len(keys) != 2: |  | ||||||
|             raise JsonableError(_("Invalid parameters")) |  | ||||||
|  |  | ||||||
|         assert realm_id is not None |  | ||||||
|         realm = Realm.objects.get(id=realm_id) |  | ||||||
|  |  | ||||||
|         acting_user = request.user |  | ||||||
|         assert isinstance(acting_user, UserProfile) |  | ||||||
|         if plan_type is not None: |  | ||||||
|             current_plan_type = realm.plan_type |  | ||||||
|             do_change_realm_plan_type(realm, plan_type, acting_user=acting_user) |  | ||||||
|             msg = f"Plan type of {realm.string_id} changed from {get_plan_name(current_plan_type)} to {get_plan_name(plan_type)} " |  | ||||||
|             context["success_message"] = msg |  | ||||||
|         elif org_type is not None: |  | ||||||
|             current_realm_type = realm.org_type |  | ||||||
|             do_change_realm_org_type(realm, org_type, acting_user=acting_user) |  | ||||||
|             msg = f"Org type of {realm.string_id} changed from {get_org_type_display_name(current_realm_type)} to {get_org_type_display_name(org_type)} " |  | ||||||
|             context["success_message"] = msg |  | ||||||
|         elif discount is not None: |  | ||||||
|             current_discount = get_discount_for_realm(realm) or 0 |  | ||||||
|             attach_discount_to_realm(realm, discount, acting_user=acting_user) |  | ||||||
|             context[ |  | ||||||
|                 "success_message" |  | ||||||
|             ] = f"Discount of {realm.string_id} changed to {discount}% from {current_discount}%." |  | ||||||
|         elif new_subdomain is not None: |  | ||||||
|             old_subdomain = realm.string_id |  | ||||||
|             try: |  | ||||||
|                 check_subdomain_available(new_subdomain) |  | ||||||
|             except ValidationError as error: |  | ||||||
|                 context["error_message"] = error.message |  | ||||||
|             else: |  | ||||||
|                 do_change_realm_subdomain(realm, new_subdomain, acting_user=acting_user) |  | ||||||
|                 request.session[ |  | ||||||
|                     "success_message" |  | ||||||
|                 ] = f"Subdomain changed from {old_subdomain} to {new_subdomain}" |  | ||||||
|                 return HttpResponseRedirect( |  | ||||||
|                     reverse("support") + "?" + urlencode({"q": new_subdomain}) |  | ||||||
|                 ) |  | ||||||
|         elif status is not None: |  | ||||||
|             if status == "active": |  | ||||||
|                 do_send_realm_reactivation_email(realm, acting_user=acting_user) |  | ||||||
|                 context[ |  | ||||||
|                     "success_message" |  | ||||||
|                 ] = f"Realm reactivation email sent to admins of {realm.string_id}." |  | ||||||
|             elif status == "deactivated": |  | ||||||
|                 do_deactivate_realm(realm, acting_user=acting_user) |  | ||||||
|                 context["success_message"] = f"{realm.string_id} deactivated." |  | ||||||
|         elif billing_method is not None: |  | ||||||
|             if billing_method == "send_invoice": |  | ||||||
|                 update_billing_method_of_current_plan( |  | ||||||
|                     realm, charge_automatically=False, acting_user=acting_user |  | ||||||
|                 ) |  | ||||||
|                 context[ |  | ||||||
|                     "success_message" |  | ||||||
|                 ] = f"Billing method of {realm.string_id} updated to pay by invoice." |  | ||||||
|             elif billing_method == "charge_automatically": |  | ||||||
|                 update_billing_method_of_current_plan( |  | ||||||
|                     realm, charge_automatically=True, acting_user=acting_user |  | ||||||
|                 ) |  | ||||||
|                 context[ |  | ||||||
|                     "success_message" |  | ||||||
|                 ] = f"Billing method of {realm.string_id} updated to charge automatically." |  | ||||||
|         elif sponsorship_pending is not None: |  | ||||||
|             if sponsorship_pending: |  | ||||||
|                 update_sponsorship_status(realm, True, acting_user=acting_user) |  | ||||||
|                 context["success_message"] = f"{realm.string_id} marked as pending sponsorship." |  | ||||||
|             else: |  | ||||||
|                 update_sponsorship_status(realm, False, acting_user=acting_user) |  | ||||||
|                 context["success_message"] = f"{realm.string_id} is no longer pending sponsorship." |  | ||||||
|         elif approve_sponsorship: |  | ||||||
|             do_approve_sponsorship(realm, acting_user=acting_user) |  | ||||||
|             context["success_message"] = f"Sponsorship approved for {realm.string_id}" |  | ||||||
|         elif modify_plan is not None: |  | ||||||
|             if modify_plan == "downgrade_at_billing_cycle_end": |  | ||||||
|                 downgrade_at_the_end_of_billing_cycle(realm) |  | ||||||
|                 context[ |  | ||||||
|                     "success_message" |  | ||||||
|                 ] = f"{realm.string_id} marked for downgrade at the end of billing cycle" |  | ||||||
|             elif modify_plan == "downgrade_now_without_additional_licenses": |  | ||||||
|                 downgrade_now_without_creating_additional_invoices(realm) |  | ||||||
|                 context[ |  | ||||||
|                     "success_message" |  | ||||||
|                 ] = f"{realm.string_id} downgraded without creating additional invoices" |  | ||||||
|             elif modify_plan == "downgrade_now_void_open_invoices": |  | ||||||
|                 downgrade_now_without_creating_additional_invoices(realm) |  | ||||||
|                 voided_invoices_count = void_all_open_invoices(realm) |  | ||||||
|                 context[ |  | ||||||
|                     "success_message" |  | ||||||
|                 ] = f"{realm.string_id} downgraded and voided {voided_invoices_count} open invoices" |  | ||||||
|             elif modify_plan == "upgrade_to_plus": |  | ||||||
|                 switch_realm_from_standard_to_plus_plan(realm) |  | ||||||
|                 context["success_message"] = f"{realm.string_id} upgraded to Plus" |  | ||||||
|         elif scrub_realm: |  | ||||||
|             do_scrub_realm(realm, acting_user=acting_user) |  | ||||||
|             context["success_message"] = f"{realm.string_id} scrubbed." |  | ||||||
|         elif delete_user_by_id: |  | ||||||
|             user_profile_for_deletion = get_user_profile_by_id(delete_user_by_id) |  | ||||||
|             user_email = user_profile_for_deletion.delivery_email |  | ||||||
|             assert user_profile_for_deletion.realm == realm |  | ||||||
|             do_delete_user_preserving_messages(user_profile_for_deletion) |  | ||||||
|             context["success_message"] = f"{user_email} in {realm.subdomain} deleted." |  | ||||||
|  |  | ||||||
|     if query: |  | ||||||
|         key_words = get_invitee_emails_set(query) |  | ||||||
|  |  | ||||||
|         case_insensitive_users_q = Q() |  | ||||||
|         for key_word in key_words: |  | ||||||
|             case_insensitive_users_q |= Q(delivery_email__iexact=key_word) |  | ||||||
|         users = set(UserProfile.objects.filter(case_insensitive_users_q)) |  | ||||||
|         realms = set(Realm.objects.filter(string_id__in=key_words)) |  | ||||||
|  |  | ||||||
|         for key_word in key_words: |  | ||||||
|             try: |  | ||||||
|                 URLValidator()(key_word) |  | ||||||
|                 parse_result = urllib.parse.urlparse(key_word) |  | ||||||
|                 hostname = parse_result.hostname |  | ||||||
|                 assert hostname is not None |  | ||||||
|                 if parse_result.port: |  | ||||||
|                     hostname = f"{hostname}:{parse_result.port}" |  | ||||||
|                 subdomain = get_subdomain_from_hostname(hostname) |  | ||||||
|                 with suppress(Realm.DoesNotExist): |  | ||||||
|                     realms.add(get_realm(subdomain)) |  | ||||||
|             except ValidationError: |  | ||||||
|                 users.update(UserProfile.objects.filter(full_name__iexact=key_word)) |  | ||||||
|  |  | ||||||
|         # full_names can have , in them |  | ||||||
|         users.update(UserProfile.objects.filter(full_name__iexact=query)) |  | ||||||
|  |  | ||||||
|         context["users"] = users |  | ||||||
|         context["realms"] = realms |  | ||||||
|  |  | ||||||
|         confirmations: List[Dict[str, Any]] = [] |  | ||||||
|  |  | ||||||
|         preregistration_user_ids = [ |  | ||||||
|             user.id for user in PreregistrationUser.objects.filter(email__in=key_words) |  | ||||||
|         ] |  | ||||||
|         confirmations += get_confirmations( |  | ||||||
|             [Confirmation.USER_REGISTRATION, Confirmation.INVITATION], |  | ||||||
|             preregistration_user_ids, |  | ||||||
|             hostname=request.get_host(), |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         preregistration_realm_ids = [ |  | ||||||
|             user.id for user in PreregistrationRealm.objects.filter(email__in=key_words) |  | ||||||
|         ] |  | ||||||
|         confirmations += get_confirmations( |  | ||||||
|             [Confirmation.REALM_CREATION], |  | ||||||
|             preregistration_realm_ids, |  | ||||||
|             hostname=request.get_host(), |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         multiuse_invite_ids = [ |  | ||||||
|             invite.id for invite in MultiuseInvite.objects.filter(realm__in=realms) |  | ||||||
|         ] |  | ||||||
|         confirmations += get_confirmations([Confirmation.MULTIUSE_INVITE], multiuse_invite_ids) |  | ||||||
|  |  | ||||||
|         realm_reactivation_status_objects = RealmReactivationStatus.objects.filter(realm__in=realms) |  | ||||||
|         confirmations += get_confirmations( |  | ||||||
|             [Confirmation.REALM_REACTIVATION], [obj.id for obj in realm_reactivation_status_objects] |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|         context["confirmations"] = confirmations |  | ||||||
|  |  | ||||||
|         # We want a union of all realms that might appear in the search result, |  | ||||||
|         # but not necessary as a separate result item. |  | ||||||
|         # Therefore, we do not modify the realms object in the context. |  | ||||||
|         all_realms = realms.union( |  | ||||||
|             [ |  | ||||||
|                 confirmation["object"].realm |  | ||||||
|                 for confirmation in confirmations |  | ||||||
|                 # For confirmations, we only display realm details when the type is USER_REGISTRATION |  | ||||||
|                 # or INVITATION. |  | ||||||
|                 if confirmation["type"] in (Confirmation.USER_REGISTRATION, Confirmation.INVITATION) |  | ||||||
|             ] |  | ||||||
|             + [user.realm for user in users] |  | ||||||
|         ) |  | ||||||
|         plan_data: Dict[int, PlanData] = {} |  | ||||||
|         for realm in all_realms: |  | ||||||
|             current_plan = get_current_plan_by_realm(realm) |  | ||||||
|             plan_data[realm.id] = PlanData( |  | ||||||
|                 customer=get_customer_by_realm(realm), |  | ||||||
|                 current_plan=current_plan, |  | ||||||
|             ) |  | ||||||
|             if current_plan is not None: |  | ||||||
|                 new_plan, last_ledger_entry = make_end_of_cycle_updates_if_needed( |  | ||||||
|                     current_plan, timezone_now() |  | ||||||
|                 ) |  | ||||||
|                 if last_ledger_entry is not None: |  | ||||||
|                     if new_plan is not None: |  | ||||||
|                         plan_data[realm.id].current_plan = new_plan |  | ||||||
|                     else: |  | ||||||
|                         plan_data[realm.id].current_plan = current_plan |  | ||||||
|                     plan_data[realm.id].licenses = last_ledger_entry.licenses |  | ||||||
|                     plan_data[realm.id].licenses_used = get_latest_seat_count(realm) |  | ||||||
|         context["plan_data"] = plan_data |  | ||||||
|  |  | ||||||
|     def get_realm_owner_emails_as_string(realm: Realm) -> str: |  | ||||||
|         return ", ".join( |  | ||||||
|             realm.get_human_owner_users() |  | ||||||
|             .order_by("delivery_email") |  | ||||||
|             .values_list("delivery_email", flat=True) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     def get_realm_admin_emails_as_string(realm: Realm) -> str: |  | ||||||
|         return ", ".join( |  | ||||||
|             realm.get_human_admin_users(include_realm_owners=False) |  | ||||||
|             .order_by("delivery_email") |  | ||||||
|             .values_list("delivery_email", flat=True) |  | ||||||
|         ) |  | ||||||
|  |  | ||||||
|     context["get_realm_owner_emails_as_string"] = get_realm_owner_emails_as_string |  | ||||||
|     context["get_realm_admin_emails_as_string"] = get_realm_admin_emails_as_string |  | ||||||
|     context["get_discount_for_realm"] = get_discount_for_realm |  | ||||||
|     context["get_org_type_display_name"] = get_org_type_display_name |  | ||||||
|     context["realm_icon_url"] = realm_icon_url |  | ||||||
|     context["Confirmation"] = Confirmation |  | ||||||
|     context["sorted_realm_types"] = sorted( |  | ||||||
|         Realm.ORG_TYPES.values(), key=lambda d: d["display_order"] |  | ||||||
|     ) |  | ||||||
|  |  | ||||||
|     return render(request, "analytics/support.html", context=context) |  | ||||||
| @@ -1,106 +0,0 @@ | |||||||
| from typing import Any, Dict, List, Tuple |  | ||||||
|  |  | ||||||
| from django.conf import settings |  | ||||||
| from django.db.models import QuerySet |  | ||||||
| from django.http import HttpRequest, HttpResponse |  | ||||||
| from django.shortcuts import render |  | ||||||
|  |  | ||||||
| from analytics.views.activity_common import ( |  | ||||||
|     format_date_for_activity_reports, |  | ||||||
|     get_user_activity_summary, |  | ||||||
|     make_table, |  | ||||||
| ) |  | ||||||
| from zerver.decorator import require_server_admin |  | ||||||
| from zerver.models import UserActivity, UserProfile, get_user_profile_by_id |  | ||||||
|  |  | ||||||
| if settings.BILLING_ENABLED: |  | ||||||
|     pass |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def get_user_activity_records( |  | ||||||
|     user_profile: UserProfile, |  | ||||||
| ) -> QuerySet[UserActivity]: |  | ||||||
|     fields = [ |  | ||||||
|         "user_profile__full_name", |  | ||||||
|         "query", |  | ||||||
|         "client__name", |  | ||||||
|         "count", |  | ||||||
|         "last_visit", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     records = UserActivity.objects.filter( |  | ||||||
|         user_profile=user_profile, |  | ||||||
|     ) |  | ||||||
|     records = records.order_by("-last_visit") |  | ||||||
|     records = records.select_related("user_profile", "client").only(*fields) |  | ||||||
|     return records |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def raw_user_activity_table(records: QuerySet[UserActivity]) -> str: |  | ||||||
|     cols = [ |  | ||||||
|         "query", |  | ||||||
|         "client", |  | ||||||
|         "count", |  | ||||||
|         "last_visit", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     def row(record: UserActivity) -> List[Any]: |  | ||||||
|         return [ |  | ||||||
|             record.query, |  | ||||||
|             record.client.name, |  | ||||||
|             record.count, |  | ||||||
|             format_date_for_activity_reports(record.last_visit), |  | ||||||
|         ] |  | ||||||
|  |  | ||||||
|     rows = list(map(row, records)) |  | ||||||
|     title = "Raw data" |  | ||||||
|     return make_table(title, cols, rows) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| def user_activity_summary_table(user_summary: Dict[str, Dict[str, Any]]) -> str: |  | ||||||
|     rows = [] |  | ||||||
|     for k, v in user_summary.items(): |  | ||||||
|         if k == "name" or k == "user_profile_id": |  | ||||||
|             continue |  | ||||||
|         client = k |  | ||||||
|         count = v["count"] |  | ||||||
|         last_visit = v["last_visit"] |  | ||||||
|         row = [ |  | ||||||
|             format_date_for_activity_reports(last_visit), |  | ||||||
|             client, |  | ||||||
|             count, |  | ||||||
|         ] |  | ||||||
|         rows.append(row) |  | ||||||
|  |  | ||||||
|     rows = sorted(rows, key=lambda r: r[0], reverse=True) |  | ||||||
|  |  | ||||||
|     cols = [ |  | ||||||
|         "last_visit", |  | ||||||
|         "client", |  | ||||||
|         "count", |  | ||||||
|     ] |  | ||||||
|  |  | ||||||
|     title = "User activity" |  | ||||||
|     return make_table(title, cols, rows) |  | ||||||
|  |  | ||||||
|  |  | ||||||
| @require_server_admin |  | ||||||
| def get_user_activity(request: HttpRequest, user_profile_id: int) -> HttpResponse: |  | ||||||
|     user_profile = get_user_profile_by_id(user_profile_id) |  | ||||||
|     records = get_user_activity_records(user_profile) |  | ||||||
|  |  | ||||||
|     data: List[Tuple[str, str]] = [] |  | ||||||
|     user_summary = get_user_activity_summary(records) |  | ||||||
|     content = user_activity_summary_table(user_summary) |  | ||||||
|  |  | ||||||
|     data += [("Summary", content)] |  | ||||||
|  |  | ||||||
|     content = raw_user_activity_table(records) |  | ||||||
|     data += [("Info", content)] |  | ||||||
|  |  | ||||||
|     title = user_profile.delivery_email |  | ||||||
|     return render( |  | ||||||
|         request, |  | ||||||
|         "analytics/activity.html", |  | ||||||
|         context=dict(data=data, title=title), |  | ||||||
|     ) |  | ||||||
| @@ -1,31 +0,0 @@ | |||||||
| {generate_api_header(API_ENDPOINT_NAME)} |  | ||||||
|  |  | ||||||
| ## Usage examples |  | ||||||
|  |  | ||||||
| {start_tabs} |  | ||||||
|  |  | ||||||
| {generate_code_example(python)|API_ENDPOINT_NAME|example} |  | ||||||
|  |  | ||||||
| {generate_code_example(javascript)|API_ENDPOINT_NAME|example} |  | ||||||
|  |  | ||||||
| {tab|curl} |  | ||||||
|  |  | ||||||
| {generate_code_example(curl)|API_ENDPOINT_NAME|example} |  | ||||||
|  |  | ||||||
| {end_tabs} |  | ||||||
|  |  | ||||||
| ## Parameters |  | ||||||
|  |  | ||||||
| {generate_api_arguments_table|zulip.yaml|API_ENDPOINT_NAME} |  | ||||||
|  |  | ||||||
| {generate_parameter_description(API_ENDPOINT_NAME)} |  | ||||||
|  |  | ||||||
| ## Response |  | ||||||
|  |  | ||||||
| {generate_return_values_table|zulip.yaml|API_ENDPOINT_NAME} |  | ||||||
|  |  | ||||||
| {generate_response_description(API_ENDPOINT_NAME)} |  | ||||||
|  |  | ||||||
| #### Example response(s) |  | ||||||
|  |  | ||||||
| {generate_code_example|API_ENDPOINT_NAME|fixture} |  | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,103 +0,0 @@ | |||||||
| # Construct a narrow |  | ||||||
|  |  | ||||||
| A **narrow** is a set of filters for Zulip messages, that can be based |  | ||||||
| on many different factors (like sender, stream, topic, search |  | ||||||
| keywords, etc.). Narrows are used in various places in the the Zulip |  | ||||||
| API (most importantly, in the API for fetching messages). |  | ||||||
|  |  | ||||||
| It is simplest to explain the algorithm for encoding a search as a |  | ||||||
| narrow using a single example. Consider the following search query |  | ||||||
| (written as it would be entered in the Zulip web app's search box). |  | ||||||
| It filters for messages sent to stream `announce`, not sent by |  | ||||||
| `iago@zulip.com`, and containing the words `cool` and `sunglasses`: |  | ||||||
|  |  | ||||||
| ``` |  | ||||||
| stream:announce -sender:iago@zulip.com cool sunglasses |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| This query would be JSON-encoded for use in the Zulip API using JSON |  | ||||||
| as a list of simple objects, as follows: |  | ||||||
|  |  | ||||||
| ```json |  | ||||||
| [ |  | ||||||
|     { |  | ||||||
|         "operator": "stream", |  | ||||||
|         "operand": "announce" |  | ||||||
|     }, |  | ||||||
|     { |  | ||||||
|         "operator": "sender", |  | ||||||
|         "operand": "iago@zulip.com", |  | ||||||
|         "negated": true |  | ||||||
|     }, |  | ||||||
|     { |  | ||||||
|         "operator": "search", |  | ||||||
|         "operand": "cool sunglasses" |  | ||||||
|     } |  | ||||||
| ] |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| The Zulip help center article on [searching for messages](/help/search-for-messages) |  | ||||||
| documents the majority of the search/narrow options supported by the |  | ||||||
| Zulip API. |  | ||||||
|  |  | ||||||
| Note that many narrows, including all that lack a `stream` or `streams` |  | ||||||
| operator, search the current user's personal message history. See |  | ||||||
| [searching shared history](/help/search-for-messages#searching-shared-history) |  | ||||||
| for details. |  | ||||||
|  |  | ||||||
| **Changes**: In Zulip 7.0 (feature level 177), support was added |  | ||||||
| for three filters related to direct messages: `is:dm`, `dm` and |  | ||||||
| `dm-including`. The `dm` operator replaced and deprecated the |  | ||||||
| `pm-with` operator. The `is:dm` filter replaced and deprecated |  | ||||||
| the `is:private` filter. The `dm-including` operator replaced and |  | ||||||
| deprecated the `group-pm-with` operator. |  | ||||||
|  |  | ||||||
| The `dm-including` and `group-pm-with` operators return slightly |  | ||||||
| different results. For example, `dm-including:1234` returns all |  | ||||||
| direct messages (1-on-1 and group) that include the current user |  | ||||||
| and the user with the unique user ID of `1234`. On the other hand, |  | ||||||
| `group-pm-with:1234` returned only group direct messages that included |  | ||||||
| the current user and the user with the unique user ID of `1234`. |  | ||||||
|  |  | ||||||
| Both `dm` and `is:dm` are aliases of `pm-with` and `is:private` |  | ||||||
| respectively, and return the same exact results that the deprecated |  | ||||||
| filters did. |  | ||||||
|  |  | ||||||
| ## Narrows that use IDs |  | ||||||
|  |  | ||||||
| The `near` and `id` operators, documented in the help center, use message |  | ||||||
| IDs for their operands. |  | ||||||
|  |  | ||||||
| * `near:12345`: Search messages around the message with ID `12345`. |  | ||||||
| * `id:12345`: Search for only message with ID `12345`. |  | ||||||
|  |  | ||||||
| There are a few additional narrow/search options (new in Zulip 2.1) |  | ||||||
| that use either stream IDs or user IDs that are not documented in the |  | ||||||
| help center because they are primarily useful to API clients: |  | ||||||
|  |  | ||||||
| * `stream:1234`: Search messages sent to the stream with ID `1234`. |  | ||||||
| * `sender:1234`: Search messages sent by user ID `1234`. |  | ||||||
| * `dm:1234`: Search the direct message conversation between |  | ||||||
|   you and user ID `1234`. |  | ||||||
| * `dm:1234,5678`: Search the direct message conversation between |  | ||||||
|   you, user ID `1234`, and user ID `5678`. |  | ||||||
| * `dm-including:1234`: Search all direct messages (1-on-1 and group) |  | ||||||
|   that include you and user ID `1234`. |  | ||||||
|  |  | ||||||
| The operands for these search options must be encoded either as an |  | ||||||
| integer ID or a JSON list of integer IDs. For example, to query |  | ||||||
| messages sent by a user 1234 to a PM thread with yourself, user 1234, |  | ||||||
| and user 5678, the correct JSON-encoded query is: |  | ||||||
|  |  | ||||||
| ```json |  | ||||||
| [ |  | ||||||
|     { |  | ||||||
|         "operator": "dm", |  | ||||||
|         "operand": [1234, 5678] |  | ||||||
|     }, |  | ||||||
|     { |  | ||||||
|         "operator": "sender", |  | ||||||
|         "operand": 1234 |  | ||||||
|     } |  | ||||||
| ] |  | ||||||
| ``` |  | ||||||
| @@ -1,49 +0,0 @@ | |||||||
| {generate_api_header(/scheduled_messages:post)} |  | ||||||
|  |  | ||||||
| ## Usage examples |  | ||||||
|  |  | ||||||
| {start_tabs} |  | ||||||
|  |  | ||||||
| {generate_code_example(python)|/scheduled_messages:post|example} |  | ||||||
|  |  | ||||||
| {generate_code_example(javascript)|/scheduled_messages:post|example} |  | ||||||
|  |  | ||||||
| {tab|curl} |  | ||||||
|  |  | ||||||
| ``` curl |  | ||||||
| # Create a scheduled stream message |  | ||||||
| curl -X POST {{ api_url }}/v1/scheduled_messages \ |  | ||||||
|     -u BOT_EMAIL_ADDRESS:BOT_API_KEY \ |  | ||||||
|     --data-urlencode type=stream \ |  | ||||||
|     --data-urlencode to=9 \ |  | ||||||
|     --data-urlencode topic=Hello \ |  | ||||||
|     --data-urlencode 'content=Nice to meet everyone!' \ |  | ||||||
|     --data-urlencode scheduled_delivery_timestamp=3165826990 |  | ||||||
|  |  | ||||||
| # Create a scheduled direct message |  | ||||||
| curl -X POST {{ api_url }}/v1/messages \ |  | ||||||
|     -u BOT_EMAIL_ADDRESS:BOT_API_KEY \ |  | ||||||
|     --data-urlencode type=direct \ |  | ||||||
|     --data-urlencode 'to=[9, 10]' \ |  | ||||||
|     --data-urlencode 'content=Can we meet on Monday?' \ |  | ||||||
|     --data-urlencode scheduled_delivery_timestamp=3165826990 |  | ||||||
|  |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| {end_tabs} |  | ||||||
|  |  | ||||||
| ## Parameters |  | ||||||
|  |  | ||||||
| {generate_api_arguments_table|zulip.yaml|/scheduled_messages:post} |  | ||||||
|  |  | ||||||
| {generate_parameter_description(/scheduled_messages:post)} |  | ||||||
|  |  | ||||||
| ## Response |  | ||||||
|  |  | ||||||
| {generate_return_values_table|zulip.yaml|/scheduled_messages:post} |  | ||||||
|  |  | ||||||
| {generate_response_description(/scheduled_messages:post)} |  | ||||||
|  |  | ||||||
| #### Example response(s) |  | ||||||
|  |  | ||||||
| {generate_code_example|/scheduled_messages:post|fixture} |  | ||||||
| @@ -1,6 +0,0 @@ | |||||||
| # Create a stream |  | ||||||
|  |  | ||||||
| You can create a stream using Zulip's REST API by submitting a |  | ||||||
| [subscribe](/api/subscribe) request with a stream name that |  | ||||||
| doesn't yet exist and passing appropriate parameters to define |  | ||||||
| the initial configuration of the new stream. |  | ||||||
| @@ -1,80 +0,0 @@ | |||||||
| # HTTP headers |  | ||||||
|  |  | ||||||
| This page documents the HTTP headers used by the Zulip API. |  | ||||||
|  |  | ||||||
| Most important is that API clients authenticate to the server using |  | ||||||
| HTTP Basic authentication. If you're using the official [Python or |  | ||||||
| JavaScript bindings](/api/installation-instructions), this is taken |  | ||||||
| care of when you configure said bindings. |  | ||||||
|  |  | ||||||
| Otherwise, see the `curl` example on each endpoint's documentation |  | ||||||
| page, which details the request format. |  | ||||||
|  |  | ||||||
| Documented below are additional HTTP headers and header conventions |  | ||||||
| generally used by Zulip: |  | ||||||
|  |  | ||||||
| ## The `User-Agent` header |  | ||||||
|  |  | ||||||
| Clients are not required to pass a `User-Agent` HTTP header, but we |  | ||||||
| highly recommend doing so when writing an integration. It's easy to do |  | ||||||
| and it can help save time when debugging issues related to an API |  | ||||||
| client. |  | ||||||
|  |  | ||||||
| If provided, the Zulip server will parse the `User-Agent` HTTP header |  | ||||||
| in order to identify specific clients and integrations. This |  | ||||||
| information is used by the server for logging, [usage |  | ||||||
| statistics](/help/analytics), and on rare occasions, for |  | ||||||
| backwards-compatibility logic to preserve support for older versions |  | ||||||
| of official clients. |  | ||||||
|  |  | ||||||
| Official Zulip clients and integrations use a `User-Agent` that starts |  | ||||||
| with something like `ZulipMobile/20.0.103 `, encoding the name of the |  | ||||||
| application and it's version. |  | ||||||
|  |  | ||||||
| Zulip's official API bindings have reasonable defaults for |  | ||||||
| `User-Agent`. For example, the official Zulip Python bindings have a |  | ||||||
| default `User-Agent` starting with `ZulipPython/{version}`, where |  | ||||||
| `version` is the version of the library. |  | ||||||
|  |  | ||||||
| You can give your bot/integration its own name by passing the `client` |  | ||||||
| parameter when initializing the Python bindings. For example, the |  | ||||||
| official Zulip Nagios integration is initialized like this: |  | ||||||
|  |  | ||||||
| ``` python |  | ||||||
| client = zulip.Client( |  | ||||||
|     config_file=opts.config, client=f"ZulipNagios/{VERSION}" |  | ||||||
| ) |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| If you are working on an integration that you plan to share outside |  | ||||||
| your organization, you can get help picking a good name in |  | ||||||
| `#integrations` in the [Zulip development |  | ||||||
| community](https://zulip.com/development-community). |  | ||||||
|  |  | ||||||
| ## Rate-limiting response headers |  | ||||||
|  |  | ||||||
| To help clients avoid exceeding rate limits, Zulip sets the following |  | ||||||
| HTTP headers in all API responses: |  | ||||||
|  |  | ||||||
| * `X-RateLimit-Remaining`: The number of additional requests of this |  | ||||||
|   type that the client can send before exceeding its limit. |  | ||||||
| * `X-RateLimit-Limit`: The limit that would be applicable to a client |  | ||||||
|   that had not made any recent requests of this type. This is useful |  | ||||||
|   for designing a client's burst behavior so as to avoid ever reaching |  | ||||||
|   a rate limit. |  | ||||||
| * `X-RateLimit-Reset`: The time at which the client will no longer |  | ||||||
|   have any rate limits applied to it (and thus could do a burst of |  | ||||||
|   `X-RateLimit-Limit` requests). |  | ||||||
|  |  | ||||||
| [Zulip's rate limiting rules are configurable][rate-limiting-rules], |  | ||||||
| and can vary by server and over time. The default configuration |  | ||||||
| currently limits: |  | ||||||
|  |  | ||||||
| * Every user is limited to 200 total API requests per minute. |  | ||||||
| * Separate, much lower limits for authentication/login attempts. |  | ||||||
|  |  | ||||||
| When the Zulip server has configured multiple rate limits that apply |  | ||||||
| to a given request, the values returned will be for the strictest |  | ||||||
| limit. |  | ||||||
|  |  | ||||||
| [rate-limiting-rules]: https://zulip.readthedocs.io/en/latest/production/security-model.html#rate-limiting |  | ||||||
| @@ -1,114 +0,0 @@ | |||||||
| #### Messages |  | ||||||
|  |  | ||||||
| * [Send a message](/api/send-message) |  | ||||||
| * [Upload a file](/api/upload-file) |  | ||||||
| * [Edit a message](/api/update-message) |  | ||||||
| * [Delete a message](/api/delete-message) |  | ||||||
| * [Get messages](/api/get-messages) |  | ||||||
| * [Construct a narrow](/api/construct-narrow) |  | ||||||
| * [Add an emoji reaction](/api/add-reaction) |  | ||||||
| * [Remove an emoji reaction](/api/remove-reaction) |  | ||||||
| * [Render a message](/api/render-message) |  | ||||||
| * [Fetch a single message](/api/get-message) |  | ||||||
| * [Check if messages match narrow](/api/check-messages-match-narrow) |  | ||||||
| * [Get a message's edit history](/api/get-message-history) |  | ||||||
| * [Update personal message flags](/api/update-message-flags) |  | ||||||
| * [Update personal message flags for narrow](/api/update-message-flags-for-narrow) |  | ||||||
| * [Mark messages as read in bulk](/api/mark-all-as-read) |  | ||||||
| * [Get a message's read receipts](/api/get-read-receipts) |  | ||||||
|  |  | ||||||
| #### Scheduled messages |  | ||||||
|  |  | ||||||
| * [Get scheduled messages](/api/get-scheduled-messages) |  | ||||||
| * [Create a scheduled message](/api/create-scheduled-message) |  | ||||||
| * [Edit a scheduled message](/api/update-scheduled-message) |  | ||||||
| * [Delete a scheduled message](/api/delete-scheduled-message) |  | ||||||
|  |  | ||||||
| #### Drafts |  | ||||||
|  |  | ||||||
| * [Get drafts](/api/get-drafts) |  | ||||||
| * [Create drafts](/api/create-drafts) |  | ||||||
| * [Edit a draft](/api/edit-draft) |  | ||||||
| * [Delete a draft](/api/delete-draft) |  | ||||||
|  |  | ||||||
| #### Streams |  | ||||||
|  |  | ||||||
| * [Get subscribed streams](/api/get-subscriptions) |  | ||||||
| * [Subscribe to a stream](/api/subscribe) |  | ||||||
| * [Unsubscribe from a stream](/api/unsubscribe) |  | ||||||
| * [Get subscription status](/api/get-subscription-status) |  | ||||||
| * [Get all subscribers](/api/get-subscribers) |  | ||||||
| * [Update subscription settings](/api/update-subscription-settings) |  | ||||||
| * [Get all streams](/api/get-streams) |  | ||||||
| * [Get a stream by ID](/api/get-stream-by-id) |  | ||||||
| * [Get stream ID](/api/get-stream-id) |  | ||||||
| * [Create a stream](/api/create-stream) |  | ||||||
| * [Update a stream](/api/update-stream) |  | ||||||
| * [Archive a stream](/api/archive-stream) |  | ||||||
| * [Get topics in a stream](/api/get-stream-topics) |  | ||||||
| * [Topic muting](/api/mute-topic) |  | ||||||
| * [Update personal preferences for a topic](/api/update-user-topic) |  | ||||||
| * [Delete a topic](/api/delete-topic) |  | ||||||
| * [Add a default stream](/api/add-default-stream) |  | ||||||
| * [Remove a default stream](/api/remove-default-stream) |  | ||||||
|  |  | ||||||
| #### Users |  | ||||||
|  |  | ||||||
| * [Get all users](/api/get-users) |  | ||||||
| * [Get own user](/api/get-own-user) |  | ||||||
| * [Get a user](/api/get-user) |  | ||||||
| * [Get a user by email](/api/get-user-by-email) |  | ||||||
| * [Update a user](/api/update-user) |  | ||||||
| * [Update your status](/api/update-status) |  | ||||||
| * [Create a user](/api/create-user) |  | ||||||
| * [Deactivate a user](/api/deactivate-user) |  | ||||||
| * [Reactivate a user](/api/reactivate-user) |  | ||||||
| * [Deactivate own user](/api/deactivate-own-user) |  | ||||||
| * [Set "typing" status](/api/set-typing-status) |  | ||||||
| * [Get user presence](/api/get-user-presence) |  | ||||||
| * [Get presence of all users](/api/get-presence) |  | ||||||
| * [Get attachments](/api/get-attachments) |  | ||||||
| * [Delete an attachment](/api/remove-attachment) |  | ||||||
| * [Update settings](/api/update-settings) |  | ||||||
| * [Get user groups](/api/get-user-groups) |  | ||||||
| * [Create a user group](/api/create-user-group) |  | ||||||
| * [Update a user group](/api/update-user-group) |  | ||||||
| * [Delete a user group](/api/remove-user-group) |  | ||||||
| * [Update user group members](/api/update-user-group-members) |  | ||||||
| * [Update user group subgroups](/api/update-user-group-subgroups) |  | ||||||
| * [Get user group membership status](/api/get-is-user-group-member) |  | ||||||
| * [Get user group members](/api/get-user-group-members) |  | ||||||
| * [Get subgroups of user group](/api/get-user-group-subgroups) |  | ||||||
| * [Mute a user](/api/mute-user) |  | ||||||
| * [Unmute a user](/api/unmute-user) |  | ||||||
| * [Get all alert words](/api/get-alert-words) |  | ||||||
| * [Add alert words](/api/add-alert-words) |  | ||||||
| * [Remove alert words](/api/remove-alert-words) |  | ||||||
|  |  | ||||||
| #### Server & organizations |  | ||||||
|  |  | ||||||
| * [Get server settings](/api/get-server-settings) |  | ||||||
| * [Get linkifiers](/api/get-linkifiers) |  | ||||||
| * [Add a linkifier](/api/add-linkifier) |  | ||||||
| * [Update a linkifier](/api/update-linkifier) |  | ||||||
| * [Remove a linkifier](/api/remove-linkifier) |  | ||||||
| * [Add a code playground](/api/add-code-playground) |  | ||||||
| * [Remove a code playground](/api/remove-code-playground) |  | ||||||
| * [Get all custom emoji](/api/get-custom-emoji) |  | ||||||
| * [Upload custom emoji](/api/upload-custom-emoji) |  | ||||||
| * [Get all custom profile fields](/api/get-custom-profile-fields) |  | ||||||
| * [Reorder custom profile fields](/api/reorder-custom-profile-fields) |  | ||||||
| * [Create a custom profile field](/api/create-custom-profile-field) |  | ||||||
| * [Update realm-level defaults of user settings](/api/update-realm-user-settings-defaults) |  | ||||||
|  |  | ||||||
| #### Real-time events |  | ||||||
|  |  | ||||||
| * [Real time events API](/api/real-time-events) |  | ||||||
| * [Register an event queue](/api/register-queue) |  | ||||||
| * [Get events from an event queue](/api/get-events) |  | ||||||
| * [Delete an event queue](/api/delete-queue) |  | ||||||
|  |  | ||||||
| #### Specialty endpoints |  | ||||||
|  |  | ||||||
| * [Fetch an API key (production)](/api/fetch-api-key) |  | ||||||
| * [Fetch an API key (development only)](/api/dev-fetch-api-key) |  | ||||||
| @@ -1,157 +0,0 @@ | |||||||
| # Incoming webhook integrations |  | ||||||
|  |  | ||||||
| An incoming webhook allows a third-party service to push data to Zulip when |  | ||||||
| something happens.  There are several ways to set up an incoming webhook in |  | ||||||
| Zulip: |  | ||||||
|  |  | ||||||
| * Use our [REST API](/api/rest) endpoint for [sending |  | ||||||
|   messages](/api/send-message).  This works great for internal tools |  | ||||||
|   or cases where the third-party tool wants to control the formatting |  | ||||||
|   of the messages in Zulip. |  | ||||||
| * Use one of our supported [integration |  | ||||||
|   frameworks](/integrations/meta-integration), such as the |  | ||||||
|   [Slack-compatible incoming webhook](/integrations/doc/slack_incoming), |  | ||||||
|   [Zapier integration](/integrations/doc/zapier), or |  | ||||||
|   [IFTTT integration](/integrations/doc/ifttt). |  | ||||||
| * Adding an incoming webhook integration (detailed on this page), |  | ||||||
|   where all the logic for formatting the Zulip messages lives in the |  | ||||||
|   Zulip server.  This is how most of [Zulip's official |  | ||||||
|   integrations](/integrations/) work, because they enable Zulip to |  | ||||||
|   support third-party services that just have an "outgoing webhook" |  | ||||||
|   feature (without the third party needing to do any work specific to |  | ||||||
|   Zulip). |  | ||||||
|  |  | ||||||
| In an incoming webhook integration, the third-party service's |  | ||||||
| "outgoing webhook" feature sends an `HTTP POST` to a special URL when |  | ||||||
| it has something for you, and then the Zulip "incoming webhook" |  | ||||||
| integration handles that incoming data to format and send a message in |  | ||||||
| Zulip. |  | ||||||
|  |  | ||||||
| New official Zulip webhook integrations can take just a few hours to |  | ||||||
| write, including tests and documentation, if you use the right |  | ||||||
| process. |  | ||||||
|  |  | ||||||
| ## Quick guide |  | ||||||
|  |  | ||||||
| * Set up the |  | ||||||
|   [Zulip development environment](https://zulip.readthedocs.io/en/latest/development/overview.html). |  | ||||||
|  |  | ||||||
| * Use [Zulip's JSON integration](/integrations/doc/json), |  | ||||||
|   <https://webhook.site/>, or a similar site to capture an example |  | ||||||
|   webhook payload from the third-party service. Create a |  | ||||||
|   `zerver/webhooks/<mywebhook>/fixtures/` directory, and add the |  | ||||||
|   captured payload as a test fixture. |  | ||||||
|  |  | ||||||
| * Create an `Integration` object, and add it to `WEBHOOK_INTEGRATIONS` in |  | ||||||
|   `zerver/lib/integrations.py`. Search for `webhook` in that file to find an |  | ||||||
|   existing one to copy. |  | ||||||
|  |  | ||||||
| * Write a draft webhook handler under `zerver/webhooks/`. There are a lot of |  | ||||||
|   examples in that directory that you can copy. We recommend templating off |  | ||||||
|   a short one, like `zendesk`. |  | ||||||
|  |  | ||||||
| * Add a test for your fixture at `zerver/webhooks/<mywebhook>/tests.py`. |  | ||||||
|   Run the tests for your integration like this: |  | ||||||
|  |  | ||||||
|     ``` |  | ||||||
|     tools/test-backend zerver/webhooks/<mywebhook>/ |  | ||||||
|     ``` |  | ||||||
|  |  | ||||||
|     Iterate on debugging the test and webhooks handler until it all |  | ||||||
|     works. |  | ||||||
|  |  | ||||||
| * Capture payloads for the other common types of `POST`s the third-party |  | ||||||
|   service will make, and add tests for them; usually this part of the |  | ||||||
|   process is pretty fast. |  | ||||||
|  |  | ||||||
| * Document the integration (required for getting it merged into Zulip). You |  | ||||||
|   can template off an existing guide, like |  | ||||||
|   [this one](https://raw.githubusercontent.com/zulip/zulip/main/zerver/webhooks/github/doc.md). |  | ||||||
|   This should not take more than 15 minutes, even if you don't speak English |  | ||||||
|   as a first language (we'll clean up the text before merging). |  | ||||||
|  |  | ||||||
| ## Hello world walkthrough |  | ||||||
|  |  | ||||||
| Check out the [detailed walkthrough](incoming-webhooks-walkthrough) for step-by-step |  | ||||||
| instructions. |  | ||||||
|  |  | ||||||
| ## Checklist |  | ||||||
|  |  | ||||||
| ### Files that need to be created |  | ||||||
|  |  | ||||||
| Select a name for your incoming webhook and use it consistently. The examples |  | ||||||
| below are for a webhook named `MyWebHook`. |  | ||||||
|  |  | ||||||
| * `zerver/webhooks/mywebhook/__init__.py`: Empty file that is an obligatory |  | ||||||
|    part of every Python package.  Remember to `git add` it. |  | ||||||
| * `zerver/webhooks/mywebhook/view.py`: The main webhook integration function |  | ||||||
|   as well as any needed helper functions. |  | ||||||
| * `zerver/webhooks/mywebhook/fixtures/messagetype.json`: Sample json payload data |  | ||||||
|   used by tests. Add one fixture file per type of message supported by your |  | ||||||
|   integration. |  | ||||||
| * `zerver/webhooks/mywebhook/tests.py`: Tests for your webhook. |  | ||||||
| * `zerver/webhooks/mywebhook/doc.md`: End-user documentation explaining |  | ||||||
|   how to add the integration. |  | ||||||
| * `static/images/integrations/logos/mywebhook.svg`: A square logo for the |  | ||||||
|   platform/server/product you are integrating. Used on the documentation |  | ||||||
|   pages as well as the sender's avatar for messages sent by the integration. |  | ||||||
| * `static/images/integrations/mywebhook/001.png`: A screenshot of a message |  | ||||||
|   sent by the integration, used on the documentation page. This can be |  | ||||||
|   generated by running `tools/generate-integration-docs-screenshot --integration mywebhook`. |  | ||||||
| * `static/images/integrations/bot_avatars/mywebhook.png`: A square logo for the |  | ||||||
|   platform/server/product you are integrating which is used to create the avatar |  | ||||||
|   for generating screenshots with. This can be generated automatically from |  | ||||||
|   `static/images/integrations/logos/mywebhook.svg` by running |  | ||||||
|   `tools/setup/generate_integration_bots_avatars.py`. |  | ||||||
|  |  | ||||||
| ### Files that need to be updated |  | ||||||
|  |  | ||||||
| * `zerver/lib/integrations.py`: Add your integration to |  | ||||||
|   `WEBHOOK_INTEGRATIONS`. This will automatically register a |  | ||||||
|   URL for the incoming webhook of the form `api/v1/external/mywebhook` and |  | ||||||
|   associate it with the function called `api_mywebhook_webhook` in |  | ||||||
|   `zerver/webhooks/mywebhook/view.py`. Also add your integration to |  | ||||||
|   `DOC_SCREENSHOT_CONFIG`. This will allow you to automatically generate |  | ||||||
|   a screenshot for the documentation by running |  | ||||||
|   `tools/generate-integration-docs-screenshot --integration mywebhook`. |  | ||||||
|  |  | ||||||
| ## Common Helpers |  | ||||||
|  |  | ||||||
| * If your integration will receive a test webhook payload, you can use |  | ||||||
|   `get_setup_webhook_message` to create our standard message for test payloads. |  | ||||||
|   You can import this from `zerver/lib/webhooks/common.py`, and it will generate |  | ||||||
|   a message like this: "GitHub webhook is successfully configured! 🎉" |  | ||||||
|  |  | ||||||
| ## General advice |  | ||||||
|  |  | ||||||
| * Consider using our Zulip markup to make the output from your |  | ||||||
|   integration especially attractive or useful (e.g.  emoji, Markdown |  | ||||||
|   emphasis or @-mentions). |  | ||||||
|  |  | ||||||
| * Use topics effectively to ensure sequential messages about the same |  | ||||||
|   thing are threaded together; this makes for much better consumption |  | ||||||
|   by users.  E.g. for a bug tracker integration, put the bug number in |  | ||||||
|   the topic for all messages; for an integration like Nagios, put the |  | ||||||
|   service in the topic. |  | ||||||
|  |  | ||||||
| * Integrations that don't match a team's workflow can often be |  | ||||||
|   uselessly spammy.  Give careful thought to providing options for |  | ||||||
|   triggering Zulip messages only for certain message types, certain |  | ||||||
|   projects, or sending different messages to different streams/topics, |  | ||||||
|   to make it easy for teams to configure the integration to support |  | ||||||
|   their workflow. |  | ||||||
|  |  | ||||||
| * Consistently capitalize the name of the integration in the |  | ||||||
|   documentation and the Client name the way the vendor does.  It's OK |  | ||||||
|   to use all-lower-case in the implementation. |  | ||||||
|  |  | ||||||
| * Sometimes it can be helpful to contact the vendor if it appears they |  | ||||||
|   don't have an API or webhook we can use; sometimes the right API |  | ||||||
|   is just not properly documented. |  | ||||||
|  |  | ||||||
| * A helpful tool for testing your integration is |  | ||||||
|   [UltraHook](http://www.ultrahook.com/), which allows you to receive webhook |  | ||||||
|   calls via your local Zulip development environment. This enables you to do end-to-end |  | ||||||
|   testing with live data from the service you're integrating and can help you |  | ||||||
|   spot why something isn't working or if the service is using custom HTTP |  | ||||||
|   headers. |  | ||||||
| @@ -1,26 +0,0 @@ | |||||||
| # The Zulip API |  | ||||||
|  |  | ||||||
| Zulip's APIs allow you to integrate other services with Zulip.  This |  | ||||||
| guide should help you find the API you need: |  | ||||||
|  |  | ||||||
| * First, check if the tool you'd like to integrate with Zulip |  | ||||||
|   [already has a native integration](/integrations/). |  | ||||||
| * Next, check if [Zapier](https://zapier.com/apps) or |  | ||||||
|   [IFTTT](https://ifttt.com/search) has an integration. |  | ||||||
|   [Zulip's Zapier integration](/integrations/doc/zapier) and |  | ||||||
|   [Zulip's IFTTT integration](/integrations/doc/ifttt) often allow |  | ||||||
|   integrating a new service with Zulip without writing any code. |  | ||||||
| * If you'd like to send content into Zulip, you can |  | ||||||
|   [write a native incoming webhook integration](/api/incoming-webhooks-overview) |  | ||||||
|   or use [Zulip's API for sending messages](/api/send-message). |  | ||||||
| * If you're building an interactive bot that reacts to activity inside |  | ||||||
|   Zulip, you'll want to look at Zulip's |  | ||||||
|   [Python framework for interactive bots](/api/running-bots) or |  | ||||||
|   [Zulip's real-time events API](/api/get-events). |  | ||||||
|  |  | ||||||
| And if you still need to build your own integration with Zulip, check out |  | ||||||
| the full [REST API](/api/rest), generally starting with |  | ||||||
| [installing the API client bindings](/api/installation-instructions). |  | ||||||
|  |  | ||||||
| In case you already know how you want to build your integration and you're |  | ||||||
| just looking for an API key, we've got you covered [here](/api/api-keys). |  | ||||||
| @@ -1,69 +0,0 @@ | |||||||
| # Integrations overview |  | ||||||
|  |  | ||||||
| Integrations allow you to send data from other products into or out of |  | ||||||
| Zulip. Zulip natively integrates with dozens of products, and with hundreds |  | ||||||
| more through Zapier and IFTTT. |  | ||||||
|  |  | ||||||
| Zulip also makes it very easy to write your own integration, and (if you'd |  | ||||||
| like) to get it merged into the main Zulip repository. |  | ||||||
|  |  | ||||||
| Integrations are one of the most important parts of a group chat tool like |  | ||||||
| Zulip, and we are committed to making integrating with Zulip as easy as |  | ||||||
| possible. |  | ||||||
|  |  | ||||||
| ## Set up an existing integration |  | ||||||
|  |  | ||||||
| Most existing integrations send content from a third-party product into |  | ||||||
| Zulip. |  | ||||||
|  |  | ||||||
| * Search Zulip's [list of native integrations](/integrations/) for the |  | ||||||
|   third-party product. Each integration has a page describing how to set it |  | ||||||
|   up. |  | ||||||
|  |  | ||||||
| * Check if [Zapier](https://zapier.com/apps) has an integration with the |  | ||||||
|   product. If it does, follow [these instructions](/integrations/doc/zapier) |  | ||||||
|   to set it up. |  | ||||||
|  |  | ||||||
| * Check if [IFTTT](https://ifttt.com/search) has an integration with the |  | ||||||
|   product. If it does, follow [these instructions](/integrations/doc/ifttt) |  | ||||||
|   to set it up. |  | ||||||
|  |  | ||||||
| * Use a third-party webhook integration designed to work with |  | ||||||
|   [Slack's webhook API](https://api.slack.com/messaging/webhooks) |  | ||||||
|   pointed at Zulip's |  | ||||||
|   [Slack-compatible webhook API](/integrations/doc/slack_incoming). |  | ||||||
|  |  | ||||||
| * If the product can send email notifications, you can |  | ||||||
|   [send those emails to a stream](/help/message-a-stream-by-email). |  | ||||||
|  |  | ||||||
| ## Write your own integration |  | ||||||
|  |  | ||||||
| We've put a lot of effort into making this as easy as possible, but |  | ||||||
| all of the options below do require some comfort writing code. If you |  | ||||||
| need an integration and don't have an engineer on staff, [contact |  | ||||||
| us](/help/contact-support) and we'll see what we can do. |  | ||||||
|  |  | ||||||
| ### Sending content into Zulip |  | ||||||
|  |  | ||||||
| * If the third-party service supports outgoing webhooks, you likely want to |  | ||||||
|   build an [incoming webhook integration](/api/incoming-webhooks-overview). |  | ||||||
|  |  | ||||||
| * If it doesn't, you may want to write a |  | ||||||
|   [script or plugin integration](/api/non-webhook-integrations). |  | ||||||
|  |  | ||||||
| * Finally, you can |  | ||||||
|   [send messages using Zulip's API](/api/send-message). |  | ||||||
|  |  | ||||||
| ### Sending and receiving content |  | ||||||
|  |  | ||||||
| * To react to activity inside Zulip, look at Zulip's |  | ||||||
|   [Python framework for interactive bots](/api/running-bots) or |  | ||||||
|   [Zulip's real-time events API](/api/get-events). |  | ||||||
|  |  | ||||||
| * If what you want isn't covered by the above, check out the full |  | ||||||
|   [REST API](/api/rest). The web, mobile, desktop, and terminal apps are |  | ||||||
|   built on top of this API, so it can do anything a human user can do. Most |  | ||||||
|   but not all of the endpoints are documented on this site; if you need |  | ||||||
|   something that isn't there check out Zulip's |  | ||||||
|   [REST endpoints](https://github.com/zulip/zulip/blob/main/zproject/urls.py) |  | ||||||
|   or [contact us](/help/contact-support) and we'll help you out. |  | ||||||
| @@ -1,89 +0,0 @@ | |||||||
| {generate_api_header(/mark_all_as_read:post)} |  | ||||||
|  |  | ||||||
| ## Usage examples |  | ||||||
|  |  | ||||||
| {start_tabs} |  | ||||||
|  |  | ||||||
| {generate_code_example(python)|/mark_all_as_read:post|example} |  | ||||||
|  |  | ||||||
| {generate_code_example(javascript)|/mark_all_as_read:post|example} |  | ||||||
|  |  | ||||||
| {tab|curl} |  | ||||||
|  |  | ||||||
| {generate_code_example(curl)|/mark_all_as_read:post|example} |  | ||||||
|  |  | ||||||
| {end_tabs} |  | ||||||
|  |  | ||||||
| ## Parameters |  | ||||||
|  |  | ||||||
| {generate_api_arguments_table|zulip.yaml|/mark_all_as_read:post} |  | ||||||
|  |  | ||||||
| {generate_parameter_description(/mark_all_as_read:post)} |  | ||||||
|  |  | ||||||
| ## Response |  | ||||||
|  |  | ||||||
| {generate_response_description(/mark_all_as_read:post)} |  | ||||||
|  |  | ||||||
| #### Example response(s) |  | ||||||
|  |  | ||||||
| {generate_code_example|/mark_all_as_read:post|fixture} |  | ||||||
|  |  | ||||||
| {generate_api_header(/mark_stream_as_read:post)} |  | ||||||
|  |  | ||||||
| ## Usage examples |  | ||||||
|  |  | ||||||
| {start_tabs} |  | ||||||
|  |  | ||||||
| {generate_code_example(python)|/mark_stream_as_read:post|example} |  | ||||||
|  |  | ||||||
| {generate_code_example(javascript)|/mark_stream_as_read:post|example} |  | ||||||
|  |  | ||||||
| {tab|curl} |  | ||||||
|  |  | ||||||
| {generate_code_example(curl)|/mark_stream_as_read:post|example} |  | ||||||
|  |  | ||||||
| {end_tabs} |  | ||||||
|  |  | ||||||
| ## Parameters |  | ||||||
|  |  | ||||||
| {generate_api_arguments_table|zulip.yaml|/mark_stream_as_read:post} |  | ||||||
|  |  | ||||||
| {generate_parameter_description(/mark_stream_as_read:post)} |  | ||||||
|  |  | ||||||
| ## Response |  | ||||||
|  |  | ||||||
| {generate_response_description(/mark_stream_as_read:post)} |  | ||||||
|  |  | ||||||
| #### Example response(s) |  | ||||||
|  |  | ||||||
| {generate_code_example|/mark_stream_as_read:post|fixture} |  | ||||||
|  |  | ||||||
| {generate_api_header(/mark_topic_as_read:post)} |  | ||||||
|  |  | ||||||
| ## Usage examples |  | ||||||
|  |  | ||||||
| {start_tabs} |  | ||||||
|  |  | ||||||
| {generate_code_example(python)|/mark_topic_as_read:post|example} |  | ||||||
|  |  | ||||||
| {generate_code_example(javascript)|/mark_topic_as_read:post|example} |  | ||||||
|  |  | ||||||
| {tab|curl} |  | ||||||
|  |  | ||||||
| {generate_code_example(curl)|/mark_topic_as_read:post|example} |  | ||||||
|  |  | ||||||
| {end_tabs} |  | ||||||
|  |  | ||||||
| ## Parameters |  | ||||||
|  |  | ||||||
| {generate_api_arguments_table|zulip.yaml|/mark_topic_as_read:post} |  | ||||||
|  |  | ||||||
| {generate_parameter_description(/mark_topic_as_read:post)} |  | ||||||
|  |  | ||||||
| ## Response |  | ||||||
|  |  | ||||||
| {generate_response_description(/mark_topic_as_read:post)} |  | ||||||
|  |  | ||||||
| #### Example response(s) |  | ||||||
|  |  | ||||||
| {generate_code_example|/mark_topic_as_read:post|fixture} |  | ||||||
| @@ -1,184 +0,0 @@ | |||||||
| # Outgoing webhooks |  | ||||||
|  |  | ||||||
| Outgoing webhooks allow you to build or set up Zulip integrations |  | ||||||
| which are notified when certain types of messages are sent in |  | ||||||
| Zulip. When one of those events is triggered, we'll send an HTTP POST |  | ||||||
| payload to the webhook's configured URL.  Webhooks can be used to |  | ||||||
| power a wide range of Zulip integrations.  For example, the |  | ||||||
| [Zulip Botserver][zulip-botserver] is built on top of this API. |  | ||||||
|  |  | ||||||
| Zulip supports outgoing webhooks both in a clean native Zulip format, |  | ||||||
| as well as a format that's compatible with |  | ||||||
| [Slack's outgoing webhook API][slack-outgoing-webhook], which can help |  | ||||||
| with porting an existing Slack integration to work with Zulip. |  | ||||||
|  |  | ||||||
| [zulip-botserver]: /api/deploying-bots#zulip-botserver |  | ||||||
| [slack-outgoing-webhook]: https://api.slack.com/custom-integrations/outgoing-webhooks |  | ||||||
|  |  | ||||||
| To register an outgoing webhook: |  | ||||||
|  |  | ||||||
| * Log in to the Zulip server. |  | ||||||
| * Navigate to *Personal settings (<i class="fa fa-cog"></i>)* -> *Bots* -> |  | ||||||
|   *Add a new bot*.  Select *Outgoing webhook* for bot type, the URL |  | ||||||
|   you'd like Zulip to post to as the **Endpoint URL**, the format you |  | ||||||
|   want, and click on *Create bot* to submit the form. |  | ||||||
| * Your new bot user will appear in the *Active bots* panel, which you |  | ||||||
|   can use to edit the bot's settings. |  | ||||||
|  |  | ||||||
| ## Triggering |  | ||||||
|  |  | ||||||
| There are currently two ways to trigger an outgoing webhook: |  | ||||||
|  |  | ||||||
| 1.  **@-mention** the bot user in a stream.  If the bot replies, its |  | ||||||
|     reply will be sent to that stream and topic. |  | ||||||
| 2.  **Send a private message** with the bot as one of the recipients. |  | ||||||
|     If the bot replies, its reply will be sent to that thread. |  | ||||||
|  |  | ||||||
| ## Timeouts |  | ||||||
|  |  | ||||||
| The remote server must respond to a `POST` request in a timely manner. |  | ||||||
| The default timeout for outgoing webhooks is 10 seconds, though this |  | ||||||
| can be configured by the administrator of the Zulip server by setting |  | ||||||
| `OUTGOING_WEBHOOKS_TIMEOUT_SECONDS` in the [server's |  | ||||||
| settings][settings]. |  | ||||||
|  |  | ||||||
| [settings]: https://zulip.readthedocs.io/en/latest/subsystems/settings.html#server-settings |  | ||||||
|  |  | ||||||
| ## Outgoing webhook format |  | ||||||
|  |  | ||||||
| {generate_code_example|/zulip-outgoing-webhook:post|fixture} |  | ||||||
|  |  | ||||||
| ### Fields documentation |  | ||||||
|  |  | ||||||
| {generate_return_values_table|zulip.yaml|/zulip-outgoing-webhook:post} |  | ||||||
|  |  | ||||||
| ## Replying with a message |  | ||||||
|  |  | ||||||
| Many bots implemented using this outgoing webhook API will want to |  | ||||||
| send a reply message into Zulip.  Zulip's outgoing webhook API |  | ||||||
| provides a convenient way to do that by simply returning an |  | ||||||
| appropriate HTTP response to the Zulip server. |  | ||||||
|  |  | ||||||
| A correctly implemented bot will return a JSON object containing one |  | ||||||
| of two possible formats, described below. |  | ||||||
|  |  | ||||||
| ### Example response payloads |  | ||||||
|  |  | ||||||
| If the bot code wants to opt out of responding, it can explicitly |  | ||||||
| encode a JSON dictionary that contains `response_not_required` set |  | ||||||
| to `True`, so that no response message is sent to the user.  (This |  | ||||||
| is helpful to distinguish deliberate non-responses from bugs.) |  | ||||||
|  |  | ||||||
| Here's an example of the JSON your server should respond with if |  | ||||||
| you would not like to send a response message: |  | ||||||
|  |  | ||||||
| ```json |  | ||||||
| { |  | ||||||
|     "response_not_required": true |  | ||||||
| } |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| Here's an example of the JSON your server should respond with if |  | ||||||
| you would like to send a response message: |  | ||||||
|  |  | ||||||
| ```json |  | ||||||
| { |  | ||||||
|     "content": "Hey, we just received **something** from Zulip!" |  | ||||||
| } |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| The `content` field should contain Zulip-format Markdown. |  | ||||||
|  |  | ||||||
| Note that an outgoing webhook bot can use the [Zulip REST |  | ||||||
| API](/api/rest) with its API key in case your bot needs to do |  | ||||||
| something else, like add an emoji reaction or upload a file. |  | ||||||
|  |  | ||||||
| ## Slack-format webhook format |  | ||||||
|  |  | ||||||
| This interface translates Zulip's outgoing webhook's request into the |  | ||||||
| format that Slack's outgoing webhook interface sends.  As a result, |  | ||||||
| one should be able to use this to interact with third-party |  | ||||||
| integrations designed to work with Slack's outgoing webhook interface. |  | ||||||
| Here's how we fill in the fields that a Slack-format webhook expects: |  | ||||||
|  |  | ||||||
| <table class="table"> |  | ||||||
|     <thead> |  | ||||||
|         <tr> |  | ||||||
|             <th>Name</th> |  | ||||||
|             <th>Description</th> |  | ||||||
|         </tr> |  | ||||||
|     </thead> |  | ||||||
|     <tbody> |  | ||||||
|         <tr> |  | ||||||
|             <td><code>token</code></td> |  | ||||||
|             <td>A string of alphanumeric characters you can use to |  | ||||||
|             authenticate the webhook request (each bot user uses a fixed token)</td> |  | ||||||
|         </tr> |  | ||||||
|         <tr> |  | ||||||
|             <td><code>team_id</code></td> |  | ||||||
|             <td>ID of the Zulip organization prefixed by "T".</td> |  | ||||||
|         </tr> |  | ||||||
|         <tr> |  | ||||||
|             <td><code>team_domain</code></td> |  | ||||||
|             <td>Hostname of the Zulip organization</td> |  | ||||||
|         </tr> |  | ||||||
|         <tr> |  | ||||||
|             <td><code>channel_id</code></td> |  | ||||||
|             <td>Stream ID prefixed by "C"</td> |  | ||||||
|         </tr> |  | ||||||
|         <tr> |  | ||||||
|             <td><code>channel_name</code></td> |  | ||||||
|             <td>Stream name</td> |  | ||||||
|         </tr> |  | ||||||
|         <tr> |  | ||||||
|             <td><code>thread_ts</code></td> |  | ||||||
|             <td>Timestamp for when message was sent</td> |  | ||||||
|         </tr> |  | ||||||
|         <tr> |  | ||||||
|             <td><code>timestamp</code></td> |  | ||||||
|             <td>Timestamp for when message was sent</td> |  | ||||||
|         </tr> |  | ||||||
|         <tr> |  | ||||||
|             <td><code>user_id</code></td> |  | ||||||
|             <td>ID of the user who sent the message prefixed by "U"</td> |  | ||||||
|         </tr> |  | ||||||
|         <tr> |  | ||||||
|             <td><code>user_name</code></td> |  | ||||||
|             <td>Full name of sender</td> |  | ||||||
|         </tr> |  | ||||||
|         <tr> |  | ||||||
|             <td><code>text</code></td> |  | ||||||
|             <td>The content of the message (in Markdown)</td> |  | ||||||
|         </tr> |  | ||||||
|         <tr> |  | ||||||
|             <td><code>trigger_word</code></td> |  | ||||||
|             <td>Trigger method</td> |  | ||||||
|         </tr> |  | ||||||
|         <tr> |  | ||||||
|             <td><code>service_id</code></td> |  | ||||||
|             <td>ID of the bot user</td> |  | ||||||
|         </tr> |  | ||||||
|     </tbody> |  | ||||||
| </table> |  | ||||||
|  |  | ||||||
| The above data is posted as a list of tuples (not JSON); here's an example: |  | ||||||
|  |  | ||||||
| ``` |  | ||||||
| [('token', 'v9fpCdldZIej2bco3uoUvGp06PowKFOf'), |  | ||||||
|  ('team_id', 'T1512'), |  | ||||||
|  ('team_domain', 'zulip.example.com'), |  | ||||||
|  ('channel_id', 'C123'), |  | ||||||
|  ('channel_name', 'integrations'), |  | ||||||
|  ('thread_ts', 1532078950), |  | ||||||
|  ('timestamp', 1532078950), |  | ||||||
|  ('user_id', 'U21'), |  | ||||||
|  ('user_name', 'Full Name'), |  | ||||||
|  ('text', '@**test**'), |  | ||||||
|  ('trigger_word', 'mention'), |  | ||||||
|  ('service_id', 27)] |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| * For a successful request, if data is returned, it returns that data; |  | ||||||
|   otherwise, it returns a blank response. |  | ||||||
| * For a failed request, it returns the reason for the failure, as returned |  | ||||||
|   by the server, or the exception message. |  | ||||||
| @@ -1,34 +0,0 @@ | |||||||
| # Error handling |  | ||||||
|  |  | ||||||
| Zulip's API will always return a JSON format response. |  | ||||||
| The HTTP status code indicates whether the request was successful |  | ||||||
| (200 = success, 40x = user error, 50x = server error).  Every response |  | ||||||
| will contain at least two keys: `msg` (a human-readable error message) |  | ||||||
| and `result`, which will be either `error` or `success` (this is |  | ||||||
| redundant with the HTTP status code, but is convenient when printing |  | ||||||
| responses while debugging). |  | ||||||
|  |  | ||||||
| For some common errors, Zulip provides a `code` attribute.  Where |  | ||||||
| present, clients should check `code`, rather than `msg`, when looking |  | ||||||
| for specific error conditions, since the `msg` strings are |  | ||||||
| internationalized (e.g. the server will send the error message |  | ||||||
| translated into French if the user has a French locale). |  | ||||||
|  |  | ||||||
| Each endpoint documents its own unique errors; documented below are |  | ||||||
| errors common to many endpoints: |  | ||||||
|  |  | ||||||
| {generate_code_example|/rest-error-handling:post|fixture} |  | ||||||
|  |  | ||||||
| ## Ignored Parameters |  | ||||||
|  |  | ||||||
| In JSON success responses, all Zulip REST API endpoints may return |  | ||||||
| an array of parameters sent in the request that are not supported |  | ||||||
| by that specific endpoint. |  | ||||||
|  |  | ||||||
| While this can be expected, e.g. when sending both current and legacy |  | ||||||
| names for a parameter to a Zulip server of unknown version, this often |  | ||||||
| indicates either a bug in the client implementation or an attempt to |  | ||||||
| configure a new feature while connected to an older Zulip server that |  | ||||||
| does not support said feature. |  | ||||||
|  |  | ||||||
| {generate_code_example|/settings:patch|fixture} |  | ||||||
| @@ -1,120 +0,0 @@ | |||||||
| # Roles and permissions |  | ||||||
|  |  | ||||||
| Zulip offers several levels of permissions based on a |  | ||||||
| [user's role](/help/roles-and-permissions) in a Zulip organization. |  | ||||||
|  |  | ||||||
| Here are some important details to note when working with these |  | ||||||
| roles and permissions in Zulip's API: |  | ||||||
|  |  | ||||||
| ## A user's role |  | ||||||
|  |  | ||||||
| A user's account data include a `role` property, which contains the |  | ||||||
| user's role in the Zulip organization. These roles are encoded as: |  | ||||||
|  |  | ||||||
| * Organization owner: 100 |  | ||||||
|  |  | ||||||
| * Organization administrator: 200 |  | ||||||
|  |  | ||||||
| * Organization moderator: 300 |  | ||||||
|  |  | ||||||
| * Member: 400 |  | ||||||
|  |  | ||||||
| * Guest: 600 |  | ||||||
|  |  | ||||||
| User account data also include these boolean properties that duplicate |  | ||||||
| the related roles above: |  | ||||||
|  |  | ||||||
| * `is_owner` specifying whether the user is an organization owner. |  | ||||||
|  |  | ||||||
| * `is_admin` specifying whether the user is an organization administrator. |  | ||||||
|  |  | ||||||
| * `is_guest` specifying whether the user is a guest user. |  | ||||||
|  |  | ||||||
| These are intended as conveniences for simple clients, and clients |  | ||||||
| should prefer using the `role` field, since only that one is updated |  | ||||||
| by the [events API](/api/get-events). |  | ||||||
|  |  | ||||||
| Note that [`POST /register`](/api/register-queue) also returns an |  | ||||||
| `is_moderator` boolean property specifying whether the current user is |  | ||||||
| an organization moderator. |  | ||||||
|  |  | ||||||
| Additionally, user account data include an `is_billing_admin` property |  | ||||||
| specifying whether the user is a billing administrator for the Zulip |  | ||||||
| organization, which is not related to one of the roles listed above, |  | ||||||
| but rather allows for specific permissions related to billing |  | ||||||
| administration in [paid Zulip Cloud plans](https://zulip.com/plans/). |  | ||||||
|  |  | ||||||
| ### User account data in the API |  | ||||||
|  |  | ||||||
| Endpoints that return the user account data / properties mentioned |  | ||||||
| above are: |  | ||||||
|  |  | ||||||
| * [`GET /users`](/api/get-users) |  | ||||||
|  |  | ||||||
| * [`GET /users/{user_id}`](/api/get-user) |  | ||||||
|  |  | ||||||
| * [`GET /users/{email}`](/api/get-user-by-email) |  | ||||||
|  |  | ||||||
| * [`GET /users/me`](/api/get-own-user) |  | ||||||
|  |  | ||||||
| * [`GET /events`](/api/get-events) |  | ||||||
|  |  | ||||||
| * [`POST /register`](/api/register-queue) |  | ||||||
|  |  | ||||||
| Note that the [`POST /register` endpoint](/api/register-queue) returns |  | ||||||
| the above boolean properties to describe the role of the current user, |  | ||||||
| when `realm_user` is present in `fetch_event_types`. |  | ||||||
|  |  | ||||||
| Additionally, the specific events returned by the |  | ||||||
| [`GET /events` endpoint](/api/get-events) containing data related |  | ||||||
| to user accounts and roles are the [`realm_user` add |  | ||||||
| event](/api/get-events#realm_user-add), and the |  | ||||||
| [`realm_user` update event](/api/get-events#realm_user-update). |  | ||||||
|  |  | ||||||
| ## Permission levels |  | ||||||
|  |  | ||||||
| Many areas of Zulip are customizable based on the roles |  | ||||||
| described above, such as (but not limited to) [restricting message editing and |  | ||||||
| deletion](/help/restrict-message-editing-and-deletion) and |  | ||||||
| [streams permissions](/help/stream-permissions). The potential |  | ||||||
| permission levels are: |  | ||||||
|  |  | ||||||
| * Everyone / Any user including Guests (least restrictive) |  | ||||||
|  |  | ||||||
| * Members |  | ||||||
|  |  | ||||||
| * Full members |  | ||||||
|  |  | ||||||
| * Moderators |  | ||||||
|  |  | ||||||
| * Administrators |  | ||||||
|  |  | ||||||
| * Owners |  | ||||||
|  |  | ||||||
| * Nobody (most restrictive) |  | ||||||
|  |  | ||||||
| These permission levels and policies in the API are designed to be |  | ||||||
| cutoffs, meaning that users with the specified role and above have the |  | ||||||
| specified ability or access. For example, a permission level documented |  | ||||||
| as 'moderators only' includes organization moderators, administrators, |  | ||||||
| and owners. |  | ||||||
|  |  | ||||||
| Note that specific settings and policies in the Zulip API that use these |  | ||||||
| permission levels will likely support a subset of those listed above. |  | ||||||
|  |  | ||||||
| ## Determining if a user is a full member |  | ||||||
|  |  | ||||||
| When a Zulip organization has set up a [waiting period before new members |  | ||||||
| turn into full members](/help/restrict-permissions-of-new-members), |  | ||||||
| clients will need to determine if a user's account has aged past the |  | ||||||
| organization's waiting period threshold. |  | ||||||
|  |  | ||||||
| The `realm_waiting_period_threshold`, which is the number of days until |  | ||||||
| a user's account is treated as a full member, is returned by the |  | ||||||
| [`POST /register` endpoint](/api/register-queue) when `realm` is present |  | ||||||
| in `fetch_event_types`. |  | ||||||
|  |  | ||||||
| Clients can compare the `realm_waiting_period_threshold` to a user |  | ||||||
| account's `date_joined` property, which is the time the user account |  | ||||||
| was created, to determine if a user has the permissions of a full |  | ||||||
| member or a new member. |  | ||||||
| @@ -1,77 +0,0 @@ | |||||||
| {generate_api_header(/messages:post)} |  | ||||||
|  |  | ||||||
| ## Usage examples |  | ||||||
|  |  | ||||||
| {start_tabs} |  | ||||||
|  |  | ||||||
| {generate_code_example(python)|/messages:post|example} |  | ||||||
|  |  | ||||||
| {generate_code_example(javascript)|/messages:post|example} |  | ||||||
|  |  | ||||||
| {tab|curl} |  | ||||||
|  |  | ||||||
| ``` curl |  | ||||||
| # For stream messages |  | ||||||
| curl -X POST {{ api_url }}/v1/messages \ |  | ||||||
|     -u BOT_EMAIL_ADDRESS:BOT_API_KEY \ |  | ||||||
|     --data-urlencode type=stream \ |  | ||||||
|     --data-urlencode 'to="Denmark"' \ |  | ||||||
|     --data-urlencode topic=Castle \ |  | ||||||
|     --data-urlencode 'content=I come not, friends, to steal away your hearts.' |  | ||||||
|  |  | ||||||
| # For direct messages |  | ||||||
| curl -X POST {{ api_url }}/v1/messages \ |  | ||||||
|     -u BOT_EMAIL_ADDRESS:BOT_API_KEY \ |  | ||||||
|     --data-urlencode type=direct \ |  | ||||||
|     --data-urlencode 'to=[9]' \ |  | ||||||
|     --data-urlencode 'content=With mirth and laughter let old wrinkles come.' |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| {tab|zulip-send} |  | ||||||
|  |  | ||||||
| You can use `zulip-send` |  | ||||||
| (available after you `pip install zulip`) to easily send Zulips from |  | ||||||
| the command-line, providing the message content via STDIN. |  | ||||||
|  |  | ||||||
| ```bash |  | ||||||
| # For stream messages |  | ||||||
| zulip-send --stream Denmark --subject Castle \ |  | ||||||
|     --user othello-bot@example.com --api-key a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 |  | ||||||
|  |  | ||||||
| # For direct messages |  | ||||||
| zulip-send hamlet@example.com \ |  | ||||||
|     --user othello-bot@example.com --api-key a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| #### Passing in the message on the command-line |  | ||||||
|  |  | ||||||
| If you'd like, you can also provide the message on the command-line with the |  | ||||||
| `-m` or `--message` flag, as follows: |  | ||||||
|  |  | ||||||
|  |  | ||||||
| ```bash |  | ||||||
| zulip-send --stream Denmark --subject Castle \ |  | ||||||
|     --message 'I come not, friends, to steal away your hearts.' \ |  | ||||||
|     --user othello-bot@example.com --api-key a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 |  | ||||||
| ``` |  | ||||||
|  |  | ||||||
| You can omit the `user` and `api-key` parameters if you have a `~/.zuliprc` |  | ||||||
| file. |  | ||||||
|  |  | ||||||
| {end_tabs} |  | ||||||
|  |  | ||||||
| ## Parameters |  | ||||||
|  |  | ||||||
| {generate_api_arguments_table|zulip.yaml|/messages:post} |  | ||||||
|  |  | ||||||
| {generate_parameter_description(/messages:post)} |  | ||||||
|  |  | ||||||
| ## Response |  | ||||||
|  |  | ||||||
| {generate_return_values_table|zulip.yaml|/messages:post} |  | ||||||
|  |  | ||||||
| {generate_response_description(/messages:post)} |  | ||||||
|  |  | ||||||
| #### Example response(s) |  | ||||||
|  |  | ||||||
| {generate_code_example|/messages:post|fixture} |  | ||||||
| @@ -1,27 +0,0 @@ | |||||||
| ## Integrations |  | ||||||
|  |  | ||||||
| * [Overview](/api/integrations-overview) |  | ||||||
| * [Incoming webhook integrations](/api/incoming-webhooks-overview) |  | ||||||
| * [Hello world walkthrough](/api/incoming-webhooks-walkthrough) |  | ||||||
| * [Non-webhook integrations](/api/non-webhook-integrations) |  | ||||||
|  |  | ||||||
| ## Interactive bots (beta) |  | ||||||
|  |  | ||||||
| * [Running bots](/api/running-bots) |  | ||||||
| * [Deploying bots](/api/deploying-bots) |  | ||||||
| * [Writing bots](/api/writing-bots) |  | ||||||
| * [Outgoing webhooks](/api/outgoing-webhooks) |  | ||||||
|  |  | ||||||
| ## REST API |  | ||||||
|  |  | ||||||
| * [Overview](/api/rest) |  | ||||||
| * [Installation instructions](/api/installation-instructions) |  | ||||||
| * [API keys](/api/api-keys) |  | ||||||
| * [Configuring the Python bindings](/api/configuring-python-bindings) |  | ||||||
| * [HTTP headers](/api/http-headers) |  | ||||||
| * [Error handling](/api/rest-error-handling) |  | ||||||
| * [Roles and permissions](/api/roles-and-permissions) |  | ||||||
| * [Client libraries](/api/client-libraries) |  | ||||||
| * [API changelog](/api/changelog) |  | ||||||
|  |  | ||||||
| {!rest-endpoints.md!} |  | ||||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user