mirror of
https://github.com/zulip/zulip.git
synced 2025-10-23 16:14:02 +00:00
Compare commits
8 Commits
11.0-beta1
...
1.7.1
Author | SHA1 | Date | |
---|---|---|---|
|
2e4ae9c5dc | ||
|
139fb8c2ee | ||
|
93ffaa73bd | ||
|
960d736e55 | ||
|
28a3dcf787 | ||
|
4eb958b6d8 | ||
|
d35d5953c7 | ||
|
c256c5e91c |
@@ -5,8 +5,6 @@ coverage:
|
||||
project:
|
||||
default:
|
||||
target: auto
|
||||
# Codecov has the tendency to report a lot of false negatives,
|
||||
# so we basically suppress comments completely.
|
||||
threshold: 50%
|
||||
threshold: 0.03
|
||||
base: auto
|
||||
patch: off
|
||||
|
@@ -1,32 +0,0 @@
|
||||
te
|
||||
ans
|
||||
pullrequest
|
||||
ist
|
||||
cros
|
||||
wit
|
||||
nwe
|
||||
circularly
|
||||
ned
|
||||
ba
|
||||
ressemble
|
||||
ser
|
||||
sur
|
||||
hel
|
||||
fpr
|
||||
alls
|
||||
nd
|
||||
ot
|
||||
womens
|
||||
vise
|
||||
falsy
|
||||
ro
|
||||
derails
|
||||
forin
|
||||
uper
|
||||
slac
|
||||
couldn
|
||||
ges
|
||||
assertIn
|
||||
thirdparty
|
||||
asend
|
||||
COO
|
@@ -3,23 +3,17 @@ root = true
|
||||
[*]
|
||||
end_of_line = lf
|
||||
charset = utf-8
|
||||
indent_size = 4
|
||||
indent_style = space
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
insert_final_newline = true
|
||||
|
||||
[[shell]]
|
||||
binary_next_line = true
|
||||
switch_case_indent = true
|
||||
[*.{sh,py,js,json,yml,xml,css,md,markdown,handlebars,html}]
|
||||
indent_style = space
|
||||
indent_size = 4
|
||||
|
||||
[{*.{cjs,cts,js,json,mjs,mts,ts},check-openapi}]
|
||||
max_line_length = 100
|
||||
|
||||
[*.{py,pyi}]
|
||||
max_line_length = 110
|
||||
|
||||
[*.{md,svg,rb,pp,yaml,yml}]
|
||||
[*.{svg,rb,pp,pl}]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
[package.json]
|
||||
indent_size = 2
|
||||
[*.{cfg}]
|
||||
indent_style = space
|
||||
indent_size = 8
|
||||
|
2
.eslintignore
Normal file
2
.eslintignore
Normal file
@@ -0,0 +1,2 @@
|
||||
static/js/blueslip.js
|
||||
static/webpack-bundles
|
327
.eslintrc.json
Normal file
327
.eslintrc.json
Normal file
@@ -0,0 +1,327 @@
|
||||
{
|
||||
"env": {
|
||||
"node": true,
|
||||
"es6": true
|
||||
},
|
||||
"parserOptions": {
|
||||
"sourceType": "module"
|
||||
},
|
||||
"globals": {
|
||||
"$": false,
|
||||
"_": false,
|
||||
"jQuery": false,
|
||||
"Spinner": false,
|
||||
"Handlebars": false,
|
||||
"XDate": false,
|
||||
"zxcvbn": false,
|
||||
"LazyLoad": false,
|
||||
"Dropbox": false,
|
||||
"SockJS": false,
|
||||
"marked": false,
|
||||
"moment": false,
|
||||
"i18n": false,
|
||||
"DynamicText": false,
|
||||
"LightboxCanvas": false,
|
||||
"bridge": false,
|
||||
"page_params": false,
|
||||
"attachments_ui": false,
|
||||
"csrf_token": false,
|
||||
"typeahead_helper": false,
|
||||
"pygments_data": false,
|
||||
"popovers": false,
|
||||
"server_events": false,
|
||||
"server_events_dispatch": false,
|
||||
"ui": false,
|
||||
"ui_report": false,
|
||||
"ui_util": false,
|
||||
"lightbox": false,
|
||||
"stream_color": false,
|
||||
"people": false,
|
||||
"navigate": false,
|
||||
"settings_account": false,
|
||||
"settings_display": false,
|
||||
"settings_notifications": false,
|
||||
"settings_muting": false,
|
||||
"settings_lab": false,
|
||||
"settings_bots": false,
|
||||
"settings_sections": false,
|
||||
"settings_emoji": false,
|
||||
"settings_org": false,
|
||||
"settings_users": false,
|
||||
"settings_streams": false,
|
||||
"settings_filters": false,
|
||||
"settings": false,
|
||||
"resize": false,
|
||||
"loading": false,
|
||||
"typing": false,
|
||||
"typing_events": false,
|
||||
"typing_data": false,
|
||||
"typing_status": false,
|
||||
"sent_messages": false,
|
||||
"compose": false,
|
||||
"compose_actions": false,
|
||||
"compose_state": false,
|
||||
"compose_fade": false,
|
||||
"overlays": false,
|
||||
"stream_create": false,
|
||||
"stream_edit": false,
|
||||
"subs": false,
|
||||
"stream_muting": false,
|
||||
"stream_events": false,
|
||||
"timerender": false,
|
||||
"message_live_update": false,
|
||||
"message_edit": false,
|
||||
"reload": false,
|
||||
"composebox_typeahead": false,
|
||||
"search": false,
|
||||
"topic_list": false,
|
||||
"topic_generator": false,
|
||||
"gear_menu": false,
|
||||
"hashchange": false,
|
||||
"hash_util": false,
|
||||
"message_list": false,
|
||||
"Filter": false,
|
||||
"pointer": false,
|
||||
"util": false,
|
||||
"MessageListView": false,
|
||||
"blueslip": false,
|
||||
"rows": false,
|
||||
"WinChan": false,
|
||||
"muting_ui": false,
|
||||
"Socket": false,
|
||||
"channel": false,
|
||||
"components": false,
|
||||
"message_viewport": false,
|
||||
"upload_widget": false,
|
||||
"avatar": false,
|
||||
"realm_icon": false,
|
||||
"feature_flags": false,
|
||||
"search_suggestion": false,
|
||||
"notifications": false,
|
||||
"message_flags": false,
|
||||
"bot_data": false,
|
||||
"top_left_corner": false,
|
||||
"stream_sort": false,
|
||||
"stream_list": false,
|
||||
"stream_popover": false,
|
||||
"narrow_state": false,
|
||||
"narrow": false,
|
||||
"admin_sections": false,
|
||||
"admin": false,
|
||||
"stream_data": false,
|
||||
"topic_data": false,
|
||||
"list_util": false,
|
||||
"muting": false,
|
||||
"Dict": false,
|
||||
"unread": false,
|
||||
"alert_words_ui": false,
|
||||
"message_store": false,
|
||||
"message_util": false,
|
||||
"message_events": false,
|
||||
"message_fetch": false,
|
||||
"favicon": false,
|
||||
"condense": false,
|
||||
"list_render": false,
|
||||
"floating_recipient_bar": false,
|
||||
"tab_bar": false,
|
||||
"emoji": false,
|
||||
"presence": false,
|
||||
"activity": false,
|
||||
"invite": false,
|
||||
"colorspace": false,
|
||||
"reactions": false,
|
||||
"tutorial": false,
|
||||
"templates": false,
|
||||
"alert_words": false,
|
||||
"fenced_code": false,
|
||||
"markdown": false,
|
||||
"echo": false,
|
||||
"localstorage": false,
|
||||
"localStorage": false,
|
||||
"current_msg_list": true,
|
||||
"home_msg_list": false,
|
||||
"pm_list": false,
|
||||
"pm_conversations": false,
|
||||
"recent_senders": false,
|
||||
"unread_ui": false,
|
||||
"unread_ops": false,
|
||||
"user_events": false,
|
||||
"Plotly": false,
|
||||
"emoji_codes": false,
|
||||
"drafts": false,
|
||||
"katex": false,
|
||||
"Clipboard": false,
|
||||
"emoji_picker": false,
|
||||
"hotspots": false,
|
||||
"compose_ui": false,
|
||||
"common": false,
|
||||
"desktop_notifications_panel": false
|
||||
},
|
||||
"rules": {
|
||||
"array-callback-return": "error",
|
||||
"array-bracket-spacing": "error",
|
||||
"arrow-spacing": [ "error", { "before": true, "after": true } ],
|
||||
"block-scoped-var": 2,
|
||||
"brace-style": [ "error", "1tbs", { "allowSingleLine": true } ],
|
||||
"camelcase": 0,
|
||||
"comma-dangle": [ "error",
|
||||
{
|
||||
"arrays": "always-multiline",
|
||||
"objects": "always-multiline",
|
||||
"imports": "always-multiline",
|
||||
"exports": "always-multiline",
|
||||
"functions": "never"
|
||||
}
|
||||
],
|
||||
"complexity": [ 0, 4 ],
|
||||
"curly": 2,
|
||||
"dot-notation": [ "error", { "allowKeywords": true } ],
|
||||
"eol-last": [ "error", "always" ],
|
||||
"eqeqeq": 2,
|
||||
"func-style": [ "off", "expression" ],
|
||||
"guard-for-in": 2,
|
||||
"keyword-spacing": [ "error",
|
||||
{
|
||||
"before": true,
|
||||
"after": true,
|
||||
"overrides": {
|
||||
"return": { "after": true },
|
||||
"throw": { "after": true },
|
||||
"case": { "after": true }
|
||||
}
|
||||
}
|
||||
],
|
||||
"max-depth": [ 0, 4 ],
|
||||
"max-len": [ "error", 100, 2,
|
||||
{
|
||||
"ignoreUrls": true,
|
||||
"ignoreComments": false,
|
||||
"ignoreRegExpLiterals": true,
|
||||
"ignoreStrings": true,
|
||||
"ignoreTemplateLiterals": true
|
||||
}
|
||||
],
|
||||
"max-params": [ 0, 3 ],
|
||||
"max-statements": [ 0, 10 ],
|
||||
"new-cap": [ "error",
|
||||
{
|
||||
"newIsCap": true,
|
||||
"capIsNew": false
|
||||
}
|
||||
],
|
||||
"new-parens": 2,
|
||||
"newline-per-chained-call": 0,
|
||||
"no-alert": 2,
|
||||
"no-array-constructor": "error",
|
||||
"no-bitwise": 2,
|
||||
"no-caller": 2,
|
||||
"no-case-declarations": "error",
|
||||
"no-catch-shadow": 2,
|
||||
"no-console": 0,
|
||||
"no-const-assign": "error",
|
||||
"no-control-regex": 2,
|
||||
"no-debugger": 2,
|
||||
"no-delete-var": 2,
|
||||
"no-div-regex": 2,
|
||||
"no-dupe-class-members": "error",
|
||||
"no-dupe-keys": 2,
|
||||
"no-duplicate-imports": "error",
|
||||
"no-else-return": 2,
|
||||
"no-empty": 2,
|
||||
"no-empty-character-class": 2,
|
||||
"no-eq-null": 2,
|
||||
"no-eval": 2,
|
||||
"no-ex-assign": 2,
|
||||
"no-extra-parens": [ "error", "functions" ],
|
||||
"no-extra-semi": 2,
|
||||
"no-fallthrough": 2,
|
||||
"no-floating-decimal": 2,
|
||||
"no-func-assign": 2,
|
||||
"no-implied-eval": 2,
|
||||
"no-iterator": "error",
|
||||
"no-label-var": 2,
|
||||
"no-labels": 2,
|
||||
"no-loop-func": 2,
|
||||
"no-mixed-requires": [ 0, false ],
|
||||
"no-multi-str": 2,
|
||||
"no-native-reassign": 2,
|
||||
"no-nested-ternary": 0,
|
||||
"no-new-func": "error",
|
||||
"no-new-object": 2,
|
||||
"no-new-wrappers": 2,
|
||||
"no-obj-calls": 2,
|
||||
"no-octal": 2,
|
||||
"no-octal-escape": 2,
|
||||
"no-param-reassign": 0,
|
||||
"no-plusplus": 2,
|
||||
"no-proto": 2,
|
||||
"no-redeclare": 2,
|
||||
"no-regex-spaces": 2,
|
||||
"no-restricted-syntax": 0,
|
||||
"no-return-assign": 2,
|
||||
"no-script-url": 2,
|
||||
"no-self-compare": 2,
|
||||
"no-shadow": 0,
|
||||
"no-sync": 2,
|
||||
"no-ternary": 0,
|
||||
"no-undef": "error",
|
||||
"no-undef-init": 2,
|
||||
"no-underscore-dangle": 0,
|
||||
"no-unneeded-ternary": [ "error", { "defaultAssignment": false } ],
|
||||
"no-unreachable": 2,
|
||||
"no-unused-expressions": 2,
|
||||
"no-unused-vars": [ "error",
|
||||
{
|
||||
"vars": "local",
|
||||
"args": "after-used",
|
||||
"varsIgnorePattern": "print_elapsed_time|check_duplicate_ids"
|
||||
}
|
||||
],
|
||||
"no-use-before-define": 2,
|
||||
"no-useless-constructor": "error",
|
||||
// The Zulip codebase complies partially with the "no-useless-escape"
|
||||
// rule; only regex expressions haven't been updated yet.
|
||||
// Updated regex expressions are currently being tested in casper
|
||||
// files and will decide about a potential future enforcement of this rule.
|
||||
"no-useless-escape": 0,
|
||||
"no-whitespace-before-property": 0,
|
||||
"no-with": 2,
|
||||
"one-var": [ "error", "never" ],
|
||||
"padded-blocks": 0,
|
||||
"prefer-const": [ "error",
|
||||
{
|
||||
"destructuring": "any",
|
||||
"ignoreReadBeforeAssign": true
|
||||
}
|
||||
],
|
||||
"quote-props": [ "error", "as-needed",
|
||||
{
|
||||
"keywords": false,
|
||||
"unnecessary": true,
|
||||
"numbers": false
|
||||
}
|
||||
],
|
||||
"quotes": [ 0, "single" ],
|
||||
"radix": 2,
|
||||
"semi": 2,
|
||||
"space-before-blocks": 2,
|
||||
"space-before-function-paren": [ "error",
|
||||
{
|
||||
"anonymous": "always",
|
||||
"named": "never",
|
||||
"asyncArrow": "always"
|
||||
}
|
||||
],
|
||||
"space-in-parens": 2,
|
||||
"space-infix-ops": 0,
|
||||
"spaced-comment": 0,
|
||||
"strict": 0,
|
||||
"template-curly-spacing": "error",
|
||||
"unnecessary-strict": 0,
|
||||
"use-isnan": 2,
|
||||
"valid-typeof": [ "error", { "requireStringLiterals": true } ],
|
||||
"wrap-iife": [ "error", "outside", { "functionPrototypeMethods": false } ],
|
||||
"wrap-regex": 0,
|
||||
"yoda": 2
|
||||
}
|
||||
}
|
25
.gitattributes
vendored
25
.gitattributes
vendored
@@ -1,34 +1,11 @@
|
||||
# DIFFS: Noise suppression.
|
||||
#
|
||||
# Suppress noisy generated files in diffs.
|
||||
# (When you actually want to see these diffs, use `git diff -a`.)
|
||||
|
||||
# Large test fixtures:
|
||||
corporate/tests/stripe_fixtures/*.json -diff
|
||||
|
||||
|
||||
# FORMATTING
|
||||
|
||||
# Maintain LF (Unix-style) newlines in text files.
|
||||
* text=auto eol=lf
|
||||
|
||||
# Make sure various media files never get somehow auto-detected as text
|
||||
# and then newline-converted.
|
||||
*.gif binary
|
||||
*.jpg binary
|
||||
*.jpeg binary
|
||||
*.eot binary
|
||||
*.woff binary
|
||||
*.woff2 binary
|
||||
*.svg binary
|
||||
*.ttf binary
|
||||
*.png binary
|
||||
*.otf binary
|
||||
*.tif binary
|
||||
*.ogg binary
|
||||
*.bson binary
|
||||
*.bmp binary
|
||||
*.mp3 binary
|
||||
*.pdf binary
|
||||
|
||||
# Treat SVG files as code for diffing purposes.
|
||||
*.svg diff
|
||||
|
3
.github/FUNDING.yml
vendored
3
.github/FUNDING.yml
vendored
@@ -1,3 +0,0 @@
|
||||
github: zulip
|
||||
patreon: zulip
|
||||
open_collective: zulip
|
10
.github/ISSUE_TEMPLATE/1_discussed_on_czo.md
vendored
10
.github/ISSUE_TEMPLATE/1_discussed_on_czo.md
vendored
@@ -1,10 +0,0 @@
|
||||
---
|
||||
name: Issue discussed in the Zulip development community
|
||||
about: Bug report, feature or improvement already discussed on chat.zulip.org.
|
||||
---
|
||||
|
||||
<!-- Issue description -->
|
||||
|
||||
<!-- Link to a message in the chat.zulip.org discussion. Message links will still work even if the topic is renamed or resolved. Link back to this issue from the chat.zulip.org thread. -->
|
||||
|
||||
CZO thread
|
18
.github/ISSUE_TEMPLATE/2_bug_report.md
vendored
18
.github/ISSUE_TEMPLATE/2_bug_report.md
vendored
@@ -1,18 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: A concrete bug report with steps to reproduce the behavior. (See also "Possible bug" below.)
|
||||
labels: ["bug"]
|
||||
---
|
||||
|
||||
<!-- Describe what you were expecting to see, what you saw instead, and steps to take in order to reproduce the buggy behavior. Screenshots can be helpful. -->
|
||||
|
||||
<!-- Check the box for the version of Zulip you are using (see https://zulip.com/help/view-zulip-version).-->
|
||||
|
||||
**Zulip Server and web app version:**
|
||||
|
||||
- [ ] Zulip Cloud (`*.zulipchat.com`)
|
||||
- [ ] Zulip Server 10.x
|
||||
- [ ] Zulip Server 9.x
|
||||
- [ ] Zulip Server 8.x
|
||||
- [ ] Zulip Server 7.x or older
|
||||
- [ ] Other or not sure
|
6
.github/ISSUE_TEMPLATE/3_feature_request.md
vendored
6
.github/ISSUE_TEMPLATE/3_feature_request.md
vendored
@@ -1,6 +0,0 @@
|
||||
---
|
||||
name: Feature or improvement request
|
||||
about: A specific proposal for a new feature of improvement. (See also "Feature suggestion or feedback" below.)
|
||||
---
|
||||
|
||||
<!-- Describe the proposal, including how it would help you or your organization. -->
|
14
.github/ISSUE_TEMPLATE/config.yml
vendored
14
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,14 +0,0 @@
|
||||
blank_issues_enabled: true
|
||||
contact_links:
|
||||
- name: Possible bug
|
||||
url: https://zulip.readthedocs.io/en/latest/contributing/reporting-bugs.html
|
||||
about: Report unexpected behavior that may be a bug.
|
||||
- name: Feature suggestion or feedback
|
||||
url: https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html
|
||||
about: Start a discussion about your idea for improving Zulip.
|
||||
- name: Issue with running or upgrading a Zulip server
|
||||
url: https://zulip.readthedocs.io/en/latest/production/troubleshooting.html
|
||||
about: We provide free, interactive support for the vast majority of questions about running a Zulip server.
|
||||
- name: Other support requests and sales questions
|
||||
url: https://zulip.com/help/contact-support
|
||||
about: Contact us — we're happy to help!
|
82
.github/funding.json
vendored
82
.github/funding.json
vendored
@@ -1,82 +0,0 @@
|
||||
{
|
||||
"version": "v1.0.0",
|
||||
"entity": {
|
||||
"type": "organisation",
|
||||
"role": "steward",
|
||||
"name": "Kandra Labs, Inc.",
|
||||
"email": "support@zulip.com",
|
||||
"description": "Guiding the Zulip community in developing a world-class organized team chat product with apps for every major desktop and mobile platform requires leadership from a talented, dedicated team. We believe that the only sustainable model is for our core team to be compensated fairly for their time. We have thus founded a company (Kandra Labs) to steward and financially support Zulip’s development. We are growing our business sustainably, without venture capital funding. VCs are incentivized to push companies to gamble for explosive growth. Often, the result is that a company with a useful product burns rapidly through its resources and goes out of business. We have built Zulip as a sustainable business (also supported by SBIR grants from the US National Science Foundation), and are being thoughtful about our pace of spending. Funding our company without venture capital also allows us to live by our values, without investor pressure to compromise them when doing so might be “good business” or “what everyone does”.",
|
||||
"webpageUrl": {
|
||||
"url": "https://zulip.com/values/",
|
||||
"wellKnown": "https://zulip.com/.well-known/funding-manifest-urls"
|
||||
}
|
||||
},
|
||||
"projects": [
|
||||
{
|
||||
"guid": "zulip",
|
||||
"name": "Zulip",
|
||||
"description": "Zulip is an open-source team chat application designed for seamless remote and hybrid work. With conversations organized by topic, Zulip is ideal for both live and asynchronous communication. Zulip’s 100% open-source software is available as a cloud service or a self-hosted solution, and is used by thousands of organizations around the world. An important part of Zulip’s mission is ensuring that worthy organizations, from programming-language developers to research communities, are able to use Zulip whether or not they have funding. For this reason, we sponsor Zulip Cloud Standard for open source projects, non-profits, education, and academic research. This program has grown exponentially since its inception; today we are proud to fully sponsor Zulip hosting for several hundred organizations. Support from the community will help us continue to afford these programs as their popularity grows. ",
|
||||
"webpageUrl": {
|
||||
"url": "https://zulip.com/",
|
||||
"wellKnown": "https://zulip.com/.well-known/funding-manifest-urls"
|
||||
},
|
||||
"repositoryUrl": {
|
||||
"url": "https://github.com/zulip"
|
||||
},
|
||||
"licenses": ["spdx:Apache-2.0"],
|
||||
"tags": ["communication", "team-chat", "collaboration"]
|
||||
}
|
||||
],
|
||||
"funding": {
|
||||
"channels": [
|
||||
{
|
||||
"guid": "github-sponsors",
|
||||
"type": "payment-provider",
|
||||
"address": "https://github.com/sponsors/zulip",
|
||||
"description": "Preferred channel for sponsoring Zulip, since GitHub Sponsors does not charge any fees to sponsored projects."
|
||||
},
|
||||
{
|
||||
"guid": "patreon",
|
||||
"type": "payment-provider",
|
||||
"address": "https://patreon.com/zulip"
|
||||
},
|
||||
{
|
||||
"guid": "open-collective",
|
||||
"type": "payment-provider",
|
||||
"address": "https://opencollective.com/zulip"
|
||||
}
|
||||
],
|
||||
"plans": [
|
||||
{
|
||||
"guid": "github-sponsors",
|
||||
"status": "active",
|
||||
"name": "Support Zulip",
|
||||
"description": "Contribute to Zulip's development and free hosting for open source projects and other worthy organizations!",
|
||||
"amount": 0,
|
||||
"currency": "USD",
|
||||
"frequency": "monthly",
|
||||
"channels": ["github-sponsors"]
|
||||
},
|
||||
{
|
||||
"guid": "patreon",
|
||||
"status": "active",
|
||||
"name": "Support Zulip",
|
||||
"description": "Contribute to Zulip's development and free hosting for open source projects and other worthy organizations!",
|
||||
"amount": 0,
|
||||
"currency": "USD",
|
||||
"frequency": "monthly",
|
||||
"channels": ["patreon"]
|
||||
},
|
||||
{
|
||||
"guid": "open-collective",
|
||||
"status": "active",
|
||||
"name": "Support Zulip",
|
||||
"description": "Contribute to Zulip's development and free hosting for open source projects and other worthy organizations!",
|
||||
"amount": 0,
|
||||
"currency": "USD",
|
||||
"frequency": "monthly",
|
||||
"channels": ["open-collective"]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
43
.github/pull_request_template.md
vendored
43
.github/pull_request_template.md
vendored
@@ -1,43 +0,0 @@
|
||||
<!-- Describe your pull request here.-->
|
||||
|
||||
Fixes: <!-- Issue link, or clear description.-->
|
||||
|
||||
<!-- If the PR makes UI changes, always include one or more still screenshots to demonstrate your changes. If it seems helpful, add a screen capture of the new functionality as well.
|
||||
|
||||
Tooling tips: https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html
|
||||
-->
|
||||
|
||||
**Screenshots and screen captures:**
|
||||
|
||||
<details>
|
||||
<summary>Self-review checklist</summary>
|
||||
|
||||
<!-- Prior to submitting a PR, follow our step-by-step guide to review your own code:
|
||||
https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code -->
|
||||
|
||||
<!-- Once you create the PR, check off all the steps below that you have completed.
|
||||
If any of these steps are not relevant or you have not completed, leave them unchecked.-->
|
||||
|
||||
- [ ] [Self-reviewed](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code) the changes for clarity and maintainability
|
||||
(variable names, code reuse, readability, etc.).
|
||||
|
||||
Communicate decisions, questions, and potential concerns.
|
||||
|
||||
- [ ] Explains differences from previous plans (e.g., issue description).
|
||||
- [ ] Highlights technical choices and bugs encountered.
|
||||
- [ ] Calls out remaining decisions and concerns.
|
||||
- [ ] Automated tests verify logic where appropriate.
|
||||
|
||||
Individual commits are ready for review (see [commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html)).
|
||||
|
||||
- [ ] Each commit is a coherent idea.
|
||||
- [ ] Commit message(s) explain reasoning and motivation for changes.
|
||||
|
||||
Completed manual review and testing of the following:
|
||||
|
||||
- [ ] Visual appearance of the changes.
|
||||
- [ ] Responsiveness and internationalization.
|
||||
- [ ] Strings and tooltips.
|
||||
- [ ] End-to-end functionality of buttons, interactions and flows.
|
||||
- [ ] Corner cases, error conditions, and easily imagined bugs.
|
||||
</details>
|
@@ -1,46 +0,0 @@
|
||||
name: Check feature level updated
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "api_docs/**"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-feature-level-updated:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.x"
|
||||
|
||||
- name: Add required permissions
|
||||
run: chmod +x ./tools/check-feature-level-updated
|
||||
|
||||
- name: Run tools/check-feature-level-updated
|
||||
id: run_check
|
||||
run: ./tools/check-feature-level-updated >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Report status to CZO
|
||||
if: ${{ steps.run_check.outputs.fail == 'true' && github.repository == 'zulip/zulip'}}
|
||||
uses: zulip/github-actions-zulip/send-message@v1
|
||||
with:
|
||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
||||
email: "github-actions-bot@chat.zulip.org"
|
||||
organization-url: "https://chat.zulip.org"
|
||||
to: "automated testing"
|
||||
topic: ${{ steps.run_check.outputs.topic }}
|
||||
type: "stream"
|
||||
content: ${{ steps.run_check.outputs.content }}
|
||||
|
||||
- name: Fail job if feature level not updated in API docs
|
||||
if: ${{ steps.run_check.outputs.fail == 'true' }}
|
||||
run: exit 1
|
40
.github/workflows/codeql-analysis.yml
vendored
40
.github/workflows/codeql-analysis.yml
vendored
@@ -1,40 +0,0 @@
|
||||
name: "Code scanning"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["*.x", chat.zulip.org, main]
|
||||
tags: ["*"]
|
||||
pull_request:
|
||||
branches: ["*.x", chat.zulip.org, main]
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}"
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
CodeQL:
|
||||
permissions:
|
||||
actions: read # for github/codeql-action/init to get workflow details
|
||||
contents: read # for actions/checkout to fetch code
|
||||
security-events: write # for github/codeql-action/analyze to upload SARIF results
|
||||
if: ${{!github.event.repository.private}}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
|
||||
# Override language selection by uncommenting this and choosing your languages
|
||||
# with:
|
||||
# languages: go, javascript, csharp, python, cpp, java
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
317
.github/workflows/production-suite.yml
vendored
317
.github/workflows/production-suite.yml
vendored
@@ -1,317 +0,0 @@
|
||||
name: Zulip production suite
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["*.x", chat.zulip.org, main]
|
||||
tags: ["*"]
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/production-suite.yml
|
||||
- "**/migrations/**"
|
||||
- manage.py
|
||||
- pnpm-lock.yaml
|
||||
- puppet/**
|
||||
- scripts/**
|
||||
- tools/**
|
||||
- uv.lock
|
||||
- web/babel.config.js
|
||||
- web/postcss.config.js
|
||||
- web/third/**
|
||||
- web/webpack.config.ts
|
||||
- zerver/worker/queue_processors.py
|
||||
- zerver/lib/push_notifications.py
|
||||
- zerver/lib/storage.py
|
||||
- zerver/decorator.py
|
||||
- zproject/**
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}"
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
production_build:
|
||||
# This job builds a release tarball from the current commit, which
|
||||
# will be used for all of the following install/upgrade tests.
|
||||
name: Ubuntu 22.04 production build
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
# Docker images are built from 'tools/ci/Dockerfile'; the comments at
|
||||
# the top explain how to build and upload these images.
|
||||
# Ubuntu 22.04 ships with Python 3.10.12.
|
||||
container: zulip/ci:jammy
|
||||
|
||||
steps:
|
||||
- name: Add required permissions
|
||||
run: |
|
||||
# The checkout actions doesn't clone to ~/zulip or allow
|
||||
# us to use the path option to clone outside the current
|
||||
# /__w/zulip/zulip directory. Since this directory is owned
|
||||
# by root we need to change it's ownership to allow the
|
||||
# github user to clone the code here.
|
||||
# Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE
|
||||
# which is /home/runner/work/.
|
||||
sudo chown -R github .
|
||||
|
||||
# This is the GitHub Actions specific cache directory the
|
||||
# the current github user must be able to access for the
|
||||
# cache action to work. It is owned by root currently.
|
||||
sudo chmod -R 0777 /__w/_temp/
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Create cache directories
|
||||
run: |
|
||||
dirs=(/srv/zulip-emoji-cache)
|
||||
sudo mkdir -p "${dirs[@]}"
|
||||
sudo chown -R github "${dirs[@]}"
|
||||
|
||||
- name: Restore pnpm store
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: /__w/.pnpm-store
|
||||
key: v1-pnpm-store-jammy-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
|
||||
- name: Restore uv cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.cache/uv
|
||||
key: uv-jammy-${{ hashFiles('uv.lock') }}
|
||||
restore-keys: uv-jammy-
|
||||
|
||||
- name: Restore emoji cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: /srv/zulip-emoji-cache
|
||||
key: v1-emoji-jammy-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
|
||||
restore-keys: v1-emoji-jammy
|
||||
|
||||
- name: Build production tarball
|
||||
run: ./tools/ci/production-build
|
||||
|
||||
- name: Upload production build artifacts for install jobs
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: production-tarball
|
||||
path: /tmp/production-build
|
||||
retention-days: 1
|
||||
|
||||
- name: Verify pnpm store path
|
||||
run: |
|
||||
set -x
|
||||
path="$(pnpm store path)"
|
||||
[[ "$path" == /__w/.pnpm-store/* ]]
|
||||
|
||||
- name: Minimize uv cache
|
||||
run: uv cache prune --ci
|
||||
|
||||
- name: Generate failure report string
|
||||
id: failure_report_string
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Report status to CZO
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
uses: zulip/github-actions-zulip/send-message@v1
|
||||
with:
|
||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
||||
email: "github-actions-bot@chat.zulip.org"
|
||||
organization-url: "https://chat.zulip.org"
|
||||
to: "automated testing"
|
||||
topic: ${{ steps.failure_report_string.outputs.topic }}
|
||||
type: "stream"
|
||||
content: ${{ steps.failure_report_string.outputs.content }}
|
||||
|
||||
production_install:
|
||||
# This job installs the server release tarball built above on a
|
||||
# range of platforms, and does some basic health checks on the
|
||||
# resulting installer Zulip server.
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
# Docker images are built from 'tools/ci/Dockerfile'; the comments at
|
||||
# the top explain how to build and upload these images.
|
||||
- docker_image: zulip/ci:jammy
|
||||
name: Ubuntu 22.04 production install and PostgreSQL upgrade with pgroonga
|
||||
os: jammy
|
||||
extra-args: ""
|
||||
|
||||
- docker_image: zulip/ci:noble
|
||||
name: Ubuntu 24.04 production install
|
||||
os: noble
|
||||
extra-args: ""
|
||||
|
||||
- docker_image: zulip/ci:bookworm
|
||||
name: Debian 12 production install with custom db name and user
|
||||
os: bookworm
|
||||
extra-args: --test-custom-db
|
||||
|
||||
name: ${{ matrix.name }}
|
||||
container:
|
||||
image: ${{ matrix.docker_image }}
|
||||
options: --init
|
||||
runs-on: ubuntu-latest
|
||||
needs: production_build
|
||||
|
||||
steps:
|
||||
- name: Download built production tarball
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: production-tarball
|
||||
path: /tmp
|
||||
|
||||
- name: Add required permissions and setup
|
||||
run: |
|
||||
# This is the GitHub Actions specific cache directory the
|
||||
# the current github user must be able to access for the
|
||||
# cache action to work. It is owned by root currently.
|
||||
sudo chmod -R 0777 /__w/_temp/
|
||||
|
||||
# Since actions/download-artifact@v4 loses all the permissions
|
||||
# of the tarball uploaded by the upload artifact fix those.
|
||||
chmod +x /tmp/production-upgrade-pg
|
||||
chmod +x /tmp/production-pgroonga
|
||||
chmod +x /tmp/production-install
|
||||
chmod +x /tmp/production-verify
|
||||
chmod +x /tmp/generate-failure-message
|
||||
|
||||
- name: Create cache directories
|
||||
run: |
|
||||
dirs=(/srv/zulip-emoji-cache)
|
||||
sudo mkdir -p "${dirs[@]}"
|
||||
sudo chown -R github "${dirs[@]}"
|
||||
|
||||
- name: Install production
|
||||
run: sudo /tmp/production-install ${{ matrix.extra-args }}
|
||||
|
||||
- name: Verify install
|
||||
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
||||
|
||||
- name: Install pgroonga
|
||||
if: ${{ matrix.os == 'jammy' }}
|
||||
run: sudo /tmp/production-pgroonga
|
||||
|
||||
- name: Verify install after installing pgroonga
|
||||
if: ${{ matrix.os == 'jammy' }}
|
||||
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
||||
|
||||
- name: Upgrade postgresql
|
||||
if: ${{ matrix.os == 'jammy' }}
|
||||
run: sudo /tmp/production-upgrade-pg
|
||||
|
||||
- name: Verify install after upgrading postgresql
|
||||
if: ${{ matrix.os == 'jammy' }}
|
||||
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
||||
|
||||
- name: Generate failure report string
|
||||
id: failure_report_string
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
run: /tmp/generate-failure-message >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Report status to CZO
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
uses: zulip/github-actions-zulip/send-message@v1
|
||||
with:
|
||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
||||
email: "github-actions-bot@chat.zulip.org"
|
||||
organization-url: "https://chat.zulip.org"
|
||||
to: "automated testing"
|
||||
topic: ${{ steps.failure_report_string.outputs.topic }}
|
||||
type: "stream"
|
||||
content: ${{ steps.failure_report_string.outputs.content }}
|
||||
|
||||
production_upgrade:
|
||||
# The production upgrade job starts with a container with a
|
||||
# previous Zulip release installed, and attempts to upgrade it to
|
||||
# the release tarball built for the current commit being tested.
|
||||
#
|
||||
# This is intended to catch bugs that result in the upgrade
|
||||
# process failing.
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
# Docker images are built from 'tools/ci/Dockerfile.prod'; the comments at
|
||||
# the top explain how to build and upload these images.
|
||||
- docker_image: zulip/ci:jammy-6.0
|
||||
name: 6.0 Version Upgrade
|
||||
os: jammy
|
||||
- docker_image: zulip/ci:bookworm-7.0
|
||||
name: 7.0 Version Upgrade
|
||||
os: bookworm
|
||||
- docker_image: zulip/ci:bookworm-8.0
|
||||
name: 8.0 Version Upgrade
|
||||
os: bookworm
|
||||
- docker_image: zulip/ci:noble-9.0
|
||||
name: 9.0 Version Upgrade
|
||||
os: noble
|
||||
- docker_image: zulip/ci:noble-10.0
|
||||
name: 10.0 Version Upgrade
|
||||
os: noble
|
||||
|
||||
name: ${{ matrix.name }}
|
||||
container:
|
||||
image: ${{ matrix.docker_image }}
|
||||
options: --init
|
||||
runs-on: ubuntu-latest
|
||||
needs: production_build
|
||||
|
||||
steps:
|
||||
- name: Download built production tarball
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: production-tarball
|
||||
path: /tmp
|
||||
|
||||
- name: Add required permissions and setup
|
||||
run: |
|
||||
# This is the GitHub Actions specific cache directory the
|
||||
# the current github user must be able to access for the
|
||||
# cache action to work. It is owned by root currently.
|
||||
sudo chmod -R 0777 /__w/_temp/
|
||||
|
||||
# Since actions/download-artifact@v4 loses all the permissions
|
||||
# of the tarball uploaded by the upload artifact fix those.
|
||||
chmod +x /tmp/production-upgrade
|
||||
chmod +x /tmp/production-verify
|
||||
chmod +x /tmp/generate-failure-message
|
||||
|
||||
- name: Create cache directories
|
||||
run: |
|
||||
dirs=(/srv/zulip-emoji-cache)
|
||||
sudo mkdir -p "${dirs[@]}"
|
||||
sudo chown -R github "${dirs[@]}"
|
||||
|
||||
- name: Upgrade production
|
||||
run: sudo /tmp/production-upgrade
|
||||
|
||||
# TODO: We should be running production-verify here, but it
|
||||
# doesn't pass yet.
|
||||
#
|
||||
# - name: Verify install
|
||||
# run: sudo /tmp/production-verify
|
||||
|
||||
- name: Generate failure report string
|
||||
id: failure_report_string
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
run: /tmp/generate-failure-message >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Report status to CZO
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
uses: zulip/github-actions-zulip/send-message@v1
|
||||
with:
|
||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
||||
email: "github-actions-bot@chat.zulip.org"
|
||||
organization-url: "https://chat.zulip.org"
|
||||
to: "automated testing"
|
||||
topic: ${{ steps.failure_report_string.outputs.topic }}
|
||||
type: "stream"
|
||||
content: ${{ steps.failure_report_string.outputs.content }}
|
27
.github/workflows/update-oneclick-apps.yml
vendored
27
.github/workflows/update-oneclick-apps.yml
vendored
@@ -1,27 +0,0 @@
|
||||
name: Update one click apps
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
update-digitalocean-oneclick-app:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Update DigitalOcean one click app
|
||||
env:
|
||||
DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }}
|
||||
ZULIP_API_KEY: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_API_KEY }}
|
||||
ZULIP_EMAIL: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_EMAIL }}
|
||||
ZULIP_SITE: https://chat.zulip.org
|
||||
ONE_CLICK_ACTION_STREAM: kandra ops
|
||||
PYTHON_DIGITALOCEAN_REQUEST_TIMEOUT_SEC: 30
|
||||
RELEASE_VERSION: ${{ github.event.release.tag_name }}
|
||||
run: |
|
||||
export PATH="$HOME/.local/bin:$PATH"
|
||||
git clone https://github.com/zulip/marketplace-partners
|
||||
pip3 install python-digitalocean zulip fab-classic PyNaCl
|
||||
echo $PATH
|
||||
python3 tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py
|
260
.github/workflows/zulip-ci.yml
vendored
260
.github/workflows/zulip-ci.yml
vendored
@@ -1,260 +0,0 @@
|
||||
# NOTE: Everything test in this file should be in `tools/test-all`. If there's a
|
||||
# reason not to run it there, it should be there as a comment
|
||||
# explaining why.
|
||||
|
||||
name: Zulip CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["*.x", chat.zulip.org, main]
|
||||
tags: ["*"]
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}"
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
# Base images are built using `tools/ci/Dockerfile.prod.template`.
|
||||
# The comments at the top explain how to build and upload these images.
|
||||
# Ubuntu 22.04 ships with Python 3.10.12.
|
||||
- docker_image: zulip/ci:jammy
|
||||
name: Ubuntu 22.04 (Python 3.10, backend + frontend)
|
||||
os: jammy
|
||||
include_documentation_tests: false
|
||||
include_frontend_tests: true
|
||||
# Debian 12 ships with Python 3.11.2.
|
||||
- docker_image: zulip/ci:bookworm
|
||||
name: Debian 12 (Python 3.11, backend + documentation)
|
||||
os: bookworm
|
||||
include_documentation_tests: true
|
||||
include_frontend_tests: false
|
||||
# Ubuntu 24.04 ships with Python 3.12.2.
|
||||
- docker_image: zulip/ci:noble
|
||||
name: Ubuntu 24.04 (Python 3.12, backend)
|
||||
os: noble
|
||||
include_documentation_tests: false
|
||||
include_frontend_tests: false
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
name: ${{ matrix.name }}
|
||||
container: ${{ matrix.docker_image }}
|
||||
env:
|
||||
# GitHub Actions sets HOME to /github/home which causes
|
||||
# problem later in provision and frontend test that runs
|
||||
# tools/setup/postgresql-init-dev-db because of the .pgpass
|
||||
# location. PostgreSQL (psql) expects .pgpass to be at
|
||||
# /home/github/.pgpass and setting home to `/home/github/`
|
||||
# ensures it written there because we write it to ~/.pgpass.
|
||||
HOME: /home/github/
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Create cache directories
|
||||
run: |
|
||||
dirs=(/srv/zulip-emoji-cache)
|
||||
sudo mkdir -p "${dirs[@]}"
|
||||
sudo chown -R github "${dirs[@]}"
|
||||
|
||||
- name: Restore pnpm store
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: /__w/.pnpm-store
|
||||
key: v1-pnpm-store-${{ matrix.os }}-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
|
||||
- name: Restore uv cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.cache/uv
|
||||
key: uv-${{ matrix.os }}-${{ hashFiles('uv.lock') }}
|
||||
restore-keys: uv-${{ matrix.os }}-
|
||||
|
||||
- name: Restore emoji cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: /srv/zulip-emoji-cache
|
||||
key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json', 'tools/setup/emoji/build_emoji', 'tools/setup/emoji/emoji_setup_utils.py', 'tools/setup/emoji/emoji_names.py', 'package.json') }}
|
||||
restore-keys: v1-emoji-${{ matrix.os }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
# This is the main setup job for the test suite
|
||||
./tools/ci/setup-backend --skip-dev-db-build
|
||||
scripts/lib/clean_unused_caches.py --verbose --threshold=0
|
||||
|
||||
- name: Run tools test
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
./tools/test-tools
|
||||
|
||||
- name: Run Codespell lint
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
./tools/run-codespell
|
||||
|
||||
# We run the tests that are only run in a specific job early, so
|
||||
# that we get feedback to the developer about likely failures as
|
||||
# quickly as possible. Backend/mypy failures that aren't
|
||||
# identical across different versions are much more rare than
|
||||
# frontend linter or node test failures.
|
||||
- name: Run documentation and api tests
|
||||
if: ${{ matrix.include_documentation_tests }}
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
# In CI, we only test links we control in test-documentation to avoid flakes
|
||||
./tools/test-documentation --skip-external-links
|
||||
./tools/test-help-documentation --skip-external-links
|
||||
./tools/test-api
|
||||
|
||||
- name: Run node tests
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
# Run the node tests first, since they're fast and deterministic
|
||||
./tools/test-js-with-node --coverage --parallel=1
|
||||
|
||||
- name: Run frontend lint
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky
|
||||
|
||||
- name: Check schemas
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
# Check that various schemas are consistent. (is fast)
|
||||
./tools/check-schemas
|
||||
|
||||
- name: Check capitalization of strings
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
./manage.py makemessages --locale en
|
||||
PYTHONWARNINGS=ignore ./tools/check-capitalization --no-generate
|
||||
PYTHONWARNINGS=ignore ./tools/check-frontend-i18n --no-generate
|
||||
|
||||
- name: Run puppeteer tests
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
./tools/test-js-with-puppeteer
|
||||
|
||||
- name: Check pnpm dedupe
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
run: pnpm dedupe --check
|
||||
|
||||
- name: Run backend lint
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
echo "Test suite is running under $(python --version)."
|
||||
./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky
|
||||
|
||||
- name: Run backend tests
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
./tools/test-backend ${{ matrix.os != 'bookworm' && '--coverage' || '' }} --xml-report --no-html-report --include-webhooks --include-transaction-tests --no-cov-cleanup --ban-console-output
|
||||
|
||||
- name: Run mypy
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
# We run mypy after the backend tests so we get output from the
|
||||
# backend tests, which tend to uncover more serious problems, first.
|
||||
./tools/run-mypy --version
|
||||
./tools/run-mypy
|
||||
|
||||
- name: Run miscellaneous tests
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
uv lock --check
|
||||
|
||||
# ./tools/test-run-dev # https://github.com/zulip/zulip/pull/14233
|
||||
#
|
||||
# This test has been persistently flaky at like 1% frequency, is slow,
|
||||
# and is for a very specific single feature, so we don't run it by default:
|
||||
# ./tools/test-queue-worker-reload
|
||||
|
||||
./tools/test-migrations
|
||||
./tools/setup/optimize-svg --check
|
||||
./tools/setup/generate_integration_bots_avatars.py --check-missing
|
||||
./tools/ci/check-executables
|
||||
|
||||
# Ban check-database-compatibility from transitively
|
||||
# relying on static/generated, because it might not be
|
||||
# up-to-date at that point in upgrade-zulip-stage-2.
|
||||
chmod 000 static/generated web/generated
|
||||
./scripts/lib/check-database-compatibility
|
||||
chmod 755 static/generated web/generated
|
||||
|
||||
- name: Check for untracked files
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
# This final check looks for untracked files that may have been
|
||||
# created by test-backend or provision.
|
||||
untracked="$(git ls-files --exclude-standard --others)"
|
||||
if [ -n "$untracked" ]; then
|
||||
printf >&2 "Error: untracked files:\n%s\n" "$untracked"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Upload coverage reports
|
||||
|
||||
# Only upload coverage when both frontend and backend
|
||||
# tests are run.
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
uses: codecov/codecov-action@v4
|
||||
with:
|
||||
files: var/coverage.xml,var/node-coverage/lcov.info
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
- name: Store Puppeteer artifacts
|
||||
# Upload these on failure, as well
|
||||
if: ${{ always() && matrix.include_frontend_tests }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: puppeteer
|
||||
path: ./var/puppeteer
|
||||
retention-days: 60
|
||||
|
||||
- name: Check development database build
|
||||
run: ./tools/ci/setup-backend
|
||||
|
||||
- name: Verify pnpm store path
|
||||
run: |
|
||||
set -x
|
||||
path="$(pnpm store path)"
|
||||
[[ "$path" == /__w/.pnpm-store/* ]]
|
||||
|
||||
- name: Minimize uv cache
|
||||
run: uv cache prune --ci
|
||||
|
||||
- name: Generate failure report string
|
||||
id: failure_report_string
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Report status to CZO
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
uses: zulip/github-actions-zulip/send-message@v1
|
||||
with:
|
||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
||||
email: "github-actions-bot@chat.zulip.org"
|
||||
organization-url: "https://chat.zulip.org"
|
||||
to: "automated testing"
|
||||
topic: ${{ steps.failure_report_string.outputs.topic }}
|
||||
type: "stream"
|
||||
content: ${{ steps.failure_report_string.outputs.content }}
|
39
.gitignore
vendored
39
.gitignore
vendored
@@ -12,53 +12,31 @@
|
||||
# * Subdirectories with several internal things to ignore get their own
|
||||
# `.gitignore` files.
|
||||
#
|
||||
# * Comments must be on their own line. (Otherwise they don't work.)
|
||||
#
|
||||
# See `git help ignore` for details on the format.
|
||||
|
||||
## Config files for the dev environment
|
||||
/zproject/apns-dev.pem
|
||||
/zproject/apns-dev-key.p8
|
||||
/zproject/dev-secrets.conf
|
||||
/zproject/custom_dev_settings.py
|
||||
/tools/conf.ini
|
||||
/tools/custom_provision
|
||||
/tools/droplets/conf.ini
|
||||
|
||||
## Byproducts of setting up and using the dev environment
|
||||
*.pyc
|
||||
*.tsbuildinfo
|
||||
package-lock.json
|
||||
|
||||
/.vagrant
|
||||
/var
|
||||
|
||||
/.dmypy.json
|
||||
/.ruff_cache
|
||||
/.venv
|
||||
|
||||
# Generated i18n data
|
||||
/locale/en
|
||||
/locale/language_options.json
|
||||
/locale/language_name_map.json
|
||||
/locale/*/mobile.json
|
||||
|
||||
# Static build
|
||||
*.mo
|
||||
npm-debug.log
|
||||
/.pnpm-store
|
||||
/node_modules
|
||||
/prod-static
|
||||
/staticfiles.json
|
||||
/webpack-stats-production.json
|
||||
zulip-git-version
|
||||
/yarn-error.log
|
||||
|
||||
# Test / analysis tools
|
||||
.coverage
|
||||
|
||||
## Files (or really symlinks) created in a prod deployment
|
||||
/zproject/prod_settings.py
|
||||
|
||||
## Files left by various editors and local environments
|
||||
# (Ideally these should be in everyone's respective personal gitignore files.)
|
||||
*~
|
||||
@@ -71,21 +49,8 @@ zulip.kdev4
|
||||
*.kate-swp
|
||||
*.sublime-project
|
||||
*.sublime-workspace
|
||||
.vscode/
|
||||
*.DS_Store
|
||||
# VS Code. Avoid checking in .vscode in general, while still specifying
|
||||
# recommended extensions for working with this repository.
|
||||
/.vscode/**/*
|
||||
!/.vscode/extensions.json
|
||||
# .cache/ is generated by VS Code test runner
|
||||
.cache/
|
||||
.eslintcache
|
||||
|
||||
# Core dump files
|
||||
core
|
||||
|
||||
# Static generated files for landing page.
|
||||
/static/images/landing-page/hello/generated
|
||||
|
||||
## Miscellaneous
|
||||
# (Ideally this section is empty.)
|
||||
.transifexrc
|
||||
|
6
.gitlint
6
.gitlint
@@ -1,13 +1,13 @@
|
||||
[general]
|
||||
ignore=title-trailing-punctuation, body-min-length, body-is-missing
|
||||
|
||||
extra-path=tools/lib/gitlint_rules.py
|
||||
extra-path=tools/lib/gitlint-rules.py
|
||||
|
||||
[title-match-regex]
|
||||
regex=^(.+:\ )?[A-Z].+\.$
|
||||
regex=^.+\.$
|
||||
|
||||
[title-max-length]
|
||||
line-length=72
|
||||
line-length=76
|
||||
|
||||
[body-max-line-length]
|
||||
line-length=76
|
||||
|
184
.mailmap
184
.mailmap
@@ -1,184 +0,0 @@
|
||||
# This file teaches `git log` and friends the canonical names
|
||||
# and email addresses to use for our contributors.
|
||||
#
|
||||
# For details on the format, see:
|
||||
# https://git.github.io/htmldocs/gitmailmap.html
|
||||
#
|
||||
# Handy commands for examining or adding to this file:
|
||||
#
|
||||
# # shows all names/emails after mapping, sorted:
|
||||
# $ git shortlog -es | sort -k2
|
||||
#
|
||||
# # shows raw names/emails, filtered by mapped name:
|
||||
# $ git log --format='%an %ae' --author=$NAME | uniq -c
|
||||
|
||||
acrefoot <acrefoot@zulip.com> <acrefoot@alum.mit.edu>
|
||||
acrefoot <acrefoot@zulip.com> <acrefoot@dropbox.com>
|
||||
acrefoot <acrefoot@zulip.com> <acrefoot@humbughq.com>
|
||||
Adam Benesh <Adam.Benesh@gmail.com>
|
||||
Adam Benesh <Adam.Benesh@gmail.com> <Adam-Daniel.Benesh@t-systems.com>
|
||||
Adarsh Tiwari <xoldyckk@gmail.com>
|
||||
Aditya Chaudhary <aditya.chaudhary1558@gmail.com>
|
||||
Adnan Shabbir Husain <generaladnan139@gmail.com>
|
||||
Adnan Shabbir Husain <generaladnan139@gmail.com> <78212328+adnan-td@users.noreply.github.com>
|
||||
Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net>
|
||||
Alex Vandiver <alexmv@zulip.com> <github@chmrr.net>
|
||||
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com>
|
||||
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@zulip.com>
|
||||
Alya Abbott <alya@zulip.com> <2090066+alya@users.noreply.github.com>
|
||||
Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com>
|
||||
Aman Agrawal <amanagr@zulip.com>
|
||||
Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in>
|
||||
Aman Vishwakarma <vishwakarmarambhawan572@gmail.com>
|
||||
Aman Vishwakarma <vishwakarmarambhawan572@gmail.com> <185982038+whilstsomebody@users.noreply.github.com>
|
||||
Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com>
|
||||
Anders Kaseorg <anders@zulip.com> <andersk@mit.edu>
|
||||
aparna-bhatt <aparnabhatt2001@gmail.com> <86338542+aparna-bhatt@users.noreply.github.com>
|
||||
Apoorva Pendse <apoorvavpendse@gmail.com>
|
||||
Aryan Bhokare <aryan1bhokare@gmail.com>
|
||||
Aryan Bhokare <aryan1bhokare@gmail.com> <92683836+aryan-bhokare@users.noreply.github.com>
|
||||
Aryan Shridhar <aryanshridhar7@gmail.com>
|
||||
Aryan Shridhar <aryanshridhar7@gmail.com> <53977614+aryanshridhar@users.noreply.github.com>
|
||||
Ashwat Kumar Singh <ashwat.kumarsingh.met20@itbhu.ac.in>
|
||||
Austin Riba <austin@zulip.com> <austin@m51.io>
|
||||
Bedo Khaled <bedokhaled66@gmail.com>
|
||||
Bedo Khaled <bedokhaled66@gmail.com> <64221784+abdelrahman725@users.noreply.github.com>
|
||||
BIKI DAS <bikid475@gmail.com>
|
||||
Brijmohan Siyag <brijsiyag@gmail.com>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <bjwhitta@asu.edu>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <brock@zulip.com>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <brock@zulip.org>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <brock@zulipchat.org>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <brockwhittaker@Brocks-MacBook.local>
|
||||
Chris Bobbe <cbobbe@zulip.com> <cbobbe@zulipchat.com>
|
||||
Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com>
|
||||
codewithnick <nikhilsingh526452@gmail.com>
|
||||
Danny Su <contact@dannysu.com> <opensource@emailengine.org>
|
||||
Dhruv Goyal <dhruvgoyal.dev@gmail.com>
|
||||
Dinesh <chdinesh1089@gmail.com>
|
||||
Dinesh <chdinesh1089@gmail.com> <chdinesh1089>
|
||||
Eeshan Garg <eeshan@zulip.com> <jerryguitarist@gmail.com>
|
||||
Eric Smith <erwsmith@gmail.com> <99841919+erwsmith@users.noreply.github.com>
|
||||
Evy Kassirer <evy@zulip.com>
|
||||
Evy Kassirer <evy@zulip.com> <evy.kassirer@gmail.com>
|
||||
Evy Kassirer <evy@zulip.com> <evykassirer@users.noreply.github.com>
|
||||
Ganesh Pawar <pawarg256@gmail.com> <58626718+ganpa3@users.noreply.github.com>
|
||||
Greg Price <greg@zulip.com> <gnprice@gmail.com>
|
||||
Greg Price <greg@zulip.com> <greg@zulipchat.com>
|
||||
Greg Price <greg@zulip.com> <price@mit.edu>
|
||||
Hardik Dharmani <Ddharmani99@gmail.com> <ddharmani99@gmail.com>
|
||||
Harsh Bansal <harsh@harshbansal.in>
|
||||
Harsh Meena <reharshmeena@gmail.com>
|
||||
Harsh Meena <reharshmeena@gmail.com> <116981900+reharsh@users.noreply.github.com>
|
||||
Hemant Umre <hemantumre12@gmail.com> <87542880+HemantUmre12@users.noreply.github.com>
|
||||
Jai soni <jai_s@me.iitr.ac.in>
|
||||
Jai soni <jai_s@me.iitr.ac.in> <76561593+jai2201@users.noreply.github.com>
|
||||
Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com>
|
||||
Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com>
|
||||
Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com>
|
||||
Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com>
|
||||
Jitendra Kumar <jk69854@gmail.com>
|
||||
Jitendra Kumar <jk69854@gmail.com> <36557466+jitendra-ky@users.noreply.github.com>
|
||||
John Lu <JohnLu10212004@gmail.com>
|
||||
John Lu <JohnLu10212004@gmail.com> <87673068+JohnLu2004@users.noreply.github.com>
|
||||
Joseph Ho <josephho678@gmail.com>
|
||||
Joseph Ho <josephho678@gmail.com> <62449508+Joelute@users.noreply.github.com>
|
||||
Julia Bichler <julia.bichler@tum.de> <74348920+juliaBichler01@users.noreply.github.com>
|
||||
Karl Stolley <karl@zulip.com> <karl@stolley.dev>
|
||||
Kartikay Sambher <kartikaysambher@gmail.com>
|
||||
Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com>
|
||||
Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com>
|
||||
Kevin Scott <kevin.scott.98@gmail.com>
|
||||
Kislay Verma <kislayuv27@gmail.com>
|
||||
Klara Brrettby <klara.bratteby@gmail.com>
|
||||
Klara Brrettby <klara.bratteby@gmail.com> <93648999+klarabratteby@users.noreply.github.com>
|
||||
Kumar Aniket <sachinaniket2004@gmail.com>
|
||||
Kumar Aniket <sachinaniket2004@gmail.com> <142340063+opmkumar@users.noreply.github.com>
|
||||
Kunal Sharma <v.shm.kunal@gmail.com>
|
||||
Lalit Kumar Singh <lalitkumarsingh3716@gmail.com>
|
||||
Lalit Kumar Singh <lalitkumarsingh3716@gmail.com> <lalits01@smartek21.com>
|
||||
Lauryn Menard <lauryn@zulip.com> <63245456+laurynmm@users.noreply.github.com>
|
||||
Lauryn Menard <lauryn@zulip.com> <lauryn.menard@gmail.com>
|
||||
m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in>
|
||||
m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in> <pururshottam.tiwari.cd.cse19@itbhu.ac.in>
|
||||
Maneesh Shukla <shuklamaneesh24@gmail.com> <143504391+shuklamaneesh23@users.noreply.github.com>
|
||||
Mateusz Mandera <mateusz.mandera@zulip.com> <mateusz.mandera@protonmail.com>
|
||||
Matt Keller <matt@zulip.com>
|
||||
Matt Keller <matt@zulip.com> <m@cognusion.com>
|
||||
Nehal Sharma <bablinaneh@gmail.com>
|
||||
Nehal Sharma <bablinaneh@gmail.com> <68962290+N-Shar-ma@users.noreply.github.com>
|
||||
Nimish Medatwal <medatwalnimish@gmail.com>
|
||||
Noble Mittal <noblemittal@outlook.com> <62551163+beingnoble03@users.noreply.github.com>
|
||||
nzai <nzaih18@gmail.com> <70953556+nzaih1999@users.noreply.github.com>
|
||||
Palash Baderia <palash.baderia@outlook.com>
|
||||
Palash Baderia <palash.baderia@outlook.com> <66828942+palashb01@users.noreply.github.com>
|
||||
Palash Raghuwanshi <singhpalash0@gmail.com>
|
||||
Parth <mittalparth22@gmail.com>
|
||||
Prakhar Pratyush <prakhar@zulip.com> <prakhar841301@gmail.com>
|
||||
Pratik Chanda <pratikchanda2000@gmail.com>
|
||||
Pratik Solanki <pratiksolanki2021@gmail.com>
|
||||
Priyam Seth <sethpriyam1@gmail.com> <b19188@students.iitmandi.ac.in>
|
||||
Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com>
|
||||
Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com>
|
||||
Rein Zustand (rht) <rhtbot@protonmail.com>
|
||||
Rishabh Maheshwari <b20063@students.iitmandi.ac.in>
|
||||
Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu>
|
||||
Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com>
|
||||
Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com>
|
||||
Ritwik Patnaik <ritwikpatnaik@gmail.com>
|
||||
Rixant Rokaha <rixantrokaha@gmail.com>
|
||||
Rixant Rokaha <rixantrokaha@gmail.com> <rishantrokaha@gmail.com>
|
||||
Rixant Rokaha <rixantrokaha@gmail.com> <rrokaha@caldwell.edu>
|
||||
Rohan Gudimetla <rohan.gudimetla07@gmail.com>
|
||||
Sahil Batra <sahil@zulip.com> <35494118+sahil839@users.noreply.github.com>
|
||||
Sahil Batra <sahil@zulip.com> <sahilbatra839@gmail.com>
|
||||
Sanchit Sharma <ssharmas10662@gmail.com>
|
||||
Satyam Bansal <sbansal1999@gmail.com>
|
||||
Sayam Samal <samal.sayam@gmail.com>
|
||||
Scott Feeney <scott@oceanbase.org> <scott@humbughq.com>
|
||||
Scott Feeney <scott@oceanbase.org> <scott@zulip.com>
|
||||
Shashank Singh <21bec103@iiitdmj.ac.in>
|
||||
Shlok Patel <shlokcpatel2001@gmail.com>
|
||||
Shu Chen <shu@zulip.com>
|
||||
Shubham Padia <shubham@zulip.com>
|
||||
Shubham Padia <shubham@zulip.com> <shubham-padia@users.noreply.github.com>
|
||||
Shubham Padia <shubham@zulip.com> <shubham@glints.com>
|
||||
Somesh Ranjan <somesh.ranjan.met20@itbhu.ac.in> <77766761+somesh202@users.noreply.github.com>
|
||||
Steve Howell <showell@zulip.com> <showell30@yahoo.com>
|
||||
Steve Howell <showell@zulip.com> <showell@yahoo.com>
|
||||
Steve Howell <showell@zulip.com> <showell@zulipchat.com>
|
||||
Steve Howell <showell@zulip.com> <steve@humbughq.com>
|
||||
Steve Howell <showell@zulip.com> <steve@zulip.com>
|
||||
strifel <info@strifel.de>
|
||||
Sujal Shah <sujalshah28092004@gmail.com>
|
||||
Tanmay Kumar <tnmdotkr@gmail.com>
|
||||
Tanmay Kumar <tnmdotkr@gmail.com> <133781250+tnmkr@users.noreply.github.com>
|
||||
Tim Abbott <tabbott@zulip.com>
|
||||
Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com>
|
||||
Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com>
|
||||
Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu>
|
||||
Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com>
|
||||
Tomasz Kolek <tomasz-kolek@o2.pl> <tomasz-kolek@go2.pl>
|
||||
Ujjawal Modi <umodi2003@gmail.com> <99073049+Ujjawal3@users.noreply.github.com>
|
||||
umkay <ukhan@zulipchat.com> <umaimah.k@gmail.com>
|
||||
umkay <ukhan@zulipchat.com> <umkay@users.noreply.github.com>
|
||||
Viktor Illmer <1476338+v-ji@users.noreply.github.com>
|
||||
Vishesh Singh <vishesh.bhu1971@gmail.com>
|
||||
Vishesh Singh <vishesh.bhu1971@gmail.com> <142628839+NotVishesh@users.noreply.github.com>
|
||||
Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com>
|
||||
Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com>
|
||||
Vivek Tripathi <vivektripathi8005@gmail.com>
|
||||
Waseem Daher <wdaher@zulip.com> <wdaher@dropbox.com>
|
||||
Waseem Daher <wdaher@zulip.com> <wdaher@humbughq.com>
|
||||
Yash RE <33805964+YashRE42@users.noreply.github.com>
|
||||
Yash RE <33805964+YashRE42@users.noreply.github.com> <YashRE42@github.com>
|
||||
Yogesh Sirsat <yogeshsirsat56@gmail.com>
|
||||
Yogesh Sirsat <yogeshsirsat56@gmail.com> <41695888+yogesh-sirsat@users.noreply.github.com>
|
||||
Zeeshan Equbal <equbalzeeshan@gmail.com>
|
||||
Zeeshan Equbal <equbalzeeshan@gmail.com> <54993043+zee-bit@users.noreply.github.com>
|
||||
Zev Benjamin <zev@zulip.com> <zev@dropbox.com>
|
||||
Zev Benjamin <zev@zulip.com> <zev@humbughq.com>
|
||||
Zev Benjamin <zev@zulip.com> <zev@mit.edu>
|
||||
Zixuan James Li <p359101898@gmail.com>
|
||||
Zixuan James Li <p359101898@gmail.com> <359101898@qq.com>
|
||||
Zixuan James Li <p359101898@gmail.com> <39874143+PIG208@users.noreply.github.com>
|
@@ -1,17 +0,0 @@
|
||||
pnpm-lock.yaml
|
||||
/api_docs/**/*.md
|
||||
/corporate/tests/stripe_fixtures
|
||||
/help/**/*.md
|
||||
/locale
|
||||
/templates/**/*.md
|
||||
/tools/setup/emoji/emoji_map.json
|
||||
/web/third/*
|
||||
!/web/third/marked
|
||||
/web/third/marked/*
|
||||
!/web/third/marked/lib
|
||||
/web/third/marked/lib/*
|
||||
!/web/third/marked/lib/marked.d.cts
|
||||
/zerver/tests/fixtures
|
||||
/zerver/webhooks/*/doc.md
|
||||
/zerver/webhooks/github/githubsponsors.md
|
||||
/zerver/webhooks/*/fixtures
|
@@ -1,19 +0,0 @@
|
||||
# https://docs.readthedocs.io/en/stable/config-file/v2.html
|
||||
version: 2
|
||||
|
||||
build:
|
||||
os: ubuntu-22.04
|
||||
tools:
|
||||
python: "3.10"
|
||||
jobs:
|
||||
create_environment:
|
||||
- asdf plugin add uv
|
||||
- asdf install uv 0.6.6
|
||||
- asdf global uv 0.6.6
|
||||
- UV_PROJECT_ENVIRONMENT=$READTHEDOCS_VIRTUALENV_PATH uv venv
|
||||
install:
|
||||
- UV_PROJECT_ENVIRONMENT=$READTHEDOCS_VIRTUALENV_PATH uv sync --frozen --only-group=docs
|
||||
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
fail_on_warning: true
|
@@ -1 +0,0 @@
|
||||
sonar.inclusions=**/*.py,**/*.html
|
75
.travis.yml
Normal file
75
.travis.yml
Normal file
@@ -0,0 +1,75 @@
|
||||
# See https://zulip.readthedocs.io/en/latest/travis.html for
|
||||
# high-level documentation on our Travis CI setup.
|
||||
dist: trusty
|
||||
install:
|
||||
# Disable broken riak sources.list in Travis base image 2017-10-18
|
||||
- rm -vf "/etc/apt/sources.list.d/*riak*"
|
||||
|
||||
# Disable Travis CI's built-in NVM installation
|
||||
- mispipe "mv ~/.nvm ~/.travis-nvm-disabled" ts
|
||||
|
||||
# Install codecov, the library for the code coverage reporting tool we use
|
||||
# With a retry to minimize impact of transient networking errors.
|
||||
- mispipe "pip install codecov" ts || mispipe "pip install codecov" ts
|
||||
|
||||
# This is the main setup job for the test suite
|
||||
- mispipe "tools/travis/setup-$TEST_SUITE" ts
|
||||
|
||||
# Clean any caches that are not in use to avoid our cache
|
||||
# becoming huge.
|
||||
- mispipe "scripts/lib/clean-unused-caches --verbose --threshold 0" ts
|
||||
|
||||
script:
|
||||
# We unset GEM_PATH here as a hack to work around Travis CI having
|
||||
# broken running their system puppet with Ruby. See
|
||||
# https://travis-ci.org/zulip/zulip/jobs/240120991 for an example traceback.
|
||||
- unset GEM_PATH
|
||||
- mispipe "./tools/travis/$TEST_SUITE" ts
|
||||
cache:
|
||||
yarn: true
|
||||
apt: false
|
||||
directories:
|
||||
- $HOME/zulip-venv-cache
|
||||
- $HOME/zulip-npm-cache
|
||||
- $HOME/zulip-emoji-cache
|
||||
- $HOME/node
|
||||
env:
|
||||
global:
|
||||
- BOTO_CONFIG=/tmp/nowhere
|
||||
language: python
|
||||
# Our test suites generally run on Python 3.4, the version in
|
||||
# Ubuntu 14.04 trusty, which is the oldest OS release we support.
|
||||
matrix:
|
||||
include:
|
||||
# Travis will actually run the jobs in the order they're listed here;
|
||||
# that doesn't seem to be documented, but it's what we see empirically.
|
||||
# We only get 4 jobs running at a time, so we try to make the first few
|
||||
# the most likely to break.
|
||||
- python: "3.4"
|
||||
env: TEST_SUITE=frontend
|
||||
- python: "3.4"
|
||||
env: TEST_SUITE=backend
|
||||
- python: "3.4"
|
||||
env: TEST_SUITE=production
|
||||
- python: "3.5"
|
||||
env: TEST_SUITE=backend
|
||||
sudo: required
|
||||
addons:
|
||||
artifacts:
|
||||
paths:
|
||||
# Casper debugging data (screenshots, etc.) is super useful for
|
||||
# debugging test flakes.
|
||||
- $(ls var/casper/* | tr "\n" ":")
|
||||
- $(ls /tmp/zulip-test-event-log/* | tr "\n" ":")
|
||||
postgresql: "9.3"
|
||||
apt:
|
||||
packages:
|
||||
- moreutils
|
||||
after_success:
|
||||
- codecov
|
||||
notifications:
|
||||
webhooks:
|
||||
urls:
|
||||
- https://zulip.org/zulipbot/travis
|
||||
on_success: always
|
||||
on_failure: always
|
39
.tx/config
39
.tx/config
@@ -1,40 +1,15 @@
|
||||
# Migrated from transifex-client format with `tx migrate`
|
||||
#
|
||||
# See https://developers.transifex.com/docs/using-the-client which hints at
|
||||
# this format, but in general, the headings are in the format of:
|
||||
#
|
||||
# [o:<org>:p:<project>:r:<resource>]
|
||||
|
||||
[main]
|
||||
host = https://www.transifex.com
|
||||
lang_map = zh-Hans: zh_Hans
|
||||
lang_map = zh-Hans: zh_Hans, zh-Hant: zh_Hant
|
||||
|
||||
[o:zulip:p:zulip:r:djangopo]
|
||||
file_filter = locale/<lang>/LC_MESSAGES/django.po
|
||||
source_file = locale/en/LC_MESSAGES/django.po
|
||||
[zulip.djangopo]
|
||||
source_file = static/locale/en/LC_MESSAGES/django.po
|
||||
source_lang = en
|
||||
type = PO
|
||||
file_filter = static/locale/<lang>/LC_MESSAGES/django.po
|
||||
|
||||
[o:zulip:p:zulip:r:mobile]
|
||||
file_filter = locale/<lang>/mobile.json
|
||||
source_file = locale/en/mobile.json
|
||||
source_lang = en
|
||||
type = KEYVALUEJSON
|
||||
|
||||
[o:zulip:p:zulip:r:translationsjson]
|
||||
file_filter = locale/<lang>/translations.json
|
||||
source_file = locale/en/translations.json
|
||||
source_lang = en
|
||||
type = KEYVALUEJSON
|
||||
|
||||
[o:zulip:p:zulip-test:r:djangopo]
|
||||
file_filter = locale/<lang>/LC_MESSAGES/django.po
|
||||
source_file = locale/en/LC_MESSAGES/django.po
|
||||
source_lang = en
|
||||
type = PO
|
||||
|
||||
[o:zulip:p:zulip-test:r:translationsjson]
|
||||
file_filter = locale/<lang>/translations.json
|
||||
source_file = locale/en/translations.json
|
||||
[zulip.translationsjson]
|
||||
source_file = static/locale/en/translations.json
|
||||
source_lang = en
|
||||
type = KEYVALUEJSON
|
||||
file_filter = static/locale/<lang>/translations.json
|
||||
|
23
.vscode/extensions.json
vendored
23
.vscode/extensions.json
vendored
@@ -1,23 +0,0 @@
|
||||
{
|
||||
// Recommended VS Code extensions for zulip/zulip.
|
||||
//
|
||||
// VS Code prompts a user to install the recommended extensions
|
||||
// when a workspace is opened for the first time. The user can
|
||||
// also review the list with the 'Extensions: Show Recommended
|
||||
// Extensions' command. See
|
||||
// https://code.visualstudio.com/docs/editor/extension-marketplace#_workspace-recommended-extensions
|
||||
// for more information.
|
||||
//
|
||||
// Extension identifier format: ${publisher}.${name}.
|
||||
// Example: vscode.csharp
|
||||
|
||||
"recommendations": [
|
||||
"42crunch.vscode-openapi",
|
||||
"dbaeumer.vscode-eslint",
|
||||
"esbenp.prettier-vscode",
|
||||
"ms-vscode-remote.vscode-remote-extensionpack"
|
||||
],
|
||||
|
||||
// Extensions recommended by VS Code which are not recommended for users of zulip/zulip.
|
||||
"unwantedRecommendations": []
|
||||
}
|
@@ -14,46 +14,46 @@ This isn't an exhaustive list of things that you can't do. Rather, take it
|
||||
in the spirit in which it's intended --- a guide to make it easier to enrich
|
||||
all of us and the technical communities in which we participate.
|
||||
|
||||
## Expected behavior
|
||||
## Expected Behavior
|
||||
|
||||
The following behaviors are expected and requested of all community members:
|
||||
|
||||
- Participate. In doing so, you contribute to the health and longevity of
|
||||
* Participate. In doing so, you contribute to the health and longevity of
|
||||
the community.
|
||||
- Exercise consideration and respect in your speech and actions.
|
||||
- Attempt collaboration before conflict. Assume good faith.
|
||||
- Refrain from demeaning, discriminatory, or harassing behavior and speech.
|
||||
- Take action or alert community leaders if you notice a dangerous
|
||||
* Exercise consideration and respect in your speech and actions.
|
||||
* Attempt collaboration before conflict. Assume good faith.
|
||||
* Refrain from demeaning, discriminatory, or harassing behavior and speech.
|
||||
* Take action or alert community leaders if you notice a dangerous
|
||||
situation, someone in distress, or violations of this code, even if they
|
||||
seem inconsequential.
|
||||
- Community event venues may be shared with members of the public; be
|
||||
* Community event venues may be shared with members of the public; be
|
||||
respectful to all patrons of these locations.
|
||||
|
||||
## Unacceptable behavior
|
||||
## Unacceptable Behavior
|
||||
|
||||
The following behaviors are considered harassment and are unacceptable
|
||||
within the Zulip community:
|
||||
|
||||
- Jokes or derogatory language that singles out members of any race,
|
||||
* Jokes or derogatory language that singles out members of any race,
|
||||
ethnicity, culture, national origin, color, immigration status, social and
|
||||
economic class, educational level, language proficiency, sex, sexual
|
||||
orientation, gender identity and expression, age, size, family status,
|
||||
political belief, religion, and mental and physical ability.
|
||||
- Violence, threats of violence, or violent language directed against
|
||||
* Violence, threats of violence, or violent language directed against
|
||||
another person.
|
||||
- Disseminating or threatening to disseminate another person's personal
|
||||
* Disseminating or threatening to disseminate another person's personal
|
||||
information.
|
||||
- Personal insults of any sort.
|
||||
- Posting or displaying sexually explicit or violent material.
|
||||
- Inappropriate photography or recording.
|
||||
- Deliberate intimidation, stalking, or following (online or in person).
|
||||
- Unwelcome sexual attention. This includes sexualized comments or jokes,
|
||||
* Personal insults of any sort.
|
||||
* Posting or displaying sexually explicit or violent material.
|
||||
* Inappropriate photography or recording.
|
||||
* Deliberate intimidation, stalking, or following (online or in person).
|
||||
* Unwelcome sexual attention. This includes sexualized comments or jokes,
|
||||
inappropriate touching or groping, and unwelcomed sexual advances.
|
||||
- Sustained disruption of community events, including talks and
|
||||
* Sustained disruption of community events, including talks and
|
||||
presentations.
|
||||
- Advocating for, or encouraging, any of the behaviors above.
|
||||
* Advocating for, or encouraging, any of the behaviors above.
|
||||
|
||||
## Reporting and enforcement
|
||||
## Reporting and Enforcement
|
||||
|
||||
Harassment and other code of conduct violations reduce the value of the
|
||||
community for everyone. If someone makes you or anyone else feel unsafe or
|
||||
@@ -66,7 +66,7 @@ organizers may take any action they deem appropriate, up to and including a
|
||||
temporary ban or permanent expulsion from the community without warning (and
|
||||
without refund in the case of a paid event).
|
||||
|
||||
If someone outside the development community (e.g., a user of the Zulip
|
||||
If someone outside the development community (e.g. a user of the Zulip
|
||||
software) engages in unacceptable behavior that affects someone in the
|
||||
community, we still want to know. Even if we don't have direct control over
|
||||
the violator, the community organizers can still support the people
|
||||
@@ -78,7 +78,7 @@ something you can do while a violation is happening, do it. A lot of the
|
||||
harms of harassment and other violations can be mitigated by the victim
|
||||
knowing that the other people present are on their side.
|
||||
|
||||
All reports will be kept confidential. In some cases, we may determine that a
|
||||
All reports will be kept confidential. In some cases we may determine that a
|
||||
public statement will need to be made. In such cases, the identities of all
|
||||
victims and reporters will remain confidential unless those individuals
|
||||
instruct us otherwise.
|
||||
@@ -95,79 +95,11 @@ behavior occurring outside the scope of community activities when such
|
||||
behavior has the potential to adversely affect the safety and well-being of
|
||||
community members.
|
||||
|
||||
## License and attribution
|
||||
## License and Attribution
|
||||
|
||||
This Code of Conduct is adapted from the
|
||||
[Citizen Code of Conduct](http://citizencodeofconduct.org/) and the
|
||||
[Django Code of Conduct](https://www.djangoproject.com/conduct/), and is
|
||||
under a
|
||||
[Creative Commons BY-SA](https://creativecommons.org/licenses/by-sa/4.0/)
|
||||
[Creative Commons BY-SA](http://creativecommons.org/licenses/by-sa/4.0/)
|
||||
license.
|
||||
|
||||
## Moderating the Zulip community
|
||||
|
||||
Anyone can help moderate the Zulip community by helping make sure that folks are
|
||||
aware of the [community guidelines](https://zulip.com/development-community/)
|
||||
and this Code of Conduct, and that we maintain a positive and respectful
|
||||
atmosphere.
|
||||
|
||||
Here are some guidelines for you how can help:
|
||||
|
||||
- Be friendly! Welcoming folks, thanking them for their feedback, ideas and effort,
|
||||
and just trying to keep the atmosphere warm make the whole community function
|
||||
more smoothly. New participants who feel accepted, listened to and respected
|
||||
are likely to treat others the same way.
|
||||
|
||||
- Be familiar with the [community
|
||||
guidelines](https://zulip.com/development-community/), and cite them liberally
|
||||
when a user violates them. Be polite but firm. Some examples:
|
||||
|
||||
- @user please note that there is no need to @-mention @\_**Tim Abbott** when
|
||||
you ask a question. As noted in the [guidelines for this
|
||||
community](https://zulip.com/development-community/):
|
||||
|
||||
> Use @-mentions sparingly… there is generally no need to @-mention a
|
||||
> core contributor unless you need their timely attention.
|
||||
|
||||
- @user, please keep in mind the following [community
|
||||
guideline](https://zulip.com/development-community/):
|
||||
|
||||
> Don’t ask the same question in multiple places. Moderators read every
|
||||
> public stream, and make sure every question gets a reply.
|
||||
|
||||
I’ve gone ahead and moved the other copy of this message to this thread.
|
||||
|
||||
- If asked a question in a direct message that is better discussed in a public
|
||||
stream:
|
||||
> Hi @user! Please start by reviewing
|
||||
> https://zulip.com/development-community/#community-norms to learn how to
|
||||
> get help in this community.
|
||||
|
||||
- Users sometimes think chat.zulip.org is a testing instance. When this happens,
|
||||
kindly direct them to use the **#test here** stream.
|
||||
|
||||
- If you see a message that’s posted in the wrong place, go ahead and move it if
|
||||
you have permissions to do so, even if you don’t plan to respond to it.
|
||||
Leaving the “Send automated notice to new topic” option enabled helps make it
|
||||
clear what happened to the person who sent the message.
|
||||
|
||||
If you are responding to a message that's been moved, mention the user in your
|
||||
reply, so that the mention serves as a notification of the new location for
|
||||
their conversation.
|
||||
|
||||
- If a user is posting spam, please report it to an administrator. They will:
|
||||
|
||||
- Change the user's name to `<name> (spammer)` and deactivate them.
|
||||
- Delete any spam messages they posted in public streams.
|
||||
|
||||
- We care very much about maintaining a respectful tone in our community. If you
|
||||
see someone being mean or rude, point out that their tone is inappropriate,
|
||||
and ask them to communicate their perspective in a respectful way in the
|
||||
future. If you don’t feel comfortable doing so yourself, feel free to ask a
|
||||
member of Zulip's core team to take care of the situation.
|
||||
|
||||
- Try to assume the best intentions from others (given the range of
|
||||
possibilities presented by their visible behavior), and stick with a friendly
|
||||
and positive tone even when someone’s behavior is poor or disrespectful.
|
||||
Everyone has bad days and stressful situations that can result in them
|
||||
behaving not their best, and while we should be firm about our community
|
||||
rules, we should also enforce them with kindness.
|
||||
|
418
CONTRIBUTING.md
418
CONTRIBUTING.md
@@ -1,418 +0,0 @@
|
||||
# Contributing guide
|
||||
|
||||
Welcome! This is a step-by-step guide on how to get started contributing code to
|
||||
the [Zulip](https://zulip.com/) organized team chat [open-source
|
||||
project](https://github.com/zulip). Thousands of people use Zulip every day, and
|
||||
your work on Zulip will have a meaningful impact on their experience. We hope
|
||||
you'll join us!
|
||||
|
||||
To learn about ways to contribute without writing code, please see our
|
||||
suggestions for how you can [support the Zulip
|
||||
project](https://zulip.com/help/support-zulip-project).
|
||||
|
||||
## Learning from the docs
|
||||
|
||||
Zulip has a documentation-based approach to onboarding new contributors. As you
|
||||
are getting started, this page will be your go-to for figuring out what to do
|
||||
next. You will also explore other guides, learning about how to put together
|
||||
your first pull request, diving into [Zulip's
|
||||
subsystems](https://zulip.readthedocs.io/en/latest/subsystems/index.html), and
|
||||
much more. We hope you'll find this process to be a great learning experience.
|
||||
|
||||
This page will guide you through the following steps:
|
||||
|
||||
1. [Getting started](#getting-started)
|
||||
1. [Finding an issue to work on](#finding-an-issue-to-work-on)
|
||||
1. [Getting help](#getting-help) as you work on your first pull request
|
||||
1. Learning [what makes a great Zulip contributor](#what-makes-a-great-zulip-contributor)
|
||||
1. [Submitting a pull request](#submitting-a-pull-request)
|
||||
1. [Going beyond the first issue](#beyond-the-first-issue)
|
||||
|
||||
Any time you feel lost, come back to this guide. The information you need is
|
||||
likely somewhere on this page (perhaps in the list of [common
|
||||
questions](#common-questions)), or in one of the many references it points to.
|
||||
|
||||
If you've done all you can with the documentation and are still feeling stuck,
|
||||
join the [Zulip development community](https://zulip.com/development-community/)
|
||||
to ask for help! Before you post, be sure to review [community
|
||||
norms](https://zulip.com/development-community/#community-norms) and [where to
|
||||
post](https://zulip.com/development-community/#where-do-i-send-my-message) your
|
||||
question. The Zulip community is governed by a [code of
|
||||
conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html).
|
||||
|
||||
## Getting started
|
||||
|
||||
### Learning how to use Git (the Zulip way)
|
||||
|
||||
Zulip uses GitHub for source control and code review, and becoming familiar with
|
||||
Git is essential for navigating and contributing to the Zulip codebase. [Our
|
||||
guide to Git](https://zulip.readthedocs.io/en/latest/git/index.html) will help
|
||||
you get started even if you've never used Git before.
|
||||
|
||||
If you're familiar with Git, you'll still want to take a look at [our
|
||||
Zulip-specific Git
|
||||
tools](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html).
|
||||
|
||||
### Setting up your development environment and diving in
|
||||
|
||||
To get started contributing code to Zulip, you will need to set up the
|
||||
development environment for the Zulip codebase you want to work on. You'll then
|
||||
want to take some time to familiarize yourself with the code.
|
||||
|
||||
#### Server and web app
|
||||
|
||||
1. [Install the development
|
||||
environment](https://zulip.readthedocs.io/en/latest/development/overview.html).
|
||||
1. Familiarize yourself with [using the development
|
||||
environment](https://zulip.readthedocs.io/en/latest/development/using.html).
|
||||
1. Go through the [new application feature
|
||||
tutorial](https://zulip.readthedocs.io/en/latest/tutorials/new-feature-tutorial.html)
|
||||
to get familiar with how the Zulip codebase is organized and how to find code
|
||||
in it.
|
||||
|
||||
#### Flutter-based mobile app
|
||||
|
||||
1. Set up a development environment following the instructions in [the project
|
||||
README](https://github.com/zulip/zulip-flutter).
|
||||
1. Start reading recent commits to see the code we're writing.
|
||||
Use either a [graphical Git viewer][] like `gitk`, or `git log -p`
|
||||
with [the "secret" to reading its output][git-log-secret].
|
||||
1. Pick some of the code that appears in those Git commits and that looks
|
||||
interesting. Use your IDE to visit that code and to navigate to related code,
|
||||
reading to see how it works and how the codebase is organized.
|
||||
|
||||
[graphical Git viewer]: https://zulip.readthedocs.io/en/latest/git/setup.html#get-a-graphical-client
|
||||
[git-log-secret]: https://github.com/zulip/zulip-mobile/blob/main/docs/howto/git.md#git-log-secret
|
||||
|
||||
#### Desktop app
|
||||
|
||||
Follow [this
|
||||
documentation](https://github.com/zulip/zulip-desktop/blob/main/development.md)
|
||||
to set up the Zulip Desktop development environment.
|
||||
|
||||
#### Terminal app
|
||||
|
||||
Follow [this
|
||||
documentation](https://github.com/zulip/zulip-terminal?tab=readme-ov-file#setting-up-a-development-environment)
|
||||
to set up the Zulip Terminal development environment.
|
||||
|
||||
## Finding an issue to work on
|
||||
|
||||
### Where to look for an issue
|
||||
|
||||
Now you're ready to pick your first issue! Zulip has several repositories you
|
||||
can check out, depending on your interests. There are hundreds of open issues in
|
||||
the [main Zulip server and web app
|
||||
repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||
alone.
|
||||
|
||||
You can look through issues tagged with the "help wanted" label, which is used
|
||||
to indicate the issues that are open for contributions. You'll be able to claim
|
||||
unassigned issues, which you can find using the `no:assignee` filter in GitHub.
|
||||
You can also pick up issues that are assigned but are no longer being worked on.
|
||||
|
||||
Some repositories use the "good first issue" label to tag issues that are
|
||||
especially approachable for new contributors.
|
||||
|
||||
Here are some handy links for issues to look through:
|
||||
|
||||
- [Server and web app](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||
- Mobile apps: no "help wanted" label, but see the
|
||||
[project board](https://github.com/orgs/zulip/projects/5/views/4)
|
||||
for the upcoming Flutter-based app. Look for issues up through the
|
||||
"Launch" milestone, and that aren't already assigned.
|
||||
- [Desktop app](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||
- [Terminal app](https://github.com/zulip/zulip-terminal/issues?q=is%3Aopen+is%3Aissue+label%3A"help+wanted")
|
||||
- [Python API bindings and bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||
|
||||
### Picking an issue to work on
|
||||
|
||||
There's a lot to learn while making your first pull request, so start small!
|
||||
Many first contributions have fewer than 10 lines of changes (not counting
|
||||
changes to tests).
|
||||
|
||||
We recommend the following process for finding an issue to work on:
|
||||
|
||||
1. Find an issue tagged with the "help wanted" label that is either unassigned,
|
||||
or looks to be abandoned.
|
||||
1. Read the description of the issue and make sure you understand it.
|
||||
1. If it seems promising, poke around the product
|
||||
(on [chat.zulip.org](https://chat.zulip.org) or in the development
|
||||
environment) until you know how the piece being
|
||||
described fits into the bigger picture. If after some exploration the
|
||||
description seems confusing or ambiguous, post a question on the GitHub
|
||||
issue, as others may benefit from the clarification as well.
|
||||
1. When you find an issue you like, try to get started working on it. See if you
|
||||
can find the part of the code you'll need to modify (`git grep` is your
|
||||
friend!) and get some idea of how you'll approach the problem.
|
||||
1. If you feel lost, that's OK! Go through these steps again with another issue.
|
||||
There's plenty to work on, and the exploration you do will help you learn
|
||||
more about the project.
|
||||
|
||||
An assigned issue can be considered abandoned if:
|
||||
|
||||
- There is no recent contributor activity.
|
||||
- There are no open PRs, or an open PR needs work in order to be ready for
|
||||
review. For example, a PR may need to be updated to address reviewer feedback
|
||||
or to pass tests.
|
||||
|
||||
Note that you are _not_ claiming an issue while you are iterating through steps
|
||||
1-4. _Before you claim an issue_, you should be confident that you will be able to
|
||||
tackle it effectively.
|
||||
|
||||
Additional tips for the [main server and web app
|
||||
repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22):
|
||||
|
||||
- We especially recommend browsing recently opened issues, as there are more
|
||||
likely to be easy ones for you to find.
|
||||
- Take a look at issues with the ["good first issue"
|
||||
label](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22),
|
||||
as they are especially accessible to new contributors. However, you will
|
||||
likely find issues without this label that are accessible as well.
|
||||
- All issues are partitioned into areas like
|
||||
admin, compose, emoji, hotkeys, i18n, onboarding, search, etc. Look
|
||||
through our [list of labels](https://github.com/zulip/zulip/labels), and
|
||||
click on some of the `area:` labels to see all the issues related to your
|
||||
areas of interest.
|
||||
- Avoid issues with the "difficult" label unless you
|
||||
understand why it is difficult and are highly confident you can resolve the
|
||||
issue correctly and completely.
|
||||
|
||||
### Claiming an issue
|
||||
|
||||
#### In the main server/web app repository and Zulip Terminal repository
|
||||
|
||||
The Zulip server/web app repository
|
||||
([`zulip/zulip`](https://github.com/zulip/zulip/)) and the Zulip Terminal
|
||||
repository ([`zulip/zulip-terminal`](https://github.com/zulip/zulip-terminal/))
|
||||
are set up with a GitHub workflow bot called
|
||||
[Zulipbot](https://github.com/zulip/zulipbot), which manages issues and pull
|
||||
requests in order to create a better workflow for Zulip contributors.
|
||||
|
||||
To claim an issue in these repositories, simply post a comment that says
|
||||
`@zulipbot claim` to the issue thread. If the issue is [tagged with a help
|
||||
wanted label and is not assigned to someone
|
||||
else](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22+no%3Aassignee),
|
||||
Zulipbot will immediately assign the issue to you.
|
||||
|
||||
Note that new contributors can only claim one issue until their first pull request is
|
||||
merged. This is to encourage folks to finish ongoing work before starting
|
||||
something new. If you would like to pick up a new issue while waiting for review
|
||||
on an almost-ready pull request, you can post a comment to this effect on the
|
||||
issue you're interested in.
|
||||
|
||||
#### In other Zulip repositories
|
||||
|
||||
There is no bot for other Zulip repositories
|
||||
([`zulip/zulip-flutter`](https://github.com/zulip/zulip-flutter/), etc.). If
|
||||
you are interested in claiming an issue in one of these repositories, simply
|
||||
post a comment on the issue thread saying that you've started work on the
|
||||
issue and would like to claim it. In your comment, describe what part of the
|
||||
code you're modifying and how you plan to approach the problem, based on
|
||||
what you learned in steps 1–4 [above](#picking-an-issue-to-work-on).
|
||||
|
||||
There is no need to @-mention the issue creator in your comment. There is
|
||||
also no need to post the same information in multiple places, for example in
|
||||
a chat thread in addition to the GitHub issue.
|
||||
|
||||
Please follow the same guidelines as described above: find an issue labeled
|
||||
"help wanted", and only pick up one issue at a time to start with.
|
||||
|
||||
## Getting help
|
||||
|
||||
You may have questions as you work on your pull request. For example, you might
|
||||
not be sure about some details of what's required, or have questions about your
|
||||
implementation approach. Zulip's maintainers are happy to answer thoughtfully
|
||||
posed questions, and discuss any difficulties that might arise as you work on
|
||||
your PR.
|
||||
|
||||
If you haven't done so yet, now is the time to join the [Zulip development
|
||||
community](https://zulip.com/development-community/). If you'd like, introduce
|
||||
yourself in the [#new
|
||||
members](https://chat.zulip.org/#narrow/channel/95-new-members) channel, using
|
||||
your name as the [topic](https://zulip.com/help/introduction-to-topics).
|
||||
|
||||
You can get help in public channels in the community:
|
||||
|
||||
1. **Review** the [Zulip development community
|
||||
guidelines](https://zulip.com/development-community/#community-norms).
|
||||
|
||||
1. **Decide where to post.** If there is a discussion thread linked from the
|
||||
issue you're working on, that's usually the best place to post any
|
||||
clarification questions about the issue. Otherwise, follow [these
|
||||
guidelines](https://zulip.com/development-community/#where-do-i-send-my-message)
|
||||
to figure out where to post your question. Don’t stress too much about
|
||||
picking the right place if you’re not sure, as moderators can [move your
|
||||
question thread to a different
|
||||
channel](https://zulip.com/help/move-content-to-another-channel) if needed.
|
||||
|
||||
1. **Write** up your question, being sure to follow our [guide on asking great
|
||||
questions](https://zulip.readthedocs.io/en/latest/contributing/asking-great-questions.html).
|
||||
The guide explains what you need to do make sure that folks will be able to
|
||||
help you out, and that you're making good use of maintainers' limited time.
|
||||
|
||||
1. **Review** your message before you send it. Will your question make sense to
|
||||
someone who is familiar with Zulip, but might not have the details of what
|
||||
you are working on fresh in mind?
|
||||
|
||||
Well-posed questions will generally get a response within 1-2 business days.
|
||||
There is no need to @-mention anyone when you ask a question, as maintainers
|
||||
keep a close eye on all the ongoing discussions.
|
||||
|
||||
## What makes a great Zulip contributor?
|
||||
|
||||
As you're working on your first code contribution, here are some best practices
|
||||
to keep in mind.
|
||||
|
||||
- [Asking great questions][great-questions]. It's very hard to answer a general
|
||||
question like, "How do I do this issue?" When asking for help, explain your
|
||||
current understanding, including what you've done or tried so far and where
|
||||
you got stuck. Post tracebacks or other error messages if appropriate. For
|
||||
more advice, check out [our guide][great-questions]!
|
||||
- Learning and practicing
|
||||
[Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html).
|
||||
- Submitting carefully tested code. See our [detailed guide on how to review
|
||||
code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code)
|
||||
(yours or someone else's).
|
||||
- Posting
|
||||
[screenshots or GIFs](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html)
|
||||
for frontend changes.
|
||||
- Working to [make your pull requests easy to
|
||||
review](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html).
|
||||
- Clearly describing what you have implemented and why. For example, if your
|
||||
implementation differs from the issue description in some way or is a partial
|
||||
step towards the requirements described in the issue, be sure to call
|
||||
out those differences.
|
||||
- Being responsive to feedback on pull requests. This means incorporating or
|
||||
responding to all suggested changes, and leaving a note if you won't be
|
||||
able to address things within a few days.
|
||||
- Being helpful and friendly on the [Zulip community
|
||||
server](https://zulip.com/development-community/).
|
||||
|
||||
[great-questions]: https://zulip.readthedocs.io/en/latest/contributing/asking-great-questions.html
|
||||
|
||||
## Submitting a pull request
|
||||
|
||||
See the [guide on submitting a pull
|
||||
request](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html)
|
||||
for detailed instructions on how to present your proposed changes to Zulip.
|
||||
|
||||
The [pull request review process
|
||||
guide](https://zulip.readthedocs.io/en/latest/contributing/review-process.html)
|
||||
explains the stages of review your PR will go through, and offers guidance on
|
||||
how to help the review process move forward.
|
||||
|
||||
It's OK if your first issue takes you a while; that's normal! You'll be able to
|
||||
work a lot faster as you build experience.
|
||||
|
||||
## Beyond the first issue
|
||||
|
||||
To find a second issue to work on, we recommend looking through issues with the same
|
||||
`area:` label as the last issue you resolved. You'll be able to reuse the
|
||||
work you did learning how that part of the codebase works. Also, the path to
|
||||
becoming a core developer often involves taking ownership of one of these area
|
||||
labels.
|
||||
|
||||
## Common questions
|
||||
|
||||
- **What if somebody is already working on the issue I want to claim?** There
|
||||
are lots of issues to work on (likely
|
||||
[hundreds](https://github.com/zulip/zulip/issues?q=is%3Aissue%20state%3Aopen%20label%3A%22help%20wanted%22%20no%3Aassignee)
|
||||
in the server repository)! If somebody else is actively working on the issue,
|
||||
you can find a different one, or help with reviewing their work.
|
||||
|
||||
- **What if it looks like the person who's assigned an issue is no longer
|
||||
working on it?** Post a comment on the issue, e.g., "Hi @ someone! Are you
|
||||
still working on this one? I'd like to pick it up if not." You can pick up the
|
||||
issue if they say they don't plan to work on it more.
|
||||
|
||||
- **What if I don't get a response?** If you don't get a reply within 2-3
|
||||
days, go ahead and post a comment that you are working on the issue, and
|
||||
submit a pull request. If the original assignee ends up submitting a pull
|
||||
request first, no worries! You can help by providing feedback on their work,
|
||||
or submit your own PR if you think a different approach is needed (as
|
||||
described above).
|
||||
|
||||
- **What if there is already a pull request for the issue I want to work on?**
|
||||
See our [guide on continuing unfinished
|
||||
work](https://zulip.readthedocs.io/en/latest/contributing/continuing-unfinished-work.html).
|
||||
|
||||
- **What if somebody else claims an issue while I'm figuring out whether or not to
|
||||
work on it?** No worries! You can contribute by providing feedback on
|
||||
their pull request. If you've made good progress in understanding part of the
|
||||
codebase, you can also find another "help wanted" issue in the same area to
|
||||
work on.
|
||||
|
||||
- **Can I work on an old issue?** Of course! Open issues marked as “help wanted”
|
||||
are generally eligible to be worked on. If you find that the context around
|
||||
the issue has changed (e.g., the UI looks different), do your best to apply
|
||||
the current patterns, and comment on any differences from the spec in your PR
|
||||
description.
|
||||
|
||||
If picking up a bug, start by checking if you can replicate it. If it no longer
|
||||
replicates, post a comment on the issue explaining how you tested the
|
||||
behavior, and what you saw, with screenshots as appropriate. And if you _can_
|
||||
replicate it, fixing it is great!
|
||||
|
||||
If you're starting a major project where the issue was filed more than a
|
||||
couple of years ago, it's a good idea to post to the development community
|
||||
discussion thread for that issue to check if the thinking around it has
|
||||
changed.
|
||||
|
||||
- **Can I come up with my own feature idea and work on it?** We welcome
|
||||
suggestions of features or other improvements that you feel would be valuable. If you
|
||||
have a new feature you'd like to add, you can start a conversation [in our
|
||||
development community](https://zulip.com/development-community/#where-do-i-send-my-message)
|
||||
explaining the feature idea and the problem that you're hoping to solve.
|
||||
- **I'm waiting for the next round of review on my PR. Can I pick up
|
||||
another issue in the meantime?** Someone's first Zulip PR often
|
||||
requires quite a bit of iteration, so please [make sure your pull
|
||||
request is reviewable][reviewable-pull-requests] and go through at
|
||||
least one round of feedback from others before picking up a second
|
||||
issue. After that, sure! If
|
||||
[Zulipbot](https://github.com/zulip/zulipbot) does not allow you to
|
||||
claim an issue, you can post a comment describing the status of your
|
||||
other work on the issue you're interested in (including links to all open
|
||||
PRs), and asking for the issue to be assigned to you. Note that addressing
|
||||
feedback on in-progress PRs should always take priority over starting a new
|
||||
PR.
|
||||
- **I think my PR is done, but it hasn't been merged yet. What's going on?**
|
||||
1. **Double-check that you have addressed all the feedback**, including any comments
|
||||
on [Git commit
|
||||
discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html),
|
||||
and that automated tests are passing.
|
||||
2. If all the feedback has been addressed, did you [leave a
|
||||
comment](https://zulip.readthedocs.io/en/latest/contributing/review-process.html#how-to-help-move-the-review-process-forward)
|
||||
explaining that you have done so and **requesting another review**? If not,
|
||||
it may not be clear to project maintainers or reviewers that your PR is
|
||||
ready for another look.
|
||||
3. There may be a pause between initial rounds of review for your PR and final
|
||||
review by project maintainers. This is normal, and we encourage you to **work
|
||||
on other issues** while you wait.
|
||||
4. If you think the PR is ready and haven't seen any updates for a couple
|
||||
of weeks, it can be helpful to **leave another comment**. Summarize the
|
||||
overall state of the review process and your work, and indicate that you
|
||||
are waiting for a review.
|
||||
5. Finally, **Zulip project maintainers are people too**! They may be busy
|
||||
with other work, and sometimes they might even take a vacation. ;) It can
|
||||
occasionally take a few weeks for a PR in the final stages of the review
|
||||
process to be merged.
|
||||
|
||||
[reviewable-pull-requests]: https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html
|
||||
|
||||
## Outreach programs
|
||||
|
||||
Zulip regularly participates in [Google Summer of Code
|
||||
(GSoC)](https://developers.google.com/open-source/gsoc/) and
|
||||
[Outreachy](https://www.outreachy.org/). We have been a GSoC mentoring
|
||||
organization since 2016, and we accept 15-20 GSoC participants each summer. In
|
||||
the past, we’ve also participated in [Google
|
||||
Code-In](https://developers.google.com/open-source/gci/), and hosted summer
|
||||
interns from Harvard, MIT, and Stanford.
|
||||
|
||||
Check out our [outreach programs
|
||||
overview](https://zulip.readthedocs.io/en/latest/outreach/overview.html) to learn
|
||||
more about participating in an outreach program with Zulip. Most of our program
|
||||
participants end up sticking around the project long-term, and many have become
|
||||
core team members, maintaining important parts of the project. We hope you
|
||||
apply!
|
17
Dockerfile-dev
Normal file
17
Dockerfile-dev
Normal file
@@ -0,0 +1,17 @@
|
||||
FROM ubuntu:trusty
|
||||
|
||||
EXPOSE 9991
|
||||
|
||||
RUN apt-get update && apt-get install -y wget
|
||||
|
||||
RUN locale-gen en_US.UTF-8
|
||||
|
||||
RUN useradd -d /home/zulip -m zulip && echo 'zulip ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
|
||||
|
||||
USER zulip
|
||||
|
||||
RUN ln -nsf /srv/zulip ~/zulip
|
||||
|
||||
RUN echo 'export LC_ALL="en_US.UTF-8" LANG="en_US.UTF-8" LANGUAGE="en_US.UTF-8"' >> ~zulip/.bashrc
|
||||
|
||||
WORKDIR /srv/zulip
|
@@ -1,25 +0,0 @@
|
||||
# This is a multiarch Dockerfile. See https://docs.docker.com/desktop/multi-arch/
|
||||
#
|
||||
# To set up the first time:
|
||||
# docker buildx create --name multiarch --use
|
||||
#
|
||||
# To build:
|
||||
# docker buildx build --platform linux/amd64,linux/arm64 \
|
||||
# -f ./Dockerfile-postgresql -t zulip/zulip-postgresql:14 --push .
|
||||
|
||||
# Currently the PostgreSQL images do not support automatic upgrading of
|
||||
# the on-disk data in volumes. So the base image cannot currently be upgraded
|
||||
# without users needing a manual pgdump and restore.
|
||||
|
||||
# https://hub.docker.com/r/groonga/pgroonga/tags
|
||||
ARG PGROONGA_VERSION=latest
|
||||
ARG POSTGRESQL_VERSION=14
|
||||
FROM groonga/pgroonga:$PGROONGA_VERSION-alpine-$POSTGRESQL_VERSION-slim
|
||||
|
||||
# Install hunspell, Zulip stop words, and run Zulip database
|
||||
# init.
|
||||
RUN apk add -U --no-cache hunspell-en
|
||||
RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix
|
||||
COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/local/share/postgresql/tsearch_data/zulip_english.stop
|
||||
COPY scripts/setup/create-db.sql /docker-entrypoint-initdb.d/zulip-create-db.sql
|
||||
COPY scripts/setup/create-pgroonga.sql /docker-entrypoint-initdb.d/zulip-create-pgroonga.sql
|
18
NOTICE
18
NOTICE
@@ -1,18 +0,0 @@
|
||||
Copyright 2012–2015 Dropbox, Inc., 2015–2021 Kandra Labs, Inc., and contributors
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this project except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
The software includes some works released by third parties under other
|
||||
free and open source licenses. Those works are redistributed under the
|
||||
license terms under which the works were received. For more details,
|
||||
see the ``docs/THIRDPARTY`` file included with this distribution.
|
349
README.md
349
README.md
@@ -1,82 +1,301 @@
|
||||
**[Zulip overview](#zulip-overview)** |
|
||||
**[Community](#community)** |
|
||||
**[Installing for dev](#installing-the-zulip-development-environment)** |
|
||||
**[Installing for production](#running-zulip-in-production)** |
|
||||
**[Ways to contribute](#ways-to-contribute)** |
|
||||
**[How to get involved](#how-to-get-involved-with-contributing-to-zulip)** |
|
||||
**[License](#license)**
|
||||
|
||||
# Zulip overview
|
||||
|
||||
[Zulip](https://zulip.com) is an open-source team collaboration tool with unique
|
||||
[topic-based threading][why-zulip] that combines the best of email and chat to
|
||||
make remote work productive and delightful. Fortune 500 companies, [leading open
|
||||
source projects][rust-case-study], and thousands of other organizations use
|
||||
Zulip every day. Zulip is the only [modern team chat app][features] that is
|
||||
designed for both live and asynchronous conversations.
|
||||
Zulip is a powerful, open source group chat application. Written in
|
||||
Python and using the Django framework, Zulip supports both private
|
||||
messaging and group chats via conversation streams.
|
||||
|
||||
Zulip is built by a distributed community of developers from all around the
|
||||
world, with 74+ people who have each contributed 100+ commits. With
|
||||
over 1000 contributors merging over 500 commits a month, Zulip is the
|
||||
largest and fastest growing open source team chat project.
|
||||
Zulip also supports fast search, drag-and-drop file uploads, image
|
||||
previews, group private messages, audible notifications,
|
||||
missed-message emails, desktop apps, and much more.
|
||||
|
||||
Come find us on the [development community chat](https://zulip.com/development-community/)!
|
||||
Further information on the Zulip project and its features can be found
|
||||
at <https://www.zulip.org>.
|
||||
|
||||
[](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amain)
|
||||
[](https://codecov.io/gh/zulip/zulip)
|
||||
[][mypy-coverage]
|
||||
[](https://github.com/astral-sh/ruff)
|
||||
[](https://github.com/prettier/prettier)
|
||||
[](https://github.com/zulip/zulip/releases/latest)
|
||||
[](https://zulip.readthedocs.io/en/latest/)
|
||||
[](https://travis-ci.org/zulip/zulip)
|
||||
[](https://codecov.io/gh/zulip/zulip)
|
||||
[](http://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/)
|
||||
[](http://zulip.readthedocs.io/en/latest/)
|
||||
[](https://chat.zulip.org)
|
||||
[](https://twitter.com/zulip)
|
||||
[](https://github.com/sponsors/zulip)
|
||||
[](http://twitter.com/zulip)
|
||||
|
||||
[mypy-coverage]: https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/
|
||||
[why-zulip]: https://zulip.com/why-zulip/
|
||||
[rust-case-study]: https://zulip.com/case-studies/rust/
|
||||
[features]: https://zulip.com/features/
|
||||
## Community
|
||||
|
||||
## Getting started
|
||||
There are several places online where folks discuss Zulip.
|
||||
|
||||
- **Contributing code**. Check out our [guide for new
|
||||
contributors](https://zulip.readthedocs.io/en/latest/contributing/contributing.html)
|
||||
to get started. We have invested in making Zulip’s code highly
|
||||
readable, thoughtfully tested, and easy to modify. Beyond that, we
|
||||
have written an extraordinary 150K words of documentation for Zulip
|
||||
contributors.
|
||||
* The primary place is the
|
||||
[Zulip development community Zulip server][czo-doc] at
|
||||
chat.zulip.org.
|
||||
|
||||
- **Contributing non-code**. [Report an
|
||||
issue](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#reporting-issues),
|
||||
[translate](https://zulip.readthedocs.io/en/latest/translating/translating.html)
|
||||
Zulip into your language, or [give us
|
||||
feedback](https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html).
|
||||
We'd love to hear from you, whether you've been using Zulip for years, or are just
|
||||
trying it out for the first time.
|
||||
* For Google Summer of Code students and applicants, we have
|
||||
[a mailing list](https://groups.google.com/forum/#!forum/zulip-gsoc)
|
||||
for help, questions, and announcements. But it's often simpler to
|
||||
[visit chat.zulip.org][czo-doc] instead.
|
||||
|
||||
- **Checking Zulip out**. The best way to see Zulip in action is to drop by the
|
||||
[Zulip community server](https://zulip.com/development-community/). We also
|
||||
recommend reading about Zulip's [unique
|
||||
approach](https://zulip.com/why-zulip/) to organizing conversations.
|
||||
* We have a [public development discussion mailing list][zulip-devel],
|
||||
zulip-devel, which is currently pretty low traffic because most
|
||||
discussions happen in our public Zulip instance. We use it to
|
||||
announce Zulip developer community gatherings and ask for feedback on
|
||||
major technical or design decisions. It has several hundred
|
||||
subscribers, so you can use it to ask questions about features or
|
||||
possible bugs, but please don't use it to ask for generic help getting
|
||||
started as a contributor (e.g. because you want to do Google Summer of
|
||||
Code). The rest of this page covers how to get involved in the Zulip
|
||||
project in detail.
|
||||
|
||||
- **Running a Zulip server**. Self-host Zulip directly on Ubuntu or Debian
|
||||
Linux, in [Docker](https://github.com/zulip/docker-zulip), or with prebuilt
|
||||
images for [Digital Ocean](https://marketplace.digitalocean.com/apps/zulip) and
|
||||
[Render](https://render.com/docs/deploy-zulip).
|
||||
Learn more about [self-hosting Zulip](https://zulip.com/self-hosting/).
|
||||
* Zulip also has a [blog](https://blog.zulip.org/) and
|
||||
[twitter account](https://twitter.com/zulip).
|
||||
|
||||
- **Using Zulip without setting up a server**. Learn about [Zulip
|
||||
Cloud](https://zulip.com/plans/) hosting options. Zulip sponsors free [Zulip
|
||||
Cloud Standard](https://zulip.com/plans/) for hundreds of worthy
|
||||
organizations, including [fellow open-source
|
||||
projects](https://zulip.com/for/open-source/).
|
||||
* Last but not least, we use [GitHub](https://github.com/zulip/zulip)
|
||||
to track Zulip-related issues (and store our code, of course).
|
||||
Anybody with a GitHub account should be able to create Issues there
|
||||
pertaining to bugs or enhancement requests. We also use Pull Requests
|
||||
as our primary mechanism to receive code contributions.
|
||||
|
||||
- **Participating in [outreach
|
||||
programs](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#outreach-programs)**
|
||||
like [Google Summer of Code](https://developers.google.com/open-source/gsoc/)
|
||||
and [Outreachy](https://www.outreachy.org/).
|
||||
The Zulip community has a [Code of Conduct][code-of-conduct].
|
||||
|
||||
- **Supporting Zulip**. Advocate for your organization to use Zulip, become a
|
||||
[sponsor](https://github.com/sponsors/zulip), write a review in the mobile app
|
||||
stores, or [help others find
|
||||
Zulip](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#help-others-find-zulip).
|
||||
[zulip-devel]: https://groups.google.com/forum/#!forum/zulip-devel
|
||||
|
||||
You may also be interested in reading our [blog](https://blog.zulip.org/), and
|
||||
following us on [Twitter](https://twitter.com/zulip) and
|
||||
[LinkedIn](https://www.linkedin.com/company/zulip-project/).
|
||||
## Installing the Zulip Development environment
|
||||
|
||||
Zulip is distributed under the
|
||||
[Apache 2.0](https://github.com/zulip/zulip/blob/main/LICENSE) license.
|
||||
The Zulip development environment is the recommended option for folks
|
||||
interested in trying out Zulip, since it is very easy to install.
|
||||
This is documented in [the developer installation guide][dev-install].
|
||||
|
||||
## Running Zulip in production
|
||||
|
||||
Zulip in production supports Ubuntu 16.04 Xenial and Ubuntu 14.04
|
||||
Trusty. We're happy to support work to enable Zulip to run on
|
||||
additional platforms. The installation process is
|
||||
[documented here](https://zulip.readthedocs.io/en/latest/prod.html).
|
||||
|
||||
## Ways to contribute
|
||||
|
||||
Zulip welcomes all forms of contributions! This page documents the
|
||||
Zulip development process.
|
||||
|
||||
* **Pull requests**. Before a pull request can be merged, you need to
|
||||
sign the [Dropbox Contributor License Agreement][cla]. Also,
|
||||
please skim our [commit message style guidelines][doc-commit-style].
|
||||
We encourage early pull requests for work in progress. Prefix the title
|
||||
of your pull request with `[WIP]` and reference it when asking for
|
||||
community feedback. When you are ready for final review, remove
|
||||
the `[WIP]`.
|
||||
|
||||
* **Testing**. The Zulip automated tests all run automatically when
|
||||
you submit a pull request, but you can also run them all in your
|
||||
development environment following the instructions in the [testing
|
||||
docs][doc-test]. You can also try out [our new desktop
|
||||
client][electron], which is in alpha; we'd appreciate testing and
|
||||
[feedback](https://github.com/zulip/zulip-electron/issues/new).
|
||||
|
||||
* **Developer Documentation**. Zulip has a growing collection of
|
||||
developer documentation on [Read The Docs][doc]. Recommended reading
|
||||
for new contributors includes the [directory structure][doc-dirstruct]
|
||||
and [new feature tutorial][doc-newfeat]. You can also improve
|
||||
[Zulip.org][z-org].
|
||||
|
||||
* **Mailing lists and bug tracker**. Zulip has a [development
|
||||
discussion mailing list](#community) and uses [GitHub issues
|
||||
][gh-issues]. There are also lists for the [Android][email-android]
|
||||
and [iOS][email-ios] apps. Feel free to send any questions or
|
||||
suggestions of areas where you'd love to see more documentation to the
|
||||
relevant list! Check out our [bug report guidelines][bug-report]
|
||||
before submitting. Please report any security issues you discover to
|
||||
zulip-security@googlegroups.com.
|
||||
|
||||
* **App codebases**. This repository is for the Zulip server and web
|
||||
app (including most integrations). The
|
||||
[beta React Native mobile app][mobile], [Java Android app][Android]
|
||||
(see [our mobile strategy][mobile-strategy]),
|
||||
[new Electron desktop app][electron], and
|
||||
[legacy Qt-based desktop app][desktop] are all separate repositories.
|
||||
|
||||
* **Glue code**. We maintain a [Hubot adapter][hubot-adapter] and several
|
||||
integrations ([Phabricator][phab], [Jenkins][], [Puppet][], [Redmine][],
|
||||
and [Trello][]), plus [node.js API bindings][node], an [isomorphic
|
||||
JavaScript library][zulip-js], and a [full-text search PostgreSQL
|
||||
extension][tsearch], as separate repos.
|
||||
|
||||
* **Translations**. Zulip is in the process of being translated into
|
||||
10+ languages, and we love contributions to our translations. See our
|
||||
[translating documentation][transifex] if you're interested in
|
||||
contributing!
|
||||
|
||||
* **Code Reviews**. Zulip is all about community and helping each
|
||||
other out. Check out [#code review][code-review] on
|
||||
[chat.zulip.org][czo-doc] to help review PRs and give comments on
|
||||
other people's work. Everyone is welcome to participate, even those
|
||||
new to Zulip! Even just checking out the code, manually testing it,
|
||||
and posting on whether or not it worked is valuable.
|
||||
|
||||
[cla]: https://opensource.dropbox.com/cla/
|
||||
[code-of-conduct]: https://zulip.readthedocs.io/en/latest/code-of-conduct.html
|
||||
[dev-install]: https://zulip.readthedocs.io/en/latest/dev-overview.html
|
||||
[doc]: https://zulip.readthedocs.io/
|
||||
[doc-commit-style]: http://zulip.readthedocs.io/en/latest/version-control.html#commit-messages
|
||||
[doc-dirstruct]: http://zulip.readthedocs.io/en/latest/directory-structure.html
|
||||
[doc-newfeat]: http://zulip.readthedocs.io/en/latest/new-feature-tutorial.html
|
||||
[doc-test]: http://zulip.readthedocs.io/en/latest/testing.html
|
||||
[electron]: https://github.com/zulip/zulip-electron
|
||||
[gh-issues]: https://github.com/zulip/zulip/issues
|
||||
[desktop]: https://github.com/zulip/zulip-desktop
|
||||
[android]: https://github.com/zulip/zulip-android
|
||||
[mobile]: https://github.com/zulip/zulip-mobile
|
||||
[mobile-strategy]: https://github.com/zulip/zulip-android/blob/master/android-strategy.md
|
||||
[email-android]: https://groups.google.com/forum/#!forum/zulip-android
|
||||
[email-ios]: https://groups.google.com/forum/#!forum/zulip-ios
|
||||
[hubot-adapter]: https://github.com/zulip/hubot-zulip
|
||||
[jenkins]: https://github.com/zulip/zulip-jenkins-plugin
|
||||
[node]: https://github.com/zulip/zulip-node
|
||||
[zulip-js]: https://github.com/zulip/zulip-js
|
||||
[phab]: https://github.com/zulip/phabricator-to-zulip
|
||||
[puppet]: https://github.com/matthewbarr/puppet-zulip
|
||||
[redmine]: https://github.com/zulip/zulip-redmine-plugin
|
||||
[trello]: https://github.com/zulip/trello-to-zulip
|
||||
[tsearch]: https://github.com/zulip/tsearch_extras
|
||||
[transifex]: https://zulip.readthedocs.io/en/latest/translating.html#testing-translations
|
||||
[z-org]: https://github.com/zulip/zulip.github.io
|
||||
[code-review]: https://chat.zulip.org/#narrow/stream/code.20review
|
||||
[bug-report]: http://zulip.readthedocs.io/en/latest/bug-reports.html
|
||||
|
||||
## Google Summer of Code
|
||||
|
||||
We participated in
|
||||
[GSoC](https://developers.google.com/open-source/gsoc/) in 2016 (with
|
||||
[great results](https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/))
|
||||
and [are participating](https://github.com/zulip/zulip.github.io/blob/master/gsoc-ideas.md)
|
||||
in 2017 as well.
|
||||
|
||||
## How to get involved with contributing to Zulip
|
||||
|
||||
First, subscribe to the Zulip [development discussion mailing
|
||||
list](#community).
|
||||
|
||||
The Zulip project uses a system of labels in our [issue
|
||||
tracker][gh-issues] to make it easy to find a project if you don't
|
||||
have your own project idea in mind or want to get some experience with
|
||||
working on Zulip before embarking on a larger project you have in
|
||||
mind:
|
||||
|
||||
* [Integrations](https://github.com/zulip/zulip/labels/area%3A%20integrations).
|
||||
Integrate Zulip with another piece of software and contribute it
|
||||
back to the community! Writing an integration can be a great first
|
||||
contribution. There's detailed documentation on how to write
|
||||
integrations in [the Zulip integration writing
|
||||
guide](https://zulip.readthedocs.io/en/latest/integration-guide.html).
|
||||
|
||||
* [Good first issue](https://github.com/zulip/zulip/labels/good%20first%20issue):
|
||||
Smaller projects that might be a great first contribution.
|
||||
|
||||
* [Documentation](https://github.com/zulip/zulip/labels/area%3A%20documentation):
|
||||
The Zulip project loves contributions of new documentation.
|
||||
|
||||
* [Help Wanted](https://github.com/zulip/zulip/labels/help%20wanted):
|
||||
A broader list of projects that nobody is currently working on.
|
||||
|
||||
* [Platform support](https://github.com/zulip/zulip/labels/Platform%20support):
|
||||
These are open issues about making it possible to install Zulip on a
|
||||
wider range of platforms.
|
||||
|
||||
* [Bugs](https://github.com/zulip/zulip/labels/bug): Open bugs.
|
||||
|
||||
* [Feature requests](https://github.com/zulip/zulip/labels/enhancement):
|
||||
Browsing this list can be a great way to find feature ideas to
|
||||
implement that other Zulip users are excited about.
|
||||
|
||||
* [2016 roadmap milestone](http://zulip.readthedocs.io/en/latest/roadmap.html):
|
||||
The projects that are
|
||||
[priorities for the Zulip project](https://zulip.readthedocs.io/en/latest/roadmap.html).
|
||||
These are great projects if you're looking to make an impact.
|
||||
|
||||
Another way to find issues in Zulip is to take advantage of our
|
||||
`area:<foo>` convention in separating out issues. We partition all of
|
||||
our issues into areas like admin, compose, emoji, hotkeys, i18n,
|
||||
onboarding, search, etc. Look through our
|
||||
[list of labels](https://github.com/zulip/zulip/labels), and click on
|
||||
some of the `area:` labels to see all the tickets related to your
|
||||
areas of interest.
|
||||
|
||||
If you're excited about helping with an open issue, make sure to claim
|
||||
the issue by commenting the following in the comment section:
|
||||
"**@zulipbot** claim". **@zulipbot** will assign you to the issue and
|
||||
label the issue as **in progress**. For more details, check out
|
||||
[**@zulipbot**](https://github.com/zulip/zulipbot).
|
||||
|
||||
You're encouraged to ask questions on how to best implement or debug
|
||||
your changes -- the Zulip maintainers are excited to answer questions
|
||||
to help you stay unblocked and working efficiently. It's great to ask
|
||||
questions in comments on GitHub issues and pull requests, or
|
||||
[on chat.zulip.org][czo-doc]. We'll direct longer discussions to
|
||||
Zulip chat, but please post a summary of what you learned from the
|
||||
chat, or link to the conversation, in a comment on the GitHub issue.
|
||||
|
||||
We also welcome suggestions of features that you feel would be
|
||||
valuable or changes that you feel would make Zulip a better open
|
||||
source project, and are happy to support you in adding new features or
|
||||
other user experience improvements to Zulip.
|
||||
|
||||
If you have a new feature you'd like to add, we recommend you start by
|
||||
opening a GitHub issue about the feature idea explaining the problem
|
||||
that you're hoping to solve and that you're excited to work on it. A
|
||||
Zulip maintainer will usually reply within a day with feedback on the
|
||||
idea, notes on any important issues or concerns, and often tips on
|
||||
how to implement or test it. Please feel free to ping the thread if
|
||||
you don't hear a response from the maintainers -- we try to be very
|
||||
responsive so this usually means we missed your message.
|
||||
|
||||
For significant changes to the visual design, user experience, data
|
||||
model, or architecture, we highly recommend posting a mockup,
|
||||
screenshot, or description of what you have in mind to the
|
||||
[#design](https://chat.zulip.org/#narrow/stream/design) stream on
|
||||
[chat.zulip.org][czo-doc] to get broad feedback before you spend too
|
||||
much time on implementation details.
|
||||
|
||||
Finally, before implementing a larger feature, we highly recommend
|
||||
looking at the
|
||||
[new feature tutorial](http://zulip.readthedocs.io/en/latest/new-feature-tutorial.html)
|
||||
and [coding style guidelines](http://zulip.readthedocs.io/en/latest/code-style.html)
|
||||
on ReadTheDocs.
|
||||
|
||||
Feedback on how to make this development process more efficient, fun,
|
||||
and friendly to new contributors is very welcome! Just send an email
|
||||
to the [zulip-devel](#community) list with your thoughts.
|
||||
|
||||
When you feel like you have completed your work on an issue, post your
|
||||
PR to the
|
||||
[#code review](https://chat.zulip.org/#narrow/stream/code.20review)
|
||||
stream on [chat.zulip.org][czo-doc]. This is our lightweight process
|
||||
that gives other developers the opportunity to give you comments and
|
||||
suggestions on your work.
|
||||
|
||||
## License
|
||||
|
||||
Copyright 2011-2017 Dropbox, Inc., Kandra Labs, Inc., and contributors
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
The software includes some works released by third parties under other
|
||||
free and open source licenses. Those works are redistributed under the
|
||||
license terms under which the works were received. For more details,
|
||||
see the ``docs/THIRDPARTY`` file included with this distribution.
|
||||
|
||||
|
||||
[czo-doc]: https://zulip.readthedocs.io/en/latest/chat-zulip-org.html
|
||||
|
37
SECURITY.md
37
SECURITY.md
@@ -1,37 +0,0 @@
|
||||
# Security policy
|
||||
|
||||
## Reporting a vulnerability
|
||||
|
||||
We love responsible reports of (potential) security issues in Zulip,
|
||||
whether in the latest release or our development branch.
|
||||
|
||||
Our security contact is security@zulip.com. Reporters should expect a
|
||||
response within 24 hours.
|
||||
|
||||
Please include details on the issue and how you'd like to be credited
|
||||
in our release notes when we publish the fix.
|
||||
|
||||
Our [security model][security-model] document may be a helpful
|
||||
resource.
|
||||
|
||||
## Security announcements
|
||||
|
||||
We send security announcements to our [announcement mailing
|
||||
list](https://groups.google.com/g/zulip-announce). If you are running
|
||||
Zulip in production, you should subscribe, by clicking "Join group" at
|
||||
the top of that page.
|
||||
|
||||
## Supported versions
|
||||
|
||||
Zulip provides security support for the latest major release, in the
|
||||
form of minor security/maintenance releases.
|
||||
|
||||
We work hard to make [upgrades][upgrades] reliable, so that there's no
|
||||
reason to run older major releases.
|
||||
|
||||
See also our documentation on the [Zulip release
|
||||
lifecycle][release-lifecycle].
|
||||
|
||||
[security-model]: https://zulip.readthedocs.io/en/latest/production/security-model.html
|
||||
[upgrades]: https://zulip.readthedocs.io/en/stable/production/upgrade.html#upgrading-to-a-release
|
||||
[release-lifecycle]: https://zulip.readthedocs.io/en/latest/overview/release-lifecycle.html
|
127
Vagrantfile
vendored
127
Vagrantfile
vendored
@@ -1,41 +1,49 @@
|
||||
# -*- mode: ruby -*-
|
||||
|
||||
Vagrant.require_version ">= 2.2.6"
|
||||
VAGRANTFILE_API_VERSION = "2"
|
||||
|
||||
def command?(name)
|
||||
`which #{name} > /dev/null 2>&1`
|
||||
$?.success?
|
||||
end
|
||||
|
||||
if Vagrant::VERSION == "1.8.7" then
|
||||
path = `which curl`
|
||||
if path.include?('/opt/vagrant/embedded/bin/curl') then
|
||||
puts "In Vagrant 1.8.7, curl is broken. Please use Vagrant 1.8.6 "\
|
||||
"or run 'sudo rm -f /opt/vagrant/embedded/bin/curl' to fix the "\
|
||||
"issue before provisioning. See "\
|
||||
"https://github.com/mitchellh/vagrant/issues/7997 "\
|
||||
"for reference."
|
||||
exit
|
||||
end
|
||||
end
|
||||
|
||||
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
|
||||
|
||||
# For LXC. VirtualBox hosts use a different box, described below.
|
||||
config.vm.box = "fgrehm/trusty64-lxc"
|
||||
|
||||
Vagrant.configure("2") do |config|
|
||||
# The Zulip development environment runs on 9991 on the guest.
|
||||
host_port = 9991
|
||||
http_proxy = https_proxy = no_proxy = nil
|
||||
host_ip_addr = "127.0.0.1"
|
||||
|
||||
# System settings for the virtual machine.
|
||||
vm_num_cpus = "2"
|
||||
vm_memory = "2048"
|
||||
|
||||
ubuntu_mirror = ""
|
||||
vboxadd_version = nil
|
||||
|
||||
config.vm.box = "bento/ubuntu-22.04"
|
||||
|
||||
config.vm.synced_folder ".", "/vagrant", disabled: true
|
||||
config.vm.synced_folder ".", "/srv/zulip", docker_consistency: "z"
|
||||
config.vm.synced_folder ".", "/srv/zulip"
|
||||
|
||||
vagrant_config_file = ENV["HOME"] + "/.zulip-vagrant-config"
|
||||
vagrant_config_file = ENV['HOME'] + "/.zulip-vagrant-config"
|
||||
if File.file?(vagrant_config_file)
|
||||
IO.foreach(vagrant_config_file) do |line|
|
||||
line.chomp!
|
||||
key, value = line.split(nil, 2)
|
||||
case key
|
||||
when /^([#;]|$)/ # ignore comments
|
||||
when /^([#;]|$)/; # ignore comments
|
||||
when "HTTP_PROXY"; http_proxy = value
|
||||
when "HTTPS_PROXY"; https_proxy = value
|
||||
when "NO_PROXY"; no_proxy = value
|
||||
when "HOST_PORT"; host_port = value.to_i
|
||||
when "HOST_IP_ADDR"; host_ip_addr = value
|
||||
when "GUEST_CPUS"; vm_num_cpus = value
|
||||
when "GUEST_MEMORY_MB"; vm_memory = value
|
||||
when "UBUNTU_MIRROR"; ubuntu_mirror = value
|
||||
when "VBOXADD_VERSION"; vboxadd_version = value
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -53,56 +61,69 @@ Vagrant.configure("2") do |config|
|
||||
elsif !http_proxy.nil? or !https_proxy.nil?
|
||||
# This prints twice due to https://github.com/hashicorp/vagrant/issues/7504
|
||||
# We haven't figured out a workaround.
|
||||
puts "You have specified value for proxy in ~/.zulip-vagrant-config file but did not " \
|
||||
"install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install " \
|
||||
"vagrant-proxyconf` in a terminal. This error will appear twice."
|
||||
puts 'You have specified value for proxy in ~/.zulip-vagrant-config file but did not ' \
|
||||
'install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install ' \
|
||||
'vagrant-proxyconf` in a terminal. This error will appear twice.'
|
||||
exit
|
||||
end
|
||||
|
||||
config.vm.network "forwarded_port", guest: 9991, host: host_port, host_ip: host_ip_addr
|
||||
config.vm.network "forwarded_port", guest: 9994, host: host_port + 3, host_ip: host_ip_addr
|
||||
# Specify Docker provider before VirtualBox provider so it's preferred.
|
||||
config.vm.provider "docker" do |d, override|
|
||||
override.vm.box = nil
|
||||
d.build_dir = File.join(__dir__, "tools", "setup", "dev-vagrant-docker")
|
||||
d.build_args = ["--build-arg", "VAGRANT_UID=#{Process.uid}"]
|
||||
if !ubuntu_mirror.empty?
|
||||
d.build_args += ["--build-arg", "UBUNTU_MIRROR=#{ubuntu_mirror}"]
|
||||
# Specify LXC provider before VirtualBox provider so it's preferred.
|
||||
config.vm.provider "lxc" do |lxc|
|
||||
if command? "lxc-ls"
|
||||
LXC_VERSION = `lxc-ls --version`.strip unless defined? LXC_VERSION
|
||||
if LXC_VERSION >= "1.1.0"
|
||||
# Allow start without AppArmor, otherwise Box will not Start on Ubuntu 14.10
|
||||
# see https://github.com/fgrehm/vagrant-lxc/issues/333
|
||||
lxc.customize 'aa_allow_incomplete', 1
|
||||
end
|
||||
if LXC_VERSION >= "2.0.0"
|
||||
lxc.backingstore = 'dir'
|
||||
end
|
||||
end
|
||||
d.has_ssh = true
|
||||
d.create_args = ["--ulimit", "nofile=1024:65536"]
|
||||
end
|
||||
|
||||
config.vm.provider "virtualbox" do |vb, override|
|
||||
override.vm.box = "ubuntu/trusty64"
|
||||
# It's possible we can get away with just 1.5GB; more testing needed
|
||||
vb.memory = vm_memory
|
||||
vb.cpus = vm_num_cpus
|
||||
|
||||
if !vboxadd_version.nil?
|
||||
override.vbguest.installer = Class.new(VagrantVbguest::Installers::Ubuntu) do
|
||||
define_method(:host_version) do |reload = false|
|
||||
VagrantVbguest::Version(vboxadd_version)
|
||||
end
|
||||
end
|
||||
override.vbguest.allow_downgrade = true
|
||||
override.vbguest.iso_path = "https://download.virtualbox.org/virtualbox/#{vboxadd_version}/VBoxGuestAdditions_#{vboxadd_version}.iso"
|
||||
end
|
||||
vb.memory = 2048
|
||||
vb.cpus = 2
|
||||
end
|
||||
|
||||
config.vm.provider "hyperv" do |h, override|
|
||||
h.memory = vm_memory
|
||||
h.maxmemory = vm_memory
|
||||
h.cpus = vm_num_cpus
|
||||
config.vm.provider "vmware_fusion" do |vb, override|
|
||||
override.vm.box = "puphpet/ubuntu1404-x64"
|
||||
vb.vmx["memsize"] = "2048"
|
||||
vb.vmx["numvcpus"] = "2"
|
||||
end
|
||||
|
||||
config.vm.provider "parallels" do |prl, override|
|
||||
prl.memory = vm_memory
|
||||
prl.cpus = vm_num_cpus
|
||||
end
|
||||
$provision_script = <<SCRIPT
|
||||
set -x
|
||||
set -e
|
||||
set -o pipefail
|
||||
# If the host is running SELinux remount the /sys/fs/selinux directory as read only,
|
||||
# needed for apt-get to work.
|
||||
if [ -d "/sys/fs/selinux" ]; then
|
||||
sudo mount -o remount,ro /sys/fs/selinux
|
||||
fi
|
||||
|
||||
# Set default locale, this prevents errors if the user has another locale set.
|
||||
if ! grep -q 'LC_ALL=en_US.UTF-8' /etc/default/locale; then
|
||||
echo "LC_ALL=en_US.UTF-8" | sudo tee -a /etc/default/locale
|
||||
fi
|
||||
|
||||
# Provision the development environment
|
||||
ln -nsf /srv/zulip ~/zulip
|
||||
/srv/zulip/tools/provision
|
||||
|
||||
# Run any custom provision hooks the user has configured
|
||||
if [ -f /srv/zulip/tools/custom_provision ]; then
|
||||
chmod +x /srv/zulip/tools/custom_provision
|
||||
/srv/zulip/tools/custom_provision
|
||||
fi
|
||||
SCRIPT
|
||||
|
||||
config.vm.provision "shell",
|
||||
# We want provision to be run with the permissions of the vagrant user.
|
||||
privileged: false,
|
||||
path: "tools/setup/vagrant-provision",
|
||||
env: { "UBUNTU_MIRROR" => ubuntu_mirror }
|
||||
inline: $provision_script
|
||||
end
|
||||
|
File diff suppressed because it is too large
Load Diff
@@ -1,21 +1,19 @@
|
||||
from math import sqrt
|
||||
from random import Random
|
||||
|
||||
from zerver.models import Realm, UserProfile, Stream, Message
|
||||
from analytics.models import InstallationCount, RealmCount, UserCount, StreamCount
|
||||
from analytics.lib.counts import CountStat
|
||||
from analytics.lib.time_utils import time_range
|
||||
|
||||
from datetime import datetime
|
||||
from math import sqrt
|
||||
from random import gauss, random, seed
|
||||
from typing import List
|
||||
|
||||
def generate_time_series_data(
|
||||
days: int = 100,
|
||||
business_hours_base: float = 10,
|
||||
non_business_hours_base: float = 10,
|
||||
growth: float = 1,
|
||||
autocorrelation: float = 0,
|
||||
spikiness: float = 1,
|
||||
holiday_rate: float = 0,
|
||||
frequency: str = CountStat.DAY,
|
||||
partial_sum: bool = False,
|
||||
random_seed: int = 26,
|
||||
) -> list[int]:
|
||||
from six.moves import zip
|
||||
|
||||
def generate_time_series_data(days=100, business_hours_base=10, non_business_hours_base=10,
|
||||
growth=1, autocorrelation=0, spikiness=1, holiday_rate=0,
|
||||
frequency=CountStat.DAY, partial_sum=False, random_seed=26):
|
||||
# type: (int, float, float, float, float, float, float, str, bool, int) -> List[int]
|
||||
"""
|
||||
Generate semi-realistic looking time series data for testing analytics graphs.
|
||||
|
||||
@@ -35,43 +33,36 @@ def generate_time_series_data(
|
||||
partial_sum -- If True, return partial sum of the series.
|
||||
random_seed -- Seed for random number generator.
|
||||
"""
|
||||
rng = Random(random_seed)
|
||||
|
||||
if frequency == CountStat.HOUR:
|
||||
length = days * 24
|
||||
length = days*24
|
||||
seasonality = [non_business_hours_base] * 24 * 7
|
||||
for day in range(5):
|
||||
for hour in range(8):
|
||||
seasonality[24 * day + hour] = business_hours_base
|
||||
holidays = []
|
||||
seasonality[24*day + hour] = business_hours_base
|
||||
holidays = []
|
||||
for i in range(days):
|
||||
holidays.extend([rng.random() < holiday_rate] * 24)
|
||||
holidays.extend([random() < holiday_rate] * 24)
|
||||
elif frequency == CountStat.DAY:
|
||||
length = days
|
||||
seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [
|
||||
24 * non_business_hours_base
|
||||
] * 2
|
||||
holidays = [rng.random() < holiday_rate for i in range(days)]
|
||||
seasonality = [8*business_hours_base + 16*non_business_hours_base] * 5 + \
|
||||
[24*non_business_hours_base] * 2
|
||||
holidays = [random() < holiday_rate for i in range(days)]
|
||||
else:
|
||||
raise AssertionError(f"Unknown frequency: {frequency}")
|
||||
raise AssertionError("Unknown frequency: %s" % (frequency,))
|
||||
if length < 2:
|
||||
raise AssertionError(
|
||||
f"Must be generating at least 2 data points. Currently generating {length}"
|
||||
)
|
||||
growth_base = growth ** (1.0 / (length - 1))
|
||||
raise AssertionError("Must be generating at least 2 data points. "
|
||||
"Currently generating %s" % (length,))
|
||||
growth_base = growth ** (1. / (length-1))
|
||||
values_no_noise = [seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)]
|
||||
|
||||
noise_scalars = [rng.gauss(0, 1)]
|
||||
seed(random_seed)
|
||||
noise_scalars = [gauss(0, 1)]
|
||||
for i in range(1, length):
|
||||
noise_scalars.append(
|
||||
noise_scalars[-1] * autocorrelation + rng.gauss(0, 1) * (1 - autocorrelation)
|
||||
)
|
||||
noise_scalars.append(noise_scalars[-1]*autocorrelation + gauss(0, 1)*(1-autocorrelation))
|
||||
|
||||
values = [
|
||||
0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
|
||||
for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays, strict=False)
|
||||
]
|
||||
values = [0 if holiday else int(v + sqrt(v)*noise_scalar*spikiness)
|
||||
for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)]
|
||||
if partial_sum:
|
||||
for i in range(1, length):
|
||||
values[i] = values[i - 1] + values[i]
|
||||
values[i] = values[i-1] + values[i]
|
||||
return [max(v, 0) for v in values]
|
||||
|
@@ -1,16 +1,16 @@
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from zerver.lib.timestamp import floor_to_hour, floor_to_day, \
|
||||
timestamp_to_datetime, verify_UTC
|
||||
from analytics.lib.counts import CountStat
|
||||
from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from typing import List, Optional
|
||||
|
||||
# If min_length is None, returns end_times from ceiling(start) to floor(end), inclusive.
|
||||
# If min_length is greater than 0, pads the list to the left.
|
||||
# So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22],
|
||||
# and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
|
||||
def time_range(
|
||||
start: datetime, end: datetime, frequency: str, min_length: int | None
|
||||
) -> list[datetime]:
|
||||
def time_range(start, end, frequency, min_length):
|
||||
# type: (datetime, datetime, str, Optional[int]) -> List[datetime]
|
||||
verify_UTC(start)
|
||||
verify_UTC(end)
|
||||
if frequency == CountStat.HOUR:
|
||||
@@ -20,14 +20,13 @@ def time_range(
|
||||
end = floor_to_day(end)
|
||||
step = timedelta(days=1)
|
||||
else:
|
||||
raise AssertionError(f"Unknown frequency: {frequency}")
|
||||
raise AssertionError("Unknown frequency: %s" % (frequency,))
|
||||
|
||||
times = []
|
||||
if min_length is not None:
|
||||
start = min(start, end - (min_length - 1) * step)
|
||||
start = min(start, end - (min_length-1)*step)
|
||||
current = end
|
||||
while current >= start:
|
||||
times.append(current)
|
||||
current -= step
|
||||
times.reverse()
|
||||
return times
|
||||
return list(reversed(times))
|
||||
|
83
analytics/management/commands/analyze_mit.py
Normal file
83
analytics/management/commands/analyze_mit.py
Normal file
@@ -0,0 +1,83 @@
|
||||
from typing import Any, Dict
|
||||
|
||||
from django.core.management.base import BaseCommand, CommandParser
|
||||
from zerver.models import Recipient, Message
|
||||
from zerver.lib.timestamp import timestamp_to_datetime
|
||||
import datetime
|
||||
import time
|
||||
import logging
|
||||
|
||||
def compute_stats(log_level):
|
||||
# type: (int) -> None
|
||||
logger = logging.getLogger()
|
||||
logger.setLevel(log_level)
|
||||
|
||||
one_week_ago = timestamp_to_datetime(time.time()) - datetime.timedelta(weeks=1)
|
||||
mit_query = Message.objects.filter(sender__realm__string_id="zephyr",
|
||||
recipient__type=Recipient.STREAM,
|
||||
pub_date__gt=one_week_ago)
|
||||
for bot_sender_start in ["imap.", "rcmd.", "sys."]:
|
||||
mit_query = mit_query.exclude(sender__email__startswith=(bot_sender_start))
|
||||
# Filtering for "/" covers tabbott/extra@ and all the daemon/foo bots.
|
||||
mit_query = mit_query.exclude(sender__email__contains=("/"))
|
||||
mit_query = mit_query.exclude(sender__email__contains=("aim.com"))
|
||||
mit_query = mit_query.exclude(
|
||||
sender__email__in=["rss@mit.edu", "bash@mit.edu", "apache@mit.edu",
|
||||
"bitcoin@mit.edu", "lp@mit.edu", "clocks@mit.edu",
|
||||
"root@mit.edu", "nagios@mit.edu",
|
||||
"www-data|local-realm@mit.edu"])
|
||||
user_counts = {} # type: Dict[str, Dict[str, int]]
|
||||
for m in mit_query.select_related("sending_client", "sender"):
|
||||
email = m.sender.email
|
||||
user_counts.setdefault(email, {})
|
||||
user_counts[email].setdefault(m.sending_client.name, 0)
|
||||
user_counts[email][m.sending_client.name] += 1
|
||||
|
||||
total_counts = {} # type: Dict[str, int]
|
||||
total_user_counts = {} # type: Dict[str, int]
|
||||
for email, counts in user_counts.items():
|
||||
total_user_counts.setdefault(email, 0)
|
||||
for client_name, count in counts.items():
|
||||
total_counts.setdefault(client_name, 0)
|
||||
total_counts[client_name] += count
|
||||
total_user_counts[email] += count
|
||||
|
||||
logging.debug("%40s | %10s | %s" % ("User", "Messages", "Percentage Zulip"))
|
||||
top_percents = {} # type: Dict[int, float]
|
||||
for size in [10, 25, 50, 100, 200, len(total_user_counts.keys())]:
|
||||
top_percents[size] = 0.0
|
||||
for i, email in enumerate(sorted(total_user_counts.keys(),
|
||||
key=lambda x: -total_user_counts[x])):
|
||||
percent_zulip = round(100 - (user_counts[email].get("zephyr_mirror", 0)) * 100. /
|
||||
total_user_counts[email], 1)
|
||||
for size in top_percents.keys():
|
||||
top_percents.setdefault(size, 0)
|
||||
if i < size:
|
||||
top_percents[size] += (percent_zulip * 1.0 / size)
|
||||
|
||||
logging.debug("%40s | %10s | %s%%" % (email, total_user_counts[email],
|
||||
percent_zulip))
|
||||
|
||||
logging.info("")
|
||||
for size in sorted(top_percents.keys()):
|
||||
logging.info("Top %6s | %s%%" % (size, round(top_percents[size], 1)))
|
||||
|
||||
grand_total = sum(total_counts.values())
|
||||
print(grand_total)
|
||||
logging.info("%15s | %s" % ("Client", "Percentage"))
|
||||
for client in total_counts.keys():
|
||||
logging.info("%15s | %s%%" % (client, round(100. * total_counts[client] / grand_total, 1)))
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Compute statistics on MIT Zephyr usage."
|
||||
|
||||
def add_arguments(self, parser):
|
||||
# type: (CommandParser) -> None
|
||||
parser.add_argument('--verbose', default=False, action='store_true')
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# type: (*Any, **Any) -> None
|
||||
level = logging.INFO
|
||||
if options["verbose"]:
|
||||
level = logging.DEBUG
|
||||
compute_stats(level)
|
59
analytics/management/commands/analyze_user_activity.py
Normal file
59
analytics/management/commands/analyze_user_activity.py
Normal file
@@ -0,0 +1,59 @@
|
||||
from typing import Any, Dict
|
||||
|
||||
from zerver.lib.statistics import seconds_usage_between
|
||||
|
||||
from django.core.management.base import BaseCommand, CommandParser
|
||||
from zerver.models import UserProfile
|
||||
import datetime
|
||||
from django.utils.timezone import utc
|
||||
|
||||
def analyze_activity(options):
|
||||
# type: (Dict[str, Any]) -> None
|
||||
day_start = datetime.datetime.strptime(options["date"], "%Y-%m-%d").replace(tzinfo=utc)
|
||||
day_end = day_start + datetime.timedelta(days=options["duration"])
|
||||
|
||||
user_profile_query = UserProfile.objects.all()
|
||||
if options["realm"]:
|
||||
user_profile_query = user_profile_query.filter(realm__string_id=options["realm"])
|
||||
|
||||
print("Per-user online duration:\n")
|
||||
total_duration = datetime.timedelta(0)
|
||||
for user_profile in user_profile_query:
|
||||
duration = seconds_usage_between(user_profile, day_start, day_end)
|
||||
|
||||
if duration == datetime.timedelta(0):
|
||||
continue
|
||||
|
||||
total_duration += duration
|
||||
print("%-*s%s" % (37, user_profile.email, duration,))
|
||||
|
||||
print("\nTotal Duration: %s" % (total_duration,))
|
||||
print("\nTotal Duration in minutes: %s" % (total_duration.total_seconds() / 60.,))
|
||||
print("Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,))
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = """Report analytics of user activity on a per-user and realm basis.
|
||||
|
||||
This command aggregates user activity data that is collected by each user using Zulip. It attempts
|
||||
to approximate how much each user has been using Zulip per day, measured by recording each 15 minute
|
||||
period where some activity has occurred (mouse move or keyboard activity).
|
||||
|
||||
It will correctly not count server-initiated reloads in the activity statistics.
|
||||
|
||||
The duration flag can be used to control how many days to show usage duration for
|
||||
|
||||
Usage: ./manage.py analyze_user_activity [--realm=zulip] [--date=2013-09-10] [--duration=1]
|
||||
|
||||
By default, if no date is selected 2013-09-10 is used. If no realm is provided, information
|
||||
is shown for all realms"""
|
||||
|
||||
def add_arguments(self, parser):
|
||||
# type: (CommandParser) -> None
|
||||
parser.add_argument('--realm', action='store')
|
||||
parser.add_argument('--date', action='store', default="2013-09-06")
|
||||
parser.add_argument('--duration', action='store', default=1, type=int,
|
||||
help="How many days to show usage information for")
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# type: (*Any, **Any) -> None
|
||||
analyze_activity(options)
|
@@ -1,92 +0,0 @@
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
from typing import Any, Literal
|
||||
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import ALL_COUNT_STATS, CountStat
|
||||
from analytics.models import installation_epoch
|
||||
from scripts.lib.zulip_tools import atomic_nagios_write
|
||||
from zerver.lib.management import ZulipBaseCommand
|
||||
from zerver.lib.timestamp import TimeZoneNotUTCError, floor_to_day, floor_to_hour, verify_UTC
|
||||
from zerver.models import Realm
|
||||
|
||||
states = {
|
||||
0: "OK",
|
||||
1: "WARNING",
|
||||
2: "CRITICAL",
|
||||
3: "UNKNOWN",
|
||||
}
|
||||
|
||||
|
||||
@dataclass
|
||||
class NagiosResult:
|
||||
status: Literal["ok", "warning", "critical", "unknown"]
|
||||
message: str
|
||||
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
help = """Checks FillState table.
|
||||
|
||||
Run as a cron job that runs every hour."""
|
||||
|
||||
@override
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
fill_state = self.get_fill_state()
|
||||
atomic_nagios_write("check-analytics-state", fill_state.status, fill_state.message)
|
||||
|
||||
def get_fill_state(self) -> NagiosResult:
|
||||
if not Realm.objects.exists():
|
||||
return NagiosResult(status="ok", message="No realms exist, so not checking FillState.")
|
||||
|
||||
warning_unfilled_properties = []
|
||||
critical_unfilled_properties = []
|
||||
for property, stat in ALL_COUNT_STATS.items():
|
||||
last_fill = stat.last_successful_fill()
|
||||
if last_fill is None:
|
||||
last_fill = installation_epoch()
|
||||
try:
|
||||
verify_UTC(last_fill)
|
||||
except TimeZoneNotUTCError:
|
||||
return NagiosResult(
|
||||
status="critical", message=f"FillState not in UTC for {property}"
|
||||
)
|
||||
|
||||
if stat.frequency == CountStat.DAY:
|
||||
floor_function = floor_to_day
|
||||
warning_threshold = timedelta(hours=26)
|
||||
critical_threshold = timedelta(hours=50)
|
||||
else: # CountStat.HOUR
|
||||
floor_function = floor_to_hour
|
||||
warning_threshold = timedelta(minutes=90)
|
||||
critical_threshold = timedelta(minutes=150)
|
||||
|
||||
if floor_function(last_fill) != last_fill:
|
||||
return NagiosResult(
|
||||
status="critical",
|
||||
message=f"FillState not on {stat.frequency} boundary for {property}",
|
||||
)
|
||||
|
||||
time_to_last_fill = timezone_now() - last_fill
|
||||
if time_to_last_fill > critical_threshold:
|
||||
critical_unfilled_properties.append(property)
|
||||
elif time_to_last_fill > warning_threshold:
|
||||
warning_unfilled_properties.append(property)
|
||||
|
||||
if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0:
|
||||
return NagiosResult(status="ok", message="FillState looks fine.")
|
||||
if len(critical_unfilled_properties) == 0:
|
||||
return NagiosResult(
|
||||
status="warning",
|
||||
message="Missed filling {} once.".format(
|
||||
", ".join(warning_unfilled_properties),
|
||||
),
|
||||
)
|
||||
return NagiosResult(
|
||||
status="critical",
|
||||
message="Missed filling {} once. Missed filling {} at least twice.".format(
|
||||
", ".join(warning_unfilled_properties),
|
||||
", ".join(critical_unfilled_properties),
|
||||
),
|
||||
)
|
@@ -1,25 +1,26 @@
|
||||
from argparse import ArgumentParser
|
||||
from typing import Any
|
||||
import sys
|
||||
|
||||
from django.core.management.base import CommandError
|
||||
from typing_extensions import override
|
||||
from argparse import ArgumentParser
|
||||
from django.db import connection
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from analytics.lib.counts import do_drop_all_analytics_tables
|
||||
from zerver.lib.management import ZulipBaseCommand
|
||||
|
||||
from typing import Any
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
class Command(BaseCommand):
|
||||
help = """Clear analytics tables."""
|
||||
|
||||
@override
|
||||
def add_arguments(self, parser: ArgumentParser) -> None:
|
||||
parser.add_argument("--force", action="store_true", help="Clear analytics tables.")
|
||||
def add_arguments(self, parser):
|
||||
# type: (ArgumentParser) -> None
|
||||
parser.add_argument('--force',
|
||||
action='store_true',
|
||||
help="Clear analytics tables.")
|
||||
|
||||
@override
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
if options["force"]:
|
||||
def handle(self, *args, **options):
|
||||
# type: (*Any, **Any) -> None
|
||||
if options['force']:
|
||||
do_drop_all_analytics_tables()
|
||||
else:
|
||||
raise CommandError(
|
||||
"Would delete all data from analytics tables (!); use --force to do so."
|
||||
)
|
||||
print("Would delete all data from analytics tables (!); use --force to do so.")
|
||||
sys.exit(1)
|
||||
|
@@ -1,27 +1,33 @@
|
||||
import sys
|
||||
|
||||
from argparse import ArgumentParser
|
||||
from django.db import connection
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from analytics.lib.counts import do_drop_single_stat, COUNT_STATS
|
||||
|
||||
from typing import Any
|
||||
|
||||
from django.core.management.base import CommandError
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import ALL_COUNT_STATS, do_drop_single_stat
|
||||
from zerver.lib.management import ZulipBaseCommand
|
||||
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
class Command(BaseCommand):
|
||||
help = """Clear analytics tables."""
|
||||
|
||||
@override
|
||||
def add_arguments(self, parser: ArgumentParser) -> None:
|
||||
parser.add_argument("--force", action="store_true", help="Actually do it.")
|
||||
parser.add_argument("--property", help="The property of the stat to be cleared.")
|
||||
def add_arguments(self, parser):
|
||||
# type: (ArgumentParser) -> None
|
||||
parser.add_argument('--force',
|
||||
action='store_true',
|
||||
help="Actually do it.")
|
||||
parser.add_argument('--property',
|
||||
type=str,
|
||||
help="The property of the stat to be cleared.")
|
||||
|
||||
@override
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
property = options["property"]
|
||||
if property not in ALL_COUNT_STATS:
|
||||
raise CommandError(f"Invalid property: {property}")
|
||||
if not options["force"]:
|
||||
raise CommandError("No action taken. Use --force.")
|
||||
def handle(self, *args, **options):
|
||||
# type: (*Any, **Any) -> None
|
||||
property = options['property']
|
||||
if property not in COUNT_STATS:
|
||||
print("Invalid property: %s" % (property,))
|
||||
sys.exit(1)
|
||||
if not options['force']:
|
||||
print("No action taken. Use --force.")
|
||||
sys.exit(1)
|
||||
|
||||
do_drop_single_stat(property)
|
||||
|
77
analytics/management/commands/client_activity.py
Normal file
77
analytics/management/commands/client_activity.py
Normal file
@@ -0,0 +1,77 @@
|
||||
from typing import Any
|
||||
|
||||
from argparse import ArgumentParser
|
||||
from django.db.models import Count, QuerySet
|
||||
from django.utils.timezone import now as timezone_now
|
||||
|
||||
from zerver.lib.management import ZulipBaseCommand
|
||||
from zerver.models import UserActivity
|
||||
|
||||
import datetime
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
help = """Report rough client activity globally, for a realm, or for a user
|
||||
|
||||
Usage examples:
|
||||
|
||||
./manage.py client_activity --target server
|
||||
./manage.py client_activity --target realm --realm zulip
|
||||
./manage.py client_activity --target user --user hamlet@zulip.com --realm zulip"""
|
||||
|
||||
def add_arguments(self, parser):
|
||||
# type: (ArgumentParser) -> None
|
||||
parser.add_argument('--target', dest='target', required=True, type=str,
|
||||
help="'server' will calculate client activity of the entire server. "
|
||||
"'realm' will calculate client activity of realm. "
|
||||
"'user' will calculate client activity of the user.")
|
||||
parser.add_argument('--user', dest='user', type=str,
|
||||
help="The email adress of the user you want to calculate activity.")
|
||||
self.add_realm_args(parser)
|
||||
|
||||
def compute_activity(self, user_activity_objects):
|
||||
# type: (QuerySet) -> None
|
||||
# Report data from the past week.
|
||||
#
|
||||
# This is a rough report of client activity because we inconsistently
|
||||
# register activity from various clients; think of it as telling you
|
||||
# approximately how many people from a group have used a particular
|
||||
# client recently. For example, this might be useful to get a sense of
|
||||
# how popular different versions of a desktop client are.
|
||||
#
|
||||
# Importantly, this does NOT tell you anything about the relative
|
||||
# volumes of requests from clients.
|
||||
threshold = timezone_now() - datetime.timedelta(days=7)
|
||||
client_counts = user_activity_objects.filter(
|
||||
last_visit__gt=threshold).values("client__name").annotate(
|
||||
count=Count('client__name'))
|
||||
|
||||
total = 0
|
||||
counts = []
|
||||
for client_type in client_counts:
|
||||
count = client_type["count"]
|
||||
client = client_type["client__name"]
|
||||
total += count
|
||||
counts.append((count, client))
|
||||
|
||||
counts.sort()
|
||||
|
||||
for count in counts:
|
||||
print("%25s %15d" % (count[1], count[0]))
|
||||
print("Total:", total)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# type: (*Any, **str) -> None
|
||||
realm = self.get_realm(options)
|
||||
if options["user"] is None:
|
||||
if options["target"] == "server" and realm is None:
|
||||
# Report global activity.
|
||||
self.compute_activity(UserActivity.objects.all())
|
||||
elif options["target"] == "realm" and realm is not None:
|
||||
self.compute_activity(UserActivity.objects.filter(user_profile__realm=realm))
|
||||
else:
|
||||
self.print_help("./manage.py", "client_activity")
|
||||
elif options["target"] == "user":
|
||||
user_profile = self.get_user(options["user"], realm)
|
||||
self.compute_activity(UserActivity.objects.filter(user_profile=user_profile))
|
||||
else:
|
||||
self.print_help("./manage.py", "client_activity")
|
@@ -1,350 +1,137 @@
|
||||
from collections.abc import Mapping
|
||||
from datetime import timedelta
|
||||
from typing import Any, TypeAlias
|
||||
|
||||
from django.core.files.uploadedfile import UploadedFile
|
||||
from argparse import ArgumentParser
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import COUNT_STATS, CountStat, do_drop_all_analytics_tables
|
||||
from analytics.lib.fixtures import generate_time_series_data
|
||||
from analytics.lib.time_utils import time_range
|
||||
from analytics.models import (
|
||||
BaseCount,
|
||||
FillState,
|
||||
InstallationCount,
|
||||
RealmCount,
|
||||
StreamCount,
|
||||
UserCount,
|
||||
)
|
||||
from zerver.actions.create_realm import do_create_realm
|
||||
from zerver.lib.create_user import create_user
|
||||
from zerver.lib.management import ZulipBaseCommand
|
||||
from zerver.lib.storage import static_path
|
||||
from zerver.lib.stream_color import STREAM_ASSIGNMENT_COLORS
|
||||
from zerver.lib.stream_subscription import create_stream_subscription
|
||||
from zerver.lib.streams import get_default_values_for_stream_permission_group_settings
|
||||
from analytics.models import BaseCount, InstallationCount, RealmCount, \
|
||||
UserCount, StreamCount, FillState
|
||||
from zerver.lib.timestamp import floor_to_day
|
||||
from zerver.lib.upload import upload_message_attachment_from_request
|
||||
from zerver.models import Client, Realm, RealmAuditLog, Recipient, Stream, UserProfile
|
||||
from zerver.models.groups import NamedUserGroup, SystemGroups, UserGroupMembership
|
||||
from zerver.models.realm_audit_logs import AuditLogEventType
|
||||
from zerver.models import Realm, UserProfile, Stream, Message, Client, \
|
||||
RealmAuditLog
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
from six.moves import zip
|
||||
from typing import Any, Dict, List, Optional, Text, Type, Union, Mapping
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = """Populates analytics tables with randomly generated data."""
|
||||
|
||||
DAYS_OF_DATA = 100
|
||||
random_seed = 26
|
||||
|
||||
def generate_fixture_data(
|
||||
self,
|
||||
stat: CountStat,
|
||||
business_hours_base: float,
|
||||
non_business_hours_base: float,
|
||||
growth: float,
|
||||
autocorrelation: float,
|
||||
spikiness: float,
|
||||
holiday_rate: float = 0,
|
||||
partial_sum: bool = False,
|
||||
) -> list[int]:
|
||||
def create_user(self, email, full_name, is_staff, date_joined, realm):
|
||||
# type: (Text, Text, Text, bool, datetime, Realm) -> UserProfile
|
||||
user = UserProfile.objects.create(
|
||||
email=email, full_name=full_name, is_staff=is_staff,
|
||||
realm=realm, short_name=full_name, pointer=-1, last_pointer_updater='none',
|
||||
api_key='42', date_joined=date_joined)
|
||||
RealmAuditLog.objects.create(
|
||||
realm=realm, modified_user=user, event_type='user_created',
|
||||
event_time=user.date_joined)
|
||||
return user
|
||||
|
||||
def generate_fixture_data(self, stat, business_hours_base, non_business_hours_base,
|
||||
growth, autocorrelation, spikiness, holiday_rate=0,
|
||||
partial_sum=False):
|
||||
# type: (CountStat, float, float, float, float, float, float, bool) -> List[int]
|
||||
self.random_seed += 1
|
||||
return generate_time_series_data(
|
||||
days=self.DAYS_OF_DATA,
|
||||
business_hours_base=business_hours_base,
|
||||
non_business_hours_base=non_business_hours_base,
|
||||
growth=growth,
|
||||
autocorrelation=autocorrelation,
|
||||
spikiness=spikiness,
|
||||
holiday_rate=holiday_rate,
|
||||
frequency=stat.frequency,
|
||||
partial_sum=partial_sum,
|
||||
random_seed=self.random_seed,
|
||||
)
|
||||
days=self.DAYS_OF_DATA, business_hours_base=business_hours_base,
|
||||
non_business_hours_base=non_business_hours_base, growth=growth,
|
||||
autocorrelation=autocorrelation, spikiness=spikiness, holiday_rate=holiday_rate,
|
||||
frequency=stat.frequency, partial_sum=partial_sum, random_seed=self.random_seed)
|
||||
|
||||
@override
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
# TODO: This should arguably only delete the objects
|
||||
# associated with the "analytics" realm.
|
||||
def handle(self, *args, **options):
|
||||
# type: (*Any, **Any) -> None
|
||||
do_drop_all_analytics_tables()
|
||||
|
||||
# This also deletes any objects with this realm as a foreign key
|
||||
Realm.objects.filter(string_id="analytics").delete()
|
||||
|
||||
# Because we just deleted a bunch of objects in the database
|
||||
# directly (rather than deleting individual objects in Django,
|
||||
# in which case our post_save hooks would have flushed the
|
||||
# individual objects from memcached for us), we need to flush
|
||||
# memcached in order to ensure deleted objects aren't still
|
||||
# present in the memcached cache.
|
||||
from zerver.apps import flush_cache
|
||||
|
||||
flush_cache(None)
|
||||
# I believe this also deletes any objects with this realm as a foreign key
|
||||
Realm.objects.filter(string_id='analytics').delete()
|
||||
|
||||
installation_time = timezone_now() - timedelta(days=self.DAYS_OF_DATA)
|
||||
last_end_time = floor_to_day(timezone_now())
|
||||
realm = do_create_realm(
|
||||
string_id="analytics", name="Analytics", date_created=installation_time
|
||||
)
|
||||
realm = Realm.objects.create(
|
||||
string_id='analytics', name='Analytics', date_created=installation_time)
|
||||
shylock = self.create_user('shylock@analytics.ds', 'Shylock', True, installation_time, realm)
|
||||
|
||||
owners_system_group = NamedUserGroup.objects.get(
|
||||
name=SystemGroups.OWNERS, realm=realm, is_system_group=True
|
||||
)
|
||||
guests_system_group = NamedUserGroup.objects.get(
|
||||
name=SystemGroups.EVERYONE, realm=realm, is_system_group=True
|
||||
)
|
||||
|
||||
shylock = create_user(
|
||||
"shylock@analytics.ds",
|
||||
"Shylock",
|
||||
realm,
|
||||
full_name="Shylock",
|
||||
role=UserProfile.ROLE_REALM_OWNER,
|
||||
force_date_joined=installation_time,
|
||||
)
|
||||
UserGroupMembership.objects.create(user_profile=shylock, user_group=owners_system_group)
|
||||
|
||||
# Create guest user for set_guest_users_statistic.
|
||||
bassanio = create_user(
|
||||
"bassanio@analytics.ds",
|
||||
"Bassanio",
|
||||
realm,
|
||||
full_name="Bassanio",
|
||||
role=UserProfile.ROLE_GUEST,
|
||||
force_date_joined=installation_time,
|
||||
)
|
||||
UserGroupMembership.objects.create(user_profile=bassanio, user_group=guests_system_group)
|
||||
|
||||
stream = Stream.objects.create(
|
||||
name="all",
|
||||
realm=realm,
|
||||
date_created=installation_time,
|
||||
**get_default_values_for_stream_permission_group_settings(realm),
|
||||
)
|
||||
recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
|
||||
stream.recipient = recipient
|
||||
stream.save(update_fields=["recipient"])
|
||||
|
||||
# Subscribe shylock to the stream to avoid invariant failures.
|
||||
create_stream_subscription(
|
||||
user_profile=shylock,
|
||||
recipient=recipient,
|
||||
stream=stream,
|
||||
color=STREAM_ASSIGNMENT_COLORS[0],
|
||||
)
|
||||
RealmAuditLog.objects.create(
|
||||
realm=realm,
|
||||
modified_user=shylock,
|
||||
modified_stream=stream,
|
||||
event_last_message_id=0,
|
||||
event_type=AuditLogEventType.SUBSCRIPTION_CREATED,
|
||||
event_time=installation_time,
|
||||
)
|
||||
|
||||
# Create an attachment in the database for set_storage_space_used_statistic.
|
||||
IMAGE_FILE_PATH = static_path("images/test-images/checkbox.png")
|
||||
with open(IMAGE_FILE_PATH, "rb") as fp:
|
||||
upload_message_attachment_from_request(UploadedFile(fp), shylock)
|
||||
|
||||
FixtureData: TypeAlias = Mapping[str | int | None, list[int]]
|
||||
|
||||
def insert_fixture_data(
|
||||
stat: CountStat,
|
||||
fixture_data: FixtureData,
|
||||
table: type[BaseCount],
|
||||
) -> None:
|
||||
end_times = time_range(
|
||||
last_end_time, last_end_time, stat.frequency, len(next(iter(fixture_data.values())))
|
||||
)
|
||||
if table == InstallationCount:
|
||||
id_args: dict[str, Any] = {}
|
||||
def insert_fixture_data(stat, fixture_data, table):
|
||||
# type: (CountStat, Mapping[Optional[str], List[int]], Type[BaseCount]) -> None
|
||||
end_times = time_range(last_end_time, last_end_time, stat.frequency,
|
||||
len(list(fixture_data.values())[0]))
|
||||
if table == RealmCount:
|
||||
id_args = {"realm": realm}
|
||||
id_args = {'realm': realm}
|
||||
if table == UserCount:
|
||||
id_args = {"realm": realm, "user": shylock}
|
||||
if table == StreamCount:
|
||||
id_args = {"stream": stream, "realm": realm}
|
||||
|
||||
id_args = {'realm': realm, 'user': shylock}
|
||||
for subgroup, values in fixture_data.items():
|
||||
table._default_manager.bulk_create(
|
||||
table(
|
||||
property=stat.property,
|
||||
subgroup=subgroup,
|
||||
end_time=end_time,
|
||||
value=value,
|
||||
**id_args,
|
||||
)
|
||||
for end_time, value in zip(end_times, values, strict=False)
|
||||
if value != 0
|
||||
)
|
||||
table.objects.bulk_create([
|
||||
table(property=stat.property, subgroup=subgroup, end_time=end_time,
|
||||
value=value, **id_args)
|
||||
for end_time, value in zip(end_times, values) if value != 0])
|
||||
|
||||
stat = COUNT_STATS["1day_actives::day"]
|
||||
realm_data: FixtureData = {
|
||||
None: self.generate_fixture_data(stat, 0.08, 0.02, 3, 0.3, 6, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data: FixtureData = {
|
||||
None: self.generate_fixture_data(stat, 0.8, 0.2, 4, 0.3, 6, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
stat = COUNT_STATS["7day_actives::day"]
|
||||
stat = COUNT_STATS['realm_active_humans::day']
|
||||
realm_data = {
|
||||
None: self.generate_fixture_data(stat, 0.2, 0.07, 3, 0.3, 6, partial_sum=True),
|
||||
}
|
||||
None: self.generate_fixture_data(stat, .1, .03, 3, .5, 3, partial_sum=True),
|
||||
} # type: Mapping[Optional[str], List[int]]
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data = {
|
||||
None: self.generate_fixture_data(stat, 2, 0.7, 4, 0.3, 6, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
FillState.objects.create(property=stat.property, end_time=last_end_time,
|
||||
state=FillState.DONE)
|
||||
|
||||
stat = COUNT_STATS["realm_active_humans::day"]
|
||||
realm_data = {
|
||||
None: self.generate_fixture_data(stat, 0.8, 0.08, 3, 0.5, 3, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data = {
|
||||
None: self.generate_fixture_data(stat, 1, 0.3, 4, 0.5, 3, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
stat = COUNT_STATS["active_users_audit:is_bot:day"]
|
||||
realm_data = {
|
||||
"false": self.generate_fixture_data(stat, 1, 0.2, 3.5, 0.8, 2, partial_sum=True),
|
||||
"true": self.generate_fixture_data(stat, 0.3, 0.05, 3, 0.3, 2, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data = {
|
||||
"false": self.generate_fixture_data(stat, 3, 1, 4, 0.8, 2, partial_sum=True),
|
||||
"true": self.generate_fixture_data(stat, 1, 0.4, 4, 0.8, 2, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
stat = COUNT_STATS["messages_sent:is_bot:hour"]
|
||||
user_data: FixtureData = {
|
||||
"false": self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8, holiday_rate=0.1),
|
||||
}
|
||||
stat = COUNT_STATS['messages_sent:is_bot:hour']
|
||||
user_data = {'false': self.generate_fixture_data(
|
||||
stat, 2, 1, 1.5, .6, 8, holiday_rate=.1)} # type: Mapping[Optional[str], List[int]]
|
||||
insert_fixture_data(stat, user_data, UserCount)
|
||||
realm_data = {
|
||||
"false": self.generate_fixture_data(stat, 35, 15, 6, 0.6, 4),
|
||||
"true": self.generate_fixture_data(stat, 15, 15, 3, 0.4, 2),
|
||||
}
|
||||
realm_data = {'false': self.generate_fixture_data(stat, 35, 15, 6, .6, 4),
|
||||
'true': self.generate_fixture_data(stat, 15, 15, 3, .4, 2)}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data = {
|
||||
"false": self.generate_fixture_data(stat, 350, 150, 6, 0.6, 4),
|
||||
"true": self.generate_fixture_data(stat, 150, 150, 3, 0.4, 2),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
FillState.objects.create(property=stat.property, end_time=last_end_time,
|
||||
state=FillState.DONE)
|
||||
|
||||
stat = COUNT_STATS["messages_sent:message_type:day"]
|
||||
stat = COUNT_STATS['messages_sent:message_type:day']
|
||||
user_data = {
|
||||
"public_stream": self.generate_fixture_data(stat, 1.5, 1, 3, 0.6, 8),
|
||||
"private_message": self.generate_fixture_data(stat, 0.5, 0.3, 1, 0.6, 8),
|
||||
"huddle_message": self.generate_fixture_data(stat, 0.2, 0.2, 2, 0.6, 8),
|
||||
}
|
||||
'public_stream': self.generate_fixture_data(stat, 1.5, 1, 3, .6, 8),
|
||||
'private_message': self.generate_fixture_data(stat, .5, .3, 1, .6, 8),
|
||||
'huddle_message': self.generate_fixture_data(stat, .2, .2, 2, .6, 8)}
|
||||
insert_fixture_data(stat, user_data, UserCount)
|
||||
realm_data = {
|
||||
"public_stream": self.generate_fixture_data(stat, 30, 8, 5, 0.6, 4),
|
||||
"private_stream": self.generate_fixture_data(stat, 7, 7, 5, 0.6, 4),
|
||||
"private_message": self.generate_fixture_data(stat, 13, 5, 5, 0.6, 4),
|
||||
"huddle_message": self.generate_fixture_data(stat, 6, 3, 3, 0.6, 4),
|
||||
}
|
||||
'public_stream': self.generate_fixture_data(stat, 30, 8, 5, .6, 4),
|
||||
'private_stream': self.generate_fixture_data(stat, 7, 7, 5, .6, 4),
|
||||
'private_message': self.generate_fixture_data(stat, 13, 5, 5, .6, 4),
|
||||
'huddle_message': self.generate_fixture_data(stat, 6, 3, 3, .6, 4)}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data = {
|
||||
"public_stream": self.generate_fixture_data(stat, 300, 80, 5, 0.6, 4),
|
||||
"private_stream": self.generate_fixture_data(stat, 70, 70, 5, 0.6, 4),
|
||||
"private_message": self.generate_fixture_data(stat, 130, 50, 5, 0.6, 4),
|
||||
"huddle_message": self.generate_fixture_data(stat, 60, 30, 3, 0.6, 4),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
FillState.objects.create(property=stat.property, end_time=last_end_time,
|
||||
state=FillState.DONE)
|
||||
|
||||
website, created = Client.objects.get_or_create(name="website")
|
||||
old_desktop, created = Client.objects.get_or_create(name="desktop app Linux 0.3.7")
|
||||
android, created = Client.objects.get_or_create(name="ZulipAndroid")
|
||||
iOS, created = Client.objects.get_or_create(name="ZulipiOS")
|
||||
react_native, created = Client.objects.get_or_create(name="ZulipMobile")
|
||||
flutter, created = Client.objects.get_or_create(name="ZulipFlutter")
|
||||
API, created = Client.objects.get_or_create(name="API: Python")
|
||||
zephyr_mirror, created = Client.objects.get_or_create(name="zephyr_mirror")
|
||||
unused, created = Client.objects.get_or_create(name="unused")
|
||||
long_webhook, created = Client.objects.get_or_create(name="ZulipLooooooooooongNameWebhook")
|
||||
website, created = Client.objects.get_or_create(name='website')
|
||||
old_desktop, created = Client.objects.get_or_create(name='desktop app Linux 0.3.7')
|
||||
android, created = Client.objects.get_or_create(name='ZulipAndroid')
|
||||
iOS, created = Client.objects.get_or_create(name='ZulipiOS')
|
||||
react_native, created = Client.objects.get_or_create(name='ZulipMobile')
|
||||
API, created = Client.objects.get_or_create(name='API: Python')
|
||||
zephyr_mirror, created = Client.objects.get_or_create(name='zephyr_mirror')
|
||||
unused, created = Client.objects.get_or_create(name='unused')
|
||||
long_webhook, created = Client.objects.get_or_create(name='ZulipLooooooooooongNameWebhook')
|
||||
|
||||
stat = COUNT_STATS["messages_sent:client:day"]
|
||||
stat = COUNT_STATS['messages_sent:client:day']
|
||||
user_data = {
|
||||
website.id: self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8),
|
||||
zephyr_mirror.id: self.generate_fixture_data(stat, 0, 0.3, 1.5, 0.6, 8),
|
||||
}
|
||||
website.id: self.generate_fixture_data(stat, 2, 1, 1.5, .6, 8),
|
||||
zephyr_mirror.id: self.generate_fixture_data(stat, 0, .3, 1.5, .6, 8)}
|
||||
insert_fixture_data(stat, user_data, UserCount)
|
||||
realm_data = {
|
||||
website.id: self.generate_fixture_data(stat, 30, 20, 5, 0.6, 3),
|
||||
old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, 0.6, 3),
|
||||
android.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
||||
iOS.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
||||
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||
flutter.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||
API.id: self.generate_fixture_data(stat, 5, 5, 5, 0.6, 3),
|
||||
zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, 0.6, 3),
|
||||
website.id: self.generate_fixture_data(stat, 30, 20, 5, .6, 3),
|
||||
old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, .6, 3),
|
||||
android.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3),
|
||||
iOS.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3),
|
||||
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, .6, 3),
|
||||
API.id: self.generate_fixture_data(stat, 5, 5, 5, .6, 3),
|
||||
zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, .6, 3),
|
||||
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
|
||||
long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
||||
}
|
||||
long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3)}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data = {
|
||||
website.id: self.generate_fixture_data(stat, 300, 200, 5, 0.6, 3),
|
||||
old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, 0.6, 3),
|
||||
android.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
||||
iOS.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
||||
flutter.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||
API.id: self.generate_fixture_data(stat, 50, 50, 5, 0.6, 3),
|
||||
zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, 0.6, 3),
|
||||
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
|
||||
long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
FillState.objects.create(property=stat.property, end_time=last_end_time,
|
||||
state=FillState.DONE)
|
||||
|
||||
stat = COUNT_STATS["messages_in_stream:is_bot:day"]
|
||||
realm_data = {
|
||||
"false": self.generate_fixture_data(stat, 30, 5, 6, 0.6, 4),
|
||||
"true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3),
|
||||
}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
stream_data: Mapping[int | str | None, list[int]] = {
|
||||
"false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4),
|
||||
"true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2),
|
||||
}
|
||||
insert_fixture_data(stat, stream_data, StreamCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
stat = COUNT_STATS["messages_read::hour"]
|
||||
user_data = {
|
||||
None: self.generate_fixture_data(stat, 7, 3, 2, 0.6, 8, holiday_rate=0.1),
|
||||
}
|
||||
insert_fixture_data(stat, user_data, UserCount)
|
||||
realm_data = {None: self.generate_fixture_data(stat, 50, 35, 6, 0.6, 4)}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
# TODO: messages_sent_to_stream:is_bot
|
||||
|
165
analytics/management/commands/realm_stats.py
Normal file
165
analytics/management/commands/realm_stats.py
Normal file
@@ -0,0 +1,165 @@
|
||||
from typing import Any, List
|
||||
|
||||
from argparse import ArgumentParser
|
||||
import datetime
|
||||
import pytz
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db.models import Count
|
||||
from django.utils.timezone import now as timezone_now
|
||||
|
||||
from zerver.models import UserProfile, Realm, Stream, Message, Recipient, UserActivity, \
|
||||
Subscription, UserMessage, get_realm
|
||||
|
||||
MOBILE_CLIENT_LIST = ["Android", "ios"]
|
||||
HUMAN_CLIENT_LIST = MOBILE_CLIENT_LIST + ["website"]
|
||||
|
||||
human_messages = Message.objects.filter(sending_client__name__in=HUMAN_CLIENT_LIST)
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Generate statistics on realm activity."
|
||||
|
||||
def add_arguments(self, parser):
|
||||
# type: (ArgumentParser) -> None
|
||||
parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
|
||||
help="realm to generate statistics for")
|
||||
|
||||
def active_users(self, realm):
|
||||
# type: (Realm) -> List[UserProfile]
|
||||
# Has been active (on the website, for now) in the last 7 days.
|
||||
activity_cutoff = timezone_now() - datetime.timedelta(days=7)
|
||||
return [activity.user_profile for activity in (
|
||||
UserActivity.objects.filter(user_profile__realm=realm,
|
||||
user_profile__is_active=True,
|
||||
last_visit__gt=activity_cutoff,
|
||||
query="/json/users/me/pointer",
|
||||
client__name="website"))]
|
||||
|
||||
def messages_sent_by(self, user, days_ago):
|
||||
# type: (UserProfile, int) -> int
|
||||
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
|
||||
return human_messages.filter(sender=user, pub_date__gt=sent_time_cutoff).count()
|
||||
|
||||
def total_messages(self, realm, days_ago):
|
||||
# type: (Realm, int) -> int
|
||||
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
|
||||
return Message.objects.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()
|
||||
|
||||
def human_messages(self, realm, days_ago):
|
||||
# type: (Realm, int) -> int
|
||||
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
|
||||
return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()
|
||||
|
||||
def api_messages(self, realm, days_ago):
|
||||
# type: (Realm, int) -> int
|
||||
return (self.total_messages(realm, days_ago) - self.human_messages(realm, days_ago))
|
||||
|
||||
def stream_messages(self, realm, days_ago):
|
||||
# type: (Realm, int) -> int
|
||||
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
|
||||
return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff,
|
||||
recipient__type=Recipient.STREAM).count()
|
||||
|
||||
def private_messages(self, realm, days_ago):
|
||||
# type: (Realm, int) -> int
|
||||
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
|
||||
return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
|
||||
recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.HUDDLE).count()
|
||||
|
||||
def group_private_messages(self, realm, days_ago):
|
||||
# type: (Realm, int) -> int
|
||||
sent_time_cutoff = timezone_now() - datetime.timedelta(days=days_ago)
|
||||
return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
|
||||
recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.PERSONAL).count()
|
||||
|
||||
def report_percentage(self, numerator, denominator, text):
|
||||
# type: (float, float, str) -> None
|
||||
if not denominator:
|
||||
fraction = 0.0
|
||||
else:
|
||||
fraction = numerator / float(denominator)
|
||||
print("%.2f%% of" % (fraction * 100,), text)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# type: (*Any, **Any) -> None
|
||||
if options['realms']:
|
||||
try:
|
||||
realms = [get_realm(string_id) for string_id in options['realms']]
|
||||
except Realm.DoesNotExist as e:
|
||||
print(e)
|
||||
exit(1)
|
||||
else:
|
||||
realms = Realm.objects.all()
|
||||
|
||||
for realm in realms:
|
||||
print(realm.string_id)
|
||||
|
||||
user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
|
||||
active_users = self.active_users(realm)
|
||||
num_active = len(active_users)
|
||||
|
||||
print("%d active users (%d total)" % (num_active, len(user_profiles)))
|
||||
streams = Stream.objects.filter(realm=realm).extra(
|
||||
tables=['zerver_subscription', 'zerver_recipient'],
|
||||
where=['zerver_subscription.recipient_id = zerver_recipient.id',
|
||||
'zerver_recipient.type = 2',
|
||||
'zerver_recipient.type_id = zerver_stream.id',
|
||||
'zerver_subscription.active = true']).annotate(count=Count("name"))
|
||||
print("%d streams" % (streams.count(),))
|
||||
|
||||
for days_ago in (1, 7, 30):
|
||||
print("In last %d days, users sent:" % (days_ago,))
|
||||
sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles]
|
||||
for quantity in sorted(sender_quantities, reverse=True):
|
||||
print(quantity, end=' ')
|
||||
print("")
|
||||
|
||||
print("%d stream messages" % (self.stream_messages(realm, days_ago),))
|
||||
print("%d one-on-one private messages" % (self.private_messages(realm, days_ago),))
|
||||
print("%d messages sent via the API" % (self.api_messages(realm, days_ago),))
|
||||
print("%d group private messages" % (self.group_private_messages(realm, days_ago),))
|
||||
|
||||
num_notifications_enabled = len([x for x in active_users if x.enable_desktop_notifications])
|
||||
self.report_percentage(num_notifications_enabled, num_active,
|
||||
"active users have desktop notifications enabled")
|
||||
|
||||
num_enter_sends = len([x for x in active_users if x.enter_sends])
|
||||
self.report_percentage(num_enter_sends, num_active,
|
||||
"active users have enter-sends")
|
||||
|
||||
all_message_count = human_messages.filter(sender__realm=realm).count()
|
||||
multi_paragraph_message_count = human_messages.filter(
|
||||
sender__realm=realm, content__contains="\n\n").count()
|
||||
self.report_percentage(multi_paragraph_message_count, all_message_count,
|
||||
"all messages are multi-paragraph")
|
||||
|
||||
# Starred messages
|
||||
starrers = UserMessage.objects.filter(user_profile__in=user_profiles,
|
||||
flags=UserMessage.flags.starred).values(
|
||||
"user_profile").annotate(count=Count("user_profile"))
|
||||
print("%d users have starred %d messages" % (
|
||||
len(starrers), sum([elt["count"] for elt in starrers])))
|
||||
|
||||
active_user_subs = Subscription.objects.filter(
|
||||
user_profile__in=user_profiles, active=True)
|
||||
|
||||
# Streams not in home view
|
||||
non_home_view = active_user_subs.filter(in_home_view=False).values(
|
||||
"user_profile").annotate(count=Count("user_profile"))
|
||||
print("%d users have %d streams not in home view" % (
|
||||
len(non_home_view), sum([elt["count"] for elt in non_home_view])))
|
||||
|
||||
# Code block markup
|
||||
markup_messages = human_messages.filter(
|
||||
sender__realm=realm, content__contains="~~~").values(
|
||||
"sender").annotate(count=Count("sender"))
|
||||
print("%d users have used code block markup on %s messages" % (
|
||||
len(markup_messages), sum([elt["count"] for elt in markup_messages])))
|
||||
|
||||
# Notifications for stream messages
|
||||
notifications = active_user_subs.filter(notifications=True).values(
|
||||
"user_profile").annotate(count=Count("user_profile"))
|
||||
print("%d users receive desktop notifications for %d streams" % (
|
||||
len(notifications), sum([elt["count"] for elt in notifications])))
|
||||
|
||||
print("")
|
43
analytics/management/commands/stream_stats.py
Normal file
43
analytics/management/commands/stream_stats.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from typing import Any
|
||||
|
||||
from argparse import ArgumentParser
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db.models import Q
|
||||
from zerver.models import Realm, Stream, Message, Subscription, Recipient, get_realm
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Generate statistics on the streams for a realm."
|
||||
|
||||
def add_arguments(self, parser):
|
||||
# type: (ArgumentParser) -> None
|
||||
parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
|
||||
help="realm to generate statistics for")
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# type: (*Any, **str) -> None
|
||||
if options['realms']:
|
||||
try:
|
||||
realms = [get_realm(string_id) for string_id in options['realms']]
|
||||
except Realm.DoesNotExist as e:
|
||||
print(e)
|
||||
exit(1)
|
||||
else:
|
||||
realms = Realm.objects.all()
|
||||
|
||||
for realm in realms:
|
||||
print(realm.string_id)
|
||||
print("------------")
|
||||
print("%25s %15s %10s" % ("stream", "subscribers", "messages"))
|
||||
streams = Stream.objects.filter(realm=realm).exclude(Q(name__istartswith="tutorial-"))
|
||||
invite_only_count = 0
|
||||
for stream in streams:
|
||||
if stream.invite_only:
|
||||
invite_only_count += 1
|
||||
continue
|
||||
print("%25s" % (stream.name,), end=' ')
|
||||
recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id)
|
||||
print("%10d" % (len(Subscription.objects.filter(recipient=recipient, active=True)),), end=' ')
|
||||
num_messages = len(Message.objects.filter(recipient=recipient))
|
||||
print("%12d" % (num_messages,))
|
||||
print("%d invite-only streams" % (invite_only_count,))
|
||||
print("")
|
@@ -1,99 +1,93 @@
|
||||
import hashlib
|
||||
import time
|
||||
import os
|
||||
import sys
|
||||
from scripts.lib.zulip_tools import ENDC, WARNING
|
||||
|
||||
from argparse import ArgumentParser
|
||||
from datetime import timezone
|
||||
from typing import Any
|
||||
from datetime import timedelta
|
||||
import time
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.dateparse import parse_datetime
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
from django.utils.timezone import utc as timezone_utc
|
||||
from django.utils.dateparse import parse_datetime
|
||||
from django.conf import settings
|
||||
|
||||
from analytics.lib.counts import ALL_COUNT_STATS, logger, process_count_stat
|
||||
from zerver.lib.management import ZulipBaseCommand, abort_cron_during_deploy, abort_unless_locked
|
||||
from zerver.lib.remote_server import send_server_data_to_push_bouncer, should_send_analytics_data
|
||||
from analytics.models import RealmCount, UserCount
|
||||
from analytics.lib.counts import COUNT_STATS, logger, process_count_stat
|
||||
from zerver.lib.timestamp import floor_to_hour
|
||||
from zerver.models import Realm
|
||||
from zerver.models import UserProfile, Message, Realm
|
||||
|
||||
from typing import Any, Dict
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
class Command(BaseCommand):
|
||||
help = """Fills Analytics tables.
|
||||
|
||||
Run as a cron job that runs every hour."""
|
||||
|
||||
@override
|
||||
def add_arguments(self, parser: ArgumentParser) -> None:
|
||||
parser.add_argument(
|
||||
"--time",
|
||||
"-t",
|
||||
help="Update stat tables from current state to --time. Defaults to the current time.",
|
||||
default=timezone_now().isoformat(),
|
||||
)
|
||||
parser.add_argument("--utc", action="store_true", help="Interpret --time in UTC.")
|
||||
parser.add_argument(
|
||||
"--stat", "-s", help="CountStat to process. If omitted, all stats are processed."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--verbose", action="store_true", help="Print timing information to stdout."
|
||||
)
|
||||
def add_arguments(self, parser):
|
||||
# type: (ArgumentParser) -> None
|
||||
parser.add_argument('--time', '-t',
|
||||
type=str,
|
||||
help='Update stat tables from current state to --time. Defaults to the current time.',
|
||||
default=timezone_now().isoformat())
|
||||
parser.add_argument('--utc',
|
||||
action='store_true',
|
||||
help="Interpret --time in UTC.",
|
||||
default=False)
|
||||
parser.add_argument('--stat', '-s',
|
||||
type=str,
|
||||
help="CountStat to process. If omitted, all stats are processed.")
|
||||
parser.add_argument('--verbose',
|
||||
action='store_true',
|
||||
help="Print timing information to stdout.",
|
||||
default=False)
|
||||
|
||||
@override
|
||||
@abort_cron_during_deploy
|
||||
@abort_unless_locked
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
self.run_update_analytics_counts(options)
|
||||
def handle(self, *args, **options):
|
||||
# type: (*Any, **Any) -> None
|
||||
try:
|
||||
os.mkdir(settings.ANALYTICS_LOCK_DIR)
|
||||
except OSError:
|
||||
print(WARNING + "Analytics lock %s is unavailable; exiting... " + ENDC)
|
||||
return
|
||||
|
||||
def run_update_analytics_counts(self, options: dict[str, Any]) -> None:
|
||||
try:
|
||||
self.run_update_analytics_counts(options)
|
||||
finally:
|
||||
os.rmdir(settings.ANALYTICS_LOCK_DIR)
|
||||
|
||||
def run_update_analytics_counts(self, options):
|
||||
# type: (Dict[str, Any]) -> None
|
||||
# installation_epoch relies on there being at least one realm; we
|
||||
# shouldn't run the analytics code if that condition isn't satisfied
|
||||
if not Realm.objects.exists():
|
||||
logger.info("No realms, stopping update_analytics_counts")
|
||||
return
|
||||
|
||||
fill_to_time = parse_datetime(options["time"])
|
||||
assert fill_to_time is not None
|
||||
if options["utc"]:
|
||||
fill_to_time = fill_to_time.replace(tzinfo=timezone.utc)
|
||||
fill_to_time = parse_datetime(options['time'])
|
||||
if options['utc']:
|
||||
fill_to_time = fill_to_time.replace(tzinfo=timezone_utc)
|
||||
if fill_to_time.tzinfo is None:
|
||||
raise ValueError(
|
||||
"--time must be time-zone-aware. Maybe you meant to use the --utc option?"
|
||||
)
|
||||
raise ValueError("--time must be timezone aware. Maybe you meant to use the --utc option?")
|
||||
|
||||
fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc))
|
||||
fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone_utc))
|
||||
|
||||
if options["stat"] is not None:
|
||||
stats = [ALL_COUNT_STATS[options["stat"]]]
|
||||
if options['stat'] is not None:
|
||||
stats = [COUNT_STATS[options['stat']]]
|
||||
else:
|
||||
stats = list(ALL_COUNT_STATS.values())
|
||||
stats = list(COUNT_STATS.values())
|
||||
|
||||
logger.info("Starting updating analytics counts through %s", fill_to_time)
|
||||
if options["verbose"]:
|
||||
logger.info("Starting updating analytics counts through %s" % (fill_to_time,))
|
||||
if options['verbose']:
|
||||
start = time.time()
|
||||
last = start
|
||||
|
||||
for stat in stats:
|
||||
process_count_stat(stat, fill_to_time)
|
||||
if options["verbose"]:
|
||||
print(f"Updated {stat.property} in {time.time() - last:.3f}s")
|
||||
if options['verbose']:
|
||||
print("Updated %s in %.3fs" % (stat.property, time.time() - last))
|
||||
last = time.time()
|
||||
|
||||
if options["verbose"]:
|
||||
print(
|
||||
f"Finished updating analytics counts through {fill_to_time} in {time.time() - start:.3f}s"
|
||||
)
|
||||
logger.info("Finished updating analytics counts through %s", fill_to_time)
|
||||
|
||||
if should_send_analytics_data():
|
||||
# Based on the specific value of the setting, the exact details to send
|
||||
# will be decided. However, we proceed just based on this not being falsey.
|
||||
|
||||
# Skew 0-10 minutes based on a hash of settings.ZULIP_ORG_ID, so
|
||||
# that each server will report in at a somewhat consistent time.
|
||||
assert settings.ZULIP_ORG_ID
|
||||
delay = int.from_bytes(
|
||||
hashlib.sha256(settings.ZULIP_ORG_ID.encode()).digest(), byteorder="big"
|
||||
) % (60 * 10)
|
||||
logger.info("Sleeping %d seconds before reporting...", delay)
|
||||
time.sleep(delay)
|
||||
|
||||
send_server_data_to_push_bouncer(consider_usage_statistics=True, raise_on_error=True)
|
||||
if options['verbose']:
|
||||
print("Finished updating analytics counts through %s in %.3fs" %
|
||||
(fill_to_time, time.time() - start))
|
||||
logger.info("Finished updating analytics counts through %s" % (fill_to_time,))
|
||||
|
46
analytics/management/commands/user_stats.py
Normal file
46
analytics/management/commands/user_stats.py
Normal file
@@ -0,0 +1,46 @@
|
||||
from argparse import ArgumentParser
|
||||
import datetime
|
||||
import pytz
|
||||
from typing import Any
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils.timezone import now as timezone_now
|
||||
|
||||
from zerver.models import UserProfile, Realm, Stream, Message, get_realm
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Generate statistics on user activity."
|
||||
|
||||
def add_arguments(self, parser):
|
||||
# type: (ArgumentParser) -> None
|
||||
parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
|
||||
help="realm to generate statistics for")
|
||||
|
||||
def messages_sent_by(self, user, week):
|
||||
# type: (UserProfile, int) -> int
|
||||
start = timezone_now() - datetime.timedelta(days=(week + 1)*7)
|
||||
end = timezone_now() - datetime.timedelta(days=week*7)
|
||||
return Message.objects.filter(sender=user, pub_date__gt=start, pub_date__lte=end).count()
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# type: (*Any, **Any) -> None
|
||||
if options['realms']:
|
||||
try:
|
||||
realms = [get_realm(string_id) for string_id in options['realms']]
|
||||
except Realm.DoesNotExist as e:
|
||||
print(e)
|
||||
exit(1)
|
||||
else:
|
||||
realms = Realm.objects.all()
|
||||
|
||||
for realm in realms:
|
||||
print(realm.string_id)
|
||||
user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
|
||||
print("%d users" % (len(user_profiles),))
|
||||
print("%d streams" % (len(Stream.objects.filter(realm=realm)),))
|
||||
|
||||
for user_profile in user_profiles:
|
||||
print("%35s" % (user_profile.email,), end=' ')
|
||||
for week in range(10):
|
||||
print("%5d" % (self.messages_sent_by(user_profile, week)), end=' ')
|
||||
print("")
|
@@ -1,208 +1,112 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import models, migrations
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import zerver.lib.str_utils
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("zerver", "0030_realm_org_type"),
|
||||
('zerver', '0030_realm_org_type'),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Anomaly",
|
||||
name='Anomaly',
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
("info", models.CharField(max_length=1000)),
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('info', models.CharField(max_length=1000)),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="HuddleCount",
|
||||
name='HuddleCount',
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"huddle",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient"
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=40)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("interval", models.CharField(max_length=20)),
|
||||
("value", models.BigIntegerField()),
|
||||
(
|
||||
"anomaly",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="analytics.Anomaly",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('huddle', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Recipient')),
|
||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||
('property', models.CharField(max_length=40)),
|
||||
('end_time', models.DateTimeField()),
|
||||
('interval', models.CharField(max_length=20)),
|
||||
('value', models.BigIntegerField()),
|
||||
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="InstallationCount",
|
||||
name='InstallationCount',
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=40)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("interval", models.CharField(max_length=20)),
|
||||
("value", models.BigIntegerField()),
|
||||
(
|
||||
"anomaly",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="analytics.Anomaly",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('property', models.CharField(max_length=40)),
|
||||
('end_time', models.DateTimeField()),
|
||||
('interval', models.CharField(max_length=20)),
|
||||
('value', models.BigIntegerField()),
|
||||
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="RealmCount",
|
||||
name='RealmCount',
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=40)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("interval", models.CharField(max_length=20)),
|
||||
("value", models.BigIntegerField()),
|
||||
(
|
||||
"anomaly",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="analytics.Anomaly",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
|
||||
('property', models.CharField(max_length=40)),
|
||||
('end_time', models.DateTimeField()),
|
||||
('interval', models.CharField(max_length=20)),
|
||||
('value', models.BigIntegerField()),
|
||||
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
|
||||
|
||||
],
|
||||
bases=(models.Model,),
|
||||
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="StreamCount",
|
||||
name='StreamCount',
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
|
||||
),
|
||||
),
|
||||
(
|
||||
"stream",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Stream"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=40)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("interval", models.CharField(max_length=20)),
|
||||
("value", models.BigIntegerField()),
|
||||
(
|
||||
"anomaly",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="analytics.Anomaly",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
|
||||
('stream', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Stream')),
|
||||
('property', models.CharField(max_length=40)),
|
||||
('end_time', models.DateTimeField()),
|
||||
('interval', models.CharField(max_length=20)),
|
||||
('value', models.BigIntegerField()),
|
||||
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="UserCount",
|
||||
name='UserCount',
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=40)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("interval", models.CharField(max_length=20)),
|
||||
("value", models.BigIntegerField()),
|
||||
(
|
||||
"anomaly",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="analytics.Anomaly",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('realm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='zerver.Realm')),
|
||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
|
||||
('property', models.CharField(max_length=40)),
|
||||
('end_time', models.DateTimeField()),
|
||||
('interval', models.CharField(max_length=20)),
|
||||
('value', models.BigIntegerField()),
|
||||
('anomaly', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='analytics.Anomaly', null=True)),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="usercount",
|
||||
unique_together={("user", "property", "end_time", "interval")},
|
||||
name='usercount',
|
||||
unique_together=set([('user', 'property', 'end_time', 'interval')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="streamcount",
|
||||
unique_together={("stream", "property", "end_time", "interval")},
|
||||
name='streamcount',
|
||||
unique_together=set([('stream', 'property', 'end_time', 'interval')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="realmcount",
|
||||
unique_together={("realm", "property", "end_time", "interval")},
|
||||
name='realmcount',
|
||||
unique_together=set([('realm', 'property', 'end_time', 'interval')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="installationcount",
|
||||
unique_together={("property", "end_time", "interval")},
|
||||
name='installationcount',
|
||||
unique_together=set([('property', 'end_time', 'interval')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="huddlecount",
|
||||
unique_together={("huddle", "property", "end_time", "interval")},
|
||||
name='huddlecount',
|
||||
unique_together=set([('huddle', 'property', 'end_time', 'interval')]),
|
||||
),
|
||||
]
|
||||
|
@@ -1,224 +0,0 @@
|
||||
# Generated by Django 5.0.7 on 2024-08-13 20:16
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
replaces = [
|
||||
("analytics", "0001_initial"),
|
||||
("analytics", "0002_remove_huddlecount"),
|
||||
("analytics", "0003_fillstate"),
|
||||
("analytics", "0004_add_subgroup"),
|
||||
("analytics", "0005_alter_field_size"),
|
||||
("analytics", "0006_add_subgroup_to_unique_constraints"),
|
||||
("analytics", "0007_remove_interval"),
|
||||
("analytics", "0008_add_count_indexes"),
|
||||
("analytics", "0009_remove_messages_to_stream_stat"),
|
||||
("analytics", "0010_clear_messages_sent_values"),
|
||||
("analytics", "0011_clear_analytics_tables"),
|
||||
("analytics", "0012_add_on_delete"),
|
||||
("analytics", "0013_remove_anomaly"),
|
||||
("analytics", "0014_remove_fillstate_last_modified"),
|
||||
("analytics", "0015_clear_duplicate_counts"),
|
||||
("analytics", "0016_unique_constraint_when_subgroup_null"),
|
||||
("analytics", "0017_regenerate_partial_indexes"),
|
||||
("analytics", "0018_remove_usercount_active_users_audit"),
|
||||
("analytics", "0019_remove_unused_counts"),
|
||||
("analytics", "0020_alter_installationcount_id_alter_realmcount_id_and_more"),
|
||||
("analytics", "0021_alter_fillstate_id"),
|
||||
]
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
# Needed for foreign keys to core models like Realm.
|
||||
("zerver", "0001_initial"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="InstallationCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=32)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("value", models.BigIntegerField()),
|
||||
("subgroup", models.CharField(max_length=16, null=True)),
|
||||
],
|
||||
options={
|
||||
"unique_together": set(),
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", False)),
|
||||
fields=("property", "subgroup", "end_time"),
|
||||
name="unique_installation_count",
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", True)),
|
||||
fields=("property", "end_time"),
|
||||
name="unique_installation_count_null_subgroup",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="RealmCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.realm"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=32)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("value", models.BigIntegerField()),
|
||||
("subgroup", models.CharField(max_length=16, null=True)),
|
||||
],
|
||||
options={
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["property", "end_time"],
|
||||
name="analytics_realmcount_property_end_time_3b60396b_idx",
|
||||
)
|
||||
],
|
||||
"unique_together": set(),
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", False)),
|
||||
fields=("realm", "property", "subgroup", "end_time"),
|
||||
name="unique_realm_count",
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", True)),
|
||||
fields=("realm", "property", "end_time"),
|
||||
name="unique_realm_count_null_subgroup",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="StreamCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.realm"
|
||||
),
|
||||
),
|
||||
(
|
||||
"stream",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.stream"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=32)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("value", models.BigIntegerField()),
|
||||
("subgroup", models.CharField(max_length=16, null=True)),
|
||||
],
|
||||
options={
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_streamcount_property_realm_id_end_time_155ae930_idx",
|
||||
)
|
||||
],
|
||||
"unique_together": set(),
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", False)),
|
||||
fields=("stream", "property", "subgroup", "end_time"),
|
||||
name="unique_stream_count",
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", True)),
|
||||
fields=("stream", "property", "end_time"),
|
||||
name="unique_stream_count_null_subgroup",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="UserCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.realm"
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=32)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("value", models.BigIntegerField()),
|
||||
("subgroup", models.CharField(max_length=16, null=True)),
|
||||
],
|
||||
options={
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_usercount_property_realm_id_end_time_591dbec1_idx",
|
||||
)
|
||||
],
|
||||
"unique_together": set(),
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", False)),
|
||||
fields=("user", "property", "subgroup", "end_time"),
|
||||
name="unique_user_count",
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", True)),
|
||||
fields=("user", "property", "end_time"),
|
||||
name="unique_user_count_null_subgroup",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="FillState",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=40, unique=True)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("state", models.PositiveSmallIntegerField()),
|
||||
],
|
||||
),
|
||||
]
|
@@ -1,29 +1,31 @@
|
||||
from django.db import migrations
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0001_initial"),
|
||||
('analytics', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="huddlecount",
|
||||
unique_together=set(),
|
||||
name='huddlecount',
|
||||
unique_together=set([]),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="huddlecount",
|
||||
name="anomaly",
|
||||
model_name='huddlecount',
|
||||
name='anomaly',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="huddlecount",
|
||||
name="huddle",
|
||||
model_name='huddlecount',
|
||||
name='huddle',
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="huddlecount",
|
||||
name="user",
|
||||
model_name='huddlecount',
|
||||
name='user',
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name="HuddleCount",
|
||||
name='HuddleCount',
|
||||
),
|
||||
]
|
||||
|
@@ -1,26 +1,24 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import migrations, models
|
||||
import zerver.lib.str_utils
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0002_remove_huddlecount"),
|
||||
('analytics', '0002_remove_huddlecount'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="FillState",
|
||||
name='FillState',
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
("property", models.CharField(unique=True, max_length=40)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("state", models.PositiveSmallIntegerField()),
|
||||
("last_modified", models.DateTimeField(auto_now=True)),
|
||||
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||
('property', models.CharField(unique=True, max_length=40)),
|
||||
('end_time', models.DateTimeField()),
|
||||
('state', models.PositiveSmallIntegerField()),
|
||||
('last_modified', models.DateTimeField(auto_now=True)),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||
),
|
||||
]
|
||||
|
@@ -1,30 +1,32 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0003_fillstate"),
|
||||
('analytics', '0003_fillstate'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="installationcount",
|
||||
name="subgroup",
|
||||
model_name='installationcount',
|
||||
name='subgroup',
|
||||
field=models.CharField(max_length=16, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="realmcount",
|
||||
name="subgroup",
|
||||
model_name='realmcount',
|
||||
name='subgroup',
|
||||
field=models.CharField(max_length=16, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="streamcount",
|
||||
name="subgroup",
|
||||
model_name='streamcount',
|
||||
name='subgroup',
|
||||
field=models.CharField(max_length=16, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="usercount",
|
||||
name="subgroup",
|
||||
model_name='usercount',
|
||||
name='subgroup',
|
||||
field=models.CharField(max_length=16, null=True),
|
||||
),
|
||||
]
|
||||
|
@@ -1,50 +1,52 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0004_add_subgroup"),
|
||||
('analytics', '0004_add_subgroup'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="installationcount",
|
||||
name="interval",
|
||||
model_name='installationcount',
|
||||
name='interval',
|
||||
field=models.CharField(max_length=8),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="installationcount",
|
||||
name="property",
|
||||
model_name='installationcount',
|
||||
name='property',
|
||||
field=models.CharField(max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="realmcount",
|
||||
name="interval",
|
||||
model_name='realmcount',
|
||||
name='interval',
|
||||
field=models.CharField(max_length=8),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="realmcount",
|
||||
name="property",
|
||||
model_name='realmcount',
|
||||
name='property',
|
||||
field=models.CharField(max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="streamcount",
|
||||
name="interval",
|
||||
model_name='streamcount',
|
||||
name='interval',
|
||||
field=models.CharField(max_length=8),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="streamcount",
|
||||
name="property",
|
||||
model_name='streamcount',
|
||||
name='property',
|
||||
field=models.CharField(max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="usercount",
|
||||
name="interval",
|
||||
model_name='usercount',
|
||||
name='interval',
|
||||
field=models.CharField(max_length=8),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="usercount",
|
||||
name="property",
|
||||
model_name='usercount',
|
||||
name='property',
|
||||
field=models.CharField(max_length=32),
|
||||
),
|
||||
]
|
||||
|
@@ -1,26 +1,28 @@
|
||||
from django.db import migrations
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0005_alter_field_size"),
|
||||
('analytics', '0005_alter_field_size'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="installationcount",
|
||||
unique_together={("property", "subgroup", "end_time", "interval")},
|
||||
name='installationcount',
|
||||
unique_together=set([('property', 'subgroup', 'end_time', 'interval')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="realmcount",
|
||||
unique_together={("realm", "property", "subgroup", "end_time", "interval")},
|
||||
name='realmcount',
|
||||
unique_together=set([('realm', 'property', 'subgroup', 'end_time', 'interval')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="streamcount",
|
||||
unique_together={("stream", "property", "subgroup", "end_time", "interval")},
|
||||
name='streamcount',
|
||||
unique_together=set([('stream', 'property', 'subgroup', 'end_time', 'interval')]),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="usercount",
|
||||
unique_together={("user", "property", "subgroup", "end_time", "interval")},
|
||||
name='usercount',
|
||||
unique_together=set([('user', 'property', 'subgroup', 'end_time', 'interval')]),
|
||||
),
|
||||
]
|
||||
|
@@ -1,43 +1,46 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.10.4 on 2017-01-16 20:50
|
||||
from django.conf import settings
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0006_add_subgroup_to_unique_constraints"),
|
||||
('analytics', '0006_add_subgroup_to_unique_constraints'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="installationcount",
|
||||
unique_together={("property", "subgroup", "end_time")},
|
||||
name='installationcount',
|
||||
unique_together=set([('property', 'subgroup', 'end_time')]),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="installationcount",
|
||||
name="interval",
|
||||
model_name='installationcount',
|
||||
name='interval',
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="realmcount",
|
||||
unique_together={("realm", "property", "subgroup", "end_time")},
|
||||
name='realmcount',
|
||||
unique_together=set([('realm', 'property', 'subgroup', 'end_time')]),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="realmcount",
|
||||
name="interval",
|
||||
model_name='realmcount',
|
||||
name='interval',
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="streamcount",
|
||||
unique_together={("stream", "property", "subgroup", "end_time")},
|
||||
name='streamcount',
|
||||
unique_together=set([('stream', 'property', 'subgroup', 'end_time')]),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="streamcount",
|
||||
name="interval",
|
||||
model_name='streamcount',
|
||||
name='interval',
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="usercount",
|
||||
unique_together={("user", "property", "subgroup", "end_time")},
|
||||
name='usercount',
|
||||
unique_together=set([('user', 'property', 'subgroup', 'end_time')]),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="usercount",
|
||||
name="interval",
|
||||
model_name='usercount',
|
||||
name='interval',
|
||||
),
|
||||
]
|
||||
|
@@ -1,33 +1,26 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by Django 1.10.5 on 2017-02-01 22:28
|
||||
from django.db import migrations, models
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("zerver", "0050_userprofile_avatar_version"),
|
||||
("analytics", "0007_remove_interval"),
|
||||
('zerver', '0050_userprofile_avatar_version'),
|
||||
('analytics', '0007_remove_interval'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="realmcount",
|
||||
index=models.Index(
|
||||
fields=["property", "end_time"],
|
||||
name="analytics_realmcount_property_end_time_3b60396b_idx",
|
||||
),
|
||||
migrations.AlterIndexTogether(
|
||||
name='realmcount',
|
||||
index_together=set([('property', 'end_time')]),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="streamcount",
|
||||
index=models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_streamcount_property_realm_id_end_time_155ae930_idx",
|
||||
),
|
||||
migrations.AlterIndexTogether(
|
||||
name='streamcount',
|
||||
index_together=set([('property', 'realm', 'end_time')]),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="usercount",
|
||||
index=models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_usercount_property_realm_id_end_time_591dbec1_idx",
|
||||
),
|
||||
migrations.AlterIndexTogether(
|
||||
name='usercount',
|
||||
index_together=set([('property', 'realm', 'end_time')]),
|
||||
),
|
||||
]
|
||||
|
@@ -1,30 +1,30 @@
|
||||
from django.db import migrations
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
|
||||
from django.db.migrations.state import StateApps
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def delete_messages_sent_to_stream_stat(
|
||||
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
|
||||
) -> None:
|
||||
UserCount = apps.get_model("analytics", "UserCount")
|
||||
StreamCount = apps.get_model("analytics", "StreamCount")
|
||||
RealmCount = apps.get_model("analytics", "RealmCount")
|
||||
InstallationCount = apps.get_model("analytics", "InstallationCount")
|
||||
FillState = apps.get_model("analytics", "FillState")
|
||||
def delete_messages_sent_to_stream_stat(apps, schema_editor):
|
||||
# type: (StateApps, DatabaseSchemaEditor) -> None
|
||||
UserCount = apps.get_model('analytics', 'UserCount')
|
||||
StreamCount = apps.get_model('analytics', 'StreamCount')
|
||||
RealmCount = apps.get_model('analytics', 'RealmCount')
|
||||
InstallationCount = apps.get_model('analytics', 'InstallationCount')
|
||||
FillState = apps.get_model('analytics', 'FillState')
|
||||
|
||||
property = "messages_sent_to_stream:is_bot"
|
||||
property = 'messages_sent_to_stream:is_bot'
|
||||
UserCount.objects.filter(property=property).delete()
|
||||
StreamCount.objects.filter(property=property).delete()
|
||||
RealmCount.objects.filter(property=property).delete()
|
||||
InstallationCount.objects.filter(property=property).delete()
|
||||
FillState.objects.filter(property=property).delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0008_add_count_indexes"),
|
||||
('analytics', '0008_add_count_indexes'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(delete_messages_sent_to_stream_stat, elidable=True),
|
||||
migrations.RunPython(delete_messages_sent_to_stream_stat),
|
||||
]
|
||||
|
@@ -1,28 +1,27 @@
|
||||
from django.db import migrations
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
|
||||
from django.db.migrations.state import StateApps
|
||||
from django.db import migrations
|
||||
|
||||
def clear_message_sent_by_message_type_values(apps, schema_editor):
|
||||
# type: (StateApps, DatabaseSchemaEditor) -> None
|
||||
UserCount = apps.get_model('analytics', 'UserCount')
|
||||
StreamCount = apps.get_model('analytics', 'StreamCount')
|
||||
RealmCount = apps.get_model('analytics', 'RealmCount')
|
||||
InstallationCount = apps.get_model('analytics', 'InstallationCount')
|
||||
FillState = apps.get_model('analytics', 'FillState')
|
||||
|
||||
def clear_message_sent_by_message_type_values(
|
||||
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
|
||||
) -> None:
|
||||
UserCount = apps.get_model("analytics", "UserCount")
|
||||
StreamCount = apps.get_model("analytics", "StreamCount")
|
||||
RealmCount = apps.get_model("analytics", "RealmCount")
|
||||
InstallationCount = apps.get_model("analytics", "InstallationCount")
|
||||
FillState = apps.get_model("analytics", "FillState")
|
||||
|
||||
property = "messages_sent:message_type:day"
|
||||
property = 'messages_sent:message_type:day'
|
||||
UserCount.objects.filter(property=property).delete()
|
||||
StreamCount.objects.filter(property=property).delete()
|
||||
RealmCount.objects.filter(property=property).delete()
|
||||
InstallationCount.objects.filter(property=property).delete()
|
||||
FillState.objects.filter(property=property).delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [("analytics", "0009_remove_messages_to_stream_stat")]
|
||||
|
||||
dependencies = [('analytics', '0009_remove_messages_to_stream_stat')]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(clear_message_sent_by_message_type_values, elidable=True),
|
||||
migrations.RunPython(clear_message_sent_by_message_type_values),
|
||||
]
|
||||
|
@@ -1,14 +1,16 @@
|
||||
from django.db import migrations
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
# -*- coding: utf-8 -*-
|
||||
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
|
||||
from django.db.migrations.state import StateApps
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def clear_analytics_tables(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
|
||||
UserCount = apps.get_model("analytics", "UserCount")
|
||||
StreamCount = apps.get_model("analytics", "StreamCount")
|
||||
RealmCount = apps.get_model("analytics", "RealmCount")
|
||||
InstallationCount = apps.get_model("analytics", "InstallationCount")
|
||||
FillState = apps.get_model("analytics", "FillState")
|
||||
def clear_analytics_tables(apps, schema_editor):
|
||||
# type: (StateApps, DatabaseSchemaEditor) -> None
|
||||
UserCount = apps.get_model('analytics', 'UserCount')
|
||||
StreamCount = apps.get_model('analytics', 'StreamCount')
|
||||
RealmCount = apps.get_model('analytics', 'RealmCount')
|
||||
InstallationCount = apps.get_model('analytics', 'InstallationCount')
|
||||
FillState = apps.get_model('analytics', 'FillState')
|
||||
|
||||
UserCount.objects.all().delete()
|
||||
StreamCount.objects.all().delete()
|
||||
@@ -16,12 +18,12 @@ def clear_analytics_tables(apps: StateApps, schema_editor: BaseDatabaseSchemaEdi
|
||||
InstallationCount.objects.all().delete()
|
||||
FillState.objects.all().delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0010_clear_messages_sent_values"),
|
||||
('analytics', '0010_clear_messages_sent_values'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(clear_analytics_tables, elidable=True),
|
||||
migrations.RunPython(clear_analytics_tables),
|
||||
]
|
||||
|
@@ -1,41 +0,0 @@
|
||||
# Generated by Django 1.11.6 on 2018-01-29 08:14
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0011_clear_analytics_tables"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="installationcount",
|
||||
name="anomaly",
|
||||
field=models.ForeignKey(
|
||||
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="realmcount",
|
||||
name="anomaly",
|
||||
field=models.ForeignKey(
|
||||
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="streamcount",
|
||||
name="anomaly",
|
||||
field=models.ForeignKey(
|
||||
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="usercount",
|
||||
name="anomaly",
|
||||
field=models.ForeignKey(
|
||||
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
|
||||
),
|
||||
),
|
||||
]
|
@@ -1,31 +0,0 @@
|
||||
# Generated by Django 1.11.18 on 2019-02-02 02:47
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0012_add_on_delete"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="installationcount",
|
||||
name="anomaly",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="realmcount",
|
||||
name="anomaly",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="streamcount",
|
||||
name="anomaly",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="usercount",
|
||||
name="anomaly",
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name="Anomaly",
|
||||
),
|
||||
]
|
@@ -1,16 +0,0 @@
|
||||
# Generated by Django 1.11.26 on 2020-01-27 04:32
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0013_remove_anomaly"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="fillstate",
|
||||
name="last_modified",
|
||||
),
|
||||
]
|
@@ -1,66 +0,0 @@
|
||||
from django.db import migrations
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.migrations.state import StateApps
|
||||
from django.db.models import Count, Sum
|
||||
|
||||
|
||||
def clear_duplicate_counts(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
|
||||
"""This is a preparatory migration for our Analytics tables.
|
||||
|
||||
The backstory is that Django's unique_together indexes do not properly
|
||||
handle the subgroup=None corner case (allowing duplicate rows that have a
|
||||
subgroup of None), which meant that in race conditions, rather than updating
|
||||
an existing row for the property/(realm, stream, user)/time with subgroup=None, Django would
|
||||
create a duplicate row.
|
||||
|
||||
In the next migration, we'll add a proper constraint to fix this bug, but
|
||||
we need to fix any existing problematic rows before we can add that constraint.
|
||||
|
||||
We fix this in an appropriate fashion for each type of CountStat object; mainly
|
||||
this means deleting the extra rows, but for LoggingCountStat objects, we need to
|
||||
additionally combine the sums.
|
||||
"""
|
||||
count_tables = dict(
|
||||
realm=apps.get_model("analytics", "RealmCount"),
|
||||
user=apps.get_model("analytics", "UserCount"),
|
||||
stream=apps.get_model("analytics", "StreamCount"),
|
||||
installation=apps.get_model("analytics", "InstallationCount"),
|
||||
)
|
||||
|
||||
for name, count_table in count_tables.items():
|
||||
value = [name, "property", "end_time"]
|
||||
if name == "installation":
|
||||
value = ["property", "end_time"]
|
||||
counts = (
|
||||
count_table.objects.filter(subgroup=None)
|
||||
.values(*value)
|
||||
.annotate(Count("id"), Sum("value"))
|
||||
.filter(id__count__gt=1)
|
||||
)
|
||||
|
||||
for count in counts:
|
||||
count.pop("id__count")
|
||||
total_value = count.pop("value__sum")
|
||||
duplicate_counts = list(count_table.objects.filter(**count))
|
||||
first_count = duplicate_counts[0]
|
||||
if count["property"] in ["invites_sent::day", "active_users_log:is_bot:day"]:
|
||||
# For LoggingCountStat objects, the right fix is to combine the totals;
|
||||
# for other CountStat objects, we expect the duplicates to have the same value.
|
||||
# And so all we need to do is delete them.
|
||||
first_count.value = total_value
|
||||
first_count.save()
|
||||
to_cleanup = duplicate_counts[1:]
|
||||
for duplicate_count in to_cleanup:
|
||||
duplicate_count.delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0014_remove_fillstate_last_modified"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
clear_duplicate_counts, reverse_code=migrations.RunPython.noop, elidable=True
|
||||
),
|
||||
]
|
@@ -1,92 +0,0 @@
|
||||
# Generated by Django 2.2.10 on 2020-02-29 19:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0015_clear_duplicate_counts"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="installationcount",
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="realmcount",
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="streamcount",
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="usercount",
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="installationcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("property", "subgroup", "end_time"),
|
||||
name="unique_installation_count",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="installationcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("property", "end_time"),
|
||||
name="unique_installation_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="realmcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("realm", "property", "subgroup", "end_time"),
|
||||
name="unique_realm_count",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="realmcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("realm", "property", "end_time"),
|
||||
name="unique_realm_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="streamcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("stream", "property", "subgroup", "end_time"),
|
||||
name="unique_stream_count",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="streamcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("stream", "property", "end_time"),
|
||||
name="unique_stream_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="usercount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("user", "property", "subgroup", "end_time"),
|
||||
name="unique_user_count",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="usercount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("user", "property", "end_time"),
|
||||
name="unique_user_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
]
|
@@ -1,114 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0016_unique_constraint_when_subgroup_null"),
|
||||
]
|
||||
|
||||
# If the server was installed between 7.0 and 7.4 (or main between
|
||||
# 2c20028aa451 and 7807bff52635), it contains indexes which (when
|
||||
# running 7.5 or 7807bff52635 or higher) are never used, because
|
||||
# they contain an improper cast
|
||||
# (https://code.djangoproject.com/ticket/34840).
|
||||
#
|
||||
# We regenerate the indexes here, by dropping and re-creating
|
||||
# them, so that we know that they are properly formed.
|
||||
operations = [
|
||||
migrations.RemoveConstraint(
|
||||
model_name="installationcount",
|
||||
name="unique_installation_count",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="installationcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("property", "subgroup", "end_time"),
|
||||
name="unique_installation_count",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="installationcount",
|
||||
name="unique_installation_count_null_subgroup",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="installationcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("property", "end_time"),
|
||||
name="unique_installation_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="realmcount",
|
||||
name="unique_realm_count",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="realmcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("realm", "property", "subgroup", "end_time"),
|
||||
name="unique_realm_count",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="realmcount",
|
||||
name="unique_realm_count_null_subgroup",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="realmcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("realm", "property", "end_time"),
|
||||
name="unique_realm_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="streamcount",
|
||||
name="unique_stream_count",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="streamcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("stream", "property", "subgroup", "end_time"),
|
||||
name="unique_stream_count",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="streamcount",
|
||||
name="unique_stream_count_null_subgroup",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="streamcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("stream", "property", "end_time"),
|
||||
name="unique_stream_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="usercount",
|
||||
name="unique_user_count",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="usercount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("user", "property", "subgroup", "end_time"),
|
||||
name="unique_user_count",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="usercount",
|
||||
name="unique_user_count_null_subgroup",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="usercount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("user", "property", "end_time"),
|
||||
name="unique_user_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
]
|
@@ -1,16 +0,0 @@
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
elidable = True
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0017_regenerate_partial_indexes"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunSQL(
|
||||
"DELETE FROM analytics_usercount WHERE property = 'active_users_audit:is_bot:day'",
|
||||
elidable=True,
|
||||
)
|
||||
]
|
@@ -1,27 +0,0 @@
|
||||
from django.db import migrations
|
||||
|
||||
REMOVED_COUNTS = (
|
||||
"active_users_log:is_bot:day",
|
||||
"active_users:is_bot:day",
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
elidable = True
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0018_remove_usercount_active_users_audit"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunSQL(
|
||||
[
|
||||
("DELETE FROM analytics_realmcount WHERE property IN %s", (REMOVED_COUNTS,)),
|
||||
(
|
||||
"DELETE FROM analytics_installationcount WHERE property IN %s",
|
||||
(REMOVED_COUNTS,),
|
||||
),
|
||||
],
|
||||
elidable=True,
|
||||
)
|
||||
]
|
@@ -1,40 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0019_remove_unused_counts"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="installationcount",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="realmcount",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="streamcount",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="usercount",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
]
|
@@ -1,17 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0020_alter_installationcount_id_alter_realmcount_id_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="fillstate",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
]
|
@@ -1,158 +1,109 @@
|
||||
from datetime import datetime
|
||||
|
||||
from django.db import models
|
||||
from django.db.models import Q, UniqueConstraint
|
||||
from typing_extensions import override
|
||||
|
||||
from zerver.models import Realm, UserProfile, Stream, Recipient
|
||||
from zerver.lib.str_utils import ModelReprMixin
|
||||
from zerver.lib.timestamp import floor_to_day
|
||||
from zerver.models import Realm, Stream, UserProfile
|
||||
|
||||
import datetime
|
||||
|
||||
class FillState(models.Model):
|
||||
property = models.CharField(max_length=40, unique=True)
|
||||
end_time = models.DateTimeField()
|
||||
from typing import Optional, Tuple, Union, Dict, Any, Text
|
||||
|
||||
class FillState(ModelReprMixin, models.Model):
|
||||
property = models.CharField(max_length=40, unique=True) # type: Text
|
||||
end_time = models.DateTimeField() # type: datetime.datetime
|
||||
|
||||
# Valid states are {DONE, STARTED}
|
||||
DONE = 1
|
||||
STARTED = 2
|
||||
state = models.PositiveSmallIntegerField()
|
||||
state = models.PositiveSmallIntegerField() # type: int
|
||||
|
||||
@override
|
||||
def __str__(self) -> str:
|
||||
return f"{self.property} {self.end_time} {self.state}"
|
||||
last_modified = models.DateTimeField(auto_now=True) # type: datetime.datetime
|
||||
|
||||
def __unicode__(self):
|
||||
# type: () -> Text
|
||||
return u"<FillState: %s %s %s>" % (self.property, self.end_time, self.state)
|
||||
|
||||
# The earliest/starting end_time in FillState
|
||||
# We assume there is at least one realm
|
||||
def installation_epoch() -> datetime:
|
||||
earliest_realm_creation = Realm.objects.aggregate(models.Min("date_created"))[
|
||||
"date_created__min"
|
||||
]
|
||||
def installation_epoch():
|
||||
# type: () -> datetime.datetime
|
||||
earliest_realm_creation = Realm.objects.aggregate(models.Min('date_created'))['date_created__min']
|
||||
return floor_to_day(earliest_realm_creation)
|
||||
|
||||
def last_successful_fill(property):
|
||||
# type: (str) -> Optional[datetime.datetime]
|
||||
fillstate = FillState.objects.filter(property=property).first()
|
||||
if fillstate is None:
|
||||
return None
|
||||
if fillstate.state == FillState.DONE:
|
||||
return fillstate.end_time
|
||||
return fillstate.end_time - datetime.timedelta(hours=1)
|
||||
|
||||
class BaseCount(models.Model):
|
||||
# would only ever make entries here by hand
|
||||
class Anomaly(ModelReprMixin, models.Model):
|
||||
info = models.CharField(max_length=1000) # type: Text
|
||||
|
||||
def __unicode__(self):
|
||||
# type: () -> Text
|
||||
return u"<Anomaly: %s... %s>" % (self.info, self.id)
|
||||
|
||||
class BaseCount(ModelReprMixin, models.Model):
|
||||
# Note: When inheriting from BaseCount, you may want to rearrange
|
||||
# the order of the columns in the migration to make sure they
|
||||
# match how you'd like the table to be arranged.
|
||||
property = models.CharField(max_length=32)
|
||||
subgroup = models.CharField(max_length=16, null=True)
|
||||
end_time = models.DateTimeField()
|
||||
value = models.BigIntegerField()
|
||||
property = models.CharField(max_length=32) # type: Text
|
||||
subgroup = models.CharField(max_length=16, null=True) # type: Optional[Text]
|
||||
end_time = models.DateTimeField() # type: datetime.datetime
|
||||
value = models.BigIntegerField() # type: int
|
||||
anomaly = models.ForeignKey(Anomaly, null=True) # type: Optional[Anomaly]
|
||||
|
||||
class Meta:
|
||||
class Meta(object):
|
||||
abstract = True
|
||||
|
||||
|
||||
class InstallationCount(BaseCount):
|
||||
class Meta:
|
||||
# Handles invalid duplicate InstallationCount data
|
||||
constraints = [
|
||||
UniqueConstraint(
|
||||
fields=["property", "subgroup", "end_time"],
|
||||
condition=Q(subgroup__isnull=False),
|
||||
name="unique_installation_count",
|
||||
),
|
||||
UniqueConstraint(
|
||||
fields=["property", "end_time"],
|
||||
condition=Q(subgroup__isnull=True),
|
||||
name="unique_installation_count_null_subgroup",
|
||||
),
|
||||
]
|
||||
|
||||
@override
|
||||
def __str__(self) -> str:
|
||||
return f"{self.property} {self.subgroup} {self.value}"
|
||||
class Meta(object):
|
||||
unique_together = ("property", "subgroup", "end_time")
|
||||
|
||||
def __unicode__(self):
|
||||
# type: () -> Text
|
||||
return u"<InstallationCount: %s %s %s>" % (self.property, self.subgroup, self.value)
|
||||
|
||||
class RealmCount(BaseCount):
|
||||
realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
|
||||
realm = models.ForeignKey(Realm)
|
||||
|
||||
class Meta:
|
||||
# Handles invalid duplicate RealmCount data
|
||||
constraints = [
|
||||
UniqueConstraint(
|
||||
fields=["realm", "property", "subgroup", "end_time"],
|
||||
condition=Q(subgroup__isnull=False),
|
||||
name="unique_realm_count",
|
||||
),
|
||||
UniqueConstraint(
|
||||
fields=["realm", "property", "end_time"],
|
||||
condition=Q(subgroup__isnull=True),
|
||||
name="unique_realm_count_null_subgroup",
|
||||
),
|
||||
]
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["property", "end_time"],
|
||||
name="analytics_realmcount_property_end_time_3b60396b_idx",
|
||||
)
|
||||
]
|
||||
|
||||
@override
|
||||
def __str__(self) -> str:
|
||||
return f"{self.realm!r} {self.property} {self.subgroup} {self.value}"
|
||||
class Meta(object):
|
||||
unique_together = ("realm", "property", "subgroup", "end_time")
|
||||
index_together = ["property", "end_time"]
|
||||
|
||||
def __unicode__(self):
|
||||
# type: () -> Text
|
||||
return u"<RealmCount: %s %s %s %s>" % (self.realm, self.property, self.subgroup, self.value)
|
||||
|
||||
class UserCount(BaseCount):
|
||||
user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
|
||||
realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
|
||||
user = models.ForeignKey(UserProfile)
|
||||
realm = models.ForeignKey(Realm)
|
||||
|
||||
class Meta:
|
||||
# Handles invalid duplicate UserCount data
|
||||
constraints = [
|
||||
UniqueConstraint(
|
||||
fields=["user", "property", "subgroup", "end_time"],
|
||||
condition=Q(subgroup__isnull=False),
|
||||
name="unique_user_count",
|
||||
),
|
||||
UniqueConstraint(
|
||||
fields=["user", "property", "end_time"],
|
||||
condition=Q(subgroup__isnull=True),
|
||||
name="unique_user_count_null_subgroup",
|
||||
),
|
||||
]
|
||||
class Meta(object):
|
||||
unique_together = ("user", "property", "subgroup", "end_time")
|
||||
# This index dramatically improves the performance of
|
||||
# aggregating from users to realms
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_usercount_property_realm_id_end_time_591dbec1_idx",
|
||||
)
|
||||
]
|
||||
|
||||
@override
|
||||
def __str__(self) -> str:
|
||||
return f"{self.user!r} {self.property} {self.subgroup} {self.value}"
|
||||
index_together = ["property", "realm", "end_time"]
|
||||
|
||||
def __unicode__(self):
|
||||
# type: () -> Text
|
||||
return u"<UserCount: %s %s %s %s>" % (self.user, self.property, self.subgroup, self.value)
|
||||
|
||||
class StreamCount(BaseCount):
|
||||
stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
|
||||
realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
|
||||
stream = models.ForeignKey(Stream)
|
||||
realm = models.ForeignKey(Realm)
|
||||
|
||||
class Meta:
|
||||
# Handles invalid duplicate StreamCount data
|
||||
constraints = [
|
||||
UniqueConstraint(
|
||||
fields=["stream", "property", "subgroup", "end_time"],
|
||||
condition=Q(subgroup__isnull=False),
|
||||
name="unique_stream_count",
|
||||
),
|
||||
UniqueConstraint(
|
||||
fields=["stream", "property", "end_time"],
|
||||
condition=Q(subgroup__isnull=True),
|
||||
name="unique_stream_count_null_subgroup",
|
||||
),
|
||||
]
|
||||
class Meta(object):
|
||||
unique_together = ("stream", "property", "subgroup", "end_time")
|
||||
# This index dramatically improves the performance of
|
||||
# aggregating from streams to realms
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_streamcount_property_realm_id_end_time_155ae930_idx",
|
||||
)
|
||||
]
|
||||
index_together = ["property", "realm", "end_time"]
|
||||
|
||||
@override
|
||||
def __str__(self) -> str:
|
||||
return f"{self.stream!r} {self.property} {self.subgroup} {self.value} {self.id}"
|
||||
def __unicode__(self):
|
||||
# type: () -> Text
|
||||
return u"<StreamCount: %s %s %s %s %s>" % (self.stream, self.property, self.subgroup, self.value, self.id)
|
||||
|
File diff suppressed because it is too large
Load Diff
@@ -1,40 +1,31 @@
|
||||
from analytics.lib.counts import CountStat
|
||||
from analytics.lib.fixtures import generate_time_series_data
|
||||
from zerver.lib.test_classes import ZulipTestCase
|
||||
|
||||
from analytics.lib.counts import CountStat
|
||||
from analytics.lib.fixtures import generate_time_series_data
|
||||
|
||||
# A very light test suite; the code being tested is not run in production.
|
||||
class TestFixtures(ZulipTestCase):
|
||||
def test_deterministic_settings(self) -> None:
|
||||
def test_deterministic_settings(self):
|
||||
# type: () -> None
|
||||
# test basic business_hour / non_business_hour calculation
|
||||
# test we get an array of the right length with frequency=CountStat.DAY
|
||||
data = generate_time_series_data(
|
||||
days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0
|
||||
)
|
||||
days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0)
|
||||
self.assertEqual(data, [400, 400, 400, 400, 400, 360, 360])
|
||||
|
||||
data = generate_time_series_data(
|
||||
days=1,
|
||||
business_hours_base=2000,
|
||||
non_business_hours_base=1500,
|
||||
growth=2,
|
||||
spikiness=0,
|
||||
frequency=CountStat.HOUR,
|
||||
)
|
||||
days=1, business_hours_base=2000, non_business_hours_base=1500,
|
||||
growth=2, spikiness=0, frequency=CountStat.HOUR)
|
||||
# test we get an array of the right length with frequency=CountStat.HOUR
|
||||
self.assert_length(data, 24)
|
||||
self.assertEqual(len(data), 24)
|
||||
# test that growth doesn't affect the first data point
|
||||
self.assertEqual(data[0], 2000)
|
||||
# test that the last data point is growth times what it otherwise would be
|
||||
self.assertEqual(data[-1], 1500 * 2)
|
||||
self.assertEqual(data[-1], 1500*2)
|
||||
|
||||
# test autocorrelation == 1, since that's the easiest value to test
|
||||
data = generate_time_series_data(
|
||||
days=1,
|
||||
business_hours_base=2000,
|
||||
non_business_hours_base=2000,
|
||||
autocorrelation=1,
|
||||
frequency=CountStat.HOUR,
|
||||
)
|
||||
days=1, business_hours_base=2000, non_business_hours_base=2000,
|
||||
autocorrelation=1, frequency=CountStat.HOUR)
|
||||
self.assertEqual(data[0], data[1])
|
||||
self.assertEqual(data[0], data[-1])
|
||||
|
@@ -1,689 +0,0 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import COUNT_STATS, CountStat
|
||||
from analytics.lib.time_utils import time_range
|
||||
from analytics.models import FillState, RealmCount, StreamCount, UserCount
|
||||
from analytics.views.stats import rewrite_client_arrays, sort_by_totals, sort_client_labels
|
||||
from zerver.lib.test_classes import ZulipTestCase
|
||||
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, datetime_to_timestamp
|
||||
from zerver.models import Client
|
||||
from zerver.models.realms import get_realm
|
||||
|
||||
|
||||
class TestStatsEndpoint(ZulipTestCase):
|
||||
def test_stats(self) -> None:
|
||||
self.user = self.example_user("hamlet")
|
||||
self.login_user(self.user)
|
||||
result = self.client_get("/stats")
|
||||
self.assertEqual(result.status_code, 200)
|
||||
# Check that we get something back
|
||||
self.assert_in_response("Zulip analytics for", result)
|
||||
|
||||
def test_guest_user_cant_access_stats(self) -> None:
|
||||
self.user = self.example_user("polonius")
|
||||
self.login_user(self.user)
|
||||
result = self.client_get("/stats")
|
||||
self.assert_json_error(result, "Not allowed for guest users", 400)
|
||||
|
||||
result = self.client_get("/json/analytics/chart_data")
|
||||
self.assert_json_error(result, "Not allowed for guest users", 400)
|
||||
|
||||
def test_stats_for_realm(self) -> None:
|
||||
user = self.example_user("hamlet")
|
||||
self.login_user(user)
|
||||
|
||||
result = self.client_get("/stats/realm/zulip/")
|
||||
self.assertEqual(result.status_code, 302)
|
||||
|
||||
result = self.client_get("/stats/realm/not_existing_realm/")
|
||||
self.assertEqual(result.status_code, 302)
|
||||
|
||||
user = self.example_user("hamlet")
|
||||
user.is_staff = True
|
||||
user.save(update_fields=["is_staff"])
|
||||
|
||||
result = self.client_get("/stats/realm/not_existing_realm/")
|
||||
self.assertEqual(result.status_code, 404)
|
||||
|
||||
result = self.client_get("/stats/realm/zulip/")
|
||||
self.assertEqual(result.status_code, 200)
|
||||
self.assert_in_response("Zulip analytics for", result)
|
||||
|
||||
def test_stats_for_installation(self) -> None:
|
||||
user = self.example_user("hamlet")
|
||||
self.login_user(user)
|
||||
|
||||
result = self.client_get("/stats/installation")
|
||||
self.assertEqual(result.status_code, 302)
|
||||
|
||||
user = self.example_user("hamlet")
|
||||
user.is_staff = True
|
||||
user.save(update_fields=["is_staff"])
|
||||
|
||||
result = self.client_get("/stats/installation")
|
||||
self.assertEqual(result.status_code, 200)
|
||||
self.assert_in_response("Zulip analytics for", result)
|
||||
|
||||
|
||||
class TestGetChartData(ZulipTestCase):
|
||||
@override
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
self.realm = get_realm("zulip")
|
||||
self.user = self.example_user("hamlet")
|
||||
self.stream_id = self.get_stream_id(self.get_streams(self.user)[0])
|
||||
self.login_user(self.user)
|
||||
self.end_times_hour = [
|
||||
ceiling_to_hour(self.realm.date_created) + timedelta(hours=i) for i in range(4)
|
||||
]
|
||||
self.end_times_day = [
|
||||
ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(4)
|
||||
]
|
||||
|
||||
def data(self, i: int) -> list[int]:
|
||||
return [0, 0, i, 0]
|
||||
|
||||
def insert_data(
|
||||
self, stat: CountStat, realm_subgroups: list[str | None], user_subgroups: list[str]
|
||||
) -> None:
|
||||
if stat.frequency == CountStat.HOUR:
|
||||
insert_time = self.end_times_hour[2]
|
||||
fill_time = self.end_times_hour[-1]
|
||||
if stat.frequency == CountStat.DAY:
|
||||
insert_time = self.end_times_day[2]
|
||||
fill_time = self.end_times_day[-1]
|
||||
|
||||
RealmCount.objects.bulk_create(
|
||||
RealmCount(
|
||||
property=stat.property,
|
||||
subgroup=subgroup,
|
||||
end_time=insert_time,
|
||||
value=100 + i,
|
||||
realm=self.realm,
|
||||
)
|
||||
for i, subgroup in enumerate(realm_subgroups)
|
||||
)
|
||||
UserCount.objects.bulk_create(
|
||||
UserCount(
|
||||
property=stat.property,
|
||||
subgroup=subgroup,
|
||||
end_time=insert_time,
|
||||
value=200 + i,
|
||||
realm=self.realm,
|
||||
user=self.user,
|
||||
)
|
||||
for i, subgroup in enumerate(user_subgroups)
|
||||
)
|
||||
StreamCount.objects.bulk_create(
|
||||
StreamCount(
|
||||
property=stat.property,
|
||||
subgroup=subgroup,
|
||||
end_time=insert_time,
|
||||
value=100 + i,
|
||||
stream_id=self.stream_id,
|
||||
realm=self.realm,
|
||||
)
|
||||
for i, subgroup in enumerate(realm_subgroups)
|
||||
)
|
||||
FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE)
|
||||
|
||||
def test_number_of_humans(self) -> None:
|
||||
stat = COUNT_STATS["realm_active_humans::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
stat = COUNT_STATS["1day_actives::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
stat = COUNT_STATS["active_users_audit:is_bot:day"]
|
||||
self.insert_data(stat, ["false"], [])
|
||||
result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data,
|
||||
{
|
||||
"msg": "",
|
||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
||||
"frequency": CountStat.DAY,
|
||||
"everyone": {
|
||||
"_1day": self.data(100),
|
||||
"_15day": self.data(100),
|
||||
"all_time": self.data(100),
|
||||
},
|
||||
"display_order": None,
|
||||
"result": "success",
|
||||
},
|
||||
)
|
||||
|
||||
def test_messages_sent_over_time(self) -> None:
|
||||
stat = COUNT_STATS["messages_sent:is_bot:hour"]
|
||||
self.insert_data(stat, ["true", "false"], ["false"])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data,
|
||||
{
|
||||
"msg": "",
|
||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour],
|
||||
"frequency": CountStat.HOUR,
|
||||
"everyone": {"bot": self.data(100), "human": self.data(101)},
|
||||
"user": {"bot": self.data(0), "human": self.data(200)},
|
||||
"display_order": None,
|
||||
"result": "success",
|
||||
},
|
||||
)
|
||||
|
||||
def test_messages_sent_by_message_type(self) -> None:
|
||||
stat = COUNT_STATS["messages_sent:message_type:day"]
|
||||
self.insert_data(
|
||||
stat, ["public_stream", "private_message"], ["public_stream", "private_stream"]
|
||||
)
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data,
|
||||
{
|
||||
"msg": "",
|
||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
||||
"frequency": CountStat.DAY,
|
||||
"everyone": {
|
||||
"Public channels": self.data(100),
|
||||
"Private channels": self.data(0),
|
||||
"Direct messages": self.data(101),
|
||||
"Group direct messages": self.data(0),
|
||||
},
|
||||
"user": {
|
||||
"Public channels": self.data(200),
|
||||
"Private channels": self.data(201),
|
||||
"Direct messages": self.data(0),
|
||||
"Group direct messages": self.data(0),
|
||||
},
|
||||
"display_order": [
|
||||
"Direct messages",
|
||||
"Public channels",
|
||||
"Private channels",
|
||||
"Group direct messages",
|
||||
],
|
||||
"result": "success",
|
||||
},
|
||||
)
|
||||
|
||||
def test_messages_sent_by_client(self) -> None:
|
||||
stat = COUNT_STATS["messages_sent:client:day"]
|
||||
client1 = Client.objects.create(name="client 1")
|
||||
client2 = Client.objects.create(name="client 2")
|
||||
client3 = Client.objects.create(name="client 3")
|
||||
client4 = Client.objects.create(name="client 4")
|
||||
self.insert_data(
|
||||
stat,
|
||||
[str(client4.id), str(client3.id), str(client2.id)],
|
||||
[str(client3.id), str(client1.id)],
|
||||
)
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data,
|
||||
{
|
||||
"msg": "",
|
||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
||||
"frequency": CountStat.DAY,
|
||||
"everyone": {
|
||||
"client 4": self.data(100),
|
||||
"client 3": self.data(101),
|
||||
"client 2": self.data(102),
|
||||
},
|
||||
"user": {"client 3": self.data(200), "client 1": self.data(201)},
|
||||
"display_order": ["client 1", "client 2", "client 3", "client 4"],
|
||||
"result": "success",
|
||||
},
|
||||
)
|
||||
|
||||
def test_messages_read_over_time(self) -> None:
|
||||
stat = COUNT_STATS["messages_read::hour"]
|
||||
self.insert_data(stat, [None], [])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_read_over_time"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data,
|
||||
{
|
||||
"msg": "",
|
||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour],
|
||||
"frequency": CountStat.HOUR,
|
||||
"everyone": {"read": self.data(100)},
|
||||
"user": {"read": self.data(0)},
|
||||
"display_order": None,
|
||||
"result": "success",
|
||||
},
|
||||
)
|
||||
|
||||
def test_messages_sent_by_stream(self) -> None:
|
||||
stat = COUNT_STATS["messages_in_stream:is_bot:day"]
|
||||
self.insert_data(stat, ["true", "false"], [])
|
||||
|
||||
result = self.client_get(
|
||||
f"/json/analytics/chart_data/stream/{self.stream_id}",
|
||||
{
|
||||
"chart_name": "messages_sent_by_stream",
|
||||
},
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data,
|
||||
{
|
||||
"msg": "",
|
||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
||||
"frequency": CountStat.DAY,
|
||||
"everyone": {"bot": self.data(100), "human": self.data(101)},
|
||||
"display_order": None,
|
||||
"result": "success",
|
||||
},
|
||||
)
|
||||
|
||||
result = self.api_get(
|
||||
self.example_user("polonius"),
|
||||
f"/api/v1/analytics/chart_data/stream/{self.stream_id}",
|
||||
{
|
||||
"chart_name": "messages_sent_by_stream",
|
||||
},
|
||||
)
|
||||
self.assert_json_error(result, "Not allowed for guest users")
|
||||
|
||||
# Verify we correctly forbid access to stats of streams in other realms.
|
||||
result = self.api_get(
|
||||
self.mit_user("sipbtest"),
|
||||
f"/api/v1/analytics/chart_data/stream/{self.stream_id}",
|
||||
{
|
||||
"chart_name": "messages_sent_by_stream",
|
||||
},
|
||||
subdomain="zephyr",
|
||||
)
|
||||
self.assert_json_error(result, "Invalid channel ID")
|
||||
|
||||
def test_include_empty_subgroups(self) -> None:
|
||||
FillState.objects.create(
|
||||
property="realm_active_humans::day",
|
||||
end_time=self.end_times_day[0],
|
||||
state=FillState.DONE,
|
||||
)
|
||||
result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(data["everyone"], {"_1day": [0], "_15day": [0], "all_time": [0]})
|
||||
self.assertFalse("user" in data)
|
||||
|
||||
FillState.objects.create(
|
||||
property="messages_sent:is_bot:hour",
|
||||
end_time=self.end_times_hour[0],
|
||||
state=FillState.DONE,
|
||||
)
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(data["everyone"], {"human": [0], "bot": [0]})
|
||||
self.assertEqual(data["user"], {"human": [0], "bot": [0]})
|
||||
|
||||
FillState.objects.create(
|
||||
property="messages_sent:message_type:day",
|
||||
end_time=self.end_times_day[0],
|
||||
state=FillState.DONE,
|
||||
)
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data["everyone"],
|
||||
{
|
||||
"Public channels": [0],
|
||||
"Private channels": [0],
|
||||
"Direct messages": [0],
|
||||
"Group direct messages": [0],
|
||||
},
|
||||
)
|
||||
self.assertEqual(
|
||||
data["user"],
|
||||
{
|
||||
"Public channels": [0],
|
||||
"Private channels": [0],
|
||||
"Direct messages": [0],
|
||||
"Group direct messages": [0],
|
||||
},
|
||||
)
|
||||
|
||||
FillState.objects.create(
|
||||
property="messages_sent:client:day",
|
||||
end_time=self.end_times_day[0],
|
||||
state=FillState.DONE,
|
||||
)
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(data["everyone"], {})
|
||||
self.assertEqual(data["user"], {})
|
||||
|
||||
def test_start_and_end(self) -> None:
|
||||
stat = COUNT_STATS["realm_active_humans::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
stat = COUNT_STATS["1day_actives::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
stat = COUNT_STATS["active_users_audit:is_bot:day"]
|
||||
self.insert_data(stat, ["false"], [])
|
||||
end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day]
|
||||
|
||||
# valid start and end
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data",
|
||||
{
|
||||
"chart_name": "number_of_humans",
|
||||
"start": end_time_timestamps[1],
|
||||
"end": end_time_timestamps[2],
|
||||
},
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(data["end_times"], end_time_timestamps[1:3])
|
||||
self.assertEqual(
|
||||
data["everyone"], {"_1day": [0, 100], "_15day": [0, 100], "all_time": [0, 100]}
|
||||
)
|
||||
|
||||
# start later then end
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data",
|
||||
{
|
||||
"chart_name": "number_of_humans",
|
||||
"start": end_time_timestamps[2],
|
||||
"end": end_time_timestamps[1],
|
||||
},
|
||||
)
|
||||
self.assert_json_error_contains(result, "Start time is later than")
|
||||
|
||||
def test_min_length(self) -> None:
|
||||
stat = COUNT_STATS["realm_active_humans::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
stat = COUNT_STATS["1day_actives::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
stat = COUNT_STATS["active_users_audit:is_bot:day"]
|
||||
self.insert_data(stat, ["false"], [])
|
||||
# test min_length is too short to change anything
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 2}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data["end_times"], [datetime_to_timestamp(dt) for dt in self.end_times_day]
|
||||
)
|
||||
self.assertEqual(
|
||||
data["everyone"],
|
||||
{"_1day": self.data(100), "_15day": self.data(100), "all_time": self.data(100)},
|
||||
)
|
||||
# test min_length larger than filled data
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 5}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
end_times = [
|
||||
ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)
|
||||
]
|
||||
self.assertEqual(data["end_times"], [datetime_to_timestamp(dt) for dt in end_times])
|
||||
self.assertEqual(
|
||||
data["everyone"],
|
||||
{
|
||||
"_1day": [0, *self.data(100)],
|
||||
"_15day": [0, *self.data(100)],
|
||||
"all_time": [0, *self.data(100)],
|
||||
},
|
||||
)
|
||||
|
||||
def test_non_existent_chart(self) -> None:
|
||||
result = self.client_get("/json/analytics/chart_data", {"chart_name": "does_not_exist"})
|
||||
self.assert_json_error_contains(result, "Unknown chart name")
|
||||
|
||||
def test_analytics_not_running(self) -> None:
|
||||
realm = get_realm("zulip")
|
||||
|
||||
self.assertEqual(FillState.objects.count(), 0)
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=3)
|
||||
realm.save(update_fields=["date_created"])
|
||||
with self.assertLogs(level="WARNING") as m:
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assertEqual(
|
||||
m.output,
|
||||
[
|
||||
f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?"
|
||||
],
|
||||
)
|
||||
|
||||
self.assert_json_error_contains(result, "No analytics data available")
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=1, hours=2)
|
||||
realm.save(update_fields=["date_created"])
|
||||
with self.assertLogs(level="WARNING") as m:
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assertEqual(
|
||||
m.output,
|
||||
[
|
||||
f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?"
|
||||
],
|
||||
)
|
||||
|
||||
self.assert_json_error_contains(result, "No analytics data available")
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
|
||||
realm.save(update_fields=["date_created"])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(hours=10)
|
||||
realm.save(update_fields=["date_created"])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
end_time = timezone_now() - timedelta(days=5)
|
||||
fill_state = FillState.objects.create(
|
||||
property="messages_sent:is_bot:hour", end_time=end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=3)
|
||||
realm.save(update_fields=["date_created"])
|
||||
with self.assertLogs(level="WARNING") as m:
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assertEqual(
|
||||
m.output,
|
||||
[
|
||||
f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?"
|
||||
],
|
||||
)
|
||||
|
||||
self.assert_json_error_contains(result, "No analytics data available")
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
|
||||
realm.save(update_fields=["date_created"])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
end_time = timezone_now() - timedelta(days=2)
|
||||
fill_state.end_time = end_time
|
||||
fill_state.save(update_fields=["end_time"])
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=3)
|
||||
realm.save(update_fields=["date_created"])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=1, hours=2)
|
||||
realm.save(update_fields=["date_created"])
|
||||
with self.assertLogs(level="WARNING") as m:
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assertEqual(
|
||||
m.output,
|
||||
[
|
||||
f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?"
|
||||
],
|
||||
)
|
||||
|
||||
self.assert_json_error_contains(result, "No analytics data available")
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
|
||||
realm.save(update_fields=["date_created"])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
def test_get_chart_data_for_realm(self) -> None:
|
||||
user = self.example_user("hamlet")
|
||||
self.login_user(user)
|
||||
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"}
|
||||
)
|
||||
self.assert_json_error(result, "Must be an server administrator", 400)
|
||||
|
||||
user = self.example_user("hamlet")
|
||||
user.is_staff = True
|
||||
user.save(update_fields=["is_staff"])
|
||||
stat = COUNT_STATS["realm_active_humans::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data/realm/not_existing_realm",
|
||||
{"chart_name": "number_of_humans"},
|
||||
)
|
||||
self.assert_json_error(result, "Invalid organization", 400)
|
||||
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
def test_get_chart_data_for_installation(self) -> None:
|
||||
user = self.example_user("hamlet")
|
||||
self.login_user(user)
|
||||
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"}
|
||||
)
|
||||
self.assert_json_error(result, "Must be an server administrator", 400)
|
||||
|
||||
user = self.example_user("hamlet")
|
||||
user.is_staff = True
|
||||
user.save(update_fields=["is_staff"])
|
||||
stat = COUNT_STATS["realm_active_humans::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
|
||||
class TestGetChartDataHelpers(ZulipTestCase):
|
||||
def test_sort_by_totals(self) -> None:
|
||||
empty: list[int] = []
|
||||
value_arrays = {"c": [0, 1], "a": [9], "b": [1, 1, 1], "d": empty}
|
||||
self.assertEqual(sort_by_totals(value_arrays), ["a", "b", "c", "d"])
|
||||
|
||||
def test_sort_client_labels(self) -> None:
|
||||
data = {
|
||||
"everyone": {"a": [16], "c": [15], "b": [14], "e": [13], "d": [12], "h": [11]},
|
||||
"user": {"a": [6], "b": [5], "d": [4], "e": [3], "f": [2], "g": [1]},
|
||||
}
|
||||
self.assertEqual(sort_client_labels(data), ["a", "b", "c", "d", "e", "f", "g", "h"])
|
||||
|
||||
|
||||
class TestTimeRange(ZulipTestCase):
|
||||
def test_time_range(self) -> None:
|
||||
HOUR = timedelta(hours=1)
|
||||
DAY = timedelta(days=1)
|
||||
|
||||
a_time = datetime(2016, 3, 14, 22, 59, tzinfo=timezone.utc)
|
||||
floor_hour = datetime(2016, 3, 14, 22, tzinfo=timezone.utc)
|
||||
floor_day = datetime(2016, 3, 14, tzinfo=timezone.utc)
|
||||
|
||||
# test start == end
|
||||
self.assertEqual(time_range(a_time, a_time, CountStat.HOUR, None), [])
|
||||
self.assertEqual(time_range(a_time, a_time, CountStat.DAY, None), [])
|
||||
# test start == end == boundary, and min_length == 0
|
||||
self.assertEqual(time_range(floor_hour, floor_hour, CountStat.HOUR, 0), [floor_hour])
|
||||
self.assertEqual(time_range(floor_day, floor_day, CountStat.DAY, 0), [floor_day])
|
||||
# test start and end on different boundaries
|
||||
self.assertEqual(
|
||||
time_range(floor_hour, floor_hour + HOUR, CountStat.HOUR, None),
|
||||
[floor_hour, floor_hour + HOUR],
|
||||
)
|
||||
self.assertEqual(
|
||||
time_range(floor_day, floor_day + DAY, CountStat.DAY, None),
|
||||
[floor_day, floor_day + DAY],
|
||||
)
|
||||
# test min_length
|
||||
self.assertEqual(
|
||||
time_range(floor_hour, floor_hour + HOUR, CountStat.HOUR, 4),
|
||||
[floor_hour - 2 * HOUR, floor_hour - HOUR, floor_hour, floor_hour + HOUR],
|
||||
)
|
||||
self.assertEqual(
|
||||
time_range(floor_day, floor_day + DAY, CountStat.DAY, 4),
|
||||
[floor_day - 2 * DAY, floor_day - DAY, floor_day, floor_day + DAY],
|
||||
)
|
||||
|
||||
|
||||
class TestMapArrays(ZulipTestCase):
|
||||
def test_map_arrays(self) -> None:
|
||||
a = {
|
||||
"desktop app 1.0": [1, 2, 3],
|
||||
"desktop app 2.0": [10, 12, 13],
|
||||
"desktop app 3.0": [21, 22, 23],
|
||||
"website": [1, 2, 3],
|
||||
"ZulipiOS": [1, 2, 3],
|
||||
"ZulipElectron": [2, 5, 7],
|
||||
"ZulipMobile": [1, 2, 3],
|
||||
"ZulipMobile/flutter": [1, 1, 1],
|
||||
"ZulipFlutter": [1, 1, 1],
|
||||
"ZulipPython": [1, 2, 3],
|
||||
"API: Python": [1, 2, 3],
|
||||
"SomethingRandom": [4, 5, 6],
|
||||
"ZulipGitHubWebhook": [7, 7, 9],
|
||||
"ZulipAndroid": [64, 63, 65],
|
||||
"ZulipTerminal": [9, 10, 11],
|
||||
}
|
||||
result = rewrite_client_arrays(a)
|
||||
self.assertEqual(
|
||||
result,
|
||||
{
|
||||
"Old desktop app": [32, 36, 39],
|
||||
"Ancient iOS app": [1, 2, 3],
|
||||
"Desktop app": [2, 5, 7],
|
||||
"Old mobile app (React Native)": [1, 2, 3],
|
||||
"Mobile app (Flutter)": [2, 2, 2],
|
||||
"Web app": [1, 2, 3],
|
||||
"Python API": [2, 4, 6],
|
||||
"SomethingRandom": [4, 5, 6],
|
||||
"GitHub webhook": [7, 7, 9],
|
||||
"Ancient Android app": [64, 63, 65],
|
||||
"Terminal app": [9, 10, 11],
|
||||
},
|
||||
)
|
323
analytics/tests/test_views.py
Normal file
323
analytics/tests/test_views.py
Normal file
@@ -0,0 +1,323 @@
|
||||
from django.utils.timezone import get_fixed_timezone, utc
|
||||
from zerver.lib.test_classes import ZulipTestCase
|
||||
from zerver.lib.timestamp import ceiling_to_hour, ceiling_to_day, \
|
||||
datetime_to_timestamp
|
||||
from zerver.models import Realm, UserProfile, Client, get_realm
|
||||
|
||||
from analytics.lib.counts import CountStat, COUNT_STATS
|
||||
from analytics.lib.time_utils import time_range
|
||||
from analytics.models import RealmCount, UserCount, BaseCount, \
|
||||
FillState, last_successful_fill
|
||||
from analytics.views import stats, get_chart_data, sort_by_totals, \
|
||||
sort_client_labels, rewrite_client_arrays
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
import mock
|
||||
import ujson
|
||||
|
||||
from typing import List, Dict, Optional
|
||||
|
||||
class TestStatsEndpoint(ZulipTestCase):
|
||||
def test_stats(self):
|
||||
# type: () -> None
|
||||
self.user = self.example_user('hamlet')
|
||||
self.login(self.user.email)
|
||||
result = self.client_get('/stats')
|
||||
self.assertEqual(result.status_code, 200)
|
||||
# Check that we get something back
|
||||
self.assert_in_response("Zulip analytics for", result)
|
||||
|
||||
class TestGetChartData(ZulipTestCase):
|
||||
def setUp(self):
|
||||
# type: () -> None
|
||||
self.realm = get_realm('zulip')
|
||||
self.user = self.example_user('hamlet')
|
||||
self.login(self.user.email)
|
||||
self.end_times_hour = [ceiling_to_hour(self.realm.date_created) + timedelta(hours=i)
|
||||
for i in range(4)]
|
||||
self.end_times_day = [ceiling_to_day(self.realm.date_created) + timedelta(days=i)
|
||||
for i in range(4)]
|
||||
|
||||
def data(self, i):
|
||||
# type: (int) -> List[int]
|
||||
return [0, 0, i, 0]
|
||||
|
||||
def insert_data(self, stat, realm_subgroups, user_subgroups):
|
||||
# type: (CountStat, List[Optional[str]], List[str]) -> None
|
||||
if stat.frequency == CountStat.HOUR:
|
||||
insert_time = self.end_times_hour[2]
|
||||
fill_time = self.end_times_hour[-1]
|
||||
if stat.frequency == CountStat.DAY:
|
||||
insert_time = self.end_times_day[2]
|
||||
fill_time = self.end_times_day[-1]
|
||||
|
||||
RealmCount.objects.bulk_create([
|
||||
RealmCount(property=stat.property, subgroup=subgroup, end_time=insert_time,
|
||||
value=100+i, realm=self.realm)
|
||||
for i, subgroup in enumerate(realm_subgroups)])
|
||||
UserCount.objects.bulk_create([
|
||||
UserCount(property=stat.property, subgroup=subgroup, end_time=insert_time,
|
||||
value=200+i, realm=self.realm, user=self.user)
|
||||
for i, subgroup in enumerate(user_subgroups)])
|
||||
FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE)
|
||||
|
||||
def test_number_of_humans(self):
|
||||
# type: () -> None
|
||||
stat = COUNT_STATS['realm_active_humans::day']
|
||||
self.insert_data(stat, [None], [])
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'number_of_humans'})
|
||||
self.assert_json_success(result)
|
||||
data = result.json()
|
||||
self.assertEqual(data, {
|
||||
'msg': '',
|
||||
'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
||||
'frequency': CountStat.DAY,
|
||||
'realm': {'human': self.data(100)},
|
||||
'display_order': None,
|
||||
'result': 'success',
|
||||
})
|
||||
|
||||
def test_messages_sent_over_time(self):
|
||||
# type: () -> None
|
||||
stat = COUNT_STATS['messages_sent:is_bot:hour']
|
||||
self.insert_data(stat, ['true', 'false'], ['false'])
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'messages_sent_over_time'})
|
||||
self.assert_json_success(result)
|
||||
data = result.json()
|
||||
self.assertEqual(data, {
|
||||
'msg': '',
|
||||
'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_hour],
|
||||
'frequency': CountStat.HOUR,
|
||||
'realm': {'bot': self.data(100), 'human': self.data(101)},
|
||||
'user': {'bot': self.data(0), 'human': self.data(200)},
|
||||
'display_order': None,
|
||||
'result': 'success',
|
||||
})
|
||||
|
||||
def test_messages_sent_by_message_type(self):
|
||||
# type: () -> None
|
||||
stat = COUNT_STATS['messages_sent:message_type:day']
|
||||
self.insert_data(stat, ['public_stream', 'private_message'],
|
||||
['public_stream', 'private_stream'])
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'messages_sent_by_message_type'})
|
||||
self.assert_json_success(result)
|
||||
data = result.json()
|
||||
self.assertEqual(data, {
|
||||
'msg': '',
|
||||
'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
||||
'frequency': CountStat.DAY,
|
||||
'realm': {'Public streams': self.data(100), 'Private streams': self.data(0),
|
||||
'Private messages': self.data(101), 'Group private messages': self.data(0)},
|
||||
'user': {'Public streams': self.data(200), 'Private streams': self.data(201),
|
||||
'Private messages': self.data(0), 'Group private messages': self.data(0)},
|
||||
'display_order': ['Private messages', 'Public streams', 'Private streams', 'Group private messages'],
|
||||
'result': 'success',
|
||||
})
|
||||
|
||||
def test_messages_sent_by_client(self):
|
||||
# type: () -> None
|
||||
stat = COUNT_STATS['messages_sent:client:day']
|
||||
client1 = Client.objects.create(name='client 1')
|
||||
client2 = Client.objects.create(name='client 2')
|
||||
client3 = Client.objects.create(name='client 3')
|
||||
client4 = Client.objects.create(name='client 4')
|
||||
self.insert_data(stat, [client4.id, client3.id, client2.id],
|
||||
[client3.id, client1.id])
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'messages_sent_by_client'})
|
||||
self.assert_json_success(result)
|
||||
data = result.json()
|
||||
self.assertEqual(data, {
|
||||
'msg': '',
|
||||
'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
||||
'frequency': CountStat.DAY,
|
||||
'realm': {'client 4': self.data(100), 'client 3': self.data(101),
|
||||
'client 2': self.data(102)},
|
||||
'user': {'client 3': self.data(200), 'client 1': self.data(201)},
|
||||
'display_order': ['client 1', 'client 2', 'client 3', 'client 4'],
|
||||
'result': 'success',
|
||||
})
|
||||
|
||||
def test_include_empty_subgroups(self):
|
||||
# type: () -> None
|
||||
FillState.objects.create(
|
||||
property='realm_active_humans::day', end_time=self.end_times_day[0], state=FillState.DONE)
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'number_of_humans'})
|
||||
self.assert_json_success(result)
|
||||
data = result.json()
|
||||
self.assertEqual(data['realm'], {'human': [0]})
|
||||
self.assertFalse('user' in data)
|
||||
|
||||
FillState.objects.create(
|
||||
property='messages_sent:is_bot:hour', end_time=self.end_times_hour[0], state=FillState.DONE)
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'messages_sent_over_time'})
|
||||
self.assert_json_success(result)
|
||||
data = result.json()
|
||||
self.assertEqual(data['realm'], {'human': [0], 'bot': [0]})
|
||||
self.assertEqual(data['user'], {'human': [0], 'bot': [0]})
|
||||
|
||||
FillState.objects.create(
|
||||
property='messages_sent:message_type:day', end_time=self.end_times_day[0], state=FillState.DONE)
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'messages_sent_by_message_type'})
|
||||
self.assert_json_success(result)
|
||||
data = result.json()
|
||||
self.assertEqual(data['realm'], {
|
||||
'Public streams': [0], 'Private streams': [0], 'Private messages': [0], 'Group private messages': [0]})
|
||||
self.assertEqual(data['user'], {
|
||||
'Public streams': [0], 'Private streams': [0], 'Private messages': [0], 'Group private messages': [0]})
|
||||
|
||||
FillState.objects.create(
|
||||
property='messages_sent:client:day', end_time=self.end_times_day[0], state=FillState.DONE)
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'messages_sent_by_client'})
|
||||
self.assert_json_success(result)
|
||||
data = result.json()
|
||||
self.assertEqual(data['realm'], {})
|
||||
self.assertEqual(data['user'], {})
|
||||
|
||||
def test_start_and_end(self):
|
||||
# type: () -> None
|
||||
stat = COUNT_STATS['realm_active_humans::day']
|
||||
self.insert_data(stat, [None], [])
|
||||
end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day]
|
||||
|
||||
# valid start and end
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'number_of_humans',
|
||||
'start': end_time_timestamps[1],
|
||||
'end': end_time_timestamps[2]})
|
||||
self.assert_json_success(result)
|
||||
data = result.json()
|
||||
self.assertEqual(data['end_times'], end_time_timestamps[1:3])
|
||||
self.assertEqual(data['realm'], {'human': [0, 100]})
|
||||
|
||||
# start later then end
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'number_of_humans',
|
||||
'start': end_time_timestamps[2],
|
||||
'end': end_time_timestamps[1]})
|
||||
self.assert_json_error_contains(result, 'Start time is later than')
|
||||
|
||||
def test_min_length(self):
|
||||
# type: () -> None
|
||||
stat = COUNT_STATS['realm_active_humans::day']
|
||||
self.insert_data(stat, [None], [])
|
||||
# test min_length is too short to change anything
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'number_of_humans',
|
||||
'min_length': 2})
|
||||
self.assert_json_success(result)
|
||||
data = result.json()
|
||||
self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in self.end_times_day])
|
||||
self.assertEqual(data['realm'], {'human': self.data(100)})
|
||||
# test min_length larger than filled data
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'number_of_humans',
|
||||
'min_length': 5})
|
||||
self.assert_json_success(result)
|
||||
data = result.json()
|
||||
end_times = [ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)]
|
||||
self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in end_times])
|
||||
self.assertEqual(data['realm'], {'human': [0]+self.data(100)})
|
||||
|
||||
def test_non_existent_chart(self):
|
||||
# type: () -> None
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'does_not_exist'})
|
||||
self.assert_json_error_contains(result, 'Unknown chart name')
|
||||
|
||||
def test_analytics_not_running(self):
|
||||
# type: () -> None
|
||||
# try to get data for a valid chart, but before we've put anything in the database
|
||||
# (e.g. before update_analytics_counts has been run)
|
||||
with mock.patch('logging.warning'):
|
||||
result = self.client_get('/json/analytics/chart_data',
|
||||
{'chart_name': 'number_of_humans'})
|
||||
self.assert_json_error_contains(result, 'No analytics data available')
|
||||
|
||||
class TestGetChartDataHelpers(ZulipTestCase):
|
||||
# last_successful_fill is in analytics/models.py, but get_chart_data is
|
||||
# the only function that uses it at the moment
|
||||
def test_last_successful_fill(self):
|
||||
# type: () -> None
|
||||
self.assertIsNone(last_successful_fill('non-existant'))
|
||||
a_time = datetime(2016, 3, 14, 19).replace(tzinfo=utc)
|
||||
one_hour_before = datetime(2016, 3, 14, 18).replace(tzinfo=utc)
|
||||
fillstate = FillState.objects.create(property='property', end_time=a_time,
|
||||
state=FillState.DONE)
|
||||
self.assertEqual(last_successful_fill('property'), a_time)
|
||||
fillstate.state = FillState.STARTED
|
||||
fillstate.save()
|
||||
self.assertEqual(last_successful_fill('property'), one_hour_before)
|
||||
|
||||
def test_sort_by_totals(self):
|
||||
# type: () -> None
|
||||
empty = [] # type: List[int]
|
||||
value_arrays = {'c': [0, 1], 'a': [9], 'b': [1, 1, 1], 'd': empty}
|
||||
self.assertEqual(sort_by_totals(value_arrays), ['a', 'b', 'c', 'd'])
|
||||
|
||||
def test_sort_client_labels(self):
|
||||
# type: () -> None
|
||||
data = {'realm': {'a': [16], 'c': [15], 'b': [14], 'e': [13], 'd': [12], 'h': [11]},
|
||||
'user': {'a': [6], 'b': [5], 'd': [4], 'e': [3], 'f': [2], 'g': [1]}}
|
||||
self.assertEqual(sort_client_labels(data), ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'])
|
||||
|
||||
class TestTimeRange(ZulipTestCase):
|
||||
def test_time_range(self):
|
||||
# type: () -> None
|
||||
HOUR = timedelta(hours=1)
|
||||
DAY = timedelta(days=1)
|
||||
|
||||
a_time = datetime(2016, 3, 14, 22, 59).replace(tzinfo=utc)
|
||||
floor_hour = datetime(2016, 3, 14, 22).replace(tzinfo=utc)
|
||||
floor_day = datetime(2016, 3, 14).replace(tzinfo=utc)
|
||||
|
||||
# test start == end
|
||||
self.assertEqual(time_range(a_time, a_time, CountStat.HOUR, None), [])
|
||||
self.assertEqual(time_range(a_time, a_time, CountStat.DAY, None), [])
|
||||
# test start == end == boundary, and min_length == 0
|
||||
self.assertEqual(time_range(floor_hour, floor_hour, CountStat.HOUR, 0), [floor_hour])
|
||||
self.assertEqual(time_range(floor_day, floor_day, CountStat.DAY, 0), [floor_day])
|
||||
# test start and end on different boundaries
|
||||
self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, None),
|
||||
[floor_hour, floor_hour+HOUR])
|
||||
self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, None),
|
||||
[floor_day, floor_day+DAY])
|
||||
# test min_length
|
||||
self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, 4),
|
||||
[floor_hour-2*HOUR, floor_hour-HOUR, floor_hour, floor_hour+HOUR])
|
||||
self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, 4),
|
||||
[floor_day-2*DAY, floor_day-DAY, floor_day, floor_day+DAY])
|
||||
|
||||
class TestMapArrays(ZulipTestCase):
|
||||
def test_map_arrays(self):
|
||||
# type: () -> None
|
||||
a = {'desktop app 1.0': [1, 2, 3],
|
||||
'desktop app 2.0': [10, 12, 13],
|
||||
'desktop app 3.0': [21, 22, 23],
|
||||
'website': [1, 2, 3],
|
||||
'ZulipiOS': [1, 2, 3],
|
||||
'ZulipElectron': [2, 5, 7],
|
||||
'ZulipMobile': [1, 5, 7],
|
||||
'ZulipPython': [1, 2, 3],
|
||||
'API: Python': [1, 2, 3],
|
||||
'SomethingRandom': [4, 5, 6],
|
||||
'ZulipGitHubWebhook': [7, 7, 9],
|
||||
'ZulipAndroid': [64, 63, 65]}
|
||||
result = rewrite_client_arrays(a)
|
||||
self.assertEqual(result,
|
||||
{'Old desktop app': [32, 36, 39],
|
||||
'Old iOS app': [1, 2, 3],
|
||||
'Desktop app': [2, 5, 7],
|
||||
'Mobile app': [1, 5, 7],
|
||||
'Website': [1, 2, 3],
|
||||
'Python API': [2, 4, 6],
|
||||
'SomethingRandom': [4, 5, 6],
|
||||
'GitHub webhook': [7, 7, 9],
|
||||
'Old Android app': [64, 63, 65]})
|
@@ -1,38 +1,22 @@
|
||||
from django.conf import settings
|
||||
from django.conf.urls import include
|
||||
from django.urls import path
|
||||
from django.urls.resolvers import URLPattern, URLResolver
|
||||
from django.conf.urls import url, include
|
||||
from zerver.lib.rest import rest_dispatch
|
||||
|
||||
from analytics.views.stats import (
|
||||
get_chart_data,
|
||||
get_chart_data_for_installation,
|
||||
get_chart_data_for_realm,
|
||||
get_chart_data_for_stream,
|
||||
stats,
|
||||
stats_for_installation,
|
||||
stats_for_realm,
|
||||
)
|
||||
from zerver.lib.rest import rest_path
|
||||
import analytics.views
|
||||
|
||||
i18n_urlpatterns: list[URLPattern | URLResolver] = [
|
||||
i18n_urlpatterns = [
|
||||
# Server admin (user_profile.is_staff) visible stats pages
|
||||
path("stats/realm/<realm_str>/", stats_for_realm),
|
||||
path("stats/installation", stats_for_installation),
|
||||
url(r'^activity$', analytics.views.get_activity,
|
||||
name='analytics.views.get_activity'),
|
||||
url(r'^realm_activity/(?P<realm_str>[\S]+)/$', analytics.views.get_realm_activity,
|
||||
name='analytics.views.get_realm_activity'),
|
||||
url(r'^user_activity/(?P<email>[\S]+)/$', analytics.views.get_user_activity,
|
||||
name='analytics.views.get_user_activity'),
|
||||
|
||||
# User-visible stats page
|
||||
path("stats", stats, name="stats"),
|
||||
url(r'^stats$', analytics.views.stats,
|
||||
name='analytics.views.stats'),
|
||||
]
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
from analytics.views.stats import stats_for_remote_installation, stats_for_remote_realm
|
||||
|
||||
i18n_urlpatterns += [
|
||||
path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation),
|
||||
path(
|
||||
"stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/",
|
||||
stats_for_remote_realm,
|
||||
),
|
||||
]
|
||||
|
||||
# These endpoints are a part of the API (V1), which uses:
|
||||
# * REST verbs
|
||||
# * Basic auth (username:password is email:apiKey)
|
||||
@@ -43,32 +27,13 @@ if settings.ZILENCER_ENABLED:
|
||||
# All of these paths are accessed by either a /json or /api prefix
|
||||
v1_api_and_json_patterns = [
|
||||
# get data for the graphs at /stats
|
||||
rest_path("analytics/chart_data", GET=get_chart_data),
|
||||
rest_path("analytics/chart_data/stream/<stream_id>", GET=get_chart_data_for_stream),
|
||||
rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm),
|
||||
rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation),
|
||||
url(r'^analytics/chart_data$', rest_dispatch,
|
||||
{'GET': 'analytics.views.get_chart_data'}),
|
||||
]
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
from analytics.views.stats import (
|
||||
get_chart_data_for_remote_installation,
|
||||
get_chart_data_for_remote_realm,
|
||||
)
|
||||
|
||||
v1_api_and_json_patterns += [
|
||||
rest_path(
|
||||
"analytics/chart_data/remote/<int:remote_server_id>/installation",
|
||||
GET=get_chart_data_for_remote_installation,
|
||||
),
|
||||
rest_path(
|
||||
"analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>",
|
||||
GET=get_chart_data_for_remote_realm,
|
||||
),
|
||||
]
|
||||
|
||||
i18n_urlpatterns += [
|
||||
path("api/v1/", include(v1_api_and_json_patterns)),
|
||||
path("json/", include(v1_api_and_json_patterns)),
|
||||
url(r'^api/v1/', include(v1_api_and_json_patterns)),
|
||||
url(r'^json/', include(v1_api_and_json_patterns)),
|
||||
]
|
||||
|
||||
urlpatterns = i18n_urlpatterns
|
||||
|
1099
analytics/views.py
Normal file
1099
analytics/views.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,654 +0,0 @@
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Annotated, Any, Optional, TypeAlias, TypeVar, cast
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.models import QuerySet
|
||||
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
|
||||
from django.shortcuts import render
|
||||
from django.utils import translation
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from django.utils.translation import gettext as _
|
||||
from pydantic import BeforeValidator, Json, NonNegativeInt
|
||||
|
||||
from analytics.lib.counts import COUNT_STATS, CountStat
|
||||
from analytics.lib.time_utils import time_range
|
||||
from analytics.models import (
|
||||
BaseCount,
|
||||
InstallationCount,
|
||||
RealmCount,
|
||||
StreamCount,
|
||||
UserCount,
|
||||
installation_epoch,
|
||||
)
|
||||
from zerver.decorator import (
|
||||
require_non_guest_user,
|
||||
require_server_admin,
|
||||
require_server_admin_api,
|
||||
to_utc_datetime,
|
||||
zulip_login_required,
|
||||
)
|
||||
from zerver.lib.exceptions import JsonableError
|
||||
from zerver.lib.i18n import get_and_set_request_language, get_language_translation_data
|
||||
from zerver.lib.response import json_success
|
||||
from zerver.lib.streams import access_stream_by_id
|
||||
from zerver.lib.timestamp import convert_to_UTC
|
||||
from zerver.lib.typed_endpoint import PathOnly, typed_endpoint
|
||||
from zerver.models import Client, Realm, Stream, UserProfile
|
||||
from zerver.models.realms import get_realm
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
from zilencer.models import RemoteInstallationCount, RemoteRealmCount, RemoteZulipServer
|
||||
|
||||
MAX_TIME_FOR_FULL_ANALYTICS_GENERATION = timedelta(days=1, minutes=30)
|
||||
|
||||
|
||||
def is_analytics_ready(realm: Realm) -> bool:
|
||||
return (timezone_now() - realm.date_created) > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION
|
||||
|
||||
|
||||
def render_stats(
|
||||
request: HttpRequest,
|
||||
data_url_suffix: str,
|
||||
realm: Realm | None,
|
||||
*,
|
||||
title: str | None = None,
|
||||
analytics_ready: bool = True,
|
||||
) -> HttpResponse:
|
||||
assert request.user.is_authenticated
|
||||
|
||||
if realm is not None:
|
||||
# Same query to get guest user count as in get_seat_count in corporate/lib/stripe.py.
|
||||
guest_users = UserProfile.objects.filter(
|
||||
realm=realm, is_active=True, is_bot=False, role=UserProfile.ROLE_GUEST
|
||||
).count()
|
||||
space_used = realm.currently_used_upload_space_bytes()
|
||||
if title:
|
||||
pass
|
||||
else:
|
||||
title = realm.name or realm.string_id
|
||||
else:
|
||||
assert title
|
||||
guest_users = None
|
||||
space_used = None
|
||||
|
||||
request_language = get_and_set_request_language(
|
||||
request,
|
||||
request.user.default_language,
|
||||
translation.get_language_from_path(request.path_info),
|
||||
)
|
||||
|
||||
# Sync this with stats_params_schema in base_page_params.ts.
|
||||
page_params = dict(
|
||||
page_type="stats",
|
||||
data_url_suffix=data_url_suffix,
|
||||
upload_space_used=space_used,
|
||||
guest_users=guest_users,
|
||||
translation_data=get_language_translation_data(request_language),
|
||||
)
|
||||
|
||||
return render(
|
||||
request,
|
||||
"analytics/stats.html",
|
||||
context=dict(
|
||||
target_name=title,
|
||||
page_params=page_params,
|
||||
analytics_ready=analytics_ready,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
@zulip_login_required
|
||||
def stats(request: HttpRequest) -> HttpResponse:
|
||||
assert request.user.is_authenticated
|
||||
realm = request.user.realm
|
||||
if request.user.is_guest:
|
||||
# TODO: Make @zulip_login_required pass the UserProfile so we
|
||||
# can use @require_member_or_admin
|
||||
raise JsonableError(_("Not allowed for guest users"))
|
||||
return render_stats(request, "", realm, analytics_ready=is_analytics_ready(realm))
|
||||
|
||||
|
||||
@require_server_admin
|
||||
@typed_endpoint
|
||||
def stats_for_realm(request: HttpRequest, *, realm_str: PathOnly[str]) -> HttpResponse:
|
||||
try:
|
||||
realm = get_realm(realm_str)
|
||||
except Realm.DoesNotExist:
|
||||
return HttpResponseNotFound()
|
||||
|
||||
return render_stats(
|
||||
request,
|
||||
f"/realm/{realm_str}",
|
||||
realm,
|
||||
analytics_ready=is_analytics_ready(realm),
|
||||
)
|
||||
|
||||
|
||||
@require_server_admin
|
||||
@typed_endpoint
|
||||
def stats_for_remote_realm(
|
||||
request: HttpRequest, *, remote_server_id: PathOnly[int], remote_realm_id: PathOnly[int]
|
||||
) -> HttpResponse:
|
||||
assert settings.ZILENCER_ENABLED
|
||||
server = RemoteZulipServer.objects.get(id=remote_server_id)
|
||||
return render_stats(
|
||||
request,
|
||||
f"/remote/{server.id}/realm/{remote_realm_id}",
|
||||
None,
|
||||
title=f"Realm {remote_realm_id} on server {server.hostname}",
|
||||
)
|
||||
|
||||
|
||||
@require_server_admin_api
|
||||
@typed_endpoint
|
||||
def get_chart_data_for_realm(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
/,
|
||||
*,
|
||||
realm_str: PathOnly[str],
|
||||
chart_name: str,
|
||||
min_length: Json[NonNegativeInt] | None = None,
|
||||
start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
) -> HttpResponse:
|
||||
try:
|
||||
realm = get_realm(realm_str)
|
||||
except Realm.DoesNotExist:
|
||||
raise JsonableError(_("Invalid organization"))
|
||||
|
||||
return do_get_chart_data(
|
||||
request,
|
||||
user_profile,
|
||||
realm=realm,
|
||||
chart_name=chart_name,
|
||||
min_length=min_length,
|
||||
start=start,
|
||||
end=end,
|
||||
)
|
||||
|
||||
|
||||
@require_non_guest_user
|
||||
@typed_endpoint
|
||||
def get_chart_data_for_stream(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
*,
|
||||
stream_id: PathOnly[int],
|
||||
chart_name: str,
|
||||
min_length: Json[NonNegativeInt] | None = None,
|
||||
start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
) -> HttpResponse:
|
||||
stream, ignored_sub = access_stream_by_id(
|
||||
user_profile,
|
||||
stream_id,
|
||||
require_content_access=False,
|
||||
)
|
||||
|
||||
return do_get_chart_data(
|
||||
request,
|
||||
user_profile,
|
||||
stream=stream,
|
||||
chart_name=chart_name,
|
||||
min_length=min_length,
|
||||
start=start,
|
||||
end=end,
|
||||
)
|
||||
|
||||
|
||||
@require_server_admin_api
|
||||
@typed_endpoint
|
||||
def get_chart_data_for_remote_realm(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
/,
|
||||
*,
|
||||
remote_server_id: PathOnly[int],
|
||||
remote_realm_id: PathOnly[int],
|
||||
chart_name: str,
|
||||
min_length: Json[NonNegativeInt] | None = None,
|
||||
start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
) -> HttpResponse:
|
||||
assert settings.ZILENCER_ENABLED
|
||||
server = RemoteZulipServer.objects.get(id=remote_server_id)
|
||||
return do_get_chart_data(
|
||||
request,
|
||||
user_profile,
|
||||
server=server,
|
||||
remote=True,
|
||||
remote_realm_id=remote_realm_id,
|
||||
chart_name=chart_name,
|
||||
min_length=min_length,
|
||||
start=start,
|
||||
end=end,
|
||||
)
|
||||
|
||||
|
||||
@require_server_admin
|
||||
def stats_for_installation(request: HttpRequest) -> HttpResponse:
|
||||
assert request.user.is_authenticated
|
||||
return render_stats(request, "/installation", None, title="installation")
|
||||
|
||||
|
||||
@require_server_admin
|
||||
def stats_for_remote_installation(request: HttpRequest, remote_server_id: int) -> HttpResponse:
|
||||
assert settings.ZILENCER_ENABLED
|
||||
server = RemoteZulipServer.objects.get(id=remote_server_id)
|
||||
return render_stats(
|
||||
request,
|
||||
f"/remote/{server.id}/installation",
|
||||
None,
|
||||
title=f"remote installation {server.hostname}",
|
||||
)
|
||||
|
||||
|
||||
@require_server_admin_api
|
||||
@typed_endpoint
|
||||
def get_chart_data_for_installation(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
/,
|
||||
*,
|
||||
chart_name: str,
|
||||
min_length: Json[NonNegativeInt] | None = None,
|
||||
start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
) -> HttpResponse:
|
||||
return do_get_chart_data(
|
||||
request,
|
||||
user_profile,
|
||||
for_installation=True,
|
||||
chart_name=chart_name,
|
||||
min_length=min_length,
|
||||
start=start,
|
||||
end=end,
|
||||
)
|
||||
|
||||
|
||||
@require_server_admin_api
|
||||
@typed_endpoint
|
||||
def get_chart_data_for_remote_installation(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
/,
|
||||
*,
|
||||
remote_server_id: PathOnly[int],
|
||||
chart_name: str,
|
||||
min_length: Json[NonNegativeInt] | None = None,
|
||||
start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
) -> HttpResponse:
|
||||
assert settings.ZILENCER_ENABLED
|
||||
server = RemoteZulipServer.objects.get(id=remote_server_id)
|
||||
return do_get_chart_data(
|
||||
request,
|
||||
user_profile,
|
||||
for_installation=True,
|
||||
remote=True,
|
||||
server=server,
|
||||
chart_name=chart_name,
|
||||
min_length=min_length,
|
||||
start=start,
|
||||
end=end,
|
||||
)
|
||||
|
||||
|
||||
@require_non_guest_user
|
||||
@typed_endpoint
|
||||
def get_chart_data(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
*,
|
||||
chart_name: str,
|
||||
min_length: Json[NonNegativeInt] | None = None,
|
||||
start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
|
||||
) -> HttpResponse:
|
||||
return do_get_chart_data(
|
||||
request,
|
||||
user_profile,
|
||||
chart_name=chart_name,
|
||||
min_length=min_length,
|
||||
start=start,
|
||||
end=end,
|
||||
)
|
||||
|
||||
|
||||
@require_non_guest_user
|
||||
def do_get_chart_data(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
*,
|
||||
# Common parameters supported by all stats endpoints.
|
||||
chart_name: str,
|
||||
min_length: NonNegativeInt | None = None,
|
||||
start: datetime | None = None,
|
||||
end: datetime | None = None,
|
||||
# The following parameters are only used by wrapping functions for
|
||||
# various contexts; the callers are responsible for validating them.
|
||||
realm: Realm | None = None,
|
||||
for_installation: bool = False,
|
||||
remote: bool = False,
|
||||
remote_realm_id: int | None = None,
|
||||
server: Optional["RemoteZulipServer"] = None,
|
||||
stream: Stream | None = None,
|
||||
) -> HttpResponse:
|
||||
TableType: TypeAlias = (
|
||||
type["RemoteInstallationCount"]
|
||||
| type[InstallationCount]
|
||||
| type["RemoteRealmCount"]
|
||||
| type[RealmCount]
|
||||
)
|
||||
if for_installation:
|
||||
if remote:
|
||||
assert settings.ZILENCER_ENABLED
|
||||
aggregate_table: TableType = RemoteInstallationCount
|
||||
assert server is not None
|
||||
else:
|
||||
aggregate_table = InstallationCount
|
||||
else:
|
||||
if remote:
|
||||
assert settings.ZILENCER_ENABLED
|
||||
aggregate_table = RemoteRealmCount
|
||||
assert server is not None
|
||||
assert remote_realm_id is not None
|
||||
else:
|
||||
aggregate_table = RealmCount
|
||||
|
||||
tables: (
|
||||
tuple[TableType] | tuple[TableType, type[UserCount]] | tuple[TableType, type[StreamCount]]
|
||||
)
|
||||
|
||||
if chart_name == "number_of_humans":
|
||||
stats = [
|
||||
COUNT_STATS["1day_actives::day"],
|
||||
COUNT_STATS["realm_active_humans::day"],
|
||||
COUNT_STATS["active_users_audit:is_bot:day"],
|
||||
]
|
||||
tables = (aggregate_table,)
|
||||
subgroup_to_label: dict[CountStat, dict[str | None, str]] = {
|
||||
stats[0]: {None: "_1day"},
|
||||
stats[1]: {None: "_15day"},
|
||||
stats[2]: {"false": "all_time"},
|
||||
}
|
||||
labels_sort_function = None
|
||||
include_empty_subgroups = True
|
||||
elif chart_name == "messages_sent_over_time":
|
||||
stats = [COUNT_STATS["messages_sent:is_bot:hour"]]
|
||||
tables = (aggregate_table, UserCount)
|
||||
subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
|
||||
labels_sort_function = None
|
||||
include_empty_subgroups = True
|
||||
elif chart_name == "messages_sent_by_message_type":
|
||||
stats = [COUNT_STATS["messages_sent:message_type:day"]]
|
||||
tables = (aggregate_table, UserCount)
|
||||
subgroup_to_label = {
|
||||
stats[0]: {
|
||||
"public_stream": _("Public channels"),
|
||||
"private_stream": _("Private channels"),
|
||||
"private_message": _("Direct messages"),
|
||||
"huddle_message": _("Group direct messages"),
|
||||
}
|
||||
}
|
||||
labels_sort_function = lambda data: sort_by_totals(data["everyone"])
|
||||
include_empty_subgroups = True
|
||||
elif chart_name == "messages_sent_by_client":
|
||||
stats = [COUNT_STATS["messages_sent:client:day"]]
|
||||
tables = (aggregate_table, UserCount)
|
||||
# Note that the labels are further re-written by client_label_map
|
||||
subgroup_to_label = {
|
||||
stats[0]: {str(id): name for id, name in Client.objects.values_list("id", "name")}
|
||||
}
|
||||
labels_sort_function = sort_client_labels
|
||||
include_empty_subgroups = False
|
||||
elif chart_name == "messages_read_over_time":
|
||||
stats = [COUNT_STATS["messages_read::hour"]]
|
||||
tables = (aggregate_table, UserCount)
|
||||
subgroup_to_label = {stats[0]: {None: "read"}}
|
||||
labels_sort_function = None
|
||||
include_empty_subgroups = True
|
||||
elif chart_name == "messages_sent_by_stream":
|
||||
if stream is None:
|
||||
raise JsonableError(
|
||||
_("Missing channel for chart: {chart_name}").format(chart_name=chart_name)
|
||||
)
|
||||
stats = [COUNT_STATS["messages_in_stream:is_bot:day"]]
|
||||
tables = (aggregate_table, StreamCount)
|
||||
subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
|
||||
labels_sort_function = None
|
||||
include_empty_subgroups = True
|
||||
else:
|
||||
raise JsonableError(_("Unknown chart name: {chart_name}").format(chart_name=chart_name))
|
||||
|
||||
# Most likely someone using our API endpoint. The /stats page does not
|
||||
# pass a start or end in its requests.
|
||||
if start is not None:
|
||||
start = convert_to_UTC(start)
|
||||
if end is not None:
|
||||
end = convert_to_UTC(end)
|
||||
if start is not None and end is not None and start > end:
|
||||
raise JsonableError(
|
||||
_("Start time is later than end time. Start: {start}, End: {end}").format(
|
||||
start=start,
|
||||
end=end,
|
||||
)
|
||||
)
|
||||
|
||||
if realm is None:
|
||||
# Note that this value is invalid for Remote tables; be
|
||||
# careful not to access it in those code paths.
|
||||
realm = user_profile.realm
|
||||
|
||||
if remote:
|
||||
# For remote servers, we don't have fillstate data, and thus
|
||||
# should simply use the first and last data points for the
|
||||
# table.
|
||||
assert server is not None
|
||||
assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
|
||||
aggregate_table_remote = cast(
|
||||
type[RemoteInstallationCount] | type[RemoteRealmCount], aggregate_table
|
||||
) # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
|
||||
if not aggregate_table_remote.objects.filter(server=server).exists():
|
||||
raise JsonableError(
|
||||
_("No analytics data available. Please contact your server administrator.")
|
||||
)
|
||||
if start is None:
|
||||
first = (
|
||||
aggregate_table_remote.objects.filter(server=server).order_by("remote_id").first()
|
||||
)
|
||||
assert first is not None
|
||||
start = first.end_time
|
||||
if end is None:
|
||||
last = aggregate_table_remote.objects.filter(server=server).order_by("remote_id").last()
|
||||
assert last is not None
|
||||
end = last.end_time
|
||||
else:
|
||||
# Otherwise, we can use tables on the current server to
|
||||
# determine a nice range, and some additional validation.
|
||||
if start is None:
|
||||
if for_installation:
|
||||
start = installation_epoch()
|
||||
else:
|
||||
start = realm.date_created
|
||||
if end is None:
|
||||
end = max(
|
||||
stat.last_successful_fill() or datetime.min.replace(tzinfo=timezone.utc)
|
||||
for stat in stats
|
||||
)
|
||||
|
||||
if start > end and (timezone_now() - start > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION):
|
||||
logging.warning(
|
||||
"User from realm %s attempted to access /stats, but the computed "
|
||||
"start time: %s (creation of realm or installation) is later than the computed "
|
||||
"end time: %s (last successful analytics update). Is the "
|
||||
"analytics cron job running?",
|
||||
realm.string_id,
|
||||
start,
|
||||
end,
|
||||
)
|
||||
raise JsonableError(
|
||||
_("No analytics data available. Please contact your server administrator.")
|
||||
)
|
||||
|
||||
assert len({stat.frequency for stat in stats}) == 1
|
||||
end_times = time_range(start, end, stats[0].frequency, min_length)
|
||||
data: dict[str, Any] = {
|
||||
"end_times": [int(end_time.timestamp()) for end_time in end_times],
|
||||
"frequency": stats[0].frequency,
|
||||
}
|
||||
|
||||
aggregation_level = {
|
||||
InstallationCount: "everyone",
|
||||
RealmCount: "everyone",
|
||||
UserCount: "user",
|
||||
StreamCount: "everyone",
|
||||
}
|
||||
if settings.ZILENCER_ENABLED:
|
||||
aggregation_level[RemoteInstallationCount] = "everyone"
|
||||
aggregation_level[RemoteRealmCount] = "everyone"
|
||||
|
||||
# -1 is a placeholder value, since there is no relevant filtering on InstallationCount
|
||||
id_value = {
|
||||
InstallationCount: -1,
|
||||
RealmCount: realm.id,
|
||||
UserCount: user_profile.id,
|
||||
}
|
||||
if stream is not None:
|
||||
id_value[StreamCount] = stream.id
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
if server is not None:
|
||||
id_value[RemoteInstallationCount] = server.id
|
||||
# TODO: RemoteRealmCount logic doesn't correctly handle
|
||||
# filtering by server_id as well.
|
||||
if remote_realm_id is not None:
|
||||
id_value[RemoteRealmCount] = remote_realm_id
|
||||
|
||||
for table in tables:
|
||||
data[aggregation_level[table]] = {}
|
||||
for stat in stats:
|
||||
data[aggregation_level[table]].update(
|
||||
get_time_series_by_subgroup(
|
||||
stat,
|
||||
table,
|
||||
id_value[table],
|
||||
end_times,
|
||||
subgroup_to_label[stat],
|
||||
include_empty_subgroups,
|
||||
)
|
||||
)
|
||||
|
||||
if labels_sort_function is not None:
|
||||
data["display_order"] = labels_sort_function(data)
|
||||
else:
|
||||
data["display_order"] = None
|
||||
return json_success(request, data=data)
|
||||
|
||||
|
||||
def sort_by_totals(value_arrays: dict[str, list[int]]) -> list[str]:
|
||||
totals = sorted(((sum(values), label) for label, values in value_arrays.items()), reverse=True)
|
||||
return [label for total, label in totals]
|
||||
|
||||
|
||||
# For any given user, we want to show a fixed set of clients in the chart,
# regardless of the time aggregation or whether we're looking at realm or
# user data. This fixed set ideally includes the clients most important in
# understanding the realm's traffic and the user's traffic. This function
# tries to rank the clients so that taking the first N elements of the
# sorted list has a reasonable chance of doing so.
def sort_client_labels(data: dict[str, dict[str, list[int]]]) -> list[str]:
    """Merge the realm-wide and per-user client rankings into one ordering."""
    realm_ranking = sort_by_totals(data["everyone"])
    user_ranking = sort_by_totals(data["user"])

    # Seed each label's sort key with its rank in the realm-wide ordering.
    sort_keys: dict[str, float] = {}
    for rank, label in enumerate(realm_ranking):
        sort_keys[label] = rank
    # A label's rank in the user's own ordering can only pull it earlier;
    # the -0.1 nudge makes a user-ranked label win ties against a
    # realm-ranked label at the same position.
    for rank, label in enumerate(user_ranking):
        sort_keys[label] = min(rank - 0.1, sort_keys.get(label, rank))
    return sorted(sort_keys, key=lambda label: sort_keys[label])
|
||||
|
||||
|
||||
# Type variable constrained to BaseCount subclasses, so table_filtered_to_id
# can return a QuerySet parameterized by the concrete *Count table passed in.
CountT = TypeVar("CountT", bound=BaseCount)
|
||||
|
||||
|
||||
def table_filtered_to_id(table: type[CountT], key_id: int) -> QuerySet[CountT]:
    """Return the rows of the given *Count table scoped to key_id.

    key_id is interpreted according to the table: realm id, user id,
    stream id, or (for the remote tables) server/realm id. For
    InstallationCount there is nothing to filter on, so key_id is ignored.
    """
    if table == InstallationCount:
        return table._default_manager.all()
    if table == RealmCount:
        return table._default_manager.filter(realm_id=key_id)
    if table == UserCount:
        return table._default_manager.filter(user_id=key_id)
    if table == StreamCount:
        return table._default_manager.filter(stream_id=key_id)
    # The remote tables only exist when the zilencer app is enabled; the
    # outer guard keeps us from touching those names otherwise.
    if settings.ZILENCER_ENABLED:
        if table == RemoteInstallationCount:
            return table._default_manager.filter(server_id=key_id)
        if table == RemoteRealmCount:
            return table._default_manager.filter(realm_id=key_id)
    raise AssertionError(f"Unknown table: {table}")
|
||||
|
||||
|
||||
def client_label_map(name: str) -> str:
    """Map a raw Client name to the human-readable label shown in charts.

    Names with no special-case mapping are returned unchanged.
    """
    exact_labels = {
        "website": "Web app",
        "ZulipElectron": "Desktop app",
        "ZulipTerminal": "Terminal app",
        "ZulipAndroid": "Ancient Android app",
        "ZulipiOS": "Ancient iOS app",
        "ZulipMobile": "Old mobile app (React Native)",
        "ZulipFlutter": "Mobile app (Flutter)",
        "ZulipMobile/flutter": "Mobile app (Flutter)",
        "ZulipPython": "Python API",
        "API: Python": "Python API",
    }
    label = exact_labels.get(name)
    if label is not None:
        return label
    if name.startswith("desktop app"):
        return "Old desktop app"
    # Integration clients like "ZulipGitHubWebhook" become "GitHub webhook".
    if name.startswith("Zulip") and name.endswith("Webhook"):
        return name.removeprefix("Zulip").removesuffix("Webhook") + " webhook"
    return name
|
||||
|
||||
|
||||
def rewrite_client_arrays(value_arrays: dict[str, list[int]]) -> dict[str, list[int]]:
    """Re-key the per-client series by their display label, summing series
    whose raw client names map to the same label.

    The input arrays are never mutated; series are copied before summing.
    """
    collapsed: dict[str, list[int]] = {}
    for raw_name, series in value_arrays.items():
        label = client_label_map(raw_name)
        existing = collapsed.get(label)
        if existing is None:
            collapsed[label] = series.copy()
        else:
            # Element-wise accumulate into the series already collected
            # under this label.
            for idx, count in enumerate(series):
                existing[idx] += count
    return collapsed
|
||||
|
||||
|
||||
def get_time_series_by_subgroup(
    stat: CountStat,
    table: type[BaseCount],
    key_id: int,
    end_times: list[datetime],
    subgroup_to_label: dict[str | None, str],
    include_empty_subgroups: bool,
) -> dict[str, list[int]]:
    """Build one labeled time series per subgroup for the given stat.

    Each series is aligned to end_times; buckets with no stored row
    default to 0. Subgroups with no data at all are omitted unless
    include_empty_subgroups is set.
    """
    rows = (
        table_filtered_to_id(table, key_id)
        .filter(property=stat.property)
        .values_list("subgroup", "end_time", "value")
    )
    counts_by_subgroup: dict[str | None, dict[datetime, int]] = defaultdict(
        lambda: defaultdict(int)
    )
    for subgroup, end_time, value in rows:
        counts_by_subgroup[subgroup][end_time] = value

    series: dict[str, list[int]] = {}
    for subgroup, label in subgroup_to_label.items():
        if include_empty_subgroups or subgroup in counts_by_subgroup:
            series[label] = [counts_by_subgroup[subgroup][t] for t in end_times]

    if stat == COUNT_STATS["messages_sent:client:day"]:
        # HACK: Collapse Client objects with similar names into a single
        # summed series, and generally give them better display names.
        return rewrite_client_arrays(series)
    return series
|
@@ -1,31 +0,0 @@
|
||||
{generate_api_header(API_ENDPOINT_NAME)}
|
||||
|
||||
## Usage examples
|
||||
|
||||
{start_tabs}
|
||||
|
||||
{generate_code_example(python)|API_ENDPOINT_NAME|example}
|
||||
|
||||
{generate_code_example(javascript)|API_ENDPOINT_NAME|example}
|
||||
|
||||
{tab|curl}
|
||||
|
||||
{generate_code_example(curl)|API_ENDPOINT_NAME|example}
|
||||
|
||||
{end_tabs}
|
||||
|
||||
## Parameters
|
||||
|
||||
{generate_api_arguments_table|zulip.yaml|API_ENDPOINT_NAME}
|
||||
|
||||
{generate_parameter_description(API_ENDPOINT_NAME)}
|
||||
|
||||
## Response
|
||||
|
||||
{generate_return_values_table|zulip.yaml|API_ENDPOINT_NAME}
|
||||
|
||||
{generate_response_description(API_ENDPOINT_NAME)}
|
||||
|
||||
#### Example response(s)
|
||||
|
||||
{generate_code_example|API_ENDPOINT_NAME|fixture}
|
@@ -1,89 +0,0 @@
|
||||
# API keys
|
||||
|
||||
An **API key** is how a bot identifies itself to Zulip. For the official
|
||||
clients, such as the Python bindings, we recommend [downloading a `zuliprc`
|
||||
file](/api/configuring-python-bindings#download-a-zuliprc-file). This file
|
||||
contains an API key and other necessary configuration values for using the
|
||||
Zulip API with a specific account on a Zulip server.
|
||||
|
||||
## Get a bot's API key
|
||||
|
||||
{start_tabs}
|
||||
|
||||
{tab|desktop-web}
|
||||
|
||||
{settings_tab|your-bots}
|
||||
|
||||
1. Click **Active bots**.
|
||||
|
||||
1. Find your bot. The bot's API key is under **API KEY**.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
!!! warn ""
|
||||
|
||||
Anyone with a bot's API key can impersonate the bot, so be careful with it!
|
||||
|
||||
## Get your API key
|
||||
|
||||
{start_tabs}
|
||||
|
||||
{tab|desktop-web}
|
||||
|
||||
{settings_tab|account-and-privacy}
|
||||
|
||||
1. Under **API key**, click **Manage your API key**.
|
||||
|
||||
1. Enter your password, and click **Get API key**. If you don't know your
|
||||
password, click **reset it** and follow the instructions from there.
|
||||
|
||||
1. Copy your API key.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
!!! warn ""
|
||||
|
||||
Anyone with your API key can impersonate you, so be doubly careful with it.
|
||||
|
||||
|
||||
## Invalidate an API key
|
||||
|
||||
To invalidate an existing API key, you have to generate a new key.
|
||||
|
||||
### Invalidate a bot's API key
|
||||
|
||||
{start_tabs}
|
||||
|
||||
{tab|desktop-web}
|
||||
|
||||
{settings_tab|your-bots}
|
||||
|
||||
1. Click **Active bots**.
|
||||
|
||||
1. Find your bot.
|
||||
|
||||
1. Under **API KEY**, click the **refresh** (<i class="fa fa-refresh"></i>) icon
|
||||
to the right of the bot's API key.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
### Invalidate your API key
|
||||
|
||||
{start_tabs}
|
||||
|
||||
{tab|desktop-web}
|
||||
|
||||
{settings_tab|account-and-privacy}
|
||||
|
||||
1. Under **API key**, click **Manage your API key**.
|
||||
|
||||
1. Enter your password, and click **Get API key**. If you don't know your
|
||||
password, click **reset it** and follow the instructions from there.
|
||||
|
||||
1. Click **Generate new API key**.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Configuring the Python bindings](/api/configuring-python-bindings)
|
File diff suppressed because it is too large
Load Diff
@@ -1,60 +0,0 @@
|
||||
# Client libraries
|
||||
|
||||
These API client libraries make it easy to work with Zulip's REST API
|
||||
in your favorite language.
|
||||
|
||||
## Official libraries
|
||||
|
||||
These libraries are maintained by members of the Zulip core team. The
|
||||
Python library is the most complete and best documented.
|
||||
|
||||
* [Python](https://github.com/zulip/python-zulip-api)
|
||||
* [JavaScript](https://github.com/zulip/zulip-js)
|
||||
|
||||
## User maintained libraries
|
||||
|
||||
The Zulip core team doesn't have the resources to maintain
|
||||
high-quality libraries for every programming language. We've
|
||||
collected a list of user-maintained libraries for popular languages:
|
||||
|
||||
* [Clojure](https://github.com/thieman/clojure-zulip)
|
||||
* [C#](https://github.com/zulip/zulip-csharp)
|
||||
* [Go](https://github.com/ifo/gozulipbot)
|
||||
* [Java](https://github.com/taliox/zulip-java-rest)
|
||||
* [Kotlin](https://gitlab.com/ppiag/kzulip)
|
||||
* [PHP](https://github.com/mrferos/zulip-php-client)
|
||||
* [Ruby](https://github.com/raws/wonder-llama)
|
||||
* [Swift](https://github.com/zulip/swift-zulip-api)
|
||||
|
||||
### Contributing
|
||||
|
||||
Contributing to improve language libraries is appreciated, as is
|
||||
writing new ones. If you actively maintain a Zulip language binding
|
||||
and would like it to be listed here (or would like to collaborate with
|
||||
us in making it an official library), post in [this
|
||||
topic][integrations-thread] in
|
||||
[the Zulip development community](https://zulip.com/development-community/)
|
||||
or submit a pull request [updating this
|
||||
page](https://zulip.readthedocs.io/en/latest/documentation/api.html).
|
||||
|
||||
[integrations-thread]: https://chat.zulip.org/#narrow/channel/127-integrations/topic/API.20client.20libraries/
|
||||
|
||||
### Outdated
|
||||
|
||||
!!! tip ""
|
||||
|
||||
The following projects are not actively maintained. Since
|
||||
Zulip's core APIs have been stable for 5 years, even very
|
||||
old libraries can be useful.
|
||||
|
||||
* [Lua](https://github.com/deckycoss/zulua)
|
||||
* [Erlang](https://github.com/femnad/tuplre)
|
||||
* [PHP](https://github.com/federicoq/zulip-php)
|
||||
* [Go](https://github.com/decached/go-zulip)
|
||||
* [Haskell](https://github.com/yamadapc/hzulip)
|
||||
* [Chicken Scheme](https://github.com/yamadapc/zulip-scheme)
|
||||
* [Scala](https://github.com/cqfd/zulip-scala)
|
||||
* [EventMachine](https://github.com/cqfd/zulip_machine)
|
||||
* [Ruby](https://github.com/verg/zulip-rb)
|
||||
* [Perl](https://github.com/Stantheman/WebService-Zulip)
|
||||
* [.Net](https://github.com/Shayan-To/ZulipClientApi)
|
@@ -1,161 +0,0 @@
|
||||
# Configuring the Python bindings
|
||||
|
||||
Zulip provides a set of tools that allows interacting with its API more
|
||||
easily, called the [Python bindings](https://pypi.python.org/pypi/zulip/).
|
||||
One of the most notable use cases for these bindings are bots developed
|
||||
using Zulip's [bot framework](/api/writing-bots).
|
||||
|
||||
In order to use them, you need to configure them with your identity
|
||||
(account, API key, and Zulip server URL). There are a few ways to
|
||||
achieve that:
|
||||
|
||||
- Using a `zuliprc` file, referenced via the `--config-file` option or
|
||||
the `config_file` option to the `zulip.Client` constructor
|
||||
(recommended for bots).
|
||||
- Using a `zuliprc` file in your home directory at `~/.zuliprc`
|
||||
(recommended for your own API key).
|
||||
- Using the [environment
|
||||
variables](https://en.wikipedia.org/wiki/Environment_variable)
|
||||
documented below.
|
||||
- Using the `--api-key`, `--email`, and `--site` variables as command
|
||||
line parameters.
|
||||
- Using the `api_key`, `email`, and `site` parameters to the
|
||||
`zulip.Client` constructor.
|
||||
|
||||
## Download a `zuliprc` file
|
||||
|
||||
{start_tabs}
|
||||
|
||||
{tab|for-a-bot}
|
||||
|
||||
{settings_tab|your-bots}
|
||||
|
||||
1. Click the **download** (<i class="fa fa-download"></i>) icon on the profile
|
||||
card of the desired bot to download the bot's `zuliprc` file.
|
||||
|
||||
!!! warn ""
|
||||
|
||||
Anyone with a bot's API key can impersonate the bot, so be careful with it!
|
||||
|
||||
{tab|for-yourself}
|
||||
|
||||
{settings_tab|account-and-privacy}
|
||||
|
||||
1. Under **API key**, click **Manage your API key**.
|
||||
|
||||
1. Enter your password, and click **Get API key**. If you don't know your
|
||||
password, click **reset it** and follow the
|
||||
instructions from there.
|
||||
|
||||
1. Click **Download zuliprc** to download your `zuliprc` file.
|
||||
|
||||
1. (optional) If you'd like your credentials to be used by default
|
||||
when using the Zulip API on your computer, move the `zuliprc` file
|
||||
to `~/.zuliprc` in your home directory.
|
||||
|
||||
!!! warn ""
|
||||
|
||||
Anyone with your API key can impersonate you, so be doubly careful with it.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
## Configuration keys and environment variables
|
||||
|
||||
`zuliprc` is a configuration file written in the
|
||||
[INI file format](https://en.wikipedia.org/wiki/INI_file),
|
||||
which contains key-value pairs as shown in the following example:
|
||||
|
||||
```
|
||||
[api]
|
||||
key=<API key from the web interface>
|
||||
email=<your email address>
|
||||
site=<your Zulip server's URI>
|
||||
...
|
||||
```
|
||||
|
||||
The keys you can use in this file (and their equivalent environment variables)
|
||||
can be found in the following table:
|
||||
|
||||
<table class="table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th><code>zuliprc</code> key</th>
|
||||
<th>Environment variable</th>
|
||||
<th>Required</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tr>
|
||||
<td><code>key</code></td>
|
||||
<td><code>ZULIP_API_KEY</code></td>
|
||||
<td>Yes</td>
|
||||
<td>
|
||||
<a href="/api/api-keys">API key</a>, which you can get through
|
||||
Zulip's web interface.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>email</code></td>
|
||||
<td><code>ZULIP_EMAIL</code></td>
|
||||
<td>Yes</td>
|
||||
<td>
|
||||
The email address of the user who owns the API key mentioned
|
||||
above.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>site</code></td>
|
||||
<td><code>ZULIP_SITE</code></td>
|
||||
<td>No</td>
|
||||
<td>
|
||||
URL where your Zulip server is located.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>client_cert_key</code></td>
|
||||
<td><code>ZULIP_CERT_KEY</code></td>
|
||||
<td>No</td>
|
||||
<td>
|
||||
Path to the SSL/TLS private key that the binding should use to
|
||||
connect to the server.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>client_cert</code></td>
|
||||
<td><code>ZULIP_CERT</code></td>
|
||||
<td>No*</td>
|
||||
<td>
|
||||
The public counterpart of <code>client_cert_key</code>/
|
||||
<code>ZULIP_CERT_KEY</code>. <i>This setting is required if a cert
|
||||
key has been set.</i>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>client_bundle</code></td>
|
||||
<td><code>ZULIP_CERT_BUNDLE</code></td>
|
||||
<td>No</td>
|
||||
<td>
|
||||
Path where the server's PEM-encoded certificate is located. CA
|
||||
certificates are also accepted, in case those CA's have issued the
|
||||
server's certificate. Defaults to the built-in CA bundle trusted
|
||||
by Python.
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>insecure</code></td>
|
||||
<td><code>ZULIP_ALLOW_INSECURE</code></td>
|
||||
<td>No</td>
|
||||
<td>
|
||||
Allows connecting to Zulip servers with an invalid SSL/TLS
|
||||
certificate. Please note that enabling this will make the HTTPS
|
||||
connection insecure. Defaults to <code>false</code>.
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Installation instructions](/api/installation-instructions)
|
||||
* [API keys](/api/api-keys)
|
||||
* [Running bots](/api/running-bots)
|
||||
* [Deploying bots](/api/deploying-bots)
|
@@ -1,202 +0,0 @@
|
||||
# Construct a narrow
|
||||
|
||||
A **narrow** is a set of filters for Zulip messages, that can be based
|
||||
on many different factors (like sender, channel, topic, search
|
||||
keywords, etc.). Narrows are used in various places in the Zulip
|
||||
API (most importantly, in the API for fetching messages).
|
||||
|
||||
It is simplest to explain the algorithm for encoding a search as a
|
||||
narrow using a single example. Consider the following search query
|
||||
(written as it would be entered in the Zulip web app's search box).
|
||||
It filters for messages sent to channel `announce`, not sent by
|
||||
`iago@zulip.com`, and containing the words `cool` and `sunglasses`:
|
||||
|
||||
```
|
||||
channel:announce -sender:iago@zulip.com cool sunglasses
|
||||
```
|
||||
|
||||
This query would be JSON-encoded for use in the Zulip API using JSON
|
||||
as a list of simple objects, as follows:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"operator": "channel",
|
||||
"operand": "announce"
|
||||
},
|
||||
{
|
||||
"operator": "sender",
|
||||
"operand": "iago@zulip.com",
|
||||
"negated": true
|
||||
},
|
||||
{
|
||||
"operator": "search",
|
||||
"operand": "cool sunglasses"
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
The Zulip help center article on [searching for messages](/help/search-for-messages)
|
||||
documents the majority of the search/narrow options supported by the
|
||||
Zulip API.
|
||||
|
||||
Note that many narrows, including all that lack a `channel` or `channels`
|
||||
operator, search the current user's personal message history. See
|
||||
[searching shared history](/help/search-for-messages#searching-shared-history)
|
||||
for details.
|
||||
|
||||
Clients should note that the `is:unread` filter takes advantage of the
|
||||
fact that there is a database index for unread messages, which can be an
|
||||
important optimization when fetching messages in certain cases (e.g.,
|
||||
when [adding the `read` flag to a user's personal
|
||||
messages](/api/update-message-flags-for-narrow)).
|
||||
|
||||
Note: When the value of `realm_empty_topic_display_name` found in
|
||||
the [POST /register](/api/register-queue) response is used as an operand
|
||||
for the `"topic"` operator in the narrow, it is interpreted
|
||||
as an empty string.
|
||||
|
||||
## Changes
|
||||
|
||||
* In Zulip 10.0 (feature level 366), support was added for a new
|
||||
`is:muted` operator combination, matching messages in topics and
|
||||
channels that the user has [muted](/help/mute-a-topic).
|
||||
|
||||
* Before Zulip 10.0 (feature level 334), empty string was not a valid
|
||||
topic name for channel messages.
|
||||
|
||||
* In Zulip 9.0 (feature level 271), support was added for a new filter
|
||||
operator, `with`, which uses a [message ID](#message-ids) for its
|
||||
operand, and is designed for creating permanent links to topics.
|
||||
|
||||
* In Zulip 9.0 (feature level 265), support was added for a new
|
||||
`is:followed` filter, matching messages in topics that the current
|
||||
user is [following](/help/follow-a-topic).
|
||||
|
||||
* In Zulip 9.0 (feature level 250), support was added for two filters
|
||||
related to stream messages: `channel` and `channels`. The `channel`
|
||||
operator is an alias for the `stream` operator. The `channels`
|
||||
operator is an alias for the `streams` operator. Both `channel` and
|
||||
`channels` return the same exact results as `stream` and `streams`
|
||||
respectively.
|
||||
|
||||
* In Zulip 9.0 (feature level 249), support was added for a new filter,
|
||||
`has:reaction`, which returns messages that have at least one [emoji
|
||||
reaction](/help/emoji-reactions).
|
||||
|
||||
* In Zulip 7.0 (feature level 177), support was added for three filters
|
||||
related to direct messages: `is:dm`, `dm` and `dm-including`. The
|
||||
`dm` operator replaced and deprecated the `pm-with` operator. The
|
||||
`is:dm` filter replaced and deprecated the `is:private` filter. The
|
||||
`dm-including` operator replaced and deprecated the `group-pm-with`
|
||||
operator.
|
||||
|
||||
* The `dm-including` and `group-pm-with` operators return slightly
|
||||
different results. For example, `dm-including:1234` returns all
|
||||
direct messages (1-on-1 and group) that include the current user
|
||||
and the user with the unique user ID of `1234`. On the other hand,
|
||||
`group-pm-with:1234` returned only group direct messages that
|
||||
included the current user and the user with the unique user ID of
|
||||
`1234`.
|
||||
|
||||
* Both `dm` and `is:dm` are aliases of `pm-with` and `is:private`
|
||||
respectively, and return the same exact results that the
|
||||
deprecated filters did.
|
||||
|
||||
## Narrows that use IDs
|
||||
|
||||
### Message IDs
|
||||
|
||||
The `id` and `with` operators use message IDs for their operands. The
|
||||
message ID operand for these two operators may be encoded as either a
|
||||
number or a string.
|
||||
|
||||
* `id:12345`: Search for only the message with ID `12345`.
|
||||
* `with:12345`: Search for the conversation that contains the message
|
||||
with ID `12345`.
|
||||
|
||||
The `id` operator returns the message with the specified ID if it exists,
|
||||
and if it can be accessed by the user.
|
||||
|
||||
The `with` operator is designed to be used for permanent links to
|
||||
topics, which means they should continue to work when the topic is
|
||||
[moved](/help/move-content-to-another-topic) or
|
||||
[resolved](/help/resolve-a-topic). If the message with the specified
|
||||
ID exists, and can be accessed by the user, then it will return
|
||||
messages with the `channel`/`topic`/`dm` operators corresponding to
|
||||
the current conversation containing that message, replacing any such
|
||||
operators included in the original narrow query.
|
||||
|
||||
If no such message exists, or the message ID represents a message that
|
||||
is inaccessible to the user, this operator will be ignored (rather
|
||||
than throwing an error) if the remaining operators uniquely identify a
|
||||
conversation (i.e., they contain `channel` and `topic` terms or `dm`
|
||||
term). This behavior is intended to provide the best possible
|
||||
experience for links to private channels with protected history.
|
||||
|
||||
The [help center](/help/search-for-messages#search-by-message-id) also
|
||||
documents the `near` operator for searching for messages by ID, but
|
||||
this narrow operator has no effect on filtering messages when sent to
|
||||
the server. In practice, when the `near` operator is used to search for
|
||||
messages, or is part of a URL fragment, the value of its operand should
|
||||
instead be used for the value of the `anchor` parameter in endpoints
|
||||
that also accept a `narrow` parameter; see
|
||||
[GET /messages][anchor-get-messages] and
|
||||
[POST /messages/flags/narrow][anchor-post-flags].
|
||||
|
||||
**Changes**: Prior to Zulip 8.0 (feature level 194), the message ID
|
||||
operand for the `id` operator needed to be encoded as a string.
|
||||
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"operator": "id",
|
||||
"operand": 12345
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
### Channel and user IDs
|
||||
|
||||
There are a few additional narrow/search options (new in Zulip 2.1)
|
||||
that use either channel IDs or user IDs that are not documented in the
|
||||
help center because they are primarily useful to API clients:
|
||||
|
||||
* `channel:1234`: Search messages sent to the channel with ID `1234`.
|
||||
* `sender:1234`: Search messages sent by user ID `1234`.
|
||||
* `dm:1234`: Search the direct message conversation between
|
||||
you and user ID `1234`.
|
||||
* `dm:1234,5678`: Search the direct message conversation between
|
||||
you, user ID `1234`, and user ID `5678`.
|
||||
* `dm-including:1234`: Search all direct messages (1-on-1 and group)
|
||||
that include you and user ID `1234`.
|
||||
|
||||
!!! tip ""
|
||||
|
||||
A user ID can be found by [viewing a user's profile][view-profile]
|
||||
in the web or desktop apps. A channel ID can be found when [browsing
|
||||
channels][browse-channels] in the web or desktop apps.
|
||||
|
||||
The operands for these search options must be encoded either as an
|
||||
integer ID or a JSON list of integer IDs. For example, to query
|
||||
messages sent by a user 1234 to a direct message thread with yourself,
|
||||
user 1234, and user 5678, the correct JSON-encoded query is:
|
||||
|
||||
```json
|
||||
[
|
||||
{
|
||||
"operator": "dm",
|
||||
"operand": [1234, 5678]
|
||||
},
|
||||
{
|
||||
"operator": "sender",
|
||||
"operand": 1234
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
[view-profile]: /help/view-someones-profile
|
||||
[browse-channels]: /help/introduction-to-channels#browse-and-subscribe-to-channels
|
||||
[anchor-get-messages]: /api/get-messages#parameter-anchor
|
||||
[anchor-post-flags]: /api/update-message-flags-for-narrow#parameter-anchor
|
@@ -1,49 +0,0 @@
|
||||
{generate_api_header(/scheduled_messages:post)}
|
||||
|
||||
## Usage examples
|
||||
|
||||
{start_tabs}
|
||||
|
||||
{generate_code_example(python)|/scheduled_messages:post|example}
|
||||
|
||||
{generate_code_example(javascript)|/scheduled_messages:post|example}
|
||||
|
||||
{tab|curl}
|
||||
|
||||
``` curl
|
||||
# Create a scheduled channel message
|
||||
curl -X POST {{ api_url }}/v1/scheduled_messages \
|
||||
-u BOT_EMAIL_ADDRESS:BOT_API_KEY \
|
||||
--data-urlencode type=stream \
|
||||
--data-urlencode to=9 \
|
||||
--data-urlencode topic=Hello \
|
||||
--data-urlencode 'content=Nice to meet everyone!' \
|
||||
--data-urlencode scheduled_delivery_timestamp=3165826990
|
||||
|
||||
# Create a scheduled direct message
|
||||
curl -X POST {{ api_url }}/v1/scheduled_messages \
|
||||
-u BOT_EMAIL_ADDRESS:BOT_API_KEY \
|
||||
--data-urlencode type=direct \
|
||||
--data-urlencode 'to=[9, 10]' \
|
||||
--data-urlencode 'content=Can we meet on Monday?' \
|
||||
--data-urlencode scheduled_delivery_timestamp=3165826990
|
||||
|
||||
```
|
||||
|
||||
{end_tabs}
|
||||
|
||||
## Parameters
|
||||
|
||||
{generate_api_arguments_table|zulip.yaml|/scheduled_messages:post}
|
||||
|
||||
{generate_parameter_description(/scheduled_messages:post)}
|
||||
|
||||
## Response
|
||||
|
||||
{generate_return_values_table|zulip.yaml|/scheduled_messages:post}
|
||||
|
||||
{generate_response_description(/scheduled_messages:post)}
|
||||
|
||||
#### Example response(s)
|
||||
|
||||
{generate_code_example|/scheduled_messages:post|fixture}
|
@@ -1,6 +0,0 @@
|
||||
# Create a channel
|
||||
|
||||
You can create a channel using Zulip's REST API by submitting a
|
||||
[subscribe](/api/subscribe) request with a channel name that
|
||||
doesn't yet exist and passing appropriate parameters to define
|
||||
the initial configuration of the new channel.
|
@@ -1,254 +0,0 @@
|
||||
# Deploying bots in production
|
||||
|
||||
Usually, work on a bot starts on a laptop. At some point, you'll want
|
||||
to deploy your bot in a production environment, so that it'll stay up
|
||||
regardless of what's happening with your laptop. There are several
|
||||
options for doing so:
|
||||
|
||||
* The simplest is running `zulip-run-bot` inside a `screen` session on
|
||||
a server. This works, but if your server reboots, you'll need to
|
||||
manually restart it, so we don't recommend it.
|
||||
* Using `supervisord` or a similar tool for managing a production
|
||||
process with `zulip-run-bot`. This consumes a bit of resources
|
||||
(since you need a persistent process running), but otherwise works
|
||||
great.
|
||||
* Using the Zulip Botserver, which is a simple Flask server for
|
||||
running a bot in production, and connecting that to Zulip's outgoing
|
||||
webhooks feature. This can be deployed in environments like
|
||||
Heroku's free tier without running a persistent process.
|
||||
|
||||
## Zulip Botserver
|
||||
|
||||
The Zulip Botserver is for people who want to
|
||||
|
||||
* run bots in production.
|
||||
* run multiple bots at once.
|
||||
|
||||
The Zulip Botserver is a Python (Flask) server that implements Zulip's
|
||||
outgoing webhooks API. You can of course write your own servers using
|
||||
the outgoing webhooks API, but the Botserver is designed to make it
|
||||
easy for a novice Python programmer to write a new bot and deploy it
|
||||
in production.
|
||||
|
||||
### How Botserver works
|
||||
|
||||
Zulip Botserver starts a web server that listens to incoming messages
|
||||
from your main Zulip server. The sequence of events in a successful
|
||||
Botserver interaction are:
|
||||
|
||||
1. Your bot user is mentioned or receives a direct message:
|
||||
|
||||
```
|
||||
@**My Bot User** hello world
|
||||
```
|
||||
|
||||
1. The Zulip server sends a POST request to your Botserver endpoint URL:
|
||||
|
||||
```
|
||||
{
|
||||
"message":{
|
||||
"content":"@**My Bot User** hello world",
|
||||
},
|
||||
"bot_email":"myuserbot-bot@example.com",
|
||||
"trigger":"mention",
|
||||
"token":"XXXX"
|
||||
}
|
||||
```
|
||||
|
||||
This URL is configured in the Zulip web-app in your Bot User's settings.
|
||||
|
||||
1. The Botserver searches for a bot to handle the message, and executes your
|
||||
bot's `handle_message` code.
|
||||
|
||||
Your bot's code should work just like it does with `zulip-run-bot`.
|
||||
|
||||
### Installing the Zulip Botserver
|
||||
|
||||
Install the `zulip_botserver` package:
|
||||
|
||||
```
|
||||
pip3 install zulip_botserver
|
||||
```
|
||||
|
||||
### Create a bot in your Zulip organization
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. Navigate to the **Bots** tab of the **Personal settings** menu, and click
|
||||
**Add a new bot**.
|
||||
|
||||
1. Set the **Bot type** to **Outgoing webhook**.
|
||||
|
||||
1. Set the **endpoint URL** to `https://<host>:<port>` where `host` is the
|
||||
hostname of the server you'll be running the Botserver on, and `port` is
|
||||
the port number. The default port is `5002`.
|
||||
|
||||
1. Click **Create bot**. You should see the new bot user in the
|
||||
**Active bots** panel.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
### Running a bot using the Zulip Botserver
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. [Create your bot](#create-a-bot-in-your-zulip-organization) in your Zulip
|
||||
organization.
|
||||
|
||||
1. Download the `zuliprc` file for the bot created above from the
|
||||
**Bots** tab of the **Personal settings** menu, by clicking the download
|
||||
(<i class="fa fa-download"></i>) icon under the bot's name.
|
||||
|
||||
1. Run the Botserver, where `helloworld` is the name of the bot you
|
||||
want to run:
|
||||
|
||||
`zulip-botserver --config-file <path_to_zuliprc> --bot-name=helloworld`
|
||||
|
||||
You can specify the port number and various other options; run
|
||||
`zulip-botserver --help` to see how to do this.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
Congrats, everything is set up! Test your Botserver like you would
|
||||
test a normal bot.
|
||||
|
||||
### Running multiple bots using the Zulip Botserver
|
||||
|
||||
The Zulip Botserver also supports running multiple bots from a single
|
||||
Botserver process.
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. [Create your bots](#create-a-bot-in-your-zulip-organization)
|
||||
in your Zulip organization.
|
||||
|
||||
1. Download the `botserverrc` file from the **Bots** tab of the
|
||||
**Personal settings** menu, using the **Download config of all active
|
||||
outgoing webhook bots in Zulip Botserver format** option.
|
||||
|
||||
1. Open the `botserverrc`. It should contain one or more sections that look
|
||||
like this:
|
||||
|
||||
```
|
||||
[helloworld]
|
||||
email=foo-bot@hostname
|
||||
key=dOHHlyqgpt5g0tVuVl6NHxDLlc9eFRX4
|
||||
site=http://hostname
|
||||
token=aQVQmSd6j6IHphJ9m1jhgHdbnhl5ZcsY
|
||||
bot-config-file=~/path/to/helloworld.conf
|
||||
```
|
||||
|
||||
Each section contains the configuration for an outgoing webhook bot.
|
||||
|
||||
1. For each bot, enter the name of the bot you want to run in the square
|
||||
brackets `[]`, e.g., the above example applies to the `helloworld` bot.
|
||||
To run an external bot, enter the path to the bot's python file instead,
|
||||
e.g., `[~/Documents/my_bot_script.py]`.
|
||||
|
||||
!!! tip ""
|
||||
|
||||
The `bot-config-file` setting is needed only for bots that
|
||||
use a config file.
|
||||
|
||||
1. Run the Zulip Botserver by passing the `botserverrc` to it.
|
||||
|
||||
```
|
||||
zulip-botserver --config-file <path-to-botserverrc> --hostname <address> --port <port>
|
||||
```
|
||||
|
||||
If omitted, `hostname` defaults to `127.0.0.1` and `port` to `5002`.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
### Running Zulip Botserver with supervisord
|
||||
|
||||
[supervisord](http://supervisord.org/) is a popular tool for running
|
||||
services in production. It helps ensure the service starts on boot,
|
||||
manages log files, restarts the service if it crashes, etc. This
|
||||
section documents how to run the Zulip Botserver using *supervisord*.
|
||||
|
||||
Running the Zulip Botserver with *supervisord* works almost like
|
||||
running it manually.
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. Install *supervisord* via your package manager; e.g., on Debian/Ubuntu:
|
||||
|
||||
```
|
||||
sudo apt-get install supervisor
|
||||
```
|
||||
|
||||
1. Configure *supervisord*. *supervisord* stores its configuration in
|
||||
`/etc/supervisor/conf.d`.
|
||||
* Do **one** of the following:
|
||||
* Download the [sample config file][supervisord-config-file]
|
||||
and store it in `/etc/supervisor/conf.d/zulip-botserver.conf`.
|
||||
* Copy the following section into your existing supervisord config file.
|
||||
|
||||
[program:zulip-botserver]
|
||||
command=zulip-botserver --config-file=<path/to/your/botserverrc>
|
||||
--hostname <address> --port <port>
|
||||
startsecs=3
|
||||
stdout_logfile=/var/log/zulip-botserver.log ; all output of your Botserver will be logged here
|
||||
redirect_stderr=true
|
||||
|
||||
* Edit the `<>` sections according to your preferences.
|
||||
|
||||
[supervisord-config-file]: https://raw.githubusercontent.com/zulip/python-zulip-api/main/zulip_botserver/zulip-botserver-supervisord.conf
|
||||
|
||||
1. Update *supervisord* to read the configuration file:
|
||||
|
||||
```
|
||||
supervisorctl reread
|
||||
supervisorctl update
|
||||
```
|
||||
|
||||
(or you can use `/etc/init.d/supervisord restart`, but this is less
|
||||
disruptive if you're using *supervisord* for other services as well).
|
||||
|
||||
1. Test if your setup is successful:
|
||||
|
||||
```
|
||||
supervisorctl status
|
||||
```
|
||||
|
||||
The output should include a line similar to this:
|
||||
> zulip-botserver RUNNING pid 28154, uptime 0:00:27
|
||||
|
||||
The standard output of the Botserver will be logged to the path in
|
||||
your *supervisord* configuration.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
If you are hosting the Botserver yourself (as opposed to using a
|
||||
hosting service that provides SSL), we recommend securing your
|
||||
Botserver with SSL using an `nginx` or `Apache` reverse proxy and
|
||||
[Certbot](https://certbot.eff.org/).
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
- Make sure the API key you're using is for an [outgoing webhook
|
||||
bot](/api/outgoing-webhooks) and you've
|
||||
correctly configured the URL for your Botserver.
|
||||
|
||||
- Your Botserver needs to be accessible from your Zulip server over
|
||||
HTTP(S). Make sure any firewall allows the connection. We
|
||||
recommend using [zulip-run-bot](running-bots) instead for
|
||||
development/testing on a laptop or other non-server system.
|
||||
If your Zulip server is self-hosted, you can test by running `curl
|
||||
http://zulipbotserver.example.com:5002` from your Zulip server;
|
||||
the output should be:
|
||||
|
||||
```
|
||||
$ curl http://zulipbotserver.example.com:5002/
|
||||
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
|
||||
<title>405 Method Not Allowed</title>
|
||||
<h1>Method Not Allowed</h1>
|
||||
<p>The method is not allowed for the requested URL.</p>
|
||||
```
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Non-webhook integrations](/api/non-webhook-integrations)
|
||||
* [Running bots](/api/running-bots)
|
||||
* [Writing bots](/api/writing-bots)
|
@@ -1,122 +0,0 @@
|
||||
# Group-setting values
|
||||
|
||||
Settings defining permissions in Zulip are increasingly represented
|
||||
using [user groups](/help/user-groups), which offer much more flexible
|
||||
configuration than the older [roles](/api/roles-and-permissions) system.
|
||||
|
||||
!!! warn ""
|
||||
|
||||
**Note**: Many group-valued settings are configured to require
|
||||
a single system group for their value via
|
||||
`server_supported_permission_settings`, pending web app UI
|
||||
changes to fully support group-setting values.
|
||||
|
||||
**Changes**: Before Zulip 10.0 (feature level 309), only system
|
||||
groups were permitted values for group-setting values in
|
||||
production environments, regardless of the values in
|
||||
`server_supported_permission_settings`.
|
||||
|
||||
In the API, these settings are represented using a **group-setting
|
||||
value**, which can take two forms:
|
||||
|
||||
- An integer user group ID, which can be either a named user group
|
||||
visible in the UI or a [role-based system group](#system-groups).
|
||||
- An object with fields `direct_member_ids`, containing a list of
|
||||
integer user IDs, and `direct_subgroup_ids`, containing a list of
|
||||
integer group IDs. The setting's value is the union of the
|
||||
identified collection of users and groups.
|
||||
|
||||
Group-setting values in the object form can be thought of as an
|
||||
anonymous group. They function very much like a named user group
|
||||
object, and remove the naming and UI overhead involved in creating
|
||||
a visible user group just to store the value of a single setting.
|
||||
|
||||
The server will canonicalize an object with an empty `direct_member_ids`
|
||||
list and a `direct_subgroup_ids` list that contains just a single group
|
||||
ID to the integer format.
|
||||
|
||||
## System groups
|
||||
|
||||
The Zulip server maintains a collection of system groups that
|
||||
correspond to the users with a given role; this makes it convenient to
|
||||
store concepts like "all administrators" in a group-setting
|
||||
value. These use a special naming convention and can be recognized by
|
||||
the `is_system_group` property on their group object.
|
||||
|
||||
The following system groups are maintained by the Zulip server:
|
||||
|
||||
- `role:internet`: Everyone on the Internet has this permission; this
|
||||
is used to configure the [public access
|
||||
option](/help/public-access-option).
|
||||
- `role:everyone`: All users, including guests.
|
||||
- `role:members`: All users, excluding guests.
|
||||
- `role:fullmembers`: All [full
|
||||
members](https://zulip.com/api/roles-and-permissions#determining-if-a-user-is-a-full-member)
|
||||
of the organization.
|
||||
- `role:moderators`: All users with at least the moderator role.
|
||||
- `role:administrators`: All users with at least the administrator
|
||||
role.
|
||||
- `role:owners`: All users with the owner role.
|
||||
- `role:nobody`: The formal empty group. Used in the API to represent
|
||||
disabling a feature.
|
||||
|
||||
Client UI for setting a permission or displaying a group (when
|
||||
silently mentioned, for example) is encouraged to display system
|
||||
groups using their description, rather than using their `role:`-prefixed
|
||||
names, which are chosen to be unique and clear in the API.
|
||||
|
||||
System groups should generally not be displayed in UI for
|
||||
administering an organization's user groups, since they are not
|
||||
directly mutable.
|
||||
|
||||
## Updating group-setting values
|
||||
|
||||
The Zulip API uses a special format for modifying an existing setting
|
||||
using a group-setting value.
|
||||
|
||||
A **group-setting update** is an object with a `new` field and an
|
||||
optional `old` field, each containing a group-setting value. The
|
||||
setting's value will be set to the membership expressed by the `new`
|
||||
field.
|
||||
|
||||
The `old` field expresses the client's understanding of the current
|
||||
value of the setting. If the `old` field is present and does not match
|
||||
the actual current value of the setting, then the request will fail
|
||||
with error code `EXPECTATION_MISMATCH` and no changes will be applied.
|
||||
|
||||
When a user edits the setting in a UI, the resulting API request
|
||||
should generally always include the `old` field, giving the value
|
||||
the list had when the user started editing. This accurately expresses
|
||||
the user's intent, and if two users edit the same list around the
|
||||
same time, it prevents a situation where the second change
|
||||
accidentally reverts the first one without either user noticing.
|
||||
|
||||
Omitting `old` is appropriate where the intent really is a new complete
|
||||
list rather than an edit, for example in an integration that syncs the
|
||||
list from an external source of truth.
|
||||
|
||||
## Permitted values
|
||||
|
||||
Not every possible group-setting value is a valid configuration for a
|
||||
given group-based setting. For example, as a security hardening
|
||||
measure, some administrative permissions should never be exercised by
|
||||
guest users, and the system group for all users, including guests,
|
||||
should not be offered to users as an option for those settings.
|
||||
|
||||
Others have restrictions to only permit system groups due to UI
|
||||
components not yet having been migrated to support a broader set of
|
||||
values. In order to avoid this configuration ending up hardcoded in
|
||||
clients, every permission setting using this framework has an entry in
|
||||
the `server_supported_permission_settings` section of the [`POST
|
||||
/register`](/api/register-queue) response.
|
||||
|
||||
Clients that support mutating group-settings values must parse that
|
||||
part of the `register` payload in order to compute the set of
|
||||
permitted values to offer to the user and avoid server-side errors
|
||||
when trying to save a value.
|
||||
|
||||
Note specifically that the `allow_everyone_group` field, which
|
||||
determines whether the setting can have the value of "all user
|
||||
accounts, including guests" also controls whether guest users can
|
||||
exercise the permission regardless of their membership in the
|
||||
group-setting value.
|
@@ -1,81 +0,0 @@
|
||||
# HTTP headers
|
||||
|
||||
This page documents the HTTP headers used by the Zulip API.
|
||||
|
||||
Most important is that API clients authenticate to the server using
|
||||
HTTP Basic authentication. If you're using the official [Python or
|
||||
JavaScript bindings](/api/installation-instructions), this is taken
|
||||
care of when you configure said bindings.
|
||||
|
||||
Otherwise, see the `curl` example on each endpoint's documentation
|
||||
page, which details the request format.
|
||||
|
||||
Documented below are additional HTTP headers and header conventions
|
||||
generally used by Zulip:
|
||||
|
||||
## The `User-Agent` header
|
||||
|
||||
Clients are not required to pass a `User-Agent` HTTP header, but we
|
||||
highly recommend doing so when writing an integration. It's easy to do
|
||||
and it can help save time when debugging issues related to an API
|
||||
client.
|
||||
|
||||
If provided, the Zulip server will parse the `User-Agent` HTTP header
|
||||
in order to identify specific clients and integrations. This
|
||||
information is used by the server for logging, [usage
|
||||
statistics](/help/analytics), and on rare occasions, for
|
||||
backwards-compatibility logic to preserve support for older versions
|
||||
of official clients.
|
||||
|
||||
Official Zulip clients and integrations use a `User-Agent` that starts
|
||||
with something like `ZulipMobile/20.0.103 `, encoding the name of the
|
||||
application and its version.
|
||||
|
||||
Zulip's official API bindings have reasonable defaults for
|
||||
`User-Agent`. For example, the official Zulip Python bindings have a
|
||||
default `User-Agent` starting with `ZulipPython/{version}`, where
|
||||
`version` is the version of the library.
|
||||
|
||||
You can give your bot/integration its own name by passing the `client`
|
||||
parameter when initializing the Python bindings. For example, the
|
||||
official Zulip Nagios integration is initialized like this:
|
||||
|
||||
``` python
|
||||
client = zulip.Client(
|
||||
config_file=opts.config, client=f"ZulipNagios/{VERSION}"
|
||||
)
|
||||
```
|
||||
|
||||
If you are working on an integration that you plan to share outside
|
||||
your organization, you can get help picking a good name in
|
||||
[#integrations][integrations-channel] in the [Zulip development
|
||||
community](https://zulip.com/development-community/).
|
||||
|
||||
## Rate-limiting response headers
|
||||
|
||||
To help clients avoid exceeding rate limits, Zulip sets the following
|
||||
HTTP headers in all API responses:
|
||||
|
||||
* `X-RateLimit-Remaining`: The number of additional requests of this
|
||||
type that the client can send before exceeding its limit.
|
||||
* `X-RateLimit-Limit`: The limit that would be applicable to a client
|
||||
that had not made any recent requests of this type. This is useful
|
||||
for designing a client's burst behavior so as to avoid ever reaching
|
||||
a rate limit.
|
||||
* `X-RateLimit-Reset`: The time at which the client will no longer
|
||||
have any rate limits applied to it (and thus could do a burst of
|
||||
`X-RateLimit-Limit` requests).
|
||||
|
||||
[Zulip's rate limiting rules are configurable][rate-limiting-rules],
|
||||
and can vary by server and over time. The default configuration
|
||||
currently limits:
|
||||
|
||||
* Every user is limited to 200 total API requests per minute.
|
||||
* Separate, much lower limits for authentication/login attempts.
|
||||
|
||||
When the Zulip server has configured multiple rate limits that apply
|
||||
to a given request, the values returned will be for the strictest
|
||||
limit.
|
||||
|
||||
[rate-limiting-rules]: https://zulip.readthedocs.io/en/latest/production/security-model.html#rate-limiting
|
||||
[integrations-channel]: https://chat.zulip.org/#narrow/channel/127-integrations/
|
@@ -1,3 +0,0 @@
|
||||
!!! warn ""
|
||||
|
||||
This endpoint is only available to organization administrators.
|
@@ -1,163 +0,0 @@
|
||||
#### Messages
|
||||
|
||||
* [Send a message](/api/send-message)
|
||||
* [Upload a file](/api/upload-file)
|
||||
* [Edit a message](/api/update-message)
|
||||
* [Delete a message](/api/delete-message)
|
||||
* [Get messages](/api/get-messages)
|
||||
* [Construct a narrow](/api/construct-narrow)
|
||||
* [Add an emoji reaction](/api/add-reaction)
|
||||
* [Remove an emoji reaction](/api/remove-reaction)
|
||||
* [Render a message](/api/render-message)
|
||||
* [Fetch a single message](/api/get-message)
|
||||
* [Check if messages match a narrow](/api/check-messages-match-narrow)
|
||||
* [Get a message's edit history](/api/get-message-history)
|
||||
* [Update personal message flags](/api/update-message-flags)
|
||||
* [Update personal message flags for narrow](/api/update-message-flags-for-narrow)
|
||||
* [Mark all messages as read](/api/mark-all-as-read)
|
||||
* [Mark messages in a channel as read](/api/mark-stream-as-read)
|
||||
* [Mark messages in a topic as read](/api/mark-topic-as-read)
|
||||
* [Get a message's read receipts](/api/get-read-receipts)
|
||||
* [Report a message](/api/report-message)
|
||||
|
||||
#### Scheduled messages
|
||||
|
||||
* [Get scheduled messages](/api/get-scheduled-messages)
|
||||
* [Create a scheduled message](/api/create-scheduled-message)
|
||||
* [Edit a scheduled message](/api/update-scheduled-message)
|
||||
* [Delete a scheduled message](/api/delete-scheduled-message)
|
||||
|
||||
#### Message reminders
|
||||
|
||||
* [Create a message reminder](/api/create-message-reminder)
|
||||
* [Get reminders](/api/get-reminders)
|
||||
* [Delete a reminder](/api/delete-reminder)
|
||||
|
||||
#### Drafts
|
||||
|
||||
* [Get drafts](/api/get-drafts)
|
||||
* [Create drafts](/api/create-drafts)
|
||||
* [Edit a draft](/api/edit-draft)
|
||||
* [Delete a draft](/api/delete-draft)
|
||||
* [Get all saved snippets](/api/get-saved-snippets)
|
||||
* [Create a saved snippet](/api/create-saved-snippet)
|
||||
* [Edit a saved snippet](/api/edit-saved-snippet)
|
||||
* [Delete a saved snippet](/api/delete-saved-snippet)
|
||||
|
||||
#### Navigation views
|
||||
|
||||
* [Get all navigation views](/api/get-navigation-views)
|
||||
* [Add a navigation view](/api/add-navigation-view)
|
||||
* [Update the navigation view](/api/edit-navigation-view)
|
||||
* [Remove a navigation view](/api/remove-navigation-view)
|
||||
|
||||
#### Channels
|
||||
|
||||
* [Get subscribed channels](/api/get-subscriptions)
|
||||
* [Subscribe to a channel](/api/subscribe)
|
||||
* [Unsubscribe from a channel](/api/unsubscribe)
|
||||
* [Get subscription status](/api/get-subscription-status)
|
||||
* [Get channel subscribers](/api/get-subscribers)
|
||||
* [Update subscription settings](/api/update-subscription-settings)
|
||||
* [Get all channels](/api/get-streams)
|
||||
* [Get a channel by ID](/api/get-stream-by-id)
|
||||
* [Get channel ID](/api/get-stream-id)
|
||||
* [Create a channel](/api/create-stream)
|
||||
* [Update a channel](/api/update-stream)
|
||||
* [Archive a channel](/api/archive-stream)
|
||||
* [Get channel's email address](/api/get-stream-email-address)
|
||||
* [Get topics in a channel](/api/get-stream-topics)
|
||||
* [Topic muting](/api/mute-topic)
|
||||
* [Update personal preferences for a topic](/api/update-user-topic)
|
||||
* [Delete a topic](/api/delete-topic)
|
||||
* [Add a default channel](/api/add-default-stream)
|
||||
* [Remove a default channel](/api/remove-default-stream)
|
||||
* [Create a channel folder](/api/create-channel-folder)
|
||||
* [Get channel folders](/api/get-channel-folders)
|
||||
* [Update a channel folder](/api/update-channel-folder)
|
||||
|
||||
#### Users
|
||||
|
||||
* [Get a user](/api/get-user)
|
||||
* [Get a user by email](/api/get-user-by-email)
|
||||
* [Get own user](/api/get-own-user)
|
||||
* [Get users](/api/get-users)
|
||||
* [Create a user](/api/create-user)
|
||||
* [Update a user](/api/update-user)
|
||||
* [Update a user by email](/api/update-user-by-email)
|
||||
* [Deactivate a user](/api/deactivate-user)
|
||||
* [Deactivate own user](/api/deactivate-own-user)
|
||||
* [Reactivate a user](/api/reactivate-user)
|
||||
* [Get a user's status](/api/get-user-status)
|
||||
* [Update your status](/api/update-status)
|
||||
* [Set "typing" status](/api/set-typing-status)
|
||||
* [Set "typing" status for message editing](/api/set-typing-status-for-message-edit)
|
||||
* [Get a user's presence](/api/get-user-presence)
|
||||
* [Get presence of all users](/api/get-presence)
|
||||
* [Update your presence](/api/update-presence)
|
||||
* [Get attachments](/api/get-attachments)
|
||||
* [Delete an attachment](/api/remove-attachment)
|
||||
* [Update settings](/api/update-settings)
|
||||
* [Get user groups](/api/get-user-groups)
|
||||
* [Create a user group](/api/create-user-group)
|
||||
* [Update a user group](/api/update-user-group)
|
||||
* [Deactivate a user group](/api/deactivate-user-group)
|
||||
* [Update user group members](/api/update-user-group-members)
|
||||
* [Update subgroups of a user group](/api/update-user-group-subgroups)
|
||||
* [Get user group membership status](/api/get-is-user-group-member)
|
||||
* [Get user group members](/api/get-user-group-members)
|
||||
* [Get subgroups of a user group](/api/get-user-group-subgroups)
|
||||
* [Mute a user](/api/mute-user)
|
||||
* [Unmute a user](/api/unmute-user)
|
||||
* [Get all alert words](/api/get-alert-words)
|
||||
* [Add alert words](/api/add-alert-words)
|
||||
* [Remove alert words](/api/remove-alert-words)
|
||||
|
||||
#### Invitations
|
||||
|
||||
* [Get all invitations](/api/get-invites)
|
||||
* [Send invitations](/api/send-invites)
|
||||
* [Create a reusable invitation link](/api/create-invite-link)
|
||||
* [Resend an email invitation](/api/resend-email-invite)
|
||||
* [Revoke an email invitation](/api/revoke-email-invite)
|
||||
* [Revoke a reusable invitation link](/api/revoke-invite-link)
|
||||
|
||||
#### Server & organizations
|
||||
|
||||
* [Get server settings](/api/get-server-settings)
|
||||
* [Get linkifiers](/api/get-linkifiers)
|
||||
* [Add a linkifier](/api/add-linkifier)
|
||||
* [Update a linkifier](/api/update-linkifier)
|
||||
* [Remove a linkifier](/api/remove-linkifier)
|
||||
* [Reorder linkifiers](/api/reorder-linkifiers)
|
||||
* [Add a code playground](/api/add-code-playground)
|
||||
* [Remove a code playground](/api/remove-code-playground)
|
||||
* [Get all custom emoji](/api/get-custom-emoji)
|
||||
* [Upload custom emoji](/api/upload-custom-emoji)
|
||||
* [Deactivate custom emoji](/api/deactivate-custom-emoji)
|
||||
* [Get all custom profile fields](/api/get-custom-profile-fields)
|
||||
* [Reorder custom profile fields](/api/reorder-custom-profile-fields)
|
||||
* [Create a custom profile field](/api/create-custom-profile-field)
|
||||
* [Update realm-level defaults of user settings](/api/update-realm-user-settings-defaults)
|
||||
* [Get all data exports](/api/get-realm-exports)
|
||||
* [Create a data export](/api/export-realm)
|
||||
* [Get data export consent state](/api/get-realm-export-consents)
|
||||
|
||||
#### Real-time events
|
||||
|
||||
* [Real time events API](/api/real-time-events)
|
||||
* [Register an event queue](/api/register-queue)
|
||||
* [Get events from an event queue](/api/get-events)
|
||||
* [Delete an event queue](/api/delete-queue)
|
||||
|
||||
#### Specialty endpoints
|
||||
|
||||
* [Fetch an API key (production)](/api/fetch-api-key)
|
||||
* [Fetch an API key (development only)](/api/dev-fetch-api-key)
|
||||
* [Send a test notification to mobile device(s)](/api/test-notify)
|
||||
* [Register E2EE push device](/api/register-push-device)
|
||||
* [Add an APNs device token](/api/add-apns-token)
|
||||
* [Remove an APNs device token](/api/remove-apns-token)
|
||||
* [Add an FCM registration token](/api/add-fcm-token)
|
||||
* [Remove an FCM registration token](/api/remove-fcm-token)
|
||||
* [Create BigBlueButton video call](/api/create-big-blue-button-video-call)
|
@@ -1,223 +0,0 @@
|
||||
# Incoming webhook integrations
|
||||
|
||||
An incoming webhook allows a third-party service to push data to Zulip when
|
||||
something happens. There are several ways to set up an incoming webhook in
|
||||
Zulip:
|
||||
|
||||
* Use our [REST API](/api/rest) endpoint for [sending
|
||||
messages](/api/send-message). This works great for internal tools
|
||||
or cases where the third-party tool wants to control the formatting
|
||||
of the messages in Zulip.
|
||||
* Use one of our supported [integration
|
||||
frameworks](/integrations/meta-integration), such as the
|
||||
[Slack-compatible incoming webhook](/integrations/doc/slack_incoming),
|
||||
[Zapier integration](/integrations/doc/zapier), or
|
||||
[IFTTT integration](/integrations/doc/ifttt).
|
||||
* Implementing an incoming webhook integration (detailed on this page),
|
||||
where all the logic for formatting the Zulip messages lives in the
|
||||
Zulip server. This is how most of [Zulip's official
|
||||
integrations](/integrations/) work, because they enable Zulip to
|
||||
support third-party services that just have an "outgoing webhook"
|
||||
feature (without the third party needing to do any work specific to
|
||||
Zulip).
|
||||
|
||||
In an incoming webhook integration, the third-party service's
|
||||
"outgoing webhook" feature sends an `HTTP POST` to a special URL when
|
||||
it has something for you, and then the Zulip "incoming webhook"
|
||||
integration handles that incoming data to format and send a message in
|
||||
Zulip.
|
||||
|
||||
New official Zulip webhook integrations can take just a few hours to
|
||||
write, including tests and documentation, if you use the right
|
||||
process.
|
||||
|
||||
## Quick guide
|
||||
|
||||
* Set up the
|
||||
[Zulip development environment](https://zulip.readthedocs.io/en/latest/development/overview.html).
|
||||
|
||||
* Use [Zulip's JSON integration](/integrations/doc/json),
|
||||
<https://webhook.site/>, or a similar site to capture an example
|
||||
webhook payload from the third-party service. Create a
|
||||
`zerver/webhooks/<mywebhook>/fixtures/` directory, and add the
|
||||
captured JSON payload as a test fixture.
|
||||
|
||||
* Create an `Integration` object, and add it to the `WEBHOOK_INTEGRATIONS`
|
||||
list in `zerver/lib/integrations.py`. Search for `WebhookIntegration` in that
|
||||
file to find an existing one to copy.
|
||||
|
||||
* Write a draft webhook handler in `zerver/webhooks/<mywebhook>/view.py`. There
|
||||
are a lot of examples in the `zerver/webhooks/` directory that you can copy.
|
||||
We recommend templating from a short one, like `zendesk`.
|
||||
|
||||
* Write a test for your fixture in `zerver/webhooks/<mywebhook>/tests.py`.
|
||||
Run the test for your integration like this:
|
||||
|
||||
```
|
||||
tools/test-backend zerver/webhooks/<mywebhook>/
|
||||
```
|
||||
|
||||
Iterate on debugging the test and webhooks handler until it all
|
||||
works.
|
||||
|
||||
* Capture payloads for the other common types of `POST`s the third-party
|
||||
service will make, and add tests for them; usually this part of the
|
||||
process is pretty fast.
|
||||
|
||||
* Document the integration in `zerver/webhooks/<mywebhook>/doc.md` (required for
|
||||
getting it merged into Zulip). You can use existing documentation, like
|
||||
[this one](https://raw.githubusercontent.com/zulip/zulip/main/zerver/webhooks/github/doc.md),
|
||||
as a template. This should not take more than 15 minutes, even if you don't speak English
|
||||
as a first language (we'll clean up the text before merging).
|
||||
|
||||
## Hello world walkthrough
|
||||
|
||||
Check out the [detailed walkthrough](incoming-webhooks-walkthrough) for step-by-step
|
||||
instructions.
|
||||
|
||||
## Checklist
|
||||
|
||||
### Files that need to be created
|
||||
|
||||
Select a name for your incoming webhook and use it consistently. The examples
|
||||
below are for a webhook named `MyWebHook`.
|
||||
|
||||
* `zerver/webhooks/mywebhook/__init__.py`: Empty file that is an obligatory
|
||||
part of every python package. Remember to `git add` it.
|
||||
* `zerver/webhooks/mywebhook/view.py`: The main webhook integration function,
|
||||
called `api_mywebhook_webhook`, along with any necessary helper functions.
|
||||
* `zerver/webhooks/mywebhook/fixtures/message_type.json`: Sample JSON payload data
|
||||
used by tests. Add one fixture file per type of message supported by your
|
||||
integration.
|
||||
* `zerver/webhooks/mywebhook/tests.py`: Tests for your webhook.
|
||||
* `zerver/webhooks/mywebhook/doc.md`: End-user documentation explaining
|
||||
how to add the integration.
|
||||
* `static/images/integrations/logos/mywebhook.svg`: A square logo for the
|
||||
platform/server/product you are integrating. Used on the documentation
|
||||
pages as well as the sender's avatar for messages sent by the integration.
|
||||
* `static/images/integrations/mywebhook/001.png`: A screenshot of a message
|
||||
sent by the integration, used on the documentation page. This can be
|
||||
generated by running `tools/screenshots/generate-integration-docs-screenshot --integration mywebhook`.
|
||||
* `static/images/integrations/bot_avatars/mywebhook.png`: A square logo for the
|
||||
platform/server/product you are integrating which is used to create the avatar
|
||||
for generating screenshots with. This can be generated automatically from
|
||||
`static/images/integrations/logos/mywebhook.svg` by running
|
||||
`tools/setup/generate_integration_bots_avatars.py`.
|
||||
|
||||
### Files that need to be updated
|
||||
|
||||
* `zerver/lib/integrations.py`: Add your integration to
|
||||
`WEBHOOK_INTEGRATIONS`. This will automatically register a
|
||||
URL for the incoming webhook of the form `api/v1/external/mywebhook` and
|
||||
associate it with the function called `api_mywebhook_webhook` in
|
||||
`zerver/webhooks/mywebhook/view.py`. Also add your integration to
|
||||
`DOC_SCREENSHOT_CONFIG`. This will allow you to automatically generate
|
||||
a screenshot for the documentation by running
|
||||
`tools/screenshots/generate-integration-docs-screenshot --integration mywebhook`.
|
||||
|
||||
## Common Helpers
|
||||
|
||||
* If your integration will receive a test webhook payload, you can use
|
||||
`get_setup_webhook_message` to create our standard message for test payloads.
|
||||
You can import this from `zerver/lib/webhooks/common.py`, and it will generate
|
||||
a message like this: "GitHub webhook is successfully configured! 🎉"
|
||||
|
||||
## General advice
|
||||
|
||||
* Consider using our Zulip markup to make the output from your
|
||||
integration especially attractive or useful (e.g., emoji, Markdown
|
||||
emphasis, or @-mentions).
|
||||
|
||||
* Use topics effectively to ensure sequential messages about the same
|
||||
thing are threaded together; this makes for much better consumption
|
||||
by users. E.g., for a bug tracker integration, put the bug number in
|
||||
the topic for all messages; for an integration like Nagios, put the
|
||||
service in the topic.
|
||||
|
||||
* Integrations that don't match a team's workflow can often be
|
||||
uselessly spammy. Give careful thought to providing options for
|
||||
triggering Zulip messages only for certain message types, certain
|
||||
projects, or sending different messages to different channels/topics,
|
||||
to make it easy for teams to configure the integration to support
|
||||
their workflow.
|
||||
|
||||
* Consistently capitalize the name of the integration in the
|
||||
documentation and the Client name the way the vendor does. It's OK
|
||||
to use all-lower-case in the implementation.
|
||||
|
||||
* Sometimes it can be helpful to contact the vendor if it appears they
|
||||
don't have an API or webhook we can use; sometimes the right API
|
||||
is just not properly documented.
|
||||
|
||||
* A helpful tool for testing your integration is
|
||||
[UltraHook](http://www.ultrahook.com/), which allows you to receive webhook
|
||||
calls via your local Zulip development environment. This enables you to do end-to-end
|
||||
testing with live data from the service you're integrating and can help you
|
||||
spot why something isn't working or if the service is using custom HTTP
|
||||
headers.
|
||||
|
||||
## URL specification
|
||||
|
||||
The base URL for an incoming webhook integration bot, where
|
||||
`INTEGRATION_NAME` is the name of the specific webhook integration and
|
||||
`API_KEY` is the API key of the bot created by the user for the
|
||||
integration, is:
|
||||
|
||||
```
|
||||
{{ api_url }}/v1/external/INTEGRATION_NAME?api_key=API_KEY
|
||||
```
|
||||
|
||||
The list of existing webhook integrations can be found by browsing the
|
||||
[Integrations documentation](/integrations/) or in
|
||||
`zerver/lib/integrations.py` at `WEBHOOK_INTEGRATIONS`.
|
||||
|
||||
Parameters accepted in the URL include:
|
||||
|
||||
### api_key *(required)*
|
||||
|
||||
The API key of the bot created by the user for the integration. To get a
|
||||
bot's API key, see the [API keys](/api/api-keys) documentation.
|
||||
|
||||
### stream
|
||||
|
||||
The channel for the integration to send notifications to. Can be either
|
||||
the channel ID or the [URL-encoded][url-encoder] channel name. By default
|
||||
the integration will send direct messages to the bot's owner.
|
||||
|
||||
!!! tip ""
|
||||
|
||||
A channel ID can be found when [browsing channels][browse-channels]
|
||||
in the web or desktop apps.
|
||||
|
||||
### topic
|
||||
|
||||
The topic in the specified channel for the integration to send
|
||||
notifications to. The topic should also be [URL-encoded][url-encoder].
|
||||
By default the integration will have a topic configured for channel
|
||||
messages.
|
||||
|
||||
### only_events, exclude_events
|
||||
|
||||
Some incoming webhook integrations support these parameters to filter
|
||||
which events will trigger a notification. You can append either
|
||||
`&only_events=["event_a","event_b"]` or
|
||||
`&exclude_events=["event_a","event_b"]` (or both, with different events)
|
||||
to the URL, with an arbitrary number of supported events.
|
||||
|
||||
You can use UNIX-style wildcards like `*` to include multiple events.
|
||||
For example, `test*` matches every event that starts with `test`.
|
||||
|
||||
!!! tip ""
|
||||
|
||||
For a list of supported events, see a specific [integration's
|
||||
documentation](/integrations) page.
|
||||
|
||||
[browse-channels]: /help/introduction-to-channels#browse-and-subscribe-to-channels
|
||||
[add-bot]: /help/add-a-bot-or-integration
|
||||
[url-encoder]: https://www.urlencoder.org/
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Integrations overview](/api/integrations-overview)
|
||||
* [Incoming webhook walkthrough](/api/incoming-webhooks-walkthrough)
|
||||
* [Non-webhook integrations](/api/non-webhook-integrations)
|
@@ -1,689 +0,0 @@
|
||||
# Incoming webhook walkthrough
|
||||
|
||||
Below, we explain each part of a simple incoming webhook integration,
|
||||
called **Hello World**. This integration sends a "hello" message to the `test`
|
||||
channel and includes a link to the Wikipedia article of the day, which
|
||||
it formats from JSON data it receives in the HTTP request.
|
||||
|
||||
Use this walkthrough to learn how to write your first webhook
|
||||
integration.
|
||||
|
||||
## Step 0: Create fixtures
|
||||
|
||||
The first step in creating an incoming webhook is to examine the data that the
|
||||
service you want to integrate will be sending to Zulip.
|
||||
|
||||
* Use [Zulip's JSON integration](/integrations/doc/json),
|
||||
<https://webhook.site/>, or a similar tool to capture webhook
|
||||
payload(s) from the service you are integrating. Examining this data
|
||||
allows you to do two things:
|
||||
|
||||
1. Determine how you will need to structure your webhook code, including what
|
||||
message types your integration should support and how.
|
||||
2. Create fixtures for your webhook tests.
|
||||
|
||||
A test fixture is a small file containing test data, one for each test.
|
||||
Fixtures enable the testing of webhook integration code without the need to
|
||||
actually contact the service being integrated.
|
||||
|
||||
Because `Hello World` is a very simple integration that does one
|
||||
thing, it requires only one fixture,
|
||||
`zerver/webhooks/helloworld/fixtures/hello.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"featured_title":"Marilyn Monroe",
|
||||
    "featured_url":"https://en.wikipedia.org/wiki/Marilyn_Monroe"
|
||||
}
|
||||
```
|
||||
|
||||
When writing your own incoming webhook integration, you'll want to write a test function
|
||||
for each distinct message condition your integration supports. You'll also need a
|
||||
corresponding fixture for each of these tests. Depending on the type of data
|
||||
the 3rd party service sends, your fixture may contain JSON, URL encoded text, or
|
||||
some other kind of data. See [Step 5: Create automated tests](#step-5-create-automated-tests) or
|
||||
[Testing](https://zulip.readthedocs.io/en/latest/testing/testing.html) for further details.
|
||||
|
||||
### HTTP Headers
|
||||
|
||||
Some third-party webhook APIs, such as GitHub's, don't encode all the
|
||||
information about an event in the JSON request body. Instead, they
|
||||
put key details like the event type in a separate HTTP header
|
||||
(generally this is clear in their API documentation). In order to
|
||||
test Zulip's handling of that integration, you will need to record
|
||||
which HTTP headers are used with each fixture you capture.
|
||||
|
||||
Since this is integration-dependent, Zulip offers a simple API for
|
||||
doing this, which is probably best explained by looking at the example
|
||||
for GitHub: `zerver/webhooks/github/view.py`; basically, as part of
|
||||
writing your integration, you'll write a special function in your
|
||||
view.py file that maps the filename of the fixture to the set of HTTP
|
||||
headers to use. This function must be named "fixture_to_headers". Most
|
||||
integrations will use the same strategy as the GitHub integration:
|
||||
encoding the third party variable header data (usually just an event
|
||||
type) in the fixture filename. In such a case, you won't need to
|
||||
explicitly write the logic for such a special function again,
|
||||
instead you can just use the same helper method that the GitHub
|
||||
integration uses.
|
||||
|
||||
## Step 1: Initialize your webhook python package
|
||||
|
||||
In the `zerver/webhooks/` directory, create a new subdirectory that will
|
||||
contain all of the corresponding code. In our example, it will be
|
||||
`helloworld`. The new directory will be a python package, so you have
|
||||
to create an empty `__init__.py` file in that directory via, for
|
||||
example, `touch zerver/webhooks/helloworld/__init__.py`.
|
||||
|
||||
## Step 2: Create main webhook code
|
||||
|
||||
The majority of the code for your new integration will be in a single
|
||||
python file, `zerver/webhooks/mywebhook/view.py`.
|
||||
|
||||
The Hello World integration is in `zerver/webhooks/helloworld/view.py`:
|
||||
|
||||
```python
|
||||
from django.http import HttpRequest, HttpResponse
|
||||
|
||||
from zerver.decorator import webhook_view
|
||||
from zerver.lib.response import json_success
|
||||
from zerver.lib.typed_endpoint import JsonBodyPayload, typed_endpoint
|
||||
from zerver.lib.validator import WildValue, check_string
|
||||
from zerver.lib.webhooks.common import check_send_webhook_message
|
||||
from zerver.models import UserProfile
|
||||
|
||||
|
||||
@webhook_view("HelloWorld")
|
||||
@typed_endpoint
|
||||
def api_helloworld_webhook(
|
||||
request: HttpRequest,
|
||||
user_profile: UserProfile,
|
||||
*,
|
||||
payload: JsonBodyPayload[WildValue],
|
||||
) -> HttpResponse:
|
||||
# construct the body of the message
|
||||
body = "Hello! I am happy to be here! :smile:"
|
||||
|
||||
# try to add the Wikipedia article of the day
|
||||
body_template = (
|
||||
"\nThe Wikipedia featured article for today is **[{featured_title}]({featured_url})**"
|
||||
)
|
||||
body += body_template.format(
|
||||
featured_title=payload["featured_title"].tame(check_string),
|
||||
featured_url=payload["featured_url"].tame(check_string),
|
||||
)
|
||||
|
||||
topic = "Hello World"
|
||||
|
||||
# send the message
|
||||
check_send_webhook_message(request, user_profile, topic, body)
|
||||
|
||||
return json_success(request)
|
||||
```
|
||||
|
||||
The above code imports the required functions and defines the main webhook
|
||||
function `api_helloworld_webhook`, decorating it with `webhook_view` and
|
||||
`typed_endpoint`. The `typed_endpoint` decorator allows you to
|
||||
access request variables with `JsonBodyPayload()`. You can find more about `JsonBodyPayload` and request variables in [Writing views](
|
||||
https://zulip.readthedocs.io/en/latest/tutorials/writing-views.html#request-variables).
|
||||
|
||||
You must pass the name of your integration to the
|
||||
`webhook_view` decorator; that name will be used to
|
||||
describe your integration in Zulip's analytics (e.g., the `/stats`
|
||||
page). Here we have used `HelloWorld`. To be consistent with other
|
||||
integrations, use the name of the product you are integrating in camel
|
||||
case, spelled as the product spells its own name (except always first
|
||||
letter upper-case).
|
||||
|
||||
The `webhook_view` decorator indicates that the 3rd party service will
|
||||
send the authorization as an API key in the query parameters. If your service uses
|
||||
HTTP basic authentication, you would instead use the `authenticated_rest_api_view`
|
||||
decorator.
|
||||
|
||||
You should name your webhook function as such
|
||||
`api_webhookname_webhook` where `webhookname` is the name of your
|
||||
integration and is always lower-case.
|
||||
|
||||
At minimum, the webhook function must accept `request` (Django
|
||||
[HttpRequest](https://docs.djangoproject.com/en/5.0/ref/request-response/#django.http.HttpRequest)
|
||||
object), and `user_profile` (Zulip's user object). You may also want to
|
||||
define additional parameters using the `typed_endpoint` decorator.
|
||||
|
||||
In the example above, we have defined `payload` which is populated
|
||||
from the body of the http request, `stream` with a default of `test`
|
||||
(available by default in the Zulip development environment), and
|
||||
`topic` with a default of `Hello World`. If your webhook uses a custom channel,
|
||||
it must exist before a message can be created in it. (See
|
||||
[Step 5: Create automated tests](#step-5-create-automated-tests) for how to handle this in tests.)
|
||||
|
||||
The line that begins `# type` is a mypy type annotation. See [this
|
||||
page](https://zulip.readthedocs.io/en/latest/testing/mypy.html) for details about
|
||||
how to properly annotate your webhook functions.
|
||||
|
||||
In the body of the function we define the body of the message as `Hello! I am
|
||||
happy to be here! :smile:`. The `:smile:` indicates an emoji. Then we append a
|
||||
link to the Wikipedia article of the day as provided by the json payload.
|
||||
|
||||
* Sometimes, it might occur that a json payload does not contain all required keys your
|
||||
integration checks for. In such a case, any `KeyError` thrown is handled by the server
|
||||
backend and will create an appropriate response.
|
||||
|
||||
Then we send a message with `check_send_webhook_message`, which will
|
||||
validate the message and do the following:
|
||||
|
||||
* Send a public (channel) message if the `stream` query parameter is
|
||||
specified in the webhook URL.
|
||||
* If the `stream` query parameter isn't specified, it will send a direct
|
||||
message to the owner of the webhook bot.
|
||||
|
||||
Finally, we return a 200 http status with a JSON format success message via
|
||||
`json_success(request)`.
|
||||
|
||||
## Step 3: Create an API endpoint for the webhook
|
||||
|
||||
In order for an incoming webhook to be externally available, it must be mapped
|
||||
to a URL. This is done in `zerver/lib/integrations.py`.
|
||||
|
||||
Look for the lines beginning with:
|
||||
|
||||
```python
|
||||
WEBHOOK_INTEGRATIONS: List[WebhookIntegration] = [
|
||||
```
|
||||
|
||||
And you'll find the entry for Hello World:
|
||||
|
||||
```python
|
||||
WebhookIntegration("helloworld", ["misc"], display_name="Hello World"),
|
||||
```
|
||||
|
||||
This tells the Zulip API to call the `api_helloworld_webhook` function in
|
||||
`zerver/webhooks/helloworld/view.py` when it receives a request at
|
||||
`/api/v1/external/helloworld`.
|
||||
|
||||
This line also tells Zulip to generate an entry for Hello World on the Zulip
|
||||
integrations page using `static/images/integrations/logos/helloworld.svg` as its
|
||||
icon. The second positional argument defines a list of categories for the
|
||||
integration.
|
||||
|
||||
At this point, if you're following along and/or writing your own Hello World
|
||||
webhook, you have written enough code to test your integration. There are three
|
||||
tools which you can use to test your webhook - two command-line tools and a GUI.
|
||||
|
||||
### Webhooks requiring custom configuration
|
||||
|
||||
In cases where an incoming webhook integration supports optional URL parameters,
|
||||
one can use the `url_options` feature. It's a field in the `WebhookIntegration`
|
||||
class that is used when [generating a URL for an integration](/help/generate-integration-url)
|
||||
in the web app, which encodes the user input for each URL parameter in the
|
||||
incoming webhook's URL.
|
||||
|
||||
These URL options are declared as follows:
|
||||
|
||||
```python
|
||||
WebhookIntegration(
|
||||
'helloworld',
|
||||
...
|
||||
url_options=[
|
||||
WebhookUrlOption(
|
||||
name='ignore_private_repositories',
|
||||
label='Exclude notifications from private repositories',
|
||||
validator=check_string
|
||||
),
|
||||
],
|
||||
)
|
||||
```
|
||||
|
||||
`url_options` is a list describing the parameters the web app UI should offer when
|
||||
generating the incoming webhook URL:
|
||||
|
||||
- `name`: The parameter name that is used to encode the user input in the
|
||||
integration's webhook URL.
|
||||
- `label`: A short descriptive label for this URL parameter in the web app UI.
|
||||
- `validator`: A validator function, which is used to determine the input type
|
||||
for this option in the UI, and to indicate how to validate the input.
|
||||
Currently, the web app UI only supports these validators:
|
||||
- `check_bool` for checkbox/select input.
|
||||
- `check_string` for text input.
|
||||
|
||||
!!! warn ""
|
||||
|
||||
**Note**: To add support for other validators, you can update
|
||||
`web/src/integration_url_modal.ts`. Common validators are available in
|
||||
`zerver/lib/validator.py`.
|
||||
|
||||
In rare cases, it may be necessary for an incoming webhook to require
|
||||
additional user configuration beyond what is specified in the POST
|
||||
URL. A typical use case for this would be APIs that require clients
|
||||
to do a callback to get details beyond an opaque object ID that one
|
||||
would want to include in a Zulip notification message.
|
||||
|
||||
The `config_options` field in the `WebhookIntegration` class is reserved
|
||||
for this use case.
|
||||
|
||||
## Step 4: Manually testing the webhook
|
||||
|
||||
For either one of the command line tools, first, you'll need to get an
|
||||
API key from the **Bots** section of your Zulip user's **Personal
|
||||
settings**. To test the webhook, you'll need to [create a
|
||||
bot](https://zulip.com/help/add-a-bot-or-integration) with the
|
||||
**Incoming webhook** type. Replace `<api_key>` with your bot's API key
|
||||
in the examples presented below! This is how Zulip knows that the
|
||||
request was made by an authorized user.
|
||||
|
||||
### Curl
|
||||
|
||||
Using curl:
|
||||
```bash
|
||||
curl -X POST -H "Content-Type: application/json" -d '{ "featured_title":"Marilyn Monroe", "featured_url":"https://en.wikipedia.org/wiki/Marilyn_Monroe" }' http://localhost:9991/api/v1/external/helloworld\?api_key\=<api_key>
|
||||
```
|
||||
|
||||
After running the above command, you should see something similar to:
|
||||
|
||||
```json
|
||||
{"msg":"","result":"success"}
|
||||
```
|
||||
|
||||
### Management command: send_webhook_fixture_message
|
||||
|
||||
Using `manage.py` from within the Zulip development environment:
|
||||
|
||||
```console
|
||||
(zulip-server) vagrant@vagrant:/srv/zulip$
|
||||
./manage.py send_webhook_fixture_message \
|
||||
--fixture=zerver/webhooks/helloworld/fixtures/hello.json \
|
||||
'--url=http://localhost:9991/api/v1/external/helloworld?api_key=<api_key>'
|
||||
```
|
||||
|
||||
After running the above command, you should see something similar to:
|
||||
|
||||
```
|
||||
2016-07-07 15:06:59,187 INFO 127.0.0.1 POST 200 143ms (mem: 6ms/13) (md: 43ms/1) (db: 20ms/9q) (+start: 147ms) /api/v1/external/helloworld (helloworld-bot@zulip.com via ZulipHelloWorldWebhook)
|
||||
```
|
||||
|
||||
Some webhooks require custom HTTP headers, which can be passed using
|
||||
`./manage.py send_webhook_fixture_message --custom-headers`. For
|
||||
example:
|
||||
|
||||
--custom-headers='{"X-Custom-Header": "value"}'
|
||||
|
||||
The format is a JSON dictionary, so make sure that the header names do
|
||||
not contain any spaces in them and that you use the precise quoting
|
||||
approach shown above.
|
||||
|
||||
For more information about `manage.py` command-line tools in Zulip, see
|
||||
the [management commands][management-commands] documentation.
|
||||
|
||||
[management-commands]: https://zulip.readthedocs.io/en/latest/production/management-commands.html
|
||||
|
||||
### Integrations Dev Panel
|
||||
This is the GUI tool.
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. Run `./tools/run-dev` then go to http://localhost:9991/devtools/integrations/.
|
||||
|
||||
1. Set the following mandatory fields:
|
||||
**Bot** - Any incoming webhook bot.
|
||||
**Integration** - One of the integrations.
|
||||
**Fixture** - Though not mandatory, it's recommended that you select one and then tweak it if necessary.
|
||||
The remaining fields are optional, and the URL will automatically be generated.
|
||||
|
||||
1. Click **Send**!
|
||||
|
||||
{end_tabs}
|
||||
|
||||
By opening Zulip in one tab and then this tool in another, you can quickly tweak
|
||||
your code and send sample messages for many different test fixtures.
|
||||
|
||||
Note: If you want to use any custom HTTP headers, they must be entered as a JSON dictionary.
|
||||
Feel free to use 4-spaces as tabs for indentation if you'd like!
|
||||
|
||||
Your sample notification may look like:
|
||||
|
||||
<img class="screenshot" src="/static/images/api/helloworld-webhook.png" alt="screenshot" />
|
||||
|
||||
|
||||
|
||||
## Step 5: Create automated tests
|
||||
|
||||
Every webhook integration should have a corresponding test file:
|
||||
`zerver/webhooks/mywebhook/tests.py`.
|
||||
|
||||
The Hello World integration's tests are in `zerver/webhooks/helloworld/tests.py`
|
||||
|
||||
You should name the class `<WebhookName>HookTests` and have it inherit from
|
||||
the base class `WebhookTestCase`. For our HelloWorld webhook, we name the test
|
||||
class `HelloWorldHookTests`:
|
||||
|
||||
```python
|
||||
class HelloWorldHookTests(WebhookTestCase):
|
||||
CHANNEL_NAME = "test"
|
||||
URL_TEMPLATE = "/api/v1/external/helloworld?&api_key={api_key}&stream={stream}"
|
||||
DIRECT_MESSAGE_URL_TEMPLATE = "/api/v1/external/helloworld?&api_key={api_key}"
|
||||
WEBHOOK_DIR_NAME = "helloworld"
|
||||
|
||||
# Note: Include a test function per each distinct message condition your integration supports
|
||||
def test_hello_message(self) -> None:
|
||||
expected_topic = "Hello World"
|
||||
expected_message = "Hello! I am happy to be here! :smile:\nThe Wikipedia featured article for today is **[Marilyn Monroe](https://en.wikipedia.org/wiki/Marilyn_Monroe)**"
|
||||
|
||||
# use fixture named helloworld_hello
|
||||
self.check_webhook(
|
||||
"hello",
|
||||
expected_topic,
|
||||
expected_message,
|
||||
content_type="application/x-www-form-urlencoded",
|
||||
)
|
||||
```
|
||||
|
||||
In the above example, `CHANNEL_NAME`, `URL_TEMPLATE`, and `WEBHOOK_DIR_NAME` refer
|
||||
to class attributes from the base class, `WebhookTestCase`. These are needed by
|
||||
the helper function `check_webhook` to determine how to execute
|
||||
your test. `CHANNEL_NAME` should be set to your default channel. If it doesn't exist,
|
||||
`check_webhook` will create it while executing your test.
|
||||
|
||||
If your test expects a channel name from a test fixture, the value in the fixture
|
||||
and the value you set for `CHANNEL_NAME` must match. The test helpers use `CHANNEL_NAME`
|
||||
to create the destination channel, and then create the message to send using the
|
||||
value from the fixture. If these don't match, the test will fail.
|
||||
|
||||
`URL_TEMPLATE` defines how the test runner will call your incoming webhook, in the same way
|
||||
you would provide a webhook URL to the 3rd party service. `api_key={api_key}` says
|
||||
that an API key is expected.
|
||||
|
||||
When writing tests for your webhook, you'll want to include one test function
|
||||
(and corresponding fixture) per each distinct message condition that your
|
||||
integration supports.
|
||||
|
||||
If, for example, we added support for sending a goodbye message to our `Hello
|
||||
World` webhook, we would add another test function to `HelloWorldHookTests`
|
||||
class called something like `test_goodbye_message`:
|
||||
|
||||
```python
|
||||
def test_goodbye_message(self) -> None:
|
||||
expected_topic = "Hello World"
|
||||
expected_message = "Hello! I am happy to be here! :smile:\nThe Wikipedia featured article for today is **[Goodbye](https://en.wikipedia.org/wiki/Goodbye)**"
|
||||
|
||||
# use fixture named helloworld_goodbye
|
||||
self.check_webhook(
|
||||
"goodbye",
|
||||
expected_topic,
|
||||
expected_message,
|
||||
content_type="application/x-www-form-urlencoded",
|
||||
)
|
||||
```
|
||||
|
||||
As well as a new fixture `goodbye.json` in
|
||||
`zerver/webhooks/helloworld/fixtures/`:
|
||||
|
||||
```json
|
||||
{
|
||||
"featured_title":"Goodbye",
|
||||
    "featured_url":"https://en.wikipedia.org/wiki/Goodbye"
|
||||
}
|
||||
```
|
||||
|
||||
Also consider if your integration should have negative tests, a test where the
|
||||
data from the test fixture should result in an error. For details see
|
||||
[Negative tests](#negative-tests), below.
|
||||
|
||||
Once you have written some tests, you can run just these new tests from within
|
||||
the Zulip development environment with this command:
|
||||
|
||||
```console
|
||||
(zulip-server) vagrant@vagrant:/srv/zulip$
|
||||
./tools/test-backend zerver/webhooks/helloworld
|
||||
```
|
||||
|
||||
(Note: You must run the tests from the top level of your development directory.
|
||||
The standard location in a Vagrant environment is `/srv/zulip`. If you are not
|
||||
using Vagrant, use the directory where you have your development environment.)
|
||||
|
||||
You will see some script output and if all the tests have passed, you will see:
|
||||
|
||||
```console
|
||||
Running zerver.webhooks.helloworld.tests.HelloWorldHookTests.test_goodbye_message
|
||||
Running zerver.webhooks.helloworld.tests.HelloWorldHookTests.test_hello_message
|
||||
DONE!
|
||||
```
|
||||
|
||||
## Step 6: Create documentation
|
||||
|
||||
Next, we add end-user documentation for our integration. You
|
||||
can see the existing examples at <https://zulip.com/integrations>
|
||||
or by accessing `/integrations` in your Zulip development environment.
|
||||
|
||||
There are two parts to the end-user documentation on this page.
|
||||
|
||||
The first is the lozenge in the grid of integrations, showing your
|
||||
integration logo and name, which links to the full documentation.
|
||||
This is generated automatically once you've registered the integration
|
||||
in `WEBHOOK_INTEGRATIONS` in `zerver/lib/integrations.py`, and supports
|
||||
some customization via options to the `WebhookIntegration` class.
|
||||
|
||||
Second, you need to write the actual documentation content in
|
||||
`zerver/webhooks/mywebhook/doc.md`.
|
||||
|
||||
```md
|
||||
Learn how Zulip integrations work with this simple Hello World example!
|
||||
|
||||
1. The Hello World webhook will use the `test` channel, which is created
|
||||
by default in the Zulip development environment. If you are running
|
||||
Zulip in production, you should make sure that this channel exists.
|
||||
|
||||
1. {!create-an-incoming-webhook.md!}
|
||||
|
||||
1. {!generate-webhook-url-basic.md!}
|
||||
|
||||
1. To trigger a notification using this example webhook, you can use
|
||||
`send_webhook_fixture_message` from a [Zulip development
|
||||
environment](https://zulip.readthedocs.io/en/latest/development/overview.html):
|
||||
|
||||
```
|
||||
(zulip-server) vagrant@vagrant:/srv/zulip$
|
||||
./manage.py send_webhook_fixture_message \
|
||||
> --fixture=zerver/tests/fixtures/helloworld/hello.json \
|
||||
> '--url=http://localhost:9991/api/v1/external/helloworld?api_key=abcdefgh&stream=stream%20name;'
|
||||
```
|
||||
|
||||
Or, use curl:
|
||||
|
||||
```
|
||||
curl -X POST -H "Content-Type: application/json" -d '{ "featured_title":"Marilyn Monroe", "featured_url":"https://en.wikipedia.org/wiki/Marilyn_Monroe" }' http://localhost:9991/api/v1/external/helloworld\?api_key=abcdefgh&stream=stream%20name;
|
||||
```
|
||||
|
||||
{!congrats.md!}
|
||||
|
||||

|
||||
|
||||
```
|
||||
|
||||
`{!create-an-incoming-webhook.md!}` and `{!congrats.md!}` are examples of
|
||||
a Markdown macro. Zulip has a macro-based Markdown/Jinja2 framework that
|
||||
includes macros for common instructions in Zulip's webhooks/integrations
|
||||
documentation.
|
||||
|
||||
See
|
||||
[our guide on documenting an integration][integration-docs-guide]
|
||||
for further details, including how to easily create the message
|
||||
screenshot. Mostly you should plan on templating off an existing guide, like
|
||||
[this one](https://raw.githubusercontent.com/zulip/zulip/main/zerver/webhooks/github/doc.md).
|
||||
|
||||
[integration-docs-guide]: https://zulip.readthedocs.io/en/latest/documentation/integrations.html
|
||||
|
||||
## Step 7: Preparing a pull request to zulip/zulip
|
||||
|
||||
When you have finished your webhook integration, follow these guidelines before
|
||||
pushing the code to your fork and submitting a pull request to zulip/zulip:
|
||||
|
||||
- Run tests including linters and ensure you have addressed any issues they
|
||||
report. See [Testing](https://zulip.readthedocs.io/en/latest/testing/testing.html)
|
||||
and [Linters](https://zulip.readthedocs.io/en/latest/testing/linters.html) for details.
|
||||
- Read through [Code styles and conventions](
|
||||
https://zulip.readthedocs.io/en/latest/contributing/code-style.html) and take a look
|
||||
through your code to double-check that you've followed Zulip's guidelines.
|
||||
- Take a look at your Git history to ensure your commits have been clear and
|
||||
logical (see [Commit discipline](
|
||||
https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html) for tips). If not,
|
||||
consider revising them with `git rebase --interactive`. For most incoming webhooks,
|
||||
you'll want to squash your changes into a single commit and include a good,
|
||||
clear commit message.
|
||||
|
||||
If you would like feedback on your integration as you go, feel free to post a
|
||||
message on the [public Zulip instance](https://chat.zulip.org/#narrow/channel/integrations).
|
||||
You can also create a [draft pull request](
|
||||
https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests#draft-pull-requests) while you
|
||||
are still working on your integration. See the
|
||||
[Git guide](https://zulip.readthedocs.io/en/latest/git/pull-requests.html#create-a-pull-request)
|
||||
for more on Zulip's pull request process.
|
||||
|
||||
## Advanced topics
|
||||
|
||||
More complex implementation or testing needs may require additional code, beyond
|
||||
what the standard helper functions provide. This section discusses some of
|
||||
these situations.
|
||||
|
||||
### Negative tests
|
||||
|
||||
A negative test is one that should result in an error, such as incorrect data.
|
||||
The helper functions may interpret this as a test failure, when it should instead
|
||||
be a successful test of an error condition. To correctly test these cases, you
|
||||
must explicitly code your test's execution (using other helpers, as needed)
|
||||
rather than call the usual helper function.
|
||||
|
||||
Here is an example from the WordPress integration:
|
||||
|
||||
```python
|
||||
def test_unknown_action_no_data(self) -> None:
|
||||
# Mimic check_webhook() to manually execute a negative test.
|
||||
# Otherwise its call to send_webhook_payload() would assert on the non-success
|
||||
# we are testing. The value of result is the error message the webhook should
|
||||
# return if no params are sent. The fixture for this test is an empty file.
|
||||
|
||||
# subscribe to the target channel
|
||||
self.subscribe(self.test_user, self.CHANNEL_NAME)
|
||||
|
||||
# post to the webhook url
|
||||
post_params = {'stream_name': self.CHANNEL_NAME,
|
||||
'content_type': 'application/x-www-form-urlencoded'}
|
||||
result = self.client_post(self.url, 'unknown_action', **post_params)
|
||||
|
||||
# check that we got the expected error message
|
||||
self.assert_json_error(result, "Unknown WordPress webhook action: WordPress action")
|
||||
```
|
||||
|
||||
In a normal test, `check_webhook` would handle all the setup
|
||||
and then check that the incoming webhook's response matches the expected result. If
|
||||
the webhook returns an error, the test fails. Instead, explicitly do the
|
||||
setup it would have done, and check the result yourself.
|
||||
|
||||
Here, `subscribe` is a test helper that uses `test_user` and
|
||||
`CHANNEL_NAME` (attributes from the base class) to register the user to receive
|
||||
messages in the given channel. If the channel doesn't exist, it creates it.
|
||||
|
||||
`client_post`, another helper, performs the HTTP POST that calls the incoming
|
||||
webhook. As long as `self.url` is correct, you don't need to construct the webhook
|
||||
URL yourself. (In most cases, it is.)
|
||||
|
||||
`assert_json_error` then checks if the result matches the expected error.
|
||||
If you had used `check_webhook`, it would have called
|
||||
`send_webhook_payload`, which checks the result with `assert_json_success`.
|
||||
|
||||
### Custom query parameters
|
||||
|
||||
Custom arguments passed in URL query parameters work as expected in the webhook
|
||||
code, but require special handling in tests.
|
||||
|
||||
For example, here is the definition of a webhook function that gets both `stream`
|
||||
and `topic` from the query parameters:
|
||||
|
||||
```python
|
||||
@typed_endpoint
|
||||
def api_querytest_webhook(request: HttpRequest, user_profile: UserProfile,
|
||||
payload: Annotated[str, ApiParamConfig(argument_type_is_body=True)],
|
||||
stream: str = "test",
|
||||
                          topic: str = "Default Alert") -> HttpResponse:
|
||||
```
|
||||
|
||||
In actual use, you might configure the 3rd party service to call your Zulip
|
||||
integration with a URL like this:
|
||||
|
||||
```
|
||||
http://myhost/api/v1/external/querytest?api_key=abcdefgh&stream=alerts&topic=queries
|
||||
```
|
||||
|
||||
It provides values for `stream` and `topic`, and the webhook can get those
|
||||
using `@typed_endpoint` without any special handling. How does this work in a test?
|
||||
|
||||
The new attribute `TOPIC` exists only in our class so far. In order to
|
||||
construct a URL with a query parameter for `topic`, you can pass the
|
||||
attribute `TOPIC` as a keyword argument to `build_webhook_url`, like so:
|
||||
|
||||
```python
|
||||
class QuerytestHookTests(WebhookTestCase):
|
||||
|
||||
CHANNEL_NAME = 'querytest'
|
||||
TOPIC = "Default topic"
|
||||
URL_TEMPLATE = "/api/v1/external/querytest?api_key={api_key}&stream={stream}"
|
||||
FIXTURE_DIR_NAME = 'querytest'
|
||||
|
||||
def test_querytest_test_one(self) -> None:
|
||||
# construct the URL used for this test
|
||||
self.TOPIC = "Query test"
|
||||
self.url = self.build_webhook_url(topic=self.TOPIC)
|
||||
|
||||
# define the expected message contents
|
||||
expected_topic = "Query test"
|
||||
expected_message = "This is a test of custom query parameters."
|
||||
|
||||
self.check_webhook('test_one', expected_topic, expected_message,
|
||||
content_type="application/x-www-form-urlencoded")
|
||||
```
|
||||
|
||||
You can also override `get_body` or `get_payload` if your test data
|
||||
needs to be constructed in an unusual way.
|
||||
|
||||
For more, see the definition for the base class, `WebhookTestCase`
|
||||
in `zerver/lib/test_classes.py`, or just grep for examples.
|
||||
|
||||
|
||||
### Custom HTTP event-type headers
|
||||
|
||||
Some third-party services set a custom HTTP header to indicate the event type that
|
||||
generates a particular payload. To extract such headers, we recommend using the
|
||||
`validate_extract_webhook_http_header` function in `zerver/lib/webhooks/common.py`,
|
||||
like so:
|
||||
|
||||
```python
|
||||
event = validate_extract_webhook_http_header(request, header, integration_name)
|
||||
```
|
||||
|
||||
`request` is the `HttpRequest` object passed to your main webhook function. `header`
|
||||
is the name of the custom header you'd like to extract, such as `X-Event-Key`, and
|
||||
`integration_name` is the name of the third-party service in question, such as
|
||||
`GitHub`.
|
||||
|
||||
Because such headers are how some integrations indicate the event types of their
|
||||
payloads, the absence of such a header usually indicates a configuration
|
||||
issue, where one either entered the URL for a different integration, or happens to
|
||||
be running an older version of the integration that doesn't set that header.
|
||||
|
||||
If the requisite header is missing, this function sends a direct message to the
|
||||
owner of the webhook bot, notifying them of the missing header.
|
||||
|
||||
### Handling unexpected webhook event types
|
||||
|
||||
Many third-party services have dozens of different event types. In
|
||||
some cases, we may choose to explicitly ignore specific events. In
|
||||
other cases, there may be events that are new or events that we don't
|
||||
know about. In such cases, we recommend raising
|
||||
`UnsupportedWebhookEventTypeError` (found in `zerver/lib/exceptions.py`),
|
||||
with a string describing the unsupported event type, like so:
|
||||
|
||||
```
|
||||
raise UnsupportedWebhookEventTypeError(event_type)
|
||||
```
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Integrations overview](/api/integrations-overview)
|
||||
* [Incoming webhook integrations](/api/incoming-webhooks-overview)
|
@@ -1,26 +0,0 @@
|
||||
# The Zulip API
|
||||
|
||||
Zulip's APIs allow you to integrate other services with Zulip. This
|
||||
guide should help you find the API you need:
|
||||
|
||||
* First, check if the tool you'd like to integrate with Zulip
|
||||
[already has a native integration](/integrations/).
|
||||
* Next, check if [Zapier](https://zapier.com/apps) or
|
||||
[IFTTT](https://ifttt.com/search) has an integration.
|
||||
[Zulip's Zapier integration](/integrations/doc/zapier) and
|
||||
[Zulip's IFTTT integration](/integrations/doc/ifttt) often allow
|
||||
integrating a new service with Zulip without writing any code.
|
||||
* If you'd like to send content into Zulip, you can
|
||||
[write a native incoming webhook integration](/api/incoming-webhooks-overview)
|
||||
or use [Zulip's API for sending messages](/api/send-message).
|
||||
* If you're building an interactive bot that reacts to activity inside
|
||||
Zulip, you'll want to look at Zulip's
|
||||
[Python framework for interactive bots](/api/running-bots) or
|
||||
[Zulip's real-time events API](/api/get-events).
|
||||
|
||||
And if you still need to build your own integration with Zulip, check out
|
||||
the full [REST API](/api/rest), generally starting with
|
||||
[installing the API client bindings](/api/installation-instructions).
|
||||
|
||||
In case you already know how you want to build your integration and you're
|
||||
just looking for an API key, we've got you covered [here](/api/api-keys).
|
@@ -1,46 +0,0 @@
|
||||
# Installation instructions
|
||||
|
||||
Zulip's REST API is easy to work with directly, but there are API
|
||||
libraries available for a few popular languages.
|
||||
|
||||
The Python library is the most advanced (and has tools for easily
|
||||
writing interactive bots that react to messages), so we recommend it
|
||||
if you're trying to decide.
|
||||
|
||||
{start_tabs}
|
||||
{tab|python}
|
||||
|
||||
Install the Python API with [pip](https://pypi.python.org/pypi/zulip/):
|
||||
|
||||
```
|
||||
pip install zulip
|
||||
```
|
||||
|
||||
{tab|zulip-send}
|
||||
|
||||
Included with the Python bindings:
|
||||
|
||||
```
|
||||
pip install zulip
|
||||
```
|
||||
|
||||
{tab|js}
|
||||
|
||||
Install the JavaScript API with [npm](https://www.npmjs.com/package/zulip-js):
|
||||
|
||||
```
|
||||
npm install zulip-js
|
||||
```
|
||||
|
||||
{tab|curl}
|
||||
|
||||
No download required!
|
||||
|
||||
{end_tabs}
|
||||
|
||||
See also [user-contributed client libraries](/api/client-libraries)
|
||||
for many other languages.
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Configuring the Python bindings](/api/configuring-python-bindings)
|
@@ -1,139 +0,0 @@
|
||||
# Integrations overview
|
||||
|
||||
Integrations let you connect Zulip with other products. For example, you can get
|
||||
notification messages in Zulip when an issue in your tracker is updated, or for
|
||||
alerts from your monitoring tool.
|
||||
|
||||
Zulip offers [over 120 native integrations](/integrations/), which take
|
||||
advantage of Zulip's [topics](/help/introduction-to-topics) to organize
|
||||
notification messages. Additionally, thousands of integrations are available
|
||||
through [Zapier](https://zapier.com/apps) and [IFTTT](https://ifttt.com/search).
|
||||
You can also [connect any webhook designed to work with
|
||||
Slack](/integrations/doc/slack_incoming) to Zulip.
|
||||
|
||||
If you don't find an integration you need, you can:
|
||||
|
||||
- [Write your own integration](#write-your-own-integration). You can [submit a
|
||||
pull
|
||||
request](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html)
|
||||
to get your integration merged into the main Zulip repository.
|
||||
|
||||
- [File an issue](https://github.com/zulip/zulip/issues/new/choose) to request
|
||||
an integration (if it's a nice-to-have).
|
||||
|
||||
- [Contact Zulip Sales](mailto:sales@zulip.com) to inquire about a custom
|
||||
development contract.
|
||||
|
||||
## Set up an integration
|
||||
|
||||
### Native integrations
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. [Search Zulip's integrations](/integrations/) for the product you'd like to
|
||||
connect to Zulip.
|
||||
|
||||
1. Click on the card for the product, and follow the instructions on the page.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
### Integrate via Zapier or IFTTT
|
||||
|
||||
If you don't see a native Zulip integration, you can access thousands of
|
||||
additional integrations through [Zapier](https://zapier.com/apps) and
|
||||
[IFTTT](https://ifttt.com/search).
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. Search [Zapier](https://zapier.com/apps) or [IFTTT](https://ifttt.com/search)
|
||||
for the product you'd like to connect to Zulip.
|
||||
|
||||
1. Follow the integration instructions for [Zapier](/integrations/doc/zapier) or
|
||||
[IFTTT](/integrations/doc/ifttt).
|
||||
|
||||
{end_tabs}
|
||||
|
||||
### Integrate via Slack-compatible webhook API
|
||||
|
||||
Zulip can process incoming webhook messages written to work with [Slack's
|
||||
webhook API](https://api.slack.com/messaging/webhooks). This makes it easy to
|
||||
quickly move your integrations when [migrating your
|
||||
organization](/help/import-from-slack) from Slack to Zulip, or integrate any
|
||||
product that has a Slack webhook integration with Zulip.
|
||||
|
||||
!!! warn ""
|
||||
|
||||
**Note:** In the long term, the recommended approach is to use
|
||||
Zulip's native integrations, which take advantage of Zulip's topics.
|
||||
There may also be some quirks when Slack's formatting system is
|
||||
translated into Zulip's.
|
||||
|
||||
{start_tabs}
|
||||
|
||||
1. [Create a bot](/help/add-a-bot-or-integration) for the Slack-compatible
|
||||
webhook. Make sure that you select **Incoming webhook** as the **Bot type**.
|
||||
|
||||
1. Decide where to send Slack-compatible webhook notifications, and [generate
|
||||
the integration URL](https://zulip.com/help/generate-integration-url).
|
||||
|
||||
1. Use the generated URL anywhere you would use a Slack webhook.
|
||||
|
||||
{end_tabs}
|
||||
|
||||
### Integrate via email
|
||||
|
||||
If the product you'd like to integrate can send email notifications, you can
|
||||
[send those emails to a Zulip channel](/help/message-a-channel-by-email). The
|
||||
email subject will become the Zulip topic, and the email body will become the
|
||||
Zulip message.
|
||||
|
||||
For example, you can configure your personal GitHub notifications to go to a
|
||||
Zulip channel rather than your email inbox. Notifications for each issue or pull
|
||||
request will be grouped into a single topic.
|
||||
|
||||
## Write your own integration
|
||||
|
||||
You can write your own Zulip integrations using the well-documented APIs below.
|
||||
For example, if your company develops software, you can create a custom
|
||||
integration to connect your product to Zulip.
|
||||
|
||||
If you need help, best-effort community support is available in the [Zulip
|
||||
development community](https://zulip.com/development-community/). To inquire
|
||||
about options for custom development, [contact Zulip
|
||||
Sales](mailto:sales@zulip.com).
|
||||
|
||||
### Sending content into Zulip
|
||||
|
||||
* If the third-party service supports outgoing webhooks, you likely want to
|
||||
build an [incoming webhook integration](/api/incoming-webhooks-overview).
|
||||
|
||||
* If it doesn't, you may want to write a
|
||||
[script or plugin integration](/api/non-webhook-integrations).
|
||||
|
||||
* The [`zulip-send` tool](/api/send-message) makes it easy to send Zulip
|
||||
messages from shell scripts.
|
||||
|
||||
* Finally, you can
|
||||
[send messages using Zulip's API](/api/send-message), with bindings for
|
||||
Python, JavaScript and [other languages](/api/client-libraries).
|
||||
|
||||
### Sending and receiving content
|
||||
|
||||
* To react to activity inside Zulip, look at Zulip's
|
||||
[Python framework for interactive bots](/api/running-bots) or
|
||||
[Zulip's real-time events API](/api/get-events).
|
||||
|
||||
* If what you want isn't covered by the above, check out the full
|
||||
[REST API](/api/rest). The web, mobile, desktop, and terminal apps are
|
||||
built on top of this API, so it can do anything a human user can do. Most
|
||||
but not all of the endpoints are documented on this site; if you need
|
||||
something that isn't there check out Zulip's
|
||||
[REST endpoints](https://github.com/zulip/zulip/blob/main/zproject/urls.py).
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Bots overview](/help/bots-overview)
|
||||
* [Set up integrations](/help/set-up-integrations)
|
||||
* [Add a bot or integration](/help/add-a-bot-or-integration)
|
||||
* [Generate integration URL](/help/generate-integration-url)
|
||||
* [Request an integration](/help/request-an-integration)
|
@@ -1,447 +0,0 @@
|
||||
# Message formatting
|
||||
|
||||
Zulip supports an extended version of Markdown for messages, as well as
|
||||
some HTML level special behavior. The Zulip help center article on [message
|
||||
formatting](/help/format-your-message-using-markdown) is the primary
|
||||
documentation for Zulip's markup features. This article is currently a
|
||||
changelog for updates to these features.
|
||||
|
||||
The [render a message](/api/render-message) endpoint can be used to get
|
||||
the current HTML version of any Markdown syntax for message content.
|
||||
|
||||
## Code blocks
|
||||
|
||||
**Changes**: As of Zulip 4.0 (feature level 33), [code blocks][help-code]
|
||||
can have a `data-code-language` attribute attached to the outer HTML
|
||||
`div` element, which records the programming language that was selected
|
||||
for syntax highlighting. This field is used in the
|
||||
[playgrounds][help-playgrounds] feature for code blocks.
|
||||
|
||||
## Global times
|
||||
|
||||
**Changes**: In Zulip 3.0 (feature level 8), added [global time
|
||||
mentions][help-global-time] to supported Markdown message formatting
|
||||
features.
|
||||
|
||||
## Links to channels, topics, and messages
|
||||
|
||||
Zulip's markup supports special readable Markdown syntax for [linking
|
||||
to channels, topics, and messages](/help/link-to-a-message-or-conversation).
|
||||
|
||||
Sample HTML formats are as follows:
|
||||
``` html
|
||||
<!-- Syntax: #**announce** -->
|
||||
<a class="stream" data-stream-id="9"
|
||||
href="/#narrow/channel/9-announce">
|
||||
#announce
|
||||
</a>
|
||||
|
||||
<!-- Syntax: #**announce>Zulip updates** -->
|
||||
<a class="stream-topic" data-stream-id="9"
|
||||
href="/#narrow/channel/9-announce/topic/Zulip.20updates/with/214">
|
||||
#announce > Zulip updates
|
||||
</a>
|
||||
|
||||
<!-- Syntax: #**announce>Zulip updates**
|
||||
Generated only if topic had no messages or the link was rendered
|
||||
before Zulip 10.0 (feature level 347) -->
|
||||
<a class="stream-topic" data-stream-id="9"
|
||||
href="/#narrow/channel/9-announce/topic/Zulip.20updates">
|
||||
#announce > Zulip updates
|
||||
</a>
|
||||
|
||||
<!-- Syntax: #**announce>Zulip updates@214** -->
|
||||
<a class="message-link"
|
||||
href="/#narrow/channel/9-announce/topic/Zulip.20updates/near/214">
|
||||
#announce > Zulip updates @ 💬
|
||||
</a>
|
||||
```
|
||||
|
||||
The `near` and `with` operators are documented in more detail in the
|
||||
[search and URL documentation](/api/construct-narrow). When rendering
|
||||
topic links with the `with` operator, the code doing the rendering may
|
||||
pick the ID arbitrarily among messages accessible to the client and/or
|
||||
acting user at the time of rendering. Currently, the server chooses
|
||||
the message ID to use for `with` operators as the latest message ID in
|
||||
the topic accessible to the user who wrote the message.
|
||||
|
||||
The older stream/topic link elements include a `data-stream-id`, which
|
||||
historically was used in order to display the current channel name if
|
||||
the channel had been renamed. That field is **deprecated**, because
|
||||
displaying an updated value for the most common forms of this syntax
|
||||
requires parsing the URL to get the topic to use anyway.
|
||||
|
||||
When a topic is an empty string, it is replaced with
|
||||
`realm_empty_topic_display_name` found in the [`POST /register`](/api/register-queue)
|
||||
response and wrapped with the `<em>` tag.
|
||||
|
||||
Sample HTML formats with `"realm_empty_topic_display_name": "general chat"`
|
||||
are as follows:
|
||||
```html
|
||||
<!-- Syntax: #**announce>** -->
|
||||
<a class="stream-topic" data-stream-id="9"
|
||||
href="/#narrow/channel/9-announce/topic/with/214">
|
||||
#announce > <em>general chat</em>
|
||||
</a>
|
||||
|
||||
<!-- Syntax: #**announce>**
|
||||
Generated only if topic had no messages or the link was rendered
|
||||
before Zulip 10.0 (feature level 347) -->
|
||||
<a class="stream-topic" data-stream-id="9"
|
||||
href="/#narrow/channel/9-announce/topic/">
|
||||
#announce > <em>general chat</em>
|
||||
</a>
|
||||
|
||||
<!-- Syntax: #**announce>@214** -->
|
||||
<a class="message-link"
|
||||
href="/#narrow/channel/9-announce/topic//near/214">
|
||||
#announce > <em>general chat</em> @ 💬
|
||||
</a>
|
||||
```
|
||||
|
||||
**Changes**: In Zulip 11.0 (feature level 400), the server switched
|
||||
its strategy for `with` URL construction to choose the latest
|
||||
accessible message ID in a topic. Previously, it used the oldest.
|
||||
|
||||
Before Zulip 10.0 (feature level 347), the `with` field
|
||||
was never used in topic link URLs generated by the server; the markup
|
||||
currently used only for empty topics was used for all topic links.
|
||||
|
||||
Before Zulip 10.0 (feature level 346), empty string
|
||||
was not a valid topic name in syntaxes for linking to topics and
|
||||
messages.
|
||||
|
||||
In Zulip 10.0 (feature level 319), added Markdown syntax
|
||||
for linking to a specific message in a conversation. Declared the
|
||||
`data-stream-id` field to be deprecated as detailed above.
|
||||
|
||||
In Zulip 11.0 (feature level 383), clients can decide what
|
||||
channel view `a.stream` channel link elements take you to -- i.e.,
|
||||
the href for those is the default behavior of the link that also
|
||||
encodes the channel alongside the data-stream-id field, but clients
|
||||
can override that default based on `web_channel_default_view` setting.
|
||||
|
||||
## Image previews
|
||||
|
||||
When a Zulip message is sent linking to an uploaded image, Zulip will
|
||||
generate an image preview element with the following format.
|
||||
|
||||
``` html
|
||||
<div class="message_inline_image">
|
||||
<a href="/user_uploads/path/to/image.png" title="image.png">
|
||||
<img data-original-dimensions="1920x1080"
|
||||
data-original-content-type="image/png"
|
||||
src="/user_uploads/thumbnail/path/to/image.png/840x560.webp">
|
||||
</a>
|
||||
</div>
|
||||
```
|
||||
|
||||
If the server has not yet generated thumbnails for the image at
|
||||
the time the message is sent, the `img` element will be a temporary
|
||||
loading indicator image and have the `image-loading-placeholder`
|
||||
class, which clients can use to identify loading indicators and
|
||||
replace them with a more native loading indicator element if
|
||||
desired. For example:
|
||||
|
||||
``` html
|
||||
<div class="message_inline_image">
|
||||
<a href="/user_uploads/path/to/image.png" title="image.png">
|
||||
<img class="image-loading-placeholder"
|
||||
data-original-dimensions="1920x1080"
|
||||
data-original-content-type="image/png"
|
||||
src="/path/to/spinner.png">
|
||||
</a>
|
||||
</div>
|
||||
```
|
||||
|
||||
Once the server has a working thumbnail, such messages will be updated
|
||||
via an `update_message` event, with the `rendering_only: true` flag
|
||||
(telling clients not to adjust message edit history), with appropriate
|
||||
adjusted `rendered_content`. A client should process those events by
|
||||
just using the updated rendering. If thumbnailing failed, the same
|
||||
type of event will edit the message's rendered form to remove the
|
||||
image preview element, so no special client-side logic should be
|
||||
required to process such errors.
|
||||
|
||||
Note that in the uncommon situation that the thumbnailing system is
|
||||
backlogged, an individual message containing multiple image previews
|
||||
may be re-rendered multiple times as each image finishes thumbnailing
|
||||
and triggers a message update.
|
||||
|
||||
Clients are recommended to do the following when processing image
|
||||
previews:
|
||||
|
||||
- Clients that would like to use the image's aspect ratio to lay out
|
||||
one or more images in the message feed may use the
|
||||
`data-original-dimensions` attribute, which is present even if the
|
||||
image is a placeholder spinner. This attribute encodes the
|
||||
dimensions of the original image as `{width}x{height}`. These
|
||||
dimensions are for the image as rendered, _after_ any EXIF rotation
|
||||
and mirroring has been applied.
|
||||
- If the client would like to control the thumbnail resolution used,
|
||||
it can replace the final section of the URL (`840x560.webp` in the
|
||||
example above) with the `name` of its preferred format from the set
|
||||
of supported formats provided by the server in the
|
||||
`server_thumbnail_formats` portion of the `register`
|
||||
response. Clients should not make any assumptions about what format
|
||||
the server will use as the "default" thumbnail resolution, as it may
|
||||
change over time.
|
||||
- Download button type elements should provide the original image
|
||||
(encoded via the `href` of the containing `a` tag).
|
||||
- The content-type of the original image is provided on a
|
||||
`data-original-content-type` attribute, so clients can decide if
|
||||
they are capable of rendering the original image.
|
||||
- For images whose formats are not widely accepted by browsers
|
||||
(e.g., HEIC and TIFF), the image may contain a
|
||||
`data-transcoded-image` attribute, which specifies a high-resolution
|
||||
thumbnail format which clients may use instead of the original
|
||||
image.
|
||||
- Lightbox elements for viewing an image should be designed to
|
||||
immediately display any already-downloaded thumbnail while fetching
|
||||
the original-quality image or an appropriate higher-quality
|
||||
thumbnail from the server, to be transparently swapped in once it is
|
||||
available. Clients that would like to size the lightbox based on the
|
||||
size of the original image can use the `data-original-dimensions`
|
||||
attribute, as described above.
|
||||
- Animated images will have a `data-animated` attribute on the `img`
|
||||
tag. As detailed in `server_thumbnail_formats`, both animated and
|
||||
still images are available for clients to use, depending on their
|
||||
preference. See, for example, the [web setting][help-previews]
|
||||
to control whether animated images are autoplayed in the message
|
||||
feed.
|
||||
- Clients should not assume that the requested format is the format
|
||||
that they will receive; in rare cases where the client has an
|
||||
out-of-date list of `server_thumbnail_formats`, the server will
|
||||
provide an approximation of the client's requested format. Because
|
||||
of this, clients should not assume that the pixel dimensions or file
|
||||
format match what they requested.
|
||||
- No other processing of the URLs is recommended.
|
||||
|
||||
**Changes**: In Zulip 10.0 (feature level 336), added
|
||||
`data-original-content-type` attribute to convey the type of the
|
||||
original image, and optional `data-transcoded-image` attribute for
|
||||
images with formats which are not widely supported by browsers.
|
||||
|
||||
**Changes**: In Zulip 9.2 (feature levels 278-279, and 287+), added
|
||||
`data-original-dimensions` to the `image-loading-placeholder` spinner
|
||||
images, containing the dimensions of the original image.
|
||||
|
||||
In Zulip 9.0 (feature level 276), added `data-original-dimensions`
|
||||
attribute to images that have been thumbnailed, containing the
|
||||
dimensions of the full-size version of the image. Thumbnailing itself
|
||||
was reintroduced at feature level 275.
|
||||
|
||||
Previously, with the exception of Zulip servers that used the beta
|
||||
Thumbor-based implementation years ago, all image previews in Zulip
|
||||
messages were not thumbnailed; the `a` tag and the `img` tag would both
|
||||
point to the original image.
|
||||
|
||||
Clients that correctly implement the current API should handle
|
||||
Thumbor-based older thumbnails correctly, as long as they do not
|
||||
assume that `data-original-dimensions` is present. Clients should not
|
||||
assume that messages sent prior to the introduction of thumbnailing
|
||||
have been re-rendered to use the new format or have thumbnails
|
||||
available.
|
||||
|
||||
## Video embeddings and previews
|
||||
|
||||
When a Zulip message is sent linking to an uploaded video, Zulip may
|
||||
generate a video preview element with the following format.
|
||||
|
||||
|
||||
``` html
|
||||
<div class="message_inline_image message_inline_video">
|
||||
<a href="/user_uploads/path/to/video.mp4">
|
||||
<video preload="metadata" src="/user_uploads/path/to/video.mp4">
|
||||
</video>
|
||||
</a>
|
||||
</div>
|
||||
```
|
||||
|
||||
## Audio Players
|
||||
|
||||
When the Markdown media syntax is used with an uploaded file with an
|
||||
audio `Content-Type`, Zulip will generate an HTML5 `<audio>` player
|
||||
element. Supported MIME types are currently `audio/aac`, `audio/flac`,
|
||||
`audio/mpeg`, and `audio/wav`.
|
||||
|
||||
For example, `[file.mp3](/user_uploads/path/to/file.mp3)` renders as:
|
||||
|
||||
``` html
|
||||
<audio controls preload="metadata"
|
||||
src="/user_uploads/path/to/file.mp3" title="file.mp3">
|
||||
</audio>
|
||||
```
|
||||
|
||||
If the Zulip server has rewritten the URL of the audio file, it will
|
||||
provide the URL in a `data-original-url` parameter. The Zulip server
|
||||
does this for all non-uploaded file audio URLs.
|
||||
|
||||
``` html
|
||||
<audio controls preload="metadata"
|
||||
data-original-url="https://example.com/path/to/original/file.mp3"
|
||||
src="https://zulipcdn.example.com/path/to/playable/file.mp3" title="file.mp3">
|
||||
</audio>
|
||||
```
|
||||
|
||||
Clients that cannot render an audio player are recommended to convert
|
||||
audio elements into a link to the original URL.
|
||||
|
||||
The Zulip server does not validate whether uploaded files with an
|
||||
audio `Content-Type` are actually playable.
|
||||
|
||||
**Changes**: New in Zulip 11.0 (feature level 405).
|
||||
|
||||
## Mentions and silent mentions
|
||||
|
||||
Zulip markup supports [mentioning](/help/mention-a-user-or-group)
|
||||
users, user groups, and a few special "wildcard" mentions (the three
|
||||
spellings of a channel wildcard mention: `@**all**`, `@**everyone**`,
|
||||
`@**channel**`, and the topic wildcard mention `@**topic**`).
|
||||
|
||||
Mentions result in a message being highlighted for the target user(s),
|
||||
both in the UI and in notifications, and may also result in the target
|
||||
user(s) following the conversation, [depending on their
|
||||
settings](/help/follow-a-topic#follow-topics-where-you-are-mentioned).
|
||||
|
||||
Silent mentions of users or groups have none of those side effects,
|
||||
but nonetheless uniquely identify the referenced user or
|
||||
group. (There's no such thing as a silent wildcard mention.)
|
||||
|
||||
Permissions for mentioning users work as follows:
|
||||
|
||||
- Any user can mention any other user, though mentions by [muted
|
||||
users](/help/mute-a-user) are automatically marked as read and thus do
|
||||
not trigger notifications or otherwise get highlighted like unread
|
||||
mentions.
|
||||
|
||||
- Wildcard mentions are permitted except where [organization-level
|
||||
restrictions](/help/restrict-wildcard-mentions) apply.
|
||||
|
||||
- User groups can be mentioned if and only if the acting user is in
|
||||
the `can_mention_group` group for that group. All user groups can be
|
||||
silently mentioned by any user.
|
||||
|
||||
- System groups, when (silently) mentioned, should be displayed using
|
||||
their description, not their `role:nobody` style API names; see the
|
||||
main [system group
|
||||
documentation](/api/group-setting-values#system-groups) for
|
||||
details. System groups can only be silently mentioned right now,
|
||||
because they happen to all use the empty `Nobody` group for
|
||||
`can_mention_group`; clients should just use `can_mention_group` to
|
||||
determine which groups to offer in typeahead in similar contexts.
|
||||
|
||||
- Requests to send or edit a message that are impermissible due to
|
||||
including a mention where the acting user does not have permission to
|
||||
mention the target will return an error. Mention syntax that does not
|
||||
correspond to a real user or group is ignored.
|
||||
|
||||
Sample markup for `@**Example User**`:
|
||||
|
||||
``` html
|
||||
<span class="user-mention" data-user-id="31">@Example User</span>
|
||||
```
|
||||
|
||||
Sample markup for `@_**Example User**`:
|
||||
|
||||
``` html
|
||||
<span class="user-mention silent" data-user-id="31">Example User</span>
|
||||
```
|
||||
|
||||
Sample markup for `@**topic**`:
|
||||
|
||||
``` html
|
||||
<span class="topic-mention">@topic</span>
|
||||
```
|
||||
|
||||
Sample markup for `@**channel**`:
|
||||
|
||||
``` html
|
||||
<span class="user-mention channel-wildcard-mention"
|
||||
data-user-id="*">@channel</span>
|
||||
```
|
||||
|
||||
Sample markup for `@*support*`, assuming "support" is a valid group:
|
||||
``` html
|
||||
<span class="user-group-mention"
|
||||
data-user-group-id="17">@support</span>
|
||||
```
|
||||
|
||||
Sample markup for `@_*support*`, assuming "support" is a valid group:
|
||||
``` html
|
||||
<span class="user-group-mention silent"
|
||||
data-user-group-id="17">support</span>
|
||||
```
|
||||
|
||||
Sample markup for `@_*role:administrators*`:
|
||||
``` html
|
||||
<span class="user-group-mention silent"
|
||||
data-user-group-id="5">Administrators</span>
|
||||
```
|
||||
|
||||
When processing mentions, clients should look up the user or group
|
||||
referenced by ID, and update the textual name for the mention to the
|
||||
current name for the user or group with that ID. Note that for system
|
||||
groups, this requires special logic to look up the user-facing name
|
||||
for that group; see [system
|
||||
groups](/api/group-setting-values#system-groups) for details.
|
||||
|
||||
**Changes**: Prior to Zulip 10.0 (feature level 333), it was not
|
||||
possible to silently mention [system
|
||||
groups](/api/group-setting-values#system-groups).
|
||||
|
||||
In Zulip 9.0 (feature level 247), `channel` was added to the supported
|
||||
[wildcard][help-mention-all] options used in the
|
||||
[mentions][help-mentions] Markdown message formatting feature.
|
||||
|
||||
## Spoilers
|
||||
|
||||
**Changes**: In Zulip 3.0 (feature level 15), added
|
||||
[spoilers][help-spoilers] to supported Markdown message formatting
|
||||
features.
|
||||
|
||||
## Removed features
|
||||
|
||||
### Removed legacy Dropbox link preview markup
|
||||
|
||||
In Zulip 11.0 (feature level 395), the Zulip server stopped generating
|
||||
legacy Dropbox link previews. Dropbox links are now previewed just
|
||||
like standard Zulip image/link previews. However, some legacy Dropbox
|
||||
previews may exist in existing messages.
|
||||
|
||||
Clients are recommended to prune these previews from message HTML;
|
||||
since they always appear after the actual link, there is no loss of
|
||||
information/functionality. They can be recognized via the classes
|
||||
`message_inline_ref`, `message_inline_image_desc`, and
|
||||
`message_inline_image_title`:
|
||||
|
||||
``` html
|
||||
<div class="message_inline_ref">
|
||||
<a href="https://www.dropbox.com/sh/cm39k9e04z7fhim/AAAII5NK-9daee3FcF41anEua?dl=" title="Saves">
|
||||
<img src="/path/to/folder_dropbox.png">
|
||||
</a>
|
||||
<div><div class="message_inline_image_title">Saves</div>
|
||||
<desc class="message_inline_image_desc"></desc>
|
||||
</div>
|
||||
</div>
|
||||
```
|
||||
|
||||
### Removed legacy avatar markup
|
||||
|
||||
In Zulip 4.0 (feature level 24), the rarely used `!avatar()`
|
||||
and `!gravatar()` markup syntax, which was never documented and had an
|
||||
inconsistent syntax, were removed.
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Markdown formatting](/help/format-your-message-using-markdown)
|
||||
* [Send a message](/api/send-message)
|
||||
* [Render a message](/api/render-message)
|
||||
|
||||
[help-code]: /help/code-blocks
|
||||
[help-playgrounds]: /help/code-blocks#code-playgrounds
|
||||
[help-spoilers]: /help/spoilers
|
||||
[help-global-time]: /help/global-times
|
||||
[help-mentions]: /help/mention-a-user-or-group
|
||||
[help-mention-all]: /help/mention-a-user-or-group#mention-everyone-on-a-channel
|
||||
[help-previews]: /help/image-video-and-website-previews#configure-how-animated-images-are-played
|
@@ -1,59 +0,0 @@
|
||||
# Non-webhook integrations
|
||||
|
||||
[Incoming webhook integrations](/api/incoming-webhooks-overview) are the
|
||||
fastest to write, but sometimes a third-party product just doesn't support
|
||||
them. Zulip supports several other types of integrations.
|
||||
|
||||
* **Python script integrations**
|
||||
(examples: SVN, Git), where we can get the service to call our integration
|
||||
(by shelling out or otherwise), passing in the required data. Our preferred
|
||||
model for these is to ship these integrations in the
|
||||
[Zulip Python API distribution](https://github.com/zulip/python-zulip-api/tree/main/zulip),
|
||||
within the `integrations` directory there.
|
||||
|
||||
* **Plugin integrations** (examples:
|
||||
Jenkins, Hubot, Trac) where the user needs to install a plugin into their
|
||||
existing software. These are often more work, but for some products are the
|
||||
only way to integrate with the product at all.
|
||||
|
||||
For plugin integrations, usually you will need to consult the
|
||||
documentation for the third party software in order to learn how to
|
||||
write the integration.
|
||||
|
||||
* **Interactive bots**. See [Writing bots](/api/writing-bots).
|
||||
|
||||
A few notes on how to do these:
|
||||
|
||||
* You should always send messages by POSTing to URLs of the form
|
||||
`https://zulip.example.com/v1/messages/`.
|
||||
|
||||
* We usually build Python script integrations with (at least) 2 files:
|
||||
`zulip_foo_config.py` containing the configuration for the
|
||||
integration including the bots' API keys, plus a script that reads
|
||||
from this configuration to actually do the work (that way, it's
|
||||
possible to update the script without breaking users' configurations).
|
||||
|
||||
* Be sure to test your integration carefully and
|
||||
[document](https://zulip.readthedocs.io/en/latest/documentation/integrations.html)
|
||||
how to install it.
|
||||
|
||||
* You should specify a clear HTTP User-Agent for your integration. The
|
||||
user agent should at a minimum identify the integration and version
|
||||
number, separated by a slash. If possible, you should collect platform
|
||||
information and include that in `()`s after the version number. Some
|
||||
examples of ideal UAs are:
|
||||
|
||||
```
|
||||
ZulipDesktop/0.7.0 (Ubuntu; 14.04)
|
||||
ZulipJenkins/0.1.0 (Windows; 7.2)
|
||||
ZulipMobile/0.5.4 (Android; 4.2; maguro)
|
||||
```
|
||||
|
||||
* The [general advice](/api/incoming-webhooks-overview#general-advice) for
|
||||
webhook integrations applies here as well.
|
||||
|
||||
## Related articles
|
||||
|
||||
* [Running bots](/api/running-bots)
|
||||
* [Deploying bots](/api/deploying-bots)
|
||||
* [Writing bots](/api/writing-bots)
|
@@ -1,184 +0,0 @@
|
||||
# Outgoing webhooks
|
||||
|
||||
Outgoing webhooks allow you to build or set up Zulip integrations
|
||||
which are notified when certain types of messages are sent in
|
||||
Zulip. When one of those events is triggered, we'll send an HTTP POST
|
||||
payload to the webhook's configured URL. Webhooks can be used to
|
||||
power a wide range of Zulip integrations. For example, the
|
||||
[Zulip Botserver][zulip-botserver] is built on top of this API.
|
||||
|
||||
Zulip supports outgoing webhooks both in a clean native Zulip format,
|
||||
as well as a format that's compatible with
|
||||
[Slack's outgoing webhook API][slack-outgoing-webhook], which can help
|
||||
with porting an existing Slack integration to work with Zulip.
|
||||
|
||||
[zulip-botserver]: /api/deploying-bots#zulip-botserver
|
||||
[slack-outgoing-webhook]: https://api.slack.com/custom-integrations/outgoing-webhooks
|
||||
|
||||
To register an outgoing webhook:
|
||||
|
||||
* Log in to the Zulip server.
|
||||
* Navigate to *Personal settings (<i class="zulip-icon zulip-icon-gear"></i>)* -> *Bots* ->
|
||||
*Add a new bot*. Select *Outgoing webhook* for bot type, the URL
|
||||
you'd like Zulip to post to as the **Endpoint URL**, the format you
|
||||
want, and click on *Create bot* to submit the form.
|
||||
* Your new bot user will appear in the *Active bots* panel, which you
|
||||
can use to edit the bot's settings.
|
||||
|
||||
## Triggering
|
||||
|
||||
There are currently two ways to trigger an outgoing webhook:
|
||||
|
||||
* **@-mention** the bot user in a channel. If the bot replies, its
|
||||
reply will be sent to that channel and topic.
|
||||
* **Send a direct message** with the bot as one of the recipients.
|
||||
If the bot replies, its reply will be sent to that thread.
|
||||
|
||||
## Timeouts
|
||||
|
||||
The remote server must respond to a `POST` request in a timely manner.
|
||||
The default timeout for outgoing webhooks is 10 seconds, though this
|
||||
can be configured by the administrator of the Zulip server by setting
|
||||
`OUTGOING_WEBHOOKS_TIMEOUT_SECONDS` in the [server's
|
||||
settings][settings].
|
||||
|
||||
[settings]: https://zulip.readthedocs.io/en/latest/subsystems/settings.html#server-settings
|
||||
|
||||
## Outgoing webhook format
|
||||
|
||||
{generate_code_example|/zulip-outgoing-webhook:post|fixture}
|
||||
|
||||
### Fields documentation
|
||||
|
||||
{generate_return_values_table|zulip.yaml|/zulip-outgoing-webhook:post}
|
||||
|
||||
## Replying with a message
|
||||
|
||||
Many bots implemented using this outgoing webhook API will want to
|
||||
send a reply message into Zulip. Zulip's outgoing webhook API
|
||||
provides a convenient way to do that by simply returning an
|
||||
appropriate HTTP response to the Zulip server.
|
||||
|
||||
A correctly implemented bot will return a JSON object containing one
|
||||
of two possible formats, described below.
|
||||
|
||||
### Example response payloads
|
||||
|
||||
If the bot code wants to opt out of responding, it can explicitly
|
||||
encode a JSON dictionary that contains `response_not_required` set
|
||||
to `True`, so that no response message is sent to the user. (This
|
||||
is helpful to distinguish deliberate non-responses from bugs.)
|
||||
|
||||
Here's an example of the JSON your server should respond with if
|
||||
you would not like to send a response message:
|
||||
|
||||
```json
|
||||
{
|
||||
"response_not_required": true
|
||||
}
|
||||
```
|
||||
|
||||
Here's an example of the JSON your server should respond with if
|
||||
you would like to send a response message:
|
||||
|
||||
```json
|
||||
{
|
||||
"content": "Hey, we just received **something** from Zulip!"
|
||||
}
|
||||
```
|
||||
|
||||
The `content` field should contain Zulip-format Markdown.
|
||||
|
||||
Note that an outgoing webhook bot can use the [Zulip REST
|
||||
API](/api/rest) with its API key in case your bot needs to do
|
||||
something else, like add an emoji reaction or upload a file.
|
||||
|
||||
## Slack-compatible webhook format
|
||||
|
||||
The Slack-compatible webhook format allows immediate integration with
|
||||
many third-party systems that already support Slack outgoing webhooks.
|
||||
|
||||
The following table details how the Zulip server translates a Zulip
|
||||
message into the Slack-compatible webhook format.
|
||||
|
||||
<table class="table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Name</th>
|
||||
<th>Description</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><code>token</code></td>
|
||||
<td>A string of alphanumeric characters you can use to
|
||||
authenticate the webhook request (each bot user uses a fixed token)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>team_id</code></td>
|
||||
<td>ID of the Zulip organization prefixed by "T".</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>team_domain</code></td>
|
||||
<td>Hostname of the Zulip organization</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>channel_id</code></td>
|
||||
<td>Channel ID prefixed by "C"</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>channel_name</code></td>
|
||||
<td>Channel name</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>thread_ts</code></td>
|
||||
<td>Timestamp for when message was sent</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>timestamp</code></td>
|
||||
<td>Timestamp for when message was sent</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>user_id</code></td>
|
||||
<td>ID of the user who sent the message prefixed by "U"</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>user_name</code></td>
|
||||
<td>Full name of sender</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>text</code></td>
|
||||
<td>The content of the message (in Markdown)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>trigger_word</code></td>
|
||||
<td>Trigger method</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><code>service_id</code></td>
|
||||
<td>ID of the bot user</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
The above data is posted as a list of tuples (not JSON); here's an example:
|
||||
|
||||
```
|
||||
[('token', 'v9fpCdldZIej2bco3uoUvGp06PowKFOf'),
|
||||
('team_id', 'T1512'),
|
||||
('team_domain', 'zulip.example.com'),
|
||||
('channel_id', 'C123'),
|
||||
('channel_name', 'integrations'),
|
||||
('thread_ts', 1532078950),
|
||||
('timestamp', 1532078950),
|
||||
('user_id', 'U21'),
|
||||
('user_name', 'Full Name'),
|
||||
('text', '@**test**'),
|
||||
('trigger_word', 'mention'),
|
||||
('service_id', 27)]
|
||||
```
|
||||
|
||||
* For a successful request, if data is returned, it returns that data,
|
||||
else it returns a blank response.
|
||||
* For a failed request, it returns the reason for the failure, as returned by
|
||||
the server, or the exception message.
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user