Mirror of https://github.com/zulip/zulip.git (synced 2025-11-04 14:03:30 +00:00)

Compare commits: 4.0-rc1-br ... 1.5.x (10 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 76809b87a6 | |
| | 5baeb35ac8 | |
| | 75fbce0532 | |
| | 8ad7e08375 | |
| | bd01b1e2e4 | |
| | 58a7f6085f | |
| | 3367593b52 | |
| | 1a92ec5d86 | |
| | 7a8d685a71 | |
| | 3c3a8747c3 | |

@@ -1,5 +0,0 @@
-> 0.15%
-> 0.15% in US
-last 2 versions
-Firefox ESR
-not dead

.codecov.yml (12 lines)
@@ -1,12 +0,0 @@
-comment: off
-
-coverage:
-status:
-project:
-default:
-target: auto
-# Codecov has the tendency to report a lot of false negatives,
-# so we basically suppress comments completely.
-threshold: 50%
-base: auto
-patch: off

.coveralls.yml (2 lines, new file)
@@ -0,0 +1,2 @@
+service_name: travis-pro
+repo_token: hnXUEBKsORKHc8xIENGs9JjktlTb2HKlG

@@ -3,22 +3,17 @@ root = true
 [*]
 end_of_line = lf
 charset = utf-8
-indent_size = 4
-indent_style = space
-insert_final_newline = true
 trim_trailing_whitespace = true
+insert_final_newline = true

-binary_next_line = true # for shfmt
-switch_case_indent = true # for shfmt
+[*.{sh,py,js, json,yml,xml, css, md,markdown, handlebars,html}]
+indent_style = space
+indent_size = 4

-[{*.{js,json,ts},check-openapi}]
-max_line_length = 100
+[*.{svg,rb,pp,pl}]
+indent_style = space

-[*.{py,pyi}]
-max_line_length = 110

-[*.{svg,rb,pp,yaml,yml}]
 indent_size = 2

-[package.json]
-indent_size = 2
+[*.{cfg}]
+indent_style = space
+indent_size = 8

@@ -1,15 +1,3 @@
-# This is intended for generated files and vendored third-party files.
-# For our source code, instead of adding files here, consider using
-# specific eslint-disable comments in the files themselves.
-
-/docs/_build
-/static/generated
-/static/third
-/static/webpack-bundles
-/var/*
-!/var/puppeteer
-/var/puppeteer/*
-!/var/puppeteer/test_credentials.d.ts
-/zulip-current-venv
-/zulip-py3-venv
-/zulip-thumbor-venv
+static/js/bundle.js
+static/js/blueslip.js
+puppet/zulip_ops/files/statsd/local.js

.eslintrc.json (451 lines)
@@ -1,227 +1,246 @@
 {
     "env": {
-        "es2020": true,
         "node": true
     },
-    "extends": [
-        "eslint:recommended",
-        "plugin:import/errors",
-        "plugin:import/warnings",
-        "plugin:unicorn/recommended",
-        "prettier"
-    ],
-    "parser": "@babel/eslint-parser",
-    "parserOptions": {
-        "warnOnUnsupportedTypeScriptVersion": false,
-        "sourceType": "unambiguous"
-    },
-    "reportUnusedDisableDirectives": true,
-    "rules": {
-        "array-callback-return": "error",
-        "arrow-body-style": "error",
-        "block-scoped-var": "error",
-        "consistent-return": "error",
-        "curly": "error",
-        "dot-notation": "error",
-        "eqeqeq": "error",
-        "guard-for-in": "error",
-        "import/extensions": "error",
-        "import/first": "error",
-        "import/newline-after-import": "error",
-        "import/no-useless-path-segments": "error",
-        "import/order": [
-            "error",
-            {
-                "alphabetize": {"order": "asc"},
-                "newlines-between": "always"
-            }
-        ],
-        "import/unambiguous": "error",
-        "lines-around-directive": "error",
-        "new-cap": "error",
-        "no-alert": "error",
-        "no-array-constructor": "error",
-        "no-bitwise": "error",
-        "no-caller": "error",
-        "no-catch-shadow": "error",
-        "no-constant-condition": ["error", {"checkLoops": false}],
-        "no-div-regex": "error",
-        "no-duplicate-imports": "error",
-        "no-else-return": "error",
-        "no-eq-null": "error",
-        "no-eval": "error",
-        "no-implicit-coercion": "error",
-        "no-implied-eval": "error",
-        "no-inner-declarations": "off",
-        "no-iterator": "error",
-        "no-label-var": "error",
-        "no-labels": "error",
-        "no-loop-func": "error",
-        "no-multi-str": "error",
-        "no-native-reassign": "error",
-        "no-new-func": "error",
-        "no-new-object": "error",
-        "no-new-wrappers": "error",
-        "no-octal-escape": "error",
-        "no-plusplus": "error",
-        "no-proto": "error",
-        "no-return-assign": "error",
-        "no-script-url": "error",
-        "no-self-compare": "error",
-        "no-sync": "error",
-        "no-throw-literal": "error",
-        "no-undef-init": "error",
-        "no-unneeded-ternary": ["error", {"defaultAssignment": false}],
-        "no-unused-expressions": "error",
-        "no-use-before-define": ["error", {"functions": false}],
-        "no-useless-concat": "error",
-        "no-useless-constructor": "error",
-        "no-var": "error",
-        "object-shorthand": "error",
-        "one-var": ["error", "never"],
-        "prefer-arrow-callback": "error",
-        "prefer-const": [
-            "error",
-            {
-                "ignoreReadBeforeAssign": true
-            }
-        ],
-        "radix": "error",
-        "sort-imports": ["error", {"ignoreDeclarationSort": true}],
-        "spaced-comment": ["error", "always", {"markers": ["/"]}],
-        "strict": "error",
-        "unicorn/consistent-function-scoping": "off",
-        "unicorn/explicit-length-check": "off",
-        "unicorn/filename-case": "off",
-        "unicorn/no-nested-ternary": "off",
-        "unicorn/no-null": "off",
-        "unicorn/no-process-exit": "off",
-        "unicorn/no-useless-undefined": "off",
-        "unicorn/number-literal-case": "off",
-        "unicorn/prefer-spread": "off",
-        "unicorn/prefer-ternary": "off",
-        "unicorn/prevent-abbreviations": "off",
-        "valid-typeof": ["error", {"requireStringLiterals": true}],
-        "yoda": "error"
-    },
-    "overrides": [
-        {
-            "files": ["frontend_tests/puppeteer_lib/**", "frontend_tests/puppeteer_tests/**"],
     "globals": {
         "$": false,
-        "zulip_test": false
-    }
-    },
-    {
-        "files": ["static/js/**"],
-        "globals": {
-            "StripeCheckout": false
-        }
-    },
-    {
-        "files": ["**/*.ts"],
-        "extends": ["plugin:@typescript-eslint/recommended", "plugin:import/typescript"],
-        "parserOptions": {
-            "project": "tsconfig.json"
+        "_": false,
+        "jQuery": false,
+        "Spinner": false,
+        "Handlebars": false,
+        "XDate": false,
+        "zxcvbn": false,
+        "LazyLoad": false,
+        "Dropbox": false,
+        "SockJS": false,
+        "marked": false,
+        "i18n": false,
+        "bridge": false,
+        "page_params": false,
+        "status_classes": false,
+        "password_quality": false,
+        "csrf_token": false,
+        "typeahead_helper": false,
+        "popovers": false,
+        "server_events": false,
+        "ui": false,
+        "stream_color": false,
+        "people": false,
+        "navigate": false,
+        "settings": false,
+        "resize": false,
+        "loading": false,
+        "compose": false,
+        "compose_fade": false,
+        "subs": false,
+        "timerender": false,
+        "message_live_update": false,
+        "message_edit": false,
+        "reload": false,
+        "composebox_typeahead": false,
+        "search": false,
+        "topic_list": false,
+        "gear_menu": false,
+        "hashchange": false,
+        "message_list": false,
+        "Filter": false,
+        "pointer": false,
+        "util": false,
+        "MessageListView": false,
+        "blueslip": false,
+        "rows": false,
+        "WinChan": false,
+        "muting_ui": false,
+        "Socket": false,
+        "channel": false,
+        "components": false,
+        "viewport": false,
+        "avatar": false,
+        "feature_flags": false,
+        "search_suggestion": false,
+        "referral": false,
+        "notifications": false,
+        "message_flags": false,
+        "bot_data": false,
+        "stream_list": false,
+        "narrow": false,
+        "admin": false,
+        "stream_data": false,
+        "muting": false,
+        "Dict": false,
+        "unread": false,
+        "alert_words_ui": false,
+        "message_store": false,
+        "favicon": false,
+        "condense": false,
+        "floating_recipient_bar": false,
+        "tab_bar": false,
+        "emoji": false,
+        "activity": false,
+        "invite": false,
+        "colorspace": false,
+        "reactions": false,
+        "tutorial": false,
+        "templates": false,
+        "alert_words": false,
+        "fenced_code": false,
+        "echo": false,
+        "localstorage": false,
+        "current_msg_list": true,
+        "home_msg_list": false,
+        "pm_list": false,
+        "unread_ui": false,
+        "user_events": false,
+        "Plotly": false,
+        "emoji_codes": false
     },
     "rules": {
-        // Disable base rule to avoid conflict
-        "no-duplicate-imports": "off",
-        "no-unused-vars": "off",
-        "no-useless-constructor": "off",
-        "@typescript-eslint/array-type": "error",
-        "@typescript-eslint/await-thenable": "error",
-        "@typescript-eslint/consistent-type-assertions": "error",
-        "@typescript-eslint/consistent-type-imports": "error",
-        "@typescript-eslint/explicit-function-return-type": [
-            "error",
-            {"allowExpressions": true}
-        ],
-        "@typescript-eslint/member-ordering": "error",
-        "@typescript-eslint/no-duplicate-imports": "off",
-        "@typescript-eslint/no-explicit-any": "off",
-        "@typescript-eslint/no-extraneous-class": "error",
-        "@typescript-eslint/no-non-null-assertion": "off",
-        "@typescript-eslint/no-parameter-properties": "error",
-        "@typescript-eslint/no-unnecessary-qualifier": "error",
-        "@typescript-eslint/no-unnecessary-type-assertion": "error",
-        "@typescript-eslint/no-unused-vars": ["error", {"varsIgnorePattern": "^_"}],
-        "@typescript-eslint/no-use-before-define": "error",
-        "@typescript-eslint/no-useless-constructor": "error",
-        "@typescript-eslint/prefer-includes": "error",
-        "@typescript-eslint/prefer-regexp-exec": "error",
-        "@typescript-eslint/prefer-string-starts-ends-with": "error",
-        "@typescript-eslint/promise-function-async": "error",
-        "@typescript-eslint/unified-signatures": "error",
-        "no-undef": "error"
+        "no-restricted-syntax": 0,
+        "no-nested-ternary": 0,
+        "spaced-comment": 0,
+        "space-infix-ops": 0,
+        "newline-per-chained-call": 0,
+        "no-whitespace-before-property": 0,
+        "padded-blocks": 0,
+        "space-in-parens": 0,
+        "eol-last": ["error", "always"],
+        "no-unneeded-ternary": ["error", { "defaultAssignment": false }],
+        "no-case-declarations": "error",
+        "eqeqeq": ["error", "allow-null"],
+        "no-duplicate-imports": "error",
+        "no-undef": "error",
+        "dot-notation": ["error", { "allowKeywords": true }],
+        "no-iterator": "error",
+        "no-dupe-class-members": "error",
+        "no-useless-constructor": "error",
+        "prefer-const": ["error", {
+            "destructuring": "any",
+            "ignoreReadBeforeAssign": true
+        }],
+        "no-const-assign": "error",
+        "no-new-object": 2,
+        "quote-props": ["error", "as-needed", {
+            "keywords": false,
+            "unnecessary": true,
+            "numbers": false
+        }],
+        "no-array-constructor": "error",
+        "array-callback-return": "error",
+        "template-curly-spacing": "error",
+        //The Zulip codebase complies partially with the "no-useless-escape" rule; only regex expressions haven't been updated yet.
+        //Updated regex expressions are currently being tested in casper files and will decide about a potential future enforcement of this rule.
+        "no-useless-escape": 0,
+        "func-style": ["off", "expression"],
+        "wrap-iife": ["error", "outside", { "functionPrototypeMethods": false }],
+        "no-new-func": "error",
+        "space-before-function-paren": ["error", { "anonymous": "always", "named": "never", "asyncArrow": "always" }],
+        "no-param-reassign": 0,
+        "prefer-spread": "error",
+        "arrow-spacing": ["error", { "before": true, "after": true }],
+        "no-alert": 2,
+        "no-array-constructor": 2,
+        "no-caller": 2,
+        "no-bitwise": 2,
+        "no-catch-shadow": 2,
+        "comma-dangle": ["error", {
+            "arrays": "always-multiline",
+            "objects": "always-multiline",
+            "imports": "always-multiline",
+            "exports": "always-multiline",
+            "functions": "never"
+        }],
+        "no-console": 0,
+        "no-control-regex": 2,
+        "no-debugger": 2,
+        "no-div-regex": 2,
+        "no-dupe-keys": 2,
+        "no-else-return": 2,
+        "no-empty": 2,
+        "no-empty-character-class": 2,
+        "no-eq-null": 2,
+        "no-eval": 2,
+        "no-ex-assign": 2,
+        "no-extra-semi": 2,
+        "no-func-assign": 2,
+        "no-floating-decimal": 2,
+        "no-implied-eval": 2,
+        "no-with": 2,
+        "no-fallthrough": 2,
+        "no-unreachable": 2,
+        "no-undef": 2,
+        "no-undef-init": 2,
+        "no-unused-expressions": 2,
+        "no-octal": 2,
+        "no-octal-escape": 2,
+        "no-obj-calls": 2,
+        "no-multi-str": 2,
+        "no-new-wrappers": 2,
+        "no-new": 2,
+        "no-new-func": 2,
+        "no-native-reassign": 2,
+        "no-plusplus": 2,
+        "no-delete-var": 2,
+        "no-return-assign": 2,
+        "no-new-object": 2,
+        "no-label-var": 2,
+        "no-ternary": 0,
+        "no-self-compare": 2,
+        "no-sync": 2,
+        "no-underscore-dangle": 0,
+        "no-loop-func": 2,
+        "no-labels": 2,
+        "no-unused-vars": ["error", { "vars": "local", "args": "after-used",
+            "varsIgnorePattern": "print_elapsed_time|check_duplicate_ids"
+        }],
+        "no-script-url": 2,
+        "no-proto": 2,
+        "no-iterator": 2,
+        "no-mixed-requires": [0, false],
+        "no-extra-parens": ["error", "functions"],
+        "no-shadow": 0,
+        "no-use-before-define": 2,
+        "no-redeclare": 2,
+        "no-regex-spaces": 2,
+        "brace-style": ["error", "1tbs", { "allowSingleLine": true }],
+        "block-scoped-var": 2,
+        "camelcase": 0,
+        "complexity": [0, 4],
+        "curly": 2,
+        "dot-notation": 2,
+        "guard-for-in": 2,
+        "max-depth": [0, 4],
+        "max-len": ["error", 100, 2, {
+            "ignoreUrls": true,
+            "ignoreComments": false,
+            "ignoreRegExpLiterals": true,
+            "ignoreStrings": true,
+            "ignoreTemplateLiterals": true
+        }],
+        "max-params": [0, 3],
+        "max-statements": [0, 10],
+        "new-cap": ["error", { "newIsCap": true, "capIsNew": false }],
+        "new-parens": 2,
+        "one-var": ["error", "never"],
+        "quotes": [0, "single"],
+        "quote-props": ["error", "as-needed", { "keywords": false, "unnecessary": true, "numbers": false }],
+        "radix": 2,
+        "semi": 2,
+        "keyword-spacing": ["error", {
+            "before": true,
+            "after": true,
+            "overrides": {
+                "return": { "after": true },
+                "throw": { "after": true },
+                "case": { "after": true }
             }
-        },
-        {
-            "files": ["**/*.d.ts"],
-            "rules": {
-                "import/unambiguous": "off"
-            }
-        },
-        {
-            "files": ["frontend_tests/**"],
-            "globals": {
-                "CSS": false,
-                "document": false,
-                "navigator": false,
-                "window": false
-            },
-            "rules": {
-                "no-sync": "off"
-            }
-        },
-        {
-            "files": ["tools/debug-require.js"],
-            "env": {
-                "browser": true,
-                "es2020": false
-            },
-            "rules": {
-                // Don’t require ES features that PhantomJS doesn’t support
-                // TODO: Toggle these settings now that we don't use PhantomJS
-                "no-var": "off",
-                "object-shorthand": "off",
-                "prefer-arrow-callback": "off"
-            }
-        },
-        {
-            "files": ["static/**"],
-            "env": {
-                "browser": true,
-                "node": false
-            },
-            "rules": {
-                "no-console": "error"
-            },
-            "settings": {
-                "import/resolver": "webpack"
-            }
-        },
-        {
-            "files": ["static/shared/**"],
-            "env": {
-                "browser": false,
-                "shared-node-browser": true
-            },
-            "rules": {
-                "import/no-restricted-paths": [
-                    "error",
-                    {
-                        "zones": [
-                            {
-                                "target": "./static/shared",
-                                "from": ".",
-                                "except": ["./node_modules", "./static/shared"]
-                            }
-                        ]
-                    }
-                ]
-            }
+        }],
+        "space-before-blocks": 2,
+        "strict": 0,
+        "unnecessary-strict": 0,
+        "use-isnan": 2,
+        "valid-typeof": ["error", { "requireStringLiterals": true }],
+        "wrap-iife": 2,
+        "wrap-regex": 0,
+        "yoda": 2
     }
     }
-    ]
-}

.gitattributes (22 lines, vendored)
@@ -1,13 +1,29 @@
 * text=auto eol=lf
 *.gif binary
 *.jpg binary
-*.jpeg binary
 *.eot binary
 *.woff binary
-*.woff2 binary
 *.svg binary
 *.ttf binary
 *.png binary
 *.otf binary
 *.tif binary
-*.ogg binary
+.gitignore export-ignore
+.gitattributes export-ignore
+/static/assets export-ignore
+/bots export-ignore
+/corporate export-ignore
+/static export-ignore
+/tools export-ignore
+/zilencer export-ignore
+/templates/corporate export-ignore
+/templates/zilencer export-ignore
+/puppet/zulip_internal export-ignore
+/zproject/local_settings.py export-ignore
+/zproject/test_settings.py export-ignore
+/zerver/fixtures export-ignore
+/zerver/tests export-ignore
+/frontend_tests export-ignore
+/node_modules export-ignore
+/humbug export-ignore
+/locale export-ignore

.github/FUNDING.yml (3 lines, vendored)
@@ -1,3 +0,0 @@
-github: zulip
-patreon: zulip
-open_collective: zulip

.github/pull_request_template.md (14 lines, vendored)
@@ -1,14 +0,0 @@
-<!-- What's this PR for? (Just a link to an issue is fine.) -->
-
-
-**Testing plan:** <!-- How have you tested? -->
-
-
-**GIFs or screenshots:** <!-- If a UI change. See:
-https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html
--->
-
-
-<!-- Also be sure to make clear, coherent commits:
-https://zulip.readthedocs.io/en/latest/contributing/version-control.html
--->

.github/workflows/cancel-previous-runs.yml (41 lines, vendored)
@@ -1,41 +0,0 @@
-name: Cancel previous runs
-on: [push, pull_request]
-
-defaults:
-run:
-shell: bash
-
-jobs:
-cancel:
-name: Cancel previous runs
-runs-on: ubuntu-latest
-timeout-minutes: 3
-
-# Don't run this job for zulip/zulip pushes since we
-# want to run those jobs.
-if: ${{ github.event_name != 'push' || github.event.repository.full_name != 'zulip/zulip' }}
-
-steps:
-# We get workflow IDs from GitHub API so we don't have to maintain
-# a hard-coded list of IDs which need to be updated when a workflow
-# is added or removed. And, workflow IDs are different for other forks
-# so this is required.
-- name: Get workflow IDs.
-id: workflow_ids
-env:
-# This is in <owner>/<repo> format e.g. zulip/zulip
-REPOSITORY: ${{ github.repository }}
-run: |
-workflow_api_url=https://api.github.com/repos/$REPOSITORY/actions/workflows
-curl $workflow_api_url -o workflows.json
-
-script="const {workflows} = require('./workflows'); \
-const ids = workflows.map(workflow => workflow.id); \
-console.log(ids.join(','));"
-ids=$(node -e "$script")
-echo "::set-output name=ids::$ids"
-
-- uses: styfle/cancel-workflow-action@0.4.1
-with:
-workflow_id: ${{ steps.workflow_ids.outputs.ids }}
-access_token: ${{ github.token }}
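
An aside on the "Get workflow IDs." step in the deleted workflow above: the node one-liner only turns the workflows.json API response into a comma-separated list of numeric workflow IDs. A minimal bash sketch of the same lookup using jq instead of node (jq is an assumption here, not what the workflow used; the REPOSITORY value is illustrative):

    # Fetch the workflow list and print the IDs as "id1,id2,...".
    REPOSITORY=zulip/zulip   # illustrative <owner>/<repo> value
    curl -s "https://api.github.com/repos/$REPOSITORY/actions/workflows" \
        | jq -r '.workflows | map(.id | tostring) | join(",")'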

.github/workflows/codeql-analysis.yml (30 lines, vendored)
@@ -1,30 +0,0 @@
-name: "Code scanning"
-
-on: [push, pull_request]
-
-jobs:
-CodeQL:
-runs-on: ubuntu-latest
-
-steps:
-- name: Check out repository
-uses: actions/checkout@v2
-with:
-# We must fetch at least the immediate parents so that if this is
-# a pull request then we can check out the head.
-fetch-depth: 2
-
-# If this run was triggered by a pull request event, then check out
-# the head of the pull request instead of the merge commit.
-- run: git checkout HEAD^2
-if: ${{ github.event_name == 'pull_request' }}
-
-# Initializes the CodeQL tools for scanning.
-- name: Initialize CodeQL
-uses: github/codeql-action/init@v1
-
-# Override language selection by uncommenting this and choosing your languages
-# with:
-# languages: go, javascript, csharp, python, cpp, java
-- name: Perform CodeQL Analysis
-uses: github/codeql-action/analyze@v1

.github/workflows/legacy-os.yml (24 lines, vendored)
@@ -1,24 +0,0 @@
-name: Legacy OS
-
-on: [push, pull_request]
-
-jobs:
-xenial:
-name: Ubuntu 16.04 Xenial (Python 3.5, legacy)
-runs-on: ubuntu-16.04
-steps:
-- uses: actions/checkout@v2
-- name: Check tools/provision error message on xenial
-run: |
-{ { ! tools/provision 2>&1 >&3; } | tee provision.err; } 3>&1 >&2
-grep -Fqx 'Error: ubuntu 16.04 is no longer a supported platform for Zulip.' provision.err
-- name: Check scripts/lib/upgrade-zulip-stage-2 error message on xenial
-run: |
-{ { ! sudo scripts/lib/upgrade-zulip-stage-2 2>&1 >&3; } | tee upgrade.err; } 3>&1 >&2
-grep -Fq 'upgrade-zulip-stage-2: Unsupported platform: ubuntu 16.04' upgrade.err
-
-- name: Report status
-if: failure()
-env:
-ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
-run: tools/ci/send-failure-message
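
A note on the `{ { ! tools/provision 2>&1 >&3; } | tee provision.err; } 3>&1 >&2` idiom in the two check steps above: it routes only stderr through `tee` (so the error text is both displayed and saved for the following `grep`), keeps stdout on the console, and the leading `!` inverts the exit status because the command is expected to fail on xenial. A minimal standalone bash sketch of the same pattern, where `some_command`, the log file name, and the grep text are placeholders:

    # Save the real stdout on fd 3, send only stderr into the pipe, and let
    # tee both display it and write it to a file; `!` inverts the exit status
    # because some_command is expected to fail.
    { { ! some_command 2>&1 >&3; } | tee captured-stderr.log; } 3>&1 >&2
    grep -Fq 'expected error text' captured-stderr.log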

.github/workflows/production-suite.yml (208 lines, vendored)
@@ -1,208 +0,0 @@
-name: Zulip production suite
-
-on:
-push:
-paths:
-- "**/migrations/**"
-- puppet/**
-- requirements/**
-- scripts/**
-- static/**
-- tools/**
-- zproject/**
-- yarn.lock
-- .github/workflows/production-suite.yml
-pull_request:
-paths:
-- "**/migrations/**"
-- puppet/**
-- requirements/**
-- scripts/**
-- static/**
-- tools/**
-- zproject/**
-- yarn.lock
-- .github/workflows/production-suite.yml
-
-defaults:
-run:
-shell: bash
-
-jobs:
-production_build:
-name: Bionic production build
-runs-on: ubuntu-latest
-
-# This docker image was created by a generated Dockerfile at:
-# tools/ci/images/bionic/Dockerfile
-# Bionic ships with Python 3.6.
-container: zulip/ci:bionic
-steps:
-- name: Add required permissions
-run: |
-# The checkout actions doesn't clone to ~/zulip or allow
-# us to use the path option to clone outside the current
-# /__w/zulip/zulip directory. Since this directory is owned
-# by root we need to change it's ownership to allow the
-# github user to clone the code here.
-# Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE
-# which is /home/runner/work/.
-sudo chown -R github .
-
-# This is the GitHub Actions specific cache directory the
-# the current github user must be able to access for the
-# cache action to work. It is owned by root currently.
-sudo chmod -R 0777 /__w/_temp/
-
-- uses: actions/checkout@v2
-
-- name: Create cache directories
-run: |
-dirs=(/srv/zulip-{npm,venv,emoji}-cache)
-sudo mkdir -p "${dirs[@]}"
-sudo chown -R github "${dirs[@]}"
-
-- name: Restore node_modules cache
-uses: actions/cache@v2
-with:
-path: /srv/zulip-npm-cache
-key: v1-yarn-deps-${{ github.job }}-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }}
-restore-keys: v1-yarn-deps-${{ github.job }}
-
-- name: Restore python cache
-uses: actions/cache@v2
-with:
-path: /srv/zulip-venv-cache
-key: v1-venv-${{ github.job }}-${{ hashFiles('requirements/thumbor-dev.txt') }}-${{ hashFiles('requirements/dev.txt') }}
-restore-keys: v1-venv-${{ github.job }}
-
-- name: Restore emoji cache
-uses: actions/cache@v2
-with:
-path: /srv/zulip-emoji-cache
-key: v1-emoji-${{ github.job }}-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
-restore-keys: v1-emoji-${{ github.job }}
-
-- name: Do Bionic hack
-run: |
-# Temporary hack till `sudo service redis-server start` gets fixes in Bionic. See
-# https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
-sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf
-
-- name: Build production tarball
-run: ./tools/ci/production-build
-
-- name: Upload production build artifacts for install jobs
-uses: actions/upload-artifact@v2
-with:
-name: production-tarball
-path: /tmp/production-build
-retention-days: 14
-
-- name: Report status
-if: failure()
-env:
-ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
-run: tools/ci/send-failure-message
-
-production_install:
-strategy:
-fail-fast: false
-matrix:
-include:
-# Base images are built using `tools/ci/Dockerfile.template`.
-# The comments at the top explain how to build and upload these images.
-- docker_image: zulip/ci:bionic
-name: Bionic production install
-is_bionic: true
-os: bionic
-
-- docker_image: zulip/ci:focal
-name: Focal production install
-is_focal: true
-os: focal
-
-- docker_image: zulip/ci:buster
-name: Buster production install
-is_buster: true
-os: buster
-
-- docker_image: zulip/ci:bullseye
-name: Bullseye production install
-is_bullseye: true
-os: bullseye
-
-name: ${{ matrix.name }}
-container: ${{ matrix.docker_image }}
-runs-on: ubuntu-latest
-needs: production_build
-
-steps:
-- name: Download built production tarball
-uses: actions/download-artifact@v2
-with:
-name: production-tarball
-path: /tmp
-
-- name: Add required permissions and setup
-run: |
-# This is the GitHub Actions specific cache directory the
-# the current github user must be able to access for the
-# cache action to work. It is owned by root currently.
-sudo chmod -R 0777 /__w/_temp/
-
-# Create the zulip directory that the tools/ci/ scripts needs
-mkdir -p /home/github/zulip
-
-# Since actions/download-artifact@v2 loses all the permissions
-# of the tarball uploaded by the upload artifact fix those.
-chmod +x /tmp/production-extract-tarball
-chmod +x /tmp/production-upgrade-pg
-chmod +x /tmp/production-install
-chmod +x /tmp/production-verify
-chmod +x /tmp/send-failure-message
-
-- name: Create cache directories
-run: |
-dirs=(/srv/zulip-{npm,venv,emoji}-cache)
-sudo mkdir -p "${dirs[@]}"
-sudo chown -R github "${dirs[@]}"
-
-- name: Restore node_modules cache
-uses: actions/cache@v2
-with:
-path: /srv/zulip-npm-cache
-key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('/tmp/package.json') }}-${{ hashFiles('/tmp/yarn.lock') }}
-restore-keys: v1-yarn-deps-${{ matrix.os }}
-
-- name: Do Bionic hack
-if: ${{ matrix.is_bionic }}
-run: |
-# Temporary hack till `sudo service redis-server start` gets fixes in Bionic. See
-# https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
-sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf
-
-- name: Production extract tarball
-run: /tmp/production-extract-tarball
-
-- name: Install production
-run: |
-sudo service rabbitmq-server restart
-sudo /tmp/production-install
-
-- name: Verify install
-run: sudo /tmp/production-verify
-
-- name: Upgrade postgresql
-if: ${{ matrix.is_bionic }}
-run: sudo /tmp/production-upgrade-pg
-
-- name: Verify install after upgrading postgresql
-if: ${{ matrix.is_bionic }}
-run: sudo /tmp/production-verify
-
-- name: Report status
-if: failure()
-env:
-ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
-run: /tmp/send-failure-message

.github/workflows/update-oneclick-apps.yml (24 lines, vendored)
@@ -1,24 +0,0 @@
-name: Update one click apps
-on:
-release:
-types: [published]
-jobs:
-update-digitalocean-oneclick-app:
-runs-on: ubuntu-latest
-steps:
-- uses: actions/checkout@v2
-- name: Update DigitalOcean one click app
-env:
-DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }}
-ZULIP_API_KEY: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_API_KEY }}
-ZULIP_EMAIL: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_EMAIL }}
-ZULIP_SITE: https://chat.zulip.org
-ONE_CLICK_ACTION_STREAM: kandra ops
-PYTHON_DIGITALOCEAN_REQUEST_TIMEOUT_SEC: 30
-RELEASE_VERSION: ${{ github.event.release.tag_name }}
-run: |
-export PATH="$HOME/.local/bin:$PATH"
-git clone https://github.com/zulip/marketplace-partners
-pip3 install python-digitalocean zulip fab-classic
-echo $PATH
-python3 tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py

.github/workflows/zulip-ci.yml (250 lines, vendored)
@@ -1,250 +0,0 @@
-# NOTE: Everything test in this file should be in `tools/test-all`. If there's a
-# reason not to run it there, it should be there as a comment
-# explaining why.
-
-name: Zulip CI
-
-on: [push, pull_request]
-
-defaults:
-run:
-shell: bash
-
-jobs:
-tests:
-strategy:
-fail-fast: false
-matrix:
-include:
-# This docker image was created by a generated Dockerfile at:
-# tools/ci/images/bionic/Dockerfile
-# Bionic ships with Python 3.6.
-- docker_image: zulip/ci:bionic
-name: Ubuntu 18.04 Bionic (Python 3.6, backend + frontend)
-os: bionic
-is_bionic: true
-include_frontend_tests: true
-
-# This docker image was created by a generated Dockerfile at:
-# tools/ci/images/focal/Dockerfile
-# Focal ships with Python 3.8.2.
-- docker_image: zulip/ci:focal
-name: Ubuntu 20.04 Focal (Python 3.8, backend)
-os: focal
-is_focal: true
-include_frontend_tests: false
-
-# This docker image was created by a generated Dockerfile at:
-# tools/ci/images/focal/Dockerfile
-# Bullseye ships with Python 3.9.2.
-- docker_image: zulip/ci:bullseye
-name: Debian 11 Bullseye (Python 3.9, backend)
-os: bullseye
-is_bullseye: true
-include_frontend_tests: false
-
-runs-on: ubuntu-latest
-name: ${{ matrix.name }}
-container: ${{ matrix.docker_image }}
-env:
-# GitHub Actions sets HOME to /github/home which causes
-# problem later in provison and frontend test that runs
-# tools/setup/postgresql-init-dev-db because of the .pgpass
-# location. PostgreSQL (psql) expects .pgpass to be at
-# /home/github/.pgpass and setting home to `/home/github/`
-# ensures it written there because we write it to ~/.pgpass.
-HOME: /home/github/
-
-steps:
-- name: Add required permissions
-run: |
-# The checkout actions doesn't clone to ~/zulip or allow
-# us to use the path option to clone outside the current
-# /__w/zulip/zulip directory. Since this directory is owned
-# by root we need to change it's ownership to allow the
-# github user to clone the code here.
-# Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE
-# which is /home/runner/work/.
-sudo chown -R github .
-
-# This is the GitHub Actions specific cache directory the
-# the current github user must be able to access for the
-# cache action to work. It is owned by root currently.
-sudo chmod -R 0777 /__w/_temp/
-
-- uses: actions/checkout@v2
-
-- name: Create cache directories
-run: |
-dirs=(/srv/zulip-{npm,venv,emoji}-cache)
-sudo mkdir -p "${dirs[@]}"
-sudo chown -R github "${dirs[@]}"
-
-- name: Restore node_modules cache
-uses: actions/cache@v2
-with:
-path: /srv/zulip-npm-cache
-key: v1-yarn-deps-${{ matrix.os }}-${{ hashFiles('package.json') }}-${{ hashFiles('yarn.lock') }}
-restore-keys: v1-yarn-deps-${{ matrix.os }}
-
-- name: Restore python cache
-uses: actions/cache@v2
-with:
-path: /srv/zulip-venv-cache
-key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/thumbor-dev.txt') }}-${{ hashFiles('requirements/dev.txt') }}
-restore-keys: v1-venv-${{ matrix.os }}
-
-- name: Restore emoji cache
-uses: actions/cache@v2
-with:
-path: /srv/zulip-emoji-cache
-key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
-restore-keys: v1-emoji-${{ matrix.os }}
-
-- name: Do Bionic hack
-if: ${{ matrix.is_bionic }}
-run: |
-# Temporary hack till `sudo service redis-server start` gets fixes in Bionic. See
-# https://chat.zulip.org/#narrow/stream/3-backend/topic/Ubuntu.20bionic.20CircleCI
-sudo sed -i '/^bind/s/bind.*/bind 0.0.0.0/' /etc/redis/redis.conf
-
-- name: Install dependencies
-run: |
-# This is the main setup job for the test suite
-./tools/ci/setup-backend --skip-dev-db-build
-
-# Cleaning caches is mostly unnecessary in GitHub Actions, because
-# most builds don't get to write to the cache.
-# scripts/lib/clean-unused-caches --verbose --threshold 0
-
-- name: Run tools test
-run: |
-source tools/ci/activate-venv
-./tools/test-tools
-
-- name: Run backend lint
-run: |
-source tools/ci/activate-venv
-echo "Test suite is running under $(python --version)."
-./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky
-
-- name: Run frontend lint
-if: ${{ matrix.include_frontend_tests }}
-run: |
-source tools/ci/activate-venv
-./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky
-
-- name: Run backend tests
-run: |
-source tools/ci/activate-venv
-./tools/test-backend --coverage --include-webhooks --no-cov-cleanup --ban-console-output
-
-- name: Run mypy
-run: |
-source tools/ci/activate-venv
-# We run mypy after the backend tests so we get output from the
-# backend tests, which tend to uncover more serious problems, first.
-./tools/run-mypy --version
-./tools/run-mypy
-
-- name: Run miscellaneous tests
-run: |
-source tools/ci/activate-venv
-
-# Currently our compiled requirements files will differ for different python versions
-# so we will run test-locked-requirements only for Bionic.
-# ./tools/test-locked-requirements
-# ./tools/test-run-dev # https://github.com/zulip/zulip/pull/14233
-#
-# This test has been persistently flaky at like 1% frequency, is slow,
-# and is for a very specific single feature, so we don't run it by default:
-# ./tools/test-queue-worker-reload
-
-./tools/test-migrations
-./tools/setup/optimize-svg --check
-./tools/setup/generate_integration_bots_avatars.py --check-missing
-
-- name: Run documentation and api tests
-run: |
-source tools/ci/activate-venv
-# In CI, we only test links we control in test-documentation to avoid flakes
-./tools/test-documentation --skip-external-links
-./tools/test-help-documentation --skip-external-links
-./tools/test-api
-
-- name: Run node tests
-if: ${{ matrix.include_frontend_tests }}
-run: |
-source tools/ci/activate-venv
-# Run the node tests first, since they're fast and deterministic
-./tools/test-js-with-node --coverage
-
-- name: Check schemas
-if: ${{ matrix.include_frontend_tests }}
-run: |
-source tools/ci/activate-venv
-# Check that various schemas are consistent. (is fast)
-./tools/check-schemas
-
-- name: Check capitalization of strings
-if: ${{ matrix.include_frontend_tests }}
-run: |
-source tools/ci/activate-venv
-./manage.py makemessages --locale en
-PYTHONWARNINGS=ignore ./tools/check-capitalization --no-generate
-PYTHONWARNINGS=ignore ./tools/check-frontend-i18n --no-generate
-
-- name: Run puppeteer tests
-if: ${{ matrix.include_frontend_tests }}
-run: |
-source tools/ci/activate-venv
-./tools/test-js-with-puppeteer
-
-- name: Check for untracked files
-run: |
-source tools/ci/activate-venv
-# This final check looks for untracked files that may have been
-# created by test-backend or provision.
-untracked="$(git ls-files --exclude-standard --others)"
-if [ -n "$untracked" ]; then
-printf >&2 "Error: untracked files:\n%s\n" "$untracked"
-exit 1
-fi
-
-- name: Test locked requirements
-if: ${{ matrix.is_bionic }}
-run: |
-. /srv/zulip-py3-venv/bin/activate && \
-./tools/test-locked-requirements
-
-- name: Upload coverage reports
-
-# Only upload coverage when both frontend and backend
-# tests are ran.
-if: ${{ matrix.include_frontend_tests }}
-run: |
-# Codcov requires `.coverage` file to be stored in the
-# current working directory.
-mv ./var/.coverage ./.coverage
-. /srv/zulip-py3-venv/bin/activate || true
-
-pip install codecov && codecov || echo "Error in uploading coverage reports to codecov.io."
-
-- name: Store Puppeteer artifacts
-# Upload these on failure, as well
-if: ${{ always() && matrix.include_frontend_tests }}
-uses: actions/upload-artifact@v2
-with:
-name: puppeteer
-path: ./var/puppeteer
-retention-days: 60
-
-- name: Check development database build
-if: ${{ matrix.is_focal || matrix.is_bullseye }}
-run: ./tools/ci/setup-backend
-
-- name: Report status
-if: failure()
-env:
-ZULIP_BOT_KEY: ${{ secrets.ZULIP_BOT_KEY }}
-run: tools/ci/send-failure-message

.gitignore (96 lines, vendored)
@@ -1,89 +1,29 @@
-# Quick format and style primer:
-#
-# * If a pattern is meant only for a specific location, it should have a
-# leading slash, like `/staticfiles.json`.
-# * In principle any non-trailing slash (like `zproject/dev-secrets.conf`)
-# will do, but this makes a confusing pattern. Adding a leading slash
-# is clearer.
-#
-# * Patterns like `.vscode/` without slashes, or with only a trailing slash,
-# match in any subdirectory.
-#
-# * Subdirectories with several internal things to ignore get their own
-# `.gitignore` files.
-#
-# * Comments must be on their own line. (Otherwise they don't work.)
-#
-# See `git help ignore` for details on the format.

-## Config files for the dev environment
-/zproject/dev-secrets.conf
-/tools/conf.ini
-/tools/custom_provision
-/tools/droplets/conf.ini

-## Byproducts of setting up and using the dev environment
 *.pyc
-package-lock.json

-/.vagrant
-/var/*
-!/var/puppeteer
-/var/puppeteer/*
-!/var/puppeteer/test_credentials.d.ts

-/.dmypy.json

-# Dockerfiles generated for continuous integration
-/tools/ci/images

-# Generated i18n data
-/locale/en
-/locale/language_options.json
-/locale/language_name_map.json
-/locale/*/mobile.json

-# Static build
-*.mo
-npm-debug.log
-/node_modules
-/prod-static
-/staticfiles.json
-/webpack-stats-production.json
-/yarn-error.log
-zulip-git-version

-# Test / analysis tools
-.coverage

-## Files (or really symlinks) created in a prod deployment
-/zproject/prod_settings.py
-/zulip-current-venv
-/zulip-py3-venv
-/zulip-thumbor-venv

-## Files left by various editors and local environments
-# (Ideally these should be in everyone's respective personal gitignore files.)
 *~
+/prod-static
 *.sw[po]
-.idea
+*.DS_Store
 .kdev4
+.idea
 zulip.kdev4
+coverage/
+.coverage
+/queue_error
 .kateproject.d/
 .kateproject
 *.kate-swp
 *.sublime-project
 *.sublime-workspace
+.vagrant
+/zproject/dev-secrets.conf
+static/js/bundle.js
+static/generated/emoji
+static/generated/github-contributors.json
+static/locale/language_options.json
+/node_modules
+/staticfiles.json
+npm-debug.log
+*.mo
+var/*
 .vscode/
-*.DS_Store
+tools/conf.ini
-# .cache/ is generated by VSCode's test runner
-.cache/
-.eslintcache

-# Core dump files
-core

-## Miscellaneous
-# (Ideally this section is empty.)
-zthumbor/thumbor_local_settings.py
-.transifexrc
|||||||
13
.gitlint
13
.gitlint
@@ -1,13 +0,0 @@
|
|||||||
[general]
|
|
||||||
ignore=title-trailing-punctuation, body-min-length, body-is-missing
|
|
||||||
|
|
||||||
extra-path=tools/lib/gitlint-rules.py
|
|
||||||
|
|
||||||
[title-match-regex]
|
|
||||||
regex=^(.+:\ )?[A-Z].+\.$
|
|
||||||
|
|
||||||
[title-max-length]
|
|
||||||
line-length=76
|
|
||||||
|
|
||||||
[body-max-line-length]
|
|
||||||
line-length=76
|
|
||||||

.mailmap (40 lines)
@@ -1,40 +0,0 @@
-Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net>
-Alex Vandiver <alexmv@zulip.com> <github@chmrr.net>
-Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com>
-Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@zulip.com>
-Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in>
-Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com>
-Anders Kaseorg <anders@zulip.com> <andersk@mit.edu>
-Brock Whittaker <brock@zulipchat.com> <bjwhitta@asu.edu>
-Brock Whittaker <brock@zulipchat.com> <brockwhittaker@Brocks-MacBook.local>
-Brock Whittaker <brock@zulipchat.com> <brock@zulipchat.org>
-Chris Bobbe <cbobbe@zulip.com> <cbobbe@zulipchat.com>
-Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com>
-Greg Price <greg@zulip.com> <gnprice@gmail.com>
-Greg Price <greg@zulip.com> <greg@zulipchat.com>
-Greg Price <greg@zulip.com> <price@mit.edu>
-Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com>
-Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com>
-Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com>
-Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com>
-Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com>
-Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com>
-Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com>
-Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu>
-Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com>
-Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com>
-Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com>
-Scott Feeney <scott@oceanbase.org> <scott@humbughq.com>
-Scott Feeney <scott@oceanbase.org> <scott@zulip.com>
-Steve Howell <showell@zulip.com> <showell30@yahoo.com>
-Steve Howell <showell@zulip.com> <showell@yahoo.com>
-Steve Howell <showell@zulip.com> <showell@zulipchat.com>
-Steve Howell <showell@zulip.com> <steve@humbughq.com>
-Steve Howell <showell@zulip.com> <steve@zulip.com>
-Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com>
-Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com>
-Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu>
-Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com>
-Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com>
-Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com>
-Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com>

@@ -1,6 +0,0 @@
-/corporate/tests/stripe_fixtures
-/locale
-/static/third
-/tools/setup/emoji/emoji_map.json
-/zerver/tests/fixtures
-/zerver/webhooks/*/fixtures

@@ -1,15 +0,0 @@
-{
-    "source_directories": ["."],
-    "taint_models_path": [
-        "stubs/taint",
-        "zulip-py3-venv/lib/pyre_check/taint/"
-    ],
-    "search_path": [
-        "stubs/",
-        "zulip-py3-venv/lib/pyre_check/stubs/"
-    ],
-    "typeshed": "zulip-py3-venv/lib/pyre_check/typeshed/",
-    "exclude": [
-        "/srv/zulip/zulip-py3-venv/.*"
-    ]
-}

@@ -1 +0,0 @@
-sonar.inclusions=**/*.py,**/*.html

.travis.yml (54 lines, new file)
@@ -0,0 +1,54 @@
+dist: trusty
+before_install:
+- nvm install 0.10
+install:
+# Disable Travis CI's built-in NVM installation
+- mv ~/.nvm ~/.travis-nvm-disabled
+- pip install coveralls
+- tools/travis/setup-$TEST_SUITE
+- tools/clean-venv-cache --travis
+cache:
+- apt: false
+- directories:
+- $HOME/zulip-venv-cache
+- node_modules
+- $HOME/node
+env:
+global:
+- COVERAGE_FILE=var/.coverage
+- COVERALLS_PARALLEL=true
+- COVERALLS_SERVICE_NAME=travis-pro
+- COVERALLS_REPO_TOKEN=hnXUEBKsORKHc8xIENGs9JjktlTb2HKlG
+- BOTO_CONFIG=/tmp/nowhere
+matrix:
+- TEST_SUITE=frontend
+- TEST_SUITE=backend
+language: python
+python:
+- "2.7"
+- "3.4"
+matrix:
+include:
+- python: "3.4"
+env: TEST_SUITE=static-analysis
+- python: "3.4"
+env: TEST_SUITE=production
+- python: "2.7"
+env: TEST_SUITE=production
+# command to run tests
+script:
+- unset GEM_PATH
+- ./tools/travis/$TEST_SUITE
+sudo: required
+services:
+- docker
+addons:
+artifacts:
+paths:
+- $(ls var/casper/* | tr "\n" ":")
+- $(ls /tmp/zulip-test-event-log/* | tr "\n" ":")
+postgresql: "9.3"
+after_success:
+coveralls
+notifications:
+webhooks: https://coveralls.io/webhook?repo_token=$COVERALLS_REPO_TOKEN

.tx/config (27 lines)
@@ -1,33 +1,14 @@
 [main]
 host = https://www.transifex.com
-lang_map = zh-Hans: zh_Hans, zh-Hant: zh_Hant

 [zulip.djangopo]
-file_filter = locale/<lang>/LC_MESSAGES/django.po
-source_file = locale/en/LC_MESSAGES/django.po
+source_file = static/locale/en/LC_MESSAGES/django.po
 source_lang = en
 type = PO
+file_filter = static/locale/<lang>/LC_MESSAGES/django.po

 [zulip.translationsjson]
-file_filter = locale/<lang>/translations.json
-source_file = locale/en/translations.json
-source_lang = en
-type = KEYVALUEJSON
+source_file = static/locale/en/translations.json

-[zulip.mobile]
-file_filter = locale/<lang>/mobile.json
-source_file = locale/en/mobile.json
-source_lang = en
-type = KEYVALUEJSON

-[zulip-test.djangopo]
-file_filter = locale/<lang>/LC_MESSAGES/django.po
-source_file = locale/en/LC_MESSAGES/django.po
-source_lang = en
-type = PO

-[zulip-test.translationsjson]
-file_filter = locale/<lang>/translations.json
-source_file = locale/en/translations.json
 source_lang = en
 type = KEYVALUEJSON
+file_filter = static/locale/<lang>/translations.json
@@ -1,104 +0,0 @@
# Zulip Code of Conduct

Like the technical community as a whole, the Zulip team and community is
made up of a mixture of professionals and volunteers from all over the
world, working on every aspect of the mission, including mentorship,
teaching, and connecting people.

Diversity is one of our huge strengths, but it can also lead to
communication issues and unhappiness. To that end, we have a few ground
rules that we ask people to adhere to. This code applies equally to
founders, mentors, and those seeking help and guidance.

This isn't an exhaustive list of things that you can't do. Rather, take it
in the spirit in which it's intended --- a guide to make it easier to enrich
all of us and the technical communities in which we participate.

## Expected behavior

The following behaviors are expected and requested of all community members:

* Participate. In doing so, you contribute to the health and longevity of
  the community.
* Exercise consideration and respect in your speech and actions.
* Attempt collaboration before conflict. Assume good faith.
* Refrain from demeaning, discriminatory, or harassing behavior and speech.
* Take action or alert community leaders if you notice a dangerous
  situation, someone in distress, or violations of this code, even if they
  seem inconsequential.
* Community event venues may be shared with members of the public; be
  respectful to all patrons of these locations.

## Unacceptable behavior

The following behaviors are considered harassment and are unacceptable
within the Zulip community:

* Jokes or derogatory language that singles out members of any race,
  ethnicity, culture, national origin, color, immigration status, social and
  economic class, educational level, language proficiency, sex, sexual
  orientation, gender identity and expression, age, size, family status,
  political belief, religion, and mental and physical ability.
* Violence, threats of violence, or violent language directed against
  another person.
* Disseminating or threatening to disseminate another person's personal
  information.
* Personal insults of any sort.
* Posting or displaying sexually explicit or violent material.
* Inappropriate photography or recording.
* Deliberate intimidation, stalking, or following (online or in person).
* Unwelcome sexual attention. This includes sexualized comments or jokes,
  inappropriate touching or groping, and unwelcomed sexual advances.
* Sustained disruption of community events, including talks and
  presentations.
* Advocating for, or encouraging, any of the behaviors above.

## Reporting and enforcement

Harassment and other code of conduct violations reduce the value of the
community for everyone. If someone makes you or anyone else feel unsafe or
unwelcome, please report it to the community organizers at
zulip-code-of-conduct@googlegroups.com as soon as possible. You can make a
report either personally or anonymously.

If a community member engages in unacceptable behavior, the community
organizers may take any action they deem appropriate, up to and including a
temporary ban or permanent expulsion from the community without warning (and
without refund in the case of a paid event).

If someone outside the development community (e.g. a user of the Zulip
software) engages in unacceptable behavior that affects someone in the
community, we still want to know. Even if we don't have direct control over
the violator, the community organizers can still support the people
affected, reduce the chance of a similar violation in the future, and take
any direct action we can.

The nature of reporting means it can only help after the fact. If you see
something you can do while a violation is happening, do it. A lot of the
harms of harassment and other violations can be mitigated by the victim
knowing that the other people present are on their side.

All reports will be kept confidential. In some cases, we may determine that a
public statement will need to be made. In such cases, the identities of all
victims and reporters will remain confidential unless those individuals
instruct us otherwise.

## Scope

We expect all community participants (contributors, paid or otherwise,
sponsors, and other guests) to abide by this Code of Conduct in all
community venues, online and in-person, as well as in all private
communications pertaining to community business.

This Code of Conduct and its related procedures also applies to unacceptable
behavior occurring outside the scope of community activities when such
behavior has the potential to adversely affect the safety and well-being of
community members.

## License and attribution

This Code of Conduct is adapted from the
[Django Code of Conduct](https://www.djangoproject.com/conduct/), and is
under a
[Creative Commons BY-SA](https://creativecommons.org/licenses/by-sa/4.0/)
license.
342
CONTRIBUTING.md
@@ -1,342 +0,0 @@
# Contributing to Zulip

Welcome to the Zulip community!

## Community

The
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html)
is the primary communication forum for the Zulip community. It is a good
place to start whether you have a question, are a new contributor, are a new
user, or anything else. Make sure to read the
[community norms](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html#community-norms)
before posting. The Zulip community is also governed by a
[code of conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html).

You can subscribe to
[zulip-devel-announce@googlegroups.com](https://groups.google.com/g/zulip-devel-announce)
or our [Twitter](https://twitter.com/zulip) account for a very low
traffic (<1 email/month) way to hear about things like mentorship
opportunities with Google Summer of Code, in-person sprints at
conferences, and other opportunities to contribute.

## Ways to contribute

To make a code or documentation contribution, read our
[step-by-step guide](#your-first-codebase-contribution) to getting
started with the Zulip codebase. A small sample of the type of work that
needs doing:
* Bug squashing and feature development on our Python/Django
  [backend](https://github.com/zulip/zulip), web
  [frontend](https://github.com/zulip/zulip), React Native
  [mobile app](https://github.com/zulip/zulip-mobile), or Electron
  [desktop app](https://github.com/zulip/zulip-desktop).
* Building out our
  [Python API and bots](https://github.com/zulip/python-zulip-api) framework.
* [Writing an integration](https://zulip.com/api/integrations-overview).
* Improving our [user](https://zulip.com/help/) or
  [developer](https://zulip.readthedocs.io/en/latest/) documentation.
* [Reviewing code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html)
  and manually testing pull requests.

**Non-code contributions**: Some of the most valuable ways to contribute
don't require touching the codebase at all. We list a few of them below:

* [Reporting issues](#reporting-issues), including both feature requests and
  bug reports.
* [Giving feedback](#user-feedback) if you are evaluating or using Zulip.
* [Sponsor Zulip](https://github.com/sponsors/zulip) through the GitHub sponsors program.
* [Translating](https://zulip.readthedocs.io/en/latest/translating/translating.html)
  Zulip.
* [Outreach](#zulip-outreach): Star us on GitHub, upvote us
  on product comparison sites, or write for [the Zulip blog](https://blog.zulip.org/).

## Your first (codebase) contribution

This section has a step by step guide to starting as a Zulip codebase
contributor. It's long, but don't worry about doing all the steps perfectly;
no one gets it right the first time, and there are a lot of people available
to help.
* First, make an account on the
  [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html),
  paying special attention to the community norms. If you'd like, introduce
  yourself in
  [#new members](https://chat.zulip.org/#narrow/stream/95-new-members), using
  your name as the topic. Bonus: tell us about your first impressions of
  Zulip, and anything that felt confusing/broken as you started using the
  product.
* Read [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor).
* [Install the development environment](https://zulip.readthedocs.io/en/latest/development/overview.html),
  getting help in
  [#development help](https://chat.zulip.org/#narrow/stream/49-development-help)
  if you run into any troubles.
* Read the
  [Zulip guide to Git](https://zulip.readthedocs.io/en/latest/git/index.html)
  and do the Git tutorial (coming soon) if you are unfamiliar with
  Git, getting help in
  [#git help](https://chat.zulip.org/#narrow/stream/44-git-help) if
  you run into any troubles. Be sure to check out the
  [extremely useful Zulip-specific tools page](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html).

### Picking an issue

Now, you're ready to pick your first issue! There are hundreds of open issues
in the main codebase alone. This section will help you find an issue to work
on.

* If you're interested in
  [mobile](https://github.com/zulip/zulip-mobile/issues?q=is%3Aopen+is%3Aissue),
  [desktop](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue),
  or
  [bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue)
  development, check the respective links for open issues, or post in
  [#mobile](https://chat.zulip.org/#narrow/stream/48-mobile),
  [#desktop](https://chat.zulip.org/#narrow/stream/16-desktop), or
  [#integration](https://chat.zulip.org/#narrow/stream/127-integrations).
* For the main server and web repository, we recommend browsing
  recently opened issues to look for issues you are confident you can
  fix correctly in a way that clearly communicates why your changes
  are the correct fix. Our GitHub workflow bot, zulipbot, limits
  users who have 0 commits merged to claiming a single issue labeled
  with "good first issue" or "help wanted".
* We also partition all of our issues in the main repo into areas like
  admin, compose, emoji, hotkeys, i18n, onboarding, search, etc. Look
  through our [list of labels](https://github.com/zulip/zulip/labels), and
  click on some of the `area:` labels to see all the issues related to your
  areas of interest.
* If the lists of issues are overwhelming, post in
  [#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with a
  bit about your background and interests, and we'll help you out. The most
  important thing to say is whether you're looking for a backend (Python),
  frontend (JavaScript and TypeScript), mobile (React Native), desktop (Electron),
  documentation (English) or visual design (JavaScript/TypeScript + CSS) issue, and a
  bit about your programming experience and available time.

We also welcome suggestions of features that you feel would be valuable or
changes that you feel would make Zulip a better open source project. If you
have a new feature you'd like to add, we recommend you start by posting in
[#new members](https://chat.zulip.org/#narrow/stream/95-new-members) with the
feature idea and the problem that you're hoping to solve.

Other notes:
* For a first pull request, it's better to aim for a smaller contribution
  than a bigger one. Many first contributions have fewer than 10 lines of
  changes (not counting changes to tests).
* The full list of issues explicitly looking for a contributor can be
  found with the
  [good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
  and
  [help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
  labels. Avoid issues with the "difficult" label unless you
  understand why it is difficult and are confident you can resolve the
  issue correctly and completely. Issues without one of these labels
  are fair game if Tim has written a clear technical design proposal
  in the issue, or it is a bug that you can reproduce and you are
  confident you can fix the issue correctly.
* For most new contributors, there's a lot to learn while making your first
  pull request. It's OK if it takes you a while; that's normal! You'll be
  able to work a lot faster as you build experience.

### Working on an issue

To work on an issue, claim it by adding a comment with `@zulipbot claim` to
the issue thread. [Zulipbot](https://github.com/zulip/zulipbot) is a GitHub
workflow bot; it will assign you to the issue and label the issue as "in
progress". Some additional notes:

* You can only claim issues with the
  [good first issue](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
  or
  [help wanted](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
  labels. Zulipbot will give you an error if you try to claim an issue
  without one of those labels.
* You're encouraged to ask questions on how to best implement or debug your
  changes -- the Zulip maintainers are excited to answer questions to help
  you stay unblocked and working efficiently. You can ask questions on
  chat.zulip.org, or on the GitHub issue or pull request.
* We encourage early pull requests for work in progress. Prefix the title of
  work in progress pull requests with `[WIP]`, and remove the prefix when
  you think it might be mergeable and want it to be reviewed.
* After updating a PR, add a comment to the GitHub thread mentioning that it
  is ready for another review. GitHub only notifies maintainers of the
  changes when you post a comment, so if you don't, your PR will likely be
  neglected by accident!

### And beyond

A great place to look for a second issue is to look for issues with the same
`area:` label as the last issue you resolved. You'll be able to reuse the
work you did learning how that part of the codebase works. Also, the path to
becoming a core developer often involves taking ownership of one of these area
labels.

## What makes a great Zulip contributor?

Zulip has a lot of experience working with new contributors. In our
experience, these are the best predictors of success:

* Posting good questions. This generally means explaining your current
  understanding, saying what you've done or tried so far, and including
  tracebacks or other error messages if appropriate.
* Learning and practicing
  [Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/version-control.html#commit-discipline).
* Submitting carefully tested code. This generally means checking your work
  through a combination of automated tests and manually clicking around the
  UI trying to find bugs in your work. See
  [things to look for](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#things-to-look-for)
  for additional ideas.
* Posting
  [screenshots or GIFs](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html)
  for frontend changes.
* Being responsive to feedback on pull requests. This means incorporating or
  responding to all suggested changes, and leaving a note if you won't be
  able to address things within a few days.
* Being helpful and friendly on chat.zulip.org.

These are also the main criteria we use to select candidates for all
of our outreach programs.

## Reporting issues

If you find an easily reproducible bug and/or are experienced in reporting
bugs, feel free to just open an issue on the relevant project on GitHub.

If you have a feature request or are not yet sure what the underlying bug
is, the best place to post issues is
[#issues](https://chat.zulip.org/#narrow/stream/9-issues) (or
[#mobile](https://chat.zulip.org/#narrow/stream/48-mobile) or
[#desktop](https://chat.zulip.org/#narrow/stream/16-desktop)) on the
[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html).
This allows us to interactively figure out what is going on, let you know if
a similar issue has already been opened, and collect any other information
we need. Choose a 2-4 word topic that describes the issue, explain the issue
and how to reproduce it if known, your browser/OS if relevant, and a
[screenshot or screenGIF](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html)
if appropriate.

**Reporting security issues**. Please do not report security issues
publicly, including on public streams on chat.zulip.org. You can
email security@zulip.com. We create a CVE for every security
issue in our released software.

## User feedback

Nearly every feature we develop starts with a user request. If you are part
of a group that is either using or considering using Zulip, we would love to
hear about your experience with the product. If you're not sure what to
write, here are some questions we're always very curious to know the answer
to:

* Evaluation: What is the process by which your organization chose or will
  choose a group chat product?
* Pros and cons: What are the pros and cons of Zulip for your organization,
  and the pros and cons of other products you are evaluating?
* Features: What are the features that are most important for your
  organization? In the best-case scenario, what would your chat solution do
  for you?
* Onboarding: If you remember it, what was your impression during your first
  few minutes of using Zulip? What did you notice, and how did you feel? Was
  there anything that stood out to you as confusing, or broken, or great?
* Organization: What does your organization do? How big is the organization?
  A link to your organization's website?

## Outreach programs

Zulip participates in [Google Summer of Code
(GSoC)](https://developers.google.com/open-source/gsoc/) every year.
In the past, we've also participated in
[Outreachy](https://www.outreachy.org/), [Google
Code-In](https://developers.google.com/open-source/gci/), and hosted
summer interns from Harvard, MIT, and Stanford.

While each third-party program has its own rules and requirements, the
Zulip community approaches all of these programs with these ideas in
mind:
* We try to make the application process as valuable for the applicant as
  possible. Expect high-quality code reviews, a supportive community, and
  publicly viewable patches you can link to from your resume, regardless of
  whether you are selected.
* To apply, you'll have to submit at least one pull request to a Zulip
  repository. Most students accepted to one of our programs have
  several merged pull requests (including at least one larger PR) by
  the time of the application deadline.
* The main criteria we use are the quality of your best contributions, and
  the bullets listed at
  [What makes a great Zulip contributor](#what-makes-a-great-zulip-contributor).
  Because we focus on evaluating your best work, it doesn't hurt your
  application to make mistakes in your first few PRs as long as your
  work improves.

Most of our outreach program participants end up sticking around the
project long-term, and many have become core team members, maintaining
important parts of the project. We hope you apply!

### Google Summer of Code

The largest outreach program Zulip participates in is GSoC (14
students in 2017; 11 in 2018; 17 in 2019; 18 in 2020). While we don't control how
many slots Google allocates to Zulip, we hope to mentor a similar
number of students in future summers.

If you're reading this well before the application deadline and want
to make your application strong, we recommend getting involved in the
community and fixing issues in Zulip now. Having good contributions
and building a reputation for doing good work is the best way to have
a strong application. About half of Zulip's GSoC students for Summer
2017 had made significant contributions to the project by February
2017, and about half had not. Our
[GSoC project ideas page][gsoc-guide] has lots more details on how
Zulip does GSoC, as well as project ideas (though the project idea
list is maintained only during the GSoC application period, so if
you're looking at some other time of year, the project list is likely
out-of-date).

We also have in some past years run a Zulip Summer of Code (ZSoC)
program for students who we didn't have enough slots to accept for
GSoC but were able to find funding for. Student expectations are the
same as with GSoC, and it has no separate application process; your
GSoC application is your ZSoC application. If we'd like to select you
for ZSoC, we'll contact you when the GSoC results are announced.

[gsoc-guide]: https://zulip.readthedocs.io/en/latest/contributing/gsoc-ideas.html
[gsoc-faq]: https://developers.google.com/open-source/gsoc/faq

## Zulip outreach

**Upvoting Zulip**. Upvotes and reviews make a big difference in the public
perception of projects like Zulip. We've collected a few sites below
where we know Zulip has been discussed. Doing everything in the following
list typically takes about 15 minutes.
* Star us on GitHub. There are four main repositories:
  [server/web](https://github.com/zulip/zulip),
  [mobile](https://github.com/zulip/zulip-mobile),
  [desktop](https://github.com/zulip/zulip-desktop), and
  [Python API](https://github.com/zulip/python-zulip-api).
* [Follow us](https://twitter.com/zulip) on Twitter.

For both of the following, you'll need to make an account on the site if you
don't already have one.

* [Like Zulip](https://alternativeto.net/software/zulip-chat-server/) on
  AlternativeTo. We recommend upvoting a couple of other products you like
  as well, both to give back to their community, and since single-upvote
  accounts are generally given less weight. You can also
  [upvote Zulip](https://alternativeto.net/software/slack/) on their page
  for Slack.
* [Add Zulip to your stack](https://stackshare.io/zulip) on StackShare, star
  it, and upvote the reasons why people like Zulip that you find most
  compelling. Again, we recommend adding a few other products that you like
  as well.

We have a doc with more detailed instructions and a few other sites, if you
have been using Zulip for a while and want to contribute more.

**Blog posts**. Writing a blog post about your experiences with Zulip, or
about a technical aspect of Zulip can be a great way to spread the word
about Zulip.

We also occasionally [publish](https://blog.zulip.org/) long-form
articles related to Zulip. Our posts typically get tens of thousands
of views, and we always have good ideas for blog posts that we can
outline but don't have time to write. If you are an experienced writer
or copyeditor, send us a portfolio; we'd love to talk!
19
Dockerfile
Normal file
@@ -0,0 +1,19 @@
FROM ubuntu:trusty

EXPOSE 9991

RUN apt-get update && apt-get install -y \
    python-pbs \
    wget

RUN locale-gen en_US.UTF-8

RUN useradd -d /home/zulip -m zulip && echo 'zulip ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers

USER zulip

RUN ln -nsf /srv/zulip ~/zulip

RUN echo 'export LC_ALL="en_US.UTF-8" LANG="en_US.UTF-8" LANGUAGE="en_US.UTF-8"' >> ~zulip/.bashrc

WORKDIR /srv/zulip
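This development image only prepares an Ubuntu base with a passwordless-sudo `zulip` user; the source tree is expected to be mounted at /srv/zulip (hence the symlink and WORKDIR above), and port 9991 matches the development server port used in the Vagrantfile later in this diff. A hypothetical build-and-run invocation, with the image tag chosen here purely for illustration:

    docker build -t zulip-dev .
    docker run -it -p 9991:9991 -v "$PWD":/srv/zulip zulip-dev /bin/bash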
@@ -1,15 +0,0 @@
# To build run `docker build -f Dockerfile-postgresql .` from the root of the
# zulip repo.

# Currently the PostgreSQL images do not support automatic upgrading of
# the on-disk data in volumes. So the base image can not currently be upgraded
# without users needing a manual pgdump and restore.

# Install hunspell, Zulip stop words, and run Zulip database
# init.
FROM groonga/pgroonga:latest-alpine-10-slim
RUN apk add -U --no-cache hunspell-en
RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix
COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/local/share/postgresql/tsearch_data/zulip_english.stop
COPY scripts/setup/create-db.sql /docker-entrypoint-initdb.d/zulip-create-db.sql
COPY scripts/setup/create-pgroonga.sql /docker-entrypoint-initdb.d/zulip-create-pgroonga.sql
18
NOTICE
@@ -1,18 +0,0 @@
Copyright 2012–2015 Dropbox, Inc., 2015–2021 Kandra Labs, Inc., and contributors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this project except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

The software includes some works released by third parties under other
free and open source licenses. Those works are redistributed under the
license terms under which the works were received. For more details,
see the ``docs/THIRDPARTY`` file included with this distribution.
311
README.md
@@ -1,80 +1,263 @@
+**[Zulip overview](#zulip-overview)** |
+**[Community](#community)** |
+**[Installing for dev](#installing-the-zulip-development-environment)** |
+**[Installing for production](#running-zulip-in-production)** |
+**[Ways to contribute](#ways-to-contribute)** |
+**[How to get involved](#how-to-get-involved-with-contributing-to-zulip)** |
+**[License](#license)**
+
 # Zulip overview
 
-Zulip is a powerful, open source group chat application that combines the
-immediacy of real-time chat with the productivity benefits of threaded
-conversations. Zulip is used by open source projects, Fortune 500 companies,
-large standards bodies, and others who need a real-time chat system that
-allows users to easily process hundreds or thousands of messages a day. With
-over 700 contributors merging over 500 commits a month, Zulip is also the
-largest and fastest growing open source group chat project.
+Zulip is a powerful, open source group chat application. Written in
+Python and using the Django framework, Zulip supports both private
+messaging and group chats via conversation streams.
 
-[](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amaster)
-[](https://codecov.io/gh/zulip/zulip/branch/master)
-[][mypy-coverage]
-[](https://github.com/psf/black)
-[](https://github.com/prettier/prettier)
-[](https://github.com/zulip/zulip/releases/latest)
-[](https://zulip.readthedocs.io/en/latest/)
-[](https://chat.zulip.org)
-[](https://twitter.com/zulip)
-[](https://github.com/sponsors/zulip)
+Zulip also supports fast search, drag-and-drop file uploads, image
+previews, group private messages, audible notifications,
+missed-message emails, desktop apps, and much more.
 
-[mypy-coverage]: https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/
+Further information on the Zulip project and its features can be found
+at https://www.zulip.org.
 
-## Getting started
+[](https://travis-ci.org/zulip/zulip) [](https://coveralls.io/github/zulip/zulip?branch=master) [](http://zulip.readthedocs.io/en/latest/) [](https://chat.zulip.org)
 
-Click on the appropriate link below. If nothing seems to apply,
-join us on the
-[Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html)
-and tell us what's up!
+## Community
 
-You might be interested in:
+There are several places online where folks discuss Zulip.
 
-* **Contributing code**. Check out our
-  [guide for new contributors](https://zulip.readthedocs.io/en/latest/overview/contributing.html)
-  to get started. Zulip prides itself on maintaining a clean and
-  well-tested codebase, and a stock of hundreds of
-  [beginner-friendly issues][beginner-friendly].
+One of those places is our [public Zulip instance](https://chat.zulip.org/).
+You can go through the simple signup process at that link, and then you
+will soon be talking to core Zulip developers and other users. To get
+help in real time, you will have the best luck finding core developers
+roughly between 16:00 UTC and 23:59 UTC. Most questions get a reply
+within minutes to a few hours, depending on time of day.
 
-* **Contributing non-code**.
-  [Report an issue](https://zulip.readthedocs.io/en/latest/overview/contributing.html#reporting-issues),
-  [translate](https://zulip.readthedocs.io/en/latest/translating/translating.html) Zulip
-  into your language,
-  [write](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach)
-  for the Zulip blog, or
-  [give us feedback](https://zulip.readthedocs.io/en/latest/overview/contributing.html#user-feedback). We
-  would love to hear from you, even if you're just trying the product out.
+For Google Summer of Code students and applicants, we have [a mailing
+list](https://groups.google.com/forum/#!forum/zulip-gsoc) for help,
+questions, and announcements.
 
-* **Supporting Zulip**. Advocate for your organization to use Zulip, become a [sponsor](https://github.com/sponsors/zulip), write a
-  review in the mobile app stores, or
-  [upvote Zulip](https://zulip.readthedocs.io/en/latest/overview/contributing.html#zulip-outreach) on
-  product comparison sites.
+We have
+[a public mailing list](https://groups.google.com/forum/#!forum/zulip-devel)
+that is currently pretty low traffic because most discussions happen
+in our public Zulip instance. We use it to announce Zulip developer
+community gatherings and ask for feedback on major technical or design
+decisions. It has several hundred subscribers, so you can use it to
+ask questions about features or possible bugs, but please don't use it
+to ask for generic help getting started as a contributor (e.g. because
+you want to do Google Summer of Code). The rest of this page covers
+how to get involved in the Zulip project in detail.
 
-* **Checking Zulip out**. The best way to see Zulip in action is to drop by
-  the
-  [Zulip community server](https://zulip.readthedocs.io/en/latest/contributing/chat-zulip-org.html). We
-  also recommend reading Zulip for
-  [open source](https://zulip.com/for/open-source/), Zulip for
-  [companies](https://zulip.com/for/companies/), or Zulip for
-  [working groups and part time communities](https://zulip.com/for/working-groups-and-communities/).
+Zulip also has a [blog](https://blog.zulip.org/).
 
-* **Running a Zulip server**. Use a preconfigured [DigitalOcean droplet](https://marketplace.digitalocean.com/apps/zulip),
-  [install Zulip](https://zulip.readthedocs.io/en/stable/production/install.html)
-  directly, or use Zulip's
-  experimental [Docker image](https://zulip.readthedocs.io/en/latest/production/deployment.html#zulip-in-docker).
-  Commercial support is available; see <https://zulip.com/plans> for details.
+Last but not least, we use [GitHub](https://github.com/zulip/zulip) to
+track Zulip-related issues (and store our code, of course).
+Anybody with a GitHub account should be able to create Issues there
+pertaining to bugs or enhancement requests. We also use Pull
+Requests as our primary mechanism to receive code contributions.
 
-* **Using Zulip without setting up a server**. <https://zulip.com>
-  offers free and commercial hosting, including providing our paid
-  plan for free to fellow open source projects.
+The Zulip community has a [Code of Conduct][code-of-conduct].
 
-* **Participating in [outreach
-  programs](https://zulip.readthedocs.io/en/latest/overview/contributing.html#outreach-programs)**
-  like Google Summer of Code.
+## Installing the Zulip Development environment
 
-You may also be interested in reading our [blog](https://blog.zulip.org/) or
-following us on [Twitter](https://twitter.com/zulip).
-Zulip is distributed under the
-[Apache 2.0](https://github.com/zulip/zulip/blob/master/LICENSE) license.
+The Zulip development environment is the recommended option for folks
+interested in trying out Zulip. This is documented in [the developer
+installation guide][dev-install].
 
-[beginner-friendly]: https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22
+## Running Zulip in production
+
+Zulip in production supports Ubuntu 14.04 Trusty and Ubuntu 16.04
+Xenial. Work is ongoing on adding support for additional
+platforms. The installation process is documented at
+https://zulip.org/server.html and in more detail in [the
+documentation](https://zulip.readthedocs.io/en/latest/prod-install.html).
+
+## Ways to contribute
+
+Zulip welcomes all forms of contributions! The page documents the
+Zulip development process.
+
+* **Pull requests**. Before a pull request can be merged, you need to
+  sign the [Dropbox Contributor License Agreement][cla]. Also,
+  please skim our [commit message style guidelines][doc-commit-style].
+
+* **Testing**. The Zulip automated tests all run automatically when
+  you submit a pull request, but you can also run them all in your
+  development environment following the instructions in the [testing
+  docs][doc-test]. You can also try out [our new desktop
+  client][electron], which is in alpha; we'd appreciate testing and
+  [feedback](https://github.com/zulip/zulip-electron/issues/new).
+
+* **Developer Documentation**. Zulip has a growing collection of
+  developer documentation on [Read The Docs][doc]. Recommended reading
+  for new contributors includes the [directory structure][doc-dirstruct]
+  and [new feature tutorial][doc-newfeat]. You can also improve
+  [Zulip.org][z-org].
+
+* **Mailing lists and bug tracker**. Zulip has a [development
+  discussion mailing list](#community) and uses [GitHub issues
+  ][gh-issues]. There are also lists for the [Android][email-android]
+  and [iOS][email-ios] apps. Feel free to send any questions or
+  suggestions of areas where you'd love to see more documentation to the
+  relevant list! Please report any security issues you discover to
+  zulip-security@googlegroups.com.
+
+* **App codebases**. This repository is for the Zulip server and web
+  app (including most integrations); the [desktop][], [Android][], and
+  [iOS][] apps are separate repositories, as are our [experimental
+  React Native iOS app][ios-exp] and [alpha Electron desktop
+  app][electron].
+
+* **Glue code**. We maintain a [Hubot adapter][hubot-adapter] and several
+  integrations ([Phabricator][phab], [Jenkins][], [Puppet][], [Redmine][],
+  and [Trello][]), plus [node.js API bindings][node], an [isomorphic
+  JavaScript library][zulip-js], and a [full-text search PostgreSQL
+  extension][tsearch], as separate repos.
+
+* **Translations**. Zulip is in the process of being translated into
+  10+ languages, and we love contributions to our translations. See our
+  [translating documentation][transifex] if you're interested in
+  contributing!
+
+[cla]: https://opensource.dropbox.com/cla/
+[code-of-conduct]: https://zulip.readthedocs.io/en/latest/code-of-conduct.html
+[dev-install]: https://zulip.readthedocs.io/en/latest/dev-overview.html
+[doc]: https://zulip.readthedocs.io/
+[doc-commit-style]: http://zulip.readthedocs.io/en/latest/version-control.html#commit-messages
+[doc-dirstruct]: http://zulip.readthedocs.io/en/latest/directory-structure.html
+[doc-newfeat]: http://zulip.readthedocs.io/en/latest/new-feature-tutorial.html
+[doc-test]: http://zulip.readthedocs.io/en/latest/testing.html
+[electron]: https://github.com/zulip/zulip-electron
+[gh-issues]: https://github.com/zulip/zulip/issues
+[desktop]: https://github.com/zulip/zulip-desktop
+[android]: https://github.com/zulip/zulip-android
+[ios]: https://github.com/zulip/zulip-ios
+[ios-exp]: https://github.com/zulip/zulip-mobile
+[email-android]: https://groups.google.com/forum/#!forum/zulip-android
+[email-ios]: https://groups.google.com/forum/#!forum/zulip-ios
+[hubot-adapter]: https://github.com/zulip/hubot-zulip
+[jenkins]: https://github.com/zulip/zulip-jenkins-plugin
+[node]: https://github.com/zulip/zulip-node
+[zulip-js]: https://github.com/zulip/zulip-js
+[phab]: https://github.com/zulip/phabricator-to-zulip
+[puppet]: https://github.com/matthewbarr/puppet-zulip
+[redmine]: https://github.com/zulip/zulip-redmine-plugin
+[trello]: https://github.com/zulip/trello-to-zulip
+[tsearch]: https://github.com/zulip/tsearch_extras
+[transifex]: https://zulip.readthedocs.io/en/latest/translating.html#testing-translations
+[z-org]: https://github.com/zulip/zulip.github.io
+
+## Google Summer of Code
+
+We participated in
+[GSoC](https://developers.google.com/open-source/gsoc/) last year and
+hope to do so again in 2017. For guidance, please read
+[our GSoC instructions and ideas page](https://github.com/zulip/zulip.github.io/blob/master/gsoc-ideas.md)
+and feel free to email
+[our GSoC mailing list](https://groups.google.com/forum/#!forum/zulip-gsoc).
+
+## How to get involved with contributing to Zulip
+
+First, subscribe to the Zulip [development discussion mailing
+list](#community).
+
+The Zulip project uses a system of labels in our [issue
+tracker][gh-issues] to make it easy to find a project if you don't
+have your own project idea in mind or want to get some experience with
+working on Zulip before embarking on a larger project you have in
+mind:
+
+* [Integrations](https://github.com/zulip/zulip/labels/area%3A%20integrations).
+  Integrate Zulip with another piece of software and contribute it
+  back to the community! Writing an integration can be a great first
+  contribution. There's detailed documentation on how to write
+  integrations in [the Zulip integration writing
+  guide](https://zulip.readthedocs.io/en/latest/integration-guide.html).
+
+* [Bite Size](https://github.com/zulip/zulip/labels/bite%20size):
+  Smaller projects that might be a great first contribution.
+
+* [Documentation](https://github.com/zulip/zulip/labels/area%3A%20documentation):
+  The Zulip project loves contributions of new documentation.
+
+* [Help Wanted](https://github.com/zulip/zulip/labels/help%20wanted):
+  A broader list of projects that nobody is currently working on.
+
+* [Platform support](https://github.com/zulip/zulip/labels/Platform%20support):
+  These are open issues about making it possible to install Zulip on a
+  wider range of platforms.
+
+* [Bugs](https://github.com/zulip/zulip/labels/bug): Open bugs.
+
+* [Feature requests](https://github.com/zulip/zulip/labels/enhancement):
+  Browsing this list can be a great way to find feature ideas to
+  implement that other Zulip users are excited about.
+
+* [2016 roadmap milestone](http://zulip.readthedocs.io/en/latest/roadmap.html):
+  The projects that are
+  [priorities for the Zulip project](https://zulip.readthedocs.io/en/latest/roadmap.html).
+  These are great projects if you're looking to make an impact.
+
+Another way to find issues in Zulip is to take advantage of our
+"area:<foo>" convention in separating out issues. We partition all of
+our issues into areas like admin, compose, emoji, hotkeys, i18n,
+onboarding, search, etc. You can see this here:
+
+[https://github.com/zulip/zulip/labels]
+
+Click on any of the "area:" labels and you will see all the tickets
+related to your area of interest.
+
+If you're excited about helping with an open issue, just post on the
+conversation thread that you're working on it. You're encouraged to
+ask questions on how to best implement or debug your changes -- the
+Zulip maintainers are excited to answer questions to help you stay
+unblocked and working efficiently.
+
+We also welcome suggestions of features that you feel would be
+valuable or changes that you feel would make Zulip a better open
+source project, and are happy to support you in adding new features or
+other user experience improvements to Zulip.
+
+If you have a new feature you'd like to add, we recommend you start by
+opening a GitHub issue about the feature idea explaining the problem
+that you're hoping to solve and that you're excited to work on it. A
+Zulip maintainer will usually reply within a day with feedback on the
+idea, notes on any important issues or concerns, and often tips on
+how to implement or test it. Please feel free to ping the thread if
+you don't hear a response from the maintainers -- we try to be very
+responsive so this usually means we missed your message.
+
+For significant changes to the visual design, user experience, data
+model, or architecture, we highly recommend posting a mockup,
+screenshot, or description of what you have in mind to zulip-devel@ to
+get broad feedback before you spend too much time on implementation
+details.
+
+Finally, before implementing a larger feature, we highly recommend
+looking at the new feature tutorial and coding style guidelines on
+ReadTheDocs.
+
+Feedback on how to make this development process more efficient, fun,
+and friendly to new contributors is very welcome! Just send an email
+to the Zulip Developers list with your thoughts.
+
+## License
+
+Copyright 2011-2016 Dropbox, Inc. and contributors
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+The software includes some works released by third parties under other
+free and open source licenses. Those works are redistributed under the
+license terms under which the works were received. For more details,
+see the ``docs/THIRDPARTY`` file included with this distribution.
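The "Installing the Zulip Development environment" section above defers to the developer installation guide; the Vagrantfile later in this diff is what that guide drives. A rough sketch of that flow, assuming Vagrant plus a supported provider are installed (the run-dev step is taken from the development docs, not from this diff):

    vagrant up      # build and provision the development VM/container
    vagrant ssh     # the source tree is mounted at /srv/zulip inside the guest
    # inside the guest:
    cd /srv/zulip && ./tools/run-dev.py   # serves the dev server on port 9991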
28
SECURITY.md
@@ -1,28 +0,0 @@
# Security policy

Security announcements are sent to zulip-announce@googlegroups.com,
so you should subscribe if you are running Zulip in production.

## Reporting a vulnerability

We love responsible reports of (potential) security issues in Zulip,
whether in the latest release or our development branch.

Our security contact is security@zulip.com. Reporters should expect a
response within 24 hours.

Please include details on the issue and how you'd like to be credited
in our release notes when we publish the fix.

Our [security
model](https://zulip.readthedocs.io/en/latest/production/security-model.html)
document may be a helpful resource.

## Supported versions

Zulip provides security support for the latest major release, in the
form of minor security/maintenance releases.

We work hard to make
[upgrades](https://zulip.readthedocs.io/en/latest/production/upgrade-or-modify.html#upgrading-to-a-release)
reliable, so that there's no reason to run older major releases.
189
Vagrantfile
vendored
189
Vagrantfile
vendored
@@ -7,10 +7,10 @@ def command?(name)
|
|||||||
$?.success?
|
$?.success?
|
||||||
end
|
end
|
||||||
|
|
||||||
if Vagrant::VERSION == "1.8.7"
|
if Vagrant::VERSION == "1.8.7" then
|
||||||
path = `which curl`
|
path = `which curl`
|
||||||
if path.include?("/opt/vagrant/embedded/bin/curl")
|
if path.include?('/opt/vagrant/embedded/bin/curl') then
|
||||||
puts "In Vagrant 1.8.7, curl is broken. Please use Vagrant 2.0.2 " \
|
puts "In Vagrant 1.8.7, curl is broken. Please use Vagrant 1.8.6 "\
|
||||||
"or run 'sudo rm -f /opt/vagrant/embedded/bin/curl' to fix the "\
|
"or run 'sudo rm -f /opt/vagrant/embedded/bin/curl' to fix the "\
|
||||||
"issue before provisioning. See "\
|
"issue before provisioning. See "\
|
||||||
"https://github.com/mitchellh/vagrant/issues/7997 "\
|
"https://github.com/mitchellh/vagrant/issues/7997 "\
|
||||||
@@ -19,187 +19,88 @@ if Vagrant::VERSION == "1.8.7"
|
|||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
# Workaround: Vagrant removed the atlas.hashicorp.com to
|
|
||||||
# vagrantcloud.com redirect in February 2018. The value of
|
|
||||||
# DEFAULT_SERVER_URL in Vagrant versions less than 1.9.3 is
|
|
||||||
# atlas.hashicorp.com, which means that removal broke the fetching and
|
|
||||||
# updating of boxes (since the old URL doesn't work). See
|
|
||||||
# https://github.com/hashicorp/vagrant/issues/9442
|
|
||||||
if Vagrant::DEFAULT_SERVER_URL == "atlas.hashicorp.com"
|
|
||||||
Vagrant::DEFAULT_SERVER_URL.replace("https://vagrantcloud.com")
|
|
||||||
end
|
|
||||||
|
|
||||||
# Monkey patch https://github.com/hashicorp/vagrant/pull/10879 so we
|
|
||||||
# can fall back to another provider if docker is not installed.
|
|
||||||
begin
|
|
||||||
require Vagrant.source_root.join("plugins", "providers", "docker", "provider")
|
|
||||||
rescue LoadError
|
|
||||||
else
|
|
||||||
VagrantPlugins::DockerProvider::Provider.class_eval do
|
|
||||||
method(:usable?).owner == singleton_class or def self.usable?(raise_error = false)
|
|
||||||
VagrantPlugins::DockerProvider::Driver.new.execute("docker", "version")
|
|
||||||
true
|
|
||||||
rescue Vagrant::Errors::CommandUnavailable, VagrantPlugins::DockerProvider::Errors::ExecuteError
|
|
||||||
raise if raise_error
|
|
||||||
return false
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
|
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
|
||||||
|
|
||||||
|
# For LXC. VirtualBox hosts use a different box, described below.
|
||||||
|
config.vm.box = "fgrehm/trusty64-lxc"
|
||||||
|
|
||||||
# The Zulip development environment runs on 9991 on the guest.
|
# The Zulip development environment runs on 9991 on the guest.
|
||||||
host_port = 9991
|
host_port = 9991
|
||||||
http_proxy = https_proxy = no_proxy = nil
|
http_proxy = https_proxy = no_proxy = ""
|
||||||
host_ip_addr = "127.0.0.1"
|
host_ip_addr = "127.0.0.1"
|
||||||
|
|
||||||
# System settings for the virtual machine.
|
|
||||||
vm_num_cpus = "2"
|
|
||||||
vm_memory = "2048"
|
|
||||||
|
|
||||||
ubuntu_mirror = ""
|
|
||||||
vboxadd_version = nil
|
|
||||||
|
|
||||||
config.vm.synced_folder ".", "/vagrant", disabled: true
|
config.vm.synced_folder ".", "/vagrant", disabled: true
|
||||||
config.vm.synced_folder ".", "/srv/zulip"
|
config.vm.synced_folder ".", "/srv/zulip"
|
||||||
|
|
||||||
vagrant_config_file = ENV["HOME"] + "/.zulip-vagrant-config"
|
vagrant_config_file = ENV['HOME'] + "/.zulip-vagrant-config"
|
||||||
if File.file?(vagrant_config_file)
|
if File.file?(vagrant_config_file)
|
||||||
IO.foreach(vagrant_config_file) do |line|
|
IO.foreach(vagrant_config_file) do |line|
|
||||||
line.chomp!
|
line.chomp!
|
||||||
key, value = line.split(nil, 2)
|
key, value = line.split(nil, 2)
|
||||||
case key
|
case key
|
||||||
when /^([#;]|$)/ # ignore comments
|
when /^([#;]|$)/; # ignore comments
|
||||||
when "HTTP_PROXY"; http_proxy = value
|
when "HTTP_PROXY"; http_proxy = value
|
||||||
when "HTTPS_PROXY"; https_proxy = value
|
when "HTTPS_PROXY"; https_proxy = value
|
||||||
when "NO_PROXY"; no_proxy = value
|
when "NO_PROXY"; no_proxy = value
|
||||||
when "HOST_PORT"; host_port = value.to_i
|
when "HOST_PORT"; host_port = value.to_i
|
||||||
when "HOST_IP_ADDR"; host_ip_addr = value
|
when "HOST_IP_ADDR"; host_ip_addr = value
|
||||||
when "GUEST_CPUS"; vm_num_cpus = value
|
|
||||||
when "GUEST_MEMORY_MB"; vm_memory = value
|
|
||||||
when "UBUNTU_MIRROR"; ubuntu_mirror = value
|
|
||||||
when "VBOXADD_VERSION"; vboxadd_version = value
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
if Vagrant.has_plugin?("vagrant-proxyconf")
|
|
||||||
if !http_proxy.nil?
|
|
||||||
config.proxy.http = http_proxy
|
|
||||||
end
|
|
||||||
if !https_proxy.nil?
|
|
||||||
config.proxy.https = https_proxy
|
|
||||||
end
|
|
||||||
if !no_proxy.nil?
|
|
||||||
config.proxy.no_proxy = no_proxy
|
|
||||||
end
|
|
||||||
elsif !http_proxy.nil? or !https_proxy.nil?
|
|
||||||
# This prints twice due to https://github.com/hashicorp/vagrant/issues/7504
|
|
||||||
# We haven't figured out a workaround.
|
|
||||||
puts "You have specified value for proxy in ~/.zulip-vagrant-config file but did not " \
|
|
||||||
"install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install " \
|
|
||||||
"vagrant-proxyconf` in a terminal. This error will appear twice."
|
|
||||||
exit
|
|
||||||
end
|
|
||||||
|
|
||||||
config.vm.network "forwarded_port", guest: 9991, host: host_port, host_ip: host_ip_addr
|
config.vm.network "forwarded_port", guest: 9991, host: host_port, host_ip: host_ip_addr
|
||||||
config.vm.network "forwarded_port", guest: 9994, host: host_port + 3, host_ip: host_ip_addr
|
|
||||||
# Specify Docker provider before VirtualBox provider so it's preferred.
|
if Vagrant.has_plugin?("vagrant-proxyconf")
|
||||||
config.vm.provider "docker" do |d, override|
|
if http_proxy != ""
|
||||||
d.build_dir = File.join(__dir__, "tools", "setup", "dev-vagrant-docker")
|
config.proxy.http = http_proxy
|
||||||
d.build_args = ["--build-arg", "VAGRANT_UID=#{Process.uid}"]
|
end
|
||||||
if !ubuntu_mirror.empty?
|
if https_proxy != ""
|
||||||
d.build_args += ["--build-arg", "UBUNTU_MIRROR=#{ubuntu_mirror}"]
|
config.proxy.https = https_proxy
|
||||||
|
end
|
||||||
|
if https_proxy != ""
|
||||||
|
config.proxy.no_proxy = no_proxy
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Specify LXC provider before VirtualBox provider so it's preferred.
|
||||||
|
config.vm.provider "lxc" do |lxc|
|
||||||
|
if command? "lxc-ls"
|
||||||
|
LXC_VERSION = `lxc-ls --version`.strip unless defined? LXC_VERSION
|
||||||
|
if LXC_VERSION >= "1.1.0"
|
||||||
|
# Allow start without AppArmor, otherwise Box will not Start on Ubuntu 14.10
|
||||||
|
# see https://github.com/fgrehm/vagrant-lxc/issues/333
|
||||||
|
lxc.customize 'aa_allow_incomplete', 1
|
||||||
|
end
|
||||||
|
if LXC_VERSION >= "2.0.0"
|
||||||
|
lxc.backingstore = 'dir'
|
||||||
|
end
|
||||||
end
|
end
|
||||||
d.has_ssh = true
|
|
||||||
d.create_args = ["--ulimit", "nofile=1024:65536"]
|
|
||||||
end
|
end
|
||||||
|
|
||||||
config.vm.provider "virtualbox" do |vb, override|
|
config.vm.provider "virtualbox" do |vb, override|
|
||||||
override.vm.box = "hashicorp/bionic64"
|
override.vm.box = "ubuntu/trusty64"
|
||||||
# It's possible we can get away with just 1.5GB; more testing needed
|
# It's possible we can get away with just 1.5GB; more testing needed
|
||||||
vb.memory = vm_memory
|
vb.memory = 2048
|
||||||
vb.cpus = vm_num_cpus
|
vb.cpus = 2
|
||||||
|
|
||||||
if !vboxadd_version.nil?
|
|
||||||
override.vbguest.installer = Class.new(VagrantVbguest::Installers::Ubuntu) do
|
|
||||||
define_method(:host_version) do |reload = false|
|
|
||||||
VagrantVbguest::Version(vboxadd_version)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
override.vbguest.allow_downgrade = true
|
|
||||||
override.vbguest.iso_path = "https://download.virtualbox.org/virtualbox/#{vboxadd_version}/VBoxGuestAdditions_#{vboxadd_version}.iso"
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
||||||
config.vm.provider "hyperv" do |h, override|
|
config.vm.provider "vmware_fusion" do |vb, override|
|
||||||
override.vm.box = "bento/ubuntu-18.04"
|
override.vm.box = "puphpet/ubuntu1404-x64"
|
||||||
h.memory = vm_memory
|
vb.vmx["memsize"] = "2048"
|
||||||
h.maxmemory = vm_memory
|
vb.vmx["numvcpus"] = "2"
|
||||||
h.cpus = vm_num_cpus
|
|
||||||
end
|
|
||||||
|
|
||||||
config.vm.provider "parallels" do |prl, override|
|
|
||||||
override.vm.box = "bento/ubuntu-18.04"
|
|
||||||
override.vm.box_version = "202005.21.0"
|
|
||||||
prl.memory = vm_memory
|
|
||||||
prl.cpus = vm_num_cpus
|
|
||||||
end
|
end
|
||||||
|
|
||||||
$provision_script = <<SCRIPT
|
$provision_script = <<SCRIPT
|
||||||
set -x
|
set -x
|
||||||
set -e
|
set -e
|
||||||
set -o pipefail
|
set -o pipefail
|
||||||
|
# If the host is running SELinux remount the /sys/fs/selinux directory as read only,
|
||||||
# Code should go here, rather than tools/provision, only if it is
|
# needed for apt-get to work.
|
||||||
# something that we don't want to happen when running provision in a
|
if [ -d "/sys/fs/selinux" ]; then
|
||||||
# development environment not using Vagrant.
|
sudo mount -o remount,ro /sys/fs/selinux
|
||||||
|
|
||||||
# Set the Ubuntu mirror
|
|
||||||
[ ! '#{ubuntu_mirror}' ] || sudo sed -i 's|http://\\(\\w*\\.\\)*archive\\.ubuntu\\.com/ubuntu/\\? |#{ubuntu_mirror} |' /etc/apt/sources.list
|
|
||||||
|
|
||||||
# Set the MOTD on the system to have Zulip instructions
|
|
||||||
sudo ln -nsf /srv/zulip/tools/setup/dev-motd /etc/update-motd.d/99-zulip-dev
|
|
||||||
sudo rm -f /etc/update-motd.d/10-help-text
|
|
||||||
sudo dpkg --purge landscape-client landscape-common ubuntu-release-upgrader-core update-manager-core update-notifier-common ubuntu-server
|
|
||||||
sudo dpkg-divert --add --rename /etc/default/motd-news
|
|
||||||
sudo sh -c 'echo ENABLED=0 > /etc/default/motd-news'
|
|
||||||
|
|
||||||
# Set default locale, this prevents errors if the user has another locale set.
|
|
||||||
if ! grep -q 'LC_ALL=en_US.UTF-8' /etc/default/locale; then
|
|
||||||
echo "LC_ALL=en_US.UTF-8" | sudo tee -a /etc/default/locale
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Set an environment variable, so that we won't print the virtualenv
|
|
||||||
# shell warning (it'll be wrong, since the shell is dying anyway)
|
|
||||||
export SKIP_VENV_SHELL_WARNING=1
|
|
||||||
|
|
||||||
# End `set -x`, so that the end of provision doesn't look like an error
|
|
||||||
# message after a successful run.
|
|
||||||
set +x
|
|
||||||
|
|
||||||
# Check if the zulip directory is writable
|
|
||||||
if [ ! -w /srv/zulip ]; then
|
|
||||||
echo "The vagrant user is unable to write to the zulip directory."
|
|
||||||
echo "To fix this, run the following commands on the host machine:"
|
|
||||||
# sudo is required since our uid is not 1000
|
|
||||||
echo ' vagrant halt -f'
|
|
||||||
echo ' rm -rf /PATH/TO/ZULIP/CLONE/.vagrant'
|
|
||||||
echo ' sudo chown -R 1000:$(id -g) /PATH/TO/ZULIP/CLONE'
|
|
||||||
echo "Replace /PATH/TO/ZULIP/CLONE with the path to where zulip code is cloned."
|
|
||||||
echo "You can resume setting up your vagrant environment by running:"
|
|
||||||
echo " vagrant up"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
# Provision the development environment
|
|
||||||
ln -nsf /srv/zulip ~/zulip
|
ln -nsf /srv/zulip ~/zulip
|
||||||
/srv/zulip/tools/provision
|
/srv/zulip/tools/provision
|
||||||
|
|
||||||
# Run any custom provision hooks the user has configured
|
|
||||||
if [ -f /srv/zulip/tools/custom_provision ]; then
|
|
||||||
chmod +x /srv/zulip/tools/custom_provision
|
|
||||||
/srv/zulip/tools/custom_provision
|
|
||||||
fi
|
|
||||||
SCRIPT
|
SCRIPT
|
||||||
|
|
||||||
config.vm.provision "shell",
|
config.vm.provision "shell",
|
||||||
|
|||||||
(File diff suppressed because it is too large.)
@@ -1,22 +1,20 @@  (time-series fixture generator; the 4.0-rc1-br version is shown, 1.5.x differences are summarized below)

from math import sqrt
from random import gauss, random, seed
from typing import List

from analytics.lib.counts import CountStat


def generate_time_series_data(
    days: int = 100,
    business_hours_base: float = 10,
    non_business_hours_base: float = 10,
    growth: float = 1,
    autocorrelation: float = 0,
    spikiness: float = 1,
    holiday_rate: float = 0,
    frequency: str = CountStat.DAY,
    partial_sum: bool = False,
    random_seed: int = 26,
) -> List[int]:
    """
    Generate semi-realistic looking time series data for testing analytics graphs.

@@ -33,7 +31,7 @@

    the variance.
    holiday_rate -- Fraction of days randomly set to 0, largely for testing how we handle 0s.
    frequency -- Should be CountStat.HOUR or CountStat.DAY.
    partial_sum -- If True, return partial sum of the series.
    random_seed -- Seed for random number generator.
    """
    if frequency == CountStat.HOUR:

@@ -47,33 +45,25 @@

        holidays.extend([random() < holiday_rate] * 24)
    elif frequency == CountStat.DAY:
        length = days
        seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [
            24 * non_business_hours_base
        ] * 2
        holidays = [random() < holiday_rate for i in range(days)]
    else:
        raise AssertionError(f"Unknown frequency: {frequency}")
    if length < 2:
        raise AssertionError(
            f"Must be generating at least 2 data points. Currently generating {length}"
        )
    growth_base = growth ** (1.0 / (length - 1))
    values_no_noise = [
        seasonality[i % len(seasonality)] * (growth_base ** i) for i in range(length)
    ]

    seed(random_seed)
    noise_scalars = [gauss(0, 1)]
    for i in range(1, length):
        noise_scalars.append(
            noise_scalars[-1] * autocorrelation + gauss(0, 1) * (1 - autocorrelation)
        )

    values = [
        0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
        for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)
    ]
    if partial_sum:
        for i in range(1, length):
            values[i] = values[i - 1] + values[i]
    return [max(v, 0) for v in values]

(On the 1.5.x side the same function uses Python 2-era conventions: `from __future__` imports, `six.moves` range/zip, extra imports of the zerver/analytics models and `time_range`, a `# type:` comment instead of annotations, an `is_gauge` parameter in place of `partial_sum`, and `ValueError` instead of `AssertionError` for both error cases.)
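As a quick illustration of how this generator is typically driven, here is a sketch using the 4.0-rc1-br signature; the argument values are made up:

# Sketch only: 30 days of daily, cumulative, fairly spiky data.
from analytics.lib.counts import CountStat
from analytics.lib.fixtures import generate_time_series_data

values = generate_time_series_data(
    days=30,
    business_hours_base=0.8,
    non_business_hours_base=0.08,
    growth=3,
    autocorrelation=0.3,
    spikiness=6,
    frequency=CountStat.DAY,
    partial_sum=True,  # cumulative series; the 1.5.x signature calls this is_gauge
)
assert len(values) == 30             # one value per day
assert all(v >= 0 for v in values)   # negative noise is clamped to zero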
@@ -1,19 +1,15 @@  (time_range helper; the 4.0-rc1-br version is shown, 1.5.x differences are summarized below)

from datetime import datetime, timedelta
from typing import List, Optional

from analytics.lib.counts import CountStat
from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC


# If min_length is None, returns end_times from ceiling(start) to floor(end), inclusive.
# If min_length is greater than 0, pads the list to the left.
# So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22],
# and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
def time_range(
    start: datetime, end: datetime, frequency: str, min_length: Optional[int]
) -> List[datetime]:
    verify_UTC(start)
    verify_UTC(end)
    if frequency == CountStat.HOUR:
        end = floor_to_hour(end)
        step = timedelta(hours=1)

@@ -21,7 +17,7 @@

        end = floor_to_day(end)
        step = timedelta(days=1)
    else:
        raise AssertionError(f"Unknown frequency: {frequency}")

    times = []
    if min_length is not None:

(On the 1.5.x side the imports come straight from zerver.lib.timestamp (floor_to_hour, floor_to_day, timestamp_to_datetime), the signature uses a `# type:` comment, there are no verify_UTC calls, and the unknown-frequency case raises ValueError.)
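A small sketch of the padding behaviour described in the comment above (the dates are illustrative; they must be UTC-aware datetimes, per verify_UTC on the 4.0-rc1-br side):

from datetime import datetime, timezone

from analytics.lib.counts import CountStat
from analytics.lib.time_utils import time_range

start = datetime(2020, 9, 20, tzinfo=timezone.utc)
end = datetime(2020, 9, 22, tzinfo=timezone.utc)

# No padding: [Sep 20, Sep 21, Sep 22]
assert len(time_range(start, end, CountStat.DAY, None)) == 3
# Padded on the left to at least 5 entries: [Sep 18 .. Sep 22]
assert len(time_range(start, end, CountStat.DAY, 5)) == 5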
analytics/management/commands/active_user_stats.py (new file on the 1.5.x side, 60 lines)
@@ -0,0 +1,60 @@

from __future__ import absolute_import
from __future__ import print_function

from django.core.management.base import BaseCommand
from typing import Any

from zerver.models import UserPresence, UserActivity
from zerver.lib.utils import statsd, statsd_key

from datetime import datetime, timedelta
from collections import defaultdict

class Command(BaseCommand):
    help = """Sends active user statistics to statsd.

Run as a cron job that runs every 10 minutes."""

    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        # Get list of all active users in the last 1 week
        cutoff = datetime.now() - timedelta(minutes=30, hours=168)

        users = UserPresence.objects.select_related().filter(timestamp__gt=cutoff)

        # Calculate 10min, 2hrs, 12hrs, 1day, 2 business days (TODO business days), 1 week bucket of stats
        hour_buckets = [0.16, 2, 12, 24, 48, 168]
        user_info = defaultdict(dict)  # type: Dict[str, Dict[float, List[str]]]

        for last_presence in users:
            if last_presence.status == UserPresence.IDLE:
                known_active = last_presence.timestamp - timedelta(minutes=30)
            else:
                known_active = last_presence.timestamp

            for bucket in hour_buckets:
                if bucket not in user_info[last_presence.user_profile.realm.string_id]:
                    user_info[last_presence.user_profile.realm.string_id][bucket] = []
                if datetime.now(known_active.tzinfo) - known_active < timedelta(hours=bucket):
                    user_info[last_presence.user_profile.realm.string_id][bucket].append(last_presence.user_profile.email)

        for realm, buckets in user_info.items():
            print("Realm %s" % (realm,))
            for hr, users in sorted(buckets.items()):
                print("\tUsers for %s: %s" % (hr, len(users)))
                statsd.gauge("users.active.%s.%shr" % (statsd_key(realm, True), statsd_key(hr, True)), len(users))

        # Also do stats for how many users have been reading the app.
        users_reading = UserActivity.objects.select_related().filter(query="/json/messages/flags")
        user_info = defaultdict(dict)
        for activity in users_reading:
            for bucket in hour_buckets:
                if bucket not in user_info[activity.user_profile.realm.string_id]:
                    user_info[activity.user_profile.realm.string_id][bucket] = []
                if datetime.now(activity.last_visit.tzinfo) - activity.last_visit < timedelta(hours=bucket):
                    user_info[activity.user_profile.realm.string_id][bucket].append(activity.user_profile.email)
        for realm, buckets in user_info.items():
            print("Realm %s" % (realm,))
            for hr, users in sorted(buckets.items()):
                print("\tUsers reading for %s: %s" % (hr, len(users)))
                statsd.gauge("users.reading.%s.%shr" % (statsd_key(realm, True), statsd_key(hr, True)), len(users))
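The resulting statsd gauges follow the users.active.<realm>.<bucket>hr and users.reading.<realm>.<bucket>hr patterns above. Illustrative examples only; the realm and counts are made up, and the exact key components depend on how statsd_key normalizes them:

users.active.example_realm.2hr 42
users.active.example_realm.24hr 97
users.reading.example_realm.168hr 63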
analytics/management/commands/active_user_stats_by_day.py (new file on the 1.5.x side, 28 lines)
@@ -0,0 +1,28 @@

from __future__ import absolute_import
from __future__ import print_function

import datetime
import pytz

from optparse import make_option
from typing import Any
from django.core.management.base import BaseCommand, CommandParser
from zerver.lib.statistics import activity_averages_during_day

class Command(BaseCommand):
    help = "Generate statistics on user activity for a given day."

    def add_arguments(self, parser):
        # type: (CommandParser) -> None
        parser.add_argument('--date', default=None, action='store',
                            help="Day to query in format 2013-12-05. Default is yesterday")

    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        if options["date"] is None:
            date = datetime.datetime.now() - datetime.timedelta(days=1)
        else:
            date = datetime.datetime.strptime(options["date"], "%Y-%m-%d")
        print("Activity data for", date)
        print(activity_averages_during_day(date))
        print("Please note that the total registered user count is a total for today")
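A typical invocation, based on the --date option above (the date is illustrative; omitting it reports on yesterday):

./manage.py active_user_stats_by_day --date 2013-12-05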
analytics/management/commands/analyze_mit.py (new file on the 1.5.x side, 87 lines)
@@ -0,0 +1,87 @@

from __future__ import absolute_import
from __future__ import print_function

from typing import Any

from optparse import make_option
from django.core.management.base import BaseCommand, CommandParser
from zerver.models import Recipient, Message
from zerver.lib.timestamp import timestamp_to_datetime
import datetime
import time
import logging

def compute_stats(log_level):
    # type: (int) -> None
    logger = logging.getLogger()
    logger.setLevel(log_level)

    one_week_ago = timestamp_to_datetime(time.time()) - datetime.timedelta(weeks=1)
    mit_query = Message.objects.filter(sender__realm__string_id="mit",
                                       recipient__type=Recipient.STREAM,
                                       pub_date__gt=one_week_ago)
    for bot_sender_start in ["imap.", "rcmd.", "sys."]:
        mit_query = mit_query.exclude(sender__email__startswith=(bot_sender_start))
    # Filtering for "/" covers tabbott/extra@ and all the daemon/foo bots.
    mit_query = mit_query.exclude(sender__email__contains=("/"))
    mit_query = mit_query.exclude(sender__email__contains=("aim.com"))
    mit_query = mit_query.exclude(
        sender__email__in=["rss@mit.edu", "bash@mit.edu", "apache@mit.edu",
                           "bitcoin@mit.edu", "lp@mit.edu", "clocks@mit.edu",
                           "root@mit.edu", "nagios@mit.edu",
                           "www-data|local-realm@mit.edu"])
    user_counts = {}  # type: Dict[str, Dict[str, int]]
    for m in mit_query.select_related("sending_client", "sender"):
        email = m.sender.email
        user_counts.setdefault(email, {})
        user_counts[email].setdefault(m.sending_client.name, 0)
        user_counts[email][m.sending_client.name] += 1

    total_counts = {}  # type: Dict[str, int]
    total_user_counts = {}  # type: Dict[str, int]
    for email, counts in user_counts.items():
        total_user_counts.setdefault(email, 0)
        for client_name, count in counts.items():
            total_counts.setdefault(client_name, 0)
            total_counts[client_name] += count
            total_user_counts[email] += count

    logging.debug("%40s | %10s | %s" % ("User", "Messages", "Percentage Zulip"))
    top_percents = {}  # type: Dict[int, float]
    for size in [10, 25, 50, 100, 200, len(total_user_counts.keys())]:
        top_percents[size] = 0.0
    for i, email in enumerate(sorted(total_user_counts.keys(),
                                     key=lambda x: -total_user_counts[x])):
        percent_zulip = round(100 - (user_counts[email].get("zephyr_mirror", 0)) * 100. /
                              total_user_counts[email], 1)
        for size in top_percents.keys():
            top_percents.setdefault(size, 0)
            if i < size:
                top_percents[size] += (percent_zulip * 1.0 / size)

        logging.debug("%40s | %10s | %s%%" % (email, total_user_counts[email],
                                              percent_zulip))

    logging.info("")
    for size in sorted(top_percents.keys()):
        logging.info("Top %6s | %s%%" % (size, round(top_percents[size], 1)))

    grand_total = sum(total_counts.values())
    print(grand_total)
    logging.info("%15s | %s" % ("Client", "Percentage"))
    for client in total_counts.keys():
        logging.info("%15s | %s%%" % (client, round(100. * total_counts[client] / grand_total, 1)))

class Command(BaseCommand):
    help = "Compute statistics on MIT Zephyr usage."

    def add_arguments(self, parser):
        # type: (CommandParser) -> None
        parser.add_argument('--verbose', default=False, action='store_true')

    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        level = logging.INFO
        if options["verbose"]:
            level = logging.DEBUG
        compute_stats(level)
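Typical invocations, given the single --verbose flag above (with --verbose the per-user breakdown is logged at DEBUG level):

./manage.py analyze_mit
./manage.py analyze_mit --verbose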
analytics/management/commands/analyze_user_activity.py (new file on the 1.5.x side, 64 lines)
@@ -0,0 +1,64 @@

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from typing import Any, Dict

from zerver.lib.statistics import seconds_usage_between

from optparse import make_option
from django.core.management.base import BaseCommand, CommandParser
from zerver.models import UserProfile
import datetime
from django.utils.timezone import utc

def analyze_activity(options):
    # type: (Dict[str, Any]) -> None
    day_start = datetime.datetime.strptime(options["date"], "%Y-%m-%d").replace(tzinfo=utc)
    day_end = day_start + datetime.timedelta(days=options["duration"])

    user_profile_query = UserProfile.objects.all()
    if options["realm"]:
        user_profile_query = user_profile_query.filter(realm__string_id=options["realm"])

    print("Per-user online duration:\n")
    total_duration = datetime.timedelta(0)
    for user_profile in user_profile_query:
        duration = seconds_usage_between(user_profile, day_start, day_end)

        if duration == datetime.timedelta(0):
            continue

        total_duration += duration
        print("%-*s%s" % (37, user_profile.email, duration,))

    print("\nTotal Duration: %s" % (total_duration,))
    print("\nTotal Duration in minutes: %s" % (total_duration.total_seconds() / 60.,))
    print("Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,))

class Command(BaseCommand):
    help = """Report analytics of user activity on a per-user and realm basis.

This command aggregates user activity data that is collected by each user using Zulip. It attempts
to approximate how much each user has been using Zulip per day, measured by recording each 15 minute
period where some activity has occurred (mouse move or keyboard activity).

It will correctly not count server-initiated reloads in the activity statistics.

The duration flag can be used to control how many days to show usage duration for.

Usage: ./manage.py analyze_user_activity [--realm=zulip] [--date=2013-09-10] [--duration=1]

By default, if no date is selected 2013-09-10 is used. If no realm is provided, information
is shown for all realms"""

    def add_arguments(self, parser):
        # type: (CommandParser) -> None
        parser.add_argument('--realm', action='store')
        parser.add_argument('--date', action='store', default="2013-09-06")
        parser.add_argument('--duration', action='store', default=1, type=int,
                            help="How many days to show usage information for")

    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        analyze_activity(options)
@@ -1,90 +0,0 @@  (a 90-line analytics/FillState health-check command that exists only on the 4.0-rc1-br side; its contents follow)

import os
import time
from datetime import timedelta
from typing import Any, Dict

from django.core.management.base import BaseCommand
from django.utils.timezone import now as timezone_now

from analytics.lib.counts import COUNT_STATS, CountStat
from analytics.models import installation_epoch
from zerver.lib.timestamp import TimezoneNotUTCException, floor_to_day, floor_to_hour, verify_UTC
from zerver.models import Realm

states = {
    0: "OK",
    1: "WARNING",
    2: "CRITICAL",
    3: "UNKNOWN",
}


class Command(BaseCommand):
    help = """Checks FillState table.

    Run as a cron job that runs every hour."""

    def handle(self, *args: Any, **options: Any) -> None:
        fill_state = self.get_fill_state()
        status = fill_state["status"]
        message = fill_state["message"]

        state_file_path = "/var/lib/nagios_state/check-analytics-state"
        state_file_tmp = state_file_path + "-tmp"

        with open(state_file_tmp, "w") as f:
            f.write(f"{int(time.time())}|{status}|{states[status]}|{message}\n")
        os.rename(state_file_tmp, state_file_path)

    def get_fill_state(self) -> Dict[str, Any]:
        if not Realm.objects.exists():
            return {"status": 0, "message": "No realms exist, so not checking FillState."}

        warning_unfilled_properties = []
        critical_unfilled_properties = []
        for property, stat in COUNT_STATS.items():
            last_fill = stat.last_successful_fill()
            if last_fill is None:
                last_fill = installation_epoch()
            try:
                verify_UTC(last_fill)
            except TimezoneNotUTCException:
                return {"status": 2, "message": f"FillState not in UTC for {property}"}

            if stat.frequency == CountStat.DAY:
                floor_function = floor_to_day
                warning_threshold = timedelta(hours=26)
                critical_threshold = timedelta(hours=50)
            else:  # CountStat.HOUR
                floor_function = floor_to_hour
                warning_threshold = timedelta(minutes=90)
                critical_threshold = timedelta(minutes=150)

            if floor_function(last_fill) != last_fill:
                return {
                    "status": 2,
                    "message": f"FillState not on {stat.frequency} boundary for {property}",
                }

            time_to_last_fill = timezone_now() - last_fill
            if time_to_last_fill > critical_threshold:
                critical_unfilled_properties.append(property)
            elif time_to_last_fill > warning_threshold:
                warning_unfilled_properties.append(property)

        if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0:
            return {"status": 0, "message": "FillState looks fine."}
        if len(critical_unfilled_properties) == 0:
            return {
                "status": 1,
                "message": "Missed filling {} once.".format(
                    ", ".join(warning_unfilled_properties),
                ),
            }
        return {
            "status": 2,
            "message": "Missed filling {} once. Missed filling {} at least twice.".format(
                ", ".join(warning_unfilled_properties),
                ", ".join(critical_unfilled_properties),
            ),
        }
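For reference, the state file written above holds a single line of the form <epoch seconds>|<status code>|<state name>|<message>, which a Nagios-style check can parse. An illustrative line for a healthy run (the timestamp is made up):

1600000000|0|OK|FillState looks fine.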
@@ -1,21 +1,29 @@  (clear-analytics-tables command; the 4.0-rc1-br version is shown, 1.5.x differences are summarized below)

from argparse import ArgumentParser
from typing import Any

from django.core.management.base import BaseCommand, CommandError

from analytics.lib.counts import do_drop_all_analytics_tables


class Command(BaseCommand):
    help = """Clear analytics tables."""

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument("--force", action="store_true", help="Clear analytics tables.")

    def handle(self, *args: Any, **options: Any) -> None:
        if options["force"]:
            do_drop_all_analytics_tables()
        else:
            raise CommandError(
                "Would delete all data from analytics tables (!); use --force to do so."
            )

(On the 1.5.x side the same command is written in the older style: __future__ imports, an extra django.db.connection import, `# type:` comments instead of annotations, and print() followed by sys.exit(1) instead of raising CommandError.)

@@ -1,23 +0,0 @@  (a related 23-line command for clearing a single stat, present only on the 4.0-rc1-br side; its contents follow)

from argparse import ArgumentParser
from typing import Any

from django.core.management.base import BaseCommand, CommandError

from analytics.lib.counts import COUNT_STATS, do_drop_single_stat


class Command(BaseCommand):
    help = """Clear analytics tables."""

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument("--force", action="store_true", help="Actually do it.")
        parser.add_argument("--property", help="The property of the stat to be cleared.")

    def handle(self, *args: Any, **options: Any) -> None:
        property = options["property"]
        if property not in COUNT_STATS:
            raise CommandError(f"Invalid property: {property}")
        if not options["force"]:
            raise CommandError("No action taken. Use --force.")

        do_drop_single_stat(property)
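Both commands above are deliberately destructive, so they refuse to run without --force. Typical invocations look like the following; the actual command names come from the modules' file names, which are not shown in this excerpt, and the property value is one of the COUNT_STATS keys that appear later in this diff:

./manage.py <clear_all_command> --force
./manage.py <clear_single_stat_command> --property='messages_sent:is_bot:hour' --force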
analytics/management/commands/client_activity.py (new file on the 1.5.x side, 80 lines)
@@ -0,0 +1,80 @@

from __future__ import absolute_import
from __future__ import print_function

from typing import Any

from argparse import ArgumentParser
from django.core.management.base import BaseCommand
from django.db.models import Count, QuerySet

from zerver.models import UserActivity, UserProfile, Realm, \
    get_realm, get_user_profile_by_email

import datetime

class Command(BaseCommand):
    help = """Report rough client activity globally, for a realm, or for a user

Usage examples:

./manage.py client_activity
./manage.py client_activity zulip
./manage.py client_activity hamlet@zulip.com"""

    def add_arguments(self, parser):
        # type: (ArgumentParser) -> None
        parser.add_argument('arg', metavar='<arg>', type=str, nargs='?', default=None,
                            help="realm or user to estimate client activity for")

    def compute_activity(self, user_activity_objects):
        # type: (QuerySet) -> None
        # Report data from the past week.
        #
        # This is a rough report of client activity because we inconsistently
        # register activity from various clients; think of it as telling you
        # approximately how many people from a group have used a particular
        # client recently. For example, this might be useful to get a sense of
        # how popular different versions of a desktop client are.
        #
        # Importantly, this does NOT tell you anything about the relative
        # volumes of requests from clients.
        threshold = datetime.datetime.now() - datetime.timedelta(days=7)
        client_counts = user_activity_objects.filter(
            last_visit__gt=threshold).values("client__name").annotate(
            count=Count('client__name'))

        total = 0
        counts = []
        for client_type in client_counts:
            count = client_type["count"]
            client = client_type["client__name"]
            total += count
            counts.append((count, client))

        counts.sort()

        for count in counts:
            print("%25s %15d" % (count[1], count[0]))
        print("Total:", total)

    def handle(self, *args, **options):
        # type: (*Any, **str) -> None
        if options['arg'] is None:
            # Report global activity.
            self.compute_activity(UserActivity.objects.all())
        else:
            arg = options['arg']
            try:
                # Report activity for a user.
                user_profile = get_user_profile_by_email(arg)
                self.compute_activity(UserActivity.objects.filter(
                    user_profile=user_profile))
            except UserProfile.DoesNotExist:
                try:
                    # Report activity for a realm.
                    realm = get_realm(arg)
                    self.compute_activity(UserActivity.objects.filter(
                        user_profile__realm=realm))
                except Realm.DoesNotExist:
                    print("Unknown user or realm %s" % (arg,))
                    exit(1)
@@ -1,26 +1,22 @@
|
|||||||
from datetime import timedelta
|
from __future__ import absolute_import, print_function
|
||||||
from typing import Any, Dict, List, Mapping, Optional, Type
|
|
||||||
from unittest import mock
|
from argparse import ArgumentParser
|
||||||
|
|
||||||
from django.core.management.base import BaseCommand
|
from django.core.management.base import BaseCommand
|
||||||
from django.utils.timezone import now as timezone_now
|
from django.utils import timezone
|
||||||
|
|
||||||
|
from analytics.models import BaseCount, InstallationCount, RealmCount, \
|
||||||
|
UserCount, StreamCount
|
||||||
from analytics.lib.counts import COUNT_STATS, CountStat, do_drop_all_analytics_tables
|
from analytics.lib.counts import COUNT_STATS, CountStat, do_drop_all_analytics_tables
|
||||||
from analytics.lib.fixtures import generate_time_series_data
|
from analytics.lib.fixtures import generate_time_series_data
|
||||||
from analytics.lib.time_utils import time_range
|
from analytics.lib.time_utils import time_range
|
||||||
from analytics.models import (
|
|
||||||
BaseCount,
|
|
||||||
FillState,
|
|
||||||
InstallationCount,
|
|
||||||
RealmCount,
|
|
||||||
StreamCount,
|
|
||||||
UserCount,
|
|
||||||
)
|
|
||||||
from zerver.lib.actions import STREAM_ASSIGNMENT_COLORS, do_change_user_role, do_create_realm
|
|
||||||
from zerver.lib.create_user import create_user
|
|
||||||
from zerver.lib.timestamp import floor_to_day
|
from zerver.lib.timestamp import floor_to_day
|
||||||
from zerver.models import Client, Realm, Recipient, Stream, Subscription, UserProfile
|
from zerver.models import Realm, UserProfile, Stream, Message, Client
|
||||||
|
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
from six.moves import zip
|
||||||
|
from typing import Any, List, Optional, Text, Type, Union
|
||||||
|
|
||||||
class Command(BaseCommand):
|
class Command(BaseCommand):
|
||||||
help = """Populates analytics tables with randomly generated data."""
|
help = """Populates analytics tables with randomly generated data."""
|
||||||
@@ -28,273 +24,100 @@ class Command(BaseCommand):
|
|||||||
DAYS_OF_DATA = 100
|
DAYS_OF_DATA = 100
|
||||||
random_seed = 26
|
random_seed = 26
|
||||||
|
|
||||||
def generate_fixture_data(
|
def create_user(self, email, full_name, is_staff, date_joined, realm):
|
||||||
self,
|
# type: (Text, Text, Text, bool, datetime, Realm) -> UserProfile
|
||||||
stat: CountStat,
|
return UserProfile.objects.create(
|
||||||
business_hours_base: float,
|
email=email, full_name=full_name, is_staff=is_staff,
|
||||||
non_business_hours_base: float,
|
realm=realm, short_name=full_name, pointer=-1, last_pointer_updater='none',
|
||||||
growth: float,
|
api_key='42', date_joined=date_joined)
|
||||||
autocorrelation: float,
|
|
||||||
spikiness: float,
|
def generate_fixture_data(self, stat, business_hours_base, non_business_hours_base,
|
||||||
holiday_rate: float = 0,
|
growth, autocorrelation, spikiness, holiday_rate=0):
|
||||||
partial_sum: bool = False,
|
# type: (CountStat, float, float, float, float, float, float) -> List[int]
|
||||||
) -> List[int]:
|
|
||||||
self.random_seed += 1
|
self.random_seed += 1
|
||||||
return generate_time_series_data(
|
return generate_time_series_data(
|
||||||
days=self.DAYS_OF_DATA,
|
days=self.DAYS_OF_DATA, business_hours_base=business_hours_base,
|
||||||
business_hours_base=business_hours_base,
|
non_business_hours_base=non_business_hours_base, growth=growth,
|
||||||
non_business_hours_base=non_business_hours_base,
|
autocorrelation=autocorrelation, spikiness=spikiness, holiday_rate=holiday_rate,
|
||||||
growth=growth,
|
frequency=stat.frequency, is_gauge=(stat.interval == CountStat.GAUGE),
|
||||||
autocorrelation=autocorrelation,
|
random_seed=self.random_seed)
|
||||||
spikiness=spikiness,
|
|
||||||
holiday_rate=holiday_rate,
|
|
||||||
frequency=stat.frequency,
|
|
||||||
partial_sum=partial_sum,
|
|
||||||
random_seed=self.random_seed,
|
|
||||||
)
|
|
||||||
|
|
||||||
def handle(self, *args: Any, **options: Any) -> None:
|
def handle(self, *args, **options):
|
||||||
# TODO: This should arguably only delete the objects
|
# type: (*Any, **Any) -> None
|
||||||
# associated with the "analytics" realm.
|
|
||||||
do_drop_all_analytics_tables()
|
do_drop_all_analytics_tables()
|
||||||
|
# I believe this also deletes any objects with this realm as a foreign key
|
||||||
|
Realm.objects.filter(string_id='analytics').delete()
|
||||||
|
Client.objects.filter(name__endswith='_').delete()
|
||||||
|
|
||||||
# This also deletes any objects with this realm as a foreign key
|
installation_time = timezone.now() - timedelta(days=self.DAYS_OF_DATA)
|
||||||
Realm.objects.filter(string_id="analytics").delete()
|
last_end_time = floor_to_day(timezone.now())
|
||||||
|
realm = Realm.objects.create(
|
||||||
|
string_id='analytics', name='Analytics', domain='analytics.ds',
|
||||||
|
date_created=installation_time)
|
||||||
|
shylock = self.create_user('shylock@analytics.ds', 'Shylock', True, installation_time, realm)
|
||||||
|
|
||||||
# Because we just deleted a bunch of objects in the database
|
def insert_fixture_data(stat, fixture_data, table):
|
||||||
# directly (rather than deleting individual objects in Django,
|
# type: (CountStat, Dict[Optional[str], List[int]], Type[BaseCount]) -> None
|
||||||
# in which case our post_save hooks would have flushed the
|
end_times = time_range(last_end_time, last_end_time, stat.frequency,
|
||||||
# individual objects from memcached for us), we need to flush
|
len(list(fixture_data.values())[0]))
|
||||||
# memcached in order to ensure deleted objects aren't still
|
|
||||||
# present in the memcached cache.
|
|
||||||
from zerver.apps import flush_cache
|
|
||||||
|
|
||||||
flush_cache(None)
|
|
||||||
|
|
||||||
installation_time = timezone_now() - timedelta(days=self.DAYS_OF_DATA)
|
|
||||||
last_end_time = floor_to_day(timezone_now())
|
|
||||||
realm = do_create_realm(
|
|
||||||
string_id="analytics", name="Analytics", date_created=installation_time
|
|
||||||
)
|
|
||||||
|
|
||||||
with mock.patch("zerver.lib.create_user.timezone_now", return_value=installation_time):
|
|
||||||
shylock = create_user(
|
|
||||||
"shylock@analytics.ds",
|
|
||||||
"Shylock",
|
|
||||||
realm,
|
|
||||||
full_name="Shylock",
|
|
||||||
role=UserProfile.ROLE_REALM_OWNER,
|
|
||||||
)
|
|
||||||
do_change_user_role(shylock, UserProfile.ROLE_REALM_OWNER, acting_user=None)
|
|
||||||
stream = Stream.objects.create(name="all", realm=realm, date_created=installation_time)
|
|
||||||
recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
|
|
||||||
stream.recipient = recipient
|
|
||||||
stream.save(update_fields=["recipient"])
|
|
||||||
|
|
||||||
# Subscribe shylock to the stream to avoid invariant failures.
|
|
||||||
# TODO: This should use subscribe_users_to_streams from populate_db.
|
|
||||||
subs = [
|
|
||||||
Subscription(
|
|
||||||
recipient=recipient,
|
|
||||||
user_profile=shylock,
|
|
||||||
is_user_active=shylock.is_active,
|
|
||||||
color=STREAM_ASSIGNMENT_COLORS[0],
|
|
||||||
),
|
|
||||||
]
|
|
||||||
Subscription.objects.bulk_create(subs)
|
|
||||||
|
|
||||||
def insert_fixture_data(
|
|
||||||
stat: CountStat, fixture_data: Mapping[Optional[str], List[int]], table: Type[BaseCount]
|
|
||||||
) -> None:
|
|
||||||
end_times = time_range(
|
|
||||||
last_end_time, last_end_time, stat.frequency, len(list(fixture_data.values())[0])
|
|
||||||
)
|
|
||||||
if table == InstallationCount:
|
|
||||||
id_args: Dict[str, Any] = {}
|
|
||||||
if table == RealmCount:
|
if table == RealmCount:
|
||||||
id_args = {"realm": realm}
|
id_args = {'realm': realm}
|
||||||
if table == UserCount:
|
if table == UserCount:
|
||||||
id_args = {"realm": realm, "user": shylock}
|
id_args = {'realm': realm, 'user': shylock}
|
||||||
if table == StreamCount:
|
|
||||||
id_args = {"stream": stream, "realm": realm}
|
|
||||||
|
|
||||||
for subgroup, values in fixture_data.items():
|
for subgroup, values in fixture_data.items():
|
||||||
table.objects.bulk_create(
|
table.objects.bulk_create([
|
||||||
table(
|
table(property=stat.property, subgroup=subgroup, end_time=end_time,
|
||||||
property=stat.property,
|
value=value, **id_args)
|
||||||
subgroup=subgroup,
|
for end_time, value in zip(end_times, values) if value != 0])
|
||||||
end_time=end_time,
|
|
||||||
value=value,
|
|
||||||
**id_args,
|
|
||||||
)
|
|
||||||
for end_time, value in zip(end_times, values)
|
|
||||||
if value != 0
|
|
||||||
)
|
|
||||||
|
|
||||||
stat = COUNT_STATS["1day_actives::day"]
|
stat = COUNT_STATS['active_users:is_bot:day']
|
||||||
realm_data: Mapping[Optional[str], List[int]] = {
|
|
||||||
None: self.generate_fixture_data(stat, 0.08, 0.02, 3, 0.3, 6, partial_sum=True),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, realm_data, RealmCount)
|
|
||||||
installation_data: Mapping[Optional[str], List[int]] = {
|
|
||||||
None: self.generate_fixture_data(stat, 0.8, 0.2, 4, 0.3, 6, partial_sum=True),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
|
||||||
FillState.objects.create(
|
|
||||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
|
||||||
)
|
|
||||||
|
|
||||||
stat = COUNT_STATS["7day_actives::day"]
|
|
||||||
realm_data = {
|
realm_data = {
|
||||||
None: self.generate_fixture_data(stat, 0.2, 0.07, 3, 0.3, 6, partial_sum=True),
|
'false': self.generate_fixture_data(stat, .1, .03, 3, .5, 3),
|
||||||
}
|
'true': self.generate_fixture_data(stat, .01, 0, 1, 0, 1)
|
||||||
|
} # type: Dict[Optional[str], List[int]]
|
||||||
insert_fixture_data(stat, realm_data, RealmCount)
|
insert_fixture_data(stat, realm_data, RealmCount)
|
||||||
installation_data = {
|
|
||||||
None: self.generate_fixture_data(stat, 2, 0.7, 4, 0.3, 6, partial_sum=True),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
|
||||||
FillState.objects.create(
|
|
||||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
|
||||||
)
|
|
||||||
|
|
||||||
stat = COUNT_STATS["realm_active_humans::day"]
|
stat = COUNT_STATS['messages_sent:is_bot:hour']
|
||||||
realm_data = {
|
user_data = {'false': self.generate_fixture_data(stat, 2, 1, 1.5, .6, 8, holiday_rate=.1)}
|
||||||
None: self.generate_fixture_data(stat, 0.8, 0.08, 3, 0.5, 3, partial_sum=True),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, realm_data, RealmCount)
|
|
||||||
installation_data = {
|
|
||||||
None: self.generate_fixture_data(stat, 1, 0.3, 4, 0.5, 3, partial_sum=True),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
|
||||||
FillState.objects.create(
|
|
||||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
|
||||||
)
|
|
||||||
|
|
||||||
stat = COUNT_STATS["active_users_audit:is_bot:day"]
|
|
||||||
realm_data = {
|
|
||||||
"false": self.generate_fixture_data(stat, 1, 0.2, 3.5, 0.8, 2, partial_sum=True),
|
|
||||||
"true": self.generate_fixture_data(stat, 0.3, 0.05, 3, 0.3, 2, partial_sum=True),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, realm_data, RealmCount)
|
|
||||||
installation_data = {
|
|
||||||
"false": self.generate_fixture_data(stat, 3, 1, 4, 0.8, 2, partial_sum=True),
|
|
||||||
"true": self.generate_fixture_data(stat, 1, 0.4, 4, 0.8, 2, partial_sum=True),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
|
||||||
FillState.objects.create(
|
|
||||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
|
||||||
)
|
|
||||||
|
|
||||||
stat = COUNT_STATS["messages_sent:is_bot:hour"]
|
|
||||||
user_data: Mapping[Optional[str], List[int]] = {
|
|
||||||
"false": self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8, holiday_rate=0.1),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, user_data, UserCount)
|
insert_fixture_data(stat, user_data, UserCount)
|
||||||
realm_data = {
|
realm_data = {'false': self.generate_fixture_data(stat, 35, 15, 6, .6, 4),
|
||||||
"false": self.generate_fixture_data(stat, 35, 15, 6, 0.6, 4),
|
'true': self.generate_fixture_data(stat, 15, 15, 3, .4, 2)}
|
||||||
"true": self.generate_fixture_data(stat, 15, 15, 3, 0.4, 2),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, realm_data, RealmCount)
|
insert_fixture_data(stat, realm_data, RealmCount)
|
||||||
installation_data = {
|
|
||||||
"false": self.generate_fixture_data(stat, 350, 150, 6, 0.6, 4),
|
|
||||||
"true": self.generate_fixture_data(stat, 150, 150, 3, 0.4, 2),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
|
||||||
FillState.objects.create(
|
|
||||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
|
||||||
)
|
|
||||||
|
|
||||||
stat = COUNT_STATS["messages_sent:message_type:day"]
|
stat = COUNT_STATS['messages_sent:message_type:day']
|
||||||
user_data = {
|
user_data = {
|
||||||
"public_stream": self.generate_fixture_data(stat, 1.5, 1, 3, 0.6, 8),
|
'public_stream': self.generate_fixture_data(stat, 1.5, 1, 3, .6, 8),
|
||||||
"private_message": self.generate_fixture_data(stat, 0.5, 0.3, 1, 0.6, 8),
|
'private_message': self.generate_fixture_data(stat, .5, .3, 1, .6, 8)}
|
||||||
"huddle_message": self.generate_fixture_data(stat, 0.2, 0.2, 2, 0.6, 8),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, user_data, UserCount)
|
insert_fixture_data(stat, user_data, UserCount)
|
||||||
realm_data = {
|
realm_data = {
|
||||||
"public_stream": self.generate_fixture_data(stat, 30, 8, 5, 0.6, 4),
|
'public_stream': self.generate_fixture_data(stat, 30, 8, 5, .6, 4),
|
||||||
"private_stream": self.generate_fixture_data(stat, 7, 7, 5, 0.6, 4),
|
'private_stream': self.generate_fixture_data(stat, 7, 7, 5, .6, 4),
|
||||||
"private_message": self.generate_fixture_data(stat, 13, 5, 5, 0.6, 4),
|
'private_message': self.generate_fixture_data(stat, 13, 5, 5, .6, 4)}
|
||||||
"huddle_message": self.generate_fixture_data(stat, 6, 3, 3, 0.6, 4),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, realm_data, RealmCount)
|
insert_fixture_data(stat, realm_data, RealmCount)
|
||||||
installation_data = {
|
|
||||||
"public_stream": self.generate_fixture_data(stat, 300, 80, 5, 0.6, 4),
|
|
||||||
"private_stream": self.generate_fixture_data(stat, 70, 70, 5, 0.6, 4),
|
|
||||||
"private_message": self.generate_fixture_data(stat, 130, 50, 5, 0.6, 4),
|
|
||||||
"huddle_message": self.generate_fixture_data(stat, 60, 30, 3, 0.6, 4),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
|
||||||
FillState.objects.create(
|
|
||||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
|
||||||
)
|
|
||||||
|
|
||||||
website, created = Client.objects.get_or_create(name="website")
|
website_ = Client.objects.create(name='website_')
|
||||||
old_desktop, created = Client.objects.get_or_create(name="desktop app Linux 0.3.7")
|
API_ = Client.objects.create(name='API_')
|
||||||
android, created = Client.objects.get_or_create(name="ZulipAndroid")
|
android_ = Client.objects.create(name='android_')
|
||||||
iOS, created = Client.objects.get_or_create(name="ZulipiOS")
|
iOS_ = Client.objects.create(name='iOS_')
|
||||||
react_native, created = Client.objects.get_or_create(name="ZulipMobile")
|
react_native_ = Client.objects.create(name='react_native_')
|
||||||
API, created = Client.objects.get_or_create(name="API: Python")
|
electron_ = Client.objects.create(name='electron_')
|
||||||
zephyr_mirror, created = Client.objects.get_or_create(name="zephyr_mirror")
|
barnowl_ = Client.objects.create(name='barnowl_')
|
||||||
unused, created = Client.objects.get_or_create(name="unused")
|
plan9_ = Client.objects.create(name='plan9_')
|
||||||
long_webhook, created = Client.objects.get_or_create(name="ZulipLooooooooooongNameWebhook")
|
|
||||||
|
|
||||||
stat = COUNT_STATS["messages_sent:client:day"]
|
stat = COUNT_STATS['messages_sent:client:day']
|
||||||
user_data = {
|
user_data = {
|
||||||
website.id: self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8),
|
website_.id: self.generate_fixture_data(stat, 2, 1, 1.5, .6, 8),
|
||||||
zephyr_mirror.id: self.generate_fixture_data(stat, 0, 0.3, 1.5, 0.6, 8),
|
barnowl_.id: self.generate_fixture_data(stat, 0, .3, 1.5, .6, 8)}
|
||||||
}
|
|
||||||
insert_fixture_data(stat, user_data, UserCount)
|
insert_fixture_data(stat, user_data, UserCount)
|
||||||
realm_data = {
|
realm_data = {
|
||||||
website.id: self.generate_fixture_data(stat, 30, 20, 5, 0.6, 3),
|
website_.id: self.generate_fixture_data(stat, 30, 20, 5, .6, 3),
|
||||||
old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, 0.6, 3),
|
API_.id: self.generate_fixture_data(stat, 5, 5, 5, .6, 3),
|
||||||
android.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
android_.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3),
|
||||||
iOS.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
iOS_.id: self.generate_fixture_data(stat, 5, 5, 2, .6, 3),
|
||||||
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
react_native_.id: self.generate_fixture_data(stat, 5, 5, 10, .6, 3),
|
||||||
API.id: self.generate_fixture_data(stat, 5, 5, 5, 0.6, 3),
|
electron_.id: self.generate_fixture_data(stat, 5, 3, 8, .6, 3),
|
||||||
zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, 0.6, 3),
|
barnowl_.id: self.generate_fixture_data(stat, 1, 1, 3, .6, 3),
|
||||||
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
|
plan9_.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0, 0)}
|
||||||
long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, realm_data, RealmCount)
|
insert_fixture_data(stat, realm_data, RealmCount)
|
||||||
installation_data = {
|
|
||||||
website.id: self.generate_fixture_data(stat, 300, 200, 5, 0.6, 3),
|
|
||||||
old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, 0.6, 3),
|
|
||||||
android.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
|
||||||
iOS.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
|
||||||
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
|
||||||
API.id: self.generate_fixture_data(stat, 50, 50, 5, 0.6, 3),
|
|
||||||
zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, 0.6, 3),
|
|
||||||
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
|
|
||||||
long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
|
||||||
FillState.objects.create(
|
|
||||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
|
||||||
)
|
|
||||||
|
|
||||||
stat = COUNT_STATS["messages_in_stream:is_bot:day"]
|
# TODO: messages_sent_to_stream:is_bot
|
||||||
realm_data = {
|
|
||||||
"false": self.generate_fixture_data(stat, 30, 5, 6, 0.6, 4),
|
|
||||||
"true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, realm_data, RealmCount)
|
|
||||||
stream_data: Mapping[Optional[str], List[int]] = {
|
|
||||||
"false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4),
|
|
||||||
"true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, stream_data, StreamCount)
|
|
||||||
FillState.objects.create(
|
|
||||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
|
||||||
)
|
|
||||||
|
|
||||||
stat = COUNT_STATS["messages_read::hour"]
|
|
||||||
user_data = {
|
|
||||||
None: self.generate_fixture_data(stat, 7, 3, 2, 0.6, 8, holiday_rate=0.1),
|
|
||||||
}
|
|
||||||
insert_fixture_data(stat, user_data, UserCount)
|
|
||||||
realm_data = {None: self.generate_fixture_data(stat, 50, 35, 6, 0.6, 4)}
|
|
||||||
insert_fixture_data(stat, realm_data, RealmCount)
|
|
||||||
FillState.objects.create(
|
|
||||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
|
||||||
)
|
|
||||||
|
|||||||
167
analytics/management/commands/realm_stats.py
Normal file
167
analytics/management/commands/realm_stats.py
Normal file
@@ -0,0 +1,167 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from typing import Any

from argparse import ArgumentParser
import datetime
import pytz

from django.core.management.base import BaseCommand
from django.db.models import Count
from zerver.models import UserProfile, Realm, Stream, Message, Recipient, UserActivity, \
    Subscription, UserMessage, get_realm

MOBILE_CLIENT_LIST = ["Android", "ios"]
HUMAN_CLIENT_LIST = MOBILE_CLIENT_LIST + ["website"]

human_messages = Message.objects.filter(sending_client__name__in=HUMAN_CLIENT_LIST)

class Command(BaseCommand):
    help = "Generate statistics on realm activity."

    def add_arguments(self, parser):
        # type: (ArgumentParser) -> None
        parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
                            help="realm to generate statistics for")

    def active_users(self, realm):
        # type: (Realm) -> List[UserProfile]
        # Has been active (on the website, for now) in the last 7 days.
        activity_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=7)
        return [activity.user_profile for activity in (
            UserActivity.objects.filter(user_profile__realm=realm,
                                        user_profile__is_active=True,
                                        last_visit__gt=activity_cutoff,
                                        query="/json/users/me/pointer",
                                        client__name="website"))]

    def messages_sent_by(self, user, days_ago):
        # type: (UserProfile, int) -> int
        sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender=user, pub_date__gt=sent_time_cutoff).count()

    def total_messages(self, realm, days_ago):
        # type: (Realm, int) -> int
        sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago)
        return Message.objects.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()

    def human_messages(self, realm, days_ago):
        # type: (Realm, int) -> int
        sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()

    def api_messages(self, realm, days_ago):
        # type: (Realm, int) -> int
        return (self.total_messages(realm, days_ago) - self.human_messages(realm, days_ago))

    def stream_messages(self, realm, days_ago):
        # type: (Realm, int) -> int
        sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff,
                                     recipient__type=Recipient.STREAM).count()

    def private_messages(self, realm, days_ago):
        # type: (Realm, int) -> int
        sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
            recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.HUDDLE).count()

    def group_private_messages(self, realm, days_ago):
        # type: (Realm, int) -> int
        sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago)
        return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
            recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.PERSONAL).count()

    def report_percentage(self, numerator, denominator, text):
        # type: (float, float, str) -> None
        if not denominator:
            fraction = 0.0
        else:
            fraction = numerator / float(denominator)
        print("%.2f%% of" % (fraction * 100,), text)

    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        if options['realms']:
            try:
                realms = [get_realm(string_id) for string_id in options['realms']]
            except Realm.DoesNotExist as e:
                print(e)
                exit(1)
        else:
            realms = Realm.objects.all()

        for realm in realms:
            print(realm.string_id)

            user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
            active_users = self.active_users(realm)
            num_active = len(active_users)

            print("%d active users (%d total)" % (num_active, len(user_profiles)))
            streams = Stream.objects.filter(realm=realm).extra(
                tables=['zerver_subscription', 'zerver_recipient'],
                where=['zerver_subscription.recipient_id = zerver_recipient.id',
                       'zerver_recipient.type = 2',
                       'zerver_recipient.type_id = zerver_stream.id',
                       'zerver_subscription.active = true']).annotate(count=Count("name"))
            print("%d streams" % (streams.count(),))

            for days_ago in (1, 7, 30):
                print("In last %d days, users sent:" % (days_ago,))
                sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles]
                for quantity in sorted(sender_quantities, reverse=True):
                    print(quantity, end=' ')
                print("")

                print("%d stream messages" % (self.stream_messages(realm, days_ago),))
                print("%d one-on-one private messages" % (self.private_messages(realm, days_ago),))
                print("%d messages sent via the API" % (self.api_messages(realm, days_ago),))
                print("%d group private messages" % (self.group_private_messages(realm, days_ago),))

            num_notifications_enabled = len([x for x in active_users if x.enable_desktop_notifications])
            self.report_percentage(num_notifications_enabled, num_active,
                                   "active users have desktop notifications enabled")

            num_enter_sends = len([x for x in active_users if x.enter_sends])
            self.report_percentage(num_enter_sends, num_active,
                                   "active users have enter-sends")

            all_message_count = human_messages.filter(sender__realm=realm).count()
            multi_paragraph_message_count = human_messages.filter(
                sender__realm=realm, content__contains="\n\n").count()
            self.report_percentage(multi_paragraph_message_count, all_message_count,
                                   "all messages are multi-paragraph")

            # Starred messages
            starrers = UserMessage.objects.filter(user_profile__in=user_profiles,
                                                  flags=UserMessage.flags.starred).values(
                "user_profile").annotate(count=Count("user_profile"))
            print("%d users have starred %d messages" % (
                len(starrers), sum([elt["count"] for elt in starrers])))

            active_user_subs = Subscription.objects.filter(
                user_profile__in=user_profiles, active=True)

            # Streams not in home view
            non_home_view = active_user_subs.filter(in_home_view=False).values(
                "user_profile").annotate(count=Count("user_profile"))
            print("%d users have %d streams not in home view" % (
                len(non_home_view), sum([elt["count"] for elt in non_home_view])))

            # Code block markup
            markup_messages = human_messages.filter(
                sender__realm=realm, content__contains="~~~").values(
                "sender").annotate(count=Count("sender"))
            print("%d users have used code block markup on %s messages" % (
                len(markup_messages), sum([elt["count"] for elt in markup_messages])))

            # Notifications for stream messages
            notifications = active_user_subs.filter(notifications=True).values(
                "user_profile").annotate(count=Count("user_profile"))
            print("%d users receive desktop notifications for %d streams" % (
                len(notifications), sum([elt["count"] for elt in notifications])))

            print("")
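(Illustrative note, not part of the diff.) realm_stats, like the other scripts in this compare, is a regular Django management command, so it can also be driven programmatically through Django's call_command API. A minimal sketch, assuming DJANGO_SETTINGS_MODULE is already configured; the realm names below are placeholders, not values from the diff:

from django.core.management import call_command

# Statistics for every realm (no positional arguments):
call_command("realm_stats")

# Statistics for specific realms, matching the `realms` positional argument above:
call_command("realm_stats", "example-realm-1", "example-realm-2")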
@@ -1,61 +1,46 @@
-from argparse import ArgumentParser
+from __future__ import absolute_import
+from __future__ import print_function

 from typing import Any

-from django.core.management.base import BaseCommand, CommandError
+from argparse import ArgumentParser
+from django.core.management.base import BaseCommand
 from django.db.models import Q
-from zerver.models import Message, Realm, Recipient, Stream, Subscription, get_realm
+from zerver.models import Realm, Stream, Message, Subscription, Recipient, get_realm


 class Command(BaseCommand):
     help = "Generate statistics on the streams for a realm."

-    def add_arguments(self, parser: ArgumentParser) -> None:
-        parser.add_argument(
-            "realms", metavar="<realm>", nargs="*", help="realm to generate statistics for"
-        )
+    def add_arguments(self, parser):
+        # type: (ArgumentParser) -> None
+        parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
+                            help="realm to generate statistics for")

-    def handle(self, *args: Any, **options: str) -> None:
-        if options["realms"]:
+    def handle(self, *args, **options):
+        # type: (*Any, **str) -> None
+        if options['realms']:
             try:
-                realms = [get_realm(string_id) for string_id in options["realms"]]
+                realms = [get_realm(string_id) for string_id in options['realms']]
             except Realm.DoesNotExist as e:
-                raise CommandError(e)
+                print(e)
+                exit(1)
         else:
             realms = Realm.objects.all()

         for realm in realms:
+            print(realm.string_id)
+            print("------------")
+            print("%25s %15s %10s" % ("stream", "subscribers", "messages"))
             streams = Stream.objects.filter(realm=realm).exclude(Q(name__istartswith="tutorial-"))
-            # private stream count
-            private_count = 0
-            # public stream count
-            public_count = 0
+            invite_only_count = 0
             for stream in streams:
                 if stream.invite_only:
-                    private_count += 1
-                else:
-                    public_count += 1
-            print("------------")
-            print(realm.string_id, end=" ")
-            print("{:>10} {} public streams and".format("(", public_count), end=" ")
-            print(f"{private_count} private streams )")
-            print("------------")
-            print("{:>25} {:>15} {:>10} {:>12}".format("stream", "subscribers", "messages", "type"))
-
-            for stream in streams:
-                if stream.invite_only:
-                    stream_type = "private"
-                else:
-                    stream_type = "public"
-                print(f"{stream.name:>25}", end=" ")
+                    invite_only_count += 1
+                    continue
+                print("%25s" % (stream.name,), end=' ')
                 recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id)
-                print(
-                    "{:10}".format(
-                        len(Subscription.objects.filter(recipient=recipient, active=True))
-                    ),
-                    end=" ",
-                )
+                print("%10d" % (len(Subscription.objects.filter(recipient=recipient, active=True)),), end=' ')
                 num_messages = len(Message.objects.filter(recipient=recipient))
-                print(f"{num_messages:12}", end=" ")
-                print(f"{stream_type:>15}")
+                print("%12d" % (num_messages,))
+            print("%d invite-only streams" % (invite_only_count,))
             print("")
@@ -1,50 +1,53 @@
|
|||||||
|
from __future__ import absolute_import
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import time
|
import sys
|
||||||
from argparse import ArgumentParser
|
|
||||||
from datetime import timezone
|
|
||||||
from typing import Any, Dict
|
|
||||||
|
|
||||||
from django.conf import settings
|
|
||||||
from django.core.management.base import BaseCommand
|
|
||||||
from django.utils.dateparse import parse_datetime
|
|
||||||
from django.utils.timezone import now as timezone_now
|
|
||||||
|
|
||||||
from analytics.lib.counts import COUNT_STATS, logger, process_count_stat
|
|
||||||
from scripts.lib.zulip_tools import ENDC, WARNING
|
from scripts.lib.zulip_tools import ENDC, WARNING
|
||||||
from zerver.lib.remote_server import send_analytics_to_remote_server
|
|
||||||
from zerver.lib.timestamp import floor_to_hour
|
|
||||||
from zerver.models import Realm
|
|
||||||
|
|
||||||
|
from argparse import ArgumentParser
|
||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from django.utils import timezone
|
||||||
|
from django.utils.dateparse import parse_datetime
|
||||||
|
from django.conf import settings
|
||||||
|
|
||||||
|
from analytics.models import RealmCount, UserCount
|
||||||
|
from analytics.lib.counts import COUNT_STATS, logger, process_count_stat
|
||||||
|
from zerver.lib.timestamp import datetime_to_string, is_timezone_aware
|
||||||
|
from zerver.models import UserProfile, Message
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
class Command(BaseCommand):
|
class Command(BaseCommand):
|
||||||
help = """Fills Analytics tables.
|
help = """Fills Analytics tables.
|
||||||
|
|
||||||
Run as a cron job that runs every hour."""
|
Run as a cron job that runs every hour."""
|
||||||
|
|
||||||
def add_arguments(self, parser: ArgumentParser) -> None:
|
def add_arguments(self, parser):
|
||||||
parser.add_argument(
|
# type: (ArgumentParser) -> None
|
||||||
"--time",
|
parser.add_argument('--time', '-t',
|
||||||
"-t",
|
type=str,
|
||||||
help="Update stat tables from current state to "
|
help='Update stat tables from current state to --time. Defaults to the current time.',
|
||||||
"--time. Defaults to the current time.",
|
default=datetime_to_string(timezone.now()))
|
||||||
default=timezone_now().isoformat(),
|
parser.add_argument('--utc',
|
||||||
)
|
type=bool,
|
||||||
parser.add_argument("--utc", action="store_true", help="Interpret --time in UTC.")
|
help="Interpret --time in UTC.",
|
||||||
parser.add_argument(
|
default=False)
|
||||||
"--stat", "-s", help="CountStat to process. If omitted, all stats are processed."
|
parser.add_argument('--stat', '-s',
|
||||||
)
|
type=str,
|
||||||
parser.add_argument(
|
help="CountStat to process. If omitted, all stats are processed.")
|
||||||
"--verbose", action="store_true", help="Print timing information to stdout."
|
parser.add_argument('--quiet', '-q',
|
||||||
)
|
type=str,
|
||||||
|
help="Suppress output to stdout.")
|
||||||
|
|
||||||
def handle(self, *args: Any, **options: Any) -> None:
|
def handle(self, *args, **options):
|
||||||
|
# type: (*Any, **Any) -> None
|
||||||
try:
|
try:
|
||||||
os.mkdir(settings.ANALYTICS_LOCK_DIR)
|
os.mkdir(settings.ANALYTICS_LOCK_DIR)
|
||||||
except OSError:
|
except OSError:
|
||||||
print(
|
print(WARNING + "Analytics lock %s is unavailable; exiting... " + ENDC)
|
||||||
f"{WARNING}Analytics lock {settings.ANALYTICS_LOCK_DIR} is unavailable;"
|
|
||||||
f" exiting.{ENDC}"
|
|
||||||
)
|
|
||||||
return
|
return
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -52,44 +55,21 @@ class Command(BaseCommand):
|
|||||||
finally:
|
finally:
|
||||||
os.rmdir(settings.ANALYTICS_LOCK_DIR)
|
os.rmdir(settings.ANALYTICS_LOCK_DIR)
|
||||||
|
|
||||||
def run_update_analytics_counts(self, options: Dict[str, Any]) -> None:
|
def run_update_analytics_counts(self, options):
|
||||||
# installation_epoch relies on there being at least one realm; we
|
# type: (Dict[str, Any]) -> None
|
||||||
# shouldn't run the analytics code if that condition isn't satisfied
|
fill_to_time = parse_datetime(options['time'])
|
||||||
if not Realm.objects.exists():
|
if options['utc']:
|
||||||
logger.info("No realms, stopping update_analytics_counts")
|
|
||||||
return
|
|
||||||
|
|
||||||
fill_to_time = parse_datetime(options["time"])
|
|
||||||
if options["utc"]:
|
|
||||||
fill_to_time = fill_to_time.replace(tzinfo=timezone.utc)
|
fill_to_time = fill_to_time.replace(tzinfo=timezone.utc)
|
||||||
if fill_to_time.tzinfo is None:
|
|
||||||
raise ValueError(
|
|
||||||
"--time must be timezone aware. Maybe you meant to use the --utc option?"
|
|
||||||
)
|
|
||||||
|
|
||||||
fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc))
|
if not (is_timezone_aware(fill_to_time)):
|
||||||
|
raise ValueError("--time must be timezone aware. Maybe you meant to use the --utc option?")
|
||||||
|
|
||||||
if options["stat"] is not None:
|
logger.info("Starting updating analytics counts through %s" % (fill_to_time,))
|
||||||
stats = [COUNT_STATS[options["stat"]]]
|
|
||||||
|
if options['stat'] is not None:
|
||||||
|
process_count_stat(COUNT_STATS[options['stat']], fill_to_time)
|
||||||
else:
|
else:
|
||||||
stats = list(COUNT_STATS.values())
|
for stat in COUNT_STATS.values():
|
||||||
|
|
||||||
logger.info("Starting updating analytics counts through %s", fill_to_time)
|
|
||||||
if options["verbose"]:
|
|
||||||
start = time.time()
|
|
||||||
last = start
|
|
||||||
|
|
||||||
for stat in stats:
|
|
||||||
process_count_stat(stat, fill_to_time)
|
process_count_stat(stat, fill_to_time)
|
||||||
if options["verbose"]:
|
|
||||||
print(f"Updated {stat.property} in {time.time() - last:.3f}s")
|
|
||||||
last = time.time()
|
|
||||||
|
|
||||||
if options["verbose"]:
|
logger.info("Finished updating analytics counts through %s" % (fill_to_time,))
|
||||||
print(
|
|
||||||
f"Finished updating analytics counts through {fill_to_time} in {time.time() - start:.3f}s"
|
|
||||||
)
|
|
||||||
logger.info("Finished updating analytics counts through %s", fill_to_time)
|
|
||||||
|
|
||||||
if settings.PUSH_NOTIFICATION_BOUNCER_URL and settings.SUBMIT_USAGE_STATISTICS:
|
|
||||||
send_analytics_to_remote_server()
|
|
||||||
|
analytics/management/commands/user_stats.py (new file, 48 lines)
@@ -0,0 +1,48 @@
from __future__ import absolute_import
from __future__ import print_function

from argparse import ArgumentParser
import datetime
import pytz
from typing import Any

from django.core.management.base import BaseCommand
from zerver.models import UserProfile, Realm, Stream, Message, get_realm
from six.moves import range

class Command(BaseCommand):
    help = "Generate statistics on user activity."

    def add_arguments(self, parser):
        # type: (ArgumentParser) -> None
        parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
                            help="realm to generate statistics for")

    def messages_sent_by(self, user, week):
        # type: (UserProfile, int) -> int
        start = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=(week + 1)*7)
        end = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=week*7)
        return Message.objects.filter(sender=user, pub_date__gt=start, pub_date__lte=end).count()

    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        if options['realms']:
            try:
                realms = [get_realm(string_id) for string_id in options['realms']]
            except Realm.DoesNotExist as e:
                print(e)
                exit(1)
        else:
            realms = Realm.objects.all()

        for realm in realms:
            print(realm.string_id)
            user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
            print("%d users" % (len(user_profiles),))
            print("%d streams" % (len(Stream.objects.filter(realm=realm)),))

            for user_profile in user_profiles:
                print("%35s" % (user_profile.email,), end=' ')
                for week in range(10):
                    print("%5d" % (self.messages_sent_by(user_profile, week)), end=' ')
                print("")
@@ -1,209 +1,113 @@
|
|||||||
import django.db.models.deletion
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from django.db import models, migrations
|
||||||
from django.conf import settings
|
from django.conf import settings
|
||||||
from django.db import migrations, models
|
import zerver.lib.str_utils
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("zerver", "0030_realm_org_type"),
|
('zerver', '0030_realm_org_type'),
|
||||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.CreateModel(
|
migrations.CreateModel(
|
||||||
name="Anomaly",
|
name='Anomaly',
|
||||||
fields=[
|
fields=[
|
||||||
(
|
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||||
"id",
|
('info', models.CharField(max_length=1000)),
|
||||||
models.AutoField(
|
|
||||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
|
||||||
),
|
|
||||||
),
|
|
||||||
("info", models.CharField(max_length=1000)),
|
|
||||||
],
|
],
|
||||||
bases=(models.Model,),
|
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||||
),
|
),
|
||||||
migrations.CreateModel(
|
migrations.CreateModel(
|
||||||
name="HuddleCount",
|
name='HuddleCount',
|
||||||
fields=[
|
fields=[
|
||||||
(
|
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||||
"id",
|
('huddle', models.ForeignKey(to='zerver.Recipient')),
|
||||||
models.AutoField(
|
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
|
||||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
('property', models.CharField(max_length=40)),
|
||||||
),
|
('end_time', models.DateTimeField()),
|
||||||
),
|
('interval', models.CharField(max_length=20)),
|
||||||
(
|
('value', models.BigIntegerField()),
|
||||||
"huddle",
|
('anomaly', models.ForeignKey(to='analytics.Anomaly', null=True)),
|
||||||
models.ForeignKey(
|
|
||||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient"
|
|
||||||
),
|
|
||||||
),
|
|
||||||
(
|
|
||||||
"user",
|
|
||||||
models.ForeignKey(
|
|
||||||
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
|
|
||||||
),
|
|
||||||
),
|
|
||||||
("property", models.CharField(max_length=40)),
|
|
||||||
("end_time", models.DateTimeField()),
|
|
||||||
("interval", models.CharField(max_length=20)),
|
|
||||||
("value", models.BigIntegerField()),
|
|
||||||
(
|
|
||||||
"anomaly",
|
|
||||||
models.ForeignKey(
|
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
|
||||||
to="analytics.Anomaly",
|
|
||||||
null=True,
|
|
||||||
),
|
|
||||||
),
|
|
||||||
],
|
],
|
||||||
bases=(models.Model,),
|
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||||
),
|
),
|
||||||
migrations.CreateModel(
|
migrations.CreateModel(
|
||||||
name="InstallationCount",
|
name='InstallationCount',
|
||||||
fields=[
|
fields=[
|
||||||
(
|
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||||
"id",
|
('property', models.CharField(max_length=40)),
|
||||||
models.AutoField(
|
('end_time', models.DateTimeField()),
|
||||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
('interval', models.CharField(max_length=20)),
|
||||||
),
|
('value', models.BigIntegerField()),
|
||||||
),
|
('anomaly', models.ForeignKey(to='analytics.Anomaly', null=True)),
|
||||||
("property", models.CharField(max_length=40)),
|
|
||||||
("end_time", models.DateTimeField()),
|
|
||||||
("interval", models.CharField(max_length=20)),
|
|
||||||
("value", models.BigIntegerField()),
|
|
||||||
(
|
|
||||||
"anomaly",
|
|
||||||
models.ForeignKey(
|
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
|
||||||
to="analytics.Anomaly",
|
|
||||||
null=True,
|
|
||||||
),
|
|
||||||
),
|
|
||||||
],
|
],
|
||||||
bases=(models.Model,),
|
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||||
),
|
),
|
||||||
migrations.CreateModel(
|
migrations.CreateModel(
|
||||||
name="RealmCount",
|
name='RealmCount',
|
||||||
fields=[
|
fields=[
|
||||||
(
|
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||||
"id",
|
('realm', models.ForeignKey(to='zerver.Realm')),
|
||||||
models.AutoField(
|
('property', models.CharField(max_length=40)),
|
||||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
('end_time', models.DateTimeField()),
|
||||||
),
|
('interval', models.CharField(max_length=20)),
|
||||||
),
|
('value', models.BigIntegerField()),
|
||||||
(
|
('anomaly', models.ForeignKey(to='analytics.Anomaly', null=True)),
|
||||||
"realm",
|
|
||||||
models.ForeignKey(
|
|
||||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
|
|
||||||
),
|
|
||||||
),
|
|
||||||
("property", models.CharField(max_length=40)),
|
|
||||||
("end_time", models.DateTimeField()),
|
|
||||||
("interval", models.CharField(max_length=20)),
|
|
||||||
("value", models.BigIntegerField()),
|
|
||||||
(
|
|
||||||
"anomaly",
|
|
||||||
models.ForeignKey(
|
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
|
||||||
to="analytics.Anomaly",
|
|
||||||
null=True,
|
|
||||||
),
|
|
||||||
),
|
|
||||||
],
|
],
|
||||||
bases=(models.Model,),
|
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||||
),
|
),
|
||||||
migrations.CreateModel(
|
migrations.CreateModel(
|
||||||
name="StreamCount",
|
name='StreamCount',
|
||||||
fields=[
|
fields=[
|
||||||
(
|
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||||
"id",
|
('realm', models.ForeignKey(to='zerver.Realm')),
|
||||||
models.AutoField(
|
('stream', models.ForeignKey(to='zerver.Stream')),
|
||||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
('property', models.CharField(max_length=40)),
|
||||||
),
|
('end_time', models.DateTimeField()),
|
||||||
),
|
('interval', models.CharField(max_length=20)),
|
||||||
(
|
('value', models.BigIntegerField()),
|
||||||
"realm",
|
('anomaly', models.ForeignKey(to='analytics.Anomaly', null=True)),
|
||||||
models.ForeignKey(
|
|
||||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
|
|
||||||
),
|
|
||||||
),
|
|
||||||
(
|
|
||||||
"stream",
|
|
||||||
models.ForeignKey(
|
|
||||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Stream"
|
|
||||||
),
|
|
||||||
),
|
|
||||||
("property", models.CharField(max_length=40)),
|
|
||||||
("end_time", models.DateTimeField()),
|
|
||||||
("interval", models.CharField(max_length=20)),
|
|
||||||
("value", models.BigIntegerField()),
|
|
||||||
(
|
|
||||||
"anomaly",
|
|
||||||
models.ForeignKey(
|
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
|
||||||
to="analytics.Anomaly",
|
|
||||||
null=True,
|
|
||||||
),
|
|
||||||
),
|
|
||||||
],
|
],
|
||||||
bases=(models.Model,),
|
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||||
),
|
),
|
||||||
migrations.CreateModel(
|
migrations.CreateModel(
|
||||||
name="UserCount",
|
name='UserCount',
|
||||||
fields=[
|
fields=[
|
||||||
(
|
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||||
"id",
|
('realm', models.ForeignKey(to='zerver.Realm')),
|
||||||
models.AutoField(
|
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
|
||||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
('property', models.CharField(max_length=40)),
|
||||||
),
|
('end_time', models.DateTimeField()),
|
||||||
),
|
('interval', models.CharField(max_length=20)),
|
||||||
(
|
('value', models.BigIntegerField()),
|
||||||
"realm",
|
('anomaly', models.ForeignKey(to='analytics.Anomaly', null=True)),
|
||||||
models.ForeignKey(
|
|
||||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
|
|
||||||
),
|
|
||||||
),
|
|
||||||
(
|
|
||||||
"user",
|
|
||||||
models.ForeignKey(
|
|
||||||
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
|
|
||||||
),
|
|
||||||
),
|
|
||||||
("property", models.CharField(max_length=40)),
|
|
||||||
("end_time", models.DateTimeField()),
|
|
||||||
("interval", models.CharField(max_length=20)),
|
|
||||||
("value", models.BigIntegerField()),
|
|
||||||
(
|
|
||||||
"anomaly",
|
|
||||||
models.ForeignKey(
|
|
||||||
on_delete=django.db.models.deletion.CASCADE,
|
|
||||||
to="analytics.Anomaly",
|
|
||||||
null=True,
|
|
||||||
),
|
|
||||||
),
|
|
||||||
],
|
],
|
||||||
bases=(models.Model,),
|
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||||
),
|
),
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="usercount",
|
name='usercount',
|
||||||
unique_together={("user", "property", "end_time", "interval")},
|
unique_together=set([('user', 'property', 'end_time', 'interval')]),
|
||||||
),
|
),
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="streamcount",
|
name='streamcount',
|
||||||
unique_together={("stream", "property", "end_time", "interval")},
|
unique_together=set([('stream', 'property', 'end_time', 'interval')]),
|
||||||
),
|
),
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="realmcount",
|
name='realmcount',
|
||||||
unique_together={("realm", "property", "end_time", "interval")},
|
unique_together=set([('realm', 'property', 'end_time', 'interval')]),
|
||||||
),
|
),
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="installationcount",
|
name='installationcount',
|
||||||
unique_together={("property", "end_time", "interval")},
|
unique_together=set([('property', 'end_time', 'interval')]),
|
||||||
),
|
),
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="huddlecount",
|
name='huddlecount',
|
||||||
unique_together={("huddle", "property", "end_time", "interval")},
|
unique_together=set([('huddle', 'property', 'end_time', 'interval')]),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,30 +1,33 @@
|
|||||||
from django.db import migrations
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0001_initial"),
|
('analytics', '0001_initial'),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="huddlecount",
|
name='huddlecount',
|
||||||
unique_together=set(),
|
unique_together=set([]),
|
||||||
),
|
),
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name="huddlecount",
|
model_name='huddlecount',
|
||||||
name="anomaly",
|
name='anomaly',
|
||||||
),
|
),
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name="huddlecount",
|
model_name='huddlecount',
|
||||||
name="huddle",
|
name='huddle',
|
||||||
),
|
),
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name="huddlecount",
|
model_name='huddlecount',
|
||||||
name="user",
|
name='user',
|
||||||
),
|
),
|
||||||
migrations.DeleteModel(
|
migrations.DeleteModel(
|
||||||
name="HuddleCount",
|
name='HuddleCount',
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,27 +1,26 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
from django.db import migrations, models
|
from django.db import migrations, models
|
||||||
|
import zerver.lib.str_utils
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0002_remove_huddlecount"),
|
('analytics', '0002_remove_huddlecount'),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.CreateModel(
|
migrations.CreateModel(
|
||||||
name="FillState",
|
name='FillState',
|
||||||
fields=[
|
fields=[
|
||||||
(
|
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
|
||||||
"id",
|
('property', models.CharField(unique=True, max_length=40)),
|
||||||
models.AutoField(
|
('end_time', models.DateTimeField()),
|
||||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
('state', models.PositiveSmallIntegerField()),
|
||||||
),
|
('last_modified', models.DateTimeField(auto_now=True)),
|
||||||
),
|
|
||||||
("property", models.CharField(unique=True, max_length=40)),
|
|
||||||
("end_time", models.DateTimeField()),
|
|
||||||
("state", models.PositiveSmallIntegerField()),
|
|
||||||
("last_modified", models.DateTimeField(auto_now=True)),
|
|
||||||
],
|
],
|
||||||
bases=(models.Model,),
|
bases=(zerver.lib.str_utils.ModelReprMixin, models.Model),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,31 +1,34 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
from django.db import migrations, models
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0003_fillstate"),
|
('analytics', '0003_fillstate'),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name="installationcount",
|
model_name='installationcount',
|
||||||
name="subgroup",
|
name='subgroup',
|
||||||
field=models.CharField(max_length=16, null=True),
|
field=models.CharField(max_length=16, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name="realmcount",
|
model_name='realmcount',
|
||||||
name="subgroup",
|
name='subgroup',
|
||||||
field=models.CharField(max_length=16, null=True),
|
field=models.CharField(max_length=16, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name="streamcount",
|
model_name='streamcount',
|
||||||
name="subgroup",
|
name='subgroup',
|
||||||
field=models.CharField(max_length=16, null=True),
|
field=models.CharField(max_length=16, null=True),
|
||||||
),
|
),
|
||||||
migrations.AddField(
|
migrations.AddField(
|
||||||
model_name="usercount",
|
model_name='usercount',
|
||||||
name="subgroup",
|
name='subgroup',
|
||||||
field=models.CharField(max_length=16, null=True),
|
field=models.CharField(max_length=16, null=True),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,51 +1,54 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
from django.db import migrations, models
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0004_add_subgroup"),
|
('analytics', '0004_add_subgroup'),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name="installationcount",
|
model_name='installationcount',
|
||||||
name="interval",
|
name='interval',
|
||||||
field=models.CharField(max_length=8),
|
field=models.CharField(max_length=8),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name="installationcount",
|
model_name='installationcount',
|
||||||
name="property",
|
name='property',
|
||||||
field=models.CharField(max_length=32),
|
field=models.CharField(max_length=32),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name="realmcount",
|
model_name='realmcount',
|
||||||
name="interval",
|
name='interval',
|
||||||
field=models.CharField(max_length=8),
|
field=models.CharField(max_length=8),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name="realmcount",
|
model_name='realmcount',
|
||||||
name="property",
|
name='property',
|
||||||
field=models.CharField(max_length=32),
|
field=models.CharField(max_length=32),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name="streamcount",
|
model_name='streamcount',
|
||||||
name="interval",
|
name='interval',
|
||||||
field=models.CharField(max_length=8),
|
field=models.CharField(max_length=8),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name="streamcount",
|
model_name='streamcount',
|
||||||
name="property",
|
name='property',
|
||||||
field=models.CharField(max_length=32),
|
field=models.CharField(max_length=32),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name="usercount",
|
model_name='usercount',
|
||||||
name="interval",
|
name='interval',
|
||||||
field=models.CharField(max_length=8),
|
field=models.CharField(max_length=8),
|
||||||
),
|
),
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name="usercount",
|
model_name='usercount',
|
||||||
name="property",
|
name='property',
|
||||||
field=models.CharField(max_length=32),
|
field=models.CharField(max_length=32),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,27 +1,30 @@
|
|||||||
from django.db import migrations
|
# -*- coding: utf-8 -*-
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0005_alter_field_size"),
|
('analytics', '0005_alter_field_size'),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="installationcount",
|
name='installationcount',
|
||||||
unique_together={("property", "subgroup", "end_time", "interval")},
|
unique_together=set([('property', 'subgroup', 'end_time', 'interval')]),
|
||||||
),
|
),
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="realmcount",
|
name='realmcount',
|
||||||
unique_together={("realm", "property", "subgroup", "end_time", "interval")},
|
unique_together=set([('realm', 'property', 'subgroup', 'end_time', 'interval')]),
|
||||||
),
|
),
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="streamcount",
|
name='streamcount',
|
||||||
unique_together={("stream", "property", "subgroup", "end_time", "interval")},
|
unique_together=set([('stream', 'property', 'subgroup', 'end_time', 'interval')]),
|
||||||
),
|
),
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="usercount",
|
name='usercount',
|
||||||
unique_together={("user", "property", "subgroup", "end_time", "interval")},
|
unique_together=set([('user', 'property', 'subgroup', 'end_time', 'interval')]),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,44 +1,48 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
# Generated by Django 1.10.4 on 2017-01-16 20:50
|
# Generated by Django 1.10.4 on 2017-01-16 20:50
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
from django.db import migrations
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("analytics", "0006_add_subgroup_to_unique_constraints"),
|
('analytics', '0006_add_subgroup_to_unique_constraints'),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="installationcount",
|
name='installationcount',
|
||||||
unique_together={("property", "subgroup", "end_time")},
|
unique_together=set([('property', 'subgroup', 'end_time')]),
|
||||||
),
|
),
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name="installationcount",
|
model_name='installationcount',
|
||||||
name="interval",
|
name='interval',
|
||||||
),
|
),
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="realmcount",
|
name='realmcount',
|
||||||
unique_together={("realm", "property", "subgroup", "end_time")},
|
unique_together=set([('realm', 'property', 'subgroup', 'end_time')]),
|
||||||
),
|
),
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name="realmcount",
|
model_name='realmcount',
|
||||||
name="interval",
|
name='interval',
|
||||||
),
|
),
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="streamcount",
|
name='streamcount',
|
||||||
unique_together={("stream", "property", "subgroup", "end_time")},
|
unique_together=set([('stream', 'property', 'subgroup', 'end_time')]),
|
||||||
),
|
),
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name="streamcount",
|
model_name='streamcount',
|
||||||
name="interval",
|
name='interval',
|
||||||
),
|
),
|
||||||
migrations.AlterUniqueTogether(
|
migrations.AlterUniqueTogether(
|
||||||
name="usercount",
|
name='usercount',
|
||||||
unique_together={("user", "property", "subgroup", "end_time")},
|
unique_together=set([('user', 'property', 'subgroup', 'end_time')]),
|
||||||
),
|
),
|
||||||
migrations.RemoveField(
|
migrations.RemoveField(
|
||||||
model_name="usercount",
|
model_name='usercount',
|
||||||
name="interval",
|
name='interval',
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,25 +1,28 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
# Generated by Django 1.10.5 on 2017-02-01 22:28
|
# Generated by Django 1.10.5 on 2017-02-01 22:28
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
from django.db import migrations
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
class Migration(migrations.Migration):
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
("zerver", "0050_userprofile_avatar_version"),
|
('zerver', '0050_userprofile_avatar_version'),
|
||||||
("analytics", "0007_remove_interval"),
|
('analytics', '0007_remove_interval'),
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
migrations.AlterIndexTogether(
|
migrations.AlterIndexTogether(
|
||||||
name="realmcount",
|
name='realmcount',
|
||||||
index_together={("property", "end_time")},
|
index_together=set([('property', 'end_time')]),
|
||||||
),
|
),
|
||||||
migrations.AlterIndexTogether(
|
migrations.AlterIndexTogether(
|
||||||
name="streamcount",
|
name='streamcount',
|
||||||
index_together={("property", "realm", "end_time")},
|
index_together=set([('property', 'realm', 'end_time')]),
|
||||||
),
|
),
|
||||||
migrations.AlterIndexTogether(
|
migrations.AlterIndexTogether(
|
||||||
name="usercount",
|
name='usercount',
|
||||||
index_together={("property", "realm", "end_time")},
|
index_together=set([('property', 'realm', 'end_time')]),
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
|
|||||||
analytics/migrations/0009_remove_messages_to_stream_stat.py (removed file)
@@ -1,31 +0,0 @@
from django.db import migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps


def delete_messages_sent_to_stream_stat(
    apps: StateApps, schema_editor: DatabaseSchemaEditor
) -> None:
    UserCount = apps.get_model("analytics", "UserCount")
    StreamCount = apps.get_model("analytics", "StreamCount")
    RealmCount = apps.get_model("analytics", "RealmCount")
    InstallationCount = apps.get_model("analytics", "InstallationCount")
    FillState = apps.get_model("analytics", "FillState")

    property = "messages_sent_to_stream:is_bot"
    UserCount.objects.filter(property=property).delete()
    StreamCount.objects.filter(property=property).delete()
    RealmCount.objects.filter(property=property).delete()
    InstallationCount.objects.filter(property=property).delete()
    FillState.objects.filter(property=property).delete()


class Migration(migrations.Migration):

    dependencies = [
        ("analytics", "0008_add_count_indexes"),
    ]

    operations = [
        migrations.RunPython(delete_messages_sent_to_stream_stat),
    ]
analytics/migrations/0010_clear_messages_sent_values.py (removed file)
@@ -1,29 +0,0 @@
from django.db import migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps


def clear_message_sent_by_message_type_values(
    apps: StateApps, schema_editor: DatabaseSchemaEditor
) -> None:
    UserCount = apps.get_model("analytics", "UserCount")
    StreamCount = apps.get_model("analytics", "StreamCount")
    RealmCount = apps.get_model("analytics", "RealmCount")
    InstallationCount = apps.get_model("analytics", "InstallationCount")
    FillState = apps.get_model("analytics", "FillState")

    property = "messages_sent:message_type:day"
    UserCount.objects.filter(property=property).delete()
    StreamCount.objects.filter(property=property).delete()
    RealmCount.objects.filter(property=property).delete()
    InstallationCount.objects.filter(property=property).delete()
    FillState.objects.filter(property=property).delete()


class Migration(migrations.Migration):

    dependencies = [("analytics", "0009_remove_messages_to_stream_stat")]

    operations = [
        migrations.RunPython(clear_message_sent_by_message_type_values),
    ]
analytics/migrations/0011_clear_analytics_tables.py (removed file)
@@ -1,28 +0,0 @@
from django.db import migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps


def clear_analytics_tables(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    UserCount = apps.get_model("analytics", "UserCount")
    StreamCount = apps.get_model("analytics", "StreamCount")
    RealmCount = apps.get_model("analytics", "RealmCount")
    InstallationCount = apps.get_model("analytics", "InstallationCount")
    FillState = apps.get_model("analytics", "FillState")

    UserCount.objects.all().delete()
    StreamCount.objects.all().delete()
    RealmCount.objects.all().delete()
    InstallationCount.objects.all().delete()
    FillState.objects.all().delete()


class Migration(migrations.Migration):

    dependencies = [
        ("analytics", "0010_clear_messages_sent_values"),
    ]

    operations = [
        migrations.RunPython(clear_analytics_tables),
    ]
analytics/migrations/0012_add_on_delete.py (removed file)
@@ -1,42 +0,0 @@
# Generated by Django 1.11.6 on 2018-01-29 08:14

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("analytics", "0011_clear_analytics_tables"),
    ]

    operations = [
        migrations.AlterField(
            model_name="installationcount",
            name="anomaly",
            field=models.ForeignKey(
                null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
            ),
        ),
        migrations.AlterField(
            model_name="realmcount",
            name="anomaly",
            field=models.ForeignKey(
                null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
            ),
        ),
        migrations.AlterField(
            model_name="streamcount",
            name="anomaly",
            field=models.ForeignKey(
                null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
            ),
        ),
        migrations.AlterField(
            model_name="usercount",
            name="anomaly",
            field=models.ForeignKey(
                null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
            ),
        ),
    ]
analytics/migrations/0013_remove_anomaly.py (removed file)
@@ -1,32 +0,0 @@
# Generated by Django 1.11.18 on 2019-02-02 02:47

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("analytics", "0012_add_on_delete"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="installationcount",
            name="anomaly",
        ),
        migrations.RemoveField(
            model_name="realmcount",
            name="anomaly",
        ),
        migrations.RemoveField(
            model_name="streamcount",
            name="anomaly",
        ),
        migrations.RemoveField(
            model_name="usercount",
            name="anomaly",
        ),
        migrations.DeleteModel(
            name="Anomaly",
        ),
    ]
analytics/migrations/0014_remove_fillstate_last_modified.py (removed file)
@@ -1,17 +0,0 @@
# Generated by Django 1.11.26 on 2020-01-27 04:32

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("analytics", "0013_remove_anomaly"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="fillstate",
            name="last_modified",
        ),
    ]
analytics/migrations/0015_clear_duplicate_counts.py (removed file)
@@ -1,65 +0,0 @@
from django.db import migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models import Count, Sum


def clear_duplicate_counts(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """This is a preparatory migration for our Analytics tables.

    The backstory is that Django's unique_together indexes do not properly
    handle the subgroup=None corner case (allowing duplicate rows that have a
    subgroup of None), which meant that in race conditions, rather than updating
    an existing row for the property/(realm, stream, user)/time with subgroup=None, Django would
    create a duplicate row.

    In the next migration, we'll add a proper constraint to fix this bug, but
    we need to fix any existing problematic rows before we can add that constraint.

    We fix this in an appropriate fashion for each type of CountStat object; mainly
    this means deleting the extra rows, but for LoggingCountStat objects, we need to
    additionally combine the sums.
    """
    count_tables = dict(
        realm=apps.get_model("analytics", "RealmCount"),
        user=apps.get_model("analytics", "UserCount"),
        stream=apps.get_model("analytics", "StreamCount"),
        installation=apps.get_model("analytics", "InstallationCount"),
    )

    for name, count_table in count_tables.items():
        value = [name, "property", "end_time"]
        if name == "installation":
            value = ["property", "end_time"]
        counts = (
            count_table.objects.filter(subgroup=None)
            .values(*value)
            .annotate(Count("id"), Sum("value"))
            .filter(id__count__gt=1)
        )

        for count in counts:
            count.pop("id__count")
            total_value = count.pop("value__sum")
            duplicate_counts = list(count_table.objects.filter(**count))
            first_count = duplicate_counts[0]
            if count["property"] in ["invites_sent::day", "active_users_log:is_bot:day"]:
                # For LoggingCountStat objects, the right fix is to combine the totals;
                # for other CountStat objects, we expect the duplicates to have the same value.
                # And so all we need to do is delete them.
                first_count.value = total_value
                first_count.save()
            to_cleanup = duplicate_counts[1:]
            for duplicate_count in to_cleanup:
                duplicate_count.delete()


class Migration(migrations.Migration):

    dependencies = [
        ("analytics", "0014_remove_fillstate_last_modified"),
    ]

    operations = [
        migrations.RunPython(clear_duplicate_counts, reverse_code=migrations.RunPython.noop),
    ]
analytics migration following 0015_clear_duplicate_counts (removed file)
@@ -1,93 +0,0 @@
# Generated by Django 2.2.10 on 2020-02-29 19:40

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("analytics", "0015_clear_duplicate_counts"),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name="installationcount",
            unique_together=set(),
        ),
        migrations.AlterUniqueTogether(
            name="realmcount",
            unique_together=set(),
        ),
        migrations.AlterUniqueTogether(
            name="streamcount",
            unique_together=set(),
        ),
        migrations.AlterUniqueTogether(
            name="usercount",
            unique_together=set(),
        ),
        migrations.AddConstraint(
            model_name="installationcount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=False),
                fields=("property", "subgroup", "end_time"),
                name="unique_installation_count",
            ),
        ),
        migrations.AddConstraint(
            model_name="installationcount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=True),
                fields=("property", "end_time"),
                name="unique_installation_count_null_subgroup",
            ),
        ),
        migrations.AddConstraint(
            model_name="realmcount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=False),
                fields=("realm", "property", "subgroup", "end_time"),
                name="unique_realm_count",
            ),
        ),
        migrations.AddConstraint(
            model_name="realmcount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=True),
                fields=("realm", "property", "end_time"),
                name="unique_realm_count_null_subgroup",
            ),
        ),
        migrations.AddConstraint(
            model_name="streamcount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=False),
                fields=("stream", "property", "subgroup", "end_time"),
                name="unique_stream_count",
            ),
        ),
        migrations.AddConstraint(
            model_name="streamcount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=True),
                fields=("stream", "property", "end_time"),
                name="unique_stream_count_null_subgroup",
            ),
        ),
        migrations.AddConstraint(
            model_name="usercount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=False),
                fields=("user", "property", "subgroup", "end_time"),
                name="unique_user_count",
            ),
        ),
        migrations.AddConstraint(
            model_name="usercount",
            constraint=models.UniqueConstraint(
                condition=models.Q(subgroup__isnull=True),
                fields=("user", "property", "end_time"),
                name="unique_user_count_null_subgroup",
            ),
        ),
    ]
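(Illustrative sketch, not part of the diff.) The docstring in 0015_clear_duplicate_counts explains that a plain unique_together index cannot reject duplicate rows once subgroup is NULL, and the migration above replaces it with a pair of conditional UniqueConstraint objects (one for non-NULL subgroups, one for the NULL case). As a minimal sketch of the same pattern on a standalone model, with made-up model, app, and constraint names:

from django.db import models
from django.db.models import Q, UniqueConstraint


class ExampleCount(models.Model):
    # Same shape as the BaseCount fields used in the analytics models.
    property = models.CharField(max_length=32)
    subgroup = models.CharField(max_length=16, null=True)
    end_time = models.DateTimeField()
    value = models.BigIntegerField()

    class Meta:
        constraints = [
            # Enforced only when subgroup is present.
            UniqueConstraint(
                fields=["property", "subgroup", "end_time"],
                condition=Q(subgroup__isnull=False),
                name="example_unique_with_subgroup",
            ),
            # Separate constraint covering the subgroup=NULL rows, which a
            # unique_together index would otherwise allow to duplicate.
            UniqueConstraint(
                fields=["property", "end_time"],
                condition=Q(subgroup__isnull=True),
                name="example_unique_null_subgroup",
            ),
        ]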
@@ -1,140 +1,151 @@
-import datetime
-from typing import Optional
-
 from django.db import models
-from django.db.models import Q, UniqueConstraint
+from django.utils import timezone

-from zerver.lib.timestamp import floor_to_day
-from zerver.models import Realm, Stream, UserProfile
+from zerver.models import Realm, UserProfile, Stream, Recipient
+from zerver.lib.str_utils import ModelReprMixin
+from zerver.lib.timestamp import datetime_to_UTC, floor_to_day

+import datetime

-class FillState(models.Model):
-    property: str = models.CharField(max_length=40, unique=True)
-    end_time: datetime.datetime = models.DateTimeField()
+from typing import Optional, Tuple, Union, Dict, Any, Text

+class FillState(ModelReprMixin, models.Model):
+    property = models.CharField(max_length=40, unique=True) # type: Text
+    end_time = models.DateTimeField() # type: datetime.datetime

     # Valid states are {DONE, STARTED}
     DONE = 1
     STARTED = 2
-    state: int = models.PositiveSmallIntegerField()
+    state = models.PositiveSmallIntegerField() # type: int

-    def __str__(self) -> str:
-        return f"<FillState: {self.property} {self.end_time} {self.state}>"
+    last_modified = models.DateTimeField(auto_now=True) # type: datetime.datetime

+    def __unicode__(self):
+        # type: () -> Text
+        return u"<FillState: %s %s %s>" % (self.property, self.end_time, self.state)

 # The earliest/starting end_time in FillState
 # We assume there is at least one realm
-def installation_epoch() -> datetime.datetime:
-    earliest_realm_creation = Realm.objects.aggregate(models.Min("date_created"))[
-        "date_created__min"
-    ]
-    return floor_to_day(earliest_realm_creation)
+def installation_epoch():
+    # type: () -> datetime.datetime
+    earliest_realm_creation = Realm.objects.aggregate(models.Min('date_created'))['date_created__min']
+    return floor_to_day(datetime_to_UTC(earliest_realm_creation))

+# would only ever make entries here by hand
+class Anomaly(ModelReprMixin, models.Model):
+    info = models.CharField(max_length=1000) # type: Text

-class BaseCount(models.Model):
+    def __unicode__(self):
+        # type: () -> Text
+        return u"<Anomaly: %s... %s>" % (self.info, self.id)

+class BaseCount(ModelReprMixin, models.Model):
     # Note: When inheriting from BaseCount, you may want to rearrange
     # the order of the columns in the migration to make sure they
     # match how you'd like the table to be arranged.
-    property: str = models.CharField(max_length=32)
-    subgroup: Optional[str] = models.CharField(max_length=16, null=True)
-    end_time: datetime.datetime = models.DateTimeField()
-    value: int = models.BigIntegerField()
+    property = models.CharField(max_length=32) # type: Text
+    subgroup = models.CharField(max_length=16, null=True) # type: Text
+    end_time = models.DateTimeField() # type: datetime.datetime
+    value = models.BigIntegerField() # type: int
+    anomaly = models.ForeignKey(Anomaly, null=True) # type: Optional[Anomaly]

-    class Meta:
+    class Meta(object):
         abstract = True

+    @staticmethod
+    def extended_id():
+        # type: () -> Tuple[str, ...]
+        raise NotImplementedError
+
+    @staticmethod
+    def key_model():
+        # type: () -> models.Model
+        raise NotImplementedError

 class InstallationCount(BaseCount):
-    class Meta:
-        # Handles invalid duplicate InstallationCount data
-        constraints = [
-            UniqueConstraint(
-                fields=["property", "subgroup", "end_time"],
-                condition=Q(subgroup__isnull=False),
-                name="unique_installation_count",
-            ),
-            UniqueConstraint(
-                fields=["property", "end_time"],
-                condition=Q(subgroup__isnull=True),
-                name="unique_installation_count_null_subgroup",
-            ),
-        ]

-    def __str__(self) -> str:
-        return f"<InstallationCount: {self.property} {self.subgroup} {self.value}>"
+    class Meta(object):
+        unique_together = ("property", "subgroup", "end_time")

+    @staticmethod
+    def extended_id():
+        # type: () -> Tuple[str, ...]
+        return ()
+
+    @staticmethod
+    def key_model():
+        # type: () -> models.Model
+        return None
+
+    def __unicode__(self):
+        # type: () -> Text
+        return u"<InstallationCount: %s %s %s>" % (self.property, self.subgroup, self.value)

 class RealmCount(BaseCount):
-    realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
+    realm = models.ForeignKey(Realm)

-    class Meta:
-        # Handles invalid duplicate RealmCount data
-        constraints = [
-            UniqueConstraint(
-                fields=["realm", "property", "subgroup", "end_time"],
-                condition=Q(subgroup__isnull=False),
-                name="unique_realm_count",
-            ),
-            UniqueConstraint(
-                fields=["realm", "property", "end_time"],
-                condition=Q(subgroup__isnull=True),
-                name="unique_realm_count_null_subgroup",
-            ),
-        ]
+    class Meta(object):
+        unique_together = ("realm", "property", "subgroup", "end_time")
         index_together = ["property", "end_time"]

-    def __str__(self) -> str:
-        return f"<RealmCount: {self.realm} {self.property} {self.subgroup} {self.value}>"
+    @staticmethod
+    def extended_id():
+        # type: () -> Tuple[str, ...]
+        return ('realm_id',)
+
+    @staticmethod
+    def key_model():
+        # type: () -> models.Model
+        return Realm
+
+    def __unicode__(self):
+        # type: () -> Text
+        return u"<RealmCount: %s %s %s %s>" % (self.realm, self.property, self.subgroup, self.value)

 class UserCount(BaseCount):
-    user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
-    realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
+    user = models.ForeignKey(UserProfile)
+    realm = models.ForeignKey(Realm)

-    class Meta:
-        # Handles invalid duplicate UserCount data
-        constraints = [
-            UniqueConstraint(
-                fields=["user", "property", "subgroup", "end_time"],
-                condition=Q(subgroup__isnull=False),
-                name="unique_user_count",
-            ),
-            UniqueConstraint(
-                fields=["user", "property", "end_time"],
-                condition=Q(subgroup__isnull=True),
-                name="unique_user_count_null_subgroup",
-            ),
-        ]
+    class Meta(object):
+        unique_together = ("user", "property", "subgroup", "end_time")
         # This index dramatically improves the performance of
         # aggregating from users to realms
         index_together = ["property", "realm", "end_time"]

-    def __str__(self) -> str:
-        return f"<UserCount: {self.user} {self.property} {self.subgroup} {self.value}>"
+    @staticmethod
+    def extended_id():
+        # type: () -> Tuple[str, ...]
+        return ('user_id', 'realm_id')
+
+    @staticmethod
+    def key_model():
+        # type: () -> models.Model
+        return UserProfile
+
+    def __unicode__(self):
+        # type: () -> Text
+        return u"<UserCount: %s %s %s %s>" % (self.user, self.property, self.subgroup, self.value)

 class StreamCount(BaseCount):
-    stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
-    realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
+    stream = models.ForeignKey(Stream)
+    realm = models.ForeignKey(Realm)

-    class Meta:
-        # Handles invalid duplicate StreamCount data
-        constraints = [
-            UniqueConstraint(
-                fields=["stream", "property", "subgroup", "end_time"],
-                condition=Q(subgroup__isnull=False),
-                name="unique_stream_count",
-            ),
-            UniqueConstraint(
-                fields=["stream", "property", "end_time"],
-                condition=Q(subgroup__isnull=True),
-                name="unique_stream_count_null_subgroup",
-            ),
-        ]
+    class Meta(object):
+        unique_together = ("stream", "property", "subgroup", "end_time")
         # This index dramatically improves the performance of
         # aggregating from streams to realms
         index_together = ["property", "realm", "end_time"]

-    def __str__(self) -> str:
-        return (
-            f"<StreamCount: {self.stream} {self.property} {self.subgroup} {self.value} {self.id}>"
-        )
+    @staticmethod
+    def extended_id():
+        # type: () -> Tuple[str, ...]
+        return ('stream_id', 'realm_id')
+
+    @staticmethod
+    def key_model():
+        # type: () -> models.Model
+        return Stream
+
+    def __unicode__(self):
+        # type: () -> Text
+        return u"<StreamCount: %s %s %s %s %s>" % (self.stream, self.property, self.subgroup, self.value, self.id)
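The substantive change in the hunk above is that the 4.0 models declare a pair of conditional UniqueConstraints per count table where 1.5.x relied on unique_together. SQL unique indexes treat NULL values as distinct, so unique_together cannot prevent duplicate rows whose subgroup is NULL, which is what the "Handles invalid duplicate ... data" comments refer to. A minimal sketch of the pattern, using a hypothetical ExampleCount model rather than the real Zulip ones (assumes Django 2.2+ for UniqueConstraint):

```
from django.db import models
from django.db.models import Q, UniqueConstraint


class ExampleCount(models.Model):  # hypothetical model, for illustration only
    property = models.CharField(max_length=32)
    subgroup = models.CharField(max_length=16, null=True)
    end_time = models.DateTimeField()
    value = models.BigIntegerField()

    class Meta:
        constraints = [
            # Enforced only for rows that actually have a subgroup.
            UniqueConstraint(
                fields=["property", "subgroup", "end_time"],
                condition=Q(subgroup__isnull=False),
                name="example_unique_count",
            ),
            # Enforced for rows with subgroup NULL, which a plain
            # unique_together index would not deduplicate.
            UniqueConstraint(
                fields=["property", "end_time"],
                condition=Q(subgroup__isnull=True),
                name="example_unique_count_null_subgroup",
            ),
        ]
```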
@@ -1,40 +0,0 @@
from analytics.lib.counts import CountStat
from analytics.lib.fixtures import generate_time_series_data
from zerver.lib.test_classes import ZulipTestCase


# A very light test suite; the code being tested is not run in production.
class TestFixtures(ZulipTestCase):
    def test_deterministic_settings(self) -> None:
        # test basic business_hour / non_business_hour calculation
        # test we get an array of the right length with frequency=CountStat.DAY
        data = generate_time_series_data(
            days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0
        )
        self.assertEqual(data, [400, 400, 400, 400, 400, 360, 360])

        data = generate_time_series_data(
            days=1,
            business_hours_base=2000,
            non_business_hours_base=1500,
            growth=2,
            spikiness=0,
            frequency=CountStat.HOUR,
        )
        # test we get an array of the right length with frequency=CountStat.HOUR
        self.assertEqual(len(data), 24)
        # test that growth doesn't affect the first data point
        self.assertEqual(data[0], 2000)
        # test that the last data point is growth times what it otherwise would be
        self.assertEqual(data[-1], 1500 * 2)

        # test autocorrelation == 1, since that's the easiest value to test
        data = generate_time_series_data(
            days=1,
            business_hours_base=2000,
            non_business_hours_base=2000,
            autocorrelation=1,
            frequency=CountStat.HOUR,
        )
        self.assertEqual(data[0], data[1])
        self.assertEqual(data[0], data[-1])
@@ -1,38 +1,20 @@
-from django.conf.urls import include
-from django.urls import path
+from django.conf.urls import url, include
+from zerver.lib.rest import rest_dispatch

-from analytics.views import (
-    get_activity,
-    get_chart_data,
-    get_chart_data_for_installation,
-    get_chart_data_for_realm,
-    get_chart_data_for_remote_installation,
-    get_chart_data_for_remote_realm,
-    get_realm_activity,
-    get_user_activity,
-    stats,
-    stats_for_installation,
-    stats_for_realm,
-    stats_for_remote_installation,
-    stats_for_remote_realm,
-    support,
-)
-from zerver.lib.rest import rest_path
+import analytics.views

 i18n_urlpatterns = [
     # Server admin (user_profile.is_staff) visible stats pages
-    path("activity", get_activity),
-    path("activity/support", support, name="support"),
-    path("realm_activity/<realm_str>/", get_realm_activity),
-    path("user_activity/<email>/", get_user_activity),
-    path("stats/realm/<realm_str>/", stats_for_realm),
-    path("stats/installation", stats_for_installation),
-    path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation),
-    path(
-        "stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/", stats_for_remote_realm
-    ),
+    url(r'^activity$', analytics.views.get_activity,
+        name='analytics.views.get_activity'),
+    url(r'^realm_activity/(?P<realm_str>[\S]+)/$', analytics.views.get_realm_activity,
+        name='analytics.views.get_realm_activity'),
+    url(r'^user_activity/(?P<email>[\S]+)/$', analytics.views.get_user_activity,
+        name='analytics.views.get_user_activity'),
     # User-visible stats page
-    path("stats", stats, name="stats"),
+    url(r'^stats$', analytics.views.stats,
+        name='analytics.views.stats'),
 ]

 # These endpoints are a part of the API (V1), which uses:
@@ -45,22 +27,13 @@ i18n_urlpatterns = [
 # All of these paths are accessed by either a /json or /api prefix
 v1_api_and_json_patterns = [
     # get data for the graphs at /stats
-    rest_path("analytics/chart_data", GET=get_chart_data),
-    rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm),
-    rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation),
-    rest_path(
-        "analytics/chart_data/remote/<int:remote_server_id>/installation",
-        GET=get_chart_data_for_remote_installation,
-    ),
-    rest_path(
-        "analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>",
-        GET=get_chart_data_for_remote_realm,
-    ),
+    url(r'^analytics/chart_data$', rest_dispatch,
+        {'GET': 'analytics.views.get_chart_data'}),
 ]

 i18n_urlpatterns += [
-    path("api/v1/", include(v1_api_and_json_patterns)),
-    path("json/", include(v1_api_and_json_patterns)),
+    url(r'^api/v1/', include(v1_api_and_json_patterns)),
+    url(r'^json/', include(v1_api_and_json_patterns)),
 ]

 urlpatterns = i18n_urlpatterns
1731 analytics/views.py
File diff suppressed because it is too large
11 api/MANIFEST.in Normal file
@@ -0,0 +1,11 @@
recursive-include integrations *
include README.md
include examples/zuliprc
include examples/send-message
include examples/subscribe
include examples/get-public-streams
include examples/unsubscribe
include examples/list-members
include examples/list-subscriptions
include examples/print-messages
include examples/recent-messages
172 api/README.md Normal file
@@ -0,0 +1,172 @@
#### Dependencies

The [Zulip API](https://zulipchat.com/api) Python bindings require the
following Python libraries:

* requests (version >= 0.12.1)
* simplejson
* six
* typing (version >= 3.5.2.2)

#### Installing

This package uses distutils, so you can just run:

    python setup.py install

#### Using the API

For now, the only fully supported API operation is sending a message.
The other API queries work, but are under active development, so
please make sure we know you're using them so that we can notify you
as we make any changes to them.

The easiest way to use these API bindings is to base your tools off
of the example tools under examples/ in this distribution.

If you place your API key in the config file `~/.zuliprc` the Python
API bindings will automatically read it in. The format of the config
file is as follows:

    [api]
    key=<api key from the web interface>
    email=<your email address>
    site=<your Zulip server's URI>
    insecure=<true or false, true means do not verify the server certificate>
    cert_bundle=<path to a file containing CA or server certificates to trust>

If omitted, these settings have the following defaults:

    insecure=false
    cert_bundle=<the default CA bundle trusted by Python>

Alternatively, you may explicitly use "--user", "--api-key", and
`--site` in our examples, which is especially useful when testing. If
you are running several bots which share a home directory, we
recommend using `--config` to specify the path to the `zuliprc` file
for a specific bot.

The command line equivalents for other configuration options are:

    --insecure
    --cert-bundle=<file>

You can obtain your Zulip API key, create bots, and manage bots all
from your Zulip settings page; with current Zulip there's also a
button to download a `zuliprc` file for your account/server pair.

A typical simple bot sending API messages will look as follows:

At the top of the file:

    # Make sure the Zulip API distribution's root directory is in sys.path, then:
    import zulip
    zulip_client = zulip.Client(email="your-bot@example.com", client="MyTestClient/0.1")

When you want to send a message:

    message = {
        "type": "stream",
        "to": ["support"],
        "subject": "your subject",
        "content": "your content",
    }
    zulip_client.send_message(message)

If you are parsing arguments, you may find it useful to use Zulip's
option group; see any of our API examples for details on how to do this.

Additional examples:

    client.send_message({'type': 'stream', 'content': 'Zulip rules!',
                         'subject': 'feedback', 'to': ['support']})
    client.send_message({'type': 'private', 'content': 'Zulip rules!',
                         'to': ['user1@example.com', 'user2@example.com']})

send_message() returns a dict guaranteed to contain the following
keys: msg, result. For successful calls, result will be "success" and
msg will be the empty string. On error, result will be "error" and
msg will describe what went wrong.
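For instance, a calling script might branch on `result` before trusting that the message went out. This is an illustrative sketch, not part of the original README; it assumes credentials come from `~/.zuliprc` as described above, and the stream name and text are placeholders:

```
import zulip

# Reads email/api_key/site from ~/.zuliprc (see the config section above).
client = zulip.Client(client="MyTestClient/0.1")

response = client.send_message({
    "type": "stream",
    "to": ["support"],          # placeholder stream
    "subject": "your subject",
    "content": "your content",
})

if response["result"] == "success":
    print("Message sent.")
else:
    # On error, `msg` explains what went wrong.
    print("Failed to send: %s" % response["msg"])
```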
#### Examples

The API bindings package comes with several nice example scripts that
show how to use the APIs; they are installed as part of the API
bindings bundle.

#### Logging

The Zulip API comes with a ZulipStream class which can be used with the
logging module:

```
import zulip
import logging
stream = zulip.ZulipStream(type="stream", to=["support"], subject="your subject")
logger = logging.getLogger("your_logger")
logger.addHandler(logging.StreamHandler(stream))
logger.setLevel(logging.DEBUG)
logger.info("This is an INFO test.")
logger.debug("This is a DEBUG test.")
logger.warn("This is a WARN test.")
logger.error("This is a ERROR test.")
```

#### Sending messages

You can use the included `zulip-send` script to send messages via the
API directly from existing scripts.

    zulip-send hamlet@example.com cordelia@example.com -m \
        "Conscience doth make cowards of us all."

Alternatively, if you don't want to use your ~/.zuliprc file:

    zulip-send --user shakespeare-bot@example.com \
        --api-key a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 \
        --site https://zulip.example.com \
        hamlet@example.com cordelia@example.com -m \
        "Conscience doth make cowards of us all."

#### Working with an untrusted server certificate

If your server has either a self-signed certificate, or a certificate signed
by a CA that you don't wish to globally trust then by default the API will
fail with an SSL verification error.

You can add `insecure=true` to your .zuliprc file.

    [api]
    site=https://zulip.example.com
    insecure=true

This disables verification of the server certificate, so connections are
encrypted but unauthenticated. This is not secure, but may be good enough
for a development environment.

You can explicitly trust the server certificate using `cert_bundle=<filename>`
in your .zuliprc file.

    [api]
    site=https://zulip.example.com
    cert_bundle=/home/bots/certs/zulip.example.com.crt

You can also explicitly trust a different set of Certificate Authorities from
the default bundle that is trusted by Python. For example to trust a company
internal CA.

    [api]
    site=https://zulip.example.com
    cert_bundle=/home/bots/certs/example.com.ca-bundle

Save the server certificate (or the CA certificate) in its own file,
converting to PEM format first if necessary.
Verify that the certificate you have saved is the same as the one on the
server.

The `cert_bundle` option trusts the server / CA certificate only for
interaction with the zulip site, and is relatively secure.

Note that a certificate bundle is merely one or more certificates combined
into a single file.
128 api/bin/zulip-send Executable file
@@ -0,0 +1,128 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# zulip-send -- Sends a message to the specified recipients.

# Copyright © 2012 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

import sys
import os
import optparse
import logging

from typing import Any, Dict, List, Optional

sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

import zulip

logging.basicConfig()

log = logging.getLogger('zulip-send')

def do_send_message(client, message_data):
    # type: (zulip.Client, Dict[str, Any]) -> bool
    '''Sends a message and optionally prints status about the same.'''

    if message_data['type'] == 'stream':
        log.info('Sending message to stream "%s", subject "%s"... ' %
                 (message_data['to'], message_data['subject']))
    else:
        log.info('Sending message to %s... ' % message_data['to'])
    response = client.send_message(message_data)
    if response['result'] == 'success':
        log.info('Message sent.')
        return True
    else:
        log.error(response['msg'])
        return False

def main(argv=None):
    # type: (Optional[List[str]]) -> int
    if argv is None:
        argv = sys.argv

    usage = """%prog [options] [recipient...]

Sends a message specified recipients.

Examples: %prog --stream denmark --subject castle -m "Something is rotten in the state of Denmark."
          %prog hamlet@example.com cordelia@example.com -m "Conscience doth make cowards of us all."

These examples assume you have a proper '~/.zuliprc'. You may also set your credentials with the
'--user' and '--api-key' arguments.
"""

    parser = optparse.OptionParser(usage=usage)

    # Grab parser options from the API common set
    parser.add_option_group(zulip.generate_option_group(parser))

    parser.add_option('-m', '--message',
                      help='Specifies the message to send, prevents interactive prompting.')

    group = optparse.OptionGroup(parser, 'Stream parameters')
    group.add_option('-s', '--stream',
                     dest='stream',
                     action='store',
                     help='Allows the user to specify a stream for the message.')
    group.add_option('-S', '--subject',
                     dest='subject',
                     action='store',
                     help='Allows the user to specify a subject for the message.')
    parser.add_option_group(group)

    (options, recipients) = parser.parse_args(argv[1:])

    if options.verbose:
        logging.getLogger().setLevel(logging.INFO)
    # Sanity check user data
    if len(recipients) != 0 and (options.stream or options.subject):
        parser.error('You cannot specify both a username and a stream/subject.')
    if len(recipients) == 0 and (bool(options.stream) != bool(options.subject)):
        parser.error('Stream messages must have a subject')
    if len(recipients) == 0 and not (options.stream and options.subject):
        parser.error('You must specify a stream/subject or at least one recipient.')

    client = zulip.init_from_options(options)

    if not options.message:
        options.message = sys.stdin.read()

    if options.stream:
        message_data = {
            'type': 'stream',
            'content': options.message,
            'subject': options.subject,
            'to': options.stream,
        }
    else:
        message_data = {
            'type': 'private',
            'content': options.message,
            'to': recipients,
        }

    if not do_send_message(client, message_data):
        return 1


if __name__ == '__main__':
    sys.exit(main())
55 api/examples/create-user Executable file
@@ -0,0 +1,55 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright © 2012-2014 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import print_function
import sys
from os import path
import optparse

usage = """create-user --new-email=<email address> --new-password=<password> --new-full-name=<full name> --new-short-name=<short name> [options]

Create a user. You must be a realm admin to use this API, and the user
will be created in your realm.

Example: create-user --site=http://localhost:9991 --user=rwbarton@example.com --new-email=jarthur@example.com --new-password=random17 --new-full-name 'J. Arthur Random' --new-short-name='jarthur'
"""

sys.path.append(path.join(path.dirname(__file__), '..'))
import zulip

parser = optparse.OptionParser(usage=usage)
parser.add_option_group(zulip.generate_option_group(parser))
parser.add_option('--new-email')
parser.add_option('--new-password')
parser.add_option('--new-full-name')
parser.add_option('--new-short-name')
(options, args) = parser.parse_args()

client = zulip.init_from_options(options)

print(client.create_user({
    'email': options.new_email,
    'password': options.new_password,
    'full_name': options.new_full_name,
    'short_name': options.new_short_name
}))
57 api/examples/edit-message Executable file
@@ -0,0 +1,57 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright © 2012 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import print_function
import sys
import os
import optparse

usage = """edit-message [options] --message=<msg_id> --subject=<new subject> --content=<new content> --user=<sender's email address> --api-key=<sender's api key>

Edits a message that you sent

Example: edit-message --message-id="348135" --subject="my subject" --content="test message" --user=othello-bot@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5

You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
"""

sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
import zulip

parser = optparse.OptionParser(usage=usage)
parser.add_option('--message-id', default="")
parser.add_option('--subject', default="")
parser.add_option('--content', default="")
parser.add_option_group(zulip.generate_option_group(parser))
(options, args) = parser.parse_args()

client = zulip.init_from_options(options)

message_data = {
    "message_id": options.message_id,
}
if options.subject != "":
    message_data["subject"] = options.subject
if options.content != "":
    message_data["content"] = options.content
print(client.update_message(message_data))
47 api/examples/get-public-streams Executable file
@@ -0,0 +1,47 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright © 2012 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import print_function
import sys
import os
import optparse

usage = """get-public-streams --user=<bot's email address> --api-key=<bot's api key> [options]

Prints out all the public streams in the realm.

Example: get-public-streams --user=othello-bot@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5

You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
"""

sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import zulip

parser = optparse.OptionParser(usage=usage)
parser.add_option_group(zulip.generate_option_group(parser))
(options, args) = parser.parse_args()

client = zulip.init_from_options(options)

print(client.get_streams(include_public=True, include_subscribed=False))
46 api/examples/list-members Executable file
@@ -0,0 +1,46 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright © 2014 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import print_function
import sys
import os
import optparse

usage = """list-members --user=<bot's email address> --api-key=<bot's api key> [options]

List the names and e-mail addresses of the people in your realm.

You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
"""

sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import zulip

parser = optparse.OptionParser(usage=usage)
parser.add_option_group(zulip.generate_option_group(parser))
(options, args) = parser.parse_args()

client = zulip.init_from_options(options)

for user in client.get_members()["members"]:
    print(user["full_name"], user["email"])
46 api/examples/list-subscriptions Executable file
@@ -0,0 +1,46 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright © 2012 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import print_function
import sys
import os
import optparse

usage = """list-subscriptions --user=<bot's email address> --api-key=<bot's api key> [options]

Prints out a list of the user's subscriptions.

Example: list-subscriptions --user=username@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5

You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
"""
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import zulip

parser = optparse.OptionParser(usage=usage)
parser.add_option_group(zulip.generate_option_group(parser))
(options, args) = parser.parse_args()

client = zulip.init_from_options(options)

print(client.list_subscriptions())
55 api/examples/print-events Executable file
@@ -0,0 +1,55 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright © 2012 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import print_function
import sys
import os
import optparse

from typing import Any, Dict

usage = """print-events --user=<bot's email address> --api-key=<bot's api key> [options]

Prints out certain events received by the indicated bot or user matching the filter below.

Example: print-events --user=username@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5

You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
"""
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import zulip

parser = optparse.OptionParser(usage=usage)
parser.add_option_group(zulip.generate_option_group(parser))
(options, args) = parser.parse_args()

client = zulip.init_from_options(options)

def print_event(event):
    # type: (Dict[str, Any]) -> None
    print(event)

# This is a blocking call, and will continuously poll for new events
# Note also the filter here is messages to the stream Denmark; if you
# don't specify event_types it'll print all events.
client.call_on_each_event(print_event, event_types=["message"], narrow=[["stream", "Denmark"]])
53 api/examples/print-messages Executable file
@@ -0,0 +1,53 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright © 2012 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import print_function
import sys
import os
import optparse

from typing import Any, Dict

usage = """print-messages --user=<bot's email address> --api-key=<bot's api key> [options]

Prints out each message received by the indicated bot or user.

Example: print-messages --user=username@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5

You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
"""
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import zulip

parser = optparse.OptionParser(usage=usage)
parser.add_option_group(zulip.generate_option_group(parser))
(options, args) = parser.parse_args()

client = zulip.init_from_options(options)

def print_message(message):
    # type: (Dict[str, Any]) -> None
    print(message)

# This is a blocking call, and will continuously poll for new messages
client.call_on_each_message(print_message)
66 api/examples/recent-messages Executable file
@@ -0,0 +1,66 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright © 2012 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import print_function
import sys
import os
import json
import optparse

usage = """recent-messages [options] --count=<no. of previous messages> --user=<sender's email address> --api-key=<sender's api key>

Prints out last count messages received by the indicated bot or user

Example: recent-messages --count=101 --user=username@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5

You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
"""
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
import zulip

parser = optparse.OptionParser(usage=usage)
parser.add_option('--count', default=100)
parser.add_option_group(zulip.generate_option_group(parser))
(options, args) = parser.parse_args()

client = zulip.init_from_options(options)

request = {
    'narrow': [["stream", "Denmark"]],
    'num_before': options.count,
    'num_after': 0,
    'anchor': 1000000000,
    'apply_markdown': False
}

old_messages = client.call_endpoint(
    url='messages',
    method='GET',
    request=request,
)

if 'messages' in old_messages:
    for message in old_messages['messages']:
        print(json.dumps(message, indent=4))
else:
    print([])
58 api/examples/send-message Executable file
@@ -0,0 +1,58 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright © 2012 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import print_function
import sys
import os
import optparse
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import zulip

usage = """send-message --user=<bot's email address> --api-key=<bot's api key> [options] <recipients>

Sends a test message to the specified recipients.

Example: send-message --user=your-bot@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --type=stream commits --subject="my subject" --message="test message"
Example: send-message --user=your-bot@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 user1@example.com user2@example.com

You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
"""
parser = optparse.OptionParser(usage=usage)
parser.add_option('--subject', default="test")
parser.add_option('--message', default="test message")
parser.add_option('--type', default='private')
parser.add_option_group(zulip.generate_option_group(parser))
(options, args) = parser.parse_args()

if len(args) == 0:
    parser.error("You must specify recipients")

client = zulip.init_from_options(options)

message_data = {
    "type": options.type,
    "content": options.message,
    "subject": options.subject,
    "to": args,
}
print(client.send_message(message_data))
53 api/examples/subscribe Executable file
@@ -0,0 +1,53 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright © 2012 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import print_function
import sys
import os
import optparse

usage = """subscribe --user=<bot's email address> --api-key=<bot's api key> [options] --streams=<streams>

Ensures the user is subscribed to the listed streams.

Examples: subscribe --user=username@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --streams=foo
          subscribe --user=username@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --streams='foo bar'

You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
"""
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import zulip

parser = optparse.OptionParser(usage=usage)
parser.add_option_group(zulip.generate_option_group(parser))
parser.add_option('--streams', default='')
(options, args) = parser.parse_args()

client = zulip.init_from_options(options)

if options.streams == "":
    print("Usage:", parser.usage, file=sys.stderr)
    sys.exit(1)

print(client.add_subscriptions([{"name": stream_name} for stream_name in
                                options.streams.split()]))
52 api/examples/unsubscribe Executable file
@@ -0,0 +1,52 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright © 2012 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import print_function
import sys
import os
import optparse

usage = """unsubscribe --user=<bot's email address> --api-key=<bot's api key> [options] --streams=<streams>

Ensures the user is not subscribed to the listed streams.

Examples: unsubscribe --user=username@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --streams=foo
          unsubscribe --user=username@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --streams='foo bar'

You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
"""
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import zulip

parser = optparse.OptionParser(usage=usage)
parser.add_option_group(zulip.generate_option_group(parser))
parser.add_option('--streams', default='')
(options, args) = parser.parse_args()

client = zulip.init_from_options(options)

if options.streams == "":
    print("Usage:", parser.usage, file=sys.stderr)
    sys.exit(1)

print(client.remove_subscriptions(options.streams.split()))
66
api/examples/upload-file
Executable file
66
api/examples/upload-file
Executable file
@@ -0,0 +1,66 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright © 2012-2017 Zulip, Inc.
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
# of this software and associated documentation files (the "Software"), to deal
|
||||||
|
# in the Software without restriction, including without limitation the rights
|
||||||
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
# copies of the Software, and to permit persons to whom the Software is
|
||||||
|
# furnished to do so, subject to the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be included in
|
||||||
|
# all copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
# THE SOFTWARE.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import importlib
|
||||||
|
import optparse
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from six.moves import StringIO as _StringIO
|
||||||
|
sys.path.insert(0, './api')
|
||||||
|
from typing import IO
|
||||||
|
import zulip
|
||||||
|
|
||||||
|
class StringIO(_StringIO):
|
||||||
|
name = '' # https://github.com/python/typeshed/issues/598
|
||||||
|
|
||||||
|
usage = """upload-file --user=<user's email address> --api-key=<user's api key> [options]
|
||||||
|
|
||||||
|
Upload a file, and print the corresponding URI.
|
||||||
|
|
||||||
|
Example: upload-file --user=cordelia@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --file-path=cat.png
|
||||||
|
|
||||||
|
You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
|
||||||
|
If no --file-path is specified, a placeholder text file will be used instead.
|
||||||
|
"""
|
||||||
|
|
||||||
|
parser = optparse.OptionParser(usage=usage)
|
||||||
|
parser.add_option('--file-path')
|
||||||
|
parser.add_option_group(zulip.generate_option_group(parser))
|
||||||
|
(options, args) = parser.parse_args()
|
||||||
|
|
||||||
|
client = zulip.init_from_options(options)
|
||||||
|
|
||||||
|
file = None # type: IO
|
||||||
|
if options.file_path:
|
||||||
|
file = open(options.file_path, 'rb')
|
||||||
|
else:
|
||||||
|
file = StringIO('This is a test file.')
|
||||||
|
file.name = 'test.txt'
|
||||||
|
|
||||||
|
response = client.upload_file(file)
|
||||||
|
|
||||||
|
try:
|
||||||
|
print('File URI: {}'.format(response['uri']))
|
||||||
|
except KeyError:
|
||||||
|
print('Error! API response was: {}'.format(response))
|
||||||
4
api/examples/zuliprc
Normal file
4
api/examples/zuliprc
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
; Save this file as ~/.zuliprc
|
||||||
|
[api]
|
||||||
|
key=<your bot's api key from the web interface>
|
||||||
|
email=<your bot's email address>
|
||||||
56
api/integrations/asana/zulip_asana_config.py
Normal file
56
api/integrations/asana/zulip_asana_config.py
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright © 2014 Zulip, Inc.
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
# of this software and associated documentation files (the "Software"), to deal
|
||||||
|
# in the Software without restriction, including without limitation the rights
|
||||||
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
# copies of the Software, and to permit persons to whom the Software is
|
||||||
|
# furnished to do so, subject to the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be included in
|
||||||
|
# all copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
# THE SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
### REQUIRED CONFIGURATION ###
|
||||||
|
|
||||||
|
# Change these values to your Asana credentials.
|
||||||
|
ASANA_API_KEY = "0123456789abcdef0123456789abcdef"
|
||||||
|
|
||||||
|
# Change these values to the credentials for your Asana bot.
|
||||||
|
ZULIP_USER = "asana-bot@example.com"
|
||||||
|
ZULIP_API_KEY = "0123456789abcdef0123456789abcdef"
|
||||||
|
|
||||||
|
# The Zulip stream that will receive Asana task updates.
|
||||||
|
ZULIP_STREAM_NAME = "asana"
|
||||||
|
|
||||||
|
|
||||||
|
### OPTIONAL CONFIGURATION ###
|
||||||
|
|
||||||
|
# Set to None for logging to stdout when testing, and to a file for
|
||||||
|
# logging in production.
|
||||||
|
#LOG_FILE = "/var/tmp/zulip_asana.log"
|
||||||
|
LOG_FILE = None
|
||||||
|
|
||||||
|
# This file is used to resume this mirror in case the script shuts down.
|
||||||
|
# It is required and needs to be writeable.
|
||||||
|
RESUME_FILE = "/var/tmp/zulip_asana.state"
|
||||||
|
|
||||||
|
# When initially started, how many hours of messages to include.
|
||||||
|
ASANA_INITIAL_HISTORY_HOURS = 1
|
||||||
|
|
||||||
|
# Set this to your Zulip API server URI
|
||||||
|
ZULIP_SITE = "https://zulip.example.com"
|
||||||
|
|
||||||
|
# If properly installed, the Zulip API should be in your import
|
||||||
|
# path, but if not, set a custom path below
|
||||||
|
ZULIP_API_PATH = None
|
||||||
306
api/integrations/asana/zulip_asana_mirror
Executable file
306
api/integrations/asana/zulip_asana_mirror
Executable file
@@ -0,0 +1,306 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
# Asana integration for Zulip
|
||||||
|
#
|
||||||
|
# Copyright © 2014 Zulip, Inc.
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
# of this software and associated documentation files (the "Software"), to deal
|
||||||
|
# in the Software without restriction, including without limitation the rights
|
||||||
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
# copies of the Software, and to permit persons to whom the Software is
|
||||||
|
# furnished to do so, subject to the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be included in
|
||||||
|
# all copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
# THE SOFTWARE.
|
||||||
|
#
|
||||||
|
# The "zulip_asana_mirror" script is run continuously, possibly on a work computer
|
||||||
|
# or preferably on a server.
|
||||||
|
#
|
||||||
|
# When restarted, it will attempt to pick up where it left off.
|
||||||
|
#
|
||||||
|
# python-dateutil is a dependency for this script.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import base64
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import List, Dict, Optional, Any, Tuple
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
from six.moves import urllib
|
||||||
|
from six.moves.urllib import request as urllib_request
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
import dateutil.parser
|
||||||
|
from dateutil.tz import gettz
|
||||||
|
except ImportError as e:
|
||||||
|
print(e, file=sys.stderr)
|
||||||
|
print("Please install the python-dateutil package.", file=sys.stderr)
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(__file__))
|
||||||
|
import zulip_asana_config as config
|
||||||
|
VERSION = "0.9"
|
||||||
|
|
||||||
|
if config.ZULIP_API_PATH is not None:
|
||||||
|
sys.path.append(config.ZULIP_API_PATH)
|
||||||
|
import zulip
|
||||||
|
|
||||||
|
if config.LOG_FILE:
|
||||||
|
logging.basicConfig(filename=config.LOG_FILE, level=logging.WARNING)
|
||||||
|
else:
|
||||||
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
|
||||||
|
client = zulip.Client(email=config.ZULIP_USER, api_key=config.ZULIP_API_KEY,
|
||||||
|
site=config.ZULIP_SITE, client="ZulipAsana/" + VERSION)
|
||||||
|
|
||||||
|
def fetch_from_asana(path):
|
||||||
|
# type: (str) -> Optional[Dict[str, Any]]
|
||||||
|
"""
|
||||||
|
Request a resource through the Asana API, authenticating using
|
||||||
|
HTTP basic auth.
|
||||||
|
"""
|
||||||
|
auth = base64.encodestring(b'%s:' % (config.ASANA_API_KEY,))
|
||||||
|
headers = {"Authorization": "Basic %s" % auth}
|
||||||
|
|
||||||
|
url = "https://app.asana.com/api/1.0" + path
|
||||||
|
request = urllib_request.Request(url, None, headers) # type: ignore
|
||||||
|
result = urllib_request.urlopen(request) # type: ignore
|
||||||
|
|
||||||
|
return json.load(result)
|
||||||
|
|
||||||
|
def send_zulip(topic, content):
|
||||||
|
# type: (str, str) -> Dict[str, str]
|
||||||
|
"""
|
||||||
|
Send a message to Zulip using the configured stream and bot credentials.
|
||||||
|
"""
|
||||||
|
message = {"type": "stream",
|
||||||
|
"sender": config.ZULIP_USER,
|
||||||
|
"to": config.ZULIP_STREAM_NAME,
|
||||||
|
"subject": topic,
|
||||||
|
"content": content,
|
||||||
|
}
|
||||||
|
return client.send_message(message)
|
||||||
|
|
||||||
|
def datestring_to_datetime(datestring):
|
||||||
|
# type: (str) -> datetime
|
||||||
|
"""
|
||||||
|
Given an ISO 8601 datestring, return the corresponding datetime object.
|
||||||
|
"""
|
||||||
|
return dateutil.parser.parse(datestring).replace(
|
||||||
|
tzinfo=gettz('Z'))
|
||||||
|
|
||||||
|
class TaskDict(dict):
|
||||||
|
"""
|
||||||
|
A helper class to turn a dictionary with task information into an
|
||||||
|
object where each of the keys is an attribute for easy access.
|
||||||
|
"""
|
||||||
|
def __getattr__(self, field):
|
||||||
|
# type: (TaskDict, str) -> Any
|
||||||
|
return self.get(field)
|
||||||
|
|
||||||
|
def format_topic(task, projects):
|
||||||
|
# type: (TaskDict, Dict[str, str]) -> str
|
||||||
|
"""
|
||||||
|
Return a string that will be the Zulip message topic for this task.
|
||||||
|
"""
|
||||||
|
# Tasks can be associated with multiple projects, but in practice they seem
|
||||||
|
# to mostly be associated with one.
|
||||||
|
project_name = projects[task.projects[0]["id"]]
|
||||||
|
return "%s: %s" % (project_name, task.name)
|
||||||
|
|
||||||
|
def format_assignee(task, users):
|
||||||
|
# type: (TaskDict, Dict[str, str]) -> str
|
||||||
|
"""
|
||||||
|
Return a string describing the task's assignee.
|
||||||
|
"""
|
||||||
|
if task.assignee:
|
||||||
|
assignee_name = users[task.assignee["id"]]
|
||||||
|
assignee_info = "**Assigned to**: %s (%s)" % (
|
||||||
|
assignee_name, task.assignee_status)
|
||||||
|
else:
|
||||||
|
assignee_info = "**Status**: Unassigned"
|
||||||
|
|
||||||
|
return assignee_info
|
||||||
|
|
||||||
|
def format_due_date(task):
|
||||||
|
# type: (TaskDict) -> str
|
||||||
|
"""
|
||||||
|
Return a string describing the task's due date.
|
||||||
|
"""
|
||||||
|
if task.due_on:
|
||||||
|
due_date_info = "**Due on**: %s" % (task.due_on,)
|
||||||
|
else:
|
||||||
|
due_date_info = "**Due date**: None"
|
||||||
|
return due_date_info
|
||||||
|
|
||||||
|
def format_task_creation_event(task, projects, users):
|
||||||
|
# type: (TaskDict, Dict[str, str], Dict[str, str]) -> Tuple[str, str]
|
||||||
|
"""
|
||||||
|
Format the topic and content for a newly-created task.
|
||||||
|
"""
|
||||||
|
topic = format_topic(task, projects)
|
||||||
|
assignee_info = format_assignee(task, users)
|
||||||
|
due_date_info = format_due_date(task)
|
||||||
|
|
||||||
|
content = """Task **%s** created:
|
||||||
|
|
||||||
|
~~~ quote
|
||||||
|
%s
|
||||||
|
~~~
|
||||||
|
|
||||||
|
%s
|
||||||
|
%s
|
||||||
|
""" % (task.name, task.notes, assignee_info, due_date_info)
|
||||||
|
return topic, content
|
||||||
|
|
||||||
|
def format_task_completion_event(task, projects, users):
|
||||||
|
# type: (TaskDict, Dict[str, str], Dict[str, str]) -> Tuple[str, str]
|
||||||
|
"""
|
||||||
|
Format the topic and content for a completed task.
|
||||||
|
"""
|
||||||
|
topic = format_topic(task, projects)
|
||||||
|
assignee_info = format_assignee(task, users)
|
||||||
|
due_date_info = format_due_date(task)
|
||||||
|
|
||||||
|
content = """Task **%s** completed. :white_check_mark:
|
||||||
|
|
||||||
|
%s
|
||||||
|
%s
|
||||||
|
""" % (task.name, assignee_info, due_date_info)
|
||||||
|
return topic, content
|
||||||
|
|
||||||
|
def since():
|
||||||
|
# type: () -> datetime
|
||||||
|
"""
|
||||||
|
Return a newness threshold for task events to be processed.
|
||||||
|
"""
|
||||||
|
# If we have a record of the last event processed and it is recent, use it,
|
||||||
|
# else process everything from ASANA_INITIAL_HISTORY_HOURS ago.
|
||||||
|
def default_since():
|
||||||
|
# type: () -> datetime
|
||||||
|
return datetime.utcnow() - timedelta(
|
||||||
|
hours=config.ASANA_INITIAL_HISTORY_HOURS)
|
||||||
|
|
||||||
|
if os.path.exists(config.RESUME_FILE):
|
||||||
|
try:
|
||||||
|
with open(config.RESUME_FILE, "r") as f:
|
||||||
|
datestring = f.readline().strip()
|
||||||
|
timestamp = float(datestring)
|
||||||
|
max_timestamp_processed = datetime.fromtimestamp(timestamp)
|
||||||
|
logging.info("Reading from resume file: " + datestring)
|
||||||
|
except (ValueError, IOError) as e:
|
||||||
|
logging.warn("Could not open resume file: " + str(e))
|
||||||
|
max_timestamp_processed = default_since()
|
||||||
|
else:
|
||||||
|
logging.info("No resume file, processing an initial history.")
|
||||||
|
max_timestamp_processed = default_since()
|
||||||
|
|
||||||
|
# Even if we can read a timestamp from RESUME_FILE, if it is old don't use
|
||||||
|
# it.
|
||||||
|
return max(max_timestamp_processed, default_since())
|
||||||
|
|
||||||
|
def process_new_events():
|
||||||
|
# type: () -> None
|
||||||
|
"""
|
||||||
|
Forward new Asana task events to Zulip.
|
||||||
|
"""
|
||||||
|
# In task queries, Asana only exposes IDs for projects and users, so we need
|
||||||
|
# to look up the mappings.
|
||||||
|
projects = dict((elt["id"], elt["name"]) for elt in
|
||||||
|
fetch_from_asana("/projects")["data"])
|
||||||
|
users = dict((elt["id"], elt["name"]) for elt in
|
||||||
|
fetch_from_asana("/users")["data"])
|
||||||
|
|
||||||
|
cutoff = since()
|
||||||
|
max_timestamp_processed = cutoff
|
||||||
|
time_operations = (("created_at", format_task_creation_event),
|
||||||
|
("completed_at", format_task_completion_event))
|
||||||
|
task_fields = ["assignee", "assignee_status", "created_at", "completed_at",
|
||||||
|
"modified_at", "due_on", "name", "notes", "projects"]
|
||||||
|
|
||||||
|
# First, gather all of the tasks that need processing. We'll
|
||||||
|
# process them in order.
|
||||||
|
new_events = []
|
||||||
|
|
||||||
|
for project_id in projects:
|
||||||
|
project_url = "/projects/%d/tasks?opt_fields=%s" % (
|
||||||
|
project_id, ",".join(task_fields))
|
||||||
|
tasks = fetch_from_asana(project_url)["data"]
|
||||||
|
|
||||||
|
for task in tasks:
|
||||||
|
task = TaskDict(task)
|
||||||
|
|
||||||
|
for time_field, operation in time_operations:
|
||||||
|
if task[time_field]:
|
||||||
|
operation_time = datestring_to_datetime(task[time_field])
|
||||||
|
if operation_time > cutoff:
|
||||||
|
new_events.append((operation_time, time_field, operation, task))
|
||||||
|
|
||||||
|
new_events.sort()
|
||||||
|
now = datetime.utcnow()
|
||||||
|
|
||||||
|
for operation_time, time_field, operation, task in new_events:
|
||||||
|
# Unfortunately, creating an Asana task is not an atomic operation. If
|
||||||
|
# the task was just created, or is missing basic information, it is
|
||||||
|
# probably because the task is still being filled out -- wait until the
|
||||||
|
# next round to process it.
|
||||||
|
if (time_field == "created_at") and \
|
||||||
|
(now - operation_time < timedelta(seconds=30)):
|
||||||
|
# The task was just created, give the user some time to fill out
|
||||||
|
# more information.
|
||||||
|
return
|
||||||
|
|
||||||
|
if (time_field == "created_at") and (not task.name) and \
|
||||||
|
(now - operation_time < timedelta(seconds=60)):
|
||||||
|
# If this new task hasn't had a name for a full 30 seconds, assume
|
||||||
|
# you don't plan on giving it one.
|
||||||
|
return
|
||||||
|
|
||||||
|
topic, content = operation(task, projects, users)
|
||||||
|
logging.info("Sending Zulip for " + topic)
|
||||||
|
result = send_zulip(topic, content)
|
||||||
|
|
||||||
|
# If the Zulip wasn't sent successfully, don't update the
|
||||||
|
# max timestamp processed so the task has another change to
|
||||||
|
# be forwarded. Exit, giving temporary issues time to
|
||||||
|
# resolve.
|
||||||
|
if not result.get("result"):
|
||||||
|
logging.warn("Malformed result, exiting:")
|
||||||
|
logging.warn(str(result))
|
||||||
|
return
|
||||||
|
|
||||||
|
if result["result"] != "success":
|
||||||
|
logging.warn(result["msg"])
|
||||||
|
return
|
||||||
|
|
||||||
|
if operation_time > max_timestamp_processed:
|
||||||
|
max_timestamp_processed = operation_time
|
||||||
|
|
||||||
|
if max_timestamp_processed > cutoff:
|
||||||
|
max_datestring = max_timestamp_processed.strftime("%s.%f")
|
||||||
|
logging.info("Updating resume file: " + max_datestring)
|
||||||
|
open(config.RESUME_FILE, 'w').write(max_datestring)
|
||||||
|
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
process_new_events()
|
||||||
|
time.sleep(5)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
logging.info("Shutting down...")
|
||||||
|
logging.info("Set LOG_FILE to log to a file instead of stdout.")
|
||||||
|
break
|
||||||
51
api/integrations/basecamp/zulip_basecamp_config.py
Normal file
51
api/integrations/basecamp/zulip_basecamp_config.py
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright © 2014 Zulip, Inc.
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
# of this software and associated documentation files (the "Software"), to deal
|
||||||
|
# in the Software without restriction, including without limitation the rights
|
||||||
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
# copies of the Software, and to permit persons to whom the Software is
|
||||||
|
# furnished to do so, subject to the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be included in
|
||||||
|
# all copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
# THE SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
# Change these values to configure authentication for basecamp account
|
||||||
|
BASECAMP_ACCOUNT_ID = "12345678"
|
||||||
|
BASECAMP_USERNAME = "foo@example.com"
|
||||||
|
BASECAMP_PASSWORD = "p455w0rd"
|
||||||
|
|
||||||
|
# This script will mirror this many hours of history on the first run.
|
||||||
|
# On subsequent runs this value is ignored.
|
||||||
|
BASECAMP_INITIAL_HISTORY_HOURS = 0
|
||||||
|
|
||||||
|
# Change these values to configure Zulip authentication for the plugin
|
||||||
|
ZULIP_USER = "basecamp-bot@example.com"
|
||||||
|
ZULIP_API_KEY = "0123456789abcdef0123456789abcdef"
|
||||||
|
ZULIP_STREAM_NAME = "basecamp"
|
||||||
|
|
||||||
|
## If properly installed, the Zulip API should be in your import
|
||||||
|
## path, but if not, set a custom path below
|
||||||
|
ZULIP_API_PATH = None
|
||||||
|
|
||||||
|
# Set this to your Zulip API server URI
|
||||||
|
ZULIP_SITE = "https://zulip.example.com"
|
||||||
|
|
||||||
|
# If you wish to log to a file rather than stdout/stderr,
|
||||||
|
# please fill this out your desired path
|
||||||
|
LOG_FILE = None
|
||||||
|
|
||||||
|
# This file is used to resume this mirror in case the script shuts down.
|
||||||
|
# It is required and needs to be writeable.
|
||||||
|
RESUME_FILE = "/var/tmp/zulip_basecamp.state"
|
||||||
186
api/integrations/basecamp/zulip_basecamp_mirror
Executable file
186
api/integrations/basecamp/zulip_basecamp_mirror
Executable file
@@ -0,0 +1,186 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
# Zulip mirror of Basecamp activity
|
||||||
|
# Copyright © 2014 Zulip, Inc.
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
# of this software and associated documentation files (the "Software"), to deal
|
||||||
|
# in the Software without restriction, including without limitation the rights
|
||||||
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
# copies of the Software, and to permit persons to whom the Software is
|
||||||
|
# furnished to do so, subject to the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be included in
|
||||||
|
# all copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
# THE SOFTWARE.
|
||||||
|
#
|
||||||
|
# The "basecamp-mirror.py" script is run continuously, possibly on a work computer
|
||||||
|
# or preferably on a server.
|
||||||
|
# You may need to install the python-requests library.
|
||||||
|
|
||||||
|
from __future__ import absolute_import
|
||||||
|
import requests
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from stderror import write
|
||||||
|
import os
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(__file__))
|
||||||
|
import zulip_basecamp_config as config
|
||||||
|
VERSION = "0.9"
|
||||||
|
|
||||||
|
if config.ZULIP_API_PATH is not None:
|
||||||
|
sys.path.append(config.ZULIP_API_PATH)
|
||||||
|
import zulip
|
||||||
|
from six.moves.html_parser import HTMLParser
|
||||||
|
from typing import Any, Dict
|
||||||
|
import six
|
||||||
|
|
||||||
|
client = zulip.Client(
|
||||||
|
email=config.ZULIP_USER,
|
||||||
|
site=config.ZULIP_SITE,
|
||||||
|
api_key=config.ZULIP_API_KEY,
|
||||||
|
client="ZulipBasecamp/" + VERSION)
|
||||||
|
user_agent = "Basecamp To Zulip Mirroring script (zulip-devel@googlegroups.com)"
|
||||||
|
htmlParser = HTMLParser()
|
||||||
|
|
||||||
|
# find some form of JSON loader/dumper, with a preference order for speed.
|
||||||
|
json_implementations = ['ujson', 'cjson', 'simplejson', 'json']
|
||||||
|
|
||||||
|
while len(json_implementations):
|
||||||
|
try:
|
||||||
|
json = __import__(json_implementations.pop(0))
|
||||||
|
break
|
||||||
|
except ImportError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# void function that checks the permissions of the files this script needs.
|
||||||
|
def check_permissions():
|
||||||
|
# type: () -> None
|
||||||
|
# check that the log file can be written
|
||||||
|
if config.LOG_FILE:
|
||||||
|
try:
|
||||||
|
open(config.LOG_FILE, "w")
|
||||||
|
except IOError as e:
|
||||||
|
sys.stderr.write("Could not open up log for writing:")
|
||||||
|
sys.stderr.write(str(e))
|
||||||
|
# check that the resume file can be written (this creates if it doesn't exist)
|
||||||
|
try:
|
||||||
|
open(config.RESUME_FILE, "a+")
|
||||||
|
except IOError as e:
|
||||||
|
sys.stderr.write("Could not open up the file %s for reading and writing" % (config.RESUME_FILE),)
|
||||||
|
sys.stderr.write(str(e))
|
||||||
|
|
||||||
|
# builds the message dict for sending a message with the Zulip API
|
||||||
|
def build_message(event):
|
||||||
|
# type: (Dict[str, Any]) -> Dict[str, Any]
|
||||||
|
if not ('bucket' in event and 'creator' in event and 'html_url' in event):
|
||||||
|
logging.error("Perhaps the Basecamp API changed behavior? "
|
||||||
|
"This event doesn't have the expected format:\n%s" % (event,))
|
||||||
|
return None
|
||||||
|
# adjust the topic length to be bounded to 60 characters
|
||||||
|
topic = event['bucket']['name']
|
||||||
|
if len(topic) > 60:
|
||||||
|
topic = topic[0:57] + "..."
|
||||||
|
# get the action and target values
|
||||||
|
action = htmlParser.unescape(re.sub(r"<[^<>]+>", "", event.get('action', '')))
|
||||||
|
target = htmlParser.unescape(event.get('target', ''))
|
||||||
|
# Some events have "excerpts", which we blockquote
|
||||||
|
excerpt = htmlParser.unescape(event.get('excerpt', ''))
|
||||||
|
if excerpt.strip() == "":
|
||||||
|
message = '**%s** %s [%s](%s).' % (event['creator']['name'], action, target, event['html_url'])
|
||||||
|
else:
|
||||||
|
message = '**%s** %s [%s](%s).\n> %s' % (event['creator']['name'], action, target, event['html_url'], excerpt)
|
||||||
|
# assemble the message data dict
|
||||||
|
message_data = {
|
||||||
|
"type": "stream",
|
||||||
|
"to": config.ZULIP_STREAM_NAME,
|
||||||
|
"subject": topic,
|
||||||
|
"content": message,
|
||||||
|
}
|
||||||
|
return message_data
|
||||||
|
|
||||||
|
# the main run loop for this mirror script
|
||||||
|
def run_mirror():
|
||||||
|
# type: () -> None
|
||||||
|
# we should have the right (write) permissions on the resume file, as seen
|
||||||
|
# in check_permissions, but it may still be empty or corrupted
|
||||||
|
try:
|
||||||
|
with open(config.RESUME_FILE) as f:
|
||||||
|
since = f.read() # type: Any
|
||||||
|
since = re.search(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}-\d{2}:\d{2}", since)
|
||||||
|
assert since, "resume file does not meet expected format"
|
||||||
|
since = since.string
|
||||||
|
except (AssertionError, IOError) as e:
|
||||||
|
logging.warn("Could not open resume file: %s" % (e,))
|
||||||
|
since = (datetime.utcnow() - timedelta(hours=config.BASECAMP_INITIAL_HISTORY_HOURS)).isoformat() + "-00:00"
|
||||||
|
try:
|
||||||
|
# we use an exponential backoff approach when we get 429 (Too Many Requests).
|
||||||
|
sleepInterval = 1
|
||||||
|
while True:
|
||||||
|
time.sleep(sleepInterval)
|
||||||
|
response = requests.get("https://basecamp.com/%s/api/v1/events.json" % (config.BASECAMP_ACCOUNT_ID),
|
||||||
|
params={'since': since},
|
||||||
|
auth=(config.BASECAMP_USERNAME, config.BASECAMP_PASSWORD),
|
||||||
|
headers = {"User-Agent": user_agent})
|
||||||
|
if response.status_code == 200:
|
||||||
|
sleepInterval = 1
|
||||||
|
events = json.loads(response.text)
|
||||||
|
if len(events):
|
||||||
|
logging.info("Got event(s): %s" % (response.text,))
|
||||||
|
if response.status_code >= 500:
|
||||||
|
logging.error(str(response.status_code))
|
||||||
|
continue
|
||||||
|
if response.status_code == 429:
|
||||||
|
# exponential backoff
|
||||||
|
sleepInterval *= 2
|
||||||
|
logging.error(str(response.status_code))
|
||||||
|
continue
|
||||||
|
if response.status_code == 400:
|
||||||
|
logging.error("Something went wrong. Basecamp must be unhappy for this reason: %s" % (response.text,))
|
||||||
|
sys.exit(-1)
|
||||||
|
if response.status_code == 401:
|
||||||
|
logging.error("Bad authorization from Basecamp. Please check your Basecamp login credentials")
|
||||||
|
sys.exit(-1)
|
||||||
|
if len(events):
|
||||||
|
since = events[0]['created_at']
|
||||||
|
for event in reversed(events):
|
||||||
|
message_data = build_message(event)
|
||||||
|
if not message_data:
|
||||||
|
continue
|
||||||
|
zulip_api_result = client.send_message(message_data)
|
||||||
|
if zulip_api_result['result'] == "success":
|
||||||
|
logging.info("sent zulip with id: %s" % (zulip_api_result['id'],))
|
||||||
|
else:
|
||||||
|
logging.warn("%s %s" % (zulip_api_result['result'], zulip_api_result['msg']))
|
||||||
|
# update 'since' each time in case we get KeyboardInterrupted
|
||||||
|
since = event['created_at']
|
||||||
|
# avoid hitting rate-limit
|
||||||
|
time.sleep(0.2)
|
||||||
|
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
logging.info("Shutting down, please hold")
|
||||||
|
open("events.last", 'w').write(since)
|
||||||
|
logging.info("Done!")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
if not isinstance(config.RESUME_FILE, six.string_types):
|
||||||
|
sys.stderr.write("RESUME_FILE path not given; refusing to continue")
|
||||||
|
check_permissions()
|
||||||
|
if config.LOG_FILE:
|
||||||
|
logging.basicConfig(filename=config.LOG_FILE, level=logging.INFO)
|
||||||
|
else:
|
||||||
|
logging.basicConfig(level=logging.INFO)
|
||||||
|
run_mirror()
|
||||||
60
api/integrations/codebase/zulip_codebase_config.py
Normal file
60
api/integrations/codebase/zulip_codebase_config.py
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright © 2014 Zulip, Inc.
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
# of this software and associated documentation files (the "Software"), to deal
|
||||||
|
# in the Software without restriction, including without limitation the rights
|
||||||
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
# copies of the Software, and to permit persons to whom the Software is
|
||||||
|
# furnished to do so, subject to the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be included in
|
||||||
|
# all copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
# THE SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
# Change these values to configure authentication for your codebase account
|
||||||
|
# Note that this is the Codebase API Username, found in the Settings page
|
||||||
|
# for your account
|
||||||
|
CODEBASE_API_USERNAME = "foo@example.com"
|
||||||
|
CODEBASE_API_KEY = "1234561234567abcdef"
|
||||||
|
|
||||||
|
# The URL of your codebase setup
|
||||||
|
CODEBASE_ROOT_URL = "https://YOUR_COMPANY.codebasehq.com"
|
||||||
|
|
||||||
|
# When initially started, how many hours of messages to include.
|
||||||
|
# Note that the Codebase API only returns the 20 latest events,
|
||||||
|
# if you have more than 20 events that fit within this window,
|
||||||
|
# earlier ones may be lost
|
||||||
|
CODEBASE_INITIAL_HISTORY_HOURS = 12
|
||||||
|
|
||||||
|
# Change these values to configure Zulip authentication for the plugin
|
||||||
|
ZULIP_USER = "codebase-bot@example.com"
|
||||||
|
ZULIP_API_KEY = "0123456789abcdef0123456789abcdef"
|
||||||
|
|
||||||
|
# The streams to send commit information and ticket information to
|
||||||
|
ZULIP_COMMITS_STREAM_NAME = "codebase"
|
||||||
|
ZULIP_TICKETS_STREAM_NAME = "tickets"
|
||||||
|
|
||||||
|
# If properly installed, the Zulip API should be in your import
|
||||||
|
# path, but if not, set a custom path below
|
||||||
|
ZULIP_API_PATH = None
|
||||||
|
|
||||||
|
# Set this to your Zulip API server URI
|
||||||
|
ZULIP_SITE = "https://zulip.example.com"
|
||||||
|
|
||||||
|
# If you wish to log to a file rather than stdout/stderr,
|
||||||
|
# please fill this out your desired path
|
||||||
|
LOG_FILE = None
|
||||||
|
|
||||||
|
# This file is used to resume this mirror in case the script shuts down.
|
||||||
|
# It is required and needs to be writeable.
|
||||||
|
RESUME_FILE = "/var/tmp/zulip_codebase.state"
|
||||||
332
api/integrations/codebase/zulip_codebase_mirror
Executable file
332
api/integrations/codebase/zulip_codebase_mirror
Executable file
@@ -0,0 +1,332 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
# Zulip mirror of Codebase HQ activity
|
||||||
|
# Copyright © 2014 Zulip, Inc.
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
# of this software and associated documentation files (the "Software"), to deal
|
||||||
|
# in the Software without restriction, including without limitation the rights
|
||||||
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
# copies of the Software, and to permit persons to whom the Software is
|
||||||
|
# furnished to do so, subject to the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be included in
|
||||||
|
# all copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
# THE SOFTWARE.
|
||||||
|
#
|
||||||
|
# The "zulip_codebase_mirror" script is run continuously, possibly on a work
|
||||||
|
# computer or preferably on a server.
|
||||||
|
#
|
||||||
|
# When restarted, it will attempt to pick up where it left off.
|
||||||
|
#
|
||||||
|
# python-dateutil is a dependency for this script.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
from __future__ import absolute_import
|
||||||
|
import requests
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
try:
|
||||||
|
import dateutil.parser
|
||||||
|
except ImportError as e:
|
||||||
|
print(e, file=sys.stderr)
|
||||||
|
print("Please install the python-dateutil package.", file=sys.stderr)
|
||||||
|
exit(1)
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.dirname(__file__))
|
||||||
|
import zulip_codebase_config as config
|
||||||
|
VERSION = "0.9"
|
||||||
|
|
||||||
|
if config.ZULIP_API_PATH is not None:
|
||||||
|
sys.path.append(config.ZULIP_API_PATH)
|
||||||
|
import six
|
||||||
|
import zulip
|
||||||
|
from typing import Any, List, Dict, Optional
|
||||||
|
|
||||||
|
client = zulip.Client(
|
||||||
|
email=config.ZULIP_USER,
|
||||||
|
site=config.ZULIP_SITE,
|
||||||
|
api_key=config.ZULIP_API_KEY,
|
||||||
|
client="ZulipCodebase/" + VERSION)
|
||||||
|
user_agent = "Codebase To Zulip Mirroring script (zulip-devel@googlegroups.com)"
|
||||||
|
|
||||||
|
# find some form of JSON loader/dumper, with a preference order for speed.
|
||||||
|
json_implementations = ['ujson', 'cjson', 'simplejson', 'json']
|
||||||
|
|
||||||
|
while len(json_implementations):
|
||||||
|
try:
|
||||||
|
json = __import__(json_implementations.pop(0))
|
||||||
|
break
|
||||||
|
except ImportError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
def make_api_call(path):
|
||||||
|
# type: (str) -> Optional[List[Dict[str, Any]]]
|
||||||
|
response = requests.get("https://api3.codebasehq.com/%s" % (path,),
|
||||||
|
auth=(config.CODEBASE_API_USERNAME, config.CODEBASE_API_KEY),
|
||||||
|
params={'raw': True},
|
||||||
|
headers = {"User-Agent": user_agent,
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
"Accept": "application/json"})
|
||||||
|
if response.status_code == 200:
|
||||||
|
return json.loads(response.text)
|
||||||
|
|
||||||
|
if response.status_code >= 500:
|
||||||
|
logging.error(str(response.status_code))
|
||||||
|
return None
|
||||||
|
if response.status_code == 403:
|
||||||
|
logging.error("Bad authorization from Codebase. Please check your credentials")
|
||||||
|
sys.exit(-1)
|
||||||
|
else:
|
||||||
|
logging.warn("Found non-success response status code: %s %s" % (response.status_code, response.text))
|
||||||
|
return None
|
||||||
|
|
||||||
|
def make_url(path):
|
||||||
|
# type: (str) -> str
|
||||||
|
return "%s/%s" % (config.CODEBASE_ROOT_URL, path)
|
||||||
|
|
||||||
|
def handle_event(event):
|
||||||
|
# type: (Dict[str, Any]) -> None
|
||||||
|
event = event['event']
|
||||||
|
event_type = event['type']
|
||||||
|
actor_name = event['actor_name']
|
||||||
|
|
||||||
|
raw_props = event.get('raw_properties', {})
|
||||||
|
|
||||||
|
project_link = raw_props.get('project_permalink')
|
||||||
|
|
||||||
|
subject = None
|
||||||
|
content = None
|
||||||
|
if event_type == 'repository_creation':
|
||||||
|
stream = config.ZULIP_COMMITS_STREAM_NAME
|
||||||
|
|
||||||
|
project_name = raw_props.get('name')
|
||||||
|
project_repo_type = raw_props.get('scm_type')
|
||||||
|
|
||||||
|
url = make_url("projects/%s" % (project_link,))
|
||||||
|
scm = "of type %s" % (project_repo_type,) if project_repo_type else ""
|
||||||
|
|
||||||
|
subject = "Repository %s Created" % (project_name,)
|
||||||
|
content = "%s created a new repository %s [%s](%s)" % (actor_name, scm, project_name, url)
|
||||||
|
elif event_type == 'push':
|
||||||
|
stream = config.ZULIP_COMMITS_STREAM_NAME
|
||||||
|
|
||||||
|
num_commits = raw_props.get('commits_count')
|
||||||
|
branch = raw_props.get('ref_name')
|
||||||
|
project = raw_props.get('project_name')
|
||||||
|
repo_link = raw_props.get('repository_permalink')
|
||||||
|
deleted_ref = raw_props.get('deleted_ref')
|
||||||
|
new_ref = raw_props.get('new_ref')
|
||||||
|
|
||||||
|
subject = "Push to %s on %s" % (branch, project)
|
||||||
|
|
||||||
|
if deleted_ref:
|
||||||
|
content = "%s deleted branch %s from %s" % (actor_name, branch, project)
|
||||||
|
else:
|
||||||
|
if new_ref:
|
||||||
|
branch = "new branch %s" % (branch,)
|
||||||
|
content = ("%s pushed %s commit(s) to %s in project %s:\n\n" %
|
||||||
|
(actor_name, num_commits, branch, project))
|
||||||
|
for commit in raw_props.get('commits'):
|
||||||
|
ref = commit.get('ref')
|
||||||
|
url = make_url("projects/%s/repositories/%s/commit/%s" % (project_link, repo_link, ref))
|
||||||
|
message = commit.get('message')
|
||||||
|
content += "* [%s](%s): %s\n" % (ref, url, message)
|
||||||
|
elif event_type == 'ticketing_ticket':
|
||||||
|
stream = config.ZULIP_TICKETS_STREAM_NAME
|
||||||
|
|
||||||
|
num = raw_props.get('number')
|
||||||
|
name = raw_props.get('subject')
|
||||||
|
assignee = raw_props.get('assignee')
|
||||||
|
priority = raw_props.get('priority')
|
||||||
|
url = make_url("projects/%s/tickets/%s" % (project_link, num))
|
||||||
|
|
||||||
|
if assignee is None:
|
||||||
|
assignee = "no one"
|
||||||
|
subject = "#%s: %s" % (num, name)
|
||||||
|
content = ("""%s created a new ticket [#%s](%s) priority **%s** assigned to %s:\n\n~~~ quote\n %s""" %
|
||||||
|
(actor_name, num, url, priority, assignee, name))
|
||||||
|
elif event_type == 'ticketing_note':
|
||||||
|
stream = config.ZULIP_TICKETS_STREAM_NAME
|
||||||
|
|
||||||
|
num = raw_props.get('number')
|
||||||
|
name = raw_props.get('subject')
|
||||||
|
body = raw_props.get('content')
|
||||||
|
changes = raw_props.get('changes')
|
||||||
|
|
||||||
|
url = make_url("projects/%s/tickets/%s" % (project_link, num))
|
||||||
|
subject = "#%s: %s" % (num, name)
|
||||||
|
|
||||||
|
content = ""
|
||||||
|
if body is not None and len(body) > 0:
|
||||||
|
content = "%s added a comment to ticket [#%s](%s):\n\n~~~ quote\n%s\n\n" % (actor_name, num, url, body)
|
||||||
|
|
||||||
|
if 'status_id' in changes:
|
||||||
|
status_change = changes.get('status_id')
|
||||||
|
content += "Status changed from **%s** to **%s**\n\n" % (status_change[0], status_change[1])
|
||||||
|
elif event_type == 'ticketing_milestone':
|
||||||
|
stream = config.ZULIP_TICKETS_STREAM_NAME
|
||||||
|
|
||||||
|
name = raw_props.get('name')
|
||||||
|
identifier = raw_props.get('identifier')
|
||||||
|
url = make_url("projects/%s/milestone/%s" % (project_link, identifier))
|
||||||
|
|
||||||
|
subject = name
|
||||||
|
content = "%s created a new milestone [%s](%s)" % (actor_name, name, url)
|
||||||
|
elif event_type == 'comment':
|
||||||
|
stream = config.ZULIP_COMMITS_STREAM_NAME
|
||||||
|
|
||||||
|
comment = raw_props.get('content')
|
||||||
|
commit = raw_props.get('commit_ref')
|
||||||
|
|
||||||
|
# If there's a commit id, it's a comment to a commit
|
||||||
|
if commit:
|
||||||
|
repo_link = raw_props.get('repository_permalink')
|
||||||
|
|
||||||
|
url = make_url('projects/%s/repositories/%s/commit/%s' % (project_link, repo_link, commit))
|
||||||
|
|
||||||
|
subject = "%s commented on %s" % (actor_name, commit)
|
||||||
|
content = "%s commented on [%s](%s):\n\n~~~ quote\n%s" % (actor_name, commit, url, comment)
|
||||||
|
else:
|
||||||
|
# Otherwise, this is a Discussion item, and handle it
|
||||||
|
subj = raw_props.get("subject")
|
||||||
|
category = raw_props.get("category")
|
||||||
|
comment_content = raw_props.get("content")
|
||||||
|
|
||||||
|
subject = "Discussion: %s" % (subj,)
|
||||||
|
|
||||||
|
if category:
|
||||||
|
format_str = "%s started a new discussion in %s:\n\n~~~ quote\n%s\n~~~"
|
||||||
|
content = format_str % (actor_name, category, comment_content)
|
||||||
|
else:
|
||||||
|
content = "%s posted:\n\n~~~ quote\n%s\n~~~" % (actor_name, comment_content)
|
||||||
|
|
||||||
|
elif event_type == 'deployment':
|
||||||
|
stream = config.ZULIP_COMMITS_STREAM_NAME
|
||||||
|
|
||||||
|
start_ref = raw_props.get('start_ref')
|
||||||
|
end_ref = raw_props.get('end_ref')
|
||||||
|
environment = raw_props.get('environment')
|
||||||
|
servers = raw_props.get('servers')
|
||||||
|
repo_link = raw_props.get('repository_permalink')
|
||||||
|
|
||||||
|
start_ref_url = make_url("projects/%s/repositories/%s/commit/%s" % (project_link, repo_link, start_ref))
|
||||||
|
end_ref_url = make_url("projects/%s/repositories/%s/commit/%s" % (project_link, repo_link, end_ref))
|
||||||
|
between_url = make_url("projects/%s/repositories/%s/compare/%s...%s" % (
|
||||||
|
project_link, repo_link, start_ref, end_ref))
|
||||||
|
|
||||||
|
subject = "Deployment to %s" % (environment,)
|
||||||
|
|
||||||
|
content = ("%s deployed [%s](%s) [through](%s) [%s](%s) to the **%s** environment." %
|
||||||
|
(actor_name, start_ref, start_ref_url, between_url, end_ref, end_ref_url, environment))
|
||||||
|
if servers is not None:
|
||||||
|
content += "\n\nServers deployed to: %s" % (", ".join(["`%s`" % (server,) for server in servers]))
|
||||||
|
|
||||||
|
elif event_type == 'named_tree':
|
||||||
|
# Docs say named_tree type used for new/deleting branches and tags,
|
||||||
|
# but experimental testing showed that they were all sent as 'push' events
|
||||||
|
pass
|
||||||
|
elif event_type == 'wiki_page':
|
||||||
|
logging.warn("Wiki page notifications not yet implemented")
|
||||||
|
elif event_type == 'sprint_creation':
|
||||||
|
logging.warn("Sprint notifications not yet implemented")
|
||||||
|
elif event_type == 'sprint_ended':
|
||||||
|
logging.warn("Sprint notifications not yet implemented")
|
||||||
|
else:
|
||||||
|
logging.info("Unknown event type %s, ignoring!" % (event_type,))
|
||||||
|
|
||||||
|
if subject and content:
|
||||||
|
if len(subject) > 60:
|
||||||
|
subject = subject[:57].rstrip() + '...'
|
||||||
|
|
||||||
|
res = client.send_message({"type": "stream",
|
||||||
|
"to": stream,
|
||||||
|
"subject": subject,
|
||||||
|
"content": content})
|
||||||
|
if res['result'] == 'success':
|
||||||
|
logging.info("Successfully sent Zulip with id: %s" % (res['id']))
|
||||||
|
else:
|
||||||
|
logging.warn("Failed to send Zulip: %s %s" % (res['result'], res['msg']))
|
||||||
|
|
||||||
|
|
||||||
|
# the main run loop for this mirror script
|
||||||
|
def run_mirror():
|
||||||
|
# type: () -> None
|
||||||
|
# we should have the right (write) permissions on the resume file, as seen
|
||||||
|
# in check_permissions, but it may still be empty or corrupted
|
||||||
|
def default_since():
|
||||||
|
# type: () -> datetime
|
||||||
|
return datetime.utcnow() - timedelta(hours=config.CODEBASE_INITIAL_HISTORY_HOURS)
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(config.RESUME_FILE) as f:
|
||||||
|
timestamp = f.read()
|
||||||
|
if timestamp == '':
|
||||||
|
since = default_since()
|
||||||
|
else:
|
||||||
|
since = datetime.fromtimestamp(float(timestamp))
|
||||||
|
except (ValueError, IOError) as e:
|
||||||
|
logging.warn("Could not open resume file: %s" % (str(e)))
|
||||||
|
since = default_since()
|
||||||
|
|
||||||
|
try:
|
||||||
|
sleepInterval = 1
|
||||||
|
while True:
|
||||||
|
events = make_api_call("activity")[::-1]
|
||||||
|
if events is not None:
|
||||||
|
sleepInterval = 1
|
||||||
|
for event in events:
|
||||||
|
timestamp = event.get('event', {}).get('timestamp', '')
|
||||||
|
event_date = dateutil.parser.parse(timestamp).replace(tzinfo=None)
|
||||||
|
if event_date > since:
|
||||||
|
handle_event(event)
|
||||||
|
since = event_date
|
||||||
|
else:
|
||||||
|
# back off a bit
|
||||||
|
if sleepInterval < 22:
|
||||||
|
sleepInterval += 4
|
||||||
|
time.sleep(sleepInterval)
|
||||||
|
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
open(config.RESUME_FILE, 'w').write(since.strftime("%s"))
|
||||||
|
logging.info("Shutting down Codebase mirror")
|
||||||
|
|
||||||
|
# void function that checks the permissions of the files this script needs.
|
||||||
|
def check_permissions():
|
||||||
|
# type: () -> None
|
||||||
|
# check that the log file can be written
|
||||||
|
if config.LOG_FILE:
|
||||||
|
try:
|
||||||
|
open(config.LOG_FILE, "w")
|
||||||
|
except IOError as e:
|
||||||
|
sys.stderr.write("Could not open up log for writing:")
|
||||||
|
sys.stderr.write(str(e))
|
||||||
|
# check that the resume file can be written (this creates if it doesn't exist)
|
||||||
|
try:
|
||||||
|
open(config.RESUME_FILE, "a+")
|
||||||
|
except IOError as e:
|
||||||
|
sys.stderr.write("Could not open up the file %s for reading and writing" % (config.RESUME_FILE,))
|
||||||
|
sys.stderr.write(str(e))
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
if not isinstance(config.RESUME_FILE, six.string_types):
|
||||||
|
sys.stderr.write("RESUME_FILE path not given; refusing to continue")
|
||||||
|
check_permissions()
|
||||||
|
if config.LOG_FILE:
|
||||||
|
logging.basicConfig(filename=config.LOG_FILE, level=logging.WARNING)
|
||||||
|
else:
|
||||||
|
logging.basicConfig(level=logging.WARNING)
|
||||||
|
run_mirror()
|
||||||
120
api/integrations/git/post-receive
Executable file
120
api/integrations/git/post-receive
Executable file
@@ -0,0 +1,120 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
# Zulip notification post-receive hook.
|
||||||
|
# Copyright © 2012-2014 Zulip, Inc.
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
# of this software and associated documentation files (the "Software"), to deal
|
||||||
|
# in the Software without restriction, including without limitation the rights
|
||||||
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
# copies of the Software, and to permit persons to whom the Software is
|
||||||
|
# furnished to do so, subject to the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be included in
|
||||||
|
# all copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
# THE SOFTWARE.
|
||||||
|
#
|
||||||
|
# The "post-receive" script is run after receive-pack has accepted a pack
|
||||||
|
# and the repository has been updated. It is passed arguments in through
|
||||||
|
# stdin in the form
|
||||||
|
# <oldrev> <newrev> <refname>
|
||||||
|
# For example:
# aa453216d1b3e49e7f6f98441fa56946ddcd6a20 68f7abf4e6f922807889f52bc043ecd31b79f814 refs/heads/master

from __future__ import absolute_import

from typing import Text

import os
import sys
import subprocess
import os.path

sys.path.insert(0, os.path.dirname(__file__))
import zulip_git_config as config
VERSION = "0.9"

if config.ZULIP_API_PATH is not None:
    sys.path.append(config.ZULIP_API_PATH)

import zulip
client = zulip.Client(
    email=config.ZULIP_USER,
    site=config.ZULIP_SITE,
    api_key=config.ZULIP_API_KEY,
    client="ZulipGit/" + VERSION)

def git_repository_name():
    # type: () -> Text
    output = subprocess.check_output(["git", "rev-parse", "--is-bare-repository"])
    if output.strip() == "true":
        return os.path.basename(os.getcwd())[:-len(".git")]
    else:
        return os.path.basename(os.path.dirname(os.getcwd()))

def git_commit_range(oldrev, newrev):
    # type: (str, str) -> str
    log_cmd = ["git", "log", "--reverse",
               "--pretty=%aE %H %s", "%s..%s" % (oldrev, newrev)]
    commits = ''
    for ln in subprocess.check_output(log_cmd).splitlines():
        author_email, commit_id, subject = ln.split(None, 2)
        if hasattr(config, "format_commit_message"):
            commits += config.format_commit_message(author_email, subject, commit_id)
        else:
            commits += '!avatar(%s) %s\n' % (author_email, subject)
    return commits

def send_bot_message(oldrev, newrev, refname):
    # type: (str, str, str) -> None
    repo_name = git_repository_name()
    branch = refname.replace('refs/heads/', '')
    destination = config.commit_notice_destination(repo_name, branch, newrev)
    if destination is None:
        # Don't forward the notice anywhere
        return

    new_head = newrev[:12]
    old_head = oldrev[:12]

    if (oldrev == '0000000000000000000000000000000000000000' or
            newrev == '0000000000000000000000000000000000000000'):
        # New branch pushed or old branch removed
        added = ''
        removed = ''
    else:
        added = git_commit_range(oldrev, newrev)
        removed = git_commit_range(newrev, oldrev)

    if oldrev == '0000000000000000000000000000000000000000':
        message = '`%s` was pushed to new branch `%s`' % (new_head, branch)
    elif newrev == '0000000000000000000000000000000000000000':
        message = 'branch `%s` was removed (was `%s`)' % (branch, old_head)
    elif removed:
        message = '`%s` was pushed to `%s`, **REMOVING**:\n\n%s' % (new_head, branch, removed)
        if added:
            message += '\n**and adding**:\n\n' + added
        message += '\n**A HISTORY REWRITE HAS OCCURRED!**'
        message += '\n@everyone: Please check your local branches to deal with this.'
    elif added:
        message = '`%s` was deployed to `%s` with:\n\n%s' % (new_head, branch, added)
    else:
        message = '`%s` was pushed to `%s`... but nothing changed?' % (new_head, branch)

    message_data = {
        "type": "stream",
        "to": destination["stream"],
        "subject": destination["subject"],
        "content": message,
    }
    client.send_message(message_data)

for ln in sys.stdin:
    oldrev, newrev, refname = ln.strip().split()
    send_bot_message(oldrev, newrev, refname)

64
api/integrations/git/zulip_git_config.py
Normal file
@@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
#
# Copyright © 2014 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.


# Change these values to configure authentication for the plugin
ZULIP_USER = "git-bot@example.com"
ZULIP_API_KEY = "0123456789abcdef0123456789abcdef"

# commit_notice_destination() lets you customize where commit notices
# are sent to with the full power of a Python function.
#
# It takes the following arguments:
# * repo = the name of the git repository
# * branch = the name of the branch that was pushed to
# * commit = the commit id
#
# Returns a dictionary encoding the stream and subject to send the
# notification to (or None to send no notification).
#
# The default code below will send every commit pushed to "master" to
# * stream "commits"
# * topic "master"
# And similarly for branch "test-post-receive" (for use when testing).
def commit_notice_destination(repo, branch, commit):
    if branch in ["master", "test-post-receive"]:
        return dict(stream = "commits",
                    subject = u"%s" % (branch,))

    # Return None for cases where you don't want a notice sent
    return None

# Modify this function to change how commits are displayed; the most
# common customization is to include a link to the commit in your
# graphical repository viewer, e.g.
#
# return '!avatar(%s) [%s](https://example.com/commits/%s)\n' % (author, subject, commit_id)
def format_commit_message(author, subject, commit_id):
    return '!avatar(%s) %s\n' % (author, subject)

## If properly installed, the Zulip API should be in your import
## path, but if not, set a custom path below
ZULIP_API_PATH = None

# Set this to your Zulip server's API URI
ZULIP_SITE = "https://zulip.example.com"
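
As the comments in zulip_git_config.py describe, commit_notice_destination() can route notices anywhere, returning a dict with "stream" and "subject" (or None to stay silent). The sketch below is a hypothetical customization, not part of the shipped default; the repository and stream names are placeholders. It sends each repository's commits to its own stream while ignoring other branches.

# Hypothetical customization of zulip_git_config.py (illustrative only).
def commit_notice_destination(repo, branch, commit):
    # Send each repository's commits to a stream named after it,
    # but only for the branches we care about.
    if branch in ["master", "test-post-receive"]:
        return dict(stream = "commits-%s" % (repo,),
                    subject = u"%s" % (branch,))
    # Return None for branches that should not trigger a notice.
    return None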

56
api/integrations/google/get-google-credentials
Normal file
@@ -0,0 +1,56 @@
#!/usr/bin/env python
from __future__ import print_function
import datetime
import httplib2
import os

from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage

try:
    import argparse
    flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags = None

# If modifying these scopes, delete your previously saved credentials
# at zulip/bots/gcal/
# NOTE: When adding more scopes, add them after the previous one in the same field, with a space
# separating them.
SCOPES = 'https://www.googleapis.com/auth/calendar.readonly'
# This file contains the information that Google uses to figure out which application is requesting
# this client's data.
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Zulip Calendar Bot'
HOME_DIR = os.path.expanduser('~')

def get_credentials():
    # type: () -> client.Credentials
    """Gets valid user credentials from storage.

    If nothing has been stored, or if the stored credentials are invalid,
    the OAuth2 flow is completed to obtain the new credentials.

    Returns:
        Credentials, the obtained credential.
    """

    credential_path = os.path.join(HOME_DIR,
                                   'google-credentials.json')

    store = Storage(credential_path)
    credentials = store.get()
    if not credentials or credentials.invalid:
        flow = client.flow_from_clientsecrets(os.path.join(HOME_DIR, CLIENT_SECRET_FILE), SCOPES)
        flow.user_agent = APPLICATION_NAME
        if flags:
            # This attempts to open an authorization page in the default web browser, and asks the user
            # to grant the bot access to their data. If the user grants permission, the run_flow()
            # function returns new credentials.
            credentials = tools.run_flow(flow, store, flags)
        else:  # Needed only for compatibility with Python 2.6
            credentials = tools.run(flow, store)
        print('Storing credentials to ' + credential_path)

get_credentials()

179
api/integrations/google/google-calendar
Executable file
@@ -0,0 +1,179 @@
#!/usr/bin/env python
from __future__ import print_function
import datetime
import httplib2
import itertools
import logging
import optparse
import os
from six.moves import urllib
import sys
import time
import traceback
from typing import List, Set, Tuple, Iterable, Optional

from oauth2client import client, tools
from oauth2client.file import Storage
try:
    from googleapiclient import discovery
except ImportError:
    logging.exception('Install google-api-python-client')

sys.path.append(os.path.join(os.path.dirname(__file__), '../../'))
import zulip

SCOPES = 'https://www.googleapis.com/auth/calendar.readonly'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Zulip'
HOME_DIR = os.path.expanduser('~')

# Our cached view of the calendar, updated periodically.
events = []  # type: List[Tuple[int, datetime.datetime, str]]

# Unique keys for events we've already sent, so we don't remind twice.
sent = set()  # type: Set[Tuple[int, datetime.datetime]]

sys.path.append(os.path.dirname(__file__))

parser = optparse.OptionParser(r"""

%prog \
    --user foo@zulip.com \
    --calendar calendarID@example.calendar.google.com

This integration can be used to send yourself reminders, on Zulip, of Google Calendar Events.

Before running this integration make sure you run the get-google-credentials file to give Zulip
access to certain aspects of your Google Account.

This integration should be run on your local machine. Your API key and other information are
revealed to local users through the command line.

Depends on: google-api-python-client
""")


parser.add_option('--interval',
                  dest='interval',
                  default=30,
                  type=int,
                  action='store',
                  help='Minutes before event for reminder [default: 30]',
                  metavar='MINUTES')

parser.add_option('--calendar',
                  dest = 'calendarID',
                  default = 'primary',
                  type = str,
                  action = 'store',
                  help = 'Calendar ID for the calendar you want to receive reminders from.')

parser.add_option_group(zulip.generate_option_group(parser))

(options, args) = parser.parse_args()

if not (options.zulip_email):
    parser.error('You must specify --user')

zulip_client = zulip.init_from_options(options)

def get_credentials():
    # type: () -> client.Credentials
    """Gets valid user credentials from storage.

    If nothing has been stored, or if the stored credentials are invalid,
    an exception is thrown and the user is informed to run the script in this directory to get
    credentials.

    Returns:
        Credentials, the obtained credential.
    """
    try:
        credential_path = os.path.join(HOME_DIR,
                                       'google-credentials.json')

        store = Storage(credential_path)
        credentials = store.get()

        return credentials
    except client.Error:
        logging.exception('Error while trying to open the `google-credentials.json` file.')
    except IOError:
        logging.error("Run the get-google-credentials script from this directory first.")


def get_events():
    # type: () -> Iterable[Tuple[int, datetime.datetime, str]]
    credentials = get_credentials()
    creds = credentials.authorize(httplib2.Http())
    service = discovery.build('calendar', 'v3', http=creds)

    now = datetime.datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
    feed = service.events().list(calendarId=options.calendarID, timeMin=now, maxResults=5,
                                 singleEvents=True, orderBy='startTime').execute()

    for event in feed["items"]:
        try:
            start = event["start"]["dateTime"]
        except KeyError:
            start = event["start"]["date"]
        start = start[:19]
        # All-day events can have only a date
        fmt = '%Y-%m-%dT%H:%M:%S' if 'T' in start else '%Y-%m-%d'
        start = datetime.datetime.strptime(start, fmt)
        try:
            yield (event["id"], start, event["summary"])
        except KeyError:
            yield (event["id"], start, "(No Title)")


def send_reminders():
    # type: () -> Optional[None]
    global sent

    messages = []
    keys = set()
    now = datetime.datetime.now()

    for id, start, summary in events:
        dt = start - now
        if dt.days == 0 and dt.seconds < 60 * options.interval:
            # The unique key includes the start time, because of
            # repeating events.
            key = (id, start)
            if key not in sent:
                if start.hour == 0 and start.minute == 0:
                    line = '%s is today.' % (summary,)
                else:
                    line = '%s starts at %s' % (summary, start.strftime('%H:%M'))
                print('Sending reminder:', line)
                messages.append(line)
                keys.add(key)

    if not messages:
        return

    if len(messages) == 1:
        message = 'Reminder: ' + messages[0]
    else:
        message = 'Reminder:\n\n' + '\n'.join('* ' + m for m in messages)

    zulip_client.send_message(dict(
        type = 'private',
        to = options.zulip_email,
        sender = options.zulip_email,
        content = message))

    sent.update(keys)

# Loop forever
for i in itertools.count():
    try:
        # We check reminders every minute, but only
        # download the calendar every 10 minutes.
        if not i % 10:
            events = list(get_events())
        send_reminders()
    except:
        logging.exception("Couldn't download Google calendar and/or couldn't post to Zulip.")
    time.sleep(60)

179
api/integrations/hg/zulip-changegroup.py
Executable file
@@ -0,0 +1,179 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Zulip hook for Mercurial changeset pushes.
# Copyright © 2012-2014 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#
# This hook is called when changesets are pushed to the master repository (ie
# `hg push`). See https://zulipchat.com/integrations for installation instructions.
from __future__ import absolute_import

import zulip
from six.moves import range
from typing import Any, Optional, Text
from mercurial import ui, repo

VERSION = "0.9"

def format_summary_line(web_url, user, base, tip, branch, node):
    # type: (str, str, int, int, str, Text) -> Text
    """
    Format the first line of the message, which contains summary
    information about the changeset and links to the changelog if a
    web URL has been configured:

    Jane Doe <jane@example.com> pushed 1 commit to master (170:e494a5be3393):
    """
    revcount = tip - base
    plural = "s" if revcount > 1 else ""

    if web_url:
        shortlog_base_url = web_url.rstrip("/") + "/shortlog/"
        summary_url = "{shortlog}{tip}?revcount={revcount}".format(
            shortlog=shortlog_base_url, tip=tip - 1, revcount=revcount)
        formatted_commit_count = "[{revcount} commit{s}]({url})".format(
            revcount=revcount, s=plural, url=summary_url)
    else:
        formatted_commit_count = "{revcount} commit{s}".format(
            revcount=revcount, s=plural)

    return u"**{user}** pushed {commits} to **{branch}** (`{tip}:{node}`):\n\n".format(
        user=user, commits=formatted_commit_count, branch=branch, tip=tip,
        node=node[:12])

def format_commit_lines(web_url, repo, base, tip):
    # type: (str, repo, int, int) -> str
    """
    Format the per-commit information for the message, including the one-line
    commit summary and a link to the diff if a web URL has been configured:
    """
    if web_url:
        rev_base_url = web_url.rstrip("/") + "/rev/"

    commit_summaries = []
    for rev in range(base, tip):
        rev_node = repo.changelog.node(rev)
        rev_ctx = repo.changectx(rev_node)
        one_liner = rev_ctx.description().split("\n")[0]

        if web_url:
            summary_url = rev_base_url + str(rev_ctx)
            summary = "* [{summary}]({url})".format(
                summary=one_liner, url=summary_url)
        else:
            summary = "* {summary}".format(summary=one_liner)

        commit_summaries.append(summary)

    return "\n".join(summary for summary in commit_summaries)

def send_zulip(email, api_key, site, stream, subject, content):
    # type: (str, str, str, str, str, Text) -> str
    """
    Send a message to Zulip using the provided credentials, which should be for
    a bot in most cases.
    """
    client = zulip.Client(email=email, api_key=api_key,
                          site=site,
                          client="ZulipMercurial/" + VERSION)

    message_data = {
        "type": "stream",
        "to": stream,
        "subject": subject,
        "content": content,
    }

    client.send_message(message_data)

def get_config(ui, item):
    # type: (ui, str) -> Optional[str]
    try:
        # configlist returns everything in lists.
        return ui.configlist('zulip', item)[0]
    except IndexError:
        return None

def hook(ui, repo, **kwargs):
    # type: (ui, repo, Optional[Text]) -> None
    """
    Invoked by configuring a [hook] entry in .hg/hgrc.
    """
    hooktype = kwargs["hooktype"]
    node = kwargs["node"]

    ui.debug("Zulip: received {hooktype} event\n".format(hooktype=hooktype))

    if hooktype != "changegroup":
        ui.warn("Zulip: {hooktype} not supported\n".format(hooktype=hooktype))
        exit(1)

    ctx = repo.changectx(node)
    branch = ctx.branch()

    # If `branches` isn't specified, notify on all branches.
    branch_whitelist = get_config(ui, "branches")
    branch_blacklist = get_config(ui, "ignore_branches")

    if branch_whitelist:
        # Only send notifications on branches we are watching.
        watched_branches = [b.lower().strip() for b in branch_whitelist.split(",")]
        if branch.lower() not in watched_branches:
            ui.debug("Zulip: ignoring event for {branch}\n".format(branch=branch))
            exit(0)

    if branch_blacklist:
        # Don't send notifications for branches we've ignored.
        ignored_branches = [b.lower().strip() for b in branch_blacklist.split(",")]
        if branch.lower() in ignored_branches:
            ui.debug("Zulip: ignoring event for {branch}\n".format(branch=branch))
            exit(0)

    # The first and final commits in the changeset.
    base = repo[node].rev()
    tip = len(repo)

    email = get_config(ui, "email")
    api_key = get_config(ui, "api_key")
    site = get_config(ui, "site")

    if not (email and api_key):
        ui.warn("Zulip: missing email or api_key configurations\n")
        ui.warn("in the [zulip] section of your .hg/hgrc.\n")
        exit(1)

    stream = get_config(ui, "stream")
    # Give a default stream if one isn't provided.
    if not stream:
        stream = "commits"

    web_url = get_config(ui, "web_url")
    user = ctx.user()
    content = format_summary_line(web_url, user, base, tip, branch, node)
    content += format_commit_lines(web_url, repo, base, tip)

    subject = branch

    ui.debug("Sending to Zulip:\n")
    ui.debug(content + "\n")

    send_zulip(email, api_key, site, stream, subject, content)
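
The hook() docstring above says it is wired up through a hook entry in .hg/hgrc, and get_config() reads every setting from the [zulip] section of that file. A minimal sketch of such an hgrc follows; the install path and credential values are placeholders, only email and api_key are required by the script, and stream defaults to "commits" while site, web_url, branches, and ignore_branches are optional.

# Illustrative .hg/hgrc snippet -- path and credentials are placeholders.
[hooks]
changegroup.zulip = python:/path/to/integrations/hg/zulip-changegroup.py:hook

[zulip]
email = hg-bot@example.com
api_key = 0123456789abcdef0123456789abcdef
site = https://zulip.example.com
stream = commits
branches = default,master
web_url = https://hg.example.com/repo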

149
api/integrations/jira/org/humbug/jira/ZulipListener.groovy
Normal file
@@ -0,0 +1,149 @@
/*
 * Copyright (c) 2014 Zulip, Inc
 */

package org.zulip.jira

import static com.atlassian.jira.event.type.EventType.*

import com.atlassian.jira.event.issue.AbstractIssueEventListener
import com.atlassian.jira.event.issue.IssueEvent

import java.util.logging.Level
import java.util.logging.Logger

import org.apache.commons.httpclient.HttpClient
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.methods.PostMethod
import org.apache.commons.httpclient.NameValuePair

class ZulipListener extends AbstractIssueEventListener {
    Logger LOGGER = Logger.getLogger(ZulipListener.class.getName());

    // The email address of one of the bots you created on your Zulip settings page.
    String zulipEmail = ""
    // That bot's API key.
    String zulipAPIKey = ""

    // What stream to send messages to. Must already exist.
    String zulipStream = "jira"

    // The base JIRA url for browsing
    String issueBaseUrl = "https://jira.COMPANY.com/browse/"

    // Your zulip domain
    String base_url = "https://zulip.example.com/"

    @Override
    void workflowEvent(IssueEvent event) {
        processIssueEvent(event)
    }

    String processIssueEvent(IssueEvent event) {
        String author = event.user.displayName
        String issueId = event.issue.key
        String issueUrl = issueBaseUrl + issueId
        String issueUrlMd = String.format("[%s](%s)", issueId, issueBaseUrl + issueId)
        String title = event.issue.summary
        String subject = truncate(String.format("%s: %s", issueId, title), 60)
        String assignee = "no one"
        if (event.issue.assignee) {
            assignee = event.issue.assignee.name
        }
        String comment = "";
        if (event.comment) {
            comment = event.comment.body
        }

        String content;

        // Event types:
        // https://docs.atlassian.com/jira/5.0/com/atlassian/jira/event/type/EventType.html
        // Issue API:
        // https://docs.atlassian.com/jira/5.0/com/atlassian/jira/issue/Issue.html
        switch (event.getEventTypeId()) {
            case ISSUE_COMMENTED_ID:
                content = String.format("%s **updated** %s with comment:\n\n> %s",
                                        author, issueUrlMd, comment)
                break
            case ISSUE_CREATED_ID:
                content = String.format("%s **created** %s priority %s, assigned to @**%s**: \n\n> %s",
                                        author, issueUrlMd, event.issue.priorityObject.name,
                                        assignee, title)
                break
            case ISSUE_ASSIGNED_ID:
                content = String.format("%s **reassigned** %s to **%s**",
                                        author, issueUrlMd, assignee)
                break
            case ISSUE_DELETED_ID:
                content = String.format("%s **deleted** %s!",
                                        author, issueUrlMd)
                break
            case ISSUE_RESOLVED_ID:
                content = String.format("%s **resolved** %s as %s:\n\n> %s",
                                        author, issueUrlMd, event.issue.resolutionObject.name,
                                        comment)
                break
            case ISSUE_CLOSED_ID:
                content = String.format("%s **closed** %s with resolution %s:\n\n> %s",
                                        author, issueUrlMd, event.issue.resolutionObject.name,
                                        comment)
                break
            case ISSUE_REOPENED_ID:
                content = String.format("%s **reopened** %s:\n\n> %s",
                                        author, issueUrlMd, comment)
                break
            default:
                return
        }

        sendStreamMessage(zulipStream, subject, content)
    }

    String post(String method, NameValuePair[] parameters) {
        PostMethod post = new PostMethod(zulipUrl(method))
        post.setRequestHeader("Content-Type", post.FORM_URL_ENCODED_CONTENT_TYPE)
        // TODO: Include more useful data in the User-agent
        post.setRequestHeader("User-agent", "ZulipJira/0.1")
        try {
            post.setRequestBody(parameters)
            HttpClient client = new HttpClient()
            client.executeMethod(post)
            String response = post.getResponseBodyAsString()
            if (post.getStatusCode() != HttpStatus.SC_OK) {
                String params = ""
                for (NameValuePair pair: parameters) {
                    params += "\n" + pair.getName() + ":" + pair.getValue()
                }
                LOGGER.log(Level.SEVERE, "Error sending Zulip message:\n" + response + "\n\n" +
                           "We sent:" + params)
            }
            return response;
        } catch (IOException e) {
            throw new RuntimeException(e)
        } finally {
            post.releaseConnection()
        }
    }

    String truncate(String string, int length) {
        if (string.length() < length) {
            return string
        }
        return string.substring(0, length - 3) + "..."
    }

    String sendStreamMessage(String stream, String subject, String message) {
        NameValuePair[] body = [new NameValuePair("api-key", zulipAPIKey),
                                new NameValuePair("email", zulipEmail),
                                new NameValuePair("type", "stream"),
                                new NameValuePair("to", stream),
                                new NameValuePair("subject", subject),
                                new NameValuePair("content", message)]
        return post("send_message", body);
    }

    String zulipUrl(method) {
        return base_url.replaceAll("/+$", "") + "/api/v1/" + method
    }
}

52
api/integrations/nagios/nagios-notify-zulip
Executable file
@@ -0,0 +1,52 @@
#!/usr/bin/env python
import optparse
import zulip

from typing import List, Text, Dict, Any

VERSION = "0.9"
# Nagios passes the notification details as command line options.
# In Nagios, "output" means "first line of output", and "long
# output" means "other lines of output".
parser = optparse.OptionParser()  # type: optparse.OptionParser
parser.add_option('--output', default='')
parser.add_option('--long-output', default='')
parser.add_option('--stream', default='nagios')
parser.add_option('--config', default='/etc/nagios3/zuliprc')
for opt in ('type', 'host', 'service', 'state'):
    parser.add_option('--' + opt)
(opts, args) = parser.parse_args()  # type: Any, List[Text]

client = zulip.Client(config_file=opts.config,
                      client="ZulipNagios/" + VERSION)  # type: zulip.Client

msg = dict(type='stream', to=opts.stream)  # type: Dict[str, Any]

# Set a subject based on the host or service in question. This enables
# threaded discussion of multiple concurrent issues, and provides useful
# context when narrowed.
#
# We send PROBLEM and RECOVERY messages to the same subject.
if opts.service is None:
    # Host notification
    thing = 'host'  # type: Text
    msg['subject'] = 'host %s' % (opts.host,)
else:
    # Service notification
    thing = 'service'
    msg['subject'] = 'service %s on %s' % (opts.service, opts.host)

if len(msg['subject']) > 60:
    msg['subject'] = msg['subject'][0:57].rstrip() + "..."
# e.g. **PROBLEM**: service is CRITICAL
msg['content'] = '**%s**: %s is %s' % (opts.type, thing, opts.state)

# The "long output" can contain newlines represented by "\n" escape sequences.
# The Nagios mail command uses /usr/bin/printf "%b" to expand these.
# We will be more conservative and handle just this one escape sequence.
output = (opts.output + '\n' + opts.long_output.replace(r'\n', '\n')).strip()  # type: Text
if output:
    # Put any command output in a code block.
    msg['content'] += ('\n\n~~~~\n' + output + "\n~~~~\n")

client.send_message(msg)
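
The script above loads its bot credentials with zulip.Client(config_file=opts.config), reading the file named by --config (by default /etc/nagios3/zuliprc). A minimal sketch of that file is shown below; the section and key names follow the standard zuliprc layout used by the Zulip Python bindings, and the email, key, and site values are placeholders for your own bot's credentials.

# Illustrative /etc/nagios3/zuliprc -- replace the values with your bot's credentials.
[api]
email = nagios-bot@example.com
key = 0123456789abcdef0123456789abcdef
site = https://zulip.example.com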

21
api/integrations/nagios/zulip_nagios.cfg
Normal file
@@ -0,0 +1,21 @@
define contact{
        contact_name                    zulip
        alias                           zulip
        service_notification_period     24x7
        host_notification_period        24x7
        service_notification_options    w,u,c,r
        host_notification_options       d,r
        service_notification_commands   notify-service-by-zulip
        host_notification_commands      notify-host-by-zulip
        }

# Zulip commands
define command {
    command_name notify-host-by-zulip
    command_line /usr/local/share/zulip/integrations/nagios/nagios-notify-zulip --stream=nagios --type="$NOTIFICATIONTYPE$" --host="$HOSTADDRESS$" --state="$HOSTSTATE$" --output="$HOSTOUTPUT$" --long-output="$LONGHOSTOUTPUT$"
}

define command {
    command_name notify-service-by-zulip
    command_line /usr/local/share/zulip/integrations/nagios/nagios-notify-zulip --stream=nagios --type="$NOTIFICATIONTYPE$" --host="$HOSTADDRESS$" --service="$SERVICEDESC$" --state="$SERVICESTATE$" --output="$SERVICEOUTPUT$" --long-output="$LONGSERVICEOUTPUT$"
}
Some files were not shown because too many files have changed in this diff.