mirror of
https://github.com/zulip/zulip.git
synced 2025-10-23 04:52:12 +00:00
Compare commits
1 Commits
11.3
...
enterprise
Author | SHA1 | Date | |
---|---|---|---|
|
2a67775c27 |
12
.codecov.yml
12
.codecov.yml
@@ -1,12 +0,0 @@
|
||||
comment: off
|
||||
|
||||
coverage:
|
||||
status:
|
||||
project:
|
||||
default:
|
||||
target: auto
|
||||
# Codecov has the tendency to report a lot of false negatives,
|
||||
# so we basically suppress comments completely.
|
||||
threshold: 50%
|
||||
base: auto
|
||||
patch: off
|
@@ -1,32 +0,0 @@
|
||||
te
|
||||
ans
|
||||
pullrequest
|
||||
ist
|
||||
cros
|
||||
wit
|
||||
nwe
|
||||
circularly
|
||||
ned
|
||||
ba
|
||||
ressemble
|
||||
ser
|
||||
sur
|
||||
hel
|
||||
fpr
|
||||
alls
|
||||
nd
|
||||
ot
|
||||
womens
|
||||
vise
|
||||
falsy
|
||||
ro
|
||||
derails
|
||||
forin
|
||||
uper
|
||||
slac
|
||||
couldn
|
||||
ges
|
||||
assertIn
|
||||
thirdparty
|
||||
asend
|
||||
COO
|
@@ -1,25 +0,0 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
end_of_line = lf
|
||||
charset = utf-8
|
||||
indent_size = 4
|
||||
indent_style = space
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
[[shell]]
|
||||
binary_next_line = true
|
||||
switch_case_indent = true
|
||||
|
||||
[{*.{cjs,cts,js,json,mjs,mts,ts},check-openapi}]
|
||||
max_line_length = 100
|
||||
|
||||
[*.{py,pyi}]
|
||||
max_line_length = 110
|
||||
|
||||
[*.{md,svg,rb,pp,yaml,yml}]
|
||||
indent_size = 2
|
||||
|
||||
[package.json]
|
||||
indent_size = 2
|
54
.gitattributes
vendored
54
.gitattributes
vendored
@@ -1,34 +1,20 @@
|
||||
# DIFFS: Noise suppression.
|
||||
#
|
||||
# Suppress noisy generated files in diffs.
|
||||
# (When you actually want to see these diffs, use `git diff -a`.)
|
||||
|
||||
# Large test fixtures:
|
||||
corporate/tests/stripe_fixtures/*.json -diff
|
||||
|
||||
|
||||
# FORMATTING
|
||||
|
||||
# Maintain LF (Unix-style) newlines in text files.
|
||||
* text=auto eol=lf
|
||||
|
||||
# Make sure various media files never get somehow auto-detected as text
|
||||
# and then newline-converted.
|
||||
*.gif binary
|
||||
*.jpg binary
|
||||
*.jpeg binary
|
||||
*.eot binary
|
||||
*.woff binary
|
||||
*.woff2 binary
|
||||
*.ttf binary
|
||||
*.png binary
|
||||
*.otf binary
|
||||
*.tif binary
|
||||
*.ogg binary
|
||||
*.bson binary
|
||||
*.bmp binary
|
||||
*.mp3 binary
|
||||
*.pdf binary
|
||||
|
||||
# Treat SVG files as code for diffing purposes.
|
||||
*.svg diff
|
||||
.gitignore export-ignore
|
||||
.gitattributes export-ignore
|
||||
/analytics export-ignore
|
||||
/assets export-ignore
|
||||
/bots export-ignore
|
||||
/corporate export-ignore
|
||||
/static export-ignore
|
||||
/tools export-ignore
|
||||
/zilencer export-ignore
|
||||
/templates/analytics export-ignore
|
||||
/templates/corporate export-ignore
|
||||
/templates/zilencer export-ignore
|
||||
/puppet/zulip_internal export-ignore
|
||||
/zproject/local_settings.py export-ignore
|
||||
/zproject/test_settings.py export-ignore
|
||||
/zerver/fixtures export-ignore
|
||||
/zerver/tests.py export-ignore
|
||||
/zerver/tests export-ignore
|
||||
/node_modules export-ignore
|
||||
/humbug export-ignore
|
||||
|
3
.github/FUNDING.yml
vendored
3
.github/FUNDING.yml
vendored
@@ -1,3 +0,0 @@
|
||||
github: zulip
|
||||
patreon: zulip
|
||||
open_collective: zulip
|
10
.github/ISSUE_TEMPLATE/1_discussed_on_czo.md
vendored
10
.github/ISSUE_TEMPLATE/1_discussed_on_czo.md
vendored
@@ -1,10 +0,0 @@
|
||||
---
|
||||
name: Issue discussed in the Zulip development community
|
||||
about: Bug report, feature or improvement already discussed on chat.zulip.org.
|
||||
---
|
||||
|
||||
<!-- Issue description -->
|
||||
|
||||
<!-- Link to a message in the chat.zulip.org discussion. Message links will still work even if the topic is renamed or resolved. Link back to this issue from the chat.zulip.org thread. -->
|
||||
|
||||
CZO thread
|
18
.github/ISSUE_TEMPLATE/2_bug_report.md
vendored
18
.github/ISSUE_TEMPLATE/2_bug_report.md
vendored
@@ -1,18 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: A concrete bug report with steps to reproduce the behavior. (See also "Possible bug" below.)
|
||||
labels: ["bug"]
|
||||
---
|
||||
|
||||
<!-- Describe what you were expecting to see, what you saw instead, and steps to take in order to reproduce the buggy behavior. Screenshots can be helpful. -->
|
||||
|
||||
<!-- Check the box for the version of Zulip you are using (see https://zulip.com/help/view-zulip-version).-->
|
||||
|
||||
**Zulip Server and web app version:**
|
||||
|
||||
- [ ] Zulip Cloud (`*.zulipchat.com`)
|
||||
- [ ] Zulip Server 10.x
|
||||
- [ ] Zulip Server 9.x
|
||||
- [ ] Zulip Server 8.x
|
||||
- [ ] Zulip Server 7.x or older
|
||||
- [ ] Other or not sure
|
6
.github/ISSUE_TEMPLATE/3_feature_request.md
vendored
6
.github/ISSUE_TEMPLATE/3_feature_request.md
vendored
@@ -1,6 +0,0 @@
|
||||
---
|
||||
name: Feature or improvement request
|
||||
about: A specific proposal for a new feature of improvement. (See also "Feature suggestion or feedback" below.)
|
||||
---
|
||||
|
||||
<!-- Describe the proposal, including how it would help you or your organization. -->
|
14
.github/ISSUE_TEMPLATE/config.yml
vendored
14
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,14 +0,0 @@
|
||||
blank_issues_enabled: true
|
||||
contact_links:
|
||||
- name: Possible bug
|
||||
url: https://zulip.readthedocs.io/en/latest/contributing/reporting-bugs.html
|
||||
about: Report unexpected behavior that may be a bug.
|
||||
- name: Feature suggestion or feedback
|
||||
url: https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html
|
||||
about: Start a discussion about your idea for improving Zulip.
|
||||
- name: Issue with running or upgrading a Zulip server
|
||||
url: https://zulip.readthedocs.io/en/latest/production/troubleshooting.html
|
||||
about: We provide free, interactive support for the vast majority of questions about running a Zulip server.
|
||||
- name: Other support requests and sales questions
|
||||
url: https://zulip.com/help/contact-support
|
||||
about: Contact us — we're happy to help!
|
82
.github/funding.json
vendored
82
.github/funding.json
vendored
@@ -1,82 +0,0 @@
|
||||
{
|
||||
"version": "v1.0.0",
|
||||
"entity": {
|
||||
"type": "organisation",
|
||||
"role": "steward",
|
||||
"name": "Kandra Labs, Inc.",
|
||||
"email": "support@zulip.com",
|
||||
"description": "Guiding the Zulip community in developing a world-class organized team chat product with apps for every major desktop and mobile platform requires leadership from a talented, dedicated team. We believe that the only sustainable model is for our core team to be compensated fairly for their time. We have thus founded a company (Kandra Labs) to steward and financially support Zulip’s development. We are growing our business sustainably, without venture capital funding. VCs are incentivized to push companies to gamble for explosive growth. Often, the result is that a company with a useful product burns rapidly through its resources and goes out of business. We have built Zulip as a sustainable business (also supported by SBIR grants from the US National Science Foundation), and are being thoughtful about our pace of spending. Funding our company without venture capital also allows us to live by our values, without investor pressure to compromise them when doing so might be “good business” or “what everyone does”.",
|
||||
"webpageUrl": {
|
||||
"url": "https://zulip.com/values/",
|
||||
"wellKnown": "https://zulip.com/.well-known/funding-manifest-urls"
|
||||
}
|
||||
},
|
||||
"projects": [
|
||||
{
|
||||
"guid": "zulip",
|
||||
"name": "Zulip",
|
||||
"description": "Zulip is an open-source team chat application designed for seamless remote and hybrid work. With conversations organized by topic, Zulip is ideal for both live and asynchronous communication. Zulip’s 100% open-source software is available as a cloud service or a self-hosted solution, and is used by thousands of organizations around the world. An important part of Zulip’s mission is ensuring that worthy organizations, from programming-language developers to research communities, are able to use Zulip whether or not they have funding. For this reason, we sponsor Zulip Cloud Standard for open source projects, non-profits, education, and academic research. This program has grown exponentially since its inception; today we are proud to fully sponsor Zulip hosting for several hundred organizations. Support from the community will help us continue to afford these programs as their popularity grows. ",
|
||||
"webpageUrl": {
|
||||
"url": "https://zulip.com/",
|
||||
"wellKnown": "https://zulip.com/.well-known/funding-manifest-urls"
|
||||
},
|
||||
"repositoryUrl": {
|
||||
"url": "https://github.com/zulip"
|
||||
},
|
||||
"licenses": ["spdx:Apache-2.0"],
|
||||
"tags": ["communication", "team-chat", "collaboration"]
|
||||
}
|
||||
],
|
||||
"funding": {
|
||||
"channels": [
|
||||
{
|
||||
"guid": "github-sponsors",
|
||||
"type": "payment-provider",
|
||||
"address": "https://github.com/sponsors/zulip",
|
||||
"description": "Preferred channel for sponsoring Zulip, since GitHub Sponsors does not charge any fees to sponsored projects."
|
||||
},
|
||||
{
|
||||
"guid": "patreon",
|
||||
"type": "payment-provider",
|
||||
"address": "https://patreon.com/zulip"
|
||||
},
|
||||
{
|
||||
"guid": "open-collective",
|
||||
"type": "payment-provider",
|
||||
"address": "https://opencollective.com/zulip"
|
||||
}
|
||||
],
|
||||
"plans": [
|
||||
{
|
||||
"guid": "github-sponsors",
|
||||
"status": "active",
|
||||
"name": "Support Zulip",
|
||||
"description": "Contribute to Zulip's development and free hosting for open source projects and other worthy organizations!",
|
||||
"amount": 0,
|
||||
"currency": "USD",
|
||||
"frequency": "monthly",
|
||||
"channels": ["github-sponsors"]
|
||||
},
|
||||
{
|
||||
"guid": "patreon",
|
||||
"status": "active",
|
||||
"name": "Support Zulip",
|
||||
"description": "Contribute to Zulip's development and free hosting for open source projects and other worthy organizations!",
|
||||
"amount": 0,
|
||||
"currency": "USD",
|
||||
"frequency": "monthly",
|
||||
"channels": ["patreon"]
|
||||
},
|
||||
{
|
||||
"guid": "open-collective",
|
||||
"status": "active",
|
||||
"name": "Support Zulip",
|
||||
"description": "Contribute to Zulip's development and free hosting for open source projects and other worthy organizations!",
|
||||
"amount": 0,
|
||||
"currency": "USD",
|
||||
"frequency": "monthly",
|
||||
"channels": ["open-collective"]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
43
.github/pull_request_template.md
vendored
43
.github/pull_request_template.md
vendored
@@ -1,43 +0,0 @@
|
||||
<!-- Describe your pull request here.-->
|
||||
|
||||
Fixes: <!-- Issue link, or clear description.-->
|
||||
|
||||
<!-- If the PR makes UI changes, always include one or more still screenshots to demonstrate your changes. If it seems helpful, add a screen capture of the new functionality as well.
|
||||
|
||||
Tooling tips: https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html
|
||||
-->
|
||||
|
||||
**Screenshots and screen captures:**
|
||||
|
||||
<details>
|
||||
<summary>Self-review checklist</summary>
|
||||
|
||||
<!-- Prior to submitting a PR, follow our step-by-step guide to review your own code:
|
||||
https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code -->
|
||||
|
||||
<!-- Once you create the PR, check off all the steps below that you have completed.
|
||||
If any of these steps are not relevant or you have not completed, leave them unchecked.-->
|
||||
|
||||
- [ ] [Self-reviewed](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code) the changes for clarity and maintainability
|
||||
(variable names, code reuse, readability, etc.).
|
||||
|
||||
Communicate decisions, questions, and potential concerns.
|
||||
|
||||
- [ ] Explains differences from previous plans (e.g., issue description).
|
||||
- [ ] Highlights technical choices and bugs encountered.
|
||||
- [ ] Calls out remaining decisions and concerns.
|
||||
- [ ] Automated tests verify logic where appropriate.
|
||||
|
||||
Individual commits are ready for review (see [commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html)).
|
||||
|
||||
- [ ] Each commit is a coherent idea.
|
||||
- [ ] Commit message(s) explain reasoning and motivation for changes.
|
||||
|
||||
Completed manual review and testing of the following:
|
||||
|
||||
- [ ] Visual appearance of the changes.
|
||||
- [ ] Responsiveness and internationalization.
|
||||
- [ ] Strings and tooltips.
|
||||
- [ ] End-to-end functionality of buttons, interactions and flows.
|
||||
- [ ] Corner cases, error conditions, and easily imagined bugs.
|
||||
</details>
|
@@ -1,46 +0,0 @@
|
||||
name: Check feature level updated
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "api_docs/**"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-feature-level-updated:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.x"
|
||||
|
||||
- name: Add required permissions
|
||||
run: chmod +x ./tools/check-feature-level-updated
|
||||
|
||||
- name: Run tools/check-feature-level-updated
|
||||
id: run_check
|
||||
run: ./tools/check-feature-level-updated >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Report status to CZO
|
||||
if: ${{ steps.run_check.outputs.fail == 'true' && github.repository == 'zulip/zulip'}}
|
||||
uses: zulip/github-actions-zulip/send-message@v1
|
||||
with:
|
||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
||||
email: "github-actions-bot@chat.zulip.org"
|
||||
organization-url: "https://chat.zulip.org"
|
||||
to: "automated testing"
|
||||
topic: ${{ steps.run_check.outputs.topic }}
|
||||
type: "stream"
|
||||
content: ${{ steps.run_check.outputs.content }}
|
||||
|
||||
- name: Fail job if feature level not updated in API docs
|
||||
if: ${{ steps.run_check.outputs.fail == 'true' }}
|
||||
run: exit 1
|
40
.github/workflows/codeql-analysis.yml
vendored
40
.github/workflows/codeql-analysis.yml
vendored
@@ -1,40 +0,0 @@
|
||||
name: "Code scanning"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["*.x", chat.zulip.org, main]
|
||||
tags: ["*"]
|
||||
pull_request:
|
||||
branches: ["*.x", chat.zulip.org, main]
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}"
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
CodeQL:
|
||||
permissions:
|
||||
actions: read # for github/codeql-action/init to get workflow details
|
||||
contents: read # for actions/checkout to fetch code
|
||||
security-events: write # for github/codeql-action/analyze to upload SARIF results
|
||||
if: ${{!github.event.repository.private}}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Check out repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
|
||||
# Override language selection by uncommenting this and choosing your languages
|
||||
# with:
|
||||
# languages: go, javascript, csharp, python, cpp, java
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
325
.github/workflows/production-suite.yml
vendored
325
.github/workflows/production-suite.yml
vendored
@@ -1,325 +0,0 @@
|
||||
name: Zulip production suite
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["*.x", chat.zulip.org, main]
|
||||
tags: ["*"]
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/production-suite.yml
|
||||
- "**/migrations/**"
|
||||
- manage.py
|
||||
- pnpm-lock.yaml
|
||||
- puppet/**
|
||||
- scripts/**
|
||||
- tools/**
|
||||
- uv.lock
|
||||
- web/babel.config.js
|
||||
- web/postcss.config.js
|
||||
- web/third/**
|
||||
- web/webpack.config.ts
|
||||
- zerver/worker/queue_processors.py
|
||||
- zerver/lib/push_notifications.py
|
||||
- zerver/lib/storage.py
|
||||
- zerver/decorator.py
|
||||
- zproject/**
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}"
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
production_build:
|
||||
# This job builds a release tarball from the current commit, which
|
||||
# will be used for all of the following install/upgrade tests.
|
||||
name: Ubuntu 22.04 production build
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
# Docker images are built from 'tools/ci/Dockerfile'; the comments at
|
||||
# the top explain how to build and upload these images.
|
||||
# Ubuntu 22.04 ships with Python 3.10.12.
|
||||
container: zulip/ci:jammy
|
||||
|
||||
steps:
|
||||
- name: Add required permissions
|
||||
run: |
|
||||
# The checkout actions doesn't clone to ~/zulip or allow
|
||||
# us to use the path option to clone outside the current
|
||||
# /__w/zulip/zulip directory. Since this directory is owned
|
||||
# by root we need to change it's ownership to allow the
|
||||
# github user to clone the code here.
|
||||
# Note: /__w/ is a docker volume mounted to $GITHUB_WORKSPACE
|
||||
# which is /home/runner/work/.
|
||||
sudo chown -R github .
|
||||
|
||||
# This is the GitHub Actions specific cache directory the
|
||||
# the current github user must be able to access for the
|
||||
# cache action to work. It is owned by root currently.
|
||||
sudo chmod -R 0777 /__w/_temp/
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Create cache directories
|
||||
run: |
|
||||
dirs=(/srv/zulip-emoji-cache)
|
||||
sudo mkdir -p "${dirs[@]}"
|
||||
sudo chown -R github "${dirs[@]}"
|
||||
|
||||
- name: Restore pnpm store
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: /__w/.pnpm-store
|
||||
key: v1-pnpm-store-jammy-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
|
||||
- name: Restore uv cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.cache/uv
|
||||
key: uv-jammy-${{ hashFiles('uv.lock') }}
|
||||
restore-keys: uv-jammy-
|
||||
|
||||
- name: Restore emoji cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: /srv/zulip-emoji-cache
|
||||
key: v1-emoji-jammy-${{ hashFiles('tools/setup/emoji/emoji_map.json') }}-${{ hashFiles('tools/setup/emoji/build_emoji') }}-${{ hashFiles('tools/setup/emoji/emoji_setup_utils.py') }}-${{ hashFiles('tools/setup/emoji/emoji_names.py') }}-${{ hashFiles('package.json') }}
|
||||
restore-keys: v1-emoji-jammy
|
||||
|
||||
- name: Build production tarball
|
||||
run: ./tools/ci/production-build
|
||||
|
||||
- name: Upload production build artifacts for install jobs
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: production-tarball
|
||||
path: /tmp/production-build
|
||||
retention-days: 1
|
||||
|
||||
- name: Verify pnpm store path
|
||||
run: |
|
||||
set -x
|
||||
path="$(pnpm store path)"
|
||||
[[ "$path" == /__w/.pnpm-store/* ]]
|
||||
|
||||
- name: Minimize uv cache
|
||||
run: uv cache prune --ci
|
||||
|
||||
- name: Generate failure report string
|
||||
id: failure_report_string
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Report status to CZO
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
uses: zulip/github-actions-zulip/send-message@v1
|
||||
with:
|
||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
||||
email: "github-actions-bot@chat.zulip.org"
|
||||
organization-url: "https://chat.zulip.org"
|
||||
to: "automated testing"
|
||||
topic: ${{ steps.failure_report_string.outputs.topic }}
|
||||
type: "stream"
|
||||
content: ${{ steps.failure_report_string.outputs.content }}
|
||||
|
||||
production_install:
|
||||
# This job installs the server release tarball built above on a
|
||||
# range of platforms, and does some basic health checks on the
|
||||
# resulting installer Zulip server.
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
# Docker images are built from 'tools/ci/Dockerfile'; the comments at
|
||||
# the top explain how to build and upload these images.
|
||||
- docker_image: zulip/ci:jammy
|
||||
name: Ubuntu 22.04 production install and PostgreSQL upgrade with pgroonga
|
||||
os: jammy
|
||||
extra-args: ""
|
||||
|
||||
- docker_image: zulip/ci:noble
|
||||
name: Ubuntu 24.04 production install
|
||||
os: noble
|
||||
extra-args: ""
|
||||
|
||||
- docker_image: zulip/ci:bookworm
|
||||
name: Debian 12 production install with custom db name and user
|
||||
os: bookworm
|
||||
extra-args: --test-custom-db
|
||||
|
||||
- docker_image: zulip/ci:trixie
|
||||
name: Debian 13 production install
|
||||
os: trixie
|
||||
extra-args: ""
|
||||
|
||||
name: ${{ matrix.name }}
|
||||
container:
|
||||
image: ${{ matrix.docker_image }}
|
||||
options: --init
|
||||
runs-on: ubuntu-latest
|
||||
needs: production_build
|
||||
|
||||
steps:
|
||||
- name: Download built production tarball
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: production-tarball
|
||||
path: /tmp
|
||||
|
||||
- name: Add required permissions and setup
|
||||
run: |
|
||||
# This is the GitHub Actions specific cache directory the
|
||||
# the current github user must be able to access for the
|
||||
# cache action to work. It is owned by root currently.
|
||||
sudo chmod -R 0777 /__w/_temp/
|
||||
|
||||
# Since actions/download-artifact@v4 loses all the permissions
|
||||
# of the tarball uploaded by the upload artifact fix those.
|
||||
chmod +x /tmp/production-upgrade-pg
|
||||
chmod +x /tmp/production-pgroonga
|
||||
chmod +x /tmp/production-install
|
||||
chmod +x /tmp/production-verify
|
||||
chmod +x /tmp/generate-failure-message
|
||||
|
||||
- name: Create cache directories
|
||||
run: |
|
||||
dirs=(/srv/zulip-emoji-cache)
|
||||
sudo mkdir -p "${dirs[@]}"
|
||||
sudo chown -R github "${dirs[@]}"
|
||||
|
||||
- name: Install production
|
||||
run: sudo /tmp/production-install ${{ matrix.extra-args }}
|
||||
|
||||
- name: Verify install
|
||||
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
||||
|
||||
- name: Install pgroonga
|
||||
if: ${{ matrix.os == 'jammy' }}
|
||||
run: sudo /tmp/production-pgroonga
|
||||
|
||||
- name: Verify install after installing pgroonga
|
||||
if: ${{ matrix.os == 'jammy' }}
|
||||
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
||||
|
||||
- name: Upgrade postgresql
|
||||
if: ${{ matrix.os == 'jammy' }}
|
||||
run: sudo /tmp/production-upgrade-pg
|
||||
|
||||
- name: Verify install after upgrading postgresql
|
||||
if: ${{ matrix.os == 'jammy' }}
|
||||
run: sudo /tmp/production-verify ${{ matrix.extra-args }}
|
||||
|
||||
- name: Generate failure report string
|
||||
id: failure_report_string
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
run: /tmp/generate-failure-message >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Report status to CZO
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
uses: zulip/github-actions-zulip/send-message@v1
|
||||
with:
|
||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
||||
email: "github-actions-bot@chat.zulip.org"
|
||||
organization-url: "https://chat.zulip.org"
|
||||
to: "automated testing"
|
||||
topic: ${{ steps.failure_report_string.outputs.topic }}
|
||||
type: "stream"
|
||||
content: ${{ steps.failure_report_string.outputs.content }}
|
||||
|
||||
production_upgrade:
|
||||
# The production upgrade job starts with a container with a
|
||||
# previous Zulip release installed, and attempts to upgrade it to
|
||||
# the release tarball built for the current commit being tested.
|
||||
#
|
||||
# This is intended to catch bugs that result in the upgrade
|
||||
# process failing.
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
# Docker images are built from 'tools/ci/Dockerfile.prod'; the comments at
|
||||
# the top explain how to build and upload these images.
|
||||
- docker_image: zulip/ci:jammy-6.0
|
||||
name: 6.0 Version Upgrade
|
||||
os: jammy
|
||||
- docker_image: zulip/ci:bookworm-7.0
|
||||
name: 7.0 Version Upgrade
|
||||
os: bookworm
|
||||
- docker_image: zulip/ci:bookworm-8.0
|
||||
name: 8.0 Version Upgrade
|
||||
os: bookworm
|
||||
- docker_image: zulip/ci:noble-9.0
|
||||
name: 9.0 Version Upgrade
|
||||
os: noble
|
||||
- docker_image: zulip/ci:noble-10.0
|
||||
name: 10.0 Version Upgrade
|
||||
os: noble
|
||||
- docker_image: zulip/ci:trixie-11.0
|
||||
name: 11.0 Version Upgrade
|
||||
os: trixie
|
||||
|
||||
name: ${{ matrix.name }}
|
||||
container:
|
||||
image: ${{ matrix.docker_image }}
|
||||
options: --init
|
||||
runs-on: ubuntu-latest
|
||||
needs: production_build
|
||||
|
||||
steps:
|
||||
- name: Download built production tarball
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: production-tarball
|
||||
path: /tmp
|
||||
|
||||
- name: Add required permissions and setup
|
||||
run: |
|
||||
# This is the GitHub Actions specific cache directory the
|
||||
# the current github user must be able to access for the
|
||||
# cache action to work. It is owned by root currently.
|
||||
sudo chmod -R 0777 /__w/_temp/
|
||||
|
||||
# Since actions/download-artifact@v4 loses all the permissions
|
||||
# of the tarball uploaded by the upload artifact fix those.
|
||||
chmod +x /tmp/production-upgrade
|
||||
chmod +x /tmp/production-verify
|
||||
chmod +x /tmp/generate-failure-message
|
||||
|
||||
- name: Create cache directories
|
||||
run: |
|
||||
dirs=(/srv/zulip-emoji-cache)
|
||||
sudo mkdir -p "${dirs[@]}"
|
||||
sudo chown -R github "${dirs[@]}"
|
||||
|
||||
- name: Upgrade production
|
||||
run: sudo /tmp/production-upgrade
|
||||
|
||||
# TODO: We should be running production-verify here, but it
|
||||
# doesn't pass yet.
|
||||
#
|
||||
# - name: Verify install
|
||||
# run: sudo /tmp/production-verify
|
||||
|
||||
- name: Generate failure report string
|
||||
id: failure_report_string
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
run: /tmp/generate-failure-message >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Report status to CZO
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
uses: zulip/github-actions-zulip/send-message@v1
|
||||
with:
|
||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
||||
email: "github-actions-bot@chat.zulip.org"
|
||||
organization-url: "https://chat.zulip.org"
|
||||
to: "automated testing"
|
||||
topic: ${{ steps.failure_report_string.outputs.topic }}
|
||||
type: "stream"
|
||||
content: ${{ steps.failure_report_string.outputs.content }}
|
27
.github/workflows/update-oneclick-apps.yml
vendored
27
.github/workflows/update-oneclick-apps.yml
vendored
@@ -1,27 +0,0 @@
|
||||
name: Update one click apps
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
update-digitalocean-oneclick-app:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Update DigitalOcean one click app
|
||||
env:
|
||||
DIGITALOCEAN_API_KEY: ${{ secrets.ONE_CLICK_ACTION_DIGITALOCEAN_API_KEY }}
|
||||
ZULIP_API_KEY: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_API_KEY }}
|
||||
ZULIP_EMAIL: ${{ secrets.ONE_CLICK_ACTION_ZULIP_BOT_EMAIL }}
|
||||
ZULIP_SITE: https://chat.zulip.org
|
||||
ONE_CLICK_ACTION_STREAM: kandra ops
|
||||
PYTHON_DIGITALOCEAN_REQUEST_TIMEOUT_SEC: 30
|
||||
RELEASE_VERSION: ${{ github.event.release.tag_name }}
|
||||
run: |
|
||||
export PATH="$HOME/.local/bin:$PATH"
|
||||
git clone https://github.com/zulip/marketplace-partners
|
||||
pip3 install python-digitalocean zulip fab-classic PyNaCl
|
||||
echo $PATH
|
||||
python3 tools/oneclickapps/prepare_digital_ocean_one_click_app_release.py
|
266
.github/workflows/zulip-ci.yml
vendored
266
.github/workflows/zulip-ci.yml
vendored
@@ -1,266 +0,0 @@
|
||||
# NOTE: Everything test in this file should be in `tools/test-all`. If there's a
|
||||
# reason not to run it there, it should be there as a comment
|
||||
# explaining why.
|
||||
|
||||
name: Zulip CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["*.x", chat.zulip.org, main]
|
||||
tags: ["*"]
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}"
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
# Base images are built using `tools/ci/Dockerfile`.
|
||||
# The comments at the top explain how to build and upload these images.
|
||||
# Ubuntu 22.04 ships with Python 3.10.12.
|
||||
- docker_image: zulip/ci:jammy
|
||||
name: Ubuntu 22.04 (Python 3.10, backend + frontend)
|
||||
os: jammy
|
||||
include_documentation_tests: false
|
||||
include_frontend_tests: true
|
||||
# Debian 12 ships with Python 3.11.2.
|
||||
- docker_image: zulip/ci:bookworm
|
||||
name: Debian 12 (Python 3.11, backend + documentation)
|
||||
os: bookworm
|
||||
include_documentation_tests: true
|
||||
include_frontend_tests: false
|
||||
# Ubuntu 24.04 ships with Python 3.12.2.
|
||||
- docker_image: zulip/ci:noble
|
||||
name: Ubuntu 24.04 (Python 3.12, backend)
|
||||
os: noble
|
||||
include_documentation_tests: false
|
||||
include_frontend_tests: false
|
||||
# Debian 13 ships with Python 3.13.5.
|
||||
- docker_image: zulip/ci:trixie
|
||||
name: Debian 13 (Python 3.13, backend)
|
||||
os: trixie
|
||||
include_documentation_tests: false
|
||||
include_frontend_tests: false
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
name: ${{ matrix.name }}
|
||||
container: ${{ matrix.docker_image }}
|
||||
env:
|
||||
# GitHub Actions sets HOME to /github/home which causes
|
||||
# problem later in provision and frontend test that runs
|
||||
# tools/setup/postgresql-init-dev-db because of the .pgpass
|
||||
# location. PostgreSQL (psql) expects .pgpass to be at
|
||||
# /home/github/.pgpass and setting home to `/home/github/`
|
||||
# ensures it written there because we write it to ~/.pgpass.
|
||||
HOME: /home/github/
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Create cache directories
|
||||
run: |
|
||||
dirs=(/srv/zulip-emoji-cache)
|
||||
sudo mkdir -p "${dirs[@]}"
|
||||
sudo chown -R github "${dirs[@]}"
|
||||
|
||||
- name: Restore pnpm store
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: /__w/.pnpm-store
|
||||
key: v1-pnpm-store-${{ matrix.os }}-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
|
||||
- name: Restore uv cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.cache/uv
|
||||
key: uv-${{ matrix.os }}-${{ hashFiles('uv.lock') }}
|
||||
restore-keys: uv-${{ matrix.os }}-
|
||||
|
||||
- name: Restore emoji cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: /srv/zulip-emoji-cache
|
||||
key: v1-emoji-${{ matrix.os }}-${{ hashFiles('tools/setup/emoji/emoji_map.json', 'tools/setup/emoji/build_emoji', 'tools/setup/emoji/emoji_setup_utils.py', 'tools/setup/emoji/emoji_names.py', 'package.json') }}
|
||||
restore-keys: v1-emoji-${{ matrix.os }}
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
# This is the main setup job for the test suite
|
||||
./tools/ci/setup-backend --skip-dev-db-build
|
||||
scripts/lib/clean_unused_caches.py --verbose --threshold=0
|
||||
|
||||
- name: Run tools test
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
./tools/test-tools
|
||||
|
||||
- name: Run Codespell lint
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
./tools/run-codespell
|
||||
|
||||
# We run the tests that are only run in a specific job early, so
|
||||
# that we get feedback to the developer about likely failures as
|
||||
# quickly as possible. Backend/mypy failures that aren't
|
||||
# identical across different versions are much more rare than
|
||||
# frontend linter or node test failures.
|
||||
- name: Run documentation and api tests
|
||||
if: ${{ matrix.include_documentation_tests }}
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
# In CI, we only test links we control in test-documentation to avoid flakes
|
||||
./tools/test-documentation --skip-external-links
|
||||
./tools/test-help-documentation --skip-external-links
|
||||
./tools/test-api
|
||||
|
||||
- name: Run node tests
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
# Run the node tests first, since they're fast and deterministic
|
||||
./tools/test-js-with-node --coverage --parallel=1
|
||||
|
||||
- name: Run frontend lint
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
./tools/lint --groups=frontend --skip=gitlint # gitlint disabled because flaky
|
||||
|
||||
- name: Check schemas
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
# Check that various schemas are consistent. (is fast)
|
||||
./tools/check-schemas
|
||||
|
||||
- name: Check capitalization of strings
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
./manage.py makemessages --locale en
|
||||
PYTHONWARNINGS=ignore ./tools/check-capitalization --no-generate
|
||||
PYTHONWARNINGS=ignore ./tools/check-frontend-i18n --no-generate
|
||||
|
||||
- name: Run puppeteer tests
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
./tools/test-js-with-puppeteer
|
||||
|
||||
- name: Check pnpm dedupe
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
run: pnpm dedupe --check
|
||||
|
||||
- name: Run backend lint
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
echo "Test suite is running under $(python --version)."
|
||||
./tools/lint --groups=backend --skip=gitlint,mypy # gitlint disabled because flaky
|
||||
|
||||
- name: Run backend tests
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
./tools/test-backend ${{ matrix.os != 'bookworm' && '--coverage' || '' }} --xml-report --no-html-report --include-webhooks --include-transaction-tests --no-cov-cleanup --ban-console-output
|
||||
|
||||
- name: Run mypy
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
# We run mypy after the backend tests so we get output from the
|
||||
# backend tests, which tend to uncover more serious problems, first.
|
||||
./tools/run-mypy --version
|
||||
./tools/run-mypy
|
||||
|
||||
- name: Run miscellaneous tests
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
uv lock --check
|
||||
|
||||
# ./tools/test-run-dev # https://github.com/zulip/zulip/pull/14233
|
||||
#
|
||||
# This test has been persistently flaky at like 1% frequency, is slow,
|
||||
# and is for a very specific single feature, so we don't run it by default:
|
||||
# ./tools/test-queue-worker-reload
|
||||
|
||||
./tools/test-migrations
|
||||
./tools/setup/optimize-svg --check
|
||||
./tools/setup/generate_integration_bots_avatars.py --check-missing
|
||||
./tools/ci/check-executables
|
||||
|
||||
# Ban check-database-compatibility from transitively
|
||||
# relying on static/generated, because it might not be
|
||||
# up-to-date at that point in upgrade-zulip-stage-2.
|
||||
chmod 000 static/generated web/generated
|
||||
./scripts/lib/check-database-compatibility
|
||||
chmod 755 static/generated web/generated
|
||||
|
||||
- name: Check for untracked files
|
||||
run: |
|
||||
source tools/ci/activate-venv
|
||||
# This final check looks for untracked files that may have been
|
||||
# created by test-backend or provision.
|
||||
untracked="$(git ls-files --exclude-standard --others)"
|
||||
if [ -n "$untracked" ]; then
|
||||
printf >&2 "Error: untracked files:\n%s\n" "$untracked"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Upload coverage reports
|
||||
|
||||
# Only upload coverage when both frontend and backend
|
||||
# tests are run.
|
||||
if: ${{ matrix.include_frontend_tests }}
|
||||
uses: codecov/codecov-action@v4
|
||||
with:
|
||||
files: var/coverage.xml,var/node-coverage/lcov.info
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
- name: Store Puppeteer artifacts
|
||||
# Upload these on failure, as well
|
||||
if: ${{ always() && matrix.include_frontend_tests }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: puppeteer
|
||||
path: ./var/puppeteer
|
||||
retention-days: 60
|
||||
|
||||
- name: Check development database build
|
||||
run: ./tools/ci/setup-backend
|
||||
|
||||
- name: Verify pnpm store path
|
||||
run: |
|
||||
set -x
|
||||
path="$(pnpm store path)"
|
||||
[[ "$path" == /__w/.pnpm-store/* ]]
|
||||
|
||||
- name: Minimize uv cache
|
||||
run: uv cache prune --ci
|
||||
|
||||
- name: Generate failure report string
|
||||
id: failure_report_string
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
run: tools/ci/generate-failure-message >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Report status to CZO
|
||||
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
|
||||
uses: zulip/github-actions-zulip/send-message@v1
|
||||
with:
|
||||
api-key: ${{ secrets.ZULIP_BOT_KEY }}
|
||||
email: "github-actions-bot@chat.zulip.org"
|
||||
organization-url: "https://chat.zulip.org"
|
||||
to: "automated testing"
|
||||
topic: ${{ steps.failure_report_string.outputs.topic }}
|
||||
type: "stream"
|
||||
content: ${{ steps.failure_report_string.outputs.content }}
|
103
.gitignore
vendored
103
.gitignore
vendored
@@ -1,91 +1,28 @@
|
||||
# Quick format and style primer:
|
||||
#
|
||||
# * If a pattern is meant only for a specific location, it should have a
|
||||
# leading slash, like `/staticfiles.json`.
|
||||
# * In principle any non-trailing slash (like `zproject/dev-secrets.conf`)
|
||||
# will do, but this makes a confusing pattern. Adding a leading slash
|
||||
# is clearer.
|
||||
#
|
||||
# * Patterns like `.vscode/` without slashes, or with only a trailing slash,
|
||||
# match in any subdirectory.
|
||||
#
|
||||
# * Subdirectories with several internal things to ignore get their own
|
||||
# `.gitignore` files.
|
||||
#
|
||||
# * Comments must be on their own line. (Otherwise they don't work.)
|
||||
#
|
||||
# See `git help ignore` for details on the format.
|
||||
|
||||
## Config files for the dev environment
|
||||
/zproject/apns-dev.pem
|
||||
/zproject/apns-dev-key.p8
|
||||
/zproject/dev-secrets.conf
|
||||
/zproject/custom_dev_settings.py
|
||||
/tools/conf.ini
|
||||
/tools/custom_provision
|
||||
/tools/droplets/conf.ini
|
||||
|
||||
## Byproducts of setting up and using the dev environment
|
||||
*.pyc
|
||||
*.tsbuildinfo
|
||||
package-lock.json
|
||||
|
||||
/.vagrant
|
||||
/var
|
||||
|
||||
/.dmypy.json
|
||||
/.ruff_cache
|
||||
/.venv
|
||||
|
||||
# Generated i18n data
|
||||
/locale/en
|
||||
/locale/language_options.json
|
||||
/locale/language_name_map.json
|
||||
/locale/*/mobile.json
|
||||
|
||||
# Static build
|
||||
*.mo
|
||||
npm-debug.log
|
||||
/.pnpm-store
|
||||
/node_modules
|
||||
/prod-static
|
||||
/staticfiles.json
|
||||
/webpack-stats-production.json
|
||||
zulip-git-version
|
||||
|
||||
# Test / analysis tools
|
||||
.coverage
|
||||
|
||||
## Files (or really symlinks) created in a prod deployment
|
||||
/zproject/prod_settings.py
|
||||
|
||||
## Files left by various editors and local environments
|
||||
# (Ideally these should be in everyone's respective personal gitignore files.)
|
||||
*~
|
||||
/all_messages_log.*
|
||||
/event_log/*
|
||||
/server.log
|
||||
/update-prod-static.log
|
||||
/prod-static
|
||||
/errors/*
|
||||
*.sw[po]
|
||||
.idea
|
||||
.DS_Store
|
||||
event_queues.pickle
|
||||
stats/
|
||||
zerver/fixtures/available-migrations
|
||||
zerver/fixtures/migration-status
|
||||
zerver/fixtures/test_data1.json
|
||||
.kdev4
|
||||
zulip.kdev4
|
||||
memcached_prefix
|
||||
coverage/
|
||||
/queue_error
|
||||
/workers.log
|
||||
.test-js-with-node.html
|
||||
digest.log
|
||||
errors.log
|
||||
manage.log
|
||||
.kateproject.d/
|
||||
.kateproject
|
||||
*.kate-swp
|
||||
*.sublime-project
|
||||
*.sublime-workspace
|
||||
*.DS_Store
|
||||
# VS Code. Avoid checking in .vscode in general, while still specifying
|
||||
# recommended extensions for working with this repository.
|
||||
/.vscode/**/*
|
||||
!/.vscode/extensions.json
|
||||
# .cache/ is generated by VS Code test runner
|
||||
.cache/
|
||||
.eslintcache
|
||||
|
||||
# Core dump files
|
||||
core
|
||||
|
||||
# Static generated files for landing page.
|
||||
/static/images/landing-page/hello/generated
|
||||
|
||||
## Miscellaneous
|
||||
# (Ideally this section is empty.)
|
||||
.transifexrc
|
||||
|
13
.gitlint
13
.gitlint
@@ -1,13 +0,0 @@
|
||||
[general]
|
||||
ignore=title-trailing-punctuation, body-min-length, body-is-missing
|
||||
|
||||
extra-path=tools/lib/gitlint_rules.py
|
||||
|
||||
[title-match-regex]
|
||||
regex=^(.+:\ )?[A-Z].+\.$
|
||||
|
||||
[title-max-length]
|
||||
line-length=72
|
||||
|
||||
[body-max-line-length]
|
||||
line-length=76
|
185
.mailmap
185
.mailmap
@@ -1,185 +0,0 @@
|
||||
# This file teaches `git log` and friends the canonical names
|
||||
# and email addresses to use for our contributors.
|
||||
#
|
||||
# For details on the format, see:
|
||||
# https://git.github.io/htmldocs/gitmailmap.html
|
||||
#
|
||||
# Handy commands for examining or adding to this file:
|
||||
#
|
||||
# # shows all names/emails after mapping, sorted:
|
||||
# $ git shortlog -es | sort -k2
|
||||
#
|
||||
# # shows raw names/emails, filtered by mapped name:
|
||||
# $ git log --format='%an %ae' --author=$NAME | uniq -c
|
||||
|
||||
acrefoot <acrefoot@zulip.com> <acrefoot@alum.mit.edu>
|
||||
acrefoot <acrefoot@zulip.com> <acrefoot@dropbox.com>
|
||||
acrefoot <acrefoot@zulip.com> <acrefoot@humbughq.com>
|
||||
Adam Benesh <Adam.Benesh@gmail.com>
|
||||
Adam Benesh <Adam.Benesh@gmail.com> <Adam-Daniel.Benesh@t-systems.com>
|
||||
Adarsh Tiwari <xoldyckk@gmail.com>
|
||||
Aditya Chaudhary <aditya.chaudhary1558@gmail.com>
|
||||
Adnan Shabbir Husain <generaladnan139@gmail.com>
|
||||
Adnan Shabbir Husain <generaladnan139@gmail.com> <78212328+adnan-td@users.noreply.github.com>
|
||||
Alex Vandiver <alexmv@zulip.com> <alex@chmrr.net>
|
||||
Alex Vandiver <alexmv@zulip.com> <github@chmrr.net>
|
||||
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@humbughq.com>
|
||||
Allen Rabinovich <allenrabinovich@yahoo.com> <allenr@zulip.com>
|
||||
Alya Abbott <alya@zulip.com> <2090066+alya@users.noreply.github.com>
|
||||
Alya Abbott <alya@zulip.com> <alyaabbott@elance-odesk.com>
|
||||
Aman Agrawal <amanagr@zulip.com>
|
||||
Aman Agrawal <amanagr@zulip.com> <f2016561@pilani.bits-pilani.ac.in>
|
||||
Aman Vishwakarma <vishwakarmarambhawan572@gmail.com>
|
||||
Aman Vishwakarma <vishwakarmarambhawan572@gmail.com> <185982038+whilstsomebody@users.noreply.github.com>
|
||||
Aman Vishwakarma <vishwakarmarambhawan572@gmail.com> <whilstsomebody@gmail.com>
|
||||
Anders Kaseorg <anders@zulip.com> <anders@zulipchat.com>
|
||||
Anders Kaseorg <anders@zulip.com> <andersk@mit.edu>
|
||||
aparna-bhatt <aparnabhatt2001@gmail.com> <86338542+aparna-bhatt@users.noreply.github.com>
|
||||
Apoorva Pendse <apoorvavpendse@gmail.com>
|
||||
Aryan Bhokare <aryan1bhokare@gmail.com>
|
||||
Aryan Bhokare <aryan1bhokare@gmail.com> <92683836+aryan-bhokare@users.noreply.github.com>
|
||||
Aryan Shridhar <aryanshridhar7@gmail.com>
|
||||
Aryan Shridhar <aryanshridhar7@gmail.com> <53977614+aryanshridhar@users.noreply.github.com>
|
||||
Ashwat Kumar Singh <ashwat.kumarsingh.met20@itbhu.ac.in>
|
||||
Austin Riba <austin@zulip.com> <austin@m51.io>
|
||||
Bedo Khaled <bedokhaled66@gmail.com>
|
||||
Bedo Khaled <bedokhaled66@gmail.com> <64221784+abdelrahman725@users.noreply.github.com>
|
||||
BIKI DAS <bikid475@gmail.com>
|
||||
Brijmohan Siyag <brijsiyag@gmail.com>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <bjwhitta@asu.edu>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <brock@zulip.com>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <brock@zulip.org>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <brock@zulipchat.org>
|
||||
Brock Whittaker <whittakerbrock@gmail.com> <brockwhittaker@Brocks-MacBook.local>
|
||||
Chris Bobbe <cbobbe@zulip.com> <cbobbe@zulipchat.com>
|
||||
Chris Bobbe <cbobbe@zulip.com> <csbobbe@gmail.com>
|
||||
codewithnick <nikhilsingh526452@gmail.com>
|
||||
Danny Su <contact@dannysu.com> <opensource@emailengine.org>
|
||||
Dhruv Goyal <dhruvgoyal.dev@gmail.com>
|
||||
Dinesh <chdinesh1089@gmail.com>
|
||||
Dinesh <chdinesh1089@gmail.com> <chdinesh1089>
|
||||
Eeshan Garg <eeshan@zulip.com> <jerryguitarist@gmail.com>
|
||||
Eric Smith <erwsmith@gmail.com> <99841919+erwsmith@users.noreply.github.com>
|
||||
Evy Kassirer <evy@zulip.com>
|
||||
Evy Kassirer <evy@zulip.com> <evy.kassirer@gmail.com>
|
||||
Evy Kassirer <evy@zulip.com> <evykassirer@users.noreply.github.com>
|
||||
Ganesh Pawar <pawarg256@gmail.com> <58626718+ganpa3@users.noreply.github.com>
|
||||
Greg Price <greg@zulip.com> <gnprice@gmail.com>
|
||||
Greg Price <greg@zulip.com> <greg@zulipchat.com>
|
||||
Greg Price <greg@zulip.com> <price@mit.edu>
|
||||
Hardik Dharmani <Ddharmani99@gmail.com> <ddharmani99@gmail.com>
|
||||
Harsh Bansal <harsh@harshbansal.in>
|
||||
Harsh Meena <reharshmeena@gmail.com>
|
||||
Harsh Meena <reharshmeena@gmail.com> <116981900+reharsh@users.noreply.github.com>
|
||||
Hemant Umre <hemantumre12@gmail.com> <87542880+HemantUmre12@users.noreply.github.com>
|
||||
Jai soni <jai_s@me.iitr.ac.in>
|
||||
Jai soni <jai_s@me.iitr.ac.in> <76561593+jai2201@users.noreply.github.com>
|
||||
Jeff Arnold <jbarnold@gmail.com> <jbarnold@humbughq.com>
|
||||
Jeff Arnold <jbarnold@gmail.com> <jbarnold@zulip.com>
|
||||
Jessica McKellar <jesstess@mit.edu> <jesstess@humbughq.com>
|
||||
Jessica McKellar <jesstess@mit.edu> <jesstess@zulip.com>
|
||||
Jitendra Kumar <jk69854@gmail.com>
|
||||
Jitendra Kumar <jk69854@gmail.com> <36557466+jitendra-ky@users.noreply.github.com>
|
||||
John Lu <JohnLu10212004@gmail.com>
|
||||
John Lu <JohnLu10212004@gmail.com> <87673068+JohnLu2004@users.noreply.github.com>
|
||||
Joseph Ho <josephho678@gmail.com>
|
||||
Joseph Ho <josephho678@gmail.com> <62449508+Joelute@users.noreply.github.com>
|
||||
Julia Bichler <julia.bichler@tum.de> <74348920+juliaBichler01@users.noreply.github.com>
|
||||
Karl Stolley <karl@zulip.com> <karl@stolley.dev>
|
||||
Kartikay Sambher <kartikaysambher@gmail.com>
|
||||
Kevin Mehall <km@kevinmehall.net> <kevin@humbughq.com>
|
||||
Kevin Mehall <km@kevinmehall.net> <kevin@zulip.com>
|
||||
Kevin Scott <kevin.scott.98@gmail.com>
|
||||
Kislay Verma <kislayuv27@gmail.com>
|
||||
Klara Brrettby <klara.bratteby@gmail.com>
|
||||
Klara Brrettby <klara.bratteby@gmail.com> <93648999+klarabratteby@users.noreply.github.com>
|
||||
Kumar Aniket <sachinaniket2004@gmail.com>
|
||||
Kumar Aniket <sachinaniket2004@gmail.com> <142340063+opmkumar@users.noreply.github.com>
|
||||
Kunal Sharma <v.shm.kunal@gmail.com>
|
||||
Lalit Kumar Singh <lalitkumarsingh3716@gmail.com>
|
||||
Lalit Kumar Singh <lalitkumarsingh3716@gmail.com> <lalits01@smartek21.com>
|
||||
Lauryn Menard <lauryn@zulip.com> <63245456+laurynmm@users.noreply.github.com>
|
||||
Lauryn Menard <lauryn@zulip.com> <lauryn.menard@gmail.com>
|
||||
m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in>
|
||||
m-e-l-u-h-a-n <purushottam.tiwari.cd.cse19@itbhu.ac.in> <pururshottam.tiwari.cd.cse19@itbhu.ac.in>
|
||||
Maneesh Shukla <shuklamaneesh24@gmail.com> <143504391+shuklamaneesh23@users.noreply.github.com>
|
||||
Mateusz Mandera <mateusz.mandera@zulip.com> <mateusz.mandera@protonmail.com>
|
||||
Matt Keller <matt@zulip.com>
|
||||
Matt Keller <matt@zulip.com> <m@cognusion.com>
|
||||
Nehal Sharma <bablinaneh@gmail.com>
|
||||
Nehal Sharma <bablinaneh@gmail.com> <68962290+N-Shar-ma@users.noreply.github.com>
|
||||
Nimish Medatwal <medatwalnimish@gmail.com>
|
||||
Noble Mittal <noblemittal@outlook.com> <62551163+beingnoble03@users.noreply.github.com>
|
||||
nzai <nzaih18@gmail.com> <70953556+nzaih1999@users.noreply.github.com>
|
||||
Palash Baderia <palash.baderia@outlook.com>
|
||||
Palash Baderia <palash.baderia@outlook.com> <66828942+palashb01@users.noreply.github.com>
|
||||
Palash Raghuwanshi <singhpalash0@gmail.com>
|
||||
Parth <mittalparth22@gmail.com>
|
||||
Prakhar Pratyush <prakhar@zulip.com> <prakhar841301@gmail.com>
|
||||
Pratik Chanda <pratikchanda2000@gmail.com>
|
||||
Pratik Solanki <pratiksolanki2021@gmail.com>
|
||||
Priyam Seth <sethpriyam1@gmail.com> <b19188@students.iitmandi.ac.in>
|
||||
Ray Kraesig <rkraesig@zulip.com> <rkraesig@zulipchat.com>
|
||||
Reid Barton <rwbarton@gmail.com> <rwbarton@humbughq.com>
|
||||
Rein Zustand (rht) <rhtbot@protonmail.com>
|
||||
Rishabh Maheshwari <b20063@students.iitmandi.ac.in>
|
||||
Rishi Gupta <rishig@zulipchat.com> <rishig+git@mit.edu>
|
||||
Rishi Gupta <rishig@zulipchat.com> <rishig@kandralabs.com>
|
||||
Rishi Gupta <rishig@zulipchat.com> <rishig@users.noreply.github.com>
|
||||
Ritwik Patnaik <ritwikpatnaik@gmail.com>
|
||||
Rixant Rokaha <rixantrokaha@gmail.com>
|
||||
Rixant Rokaha <rixantrokaha@gmail.com> <rishantrokaha@gmail.com>
|
||||
Rixant Rokaha <rixantrokaha@gmail.com> <rrokaha@caldwell.edu>
|
||||
Rohan Gudimetla <rohan.gudimetla07@gmail.com>
|
||||
Sahil Batra <sahil@zulip.com> <35494118+sahil839@users.noreply.github.com>
|
||||
Sahil Batra <sahil@zulip.com> <sahilbatra839@gmail.com>
|
||||
Sanchit Sharma <ssharmas10662@gmail.com>
|
||||
Satyam Bansal <sbansal1999@gmail.com>
|
||||
Sayam Samal <samal.sayam@gmail.com>
|
||||
Scott Feeney <scott@oceanbase.org> <scott@humbughq.com>
|
||||
Scott Feeney <scott@oceanbase.org> <scott@zulip.com>
|
||||
Shashank Singh <21bec103@iiitdmj.ac.in>
|
||||
Shlok Patel <shlokcpatel2001@gmail.com>
|
||||
Shu Chen <shu@zulip.com>
|
||||
Shubham Padia <shubham@zulip.com>
|
||||
Shubham Padia <shubham@zulip.com> <shubham-padia@users.noreply.github.com>
|
||||
Shubham Padia <shubham@zulip.com> <shubham@glints.com>
|
||||
Somesh Ranjan <somesh.ranjan.met20@itbhu.ac.in> <77766761+somesh202@users.noreply.github.com>
|
||||
Steve Howell <showell@zulip.com> <showell30@yahoo.com>
|
||||
Steve Howell <showell@zulip.com> <showell@yahoo.com>
|
||||
Steve Howell <showell@zulip.com> <showell@zulipchat.com>
|
||||
Steve Howell <showell@zulip.com> <steve@humbughq.com>
|
||||
Steve Howell <showell@zulip.com> <steve@zulip.com>
|
||||
strifel <info@strifel.de>
|
||||
Sujal Shah <sujalshah28092004@gmail.com>
|
||||
Tanmay Kumar <tnmdotkr@gmail.com>
|
||||
Tanmay Kumar <tnmdotkr@gmail.com> <133781250+tnmkr@users.noreply.github.com>
|
||||
Tim Abbott <tabbott@zulip.com>
|
||||
Tim Abbott <tabbott@zulip.com> <tabbott@dropbox.com>
|
||||
Tim Abbott <tabbott@zulip.com> <tabbott@humbughq.com>
|
||||
Tim Abbott <tabbott@zulip.com> <tabbott@mit.edu>
|
||||
Tim Abbott <tabbott@zulip.com> <tabbott@zulipchat.com>
|
||||
Tomasz Kolek <tomasz-kolek@o2.pl> <tomasz-kolek@go2.pl>
|
||||
Ujjawal Modi <umodi2003@gmail.com> <99073049+Ujjawal3@users.noreply.github.com>
|
||||
umkay <ukhan@zulipchat.com> <umaimah.k@gmail.com>
|
||||
umkay <ukhan@zulipchat.com> <umkay@users.noreply.github.com>
|
||||
Viktor Illmer <1476338+v-ji@users.noreply.github.com>
|
||||
Vishesh Singh <vishesh.bhu1971@gmail.com>
|
||||
Vishesh Singh <vishesh.bhu1971@gmail.com> <142628839+NotVishesh@users.noreply.github.com>
|
||||
Vishnu KS <vishnu@zulip.com> <hackerkid@vishnuks.com>
|
||||
Vishnu KS <vishnu@zulip.com> <yo@vishnuks.com>
|
||||
Vivek Tripathi <vivektripathi8005@gmail.com>
|
||||
Waseem Daher <wdaher@zulip.com> <wdaher@dropbox.com>
|
||||
Waseem Daher <wdaher@zulip.com> <wdaher@humbughq.com>
|
||||
Yash RE <33805964+YashRE42@users.noreply.github.com>
|
||||
Yash RE <33805964+YashRE42@users.noreply.github.com> <YashRE42@github.com>
|
||||
Yogesh Sirsat <yogeshsirsat56@gmail.com>
|
||||
Yogesh Sirsat <yogeshsirsat56@gmail.com> <41695888+yogesh-sirsat@users.noreply.github.com>
|
||||
Zeeshan Equbal <equbalzeeshan@gmail.com>
|
||||
Zeeshan Equbal <equbalzeeshan@gmail.com> <54993043+zee-bit@users.noreply.github.com>
|
||||
Zev Benjamin <zev@zulip.com> <zev@dropbox.com>
|
||||
Zev Benjamin <zev@zulip.com> <zev@humbughq.com>
|
||||
Zev Benjamin <zev@zulip.com> <zev@mit.edu>
|
||||
Zixuan James Li <p359101898@gmail.com>
|
||||
Zixuan James Li <p359101898@gmail.com> <359101898@qq.com>
|
||||
Zixuan James Li <p359101898@gmail.com> <39874143+PIG208@users.noreply.github.com>
|
@@ -1,17 +0,0 @@
|
||||
pnpm-lock.yaml
|
||||
/api_docs/**/*.md
|
||||
/corporate/tests/stripe_fixtures
|
||||
/help/**/*.md
|
||||
/locale
|
||||
/templates/**/*.md
|
||||
/tools/setup/emoji/emoji_map.json
|
||||
/web/third/*
|
||||
!/web/third/marked
|
||||
/web/third/marked/*
|
||||
!/web/third/marked/lib
|
||||
/web/third/marked/lib/*
|
||||
!/web/third/marked/lib/marked.d.cts
|
||||
/zerver/tests/fixtures
|
||||
/zerver/webhooks/*/doc.md
|
||||
/zerver/webhooks/github/githubsponsors.md
|
||||
/zerver/webhooks/*/fixtures
|
@@ -1,19 +0,0 @@
|
||||
# https://docs.readthedocs.io/en/stable/config-file/v2.html
|
||||
version: 2
|
||||
|
||||
build:
|
||||
os: ubuntu-22.04
|
||||
tools:
|
||||
python: "3.10"
|
||||
jobs:
|
||||
create_environment:
|
||||
- asdf plugin add uv
|
||||
- asdf install uv 0.6.6
|
||||
- asdf global uv 0.6.6
|
||||
- UV_PROJECT_ENVIRONMENT=$READTHEDOCS_VIRTUALENV_PATH uv venv
|
||||
install:
|
||||
- UV_PROJECT_ENVIRONMENT=$READTHEDOCS_VIRTUALENV_PATH uv sync --frozen --only-group=docs
|
||||
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
fail_on_warning: true
|
@@ -1 +0,0 @@
|
||||
sonar.inclusions=**/*.py,**/*.html
|
23
.vscode/extensions.json
vendored
23
.vscode/extensions.json
vendored
@@ -1,23 +0,0 @@
|
||||
{
|
||||
// Recommended VS Code extensions for zulip/zulip.
|
||||
//
|
||||
// VS Code prompts a user to install the recommended extensions
|
||||
// when a workspace is opened for the first time. The user can
|
||||
// also review the list with the 'Extensions: Show Recommended
|
||||
// Extensions' command. See
|
||||
// https://code.visualstudio.com/docs/editor/extension-marketplace#_workspace-recommended-extensions
|
||||
// for more information.
|
||||
//
|
||||
// Extension identifier format: ${publisher}.${name}.
|
||||
// Example: vscode.csharp
|
||||
|
||||
"recommendations": [
|
||||
"42crunch.vscode-openapi",
|
||||
"dbaeumer.vscode-eslint",
|
||||
"esbenp.prettier-vscode",
|
||||
"ms-vscode-remote.vscode-remote-extensionpack"
|
||||
],
|
||||
|
||||
// Extensions recommended by VS Code which are not recommended for users of zulip/zulip.
|
||||
"unwantedRecommendations": []
|
||||
}
|
@@ -1,173 +0,0 @@
|
||||
# Zulip Code of Conduct
|
||||
|
||||
Like the technical community as a whole, the Zulip team and community is
|
||||
made up of a mixture of professionals and volunteers from all over the
|
||||
world, working on every aspect of the mission, including mentorship,
|
||||
teaching, and connecting people.
|
||||
|
||||
Diversity is one of our huge strengths, but it can also lead to
|
||||
communication issues and unhappiness. To that end, we have a few ground
|
||||
rules that we ask people to adhere to. This code applies equally to
|
||||
founders, mentors, and those seeking help and guidance.
|
||||
|
||||
This isn't an exhaustive list of things that you can't do. Rather, take it
|
||||
in the spirit in which it's intended --- a guide to make it easier to enrich
|
||||
all of us and the technical communities in which we participate.
|
||||
|
||||
## Expected behavior
|
||||
|
||||
The following behaviors are expected and requested of all community members:
|
||||
|
||||
- Participate. In doing so, you contribute to the health and longevity of
|
||||
the community.
|
||||
- Exercise consideration and respect in your speech and actions.
|
||||
- Attempt collaboration before conflict. Assume good faith.
|
||||
- Refrain from demeaning, discriminatory, or harassing behavior and speech.
|
||||
- Take action or alert community leaders if you notice a dangerous
|
||||
situation, someone in distress, or violations of this code, even if they
|
||||
seem inconsequential.
|
||||
- Community event venues may be shared with members of the public; be
|
||||
respectful to all patrons of these locations.
|
||||
|
||||
## Unacceptable behavior
|
||||
|
||||
The following behaviors are considered harassment and are unacceptable
|
||||
within the Zulip community:
|
||||
|
||||
- Jokes or derogatory language that singles out members of any race,
|
||||
ethnicity, culture, national origin, color, immigration status, social and
|
||||
economic class, educational level, language proficiency, sex, sexual
|
||||
orientation, gender identity and expression, age, size, family status,
|
||||
political belief, religion, and mental and physical ability.
|
||||
- Violence, threats of violence, or violent language directed against
|
||||
another person.
|
||||
- Disseminating or threatening to disseminate another person's personal
|
||||
information.
|
||||
- Personal insults of any sort.
|
||||
- Posting or displaying sexually explicit or violent material.
|
||||
- Inappropriate photography or recording.
|
||||
- Deliberate intimidation, stalking, or following (online or in person).
|
||||
- Unwelcome sexual attention. This includes sexualized comments or jokes,
|
||||
inappropriate touching or groping, and unwelcomed sexual advances.
|
||||
- Sustained disruption of community events, including talks and
|
||||
presentations.
|
||||
- Advocating for, or encouraging, any of the behaviors above.
|
||||
|
||||
## Reporting and enforcement
|
||||
|
||||
Harassment and other code of conduct violations reduce the value of the
|
||||
community for everyone. If someone makes you or anyone else feel unsafe or
|
||||
unwelcome, please report it to the community organizers at
|
||||
zulip-code-of-conduct@googlegroups.com as soon as possible. You can make a
|
||||
report either personally or anonymously.
|
||||
|
||||
If a community member engages in unacceptable behavior, the community
|
||||
organizers may take any action they deem appropriate, up to and including a
|
||||
temporary ban or permanent expulsion from the community without warning (and
|
||||
without refund in the case of a paid event).
|
||||
|
||||
If someone outside the development community (e.g., a user of the Zulip
|
||||
software) engages in unacceptable behavior that affects someone in the
|
||||
community, we still want to know. Even if we don't have direct control over
|
||||
the violator, the community organizers can still support the people
|
||||
affected, reduce the chance of a similar violation in the future, and take
|
||||
any direct action we can.
|
||||
|
||||
The nature of reporting means it can only help after the fact. If you see
|
||||
something you can do while a violation is happening, do it. A lot of the
|
||||
harms of harassment and other violations can be mitigated by the victim
|
||||
knowing that the other people present are on their side.
|
||||
|
||||
All reports will be kept confidential. In some cases, we may determine that a
|
||||
public statement will need to be made. In such cases, the identities of all
|
||||
victims and reporters will remain confidential unless those individuals
|
||||
instruct us otherwise.
|
||||
|
||||
## Scope
|
||||
|
||||
We expect all community participants (contributors, paid or otherwise,
|
||||
sponsors, and other guests) to abide by this Code of Conduct in all
|
||||
community venues, online and in-person, as well as in all private
|
||||
communications pertaining to community business.
|
||||
|
||||
This Code of Conduct and its related procedures also applies to unacceptable
|
||||
behavior occurring outside the scope of community activities when such
|
||||
behavior has the potential to adversely affect the safety and well-being of
|
||||
community members.
|
||||
|
||||
## License and attribution
|
||||
|
||||
This Code of Conduct is adapted from the
|
||||
[Django Code of Conduct](https://www.djangoproject.com/conduct/), and is
|
||||
under a
|
||||
[Creative Commons BY-SA](https://creativecommons.org/licenses/by-sa/4.0/)
|
||||
license.
|
||||
|
||||
## Moderating the Zulip community
|
||||
|
||||
Anyone can help moderate the Zulip community by helping make sure that folks are
|
||||
aware of the [community guidelines](https://zulip.com/development-community/)
|
||||
and this Code of Conduct, and that we maintain a positive and respectful
|
||||
atmosphere.
|
||||
|
||||
Here are some guidelines for how you can help:
|
||||
|
||||
- Be friendly! Welcoming folks, thanking them for their feedback, ideas and effort,
|
||||
and just trying to keep the atmosphere warm make the whole community function
|
||||
more smoothly. New participants who feel accepted, listened to and respected
|
||||
are likely to treat others the same way.
|
||||
|
||||
- Be familiar with the [community
|
||||
guidelines](https://zulip.com/development-community/), and cite them liberally
|
||||
when a user violates them. Be polite but firm. Some examples:
|
||||
|
||||
- @user please note that there is no need to @-mention @\_**Tim Abbott** when
|
||||
you ask a question. As noted in the [guidelines for this
|
||||
community](https://zulip.com/development-community/):
|
||||
|
||||
> Use @-mentions sparingly… there is generally no need to @-mention a
|
||||
> core contributor unless you need their timely attention.
|
||||
|
||||
- @user, please keep in mind the following [community
|
||||
guideline](https://zulip.com/development-community/):
|
||||
|
||||
> Don’t ask the same question in multiple places. Moderators read every
|
||||
> public stream, and make sure every question gets a reply.
|
||||
|
||||
I’ve gone ahead and moved the other copy of this message to this thread.
|
||||
|
||||
- If asked a question in a direct message that is better discussed in a public
|
||||
stream:
|
||||
> Hi @user! Please start by reviewing
|
||||
> https://zulip.com/development-community/#community-norms to learn how to
|
||||
> get help in this community.
|
||||
|
||||
- Users sometimes think chat.zulip.org is a testing instance. When this happens,
|
||||
kindly direct them to use the **#test here** stream.
|
||||
|
||||
- If you see a message that’s posted in the wrong place, go ahead and move it if
|
||||
you have permissions to do so, even if you don’t plan to respond to it.
|
||||
Leaving the “Send automated notice to new topic” option enabled helps make it
|
||||
clear what happened to the person who sent the message.
|
||||
|
||||
If you are responding to a message that's been moved, mention the user in your
|
||||
reply, so that the mention serves as a notification of the new location for
|
||||
their conversation.
|
||||
|
||||
- If a user is posting spam, please report it to an administrator. They will:
|
||||
|
||||
- Change the user's name to `<name> (spammer)` and deactivate them.
|
||||
- Delete any spam messages they posted in public streams.
|
||||
|
||||
- We care very much about maintaining a respectful tone in our community. If you
|
||||
see someone being mean or rude, point out that their tone is inappropriate,
|
||||
and ask them to communicate their perspective in a respectful way in the
|
||||
future. If you don’t feel comfortable doing so yourself, feel free to ask a
|
||||
member of Zulip's core team to take care of the situation.
|
||||
|
||||
- Try to assume the best intentions from others (given the range of
|
||||
possibilities presented by their visible behavior), and stick with a friendly
|
||||
and positive tone even when someone’s behavior is poor or disrespectful.
|
||||
Everyone has bad days and stressful situations that can result in them
|
||||
behaving not their best, and while we should be firm about our community
|
||||
rules, we should also enforce them with kindness.
|
418
CONTRIBUTING.md
418
CONTRIBUTING.md
@@ -1,418 +0,0 @@
|
||||
# Contributing guide
|
||||
|
||||
Welcome! This is a step-by-step guide on how to get started contributing code to
|
||||
the [Zulip](https://zulip.com/) organized team chat [open-source
|
||||
project](https://github.com/zulip). Thousands of people use Zulip every day, and
|
||||
your work on Zulip will have a meaningful impact on their experience. We hope
|
||||
you'll join us!
|
||||
|
||||
To learn about ways to contribute without writing code, please see our
|
||||
suggestions for how you can [support the Zulip
|
||||
project](https://zulip.com/help/support-zulip-project).
|
||||
|
||||
## Learning from the docs
|
||||
|
||||
Zulip has a documentation-based approach to onboarding new contributors. As you
|
||||
are getting started, this page will be your go-to for figuring out what to do
|
||||
next. You will also explore other guides, learning about how to put together
|
||||
your first pull request, diving into [Zulip's
|
||||
subsystems](https://zulip.readthedocs.io/en/latest/subsystems/index.html), and
|
||||
much more. We hope you'll find this process to be a great learning experience.
|
||||
|
||||
This page will guide you through the following steps:
|
||||
|
||||
1. [Getting started](#getting-started)
|
||||
1. [Finding an issue to work on](#finding-an-issue-to-work-on)
|
||||
1. [Getting help](#getting-help) as you work on your first pull request
|
||||
1. Learning [what makes a great Zulip contributor](#what-makes-a-great-zulip-contributor)
|
||||
1. [Submitting a pull request](#submitting-a-pull-request)
|
||||
1. [Going beyond the first issue](#beyond-the-first-issue)
|
||||
|
||||
Any time you feel lost, come back to this guide. The information you need is
|
||||
likely somewhere on this page (perhaps in the list of [common
|
||||
questions](#common-questions)), or in one of the many references it points to.
|
||||
|
||||
If you've done all you can with the documentation and are still feeling stuck,
|
||||
join the [Zulip development community](https://zulip.com/development-community/)
|
||||
to ask for help! Before you post, be sure to review [community
|
||||
norms](https://zulip.com/development-community/#community-norms) and [where to
|
||||
post](https://zulip.com/development-community/#where-do-i-send-my-message) your
|
||||
question. The Zulip community is governed by a [code of
|
||||
conduct](https://zulip.readthedocs.io/en/latest/code-of-conduct.html).
|
||||
|
||||
## Getting started
|
||||
|
||||
### Learning how to use Git (the Zulip way)
|
||||
|
||||
Zulip uses GitHub for source control and code review, and becoming familiar with
|
||||
Git is essential for navigating and contributing to the Zulip codebase. [Our
|
||||
guide to Git](https://zulip.readthedocs.io/en/latest/git/index.html) will help
|
||||
you get started even if you've never used Git before.
|
||||
|
||||
If you're familiar with Git, you'll still want to take a look at [our
|
||||
Zulip-specific Git
|
||||
tools](https://zulip.readthedocs.io/en/latest/git/zulip-tools.html).
|
||||
|
||||
### Setting up your development environment and diving in
|
||||
|
||||
To get started contributing code to Zulip, you will need to set up the
|
||||
development environment for the Zulip codebase you want to work on. You'll then
|
||||
want to take some time to familiarize yourself with the code.
|
||||
|
||||
#### Server and web app
|
||||
|
||||
1. [Install the development
|
||||
environment](https://zulip.readthedocs.io/en/latest/development/overview.html).
|
||||
1. Familiarize yourself with [using the development
|
||||
environment](https://zulip.readthedocs.io/en/latest/development/using.html).
|
||||
1. Go through the [new application feature
|
||||
tutorial](https://zulip.readthedocs.io/en/latest/tutorials/new-feature-tutorial.html)
|
||||
to get familiar with how the Zulip codebase is organized and how to find code
|
||||
in it.
|
||||
|
||||
#### Flutter-based mobile app
|
||||
|
||||
1. Set up a development environment following the instructions in [the project
|
||||
README](https://github.com/zulip/zulip-flutter).
|
||||
1. Start reading recent commits to see the code we're writing.
|
||||
Use either a [graphical Git viewer][] like `gitk`, or `git log -p`
|
||||
with [the "secret" to reading its output][git-log-secret].
|
||||
1. Pick some of the code that appears in those Git commits and that looks
|
||||
interesting. Use your IDE to visit that code and to navigate to related code,
|
||||
reading to see how it works and how the codebase is organized.
|
||||
|
||||
[graphical Git viewer]: https://zulip.readthedocs.io/en/latest/git/setup.html#get-a-graphical-client
|
||||
[git-log-secret]: https://github.com/zulip/zulip-mobile/blob/main/docs/howto/git.md#git-log-secret
|
||||
|
||||
#### Desktop app
|
||||
|
||||
Follow [this
|
||||
documentation](https://github.com/zulip/zulip-desktop/blob/main/development.md)
|
||||
to set up the Zulip Desktop development environment.
|
||||
|
||||
#### Terminal app
|
||||
|
||||
Follow [this
|
||||
documentation](https://github.com/zulip/zulip-terminal?tab=readme-ov-file#setting-up-a-development-environment)
|
||||
to set up the Zulip Terminal development environment.
|
||||
|
||||
## Finding an issue to work on
|
||||
|
||||
### Where to look for an issue
|
||||
|
||||
Now you're ready to pick your first issue! Zulip has several repositories you
|
||||
can check out, depending on your interests. There are hundreds of open issues in
|
||||
the [main Zulip server and web app
|
||||
repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||
alone.
|
||||
|
||||
You can look through issues tagged with the "help wanted" label, which is used
|
||||
to indicate the issues that are open for contributions. You'll be able to claim
|
||||
unassigned issues, which you can find using the `no:assignee` filter in GitHub.
|
||||
You can also pick up issues that are assigned but are no longer being worked on.
|
||||
|
||||
Some repositories use the "good first issue" label to tag issues that are
|
||||
especially approachable for new contributors.
|
||||
|
||||
Here are some handy links for issues to look through:
|
||||
|
||||
- [Server and web app](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||
- Mobile apps: no "help wanted" label, but see the
|
||||
[project board](https://github.com/orgs/zulip/projects/5/views/4)
|
||||
for the upcoming Flutter-based app. Look for issues up through the
|
||||
"Launch" milestone, and that aren't already assigned.
|
||||
- [Desktop app](https://github.com/zulip/zulip-desktop/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||
- [Terminal app](https://github.com/zulip/zulip-terminal/issues?q=is%3Aopen+is%3Aissue+label%3A"help+wanted")
|
||||
- [Python API bindings and bots](https://github.com/zulip/python-zulip-api/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
|
||||
|
||||
### Picking an issue to work on
|
||||
|
||||
There's a lot to learn while making your first pull request, so start small!
|
||||
Many first contributions have fewer than 10 lines of changes (not counting
|
||||
changes to tests).
|
||||
|
||||
We recommend the following process for finding an issue to work on:
|
||||
|
||||
1. Find an issue tagged with the "help wanted" label that is either unassigned,
|
||||
or looks to be abandoned.
|
||||
1. Read the description of the issue and make sure you understand it.
|
||||
1. If it seems promising, poke around the product
|
||||
(on [chat.zulip.org](https://chat.zulip.org) or in the development
|
||||
environment) until you know how the piece being
|
||||
described fits into the bigger picture. If after some exploration the
|
||||
description seems confusing or ambiguous, post a question on the GitHub
|
||||
issue, as others may benefit from the clarification as well.
|
||||
1. When you find an issue you like, try to get started working on it. See if you
|
||||
can find the part of the code you'll need to modify (`git grep` is your
|
||||
friend!) and get some idea of how you'll approach the problem.
|
||||
1. If you feel lost, that's OK! Go through these steps again with another issue.
|
||||
There's plenty to work on, and the exploration you do will help you learn
|
||||
more about the project.
|
||||
|
||||
An assigned issue can be considered abandoned if:
|
||||
|
||||
- There is no recent contributor activity.
|
||||
- There are no open PRs, or an open PR needs work in order to be ready for
|
||||
review. For example, a PR may need to be updated to address reviewer feedback
|
||||
or to pass tests.
|
||||
|
||||
Note that you are _not_ claiming an issue while you are iterating through steps
|
||||
1-4. _Before you claim an issue_, you should be confident that you will be able to
|
||||
tackle it effectively.
|
||||
|
||||
Additional tips for the [main server and web app
|
||||
repository](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22):
|
||||
|
||||
- We especially recommend browsing recently opened issues, as there are more
|
||||
likely to be easy ones for you to find.
|
||||
- Take a look at issues with the ["good first issue"
|
||||
label](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22),
|
||||
as they are especially accessible to new contributors. However, you will
|
||||
likely find issues without this label that are accessible as well.
|
||||
- All issues are partitioned into areas like
|
||||
admin, compose, emoji, hotkeys, i18n, onboarding, search, etc. Look
|
||||
through our [list of labels](https://github.com/zulip/zulip/labels), and
|
||||
click on some of the `area:` labels to see all the issues related to your
|
||||
areas of interest.
|
||||
- Avoid issues with the "difficult" label unless you
|
||||
understand why it is difficult and are highly confident you can resolve the
|
||||
issue correctly and completely.
|
||||
|
||||
### Claiming an issue
|
||||
|
||||
#### In the main server/web app repository and Zulip Terminal repository
|
||||
|
||||
The Zulip server/web app repository
|
||||
([`zulip/zulip`](https://github.com/zulip/zulip/)) and the Zulip Terminal
|
||||
repository ([`zulip/zulip-terminal`](https://github.com/zulip/zulip-terminal/))
|
||||
are set up with a GitHub workflow bot called
|
||||
[Zulipbot](https://github.com/zulip/zulipbot), which manages issues and pull
|
||||
requests in order to create a better workflow for Zulip contributors.
|
||||
|
||||
To claim an issue in these repositories, simply post a comment that says
|
||||
`@zulipbot claim` to the issue thread. If the issue is [tagged with a help
|
||||
wanted label and is not assigned to someone
|
||||
else](https://github.com/zulip/zulip/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22+no%3Aassignee),
|
||||
Zulipbot will immediately assign the issue to you.
|
||||
|
||||
Note that new contributors can only claim one issue until their first pull request is
|
||||
merged. This is to encourage folks to finish ongoing work before starting
|
||||
something new. If you would like to pick up a new issue while waiting for review
|
||||
on an almost-ready pull request, you can post a comment to this effect on the
|
||||
issue you're interested in.
|
||||
|
||||
#### In other Zulip repositories
|
||||
|
||||
There is no bot for other Zulip repositories
|
||||
([`zulip/zulip-flutter`](https://github.com/zulip/zulip-flutter/), etc.). If
|
||||
you are interested in claiming an issue in one of these repositories, simply
|
||||
post a comment on the issue thread saying that you've started work on the
|
||||
issue and would like to claim it. In your comment, describe what part of the
|
||||
code you're modifying and how you plan to approach the problem, based on
|
||||
what you learned in steps 1–4 [above](#picking-an-issue-to-work-on).
|
||||
|
||||
There is no need to @-mention the issue creator in your comment. There is
|
||||
also no need to post the same information in multiple places, for example in
|
||||
a chat thread in addition to the GitHub issue.
|
||||
|
||||
Please follow the same guidelines as described above: find an issue labeled
|
||||
"help wanted", and only pick up one issue at a time to start with.
|
||||
|
||||
## Getting help
|
||||
|
||||
You may have questions as you work on your pull request. For example, you might
|
||||
not be sure about some details of what's required, or have questions about your
|
||||
implementation approach. Zulip's maintainers are happy to answer thoughtfully
|
||||
posed questions, and discuss any difficulties that might arise as you work on
|
||||
your PR.
|
||||
|
||||
If you haven't done so yet, now is the time to join the [Zulip development
|
||||
community](https://zulip.com/development-community/). If you'd like, introduce
|
||||
yourself in the [#new
|
||||
members](https://chat.zulip.org/#narrow/channel/95-new-members) channel, using
|
||||
your name as the [topic](https://zulip.com/help/introduction-to-topics).
|
||||
|
||||
You can get help in public channels in the community:
|
||||
|
||||
1. **Review** the [Zulip development community
|
||||
guidelines](https://zulip.com/development-community/#community-norms).
|
||||
|
||||
1. **Decide where to post.** If there is a discussion thread linked from the
|
||||
issue you're working on, that's usually the best place to post any
|
||||
clarification questions about the issue. Otherwise, follow [these
|
||||
guidelines](https://zulip.com/development-community/#where-do-i-send-my-message)
|
||||
to figure out where to post your question. Don’t stress too much about
|
||||
picking the right place if you’re not sure, as moderators can [move your
|
||||
question thread to a different
|
||||
channel](https://zulip.com/help/move-content-to-another-channel) if needed.
|
||||
|
||||
1. **Write** up your question, being sure to follow our [guide on asking great
|
||||
questions](https://zulip.readthedocs.io/en/latest/contributing/asking-great-questions.html).
|
||||
The guide explains what you need to do to make sure that folks will be able to
|
||||
help you out, and that you're making good use of maintainers' limited time.
|
||||
|
||||
1. **Review** your message before you send it. Will your question make sense to
|
||||
someone who is familiar with Zulip, but might not have the details of what
|
||||
you are working on fresh in mind?
|
||||
|
||||
Well-posed questions will generally get a response within 1-2 business days.
|
||||
There is no need to @-mention anyone when you ask a question, as maintainers
|
||||
keep a close eye on all the ongoing discussions.
|
||||
|
||||
## What makes a great Zulip contributor?
|
||||
|
||||
As you're working on your first code contribution, here are some best practices
|
||||
to keep in mind.
|
||||
|
||||
- [Asking great questions][great-questions]. It's very hard to answer a general
|
||||
question like, "How do I do this issue?" When asking for help, explain your
|
||||
current understanding, including what you've done or tried so far and where
|
||||
you got stuck. Post tracebacks or other error messages if appropriate. For
|
||||
more advice, check out [our guide][great-questions]!
|
||||
- Learning and practicing
|
||||
[Git commit discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html).
|
||||
- Submitting carefully tested code. See our [detailed guide on how to review
|
||||
code](https://zulip.readthedocs.io/en/latest/contributing/code-reviewing.html#how-to-review-code)
|
||||
(yours or someone else's).
|
||||
- Posting
|
||||
[screenshots or GIFs](https://zulip.readthedocs.io/en/latest/tutorials/screenshot-and-gif-software.html)
|
||||
for frontend changes.
|
||||
- Working to [make your pull requests easy to
|
||||
review](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html).
|
||||
- Clearly describing what you have implemented and why. For example, if your
|
||||
implementation differs from the issue description in some way or is a partial
|
||||
step towards the requirements described in the issue, be sure to call
|
||||
out those differences.
|
||||
- Being responsive to feedback on pull requests. This means incorporating or
|
||||
responding to all suggested changes, and leaving a note if you won't be
|
||||
able to address things within a few days.
|
||||
- Being helpful and friendly on the [Zulip community
|
||||
server](https://zulip.com/development-community/).
|
||||
|
||||
[great-questions]: https://zulip.readthedocs.io/en/latest/contributing/asking-great-questions.html
|
||||
|
||||
## Submitting a pull request
|
||||
|
||||
See the [guide on submitting a pull
|
||||
request](https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html)
|
||||
for detailed instructions on how to present your proposed changes to Zulip.
|
||||
|
||||
The [pull request review process
|
||||
guide](https://zulip.readthedocs.io/en/latest/contributing/review-process.html)
|
||||
explains the stages of review your PR will go through, and offers guidance on
|
||||
how to help the review process move forward.
|
||||
|
||||
It's OK if your first issue takes you a while; that's normal! You'll be able to
|
||||
work a lot faster as you build experience.
|
||||
|
||||
## Beyond the first issue
|
||||
|
||||
To find a second issue to work on, we recommend looking through issues with the same
|
||||
`area:` label as the last issue you resolved. You'll be able to reuse the
|
||||
work you did learning how that part of the codebase works. Also, the path to
|
||||
becoming a core developer often involves taking ownership of one of these area
|
||||
labels.
|
||||
|
||||
## Common questions
|
||||
|
||||
- **What if somebody is already working on the issue I want to claim?** There
|
||||
are lots of issues to work on (likely
|
||||
[hundreds](https://github.com/zulip/zulip/issues?q=is%3Aissue%20state%3Aopen%20label%3A%22help%20wanted%22%20no%3Aassignee)
|
||||
in the server repository)! If somebody else is actively working on the issue,
|
||||
you can find a different one, or help with reviewing their work.
|
||||
|
||||
- **What if it looks like the person who's assigned an issue is no longer
|
||||
working on it?** Post a comment on the issue, e.g., "Hi @ someone! Are you
|
||||
still working on this one? I'd like to pick it up if not." You can pick up the
|
||||
issue if they say they don't plan to work on it more.
|
||||
|
||||
- **What if I don't get a response?** If you don't get a reply within 2-3
|
||||
days, go ahead and post a comment that you are working on the issue, and
|
||||
submit a pull request. If the original assignee ends up submitting a pull
|
||||
request first, no worries! You can help by providing feedback on their work,
|
||||
or submit your own PR if you think a different approach is needed (as
|
||||
described above).
|
||||
|
||||
- **What if there is already a pull request for the issue I want to work on?**
|
||||
See our [guide on continuing unfinished
|
||||
work](https://zulip.readthedocs.io/en/latest/contributing/continuing-unfinished-work.html).
|
||||
|
||||
- **What if somebody else claims an issue while I'm figuring out whether or not to
|
||||
work on it?** No worries! You can contribute by providing feedback on
|
||||
their pull request. If you've made good progress in understanding part of the
|
||||
codebase, you can also find another "help wanted" issue in the same area to
|
||||
work on.
|
||||
|
||||
- **Can I work on an old issue?** Of course! Open issues marked as “help wanted”
|
||||
are generally eligible to be worked on. If you find that the context around
|
||||
the issue has changed (e.g., the UI looks different), do your best to apply
|
||||
the current patterns, and comment on any differences from the spec in your PR
|
||||
description.
|
||||
|
||||
If picking up a bug, start by checking if you can replicate it. If it no longer
|
||||
replicates, post a comment on the issue explaining how you tested the
|
||||
behavior, and what you saw, with screenshots as appropriate. And if you _can_
|
||||
replicate it, fixing it is great!
|
||||
|
||||
If you're starting a major project where the issue was filed more than a
|
||||
couple of years ago, it's a good idea to post to the development community
|
||||
discussion thread for that issue to check if the thinking around it has
|
||||
changed.
|
||||
|
||||
- **Can I come up with my own feature idea and work on it?** We welcome
|
||||
suggestions of features or other improvements that you feel would be valuable. If you
|
||||
have a new feature you'd like to add, you can start a conversation [in our
|
||||
development community](https://zulip.com/development-community/#where-do-i-send-my-message)
|
||||
explaining the feature idea and the problem that you're hoping to solve.
|
||||
- **I'm waiting for the next round of review on my PR. Can I pick up
|
||||
another issue in the meantime?** Someone's first Zulip PR often
|
||||
requires quite a bit of iteration, so please [make sure your pull
|
||||
request is reviewable][reviewable-pull-requests] and go through at
|
||||
least one round of feedback from others before picking up a second
|
||||
issue. After that, sure! If
|
||||
[Zulipbot](https://github.com/zulip/zulipbot) does not allow you to
|
||||
claim an issue, you can post a comment describing the status of your
|
||||
other work on the issue you're interested in (including links to all open
|
||||
PRs), and asking for the issue to be assigned to you. Note that addressing
|
||||
feedback on in-progress PRs should always take priority over starting a new
|
||||
PR.
|
||||
- **I think my PR is done, but it hasn't been merged yet. What's going on?**
|
||||
1. **Double-check that you have addressed all the feedback**, including any comments
|
||||
on [Git commit
|
||||
discipline](https://zulip.readthedocs.io/en/latest/contributing/commit-discipline.html),
|
||||
and that automated tests are passing.
|
||||
2. If all the feedback has been addressed, did you [leave a
|
||||
comment](https://zulip.readthedocs.io/en/latest/contributing/review-process.html#how-to-help-move-the-review-process-forward)
|
||||
explaining that you have done so and **requesting another review**? If not,
|
||||
it may not be clear to project maintainers or reviewers that your PR is
|
||||
ready for another look.
|
||||
3. There may be a pause between initial rounds of review for your PR and final
|
||||
review by project maintainers. This is normal, and we encourage you to **work
|
||||
on other issues** while you wait.
|
||||
4. If you think the PR is ready and haven't seen any updates for a couple
|
||||
of weeks, it can be helpful to **leave another comment**. Summarize the
|
||||
overall state of the review process and your work, and indicate that you
|
||||
are waiting for a review.
|
||||
5. Finally, **Zulip project maintainers are people too**! They may be busy
|
||||
with other work, and sometimes they might even take a vacation. ;) It can
|
||||
occasionally take a few weeks for a PR in the final stages of the review
|
||||
process to be merged.
|
||||
|
||||
[reviewable-pull-requests]: https://zulip.readthedocs.io/en/latest/contributing/reviewable-prs.html
|
||||
|
||||
## Outreach programs
|
||||
|
||||
Zulip regularly participates in [Google Summer of Code
|
||||
(GSoC)](https://developers.google.com/open-source/gsoc/) and
|
||||
[Outreachy](https://www.outreachy.org/). We have been a GSoC mentoring
|
||||
organization since 2016, and we accept 15-20 GSoC participants each summer. In
|
||||
the past, we’ve also participated in [Google
|
||||
Code-In](https://developers.google.com/open-source/gci/), and hosted summer
|
||||
interns from Harvard, MIT, and Stanford.
|
||||
|
||||
Check out our [outreach programs
|
||||
overview](https://zulip.readthedocs.io/en/latest/outreach/overview.html) to learn
|
||||
more about participating in an outreach program with Zulip. Most of our program
|
||||
participants end up sticking around the project long-term, and many have become
|
||||
core team members, maintaining important parts of the project. We hope you
|
||||
apply!
|
@@ -1,25 +0,0 @@
|
||||
# This is a multiarch Dockerfile. See https://docs.docker.com/desktop/multi-arch/
|
||||
#
|
||||
# To set up the first time:
|
||||
# docker buildx create --name multiarch --use
|
||||
#
|
||||
# To build:
|
||||
# docker buildx build --pull --platform linux/amd64,linux/arm64 \
|
||||
# -f ./Dockerfile-postgresql -t zulip/zulip-postgresql:14 --push .
|
||||
|
||||
# Currently the PostgreSQL images do not support automatic upgrading of
|
||||
# the on-disk data in volumes. So the base image cannot currently be upgraded
|
||||
# without users needing a manual pgdump and restore.
|
||||
|
||||
# https://hub.docker.com/r/groonga/pgroonga/tags
|
||||
ARG PGROONGA_VERSION=latest
|
||||
ARG POSTGRESQL_VERSION=14
|
||||
FROM groonga/pgroonga:$PGROONGA_VERSION-alpine-$POSTGRESQL_VERSION-slim
|
||||
|
||||
# Install hunspell, Zulip stop words, and run Zulip database
|
||||
# init.
|
||||
RUN apk add -U --no-cache hunspell-en
|
||||
RUN ln -sf /usr/share/hunspell/en_US.dic /usr/local/share/postgresql/tsearch_data/en_us.dict && ln -sf /usr/share/hunspell/en_US.aff /usr/local/share/postgresql/tsearch_data/en_us.affix
|
||||
COPY puppet/zulip/files/postgresql/zulip_english.stop /usr/local/share/postgresql/tsearch_data/zulip_english.stop
|
||||
COPY scripts/setup/create-db.sql /docker-entrypoint-initdb.d/zulip-create-db.sql
|
||||
COPY scripts/setup/create-pgroonga.sql /docker-entrypoint-initdb.d/zulip-create-pgroonga.sql
|
203
LICENSE
203
LICENSE
@@ -1,202 +1,5 @@
|
||||
Copyright <20> 2012-2013 Zulip, Inc.
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
This software is licensed under the Zulip Enterprise License Agreement.
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
Zulip can be reached at support@zulip.com.
|
||||
|
18
NOTICE
18
NOTICE
@@ -1,18 +0,0 @@
|
||||
Copyright 2012–2015 Dropbox, Inc., 2015–2021 Kandra Labs, Inc., and contributors
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this project except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
The software includes some works released by third parties under other
|
||||
free and open source licenses. Those works are redistributed under the
|
||||
license terms under which the works were received. For more details,
|
||||
see the ``docs/THIRDPARTY`` file included with this distribution.
|
82
README.md
82
README.md
@@ -1,82 +0,0 @@
|
||||
# Zulip overview
|
||||
|
||||
[Zulip](https://zulip.com) is an open-source team collaboration tool with unique
|
||||
[topic-based threading][why-zulip] that combines the best of email and chat to
|
||||
make remote work productive and delightful. Fortune 500 companies, [leading open
|
||||
source projects][rust-case-study], and thousands of other organizations use
|
||||
Zulip every day. Zulip is the only [modern team chat app][features] that is
|
||||
designed for both live and asynchronous conversations.
|
||||
|
||||
Zulip is built by a distributed community of developers from all around the
|
||||
world, with 97+ people who have each contributed 100+ commits. With
|
||||
over 1,500 contributors merging over 500 commits a month, Zulip is the
|
||||
largest and fastest growing open source team chat project.
|
||||
|
||||
Come find us on the [development community chat](https://zulip.com/development-community/)!
|
||||
|
||||
[](https://github.com/zulip/zulip/actions/workflows/zulip-ci.yml?query=branch%3Amain)
|
||||
[](https://codecov.io/gh/zulip/zulip)
|
||||
[][mypy-coverage]
|
||||
[](https://github.com/astral-sh/ruff)
|
||||
[](https://github.com/prettier/prettier)
|
||||
[](https://github.com/zulip/zulip/releases/latest)
|
||||
[](https://zulip.readthedocs.io/en/latest/)
|
||||
[](https://chat.zulip.org)
|
||||
[](https://twitter.com/zulip)
|
||||
[](https://github.com/sponsors/zulip)
|
||||
|
||||
[mypy-coverage]: https://blog.zulip.org/2016/10/13/static-types-in-python-oh-mypy/
|
||||
[why-zulip]: https://zulip.com/why-zulip/
|
||||
[rust-case-study]: https://zulip.com/case-studies/rust/
|
||||
[features]: https://zulip.com/features/
|
||||
|
||||
## Getting started
|
||||
|
||||
- **Contributing code**. Check out our [guide for new
|
||||
contributors](https://zulip.readthedocs.io/en/latest/contributing/contributing.html)
|
||||
to get started. We have invested in making Zulip’s code highly
|
||||
readable, thoughtfully tested, and easy to modify. Beyond that, we
|
||||
have written an extraordinary 150K words of documentation for Zulip
|
||||
contributors.
|
||||
|
||||
- **Contributing non-code**. [Report an
|
||||
issue](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#reporting-issues),
|
||||
[translate](https://zulip.readthedocs.io/en/latest/translating/translating.html)
|
||||
Zulip into your language, or [give us
|
||||
feedback](https://zulip.readthedocs.io/en/latest/contributing/suggesting-features.html).
|
||||
We'd love to hear from you, whether you've been using Zulip for years, or are just
|
||||
trying it out for the first time.
|
||||
|
||||
- **Checking Zulip out**. The best way to see Zulip in action is to drop by the
|
||||
[Zulip community server](https://zulip.com/development-community/). We also
|
||||
recommend reading about Zulip's [unique
|
||||
approach](https://zulip.com/why-zulip/) to organizing conversations.
|
||||
|
||||
- **Running a Zulip server**. Self-host Zulip directly on Ubuntu or Debian
|
||||
Linux, in [Docker](https://github.com/zulip/docker-zulip), or with prebuilt
|
||||
images for [Digital Ocean](https://marketplace.digitalocean.com/apps/zulip) and
|
||||
[Render](https://render.com/docs/deploy-zulip).
|
||||
Learn more about [self-hosting Zulip](https://zulip.com/self-hosting/).
|
||||
|
||||
- **Using Zulip without setting up a server**. Learn about [Zulip
|
||||
Cloud](https://zulip.com/plans/) hosting options. Zulip sponsors free [Zulip
|
||||
Cloud Standard](https://zulip.com/plans/) for hundreds of worthy
|
||||
organizations, including [fellow open-source
|
||||
projects](https://zulip.com/for/open-source/).
|
||||
|
||||
- **Participating in [outreach
|
||||
programs](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#outreach-programs)**
|
||||
like [Google Summer of Code](https://developers.google.com/open-source/gsoc/)
|
||||
and [Outreachy](https://www.outreachy.org/).
|
||||
|
||||
- **Supporting Zulip**. Advocate for your organization to use Zulip, become a
|
||||
[sponsor](https://github.com/sponsors/zulip), write a review in the mobile app
|
||||
stores, or [help others find
|
||||
Zulip](https://zulip.readthedocs.io/en/latest/contributing/contributing.html#help-others-find-zulip).
|
||||
|
||||
You may also be interested in reading our [blog](https://blog.zulip.org/), and
|
||||
following us on [Twitter](https://twitter.com/zulip) and
|
||||
[LinkedIn](https://www.linkedin.com/company/zulip-project/).
|
||||
|
||||
Zulip is distributed under the
|
||||
[Apache 2.0](https://github.com/zulip/zulip/blob/main/LICENSE) license.
|
37
SECURITY.md
37
SECURITY.md
@@ -1,37 +0,0 @@
|
||||
# Security policy
|
||||
|
||||
## Reporting a vulnerability
|
||||
|
||||
We love responsible reports of (potential) security issues in Zulip,
|
||||
whether in the latest release or our development branch.
|
||||
|
||||
Our security contact is security@zulip.com. Reporters should expect a
|
||||
response within 24 hours.
|
||||
|
||||
Please include details on the issue and how you'd like to be credited
|
||||
in our release notes when we publish the fix.
|
||||
|
||||
Our [security model][securing-your-zulip-server] document may be a helpful
|
||||
resource.
|
||||
|
||||
## Security announcements
|
||||
|
||||
We send security announcements to our [announcement mailing
|
||||
list](https://groups.google.com/g/zulip-announce). If you are running
|
||||
Zulip in production, you should subscribe, by clicking "Join group" at
|
||||
the top of that page.
|
||||
|
||||
## Supported versions
|
||||
|
||||
Zulip provides security support for the latest major release, in the
|
||||
form of minor security/maintenance releases.
|
||||
|
||||
We work hard to make [upgrades][upgrades] reliable, so that there's no
|
||||
reason to run older major releases.
|
||||
|
||||
See also our documentation on the [Zulip release
|
||||
lifecycle][release-lifecycle].
|
||||
|
||||
[securing-your-zulip-server]: https://zulip.readthedocs.io/en/latest/production/securing-your-zulip-server.html
|
||||
[upgrades]: https://zulip.readthedocs.io/en/stable/production/upgrade.html#upgrading-to-a-release
|
||||
[release-lifecycle]: https://zulip.readthedocs.io/en/latest/overview/release-lifecycle.html
|
108
Vagrantfile
vendored
108
Vagrantfile
vendored
@@ -1,108 +0,0 @@
|
||||
# -*- mode: ruby -*-
|
||||
|
||||
Vagrant.require_version ">= 2.2.6"
|
||||
|
||||
Vagrant.configure("2") do |config|
|
||||
# The Zulip development environment runs on 9991 on the guest.
|
||||
host_port = 9991
|
||||
http_proxy = https_proxy = no_proxy = nil
|
||||
host_ip_addr = "127.0.0.1"
|
||||
|
||||
# System settings for the virtual machine.
|
||||
vm_num_cpus = "2"
|
||||
vm_memory = "2048"
|
||||
|
||||
ubuntu_mirror = ""
|
||||
vboxadd_version = nil
|
||||
|
||||
config.vm.box = "bento/ubuntu-22.04"
|
||||
|
||||
config.vm.synced_folder ".", "/vagrant", disabled: true
|
||||
config.vm.synced_folder ".", "/srv/zulip", docker_consistency: "z"
|
||||
|
||||
vagrant_config_file = ENV["HOME"] + "/.zulip-vagrant-config"
|
||||
if File.file?(vagrant_config_file)
|
||||
IO.foreach(vagrant_config_file) do |line|
|
||||
line.chomp!
|
||||
key, value = line.split(nil, 2)
|
||||
case key
|
||||
when /^([#;]|$)/ # ignore comments
|
||||
when "HTTP_PROXY"; http_proxy = value
|
||||
when "HTTPS_PROXY"; https_proxy = value
|
||||
when "NO_PROXY"; no_proxy = value
|
||||
when "HOST_PORT"; host_port = value.to_i
|
||||
when "HOST_IP_ADDR"; host_ip_addr = value
|
||||
when "GUEST_CPUS"; vm_num_cpus = value
|
||||
when "GUEST_MEMORY_MB"; vm_memory = value
|
||||
when "UBUNTU_MIRROR"; ubuntu_mirror = value
|
||||
when "VBOXADD_VERSION"; vboxadd_version = value
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if Vagrant.has_plugin?("vagrant-proxyconf")
|
||||
if !http_proxy.nil?
|
||||
config.proxy.http = http_proxy
|
||||
end
|
||||
if !https_proxy.nil?
|
||||
config.proxy.https = https_proxy
|
||||
end
|
||||
if !no_proxy.nil?
|
||||
config.proxy.no_proxy = no_proxy
|
||||
end
|
||||
elsif !http_proxy.nil? or !https_proxy.nil?
|
||||
# This prints twice due to https://github.com/hashicorp/vagrant/issues/7504
|
||||
# We haven't figured out a workaround.
|
||||
puts "You have specified value for proxy in ~/.zulip-vagrant-config file but did not " \
|
||||
"install the vagrant-proxyconf plugin. To install it, run `vagrant plugin install " \
|
||||
"vagrant-proxyconf` in a terminal. This error will appear twice."
|
||||
exit
|
||||
end
|
||||
|
||||
config.vm.network "forwarded_port", guest: 9991, host: host_port, host_ip: host_ip_addr
|
||||
config.vm.network "forwarded_port", guest: 9994, host: host_port + 3, host_ip: host_ip_addr
|
||||
# Specify Docker provider before VirtualBox provider so it's preferred.
|
||||
config.vm.provider "docker" do |d, override|
|
||||
override.vm.box = nil
|
||||
d.build_dir = File.join(__dir__, "tools", "setup", "dev-vagrant-docker")
|
||||
d.build_args = ["--build-arg", "VAGRANT_UID=#{Process.uid}"]
|
||||
if !ubuntu_mirror.empty?
|
||||
d.build_args += ["--build-arg", "UBUNTU_MIRROR=#{ubuntu_mirror}"]
|
||||
end
|
||||
d.has_ssh = true
|
||||
d.create_args = ["--ulimit", "nofile=1024:65536"]
|
||||
end
|
||||
|
||||
config.vm.provider "virtualbox" do |vb, override|
|
||||
# It's possible we can get away with just 1.5GB; more testing needed
|
||||
vb.memory = vm_memory
|
||||
vb.cpus = vm_num_cpus
|
||||
|
||||
if !vboxadd_version.nil?
|
||||
override.vbguest.installer = Class.new(VagrantVbguest::Installers::Ubuntu) do
|
||||
define_method(:host_version) do |reload = false|
|
||||
VagrantVbguest::Version(vboxadd_version)
|
||||
end
|
||||
end
|
||||
override.vbguest.allow_downgrade = true
|
||||
override.vbguest.iso_path = "https://download.virtualbox.org/virtualbox/#{vboxadd_version}/VBoxGuestAdditions_#{vboxadd_version}.iso"
|
||||
end
|
||||
end
|
||||
|
||||
config.vm.provider "hyperv" do |h, override|
|
||||
h.memory = vm_memory
|
||||
h.maxmemory = vm_memory
|
||||
h.cpus = vm_num_cpus
|
||||
end
|
||||
|
||||
config.vm.provider "parallels" do |prl, override|
|
||||
prl.memory = vm_memory
|
||||
prl.cpus = vm_num_cpus
|
||||
end
|
||||
|
||||
config.vm.provision "shell",
|
||||
# We want provision to be run with the permissions of the vagrant user.
|
||||
privileged: false,
|
||||
path: "tools/setup/vagrant-provision",
|
||||
env: { "UBUNTU_MIRROR" => ubuntu_mirror }
|
||||
end
|
File diff suppressed because it is too large
Load Diff
@@ -1,77 +0,0 @@
|
||||
from math import sqrt
|
||||
from random import Random
|
||||
|
||||
from analytics.lib.counts import CountStat
|
||||
|
||||
|
||||
def generate_time_series_data(
|
||||
days: int = 100,
|
||||
business_hours_base: float = 10,
|
||||
non_business_hours_base: float = 10,
|
||||
growth: float = 1,
|
||||
autocorrelation: float = 0,
|
||||
spikiness: float = 1,
|
||||
holiday_rate: float = 0,
|
||||
frequency: str = CountStat.DAY,
|
||||
partial_sum: bool = False,
|
||||
random_seed: int = 26,
|
||||
) -> list[int]:
|
||||
"""
|
||||
Generate semi-realistic looking time series data for testing analytics graphs.
|
||||
|
||||
days -- Number of days of data. Is the number of data points generated if
|
||||
frequency is CountStat.DAY.
|
||||
business_hours_base -- Average value during a business hour (or day) at beginning of
|
||||
time series, if frequency is CountStat.HOUR (CountStat.DAY, respectively).
|
||||
non_business_hours_base -- The above, for non-business hours/days.
|
||||
growth -- Ratio between average values at end of time series and beginning of time series.
|
||||
autocorrelation -- Makes neighboring data points look more like each other. At 0 each
|
||||
point is unaffected by the previous point, and at 1 each point is a deterministic
|
||||
function of the previous point.
|
||||
spikiness -- 0 means no randomness (other than holiday_rate), higher values increase
|
||||
the variance.
|
||||
holiday_rate -- Fraction of days randomly set to 0, largely for testing how we handle 0s.
|
||||
frequency -- Should be CountStat.HOUR or CountStat.DAY.
|
||||
partial_sum -- If True, return partial sum of the series.
|
||||
random_seed -- Seed for random number generator.
|
||||
"""
|
||||
rng = Random(random_seed)
|
||||
|
||||
if frequency == CountStat.HOUR:
|
||||
length = days * 24
|
||||
seasonality = [non_business_hours_base] * 24 * 7
|
||||
for day in range(5):
|
||||
for hour in range(8):
|
||||
seasonality[24 * day + hour] = business_hours_base
|
||||
holidays = []
|
||||
for i in range(days):
|
||||
holidays.extend([rng.random() < holiday_rate] * 24)
|
||||
elif frequency == CountStat.DAY:
|
||||
length = days
|
||||
seasonality = [8 * business_hours_base + 16 * non_business_hours_base] * 5 + [
|
||||
24 * non_business_hours_base
|
||||
] * 2
|
||||
holidays = [rng.random() < holiday_rate for i in range(days)]
|
||||
else:
|
||||
raise AssertionError(f"Unknown frequency: {frequency}")
|
||||
if length < 2:
|
||||
raise AssertionError(
|
||||
f"Must be generating at least 2 data points. Currently generating {length}"
|
||||
)
|
||||
growth_base = growth ** (1.0 / (length - 1))
|
||||
values_no_noise = [seasonality[i % len(seasonality)] * (growth_base**i) for i in range(length)]
|
||||
|
||||
noise_scalars = [rng.gauss(0, 1)]
|
||||
for i in range(1, length):
|
||||
noise_scalars.append(
|
||||
noise_scalars[-1] * autocorrelation + rng.gauss(0, 1) * (1 - autocorrelation)
|
||||
)
|
||||
|
||||
values = [
|
||||
0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
|
||||
for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays, strict=False)
|
||||
]
|
||||
if partial_sum:
|
||||
for i in range(1, length):
|
||||
values[i] = values[i - 1] + values[i]
|
||||
return [max(v, 0) for v in values]
|
@@ -1,33 +0,0 @@
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from analytics.lib.counts import CountStat
|
||||
from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC
|
||||
|
||||
|
||||
# If min_length is None, returns end_times from ceiling(start) to floor(end), inclusive.
|
||||
# If min_length is greater than 0, pads the list to the left.
|
||||
# So informally, time_range(Sep 20, Sep 22, day, None) returns [Sep 20, Sep 21, Sep 22],
|
||||
# and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
|
||||
def time_range(
|
||||
start: datetime, end: datetime, frequency: str, min_length: int | None
|
||||
) -> list[datetime]:
|
||||
verify_UTC(start)
|
||||
verify_UTC(end)
|
||||
if frequency == CountStat.HOUR:
|
||||
end = floor_to_hour(end)
|
||||
step = timedelta(hours=1)
|
||||
elif frequency == CountStat.DAY:
|
||||
end = floor_to_day(end)
|
||||
step = timedelta(days=1)
|
||||
else:
|
||||
raise AssertionError(f"Unknown frequency: {frequency}")
|
||||
|
||||
times = []
|
||||
if min_length is not None:
|
||||
start = min(start, end - (min_length - 1) * step)
|
||||
current = end
|
||||
while current >= start:
|
||||
times.append(current)
|
||||
current -= step
|
||||
times.reverse()
|
||||
return times
|
57
analytics/management/commands/active_user_stats.py
Normal file
57
analytics/management/commands/active_user_stats.py
Normal file
@@ -0,0 +1,57 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from zerver.models import UserPresence, UserActivity
|
||||
from zerver.lib.utils import statsd, statsd_key
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
from collections import defaultdict
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = """Sends active user statistics to statsd.
|
||||
|
||||
Run as a cron job that runs every 10 minutes."""
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# Get list of all active users in the last 1 week
|
||||
cutoff = datetime.now() - timedelta(minutes=30, hours=168)
|
||||
|
||||
users = UserPresence.objects.select_related().filter(timestamp__gt=cutoff)
|
||||
|
||||
# Calculate 10min, 2hrs, 12hrs, 1day, 2 business days (TODO business days), 1 week bucket of stats
|
||||
hour_buckets = [0.16, 2, 12, 24, 48, 168]
|
||||
user_info = defaultdict(dict)
|
||||
|
||||
for last_presence in users:
|
||||
if last_presence.status == UserPresence.IDLE:
|
||||
known_active = last_presence.timestamp - timedelta(minutes=30)
|
||||
else:
|
||||
known_active = last_presence.timestamp
|
||||
|
||||
for bucket in hour_buckets:
|
||||
if not bucket in user_info[last_presence.user_profile.realm.domain]:
|
||||
user_info[last_presence.user_profile.realm.domain][bucket] = []
|
||||
if datetime.now(known_active.tzinfo) - known_active < timedelta(hours=bucket):
|
||||
user_info[last_presence.user_profile.realm.domain][bucket].append(last_presence.user_profile.email)
|
||||
|
||||
for realm, buckets in user_info.items():
|
||||
print("Realm %s" % realm)
|
||||
for hr, users in sorted(buckets.items()):
|
||||
print("\tUsers for %s: %s" % (hr, len(users)))
|
||||
statsd.gauge("users.active.%s.%shr" % (statsd_key(realm, True), statsd_key(hr, True)), len(users))
|
||||
|
||||
# Also do stats for how many users have been reading the app.
|
||||
users_reading = UserActivity.objects.select_related().filter(query="/json/update_message_flags")
|
||||
user_info = defaultdict(dict)
|
||||
for activity in users_reading:
|
||||
for bucket in hour_buckets:
|
||||
if not bucket in user_info[activity.user_profile.realm.domain]:
|
||||
user_info[activity.user_profile.realm.domain][bucket] = []
|
||||
if datetime.now(activity.last_visit.tzinfo) - activity.last_visit < timedelta(hours=bucket):
|
||||
user_info[activity.user_profile.realm.domain][bucket].append(activity.user_profile.email)
|
||||
for realm, buckets in user_info.items():
|
||||
print("Realm %s" % realm)
|
||||
for hr, users in sorted(buckets.items()):
|
||||
print("\tUsers reading for %s: %s" % (hr, len(users)))
|
||||
statsd.gauge("users.reading.%s.%shr" % (statsd_key(realm, True), statsd_key(hr, True)), len(users))
|
24
analytics/management/commands/active_user_stats_by_day.py
Normal file
24
analytics/management/commands/active_user_stats_by_day.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import datetime
|
||||
import pytz
|
||||
|
||||
from optparse import make_option
|
||||
from django.core.management.base import BaseCommand
|
||||
from zerver.lib.statistics import activity_averages_during_day
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Generate statistics on user activity for a given day."
|
||||
|
||||
option_list = BaseCommand.option_list + \
|
||||
(make_option('--date', default=None, action='store',
|
||||
help="Day to query in format 2013-12-05. Default is yesterday"),)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
if options["date"] is None:
|
||||
date = datetime.datetime.now() - datetime.timedelta(days=1)
|
||||
else:
|
||||
date = datetime.datetime.strptime(options["date"], "%Y-%m-%d")
|
||||
print "Activity data for", date
|
||||
print activity_averages_during_day(date)
|
||||
print "Please note that the total registered user count is a total for today"
|
81
analytics/management/commands/analyze_mit.py
Normal file
81
analytics/management/commands/analyze_mit.py
Normal file
@@ -0,0 +1,81 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from optparse import make_option
|
||||
from django.core.management.base import BaseCommand
|
||||
from zerver.models import Recipient, Message
|
||||
from zerver.lib.timestamp import timestamp_to_datetime
|
||||
import datetime
|
||||
import time
|
||||
import logging
|
||||
|
||||
def compute_stats(log_level):
|
||||
logger = logging.getLogger()
|
||||
logger.setLevel(log_level)
|
||||
|
||||
one_week_ago = timestamp_to_datetime(time.time()) - datetime.timedelta(weeks=1)
|
||||
mit_query = Message.objects.filter(sender__realm__domain="mit.edu",
|
||||
recipient__type=Recipient.STREAM,
|
||||
pub_date__gt=one_week_ago)
|
||||
for bot_sender_start in ["imap.", "rcmd.", "sys."]:
|
||||
mit_query = mit_query.exclude(sender__email__startswith=(bot_sender_start))
|
||||
# Filtering for "/" covers tabbott/extra@ and all the daemon/foo bots.
|
||||
mit_query = mit_query.exclude(sender__email__contains=("/"))
|
||||
mit_query = mit_query.exclude(sender__email__contains=("aim.com"))
|
||||
mit_query = mit_query.exclude(
|
||||
sender__email__in=["rss@mit.edu", "bash@mit.edu", "apache@mit.edu",
|
||||
"bitcoin@mit.edu", "lp@mit.edu", "clocks@mit.edu",
|
||||
"root@mit.edu", "nagios@mit.edu",
|
||||
"www-data|local-realm@mit.edu"])
|
||||
user_counts = {}
|
||||
for m in mit_query.select_related("sending_client", "sender"):
|
||||
email = m.sender.email
|
||||
user_counts.setdefault(email, {})
|
||||
user_counts[email].setdefault(m.sending_client.name, 0)
|
||||
user_counts[email][m.sending_client.name] += 1
|
||||
|
||||
total_counts = {}
|
||||
total_user_counts = {}
|
||||
for email, counts in user_counts.items():
|
||||
total_user_counts.setdefault(email, 0)
|
||||
for client_name, count in counts.items():
|
||||
total_counts.setdefault(client_name, 0)
|
||||
total_counts[client_name] += count
|
||||
total_user_counts[email] += count
|
||||
|
||||
logging.debug("%40s | %10s | %s" % ("User", "Messages", "Percentage Zulip"))
|
||||
top_percents = {}
|
||||
for size in [10, 25, 50, 100, 200, len(total_user_counts.keys())]:
|
||||
top_percents[size] = 0
|
||||
for i, email in enumerate(sorted(total_user_counts.keys(),
|
||||
key=lambda x: -total_user_counts[x])):
|
||||
percent_zulip = round(100 - (user_counts[email].get("zephyr_mirror", 0)) * 100. /
|
||||
total_user_counts[email], 1)
|
||||
for size in top_percents.keys():
|
||||
top_percents.setdefault(size, 0)
|
||||
if i < size:
|
||||
top_percents[size] += (percent_zulip * 1.0 / size)
|
||||
|
||||
logging.debug("%40s | %10s | %s%%" % (email, total_user_counts[email],
|
||||
percent_zulip))
|
||||
|
||||
logging.info("")
|
||||
for size in sorted(top_percents.keys()):
|
||||
logging.info("Top %6s | %s%%" % (size, round(top_percents[size], 1)))
|
||||
|
||||
grand_total = sum(total_counts.values())
|
||||
print grand_total
|
||||
logging.info("%15s | %s" % ("Client", "Percentage"))
|
||||
for client in total_counts.keys():
|
||||
logging.info("%15s | %s%%" % (client, round(100. * total_counts[client] / grand_total, 1)))
|
||||
|
||||
class Command(BaseCommand):
|
||||
option_list = BaseCommand.option_list + \
|
||||
(make_option('--verbose', default=False, action='store_true'),)
|
||||
|
||||
help = "Compute statistics on MIT Zephyr usage."
|
||||
|
||||
def handle(self, *args, **options):
|
||||
level = logging.INFO
|
||||
if options["verbose"]:
|
||||
level = logging.DEBUG
|
||||
compute_stats(level)
|
57
analytics/management/commands/analyze_user_activity.py
Normal file
57
analytics/management/commands/analyze_user_activity.py
Normal file
@@ -0,0 +1,57 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from zerver.lib.statistics import seconds_usage_between
|
||||
|
||||
from optparse import make_option
|
||||
from django.core.management.base import BaseCommand
|
||||
from zerver.models import UserProfile
|
||||
import datetime
|
||||
from django.utils.timezone import utc
|
||||
|
||||
def analyze_activity(options):
|
||||
day_start = datetime.datetime.strptime(options["date"], "%Y-%m-%d").replace(tzinfo=utc)
|
||||
day_end = day_start + datetime.timedelta(days=options["duration"])
|
||||
|
||||
user_profile_query = UserProfile.objects.all()
|
||||
if options["realm"]:
|
||||
user_profile_query = user_profile_query.filter(realm__domain=options["realm"])
|
||||
|
||||
print "Per-user online duration:\n"
|
||||
total_duration = datetime.timedelta(0)
|
||||
for user_profile in user_profile_query:
|
||||
duration = seconds_usage_between(user_profile, day_start, day_end)
|
||||
|
||||
if duration == datetime.timedelta(0):
|
||||
continue
|
||||
|
||||
total_duration += duration
|
||||
print "%-*s%s" % (37, user_profile.email, duration, )
|
||||
|
||||
print "\nTotal Duration: %s" % (total_duration,)
|
||||
print "\nTotal Duration in minutes: %s" % (total_duration.total_seconds() / 60.,)
|
||||
print "Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,)
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = """Report analytics of user activity on a per-user and realm basis.
|
||||
|
||||
This command aggregates user activity data that is collected by each user using Zulip. It attempts
|
||||
to approximate how much each user has been using Zulip per day, measured by recording each 15 minute
|
||||
period where some activity has occurred (mouse move or keyboard activity).
|
||||
|
||||
It will correctly not count server-initiated reloads in the activity statistics.
|
||||
|
||||
The duration flag can be used to control how many days to show usage duration for
|
||||
|
||||
Usage: python manage.py analyze_user_activity [--realm=zulip.com] [--date=2013-09-10] [--duration=1]
|
||||
|
||||
By default, if no date is selected 2013-09-10 is used. If no realm is provided, information
|
||||
is shown for all realms"""
|
||||
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--realm', action='store'),
|
||||
make_option('--date', action='store', default="2013-09-06"),
|
||||
make_option('--duration', action='store', default=1, type=int, help="How many days to show usage information for"),
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
analyze_activity(options)
|
@@ -1,92 +0,0 @@
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
from typing import Any, Literal
|
||||
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import ALL_COUNT_STATS, CountStat
|
||||
from analytics.models import installation_epoch
|
||||
from scripts.lib.zulip_tools import atomic_nagios_write
|
||||
from zerver.lib.management import ZulipBaseCommand
|
||||
from zerver.lib.timestamp import TimeZoneNotUTCError, floor_to_day, floor_to_hour, verify_UTC
|
||||
from zerver.models import Realm
|
||||
|
||||
states = {
|
||||
0: "OK",
|
||||
1: "WARNING",
|
||||
2: "CRITICAL",
|
||||
3: "UNKNOWN",
|
||||
}
|
||||
|
||||
|
||||
@dataclass
|
||||
class NagiosResult:
|
||||
status: Literal["ok", "warning", "critical", "unknown"]
|
||||
message: str
|
||||
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
help = """Checks FillState table.
|
||||
|
||||
Run as a cron job that runs every hour."""
|
||||
|
||||
@override
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
fill_state = self.get_fill_state()
|
||||
atomic_nagios_write("check-analytics-state", fill_state.status, fill_state.message)
|
||||
|
||||
def get_fill_state(self) -> NagiosResult:
|
||||
if not Realm.objects.exists():
|
||||
return NagiosResult(status="ok", message="No realms exist, so not checking FillState.")
|
||||
|
||||
warning_unfilled_properties = []
|
||||
critical_unfilled_properties = []
|
||||
for property, stat in ALL_COUNT_STATS.items():
|
||||
last_fill = stat.last_successful_fill()
|
||||
if last_fill is None:
|
||||
last_fill = installation_epoch()
|
||||
try:
|
||||
verify_UTC(last_fill)
|
||||
except TimeZoneNotUTCError:
|
||||
return NagiosResult(
|
||||
status="critical", message=f"FillState not in UTC for {property}"
|
||||
)
|
||||
|
||||
if stat.frequency == CountStat.DAY:
|
||||
floor_function = floor_to_day
|
||||
warning_threshold = timedelta(hours=26)
|
||||
critical_threshold = timedelta(hours=50)
|
||||
else: # CountStat.HOUR
|
||||
floor_function = floor_to_hour
|
||||
warning_threshold = timedelta(minutes=90)
|
||||
critical_threshold = timedelta(minutes=150)
|
||||
|
||||
if floor_function(last_fill) != last_fill:
|
||||
return NagiosResult(
|
||||
status="critical",
|
||||
message=f"FillState not on {stat.frequency} boundary for {property}",
|
||||
)
|
||||
|
||||
time_to_last_fill = timezone_now() - last_fill
|
||||
if time_to_last_fill > critical_threshold:
|
||||
critical_unfilled_properties.append(property)
|
||||
elif time_to_last_fill > warning_threshold:
|
||||
warning_unfilled_properties.append(property)
|
||||
|
||||
if len(critical_unfilled_properties) == 0 and len(warning_unfilled_properties) == 0:
|
||||
return NagiosResult(status="ok", message="FillState looks fine.")
|
||||
if len(critical_unfilled_properties) == 0:
|
||||
return NagiosResult(
|
||||
status="warning",
|
||||
message="Missed filling {} once.".format(
|
||||
", ".join(warning_unfilled_properties),
|
||||
),
|
||||
)
|
||||
return NagiosResult(
|
||||
status="critical",
|
||||
message="Missed filling {} once. Missed filling {} at least twice.".format(
|
||||
", ".join(warning_unfilled_properties),
|
||||
", ".join(critical_unfilled_properties),
|
||||
),
|
||||
)
|
@@ -1,25 +0,0 @@
|
||||
from argparse import ArgumentParser
|
||||
from typing import Any
|
||||
|
||||
from django.core.management.base import CommandError
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import do_drop_all_analytics_tables
|
||||
from zerver.lib.management import ZulipBaseCommand
|
||||
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
help = """Clear analytics tables."""
|
||||
|
||||
@override
|
||||
def add_arguments(self, parser: ArgumentParser) -> None:
|
||||
parser.add_argument("--force", action="store_true", help="Clear analytics tables.")
|
||||
|
||||
@override
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
if options["force"]:
|
||||
do_drop_all_analytics_tables()
|
||||
else:
|
||||
raise CommandError(
|
||||
"Would delete all data from analytics tables (!); use --force to do so."
|
||||
)
|
@@ -1,27 +0,0 @@
|
||||
from argparse import ArgumentParser
|
||||
from typing import Any
|
||||
|
||||
from django.core.management.base import CommandError
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import ALL_COUNT_STATS, do_drop_single_stat
|
||||
from zerver.lib.management import ZulipBaseCommand
|
||||
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
help = """Clear analytics tables."""
|
||||
|
||||
@override
|
||||
def add_arguments(self, parser: ArgumentParser) -> None:
|
||||
parser.add_argument("--force", action="store_true", help="Actually do it.")
|
||||
parser.add_argument("--property", help="The property of the stat to be cleared.")
|
||||
|
||||
@override
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
property = options["property"]
|
||||
if property not in ALL_COUNT_STATS:
|
||||
raise CommandError(f"Invalid property: {property}")
|
||||
if not options["force"]:
|
||||
raise CommandError("No action taken. Use --force.")
|
||||
|
||||
do_drop_single_stat(property)
|
69
analytics/management/commands/client_activity.py
Normal file
69
analytics/management/commands/client_activity.py
Normal file
@@ -0,0 +1,69 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db.models import Count
|
||||
|
||||
from zerver.models import UserActivity, UserProfile, Realm, \
|
||||
get_realm, get_user_profile_by_email
|
||||
|
||||
import datetime
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = """Report rough client activity globally, for a realm, or for a user
|
||||
|
||||
Usage examples:
|
||||
|
||||
python manage.py client_activity
|
||||
python manage.py client_activity zulip.com
|
||||
python manage.py client_activity jesstess@zulip.com"""
|
||||
|
||||
def compute_activity(self, user_activity_objects):
|
||||
# Report data from the past week.
|
||||
#
|
||||
# This is a rough report of client activity because we inconsistently
|
||||
# register activity from various clients; think of it as telling you
|
||||
# approximately how many people from a group have used a particular
|
||||
# client recently. For example, this might be useful to get a sense of
|
||||
# how popular different versions of a desktop client are.
|
||||
#
|
||||
# Importantly, this does NOT tell you anything about the relative
|
||||
# volumes of requests from clients.
|
||||
threshold = datetime.datetime.now() - datetime.timedelta(days=7)
|
||||
client_counts = user_activity_objects.filter(
|
||||
last_visit__gt=threshold).values("client__name").annotate(
|
||||
count=Count('client__name'))
|
||||
|
||||
total = 0
|
||||
counts = []
|
||||
for client_type in client_counts:
|
||||
count = client_type["count"]
|
||||
client = client_type["client__name"]
|
||||
total += count
|
||||
counts.append((count, client))
|
||||
|
||||
counts.sort()
|
||||
|
||||
for count in counts:
|
||||
print "%25s %15d" % (count[1], count[0])
|
||||
print "Total:", total
|
||||
|
||||
|
||||
def handle(self, *args, **options):
|
||||
if len(args) == 0:
|
||||
# Report global activity.
|
||||
self.compute_activity(UserActivity.objects.all())
|
||||
elif len(args) == 1:
|
||||
try:
|
||||
# Report activity for a user.
|
||||
user_profile = get_user_profile_by_email(args[0])
|
||||
self.compute_activity(UserActivity.objects.filter(
|
||||
user_profile=user_profile))
|
||||
except UserProfile.DoesNotExist:
|
||||
try:
|
||||
# Report activity for a realm.
|
||||
realm = get_realm(args[0])
|
||||
self.compute_activity(UserActivity.objects.filter(
|
||||
user_profile__realm=realm))
|
||||
except Realm.DoesNotExist:
|
||||
print "Unknown user or domain %s" % (args[0],)
|
||||
exit(1)
|
@@ -1,350 +0,0 @@
|
||||
from collections.abc import Mapping
|
||||
from datetime import timedelta
|
||||
from typing import Any, TypeAlias
|
||||
|
||||
from django.core.files.uploadedfile import UploadedFile
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import COUNT_STATS, CountStat, do_drop_all_analytics_tables
|
||||
from analytics.lib.fixtures import generate_time_series_data
|
||||
from analytics.lib.time_utils import time_range
|
||||
from analytics.models import (
|
||||
BaseCount,
|
||||
FillState,
|
||||
InstallationCount,
|
||||
RealmCount,
|
||||
StreamCount,
|
||||
UserCount,
|
||||
)
|
||||
from zerver.actions.create_realm import do_create_realm
|
||||
from zerver.lib.create_user import create_user
|
||||
from zerver.lib.management import ZulipBaseCommand
|
||||
from zerver.lib.storage import static_path
|
||||
from zerver.lib.stream_color import STREAM_ASSIGNMENT_COLORS
|
||||
from zerver.lib.stream_subscription import create_stream_subscription
|
||||
from zerver.lib.streams import get_default_values_for_stream_permission_group_settings
|
||||
from zerver.lib.timestamp import floor_to_day
|
||||
from zerver.lib.upload import upload_message_attachment_from_request
|
||||
from zerver.models import Client, Realm, RealmAuditLog, Recipient, Stream, UserProfile
|
||||
from zerver.models.groups import NamedUserGroup, SystemGroups, UserGroupMembership
|
||||
from zerver.models.realm_audit_logs import AuditLogEventType
|
||||
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
help = """Populates analytics tables with randomly generated data."""
|
||||
|
||||
DAYS_OF_DATA = 100
|
||||
random_seed = 26
|
||||
|
||||
def generate_fixture_data(
|
||||
self,
|
||||
stat: CountStat,
|
||||
business_hours_base: float,
|
||||
non_business_hours_base: float,
|
||||
growth: float,
|
||||
autocorrelation: float,
|
||||
spikiness: float,
|
||||
holiday_rate: float = 0,
|
||||
partial_sum: bool = False,
|
||||
) -> list[int]:
|
||||
self.random_seed += 1
|
||||
return generate_time_series_data(
|
||||
days=self.DAYS_OF_DATA,
|
||||
business_hours_base=business_hours_base,
|
||||
non_business_hours_base=non_business_hours_base,
|
||||
growth=growth,
|
||||
autocorrelation=autocorrelation,
|
||||
spikiness=spikiness,
|
||||
holiday_rate=holiday_rate,
|
||||
frequency=stat.frequency,
|
||||
partial_sum=partial_sum,
|
||||
random_seed=self.random_seed,
|
||||
)
|
||||
|
||||
@override
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
# TODO: This should arguably only delete the objects
|
||||
# associated with the "analytics" realm.
|
||||
do_drop_all_analytics_tables()
|
||||
|
||||
# This also deletes any objects with this realm as a foreign key
|
||||
Realm.objects.filter(string_id="analytics").delete()
|
||||
|
||||
# Because we just deleted a bunch of objects in the database
|
||||
# directly (rather than deleting individual objects in Django,
|
||||
# in which case our post_save hooks would have flushed the
|
||||
# individual objects from memcached for us), we need to flush
|
||||
# memcached in order to ensure deleted objects aren't still
|
||||
# present in the memcached cache.
|
||||
from zerver.apps import flush_cache
|
||||
|
||||
flush_cache(None)
|
||||
|
||||
installation_time = timezone_now() - timedelta(days=self.DAYS_OF_DATA)
|
||||
last_end_time = floor_to_day(timezone_now())
|
||||
realm = do_create_realm(
|
||||
string_id="analytics", name="Analytics", date_created=installation_time
|
||||
)
|
||||
|
||||
owners_system_group = NamedUserGroup.objects.get(
|
||||
name=SystemGroups.OWNERS, realm_for_sharding=realm, is_system_group=True
|
||||
)
|
||||
guests_system_group = NamedUserGroup.objects.get(
|
||||
name=SystemGroups.EVERYONE, realm_for_sharding=realm, is_system_group=True
|
||||
)
|
||||
|
||||
shylock = create_user(
|
||||
"shylock@analytics.ds",
|
||||
"Shylock",
|
||||
realm,
|
||||
full_name="Shylock",
|
||||
role=UserProfile.ROLE_REALM_OWNER,
|
||||
force_date_joined=installation_time,
|
||||
)
|
||||
UserGroupMembership.objects.create(user_profile=shylock, user_group=owners_system_group)
|
||||
|
||||
# Create guest user for set_guest_users_statistic.
|
||||
bassanio = create_user(
|
||||
"bassanio@analytics.ds",
|
||||
"Bassanio",
|
||||
realm,
|
||||
full_name="Bassanio",
|
||||
role=UserProfile.ROLE_GUEST,
|
||||
force_date_joined=installation_time,
|
||||
)
|
||||
UserGroupMembership.objects.create(user_profile=bassanio, user_group=guests_system_group)
|
||||
|
||||
stream = Stream.objects.create(
|
||||
name="all",
|
||||
realm=realm,
|
||||
date_created=installation_time,
|
||||
**get_default_values_for_stream_permission_group_settings(realm),
|
||||
)
|
||||
recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
|
||||
stream.recipient = recipient
|
||||
stream.save(update_fields=["recipient"])
|
||||
|
||||
# Subscribe shylock to the stream to avoid invariant failures.
|
||||
create_stream_subscription(
|
||||
user_profile=shylock,
|
||||
recipient=recipient,
|
||||
stream=stream,
|
||||
color=STREAM_ASSIGNMENT_COLORS[0],
|
||||
)
|
||||
RealmAuditLog.objects.create(
|
||||
realm=realm,
|
||||
modified_user=shylock,
|
||||
modified_stream=stream,
|
||||
event_last_message_id=0,
|
||||
event_type=AuditLogEventType.SUBSCRIPTION_CREATED,
|
||||
event_time=installation_time,
|
||||
)
|
||||
|
||||
# Create an attachment in the database for set_storage_space_used_statistic.
|
||||
IMAGE_FILE_PATH = static_path("images/test-images/checkbox.png")
|
||||
with open(IMAGE_FILE_PATH, "rb") as fp:
|
||||
upload_message_attachment_from_request(UploadedFile(fp), shylock)
|
||||
|
||||
FixtureData: TypeAlias = Mapping[str | int | None, list[int]]
|
||||
|
||||
def insert_fixture_data(
|
||||
stat: CountStat,
|
||||
fixture_data: FixtureData,
|
||||
table: type[BaseCount],
|
||||
) -> None:
|
||||
end_times = time_range(
|
||||
last_end_time, last_end_time, stat.frequency, len(next(iter(fixture_data.values())))
|
||||
)
|
||||
if table == InstallationCount:
|
||||
id_args: dict[str, Any] = {}
|
||||
if table == RealmCount:
|
||||
id_args = {"realm": realm}
|
||||
if table == UserCount:
|
||||
id_args = {"realm": realm, "user": shylock}
|
||||
if table == StreamCount:
|
||||
id_args = {"stream": stream, "realm": realm}
|
||||
|
||||
for subgroup, values in fixture_data.items():
|
||||
table._default_manager.bulk_create(
|
||||
table(
|
||||
property=stat.property,
|
||||
subgroup=subgroup,
|
||||
end_time=end_time,
|
||||
value=value,
|
||||
**id_args,
|
||||
)
|
||||
for end_time, value in zip(end_times, values, strict=False)
|
||||
if value != 0
|
||||
)
|
||||
|
||||
stat = COUNT_STATS["1day_actives::day"]
|
||||
realm_data: FixtureData = {
|
||||
None: self.generate_fixture_data(stat, 0.08, 0.02, 3, 0.3, 6, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data: FixtureData = {
|
||||
None: self.generate_fixture_data(stat, 0.8, 0.2, 4, 0.3, 6, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
stat = COUNT_STATS["7day_actives::day"]
|
||||
realm_data = {
|
||||
None: self.generate_fixture_data(stat, 0.2, 0.07, 3, 0.3, 6, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data = {
|
||||
None: self.generate_fixture_data(stat, 2, 0.7, 4, 0.3, 6, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
stat = COUNT_STATS["realm_active_humans::day"]
|
||||
realm_data = {
|
||||
None: self.generate_fixture_data(stat, 0.8, 0.08, 3, 0.5, 3, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data = {
|
||||
None: self.generate_fixture_data(stat, 1, 0.3, 4, 0.5, 3, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
stat = COUNT_STATS["active_users_audit:is_bot:day"]
|
||||
realm_data = {
|
||||
"false": self.generate_fixture_data(stat, 1, 0.2, 3.5, 0.8, 2, partial_sum=True),
|
||||
"true": self.generate_fixture_data(stat, 0.3, 0.05, 3, 0.3, 2, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data = {
|
||||
"false": self.generate_fixture_data(stat, 3, 1, 4, 0.8, 2, partial_sum=True),
|
||||
"true": self.generate_fixture_data(stat, 1, 0.4, 4, 0.8, 2, partial_sum=True),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
stat = COUNT_STATS["messages_sent:is_bot:hour"]
|
||||
user_data: FixtureData = {
|
||||
"false": self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8, holiday_rate=0.1),
|
||||
}
|
||||
insert_fixture_data(stat, user_data, UserCount)
|
||||
realm_data = {
|
||||
"false": self.generate_fixture_data(stat, 35, 15, 6, 0.6, 4),
|
||||
"true": self.generate_fixture_data(stat, 15, 15, 3, 0.4, 2),
|
||||
}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data = {
|
||||
"false": self.generate_fixture_data(stat, 350, 150, 6, 0.6, 4),
|
||||
"true": self.generate_fixture_data(stat, 150, 150, 3, 0.4, 2),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
stat = COUNT_STATS["messages_sent:message_type:day"]
|
||||
user_data = {
|
||||
"public_stream": self.generate_fixture_data(stat, 1.5, 1, 3, 0.6, 8),
|
||||
"private_message": self.generate_fixture_data(stat, 0.5, 0.3, 1, 0.6, 8),
|
||||
"huddle_message": self.generate_fixture_data(stat, 0.2, 0.2, 2, 0.6, 8),
|
||||
}
|
||||
insert_fixture_data(stat, user_data, UserCount)
|
||||
realm_data = {
|
||||
"public_stream": self.generate_fixture_data(stat, 30, 8, 5, 0.6, 4),
|
||||
"private_stream": self.generate_fixture_data(stat, 7, 7, 5, 0.6, 4),
|
||||
"private_message": self.generate_fixture_data(stat, 13, 5, 5, 0.6, 4),
|
||||
"huddle_message": self.generate_fixture_data(stat, 6, 3, 3, 0.6, 4),
|
||||
}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data = {
|
||||
"public_stream": self.generate_fixture_data(stat, 300, 80, 5, 0.6, 4),
|
||||
"private_stream": self.generate_fixture_data(stat, 70, 70, 5, 0.6, 4),
|
||||
"private_message": self.generate_fixture_data(stat, 130, 50, 5, 0.6, 4),
|
||||
"huddle_message": self.generate_fixture_data(stat, 60, 30, 3, 0.6, 4),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
website, _created = Client.objects.get_or_create(name="website")
|
||||
old_desktop, _created = Client.objects.get_or_create(name="desktop app Linux 0.3.7")
|
||||
android, _created = Client.objects.get_or_create(name="ZulipAndroid")
|
||||
iOS, _created = Client.objects.get_or_create(name="ZulipiOS")
|
||||
react_native, _created = Client.objects.get_or_create(name="ZulipMobile")
|
||||
flutter, _created = Client.objects.get_or_create(name="ZulipFlutter")
|
||||
API, _created = Client.objects.get_or_create(name="API: Python")
|
||||
zephyr_mirror, _created = Client.objects.get_or_create(name="zephyr_mirror")
|
||||
unused, _created = Client.objects.get_or_create(name="unused")
|
||||
long_webhook, _created = Client.objects.get_or_create(name="ZulipLooooooooooongNameWebhook")
|
||||
|
||||
stat = COUNT_STATS["messages_sent:client:day"]
|
||||
user_data = {
|
||||
website.id: self.generate_fixture_data(stat, 2, 1, 1.5, 0.6, 8),
|
||||
zephyr_mirror.id: self.generate_fixture_data(stat, 0, 0.3, 1.5, 0.6, 8),
|
||||
}
|
||||
insert_fixture_data(stat, user_data, UserCount)
|
||||
realm_data = {
|
||||
website.id: self.generate_fixture_data(stat, 30, 20, 5, 0.6, 3),
|
||||
old_desktop.id: self.generate_fixture_data(stat, 5, 3, 8, 0.6, 3),
|
||||
android.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
||||
iOS.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
||||
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||
flutter.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||
API.id: self.generate_fixture_data(stat, 5, 5, 5, 0.6, 3),
|
||||
zephyr_mirror.id: self.generate_fixture_data(stat, 1, 1, 3, 0.6, 3),
|
||||
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
|
||||
long_webhook.id: self.generate_fixture_data(stat, 5, 5, 2, 0.6, 3),
|
||||
}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
installation_data = {
|
||||
website.id: self.generate_fixture_data(stat, 300, 200, 5, 0.6, 3),
|
||||
old_desktop.id: self.generate_fixture_data(stat, 50, 30, 8, 0.6, 3),
|
||||
android.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
||||
iOS.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
||||
flutter.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||
react_native.id: self.generate_fixture_data(stat, 5, 5, 10, 0.6, 3),
|
||||
API.id: self.generate_fixture_data(stat, 50, 50, 5, 0.6, 3),
|
||||
zephyr_mirror.id: self.generate_fixture_data(stat, 10, 10, 3, 0.6, 3),
|
||||
unused.id: self.generate_fixture_data(stat, 0, 0, 0, 0, 0),
|
||||
long_webhook.id: self.generate_fixture_data(stat, 50, 50, 2, 0.6, 3),
|
||||
}
|
||||
insert_fixture_data(stat, installation_data, InstallationCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
stat = COUNT_STATS["messages_in_stream:is_bot:day"]
|
||||
realm_data = {
|
||||
"false": self.generate_fixture_data(stat, 30, 5, 6, 0.6, 4),
|
||||
"true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3),
|
||||
}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
stream_data: Mapping[int | str | None, list[int]] = {
|
||||
"false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4),
|
||||
"true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2),
|
||||
}
|
||||
insert_fixture_data(stat, stream_data, StreamCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
stat = COUNT_STATS["messages_read::hour"]
|
||||
user_data = {
|
||||
None: self.generate_fixture_data(stat, 7, 3, 2, 0.6, 8, holiday_rate=0.1),
|
||||
}
|
||||
insert_fixture_data(stat, user_data, UserCount)
|
||||
realm_data = {None: self.generate_fixture_data(stat, 50, 35, 6, 0.6, 4)}
|
||||
insert_fixture_data(stat, realm_data, RealmCount)
|
||||
FillState.objects.create(
|
||||
property=stat.property, end_time=last_end_time, state=FillState.DONE
|
||||
)
|
148
analytics/management/commands/realm_stats.py
Normal file
148
analytics/management/commands/realm_stats.py
Normal file
@@ -0,0 +1,148 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import datetime
|
||||
import pytz
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db.models import Count
|
||||
from zerver.models import UserProfile, Realm, Stream, Message, Recipient, UserActivity, \
|
||||
Subscription, UserMessage
|
||||
|
||||
MOBILE_CLIENT_LIST = ["Android", "ios"]
|
||||
HUMAN_CLIENT_LIST = MOBILE_CLIENT_LIST + ["website"]
|
||||
|
||||
human_messages = Message.objects.filter(sending_client__name__in=HUMAN_CLIENT_LIST)
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Generate statistics on realm activity."
|
||||
|
||||
def active_users(self, realm):
|
||||
# Has been active (on the website, for now) in the last 7 days.
|
||||
activity_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=7)
|
||||
return [activity.user_profile for activity in \
|
||||
UserActivity.objects.filter(user_profile__realm=realm,
|
||||
user_profile__is_active=True,
|
||||
last_visit__gt=activity_cutoff,
|
||||
query="/json/update_pointer",
|
||||
client__name="website")]
|
||||
|
||||
def messages_sent_by(self, user, days_ago):
|
||||
sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago)
|
||||
return human_messages.filter(sender=user, pub_date__gt=sent_time_cutoff).count()
|
||||
|
||||
def total_messages(self, realm, days_ago):
|
||||
sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago)
|
||||
return Message.objects.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()
|
||||
|
||||
def human_messages(self, realm, days_ago):
|
||||
sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago)
|
||||
return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).count()
|
||||
|
||||
def api_messages(self, realm, days_ago):
|
||||
return (self.total_messages(realm, days_ago) - self.human_messages(realm, days_ago))
|
||||
|
||||
def stream_messages(self, realm, days_ago):
|
||||
sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago)
|
||||
return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff,
|
||||
recipient__type=Recipient.STREAM).count()
|
||||
|
||||
def private_messages(self, realm, days_ago):
|
||||
sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago)
|
||||
return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
|
||||
recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.HUDDLE).count()
|
||||
|
||||
def group_private_messages(self, realm, days_ago):
|
||||
sent_time_cutoff = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=days_ago)
|
||||
return human_messages.filter(sender__realm=realm, pub_date__gt=sent_time_cutoff).exclude(
|
||||
recipient__type=Recipient.STREAM).exclude(recipient__type=Recipient.PERSONAL).count()
|
||||
|
||||
def report_percentage(self, numerator, denominator, text):
|
||||
if not denominator:
|
||||
fraction = 0.0
|
||||
else:
|
||||
fraction = numerator / float(denominator)
|
||||
print "%.2f%% of" % (fraction * 100,), text
|
||||
|
||||
def handle(self, *args, **options):
|
||||
if args:
|
||||
try:
|
||||
realms = [Realm.objects.get(domain=domain) for domain in args]
|
||||
except Realm.DoesNotExist, e:
|
||||
print e
|
||||
exit(1)
|
||||
else:
|
||||
realms = Realm.objects.all()
|
||||
|
||||
for realm in realms:
|
||||
print realm.domain
|
||||
|
||||
user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
|
||||
active_users = self.active_users(realm)
|
||||
num_active = len(active_users)
|
||||
|
||||
print "%d active users (%d total)" % (num_active, len(user_profiles))
|
||||
streams = Stream.objects.filter(realm=realm).extra(
|
||||
tables=['zerver_subscription', 'zerver_recipient'],
|
||||
where=['zerver_subscription.recipient_id = zerver_recipient.id',
|
||||
'zerver_recipient.type = 2',
|
||||
'zerver_recipient.type_id = zerver_stream.id',
|
||||
'zerver_subscription.active = true']).annotate(count=Count("name"))
|
||||
print "%d streams" % (streams.count(),)
|
||||
|
||||
for days_ago in (1, 7, 30):
|
||||
print "In last %d days, users sent:" % (days_ago,)
|
||||
sender_quantities = [self.messages_sent_by(user, days_ago) for user in user_profiles]
|
||||
for quantity in sorted(sender_quantities, reverse=True):
|
||||
print quantity,
|
||||
print ""
|
||||
|
||||
print "%d stream messages" % (self.stream_messages(realm, days_ago),)
|
||||
print "%d one-on-one private messages" % (self.private_messages(realm, days_ago),)
|
||||
print "%d messages sent via the API" % (self.api_messages(realm, days_ago),)
|
||||
print "%d group private messages" % (self.group_private_messages(realm, days_ago),)
|
||||
|
||||
num_notifications_enabled = len(filter(lambda x: x.enable_desktop_notifications == True,
|
||||
active_users))
|
||||
self.report_percentage(num_notifications_enabled, num_active,
|
||||
"active users have desktop notifications enabled")
|
||||
|
||||
num_enter_sends = len(filter(lambda x: x.enter_sends, active_users))
|
||||
self.report_percentage(num_enter_sends, num_active,
|
||||
"active users have enter-sends")
|
||||
|
||||
all_message_count = human_messages.filter(sender__realm=realm).count()
|
||||
multi_paragraph_message_count = human_messages.filter(
|
||||
sender__realm=realm, content__contains="\n\n").count()
|
||||
self.report_percentage(multi_paragraph_message_count, all_message_count,
|
||||
"all messages are multi-paragraph")
|
||||
|
||||
# Starred messages
|
||||
starrers = UserMessage.objects.filter(user_profile__in=user_profiles,
|
||||
flags=UserMessage.flags.starred).values(
|
||||
"user_profile").annotate(count=Count("user_profile"))
|
||||
print "%d users have starred %d messages" % (
|
||||
len(starrers), sum([elt["count"] for elt in starrers]))
|
||||
|
||||
active_user_subs = Subscription.objects.filter(
|
||||
user_profile__in=user_profiles, active=True)
|
||||
|
||||
# Streams not in home view
|
||||
non_home_view = active_user_subs.filter(in_home_view=False).values(
|
||||
"user_profile").annotate(count=Count("user_profile"))
|
||||
print "%d users have %d streams not in home view" % (
|
||||
len(non_home_view), sum([elt["count"] for elt in non_home_view]))
|
||||
|
||||
# Code block markup
|
||||
markup_messages = human_messages.filter(
|
||||
sender__realm=realm, content__contains="~~~").values(
|
||||
"sender").annotate(count=Count("sender"))
|
||||
print "%d users have used code block markup on %s messages" % (
|
||||
len(markup_messages), sum([elt["count"] for elt in markup_messages]))
|
||||
|
||||
# Notifications for stream messages
|
||||
notifications = active_user_subs.filter(notifications=True).values(
|
||||
"user_profile").annotate(count=Count("user_profile"))
|
||||
print "%d users receive desktop notifications for %d streams" % (
|
||||
len(notifications), sum([elt["count"] for elt in notifications]))
|
||||
|
||||
print ""
|
36
analytics/management/commands/stream_stats.py
Normal file
36
analytics/management/commands/stream_stats.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db.models import Q
|
||||
from zerver.models import Realm, Stream, Message, Subscription, Recipient
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Generate statistics on the streams for a realm."
|
||||
|
||||
def handle(self, *args, **options):
|
||||
if args:
|
||||
try:
|
||||
realms = [Realm.objects.get(domain=domain) for domain in args]
|
||||
except Realm.DoesNotExist, e:
|
||||
print e
|
||||
exit(1)
|
||||
else:
|
||||
realms = Realm.objects.all()
|
||||
|
||||
for realm in realms:
|
||||
print realm.domain
|
||||
print "------------"
|
||||
print "%25s %15s %10s" % ("stream", "subscribers", "messages")
|
||||
streams = Stream.objects.filter(realm=realm).exclude(Q(name__istartswith="tutorial-"))
|
||||
invite_only_count = 0
|
||||
for stream in streams:
|
||||
if stream.invite_only:
|
||||
invite_only_count += 1
|
||||
continue
|
||||
print "%25s" % (stream.name,),
|
||||
recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id)
|
||||
print "%10d" % (len(Subscription.objects.filter(recipient=recipient, active=True)),),
|
||||
num_messages = len(Message.objects.filter(recipient=recipient))
|
||||
print "%12d" % (num_messages,)
|
||||
print "%d invite-only streams" % (invite_only_count,)
|
||||
print ""
|
@@ -1,99 +0,0 @@
|
||||
import hashlib
|
||||
import time
|
||||
from argparse import ArgumentParser
|
||||
from datetime import timezone
|
||||
from typing import Any
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.dateparse import parse_datetime
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import ALL_COUNT_STATS, logger, process_count_stat
|
||||
from zerver.lib.management import ZulipBaseCommand, abort_cron_during_deploy, abort_unless_locked
|
||||
from zerver.lib.remote_server import send_server_data_to_push_bouncer, should_send_analytics_data
|
||||
from zerver.lib.timestamp import floor_to_hour
|
||||
from zerver.models import Realm
|
||||
|
||||
|
||||
class Command(ZulipBaseCommand):
|
||||
help = """Fills Analytics tables.
|
||||
|
||||
Run as a cron job that runs every hour."""
|
||||
|
||||
@override
|
||||
def add_arguments(self, parser: ArgumentParser) -> None:
|
||||
parser.add_argument(
|
||||
"--time",
|
||||
"-t",
|
||||
help="Update stat tables from current state to --time. Defaults to the current time.",
|
||||
default=timezone_now().isoformat(),
|
||||
)
|
||||
parser.add_argument("--utc", action="store_true", help="Interpret --time in UTC.")
|
||||
parser.add_argument(
|
||||
"--stat", "-s", help="CountStat to process. If omitted, all stats are processed."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--verbose", action="store_true", help="Print timing information to stdout."
|
||||
)
|
||||
|
||||
@override
|
||||
@abort_cron_during_deploy
|
||||
@abort_unless_locked
|
||||
def handle(self, *args: Any, **options: Any) -> None:
|
||||
self.run_update_analytics_counts(options)
|
||||
|
||||
def run_update_analytics_counts(self, options: dict[str, Any]) -> None:
|
||||
# installation_epoch relies on there being at least one realm; we
|
||||
# shouldn't run the analytics code if that condition isn't satisfied
|
||||
if not Realm.objects.exists():
|
||||
logger.info("No realms, stopping update_analytics_counts")
|
||||
return
|
||||
|
||||
fill_to_time = parse_datetime(options["time"])
|
||||
assert fill_to_time is not None
|
||||
if options["utc"]:
|
||||
fill_to_time = fill_to_time.replace(tzinfo=timezone.utc)
|
||||
if fill_to_time.tzinfo is None:
|
||||
raise ValueError(
|
||||
"--time must be time-zone-aware. Maybe you meant to use the --utc option?"
|
||||
)
|
||||
|
||||
fill_to_time = floor_to_hour(fill_to_time.astimezone(timezone.utc))
|
||||
|
||||
if options["stat"] is not None:
|
||||
stats = [ALL_COUNT_STATS[options["stat"]]]
|
||||
else:
|
||||
stats = list(ALL_COUNT_STATS.values())
|
||||
|
||||
logger.info("Starting updating analytics counts through %s", fill_to_time)
|
||||
if options["verbose"]:
|
||||
start = time.time()
|
||||
last = start
|
||||
|
||||
for stat in stats:
|
||||
process_count_stat(stat, fill_to_time)
|
||||
if options["verbose"]:
|
||||
print(f"Updated {stat.property} in {time.time() - last:.3f}s")
|
||||
last = time.time()
|
||||
|
||||
if options["verbose"]:
|
||||
print(
|
||||
f"Finished updating analytics counts through {fill_to_time} in {time.time() - start:.3f}s"
|
||||
)
|
||||
logger.info("Finished updating analytics counts through %s", fill_to_time)
|
||||
|
||||
if should_send_analytics_data():
|
||||
# Based on the specific value of the setting, the exact details to send
|
||||
# will be decided. However, we proceed just based on this not being falsey.
|
||||
|
||||
# Skew 0-10 minutes based on a hash of settings.ZULIP_ORG_ID, so
|
||||
# that each server will report in at a somewhat consistent time.
|
||||
assert settings.ZULIP_ORG_ID
|
||||
delay = int.from_bytes(
|
||||
hashlib.sha256(settings.ZULIP_ORG_ID.encode()).digest(), byteorder="big"
|
||||
) % (60 * 10)
|
||||
logger.info("Sleeping %d seconds before reporting...", delay)
|
||||
time.sleep(delay)
|
||||
|
||||
send_server_data_to_push_bouncer(consider_usage_statistics=True, raise_on_error=True)
|
37
analytics/management/commands/user_stats.py
Normal file
37
analytics/management/commands/user_stats.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import datetime
|
||||
import pytz
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from zerver.models import UserProfile, Realm, Stream, Message
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Generate statistics on user activity."
|
||||
|
||||
def messages_sent_by(self, user, week):
|
||||
start = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=(week + 1)*7)
|
||||
end = datetime.datetime.now(tz=pytz.utc) - datetime.timedelta(days=week*7)
|
||||
return Message.objects.filter(sender=user, pub_date__gt=start, pub_date__lte=end).count()
|
||||
|
||||
def handle(self, *args, **options):
|
||||
if args:
|
||||
try:
|
||||
realms = [Realm.objects.get(domain=domain) for domain in args]
|
||||
except Realm.DoesNotExist, e:
|
||||
print e
|
||||
exit(1)
|
||||
else:
|
||||
realms = Realm.objects.all()
|
||||
|
||||
for realm in realms:
|
||||
print realm.domain
|
||||
user_profiles = UserProfile.objects.filter(realm=realm, is_active=True)
|
||||
print "%d users" % (len(user_profiles),)
|
||||
print "%d streams" % (len(Stream.objects.filter(realm=realm)),)
|
||||
|
||||
for user_profile in user_profiles:
|
||||
print "%35s" % (user_profile.email,),
|
||||
for week in range(10):
|
||||
print "%5d" % (self.messages_sent_by(user_profile, week)),
|
||||
print ""
|
@@ -1,208 +0,0 @@
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("zerver", "0030_realm_org_type"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Anomaly",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
("info", models.CharField(max_length=1000)),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="HuddleCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"huddle",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient"
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=40)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("interval", models.CharField(max_length=20)),
|
||||
("value", models.BigIntegerField()),
|
||||
(
|
||||
"anomaly",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="analytics.Anomaly",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="InstallationCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=40)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("interval", models.CharField(max_length=20)),
|
||||
("value", models.BigIntegerField()),
|
||||
(
|
||||
"anomaly",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="analytics.Anomaly",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="RealmCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=40)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("interval", models.CharField(max_length=20)),
|
||||
("value", models.BigIntegerField()),
|
||||
(
|
||||
"anomaly",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="analytics.Anomaly",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="StreamCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
|
||||
),
|
||||
),
|
||||
(
|
||||
"stream",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Stream"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=40)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("interval", models.CharField(max_length=20)),
|
||||
("value", models.BigIntegerField()),
|
||||
(
|
||||
"anomaly",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="analytics.Anomaly",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="UserCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=40)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("interval", models.CharField(max_length=20)),
|
||||
("value", models.BigIntegerField()),
|
||||
(
|
||||
"anomaly",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="analytics.Anomaly",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="usercount",
|
||||
unique_together={("user", "property", "end_time", "interval")},
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="streamcount",
|
||||
unique_together={("stream", "property", "end_time", "interval")},
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="realmcount",
|
||||
unique_together={("realm", "property", "end_time", "interval")},
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="installationcount",
|
||||
unique_together={("property", "end_time", "interval")},
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="huddlecount",
|
||||
unique_together={("huddle", "property", "end_time", "interval")},
|
||||
),
|
||||
]
|
@@ -1,224 +0,0 @@
|
||||
# Generated by Django 5.0.7 on 2024-08-13 20:16
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
replaces = [
|
||||
("analytics", "0001_initial"),
|
||||
("analytics", "0002_remove_huddlecount"),
|
||||
("analytics", "0003_fillstate"),
|
||||
("analytics", "0004_add_subgroup"),
|
||||
("analytics", "0005_alter_field_size"),
|
||||
("analytics", "0006_add_subgroup_to_unique_constraints"),
|
||||
("analytics", "0007_remove_interval"),
|
||||
("analytics", "0008_add_count_indexes"),
|
||||
("analytics", "0009_remove_messages_to_stream_stat"),
|
||||
("analytics", "0010_clear_messages_sent_values"),
|
||||
("analytics", "0011_clear_analytics_tables"),
|
||||
("analytics", "0012_add_on_delete"),
|
||||
("analytics", "0013_remove_anomaly"),
|
||||
("analytics", "0014_remove_fillstate_last_modified"),
|
||||
("analytics", "0015_clear_duplicate_counts"),
|
||||
("analytics", "0016_unique_constraint_when_subgroup_null"),
|
||||
("analytics", "0017_regenerate_partial_indexes"),
|
||||
("analytics", "0018_remove_usercount_active_users_audit"),
|
||||
("analytics", "0019_remove_unused_counts"),
|
||||
("analytics", "0020_alter_installationcount_id_alter_realmcount_id_and_more"),
|
||||
("analytics", "0021_alter_fillstate_id"),
|
||||
]
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
# Needed for foreign keys to core models like Realm.
|
||||
("zerver", "0001_initial"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="InstallationCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=32)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("value", models.BigIntegerField()),
|
||||
("subgroup", models.CharField(max_length=16, null=True)),
|
||||
],
|
||||
options={
|
||||
"unique_together": set(),
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", False)),
|
||||
fields=("property", "subgroup", "end_time"),
|
||||
name="unique_installation_count",
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", True)),
|
||||
fields=("property", "end_time"),
|
||||
name="unique_installation_count_null_subgroup",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="RealmCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.realm"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=32)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("value", models.BigIntegerField()),
|
||||
("subgroup", models.CharField(max_length=16, null=True)),
|
||||
],
|
||||
options={
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["property", "end_time"],
|
||||
name="analytics_realmcount_property_end_time_3b60396b_idx",
|
||||
)
|
||||
],
|
||||
"unique_together": set(),
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", False)),
|
||||
fields=("realm", "property", "subgroup", "end_time"),
|
||||
name="unique_realm_count",
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", True)),
|
||||
fields=("realm", "property", "end_time"),
|
||||
name="unique_realm_count_null_subgroup",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="StreamCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.realm"
|
||||
),
|
||||
),
|
||||
(
|
||||
"stream",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.stream"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=32)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("value", models.BigIntegerField()),
|
||||
("subgroup", models.CharField(max_length=16, null=True)),
|
||||
],
|
||||
options={
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_streamcount_property_realm_id_end_time_155ae930_idx",
|
||||
)
|
||||
],
|
||||
"unique_together": set(),
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", False)),
|
||||
fields=("stream", "property", "subgroup", "end_time"),
|
||||
name="unique_stream_count",
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", True)),
|
||||
fields=("stream", "property", "end_time"),
|
||||
name="unique_stream_count_null_subgroup",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="UserCount",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
(
|
||||
"realm",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to="zerver.realm"
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=32)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("value", models.BigIntegerField()),
|
||||
("subgroup", models.CharField(max_length=16, null=True)),
|
||||
],
|
||||
options={
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_usercount_property_realm_id_end_time_591dbec1_idx",
|
||||
)
|
||||
],
|
||||
"unique_together": set(),
|
||||
"constraints": [
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", False)),
|
||||
fields=("user", "property", "subgroup", "end_time"),
|
||||
name="unique_user_count",
|
||||
),
|
||||
models.UniqueConstraint(
|
||||
condition=models.Q(("subgroup__isnull", True)),
|
||||
fields=("user", "property", "end_time"),
|
||||
name="unique_user_count_null_subgroup",
|
||||
),
|
||||
],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="FillState",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
("property", models.CharField(max_length=40, unique=True)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("state", models.PositiveSmallIntegerField()),
|
||||
],
|
||||
),
|
||||
]
|
@@ -1,29 +0,0 @@
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="huddlecount",
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="huddlecount",
|
||||
name="anomaly",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="huddlecount",
|
||||
name="huddle",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="huddlecount",
|
||||
name="user",
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name="HuddleCount",
|
||||
),
|
||||
]
|
@@ -1,26 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0002_remove_huddlecount"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="FillState",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.AutoField(
|
||||
verbose_name="ID", serialize=False, auto_created=True, primary_key=True
|
||||
),
|
||||
),
|
||||
("property", models.CharField(unique=True, max_length=40)),
|
||||
("end_time", models.DateTimeField()),
|
||||
("state", models.PositiveSmallIntegerField()),
|
||||
("last_modified", models.DateTimeField(auto_now=True)),
|
||||
],
|
||||
bases=(models.Model,),
|
||||
),
|
||||
]
|
@@ -1,30 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0003_fillstate"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="installationcount",
|
||||
name="subgroup",
|
||||
field=models.CharField(max_length=16, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="realmcount",
|
||||
name="subgroup",
|
||||
field=models.CharField(max_length=16, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="streamcount",
|
||||
name="subgroup",
|
||||
field=models.CharField(max_length=16, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="usercount",
|
||||
name="subgroup",
|
||||
field=models.CharField(max_length=16, null=True),
|
||||
),
|
||||
]
|
@@ -1,50 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0004_add_subgroup"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="installationcount",
|
||||
name="interval",
|
||||
field=models.CharField(max_length=8),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="installationcount",
|
||||
name="property",
|
||||
field=models.CharField(max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="realmcount",
|
||||
name="interval",
|
||||
field=models.CharField(max_length=8),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="realmcount",
|
||||
name="property",
|
||||
field=models.CharField(max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="streamcount",
|
||||
name="interval",
|
||||
field=models.CharField(max_length=8),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="streamcount",
|
||||
name="property",
|
||||
field=models.CharField(max_length=32),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="usercount",
|
||||
name="interval",
|
||||
field=models.CharField(max_length=8),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="usercount",
|
||||
name="property",
|
||||
field=models.CharField(max_length=32),
|
||||
),
|
||||
]
|
@@ -1,26 +0,0 @@
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0005_alter_field_size"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="installationcount",
|
||||
unique_together={("property", "subgroup", "end_time", "interval")},
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="realmcount",
|
||||
unique_together={("realm", "property", "subgroup", "end_time", "interval")},
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="streamcount",
|
||||
unique_together={("stream", "property", "subgroup", "end_time", "interval")},
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="usercount",
|
||||
unique_together={("user", "property", "subgroup", "end_time", "interval")},
|
||||
),
|
||||
]
|
@@ -1,43 +0,0 @@
|
||||
# Generated by Django 1.10.4 on 2017-01-16 20:50
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0006_add_subgroup_to_unique_constraints"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="installationcount",
|
||||
unique_together={("property", "subgroup", "end_time")},
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="installationcount",
|
||||
name="interval",
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="realmcount",
|
||||
unique_together={("realm", "property", "subgroup", "end_time")},
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="realmcount",
|
||||
name="interval",
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="streamcount",
|
||||
unique_together={("stream", "property", "subgroup", "end_time")},
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="streamcount",
|
||||
name="interval",
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="usercount",
|
||||
unique_together={("user", "property", "subgroup", "end_time")},
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="usercount",
|
||||
name="interval",
|
||||
),
|
||||
]
|
@@ -1,33 +0,0 @@
|
||||
# Generated by Django 1.10.5 on 2017-02-01 22:28
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("zerver", "0050_userprofile_avatar_version"),
|
||||
("analytics", "0007_remove_interval"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddIndex(
|
||||
model_name="realmcount",
|
||||
index=models.Index(
|
||||
fields=["property", "end_time"],
|
||||
name="analytics_realmcount_property_end_time_3b60396b_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="streamcount",
|
||||
index=models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_streamcount_property_realm_id_end_time_155ae930_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="usercount",
|
||||
index=models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_usercount_property_realm_id_end_time_591dbec1_idx",
|
||||
),
|
||||
),
|
||||
]
|
@@ -1,30 +0,0 @@
|
||||
from django.db import migrations
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.migrations.state import StateApps
|
||||
|
||||
|
||||
def delete_messages_sent_to_stream_stat(
|
||||
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
|
||||
) -> None:
|
||||
UserCount = apps.get_model("analytics", "UserCount")
|
||||
StreamCount = apps.get_model("analytics", "StreamCount")
|
||||
RealmCount = apps.get_model("analytics", "RealmCount")
|
||||
InstallationCount = apps.get_model("analytics", "InstallationCount")
|
||||
FillState = apps.get_model("analytics", "FillState")
|
||||
|
||||
property = "messages_sent_to_stream:is_bot"
|
||||
UserCount.objects.filter(property=property).delete()
|
||||
StreamCount.objects.filter(property=property).delete()
|
||||
RealmCount.objects.filter(property=property).delete()
|
||||
InstallationCount.objects.filter(property=property).delete()
|
||||
FillState.objects.filter(property=property).delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0008_add_count_indexes"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(delete_messages_sent_to_stream_stat, elidable=True),
|
||||
]
|
@@ -1,28 +0,0 @@
|
||||
from django.db import migrations
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.migrations.state import StateApps
|
||||
|
||||
|
||||
def clear_message_sent_by_message_type_values(
|
||||
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
|
||||
) -> None:
|
||||
UserCount = apps.get_model("analytics", "UserCount")
|
||||
StreamCount = apps.get_model("analytics", "StreamCount")
|
||||
RealmCount = apps.get_model("analytics", "RealmCount")
|
||||
InstallationCount = apps.get_model("analytics", "InstallationCount")
|
||||
FillState = apps.get_model("analytics", "FillState")
|
||||
|
||||
property = "messages_sent:message_type:day"
|
||||
UserCount.objects.filter(property=property).delete()
|
||||
StreamCount.objects.filter(property=property).delete()
|
||||
RealmCount.objects.filter(property=property).delete()
|
||||
InstallationCount.objects.filter(property=property).delete()
|
||||
FillState.objects.filter(property=property).delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [("analytics", "0009_remove_messages_to_stream_stat")]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(clear_message_sent_by_message_type_values, elidable=True),
|
||||
]
|
@@ -1,27 +0,0 @@
|
||||
from django.db import migrations
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.migrations.state import StateApps
|
||||
|
||||
|
||||
def clear_analytics_tables(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
|
||||
UserCount = apps.get_model("analytics", "UserCount")
|
||||
StreamCount = apps.get_model("analytics", "StreamCount")
|
||||
RealmCount = apps.get_model("analytics", "RealmCount")
|
||||
InstallationCount = apps.get_model("analytics", "InstallationCount")
|
||||
FillState = apps.get_model("analytics", "FillState")
|
||||
|
||||
UserCount.objects.all().delete()
|
||||
StreamCount.objects.all().delete()
|
||||
RealmCount.objects.all().delete()
|
||||
InstallationCount.objects.all().delete()
|
||||
FillState.objects.all().delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0010_clear_messages_sent_values"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(clear_analytics_tables, elidable=True),
|
||||
]
|
@@ -1,41 +0,0 @@
|
||||
# Generated by Django 1.11.6 on 2018-01-29 08:14
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0011_clear_analytics_tables"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="installationcount",
|
||||
name="anomaly",
|
||||
field=models.ForeignKey(
|
||||
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="realmcount",
|
||||
name="anomaly",
|
||||
field=models.ForeignKey(
|
||||
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="streamcount",
|
||||
name="anomaly",
|
||||
field=models.ForeignKey(
|
||||
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="usercount",
|
||||
name="anomaly",
|
||||
field=models.ForeignKey(
|
||||
null=True, on_delete=django.db.models.deletion.SET_NULL, to="analytics.Anomaly"
|
||||
),
|
||||
),
|
||||
]
|
@@ -1,31 +0,0 @@
|
||||
# Generated by Django 1.11.18 on 2019-02-02 02:47
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0012_add_on_delete"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="installationcount",
|
||||
name="anomaly",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="realmcount",
|
||||
name="anomaly",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="streamcount",
|
||||
name="anomaly",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="usercount",
|
||||
name="anomaly",
|
||||
),
|
||||
migrations.DeleteModel(
|
||||
name="Anomaly",
|
||||
),
|
||||
]
|
@@ -1,16 +0,0 @@
|
||||
# Generated by Django 1.11.26 on 2020-01-27 04:32
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0013_remove_anomaly"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RemoveField(
|
||||
model_name="fillstate",
|
||||
name="last_modified",
|
||||
),
|
||||
]
|
@@ -1,66 +0,0 @@
|
||||
from django.db import migrations
|
||||
from django.db.backends.base.schema import BaseDatabaseSchemaEditor
|
||||
from django.db.migrations.state import StateApps
|
||||
from django.db.models import Count, Sum
|
||||
|
||||
|
||||
def clear_duplicate_counts(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
|
||||
"""This is a preparatory migration for our Analytics tables.
|
||||
|
||||
The backstory is that Django's unique_together indexes do not properly
|
||||
handle the subgroup=None corner case (allowing duplicate rows that have a
|
||||
subgroup of None), which meant that in race conditions, rather than updating
|
||||
an existing row for the property/(realm, stream, user)/time with subgroup=None, Django would
|
||||
create a duplicate row.
|
||||
|
||||
In the next migration, we'll add a proper constraint to fix this bug, but
|
||||
we need to fix any existing problematic rows before we can add that constraint.
|
||||
|
||||
We fix this in an appropriate fashion for each type of CountStat object; mainly
|
||||
this means deleting the extra rows, but for LoggingCountStat objects, we need to
|
||||
additionally combine the sums.
|
||||
"""
|
||||
count_tables = dict(
|
||||
realm=apps.get_model("analytics", "RealmCount"),
|
||||
user=apps.get_model("analytics", "UserCount"),
|
||||
stream=apps.get_model("analytics", "StreamCount"),
|
||||
installation=apps.get_model("analytics", "InstallationCount"),
|
||||
)
|
||||
|
||||
for name, count_table in count_tables.items():
|
||||
value = [name, "property", "end_time"]
|
||||
if name == "installation":
|
||||
value = ["property", "end_time"]
|
||||
counts = (
|
||||
count_table.objects.filter(subgroup=None)
|
||||
.values(*value)
|
||||
.annotate(Count("id"), Sum("value"))
|
||||
.filter(id__count__gt=1)
|
||||
)
|
||||
|
||||
for count in counts:
|
||||
count.pop("id__count")
|
||||
total_value = count.pop("value__sum")
|
||||
duplicate_counts = list(count_table.objects.filter(**count))
|
||||
first_count = duplicate_counts[0]
|
||||
if count["property"] in ["invites_sent::day", "active_users_log:is_bot:day"]:
|
||||
# For LoggingCountStat objects, the right fix is to combine the totals;
|
||||
# for other CountStat objects, we expect the duplicates to have the same value.
|
||||
# And so all we need to do is delete them.
|
||||
first_count.value = total_value
|
||||
first_count.save()
|
||||
to_cleanup = duplicate_counts[1:]
|
||||
for duplicate_count in to_cleanup:
|
||||
duplicate_count.delete()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0014_remove_fillstate_last_modified"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
clear_duplicate_counts, reverse_code=migrations.RunPython.noop, elidable=True
|
||||
),
|
||||
]
|
@@ -1,92 +0,0 @@
|
||||
# Generated by Django 2.2.10 on 2020-02-29 19:40
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0015_clear_duplicate_counts"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterUniqueTogether(
|
||||
name="installationcount",
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="realmcount",
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="streamcount",
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="usercount",
|
||||
unique_together=set(),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="installationcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("property", "subgroup", "end_time"),
|
||||
name="unique_installation_count",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="installationcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("property", "end_time"),
|
||||
name="unique_installation_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="realmcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("realm", "property", "subgroup", "end_time"),
|
||||
name="unique_realm_count",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="realmcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("realm", "property", "end_time"),
|
||||
name="unique_realm_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="streamcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("stream", "property", "subgroup", "end_time"),
|
||||
name="unique_stream_count",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="streamcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("stream", "property", "end_time"),
|
||||
name="unique_stream_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="usercount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("user", "property", "subgroup", "end_time"),
|
||||
name="unique_user_count",
|
||||
),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="usercount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("user", "property", "end_time"),
|
||||
name="unique_user_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
]
|
@@ -1,114 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0016_unique_constraint_when_subgroup_null"),
|
||||
]
|
||||
|
||||
# If the server was installed between 7.0 and 7.4 (or main between
|
||||
# 2c20028aa451 and 7807bff52635), it contains indexes which (when
|
||||
# running 7.5 or 7807bff52635 or higher) are never used, because
|
||||
# they contain an improper cast
|
||||
# (https://code.djangoproject.com/ticket/34840).
|
||||
#
|
||||
# We regenerate the indexes here, by dropping and re-creating
|
||||
# them, so that we know that they are properly formed.
|
||||
operations = [
|
||||
migrations.RemoveConstraint(
|
||||
model_name="installationcount",
|
||||
name="unique_installation_count",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="installationcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("property", "subgroup", "end_time"),
|
||||
name="unique_installation_count",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="installationcount",
|
||||
name="unique_installation_count_null_subgroup",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="installationcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("property", "end_time"),
|
||||
name="unique_installation_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="realmcount",
|
||||
name="unique_realm_count",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="realmcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("realm", "property", "subgroup", "end_time"),
|
||||
name="unique_realm_count",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="realmcount",
|
||||
name="unique_realm_count_null_subgroup",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="realmcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("realm", "property", "end_time"),
|
||||
name="unique_realm_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="streamcount",
|
||||
name="unique_stream_count",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="streamcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("stream", "property", "subgroup", "end_time"),
|
||||
name="unique_stream_count",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="streamcount",
|
||||
name="unique_stream_count_null_subgroup",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="streamcount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("stream", "property", "end_time"),
|
||||
name="unique_stream_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="usercount",
|
||||
name="unique_user_count",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="usercount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=False),
|
||||
fields=("user", "property", "subgroup", "end_time"),
|
||||
name="unique_user_count",
|
||||
),
|
||||
),
|
||||
migrations.RemoveConstraint(
|
||||
model_name="usercount",
|
||||
name="unique_user_count_null_subgroup",
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="usercount",
|
||||
constraint=models.UniqueConstraint(
|
||||
condition=models.Q(subgroup__isnull=True),
|
||||
fields=("user", "property", "end_time"),
|
||||
name="unique_user_count_null_subgroup",
|
||||
),
|
||||
),
|
||||
]
|
@@ -1,16 +0,0 @@
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
elidable = True
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0017_regenerate_partial_indexes"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunSQL(
|
||||
"DELETE FROM analytics_usercount WHERE property = 'active_users_audit:is_bot:day'",
|
||||
elidable=True,
|
||||
)
|
||||
]
|
@@ -1,27 +0,0 @@
|
||||
from django.db import migrations
|
||||
|
||||
REMOVED_COUNTS = (
|
||||
"active_users_log:is_bot:day",
|
||||
"active_users:is_bot:day",
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
elidable = True
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0018_remove_usercount_active_users_audit"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunSQL(
|
||||
[
|
||||
("DELETE FROM analytics_realmcount WHERE property IN %s", (REMOVED_COUNTS,)),
|
||||
(
|
||||
"DELETE FROM analytics_installationcount WHERE property IN %s",
|
||||
(REMOVED_COUNTS,),
|
||||
),
|
||||
],
|
||||
elidable=True,
|
||||
)
|
||||
]
|
@@ -1,40 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
atomic = False
|
||||
|
||||
dependencies = [
|
||||
("analytics", "0019_remove_unused_counts"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="installationcount",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="realmcount",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="streamcount",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="usercount",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
]
|
@@ -1,17 +0,0 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("analytics", "0020_alter_installationcount_id_alter_realmcount_id_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="fillstate",
|
||||
name="id",
|
||||
field=models.BigAutoField(
|
||||
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
|
||||
),
|
||||
),
|
||||
]
|
@@ -1,158 +0,0 @@
|
||||
from datetime import datetime
|
||||
|
||||
from django.db import models
|
||||
from django.db.models import Q, UniqueConstraint
|
||||
from typing_extensions import override
|
||||
|
||||
from zerver.lib.timestamp import floor_to_day
|
||||
from zerver.models import Realm, Stream, UserProfile
|
||||
|
||||
|
||||
class FillState(models.Model):
|
||||
property = models.CharField(max_length=40, unique=True)
|
||||
end_time = models.DateTimeField()
|
||||
|
||||
# Valid states are {DONE, STARTED}
|
||||
DONE = 1
|
||||
STARTED = 2
|
||||
state = models.PositiveSmallIntegerField()
|
||||
|
||||
@override
|
||||
def __str__(self) -> str:
|
||||
return f"{self.property} {self.end_time} {self.state}"
|
||||
|
||||
|
||||
# The earliest/starting end_time in FillState
|
||||
# We assume there is at least one realm
|
||||
def installation_epoch() -> datetime:
|
||||
earliest_realm_creation = Realm.objects.aggregate(models.Min("date_created"))[
|
||||
"date_created__min"
|
||||
]
|
||||
return floor_to_day(earliest_realm_creation)
|
||||
|
||||
|
||||
class BaseCount(models.Model):
|
||||
# Note: When inheriting from BaseCount, you may want to rearrange
|
||||
# the order of the columns in the migration to make sure they
|
||||
# match how you'd like the table to be arranged.
|
||||
property = models.CharField(max_length=32)
|
||||
subgroup = models.CharField(max_length=16, null=True)
|
||||
end_time = models.DateTimeField()
|
||||
value = models.BigIntegerField()
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
|
||||
class InstallationCount(BaseCount):
|
||||
class Meta:
|
||||
# Handles invalid duplicate InstallationCount data
|
||||
constraints = [
|
||||
UniqueConstraint(
|
||||
fields=["property", "subgroup", "end_time"],
|
||||
condition=Q(subgroup__isnull=False),
|
||||
name="unique_installation_count",
|
||||
),
|
||||
UniqueConstraint(
|
||||
fields=["property", "end_time"],
|
||||
condition=Q(subgroup__isnull=True),
|
||||
name="unique_installation_count_null_subgroup",
|
||||
),
|
||||
]
|
||||
|
||||
@override
|
||||
def __str__(self) -> str:
|
||||
return f"{self.property} {self.subgroup} {self.value}"
|
||||
|
||||
|
||||
class RealmCount(BaseCount):
|
||||
realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
|
||||
|
||||
class Meta:
|
||||
# Handles invalid duplicate RealmCount data
|
||||
constraints = [
|
||||
UniqueConstraint(
|
||||
fields=["realm", "property", "subgroup", "end_time"],
|
||||
condition=Q(subgroup__isnull=False),
|
||||
name="unique_realm_count",
|
||||
),
|
||||
UniqueConstraint(
|
||||
fields=["realm", "property", "end_time"],
|
||||
condition=Q(subgroup__isnull=True),
|
||||
name="unique_realm_count_null_subgroup",
|
||||
),
|
||||
]
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["property", "end_time"],
|
||||
name="analytics_realmcount_property_end_time_3b60396b_idx",
|
||||
)
|
||||
]
|
||||
|
||||
@override
|
||||
def __str__(self) -> str:
|
||||
return f"{self.realm!r} {self.property} {self.subgroup} {self.value}"
|
||||
|
||||
|
||||
class UserCount(BaseCount):
|
||||
user = models.ForeignKey(UserProfile, on_delete=models.CASCADE)
|
||||
realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
|
||||
|
||||
class Meta:
|
||||
# Handles invalid duplicate UserCount data
|
||||
constraints = [
|
||||
UniqueConstraint(
|
||||
fields=["user", "property", "subgroup", "end_time"],
|
||||
condition=Q(subgroup__isnull=False),
|
||||
name="unique_user_count",
|
||||
),
|
||||
UniqueConstraint(
|
||||
fields=["user", "property", "end_time"],
|
||||
condition=Q(subgroup__isnull=True),
|
||||
name="unique_user_count_null_subgroup",
|
||||
),
|
||||
]
|
||||
# This index dramatically improves the performance of
|
||||
# aggregating from users to realms
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_usercount_property_realm_id_end_time_591dbec1_idx",
|
||||
)
|
||||
]
|
||||
|
||||
@override
|
||||
def __str__(self) -> str:
|
||||
return f"{self.user!r} {self.property} {self.subgroup} {self.value}"
|
||||
|
||||
|
||||
class StreamCount(BaseCount):
|
||||
stream = models.ForeignKey(Stream, on_delete=models.CASCADE)
|
||||
realm = models.ForeignKey(Realm, on_delete=models.CASCADE)
|
||||
|
||||
class Meta:
|
||||
# Handles invalid duplicate StreamCount data
|
||||
constraints = [
|
||||
UniqueConstraint(
|
||||
fields=["stream", "property", "subgroup", "end_time"],
|
||||
condition=Q(subgroup__isnull=False),
|
||||
name="unique_stream_count",
|
||||
),
|
||||
UniqueConstraint(
|
||||
fields=["stream", "property", "end_time"],
|
||||
condition=Q(subgroup__isnull=True),
|
||||
name="unique_stream_count_null_subgroup",
|
||||
),
|
||||
]
|
||||
# This index dramatically improves the performance of
|
||||
# aggregating from streams to realms
|
||||
indexes = [
|
||||
models.Index(
|
||||
fields=["property", "realm", "end_time"],
|
||||
name="analytics_streamcount_property_realm_id_end_time_155ae930_idx",
|
||||
)
|
||||
]
|
||||
|
||||
@override
|
||||
def __str__(self) -> str:
|
||||
return f"{self.stream!r} {self.property} {self.subgroup} {self.value} {self.id}"
|
File diff suppressed because it is too large
Load Diff
@@ -1,40 +0,0 @@
|
||||
from analytics.lib.counts import CountStat
|
||||
from analytics.lib.fixtures import generate_time_series_data
|
||||
from zerver.lib.test_classes import ZulipTestCase
|
||||
|
||||
|
||||
# A very light test suite; the code being tested is not run in production.
|
||||
class TestFixtures(ZulipTestCase):
|
||||
def test_deterministic_settings(self) -> None:
|
||||
# test basic business_hour / non_business_hour calculation
|
||||
# test we get an array of the right length with frequency=CountStat.DAY
|
||||
data = generate_time_series_data(
|
||||
days=7, business_hours_base=20, non_business_hours_base=15, spikiness=0
|
||||
)
|
||||
self.assertEqual(data, [400, 400, 400, 400, 400, 360, 360])
|
||||
|
||||
data = generate_time_series_data(
|
||||
days=1,
|
||||
business_hours_base=2000,
|
||||
non_business_hours_base=1500,
|
||||
growth=2,
|
||||
spikiness=0,
|
||||
frequency=CountStat.HOUR,
|
||||
)
|
||||
# test we get an array of the right length with frequency=CountStat.HOUR
|
||||
self.assert_length(data, 24)
|
||||
# test that growth doesn't affect the first data point
|
||||
self.assertEqual(data[0], 2000)
|
||||
# test that the last data point is growth times what it otherwise would be
|
||||
self.assertEqual(data[-1], 1500 * 2)
|
||||
|
||||
# test autocorrelation == 1, since that's the easiest value to test
|
||||
data = generate_time_series_data(
|
||||
days=1,
|
||||
business_hours_base=2000,
|
||||
non_business_hours_base=2000,
|
||||
autocorrelation=1,
|
||||
frequency=CountStat.HOUR,
|
||||
)
|
||||
self.assertEqual(data[0], data[1])
|
||||
self.assertEqual(data[0], data[-1])
|
@@ -1,689 +0,0 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from typing_extensions import override
|
||||
|
||||
from analytics.lib.counts import COUNT_STATS, CountStat
|
||||
from analytics.lib.time_utils import time_range
|
||||
from analytics.models import FillState, RealmCount, StreamCount, UserCount
|
||||
from analytics.views.stats import rewrite_client_arrays, sort_by_totals, sort_client_labels
|
||||
from zerver.lib.test_classes import ZulipTestCase
|
||||
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, datetime_to_timestamp
|
||||
from zerver.models import Client
|
||||
from zerver.models.realms import get_realm
|
||||
|
||||
|
||||
class TestStatsEndpoint(ZulipTestCase):
|
||||
def test_stats(self) -> None:
|
||||
self.user = self.example_user("hamlet")
|
||||
self.login_user(self.user)
|
||||
result = self.client_get("/stats")
|
||||
self.assertEqual(result.status_code, 200)
|
||||
# Check that we get something back
|
||||
self.assert_in_response("Zulip analytics for", result)
|
||||
|
||||
def test_guest_user_cant_access_stats(self) -> None:
|
||||
self.user = self.example_user("polonius")
|
||||
self.login_user(self.user)
|
||||
result = self.client_get("/stats")
|
||||
self.assert_json_error(result, "Not allowed for guest users", 400)
|
||||
|
||||
result = self.client_get("/json/analytics/chart_data")
|
||||
self.assert_json_error(result, "Not allowed for guest users", 400)
|
||||
|
||||
def test_stats_for_realm(self) -> None:
|
||||
user = self.example_user("hamlet")
|
||||
self.login_user(user)
|
||||
|
||||
result = self.client_get("/stats/realm/zulip/")
|
||||
self.assertEqual(result.status_code, 302)
|
||||
|
||||
result = self.client_get("/stats/realm/not_existing_realm/")
|
||||
self.assertEqual(result.status_code, 302)
|
||||
|
||||
user = self.example_user("hamlet")
|
||||
user.is_staff = True
|
||||
user.save(update_fields=["is_staff"])
|
||||
|
||||
result = self.client_get("/stats/realm/not_existing_realm/")
|
||||
self.assertEqual(result.status_code, 404)
|
||||
|
||||
result = self.client_get("/stats/realm/zulip/")
|
||||
self.assertEqual(result.status_code, 200)
|
||||
self.assert_in_response("Zulip analytics for", result)
|
||||
|
||||
def test_stats_for_installation(self) -> None:
|
||||
user = self.example_user("hamlet")
|
||||
self.login_user(user)
|
||||
|
||||
result = self.client_get("/stats/installation")
|
||||
self.assertEqual(result.status_code, 302)
|
||||
|
||||
user = self.example_user("hamlet")
|
||||
user.is_staff = True
|
||||
user.save(update_fields=["is_staff"])
|
||||
|
||||
result = self.client_get("/stats/installation")
|
||||
self.assertEqual(result.status_code, 200)
|
||||
self.assert_in_response("Zulip analytics for", result)
|
||||
|
||||
|
||||
class TestGetChartData(ZulipTestCase):
|
||||
@override
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
self.realm = get_realm("zulip")
|
||||
self.user = self.example_user("hamlet")
|
||||
self.stream_id = self.get_stream_id(self.get_streams(self.user)[0])
|
||||
self.login_user(self.user)
|
||||
self.end_times_hour = [
|
||||
ceiling_to_hour(self.realm.date_created) + timedelta(hours=i) for i in range(4)
|
||||
]
|
||||
self.end_times_day = [
|
||||
ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(4)
|
||||
]
|
||||
|
||||
def data(self, i: int) -> list[int]:
|
||||
return [0, 0, i, 0]
|
||||
|
||||
def insert_data(
|
||||
self, stat: CountStat, realm_subgroups: list[str | None], user_subgroups: list[str]
|
||||
) -> None:
|
||||
if stat.frequency == CountStat.HOUR:
|
||||
insert_time = self.end_times_hour[2]
|
||||
fill_time = self.end_times_hour[-1]
|
||||
if stat.frequency == CountStat.DAY:
|
||||
insert_time = self.end_times_day[2]
|
||||
fill_time = self.end_times_day[-1]
|
||||
|
||||
RealmCount.objects.bulk_create(
|
||||
RealmCount(
|
||||
property=stat.property,
|
||||
subgroup=subgroup,
|
||||
end_time=insert_time,
|
||||
value=100 + i,
|
||||
realm=self.realm,
|
||||
)
|
||||
for i, subgroup in enumerate(realm_subgroups)
|
||||
)
|
||||
UserCount.objects.bulk_create(
|
||||
UserCount(
|
||||
property=stat.property,
|
||||
subgroup=subgroup,
|
||||
end_time=insert_time,
|
||||
value=200 + i,
|
||||
realm=self.realm,
|
||||
user=self.user,
|
||||
)
|
||||
for i, subgroup in enumerate(user_subgroups)
|
||||
)
|
||||
StreamCount.objects.bulk_create(
|
||||
StreamCount(
|
||||
property=stat.property,
|
||||
subgroup=subgroup,
|
||||
end_time=insert_time,
|
||||
value=100 + i,
|
||||
stream_id=self.stream_id,
|
||||
realm=self.realm,
|
||||
)
|
||||
for i, subgroup in enumerate(realm_subgroups)
|
||||
)
|
||||
FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE)
|
||||
|
||||
def test_number_of_humans(self) -> None:
|
||||
stat = COUNT_STATS["realm_active_humans::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
stat = COUNT_STATS["1day_actives::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
stat = COUNT_STATS["active_users_audit:is_bot:day"]
|
||||
self.insert_data(stat, ["false"], [])
|
||||
result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data,
|
||||
{
|
||||
"msg": "",
|
||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
||||
"frequency": CountStat.DAY,
|
||||
"everyone": {
|
||||
"_1day": self.data(100),
|
||||
"_15day": self.data(100),
|
||||
"all_time": self.data(100),
|
||||
},
|
||||
"display_order": None,
|
||||
"result": "success",
|
||||
},
|
||||
)
|
||||
|
||||
def test_messages_sent_over_time(self) -> None:
|
||||
stat = COUNT_STATS["messages_sent:is_bot:hour"]
|
||||
self.insert_data(stat, ["true", "false"], ["false"])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data,
|
||||
{
|
||||
"msg": "",
|
||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour],
|
||||
"frequency": CountStat.HOUR,
|
||||
"everyone": {"bot": self.data(100), "human": self.data(101)},
|
||||
"user": {"bot": self.data(0), "human": self.data(200)},
|
||||
"display_order": None,
|
||||
"result": "success",
|
||||
},
|
||||
)
|
||||
|
||||
def test_messages_sent_by_message_type(self) -> None:
|
||||
stat = COUNT_STATS["messages_sent:message_type:day"]
|
||||
self.insert_data(
|
||||
stat, ["public_stream", "private_message"], ["public_stream", "private_stream"]
|
||||
)
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data,
|
||||
{
|
||||
"msg": "",
|
||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
||||
"frequency": CountStat.DAY,
|
||||
"everyone": {
|
||||
"Public channels": self.data(100),
|
||||
"Private channels": self.data(0),
|
||||
"Direct messages": self.data(101),
|
||||
"Group direct messages": self.data(0),
|
||||
},
|
||||
"user": {
|
||||
"Public channels": self.data(200),
|
||||
"Private channels": self.data(201),
|
||||
"Direct messages": self.data(0),
|
||||
"Group direct messages": self.data(0),
|
||||
},
|
||||
"display_order": [
|
||||
"Direct messages",
|
||||
"Public channels",
|
||||
"Private channels",
|
||||
"Group direct messages",
|
||||
],
|
||||
"result": "success",
|
||||
},
|
||||
)
|
||||
|
||||
def test_messages_sent_by_client(self) -> None:
|
||||
stat = COUNT_STATS["messages_sent:client:day"]
|
||||
client1 = Client.objects.create(name="client 1")
|
||||
client2 = Client.objects.create(name="client 2")
|
||||
client3 = Client.objects.create(name="client 3")
|
||||
client4 = Client.objects.create(name="client 4")
|
||||
self.insert_data(
|
||||
stat,
|
||||
[str(client4.id), str(client3.id), str(client2.id)],
|
||||
[str(client3.id), str(client1.id)],
|
||||
)
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data,
|
||||
{
|
||||
"msg": "",
|
||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
||||
"frequency": CountStat.DAY,
|
||||
"everyone": {
|
||||
"client 4": self.data(100),
|
||||
"client 3": self.data(101),
|
||||
"client 2": self.data(102),
|
||||
},
|
||||
"user": {"client 3": self.data(200), "client 1": self.data(201)},
|
||||
"display_order": ["client 1", "client 2", "client 3", "client 4"],
|
||||
"result": "success",
|
||||
},
|
||||
)
|
||||
|
||||
def test_messages_read_over_time(self) -> None:
|
||||
stat = COUNT_STATS["messages_read::hour"]
|
||||
self.insert_data(stat, [None], [])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_read_over_time"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data,
|
||||
{
|
||||
"msg": "",
|
||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_hour],
|
||||
"frequency": CountStat.HOUR,
|
||||
"everyone": {"read": self.data(100)},
|
||||
"user": {"read": self.data(0)},
|
||||
"display_order": None,
|
||||
"result": "success",
|
||||
},
|
||||
)
|
||||
|
||||
def test_messages_sent_by_stream(self) -> None:
|
||||
stat = COUNT_STATS["messages_in_stream:is_bot:day"]
|
||||
self.insert_data(stat, ["true", "false"], [])
|
||||
|
||||
result = self.client_get(
|
||||
f"/json/analytics/chart_data/stream/{self.stream_id}",
|
||||
{
|
||||
"chart_name": "messages_sent_by_stream",
|
||||
},
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data,
|
||||
{
|
||||
"msg": "",
|
||||
"end_times": [datetime_to_timestamp(dt) for dt in self.end_times_day],
|
||||
"frequency": CountStat.DAY,
|
||||
"everyone": {"bot": self.data(100), "human": self.data(101)},
|
||||
"display_order": None,
|
||||
"result": "success",
|
||||
},
|
||||
)
|
||||
|
||||
result = self.api_get(
|
||||
self.example_user("polonius"),
|
||||
f"/api/v1/analytics/chart_data/stream/{self.stream_id}",
|
||||
{
|
||||
"chart_name": "messages_sent_by_stream",
|
||||
},
|
||||
)
|
||||
self.assert_json_error(result, "Not allowed for guest users")
|
||||
|
||||
# Verify we correctly forbid access to stats of streams in other realms.
|
||||
result = self.api_get(
|
||||
self.mit_user("sipbtest"),
|
||||
f"/api/v1/analytics/chart_data/stream/{self.stream_id}",
|
||||
{
|
||||
"chart_name": "messages_sent_by_stream",
|
||||
},
|
||||
subdomain="zephyr",
|
||||
)
|
||||
self.assert_json_error(result, "Invalid channel ID")
|
||||
|
||||
def test_include_empty_subgroups(self) -> None:
|
||||
FillState.objects.create(
|
||||
property="realm_active_humans::day",
|
||||
end_time=self.end_times_day[0],
|
||||
state=FillState.DONE,
|
||||
)
|
||||
result = self.client_get("/json/analytics/chart_data", {"chart_name": "number_of_humans"})
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(data["everyone"], {"_1day": [0], "_15day": [0], "all_time": [0]})
|
||||
self.assertFalse("user" in data)
|
||||
|
||||
FillState.objects.create(
|
||||
property="messages_sent:is_bot:hour",
|
||||
end_time=self.end_times_hour[0],
|
||||
state=FillState.DONE,
|
||||
)
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(data["everyone"], {"human": [0], "bot": [0]})
|
||||
self.assertEqual(data["user"], {"human": [0], "bot": [0]})
|
||||
|
||||
FillState.objects.create(
|
||||
property="messages_sent:message_type:day",
|
||||
end_time=self.end_times_day[0],
|
||||
state=FillState.DONE,
|
||||
)
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_message_type"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data["everyone"],
|
||||
{
|
||||
"Public channels": [0],
|
||||
"Private channels": [0],
|
||||
"Direct messages": [0],
|
||||
"Group direct messages": [0],
|
||||
},
|
||||
)
|
||||
self.assertEqual(
|
||||
data["user"],
|
||||
{
|
||||
"Public channels": [0],
|
||||
"Private channels": [0],
|
||||
"Direct messages": [0],
|
||||
"Group direct messages": [0],
|
||||
},
|
||||
)
|
||||
|
||||
FillState.objects.create(
|
||||
property="messages_sent:client:day",
|
||||
end_time=self.end_times_day[0],
|
||||
state=FillState.DONE,
|
||||
)
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_by_client"}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(data["everyone"], {})
|
||||
self.assertEqual(data["user"], {})
|
||||
|
||||
def test_start_and_end(self) -> None:
|
||||
stat = COUNT_STATS["realm_active_humans::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
stat = COUNT_STATS["1day_actives::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
stat = COUNT_STATS["active_users_audit:is_bot:day"]
|
||||
self.insert_data(stat, ["false"], [])
|
||||
end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day]
|
||||
|
||||
# valid start and end
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data",
|
||||
{
|
||||
"chart_name": "number_of_humans",
|
||||
"start": end_time_timestamps[1],
|
||||
"end": end_time_timestamps[2],
|
||||
},
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(data["end_times"], end_time_timestamps[1:3])
|
||||
self.assertEqual(
|
||||
data["everyone"], {"_1day": [0, 100], "_15day": [0, 100], "all_time": [0, 100]}
|
||||
)
|
||||
|
||||
# start later then end
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data",
|
||||
{
|
||||
"chart_name": "number_of_humans",
|
||||
"start": end_time_timestamps[2],
|
||||
"end": end_time_timestamps[1],
|
||||
},
|
||||
)
|
||||
self.assert_json_error_contains(result, "Start time is later than")
|
||||
|
||||
def test_min_length(self) -> None:
|
||||
stat = COUNT_STATS["realm_active_humans::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
stat = COUNT_STATS["1day_actives::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
stat = COUNT_STATS["active_users_audit:is_bot:day"]
|
||||
self.insert_data(stat, ["false"], [])
|
||||
# test min_length is too short to change anything
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 2}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
self.assertEqual(
|
||||
data["end_times"], [datetime_to_timestamp(dt) for dt in self.end_times_day]
|
||||
)
|
||||
self.assertEqual(
|
||||
data["everyone"],
|
||||
{"_1day": self.data(100), "_15day": self.data(100), "all_time": self.data(100)},
|
||||
)
|
||||
# test min_length larger than filled data
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "number_of_humans", "min_length": 5}
|
||||
)
|
||||
data = self.assert_json_success(result)
|
||||
end_times = [
|
||||
ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)
|
||||
]
|
||||
self.assertEqual(data["end_times"], [datetime_to_timestamp(dt) for dt in end_times])
|
||||
self.assertEqual(
|
||||
data["everyone"],
|
||||
{
|
||||
"_1day": [0, *self.data(100)],
|
||||
"_15day": [0, *self.data(100)],
|
||||
"all_time": [0, *self.data(100)],
|
||||
},
|
||||
)
|
||||
|
||||
def test_non_existent_chart(self) -> None:
|
||||
result = self.client_get("/json/analytics/chart_data", {"chart_name": "does_not_exist"})
|
||||
self.assert_json_error_contains(result, "Unknown chart name")
|
||||
|
||||
def test_analytics_not_running(self) -> None:
|
||||
realm = get_realm("zulip")
|
||||
|
||||
self.assertEqual(FillState.objects.count(), 0)
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=3)
|
||||
realm.save(update_fields=["date_created"])
|
||||
with self.assertLogs(level="WARNING") as m:
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assertEqual(
|
||||
m.output,
|
||||
[
|
||||
f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?"
|
||||
],
|
||||
)
|
||||
|
||||
self.assert_json_error_contains(result, "No analytics data available")
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=1, hours=2)
|
||||
realm.save(update_fields=["date_created"])
|
||||
with self.assertLogs(level="WARNING") as m:
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assertEqual(
|
||||
m.output,
|
||||
[
|
||||
f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: 0001-01-01 00:00:00+00:00 (last successful analytics update). Is the analytics cron job running?"
|
||||
],
|
||||
)
|
||||
|
||||
self.assert_json_error_contains(result, "No analytics data available")
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
|
||||
realm.save(update_fields=["date_created"])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(hours=10)
|
||||
realm.save(update_fields=["date_created"])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
end_time = timezone_now() - timedelta(days=5)
|
||||
fill_state = FillState.objects.create(
|
||||
property="messages_sent:is_bot:hour", end_time=end_time, state=FillState.DONE
|
||||
)
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=3)
|
||||
realm.save(update_fields=["date_created"])
|
||||
with self.assertLogs(level="WARNING") as m:
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assertEqual(
|
||||
m.output,
|
||||
[
|
||||
f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?"
|
||||
],
|
||||
)
|
||||
|
||||
self.assert_json_error_contains(result, "No analytics data available")
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
|
||||
realm.save(update_fields=["date_created"])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
end_time = timezone_now() - timedelta(days=2)
|
||||
fill_state.end_time = end_time
|
||||
fill_state.save(update_fields=["end_time"])
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=3)
|
||||
realm.save(update_fields=["date_created"])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=1, hours=2)
|
||||
realm.save(update_fields=["date_created"])
|
||||
with self.assertLogs(level="WARNING") as m:
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assertEqual(
|
||||
m.output,
|
||||
[
|
||||
f"WARNING:root:User from realm zulip attempted to access /stats, but the computed start time: {realm.date_created} (creation of realm or installation) is later than the computed end time: {end_time} (last successful analytics update). Is the analytics cron job running?"
|
||||
],
|
||||
)
|
||||
|
||||
self.assert_json_error_contains(result, "No analytics data available")
|
||||
|
||||
realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
|
||||
realm.save(update_fields=["date_created"])
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data", {"chart_name": "messages_sent_over_time"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
def test_get_chart_data_for_realm(self) -> None:
|
||||
user = self.example_user("hamlet")
|
||||
self.login_user(user)
|
||||
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"}
|
||||
)
|
||||
self.assert_json_error(result, "Must be an server administrator", 400)
|
||||
|
||||
user = self.example_user("hamlet")
|
||||
user.is_staff = True
|
||||
user.save(update_fields=["is_staff"])
|
||||
stat = COUNT_STATS["realm_active_humans::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data/realm/not_existing_realm",
|
||||
{"chart_name": "number_of_humans"},
|
||||
)
|
||||
self.assert_json_error(result, "Invalid organization", 400)
|
||||
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data/realm/zulip", {"chart_name": "number_of_humans"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
def test_get_chart_data_for_installation(self) -> None:
|
||||
user = self.example_user("hamlet")
|
||||
self.login_user(user)
|
||||
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"}
|
||||
)
|
||||
self.assert_json_error(result, "Must be an server administrator", 400)
|
||||
|
||||
user = self.example_user("hamlet")
|
||||
user.is_staff = True
|
||||
user.save(update_fields=["is_staff"])
|
||||
stat = COUNT_STATS["realm_active_humans::day"]
|
||||
self.insert_data(stat, [None], [])
|
||||
|
||||
result = self.client_get(
|
||||
"/json/analytics/chart_data/installation", {"chart_name": "number_of_humans"}
|
||||
)
|
||||
self.assert_json_success(result)
|
||||
|
||||
|
||||
class TestGetChartDataHelpers(ZulipTestCase):
|
||||
def test_sort_by_totals(self) -> None:
|
||||
empty: list[int] = []
|
||||
value_arrays = {"c": [0, 1], "a": [9], "b": [1, 1, 1], "d": empty}
|
||||
self.assertEqual(sort_by_totals(value_arrays), ["a", "b", "c", "d"])
|
||||
|
||||
def test_sort_client_labels(self) -> None:
|
||||
data = {
|
||||
"everyone": {"a": [16], "c": [15], "b": [14], "e": [13], "d": [12], "h": [11]},
|
||||
"user": {"a": [6], "b": [5], "d": [4], "e": [3], "f": [2], "g": [1]},
|
||||
}
|
||||
self.assertEqual(sort_client_labels(data), ["a", "b", "c", "d", "e", "f", "g", "h"])
|
||||
|
||||
|
||||
class TestTimeRange(ZulipTestCase):
|
||||
def test_time_range(self) -> None:
|
||||
HOUR = timedelta(hours=1)
|
||||
DAY = timedelta(days=1)
|
||||
|
||||
a_time = datetime(2016, 3, 14, 22, 59, tzinfo=timezone.utc)
|
||||
floor_hour = datetime(2016, 3, 14, 22, tzinfo=timezone.utc)
|
||||
floor_day = datetime(2016, 3, 14, tzinfo=timezone.utc)
|
||||
|
||||
# test start == end
|
||||
self.assertEqual(time_range(a_time, a_time, CountStat.HOUR, None), [])
|
||||
self.assertEqual(time_range(a_time, a_time, CountStat.DAY, None), [])
|
||||
# test start == end == boundary, and min_length == 0
|
||||
self.assertEqual(time_range(floor_hour, floor_hour, CountStat.HOUR, 0), [floor_hour])
|
||||
self.assertEqual(time_range(floor_day, floor_day, CountStat.DAY, 0), [floor_day])
|
||||
# test start and end on different boundaries
|
||||
self.assertEqual(
|
||||
time_range(floor_hour, floor_hour + HOUR, CountStat.HOUR, None),
|
||||
[floor_hour, floor_hour + HOUR],
|
||||
)
|
||||
self.assertEqual(
|
||||
time_range(floor_day, floor_day + DAY, CountStat.DAY, None),
|
||||
[floor_day, floor_day + DAY],
|
||||
)
|
||||
# test min_length
|
||||
self.assertEqual(
|
||||
time_range(floor_hour, floor_hour + HOUR, CountStat.HOUR, 4),
|
||||
[floor_hour - 2 * HOUR, floor_hour - HOUR, floor_hour, floor_hour + HOUR],
|
||||
)
|
||||
self.assertEqual(
|
||||
time_range(floor_day, floor_day + DAY, CountStat.DAY, 4),
|
||||
[floor_day - 2 * DAY, floor_day - DAY, floor_day, floor_day + DAY],
|
||||
)
|
||||
|
||||
|
||||
class TestMapArrays(ZulipTestCase):
|
||||
def test_map_arrays(self) -> None:
|
||||
a = {
|
||||
"desktop app 1.0": [1, 2, 3],
|
||||
"desktop app 2.0": [10, 12, 13],
|
||||
"desktop app 3.0": [21, 22, 23],
|
||||
"website": [1, 2, 3],
|
||||
"ZulipiOS": [1, 2, 3],
|
||||
"ZulipElectron": [2, 5, 7],
|
||||
"ZulipMobile": [1, 2, 3],
|
||||
"ZulipMobile/flutter": [1, 1, 1],
|
||||
"ZulipFlutter": [1, 1, 1],
|
||||
"ZulipPython": [1, 2, 3],
|
||||
"API: Python": [1, 2, 3],
|
||||
"SomethingRandom": [4, 5, 6],
|
||||
"ZulipGitHubWebhook": [7, 7, 9],
|
||||
"ZulipAndroid": [64, 63, 65],
|
||||
"ZulipTerminal": [9, 10, 11],
|
||||
}
|
||||
result = rewrite_client_arrays(a)
|
||||
self.assertEqual(
|
||||
result,
|
||||
{
|
||||
"Old desktop app": [32, 36, 39],
|
||||
"Ancient iOS app": [1, 2, 3],
|
||||
"Desktop app": [2, 5, 7],
|
||||
"Old mobile app (React Native)": [1, 2, 3],
|
||||
"Mobile app (Flutter)": [2, 2, 2],
|
||||
"Web app": [1, 2, 3],
|
||||
"Python API": [2, 4, 6],
|
||||
"SomethingRandom": [4, 5, 6],
|
||||
"GitHub webhook": [7, 7, 9],
|
||||
"Ancient Android app": [64, 63, 65],
|
||||
"Terminal app": [9, 10, 11],
|
||||
},
|
||||
)
|
@@ -1,74 +1,8 @@
|
||||
from django.conf import settings
|
||||
from django.conf.urls import include
|
||||
from django.urls import path
|
||||
from django.urls.resolvers import URLPattern, URLResolver
|
||||
from django.conf.urls import patterns, url
|
||||
|
||||
from analytics.views.stats import (
|
||||
get_chart_data,
|
||||
get_chart_data_for_installation,
|
||||
get_chart_data_for_realm,
|
||||
get_chart_data_for_stream,
|
||||
stats,
|
||||
stats_for_installation,
|
||||
stats_for_realm,
|
||||
urlpatterns = patterns('analytics.views',
|
||||
url(r'^activity$', 'get_activity'),
|
||||
url(r'^realm_activity/(?P<realm>[\S]+)/$', 'get_realm_activity'),
|
||||
url(r'^user_activity/(?P<email>[\S]+)/$', 'get_user_activity'),
|
||||
)
|
||||
from zerver.lib.rest import rest_path
|
||||
|
||||
i18n_urlpatterns: list[URLPattern | URLResolver] = [
|
||||
# Server admin (user_profile.is_staff) visible stats pages
|
||||
path("stats/realm/<realm_str>/", stats_for_realm),
|
||||
path("stats/installation", stats_for_installation),
|
||||
# User-visible stats page
|
||||
path("stats", stats, name="stats"),
|
||||
]
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
from analytics.views.stats import stats_for_remote_installation, stats_for_remote_realm
|
||||
|
||||
i18n_urlpatterns += [
|
||||
path("stats/remote/<int:remote_server_id>/installation", stats_for_remote_installation),
|
||||
path(
|
||||
"stats/remote/<int:remote_server_id>/realm/<int:remote_realm_id>/",
|
||||
stats_for_remote_realm,
|
||||
),
|
||||
]
|
||||
|
||||
# These endpoints are a part of the API (V1), which uses:
|
||||
# * REST verbs
|
||||
# * Basic auth (username:password is email:apiKey)
|
||||
# * Takes and returns json-formatted data
|
||||
#
|
||||
# See rest_dispatch in zerver.lib.rest for an explanation of auth methods used
|
||||
#
|
||||
# All of these paths are accessed by either a /json or /api prefix
|
||||
v1_api_and_json_patterns = [
|
||||
# get data for the graphs at /stats
|
||||
rest_path("analytics/chart_data", GET=get_chart_data),
|
||||
rest_path("analytics/chart_data/stream/<stream_id>", GET=get_chart_data_for_stream),
|
||||
rest_path("analytics/chart_data/realm/<realm_str>", GET=get_chart_data_for_realm),
|
||||
rest_path("analytics/chart_data/installation", GET=get_chart_data_for_installation),
|
||||
]
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
from analytics.views.stats import (
|
||||
get_chart_data_for_remote_installation,
|
||||
get_chart_data_for_remote_realm,
|
||||
)
|
||||
|
||||
v1_api_and_json_patterns += [
|
||||
rest_path(
|
||||
"analytics/chart_data/remote/<int:remote_server_id>/installation",
|
||||
GET=get_chart_data_for_remote_installation,
|
||||
),
|
||||
rest_path(
|
||||
"analytics/chart_data/remote/<int:remote_server_id>/realm/<int:remote_realm_id>",
|
||||
GET=get_chart_data_for_remote_realm,
|
||||
),
|
||||
]
|
||||
|
||||
i18n_urlpatterns += [
|
||||
path("api/v1/", include(v1_api_and_json_patterns)),
|
||||
path("json/", include(v1_api_and_json_patterns)),
|
||||
]
|
||||
|
||||
urlpatterns = i18n_urlpatterns
|
||||
|
880
analytics/views.py
Normal file
880
analytics/views.py
Normal file
@@ -0,0 +1,880 @@
|
||||
from django.db import connection
|
||||
from django.template import RequestContext, loader
|
||||
from django.utils.html import mark_safe
|
||||
from django.shortcuts import render_to_response
|
||||
from django.core import urlresolvers
|
||||
from django.http import HttpResponseNotFound
|
||||
|
||||
from zerver.decorator import has_request_variables, REQ, zulip_internal
|
||||
from zerver.models import get_realm, UserActivity, UserActivityInterval, Realm
|
||||
from zerver.lib.timestamp import timestamp_to_datetime
|
||||
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timedelta
|
||||
import itertools
|
||||
import time
|
||||
import re
|
||||
import pytz
|
||||
eastern_tz = pytz.timezone('US/Eastern')
|
||||
|
||||
def make_table(title, cols, rows, has_row_class=False):
|
||||
|
||||
if not has_row_class:
|
||||
def fix_row(row):
|
||||
return dict(cells=row, row_class=None)
|
||||
rows = map(fix_row, rows)
|
||||
|
||||
data = dict(title=title, cols=cols, rows=rows)
|
||||
|
||||
content = loader.render_to_string(
|
||||
'analytics/ad_hoc_query.html',
|
||||
dict(data=data)
|
||||
)
|
||||
|
||||
return content
|
||||
|
||||
def dictfetchall(cursor):
|
||||
"Returns all rows from a cursor as a dict"
|
||||
desc = cursor.description
|
||||
return [
|
||||
dict(zip([col[0] for col in desc], row))
|
||||
for row in cursor.fetchall()
|
||||
]
|
||||
|
||||
|
||||
def get_realm_day_counts():
|
||||
query = '''
|
||||
select
|
||||
r.domain,
|
||||
(now()::date - pub_date::date) age,
|
||||
count(*) cnt
|
||||
from zerver_message m
|
||||
join zerver_userprofile up on up.id = m.sender_id
|
||||
join zerver_realm r on r.id = up.realm_id
|
||||
join zerver_client c on c.id = m.sending_client_id
|
||||
where
|
||||
(not up.is_bot)
|
||||
and
|
||||
pub_date > now()::date - interval '8 day'
|
||||
and
|
||||
c.name not in ('zephyr_mirror', 'ZulipMonitoring')
|
||||
group by
|
||||
r.domain,
|
||||
age
|
||||
order by
|
||||
r.domain,
|
||||
age
|
||||
'''
|
||||
cursor = connection.cursor()
|
||||
cursor.execute(query)
|
||||
rows = dictfetchall(cursor)
|
||||
cursor.close()
|
||||
|
||||
counts = defaultdict(dict)
|
||||
for row in rows:
|
||||
counts[row['domain']][row['age']] = row['cnt']
|
||||
|
||||
|
||||
result = {}
|
||||
for domain in counts:
|
||||
cnts = [counts[domain].get(age, 0) for age in range(8)]
|
||||
min_cnt = min(cnts)
|
||||
max_cnt = max(cnts)
|
||||
|
||||
def format_count(cnt):
|
||||
if cnt == min_cnt:
|
||||
good_bad = 'bad'
|
||||
elif cnt == max_cnt:
|
||||
good_bad = 'good'
|
||||
else:
|
||||
good_bad = 'neutral'
|
||||
|
||||
return '<td class="number %s">%s</td>' % (good_bad, cnt)
|
||||
|
||||
cnts = ''.join(map(format_count, cnts))
|
||||
result[domain] = dict(cnts=cnts)
|
||||
|
||||
return result
|
||||
|
||||
def realm_summary_table(realm_minutes):
|
||||
query = '''
|
||||
SELECT
|
||||
realm.domain,
|
||||
coalesce(user_counts.active_user_count, 0) active_user_count,
|
||||
coalesce(at_risk_counts.at_risk_count, 0) at_risk_count,
|
||||
(
|
||||
SELECT
|
||||
count(*)
|
||||
FROM zerver_userprofile up
|
||||
WHERE up.realm_id = realm.id
|
||||
AND is_active
|
||||
AND not is_bot
|
||||
) user_profile_count,
|
||||
(
|
||||
SELECT
|
||||
count(*)
|
||||
FROM zerver_userprofile up
|
||||
WHERE up.realm_id = realm.id
|
||||
AND is_active
|
||||
AND is_bot
|
||||
) bot_count
|
||||
FROM zerver_realm realm
|
||||
LEFT OUTER JOIN
|
||||
(
|
||||
SELECT
|
||||
up.realm_id realm_id,
|
||||
count(distinct(ua.user_profile_id)) active_user_count
|
||||
FROM zerver_useractivity ua
|
||||
JOIN zerver_userprofile up
|
||||
ON up.id = ua.user_profile_id
|
||||
WHERE
|
||||
query in (
|
||||
'/json/send_message',
|
||||
'send_message_backend',
|
||||
'/api/v1/send_message',
|
||||
'/json/update_pointer'
|
||||
)
|
||||
AND
|
||||
last_visit > now() - interval '1 day'
|
||||
AND
|
||||
not is_bot
|
||||
GROUP BY realm_id
|
||||
) user_counts
|
||||
ON user_counts.realm_id = realm.id
|
||||
LEFT OUTER JOIN
|
||||
(
|
||||
SELECT
|
||||
realm_id,
|
||||
count(*) at_risk_count
|
||||
FROM (
|
||||
SELECT
|
||||
realm.id as realm_id,
|
||||
up.email
|
||||
FROM zerver_useractivity ua
|
||||
JOIN zerver_userprofile up
|
||||
ON up.id = ua.user_profile_id
|
||||
JOIN zerver_realm realm
|
||||
ON realm.id = up.realm_id
|
||||
WHERE up.is_active
|
||||
AND (not up.is_bot)
|
||||
AND
|
||||
ua.query in (
|
||||
'/json/send_message',
|
||||
'send_message_backend',
|
||||
'/api/v1/send_message',
|
||||
'/json/update_pointer'
|
||||
)
|
||||
GROUP by realm.id, up.email
|
||||
HAVING max(last_visit) between
|
||||
now() - interval '7 day' and
|
||||
now() - interval '1 day'
|
||||
) as at_risk_users
|
||||
GROUP BY realm_id
|
||||
) at_risk_counts
|
||||
ON at_risk_counts.realm_id = realm.id
|
||||
WHERE
|
||||
realm.domain not in ('customer4.invalid', 'wdaher.com')
|
||||
AND EXISTS (
|
||||
SELECT *
|
||||
FROM zerver_useractivity ua
|
||||
JOIN zerver_userprofile up
|
||||
ON up.id = ua.user_profile_id
|
||||
WHERE
|
||||
query in (
|
||||
'/json/send_message',
|
||||
'/api/v1/send_message',
|
||||
'send_message_backend',
|
||||
'/json/update_pointer'
|
||||
)
|
||||
AND
|
||||
up.realm_id = realm.id
|
||||
AND
|
||||
last_visit > now() - interval '2 week'
|
||||
)
|
||||
ORDER BY active_user_count DESC, domain ASC
|
||||
'''
|
||||
|
||||
cursor = connection.cursor()
|
||||
cursor.execute(query)
|
||||
rows = dictfetchall(cursor)
|
||||
cursor.close()
|
||||
|
||||
# get messages sent per day
|
||||
counts = get_realm_day_counts()
|
||||
for row in rows:
|
||||
try:
|
||||
row['history'] = counts[row['domain']]['cnts']
|
||||
except:
|
||||
row['history'] = ''
|
||||
|
||||
# augment data with realm_minutes
|
||||
total_hours = 0
|
||||
for row in rows:
|
||||
domain = row['domain']
|
||||
minutes = realm_minutes.get(domain, 0)
|
||||
hours = minutes / 60.0
|
||||
total_hours += hours
|
||||
row['hours'] = str(int(hours))
|
||||
try:
|
||||
row['hours_per_user'] = '%.1f' % (hours / row['active_user_count'],)
|
||||
except:
|
||||
pass
|
||||
|
||||
# formatting
|
||||
for row in rows:
|
||||
row['domain'] = realm_activity_link(row['domain'])
|
||||
|
||||
# Count active sites
|
||||
def meets_goal(row):
|
||||
# The wdaher.com realm doesn't count toward company goals for
|
||||
# obvious reasons, and customer4.invalid is essentially a dup
|
||||
# for users.customer4.invalid.
|
||||
if row['domain'] in ['customer4.invalid', 'wdaher.com']:
|
||||
return False
|
||||
return row['active_user_count'] >= 5
|
||||
|
||||
num_active_sites = len(filter(meets_goal, rows))
|
||||
|
||||
# create totals
|
||||
total_active_user_count = 0
|
||||
total_user_profile_count = 0
|
||||
total_bot_count = 0
|
||||
for row in rows:
|
||||
total_active_user_count += int(row['active_user_count'])
|
||||
total_user_profile_count += int(row['user_profile_count'])
|
||||
total_bot_count += int(row['bot_count'])
|
||||
|
||||
|
||||
rows.append(dict(
|
||||
domain='Total',
|
||||
active_user_count=total_active_user_count,
|
||||
user_profile_count=total_user_profile_count,
|
||||
bot_count=total_bot_count,
|
||||
hours=int(total_hours)
|
||||
))
|
||||
|
||||
content = loader.render_to_string(
|
||||
'analytics/realm_summary_table.html',
|
||||
dict(rows=rows, num_active_sites=num_active_sites)
|
||||
)
|
||||
return content
|
||||
|
||||
|
||||
def user_activity_intervals():
|
||||
day_end = timestamp_to_datetime(time.time())
|
||||
day_start = day_end - timedelta(hours=24)
|
||||
|
||||
output = "Per-user online duration for the last 24 hours:\n"
|
||||
total_duration = timedelta(0)
|
||||
|
||||
all_intervals = UserActivityInterval.objects.filter(
|
||||
end__gte=day_start,
|
||||
start__lte=day_end
|
||||
).select_related(
|
||||
'user_profile',
|
||||
'user_profile__realm'
|
||||
).only(
|
||||
'start',
|
||||
'end',
|
||||
'user_profile__email',
|
||||
'user_profile__realm__domain'
|
||||
).order_by(
|
||||
'user_profile__realm__domain',
|
||||
'user_profile__email'
|
||||
)
|
||||
|
||||
by_domain = lambda row: row.user_profile.realm.domain
|
||||
by_email = lambda row: row.user_profile.email
|
||||
|
||||
realm_minutes = {}
|
||||
|
||||
for domain, realm_intervals in itertools.groupby(all_intervals, by_domain):
|
||||
realm_duration = timedelta(0)
|
||||
output += '<hr>%s\n' % (domain,)
|
||||
for email, intervals in itertools.groupby(realm_intervals, by_email):
|
||||
duration = timedelta(0)
|
||||
for interval in intervals:
|
||||
start = max(day_start, interval.start)
|
||||
end = min(day_end, interval.end)
|
||||
duration += end - start
|
||||
|
||||
total_duration += duration
|
||||
realm_duration += duration
|
||||
output += " %-*s%s\n" % (37, email, duration, )
|
||||
|
||||
realm_minutes[domain] = realm_duration.total_seconds() / 60
|
||||
|
||||
output += "\nTotal Duration: %s\n" % (total_duration,)
|
||||
output += "\nTotal Duration in minutes: %s\n" % (total_duration.total_seconds() / 60.,)
|
||||
output += "Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,)
|
||||
content = mark_safe('<pre>' + output + '</pre>')
|
||||
return content, realm_minutes
|
||||
|
||||
def sent_messages_report(realm):
    """Render a two-week day-by-day table of human vs. bot message counts
    for one realm (identified by its domain)."""
    title = 'Recently sent messages for ' + realm

    cols = ['Date', 'Humans', 'Bots']

    query = '''
        select
            series.day::date,
            humans.cnt,
            bots.cnt
        from (
            select generate_series(
                (now()::date - interval '2 week'),
                now()::date,
                interval '1 day'
            ) as day
        ) as series
        left join (
            select
                pub_date::date pub_date,
                count(*) cnt
            from zerver_message m
            join zerver_userprofile up on up.id = m.sender_id
            join zerver_realm r on r.id = up.realm_id
            where
                r.domain = %s
            and
                (not up.is_bot)
            and
                pub_date > now() - interval '2 week'
            group by
                pub_date::date
            order by
                pub_date::date
        ) humans on
            series.day = humans.pub_date
        left join (
            select
                pub_date::date pub_date,
                count(*) cnt
            from zerver_message m
            join zerver_userprofile up on up.id = m.sender_id
            join zerver_realm r on r.id = up.realm_id
            where
                r.domain = %s
            and
                up.is_bot
            and
                pub_date > now() - interval '2 week'
            group by
                pub_date::date
            order by
                pub_date::date
        ) bots on
            series.day = bots.pub_date
    '''
    # The realm domain is bound twice: once for the humans subquery and
    # once for the bots subquery.
    with connection.cursor() as cursor:
        cursor.execute(query, [realm, realm])
        rows = cursor.fetchall()

    return make_table(title, cols, rows)
|
||||
|
||||
def ad_hoc_queries():
    # Build the list of extra dashboard pages (mobile usage, desktop
    # usage, integrations), each a dict with 'title' and 'content'
    # (rendered HTML table) keys, backed by raw SQL reports.

    def get_page(query, cols, title):
        # Execute `query`, post-process well-known columns into links /
        # formatted dates, and render the result as an HTML table page.
        cursor = connection.cursor()
        cursor.execute(query)
        rows = cursor.fetchall()
        rows = map(list, rows)
        cursor.close()

        def fix_rows(i, fixup_func):
            # Rewrite column i of every row in place with fixup_func.
            for row in rows:
                row[i] = fixup_func(row[i])

        for i, col in enumerate(cols):
            if col == 'Domain':
                fix_rows(i, realm_activity_link)
            elif col in ['Last time', 'Last visit']:
                fix_rows(i, format_date_for_activity_reports)

        content = make_table(title, cols, rows)

        return dict(
            content=content,
            title=title
        )

    pages = []

    ###

    for mobile_type in ['Android', 'ZulipiOS']:
        title = '%s usage' % (mobile_type,)

        # NOTE(review): mobile_type is %-interpolated straight into the
        # SQL; this is safe only because its values are the two literals
        # in the loop above.
        query = '''
            select
                realm.domain,
                up.id user_id,
                client.name,
                sum(count) as hits,
                max(last_visit) as last_time
            from zerver_useractivity ua
            join zerver_client client on client.id = ua.client_id
            join zerver_userprofile up on up.id = ua.user_profile_id
            join zerver_realm realm on realm.id = up.realm_id
            where
                client.name like '%s'
            group by domain, up.id, client.name
            having max(last_visit) > now() - interval '2 week'
            order by domain, up.id, client.name
        ''' % (mobile_type,)

        cols = [
            'Domain',
            'User id',
            'Name',
            'Hits',
            'Last time'
        ]

        pages.append(get_page(query, cols, title))

    ###

    title = 'Desktop users'

    query = '''
        select
            realm.domain,
            client.name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            client.name like 'desktop%%'
        group by domain, client.name
        having max(last_visit) > now() - interval '2 week'
        order by domain, client.name
    '''

    cols = [
        'Domain',
        'Client',
        'Hits',
        'Last time'
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = 'Integrations by domain'

    query = '''
        select
            realm.domain,
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by domain, client_name
        having max(last_visit) > now() - interval '2 week'
        order by domain, client_name
    '''

    cols = [
        'Domain',
        'Client',
        'Hits',
        'Last time'
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = 'Integrations by client'

    query = '''
        select
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            realm.domain,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by client_name, domain
        having max(last_visit) > now() - interval '2 week'
        order by client_name, domain
    '''

    cols = [
        'Client',
        'Domain',
        'Hits',
        'Last time'
    ]

    pages.append(get_page(query, cols, title))

    return pages
|
||||
|
||||
@zulip_internal
@has_request_variables
def get_activity(request):
    """Render the main internal activity dashboard page."""
    duration_content, realm_minutes = user_activity_intervals()
    data = [
        ('Counts', realm_summary_table(realm_minutes)),
        ('Durations', duration_content),
    ]
    data += [(page['title'], page['content']) for page in ad_hoc_queries()]

    return render_to_response(
        'analytics/activity.html',
        dict(data=data, title='Activity', is_home=True),
        context_instance=RequestContext(request)
    )
|
||||
|
||||
def get_user_activity_records_for_realm(realm, is_bot):
    """UserActivity rows for one realm's active users (humans or bots),
    ordered by email then most-recent visit."""
    fields = [
        'user_profile__full_name',
        'user_profile__email',
        'query',
        'client__name',
        'count',
        'last_visit',
    ]

    return (
        UserActivity.objects.filter(
            user_profile__realm__domain=realm,
            user_profile__is_active=True,
            user_profile__is_bot=is_bot,
        )
        .order_by("user_profile__email", "-last_visit")
        .select_related('user_profile', 'client')
        .only(*fields)
    )
|
||||
|
||||
def get_user_activity_records_for_email(email):
    """UserActivity rows for a single user, most recent first."""
    fields = [
        'user_profile__full_name',
        'query',
        'client__name',
        'count',
        'last_visit',
    ]

    return (
        UserActivity.objects.filter(user_profile__email=email)
        .order_by("-last_visit")
        .select_related('user_profile', 'client')
        .only(*fields)
    )
|
||||
|
||||
def raw_user_activity_table(records):
    """Render a 'Raw Data' table of UserActivity records.

    Each row shows the endpoint query, client name, hit count, and the
    formatted last-visit timestamp.
    """
    cols = [
        'query',
        'client',
        'count',
        'last_visit'
    ]

    def row(record):
        # One table row per UserActivity record.
        return [
            record.query,
            record.client.name,
            record.count,
            format_date_for_activity_reports(record.last_visit)
        ]

    # Materialize eagerly: under Python 3, map() returns a one-shot
    # iterator, which would be silently exhausted if the consumer
    # iterated the rows more than once.
    rows = [row(record) for record in records]
    title = 'Raw Data'
    return make_table(title, cols, rows)
|
||||
|
||||
def get_user_activity_summary(records):
    # Aggregate one user's UserActivity records into a dict mapping an
    # "action" label -- a client name, or a pseudo-action such as 'use',
    # 'send', 'pointer', 'desktop', 'website' -- to a dict with total
    # 'count' and latest 'last_visit'.  Also stores the user's full name
    # under the 'name' key when any records exist.
    summary = {}
    def update(action, record):
        # Fold a single record into summary[action].
        if action not in summary:
            summary[action] = dict(
                count=record.count,
                last_visit=record.last_visit
            )
        else:
            summary[action]['count'] += record.count
            summary[action]['last_visit'] = max(
                summary[action]['last_visit'],
                record.last_visit
            )

    if records:
        summary['name'] = records[0].user_profile.full_name

    for record in records:
        client = record.client.name
        query = record.query

        update('use', record)

        if client == 'API':
            # API hits on /api/.../external/<name> are attributed to the
            # integration name rather than the generic 'API' client.
            m = re.match('/api/.*/external/(.*)', query)
            if m:
                client = m.group(1)
                update(client, record)

        if client.startswith('desktop'):
            update('desktop', record)
        if client == 'website':
            update('website', record)
        if ('send_message' in query) or re.search('/api/.*/external/.*', query):
            update('send', record)
        if query in ['/json/update_pointer', '/api/v1/update_pointer']:
            update('pointer', record)
        # NOTE(review): for rewritten API/external clients, update(client,
        # record) was already called in the branch above, so this
        # unconditional call appears to count those records twice under
        # the integration name -- confirm whether that is intentional.
        update(client, record)


    return summary
|
||||
|
||||
def format_date_for_activity_reports(date):
    """Format a datetime in the report timezone as 'YYYY-MM-DD HH:MM';
    falsy input (e.g. None) renders as the empty string."""
    if not date:
        return ''
    return date.astimezone(eastern_tz).strftime('%Y-%m-%d %H:%M')
|
||||
|
||||
def user_activity_link(email):
    """Return a safe HTML link to the per-user activity page."""
    url = urlresolvers.reverse(
        'analytics.views.get_user_activity', kwargs=dict(email=email)
    )
    return mark_safe('<a href="%s">%s</a>' % (url, email))
|
||||
|
||||
def realm_activity_link(realm):
    """Return a safe HTML link to the per-realm activity page."""
    url = urlresolvers.reverse(
        'analytics.views.get_realm_activity', kwargs=dict(realm=realm)
    )
    return mark_safe('<a href="%s">%s</a>' % (url, realm))
|
||||
|
||||
def realm_client_table(user_summaries):
    """Render the realm-wide 'Clients' table: one row per (user, client)
    pair, newest activity first."""
    # Summary keys that are aggregates rather than real client names.
    exclude_keys = [
        'internal',
        'name',
        'use',
        'send',
        'pointer',
        'website',
        'desktop',
    ]

    rows = []
    for email, user_summary in user_summaries.items():
        email_link = user_activity_link(email)
        name = user_summary['name']
        for client, v in user_summary.items():
            if client in exclude_keys:
                continue
            rows.append([
                format_date_for_activity_reports(v['last_visit']),
                client,
                name,
                email_link,
                v['count'],
            ])

    # Sort by the formatted date string, most recent first.
    rows.sort(key=lambda r: r[0], reverse=True)

    cols = [
        'Last visit',
        'Client',
        'Name',
        'Email',
        'Count',
    ]

    return make_table('Clients', cols, rows)
|
||||
|
||||
def user_activity_summary_table(user_summary):
    """Render one user's activity summary as a table, one row per
    client/action, newest first."""
    rows = [
        [
            format_date_for_activity_reports(v['last_visit']),
            client,
            v['count'],
        ]
        for client, v in user_summary.items()
        if client != 'name'  # 'name' holds the user's full name, not a client
    ]
    rows.sort(key=lambda r: r[0], reverse=True)

    cols = [
        'last_visit',
        'client',
        'count',
    ]

    return make_table('User Activity', cols, rows)
|
||||
|
||||
def realm_user_summary_table(all_records, admin_emails):
    # Build the per-realm 'Summary' table: one row per user with their
    # total sent count and last-visit timestamps for a fixed set of
    # activity categories.  Returns (user_records, rendered_table).
    user_records = {}

    def by_email(record):
        return record.user_profile.email

    # groupby requires all_records to already be sorted by email; the
    # caller fetches them ordered by user_profile__email.
    for email, records in itertools.groupby(all_records, by_email):
        user_records[email] = get_user_activity_summary(list(records))

    def get_last_visit(user_summary, k):
        if k in user_summary:
            return user_summary[k]['last_visit']
        else:
            return None

    def get_count(user_summary, k):
        # Falls back to '' (blank cell) rather than 0 for missing keys.
        if k in user_summary:
            return user_summary[k]['count']
        else:
            return ''

    def is_recent(val):
        # Active within the last five minutes.
        age = datetime.now(val.tzinfo) - val
        return age.total_seconds() < 5 * 60

    rows = []
    for email, user_summary in user_records.items():
        email_link = user_activity_link(email)
        sent_count = get_count(user_summary, 'send')
        cells = [user_summary['name'], email_link, sent_count]
        row_class = ''
        for field in ['use', 'send', 'pointer', 'desktop', 'ZulipiOS', 'Android']:
            val = get_last_visit(user_summary, field)
            if field == 'use':
                # CSS classes highlight recently-active users and admins.
                if val and is_recent(val):
                    row_class += ' recently_active'
                if email in admin_emails:
                    row_class += ' admin'
            val = format_date_for_activity_reports(val)
            cells.append(val)
        row = dict(cells=cells, row_class=row_class)
        rows.append(row)

    def by_used_time(row):
        # cells[3] is the formatted 'use' timestamp ('Heard from' column).
        return row['cells'][3]

    rows = sorted(rows, key=by_used_time, reverse=True)

    cols = [
        'Name',
        'Email',
        'Total sent',
        'Heard from',
        'Message sent',
        'Pointer motion',
        'Desktop',
        'ZulipiOS',
        'Android'
    ]

    title = 'Summary'

    content = make_table(title, cols, rows, has_row_class=True)
    return user_records, content
|
||||
|
||||
@zulip_internal
def get_realm_activity(request, realm):
    # Render the per-realm activity page: human and bot summary tables,
    # a client breakdown, and a recent-message history report.
    data = []
    all_records = {}
    all_user_records = {}

    try:
        admins = Realm.objects.get(domain=realm).get_admin_users()
    except Realm.DoesNotExist:
        return HttpResponseNotFound("Realm %s does not exist" % (realm,))

    admin_emails = {admin.email for admin in admins}

    # One summary page each for humans and bots.
    for is_bot, page_title in [(False, 'Humans'), (True, 'Bots')]:
        all_records = get_user_activity_records_for_realm(realm, is_bot)
        all_records = list(all_records)

        user_records, content = realm_user_summary_table(all_records, admin_emails)
        all_user_records.update(user_records)

        data += [(page_title, content)]

    page_title = 'Clients'
    content = realm_client_table(all_user_records)
    data += [(page_title, content)]


    page_title = 'History'
    content = sent_messages_report(realm)
    data += [(page_title, content)]

    # External Graphite dashboard link for this realm's active-user
    # gauge; dots in the domain become underscores in the metric name.
    fix_name = lambda realm: realm.replace('.', '_')

    realm_link = 'https://stats1.zulip.net:444/render/?from=-7days'
    realm_link += '&target=stats.gauges.staging.users.active.%s.0_16hr' % (fix_name(realm),)

    title = realm
    return render_to_response(
        'analytics/activity.html',
        dict(data=data, realm_link=realm_link, title=title),
        context_instance=RequestContext(request)
    )
|
||||
|
||||
@zulip_internal
def get_user_activity(request, email):
    """Render the per-user activity drill-down page (summary + raw data)."""
    records = get_user_activity_records_for_email(email)

    user_summary = get_user_activity_summary(records)
    data = [
        ('Summary', user_activity_summary_table(user_summary)),
        ('Info', raw_user_activity_table(records)),
    ]

    return render_to_response(
        'analytics/activity.html',
        dict(data=data, title=email),
        context_instance=RequestContext(request)
    )
|
@@ -1,654 +0,0 @@
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Annotated, Any, Optional, TypeAlias, TypeVar, cast
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.models import QuerySet
|
||||
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
|
||||
from django.shortcuts import render
|
||||
from django.utils import translation
|
||||
from django.utils.timezone import now as timezone_now
|
||||
from django.utils.translation import gettext as _
|
||||
from pydantic import BeforeValidator, Json, NonNegativeInt
|
||||
|
||||
from analytics.lib.counts import COUNT_STATS, CountStat
|
||||
from analytics.lib.time_utils import time_range
|
||||
from analytics.models import (
|
||||
BaseCount,
|
||||
InstallationCount,
|
||||
RealmCount,
|
||||
StreamCount,
|
||||
UserCount,
|
||||
installation_epoch,
|
||||
)
|
||||
from zerver.decorator import (
|
||||
require_non_guest_user,
|
||||
require_server_admin,
|
||||
require_server_admin_api,
|
||||
to_utc_datetime,
|
||||
zulip_login_required,
|
||||
)
|
||||
from zerver.lib.exceptions import JsonableError
|
||||
from zerver.lib.i18n import get_and_set_request_language, get_language_translation_data
|
||||
from zerver.lib.response import json_success
|
||||
from zerver.lib.streams import access_stream_by_id
|
||||
from zerver.lib.timestamp import convert_to_UTC
|
||||
from zerver.lib.typed_endpoint import PathOnly, typed_endpoint
|
||||
from zerver.models import Client, Realm, Stream, UserProfile
|
||||
from zerver.models.realms import get_realm
|
||||
|
||||
if settings.ZILENCER_ENABLED:
|
||||
from zilencer.models import RemoteInstallationCount, RemoteRealmCount, RemoteZulipServer
|
||||
|
||||
MAX_TIME_FOR_FULL_ANALYTICS_GENERATION = timedelta(days=1, minutes=30)
|
||||
|
||||
|
||||
def is_analytics_ready(realm: Realm) -> bool:
    """True once the realm is old enough that a full analytics pass has
    had time to run since its creation."""
    age = timezone_now() - realm.date_created
    return age > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION
|
||||
|
||||
|
||||
def render_stats(
    request: HttpRequest,
    data_url_suffix: str,
    realm: Realm | None,
    *,
    title: str | None = None,
    analytics_ready: bool = True,
) -> HttpResponse:
    """Render the /stats page shell.

    Args:
        data_url_suffix: appended to chart-data request URLs so the
            frontend fetches the right scope (realm, remote, install).
        realm: the realm whose stats are shown, or None for
            installation/remote scopes (then ``title`` is required).
        title: page heading; defaults to the realm's name or string_id.
        analytics_ready: whether analytics data generation has had time
            to complete (controls a frontend notice).
    """
    assert request.user.is_authenticated

    if realm is not None:
        # Same query to get guest user count as in get_seat_count in corporate/lib/stripe.py.
        guest_users = UserProfile.objects.filter(
            realm=realm, is_active=True, is_bot=False, role=UserProfile.ROLE_GUEST
        ).count()
        space_used = realm.currently_used_upload_space_bytes()
        # Replaces the original `if title: pass / else:` anti-idiom;
        # behavior is identical (empty titles also fall back).
        if not title:
            title = realm.name or realm.string_id
    else:
        assert title
        guest_users = None
        space_used = None

    request_language = get_and_set_request_language(
        request,
        request.user.default_language,
        translation.get_language_from_path(request.path_info),
    )

    # Sync this with stats_params_schema in base_page_params.ts.
    page_params = dict(
        page_type="stats",
        data_url_suffix=data_url_suffix,
        upload_space_used=space_used,
        guest_users=guest_users,
        translation_data=get_language_translation_data(request_language),
    )

    return render(
        request,
        "analytics/stats.html",
        context=dict(
            target_name=title,
            page_params=page_params,
            analytics_ready=analytics_ready,
        ),
    )
|
||||
|
||||
|
||||
@zulip_login_required
def stats(request: HttpRequest) -> HttpResponse:
    """Serve /stats for the requesting user's own realm."""
    assert request.user.is_authenticated
    if request.user.is_guest:
        # TODO: Make @zulip_login_required pass the UserProfile so we
        # can use @require_member_or_admin
        raise JsonableError(_("Not allowed for guest users"))
    realm = request.user.realm
    return render_stats(request, "", realm, analytics_ready=is_analytics_ready(realm))
|
||||
|
||||
|
||||
@require_server_admin
@typed_endpoint
def stats_for_realm(request: HttpRequest, *, realm_str: PathOnly[str]) -> HttpResponse:
    """Server-admin view of another realm's /stats page."""
    try:
        realm = get_realm(realm_str)
    except Realm.DoesNotExist:
        return HttpResponseNotFound()

    data_url_suffix = f"/realm/{realm_str}"
    return render_stats(
        request, data_url_suffix, realm, analytics_ready=is_analytics_ready(realm)
    )
|
||||
|
||||
|
||||
@require_server_admin
@typed_endpoint
def stats_for_remote_realm(
    request: HttpRequest, *, remote_server_id: PathOnly[int], remote_realm_id: PathOnly[int]
) -> HttpResponse:
    """Server-admin /stats view for a realm hosted on a remote server."""
    assert settings.ZILENCER_ENABLED
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    title = f"Realm {remote_realm_id} on server {server.hostname}"
    return render_stats(
        request,
        f"/remote/{server.id}/realm/{remote_realm_id}",
        None,
        title=title,
    )
|
||||
|
||||
|
||||
@require_server_admin_api
@typed_endpoint
def get_chart_data_for_realm(
    request: HttpRequest,
    user_profile: UserProfile,
    /,
    *,
    realm_str: PathOnly[str],
    chart_name: str,
    min_length: Json[NonNegativeInt] | None = None,
    start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
    end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
) -> HttpResponse:
    """Server-admin chart-data endpoint scoped to an arbitrary realm."""
    try:
        realm = get_realm(realm_str)
    except Realm.DoesNotExist:
        raise JsonableError(_("Invalid organization"))

    return do_get_chart_data(
        request, user_profile, realm=realm, chart_name=chart_name,
        min_length=min_length, start=start, end=end,
    )
|
||||
|
||||
|
||||
@require_non_guest_user
@typed_endpoint
def get_chart_data_for_stream(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    stream_id: PathOnly[int],
    chart_name: str,
    min_length: Json[NonNegativeInt] | None = None,
    start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
    end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
) -> HttpResponse:
    """Chart data for a single channel the user can access."""
    stream, _sub = access_stream_by_id(
        user_profile, stream_id, require_content_access=False
    )

    return do_get_chart_data(
        request, user_profile, stream=stream, chart_name=chart_name,
        min_length=min_length, start=start, end=end,
    )
|
||||
|
||||
|
||||
@require_server_admin_api
@typed_endpoint
def get_chart_data_for_remote_realm(
    request: HttpRequest,
    user_profile: UserProfile,
    /,
    *,
    remote_server_id: PathOnly[int],
    remote_realm_id: PathOnly[int],
    chart_name: str,
    min_length: Json[NonNegativeInt] | None = None,
    start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
    end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
) -> HttpResponse:
    """Chart data for one realm on a remote server (zilencer-only)."""
    assert settings.ZILENCER_ENABLED
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    return do_get_chart_data(
        request, user_profile, server=server, remote=True,
        remote_realm_id=remote_realm_id, chart_name=chart_name,
        min_length=min_length, start=start, end=end,
    )
|
||||
|
||||
|
||||
@require_server_admin
def stats_for_installation(request: HttpRequest) -> HttpResponse:
    """Server-admin /stats page aggregated over the whole installation."""
    assert request.user.is_authenticated
    return render_stats(request, "/installation", None, title="installation")
|
||||
|
||||
|
||||
@require_server_admin
def stats_for_remote_installation(request: HttpRequest, remote_server_id: int) -> HttpResponse:
    """Server-admin /stats page for an entire remote server (zilencer-only)."""
    assert settings.ZILENCER_ENABLED
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    title = f"remote installation {server.hostname}"
    return render_stats(request, f"/remote/{server.id}/installation", None, title=title)
|
||||
|
||||
|
||||
@require_server_admin_api
@typed_endpoint
def get_chart_data_for_installation(
    request: HttpRequest,
    user_profile: UserProfile,
    /,
    *,
    chart_name: str,
    min_length: Json[NonNegativeInt] | None = None,
    start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
    end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
) -> HttpResponse:
    """Installation-wide chart data (server admins only)."""
    return do_get_chart_data(
        request, user_profile, for_installation=True, chart_name=chart_name,
        min_length=min_length, start=start, end=end,
    )
|
||||
|
||||
|
||||
@require_server_admin_api
@typed_endpoint
def get_chart_data_for_remote_installation(
    request: HttpRequest,
    user_profile: UserProfile,
    /,
    *,
    remote_server_id: PathOnly[int],
    chart_name: str,
    min_length: Json[NonNegativeInt] | None = None,
    start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
    end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
) -> HttpResponse:
    """Installation-wide chart data for a remote server (zilencer-only)."""
    assert settings.ZILENCER_ENABLED
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    return do_get_chart_data(
        request, user_profile, for_installation=True, remote=True,
        server=server, chart_name=chart_name,
        min_length=min_length, start=start, end=end,
    )
|
||||
|
||||
|
||||
@require_non_guest_user
@typed_endpoint
def get_chart_data(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    chart_name: str,
    min_length: Json[NonNegativeInt] | None = None,
    start: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
    end: Annotated[datetime | None, BeforeValidator(to_utc_datetime)] = None,
) -> HttpResponse:
    """Chart data for the requesting user's own realm."""
    return do_get_chart_data(
        request, user_profile, chart_name=chart_name,
        min_length=min_length, start=start, end=end,
    )
|
||||
|
||||
|
||||
@require_non_guest_user
def do_get_chart_data(
    request: HttpRequest,
    user_profile: UserProfile,
    *,
    # Common parameters supported by all stats endpoints.
    chart_name: str,
    min_length: NonNegativeInt | None = None,
    start: datetime | None = None,
    end: datetime | None = None,
    # The following parameters are only used by wrapping functions for
    # various contexts; the callers are responsible for validating them.
    realm: Realm | None = None,
    for_installation: bool = False,
    remote: bool = False,
    remote_realm_id: int | None = None,
    server: Optional["RemoteZulipServer"] = None,
    stream: Stream | None = None,
) -> HttpResponse:
    """Core implementation shared by every get_chart_data_* endpoint.

    Selects the CountStats and count tables for `chart_name`, computes a
    [start, end] range of bucket end-times, fetches per-subgroup time
    series at each aggregation level, and returns them as JSON.
    """
    # Pick the aggregate (top-level) count table for the requested scope.
    TableType: TypeAlias = (
        type["RemoteInstallationCount"]
        | type[InstallationCount]
        | type["RemoteRealmCount"]
        | type[RealmCount]
    )
    if for_installation:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table: TableType = RemoteInstallationCount
            assert server is not None
        else:
            aggregate_table = InstallationCount
    else:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table = RemoteRealmCount
            assert server is not None
            assert remote_realm_id is not None
        else:
            aggregate_table = RealmCount

    tables: (
        tuple[TableType] | tuple[TableType, type[UserCount]] | tuple[TableType, type[StreamCount]]
    )

    # Per-chart configuration: which stats to read, which tables to read
    # them from, how subgroups map to display labels, and ordering.
    if chart_name == "number_of_humans":
        stats = [
            COUNT_STATS["1day_actives::day"],
            COUNT_STATS["realm_active_humans::day"],
            COUNT_STATS["active_users_audit:is_bot:day"],
        ]
        tables = (aggregate_table,)
        subgroup_to_label: dict[CountStat, dict[str | None, str]] = {
            stats[0]: {None: "_1day"},
            stats[1]: {None: "_15day"},
            stats[2]: {"false": "all_time"},
        }
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_over_time":
        stats = [COUNT_STATS["messages_sent:is_bot:hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_message_type":
        stats = [COUNT_STATS["messages_sent:message_type:day"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {
            stats[0]: {
                "public_stream": _("Public channels"),
                "private_stream": _("Private channels"),
                "private_message": _("Direct messages"),
                "huddle_message": _("Group direct messages"),
            }
        }
        labels_sort_function = lambda data: sort_by_totals(data["everyone"])
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_client":
        stats = [COUNT_STATS["messages_sent:client:day"]]
        tables = (aggregate_table, UserCount)
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {
            stats[0]: {str(id): name for id, name in Client.objects.values_list("id", "name")}
        }
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    elif chart_name == "messages_read_over_time":
        stats = [COUNT_STATS["messages_read::hour"]]
        tables = (aggregate_table, UserCount)
        subgroup_to_label = {stats[0]: {None: "read"}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_stream":
        if stream is None:
            raise JsonableError(
                _("Missing channel for chart: {chart_name}").format(chart_name=chart_name)
            )
        stats = [COUNT_STATS["messages_in_stream:is_bot:day"]]
        tables = (aggregate_table, StreamCount)
        subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
        labels_sort_function = None
        include_empty_subgroups = True
    else:
        raise JsonableError(_("Unknown chart name: {chart_name}").format(chart_name=chart_name))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(
            _("Start time is later than end time. Start: {start}, End: {end}").format(
                start=start,
                end=end,
            )
        )

    if realm is None:
        # Note that this value is invalid for Remote tables; be
        # careful not to access it in those code paths.
        realm = user_profile.realm

    if remote:
        # For remote servers, we don't have fillstate data, and thus
        # should simply use the first and last data points for the
        # table.
        assert server is not None
        assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
        aggregate_table_remote = cast(
            type[RemoteInstallationCount] | type[RemoteRealmCount], aggregate_table
        )  # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
        if not aggregate_table_remote.objects.filter(server=server).exists():
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator.")
            )
        if start is None:
            first = (
                aggregate_table_remote.objects.filter(server=server).order_by("remote_id").first()
            )
            assert first is not None
            start = first.end_time
        if end is None:
            last = aggregate_table_remote.objects.filter(server=server).order_by("remote_id").last()
            assert last is not None
            end = last.end_time
    else:
        # Otherwise, we can use tables on the current server to
        # determine a nice range, and some additional validation.
        if start is None:
            if for_installation:
                start = installation_epoch()
            else:
                start = realm.date_created
        if end is None:
            end = max(
                stat.last_successful_fill() or datetime.min.replace(tzinfo=timezone.utc)
                for stat in stats
            )

        if start > end and (timezone_now() - start > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION):
            logging.warning(
                "User from realm %s attempted to access /stats, but the computed "
                "start time: %s (creation of realm or installation) is later than the computed "
                "end time: %s (last successful analytics update). Is the "
                "analytics cron job running?",
                realm.string_id,
                start,
                end,
            )
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator.")
            )

    # All stats for one chart must share a frequency so they can share
    # one set of bucket end-times.
    assert len({stat.frequency for stat in stats}) == 1
    end_times = time_range(start, end, stats[0].frequency, min_length)
    data: dict[str, Any] = {
        "end_times": [int(end_time.timestamp()) for end_time in end_times],
        "frequency": stats[0].frequency,
    }

    # Each table maps to a key in the response payload.
    aggregation_level = {
        InstallationCount: "everyone",
        RealmCount: "everyone",
        UserCount: "user",
        StreamCount: "everyone",
    }
    if settings.ZILENCER_ENABLED:
        aggregation_level[RemoteInstallationCount] = "everyone"
        aggregation_level[RemoteRealmCount] = "everyone"

    # -1 is a placeholder value, since there is no relevant filtering on InstallationCount
    id_value = {
        InstallationCount: -1,
        RealmCount: realm.id,
        UserCount: user_profile.id,
    }
    if stream is not None:
        id_value[StreamCount] = stream.id

    if settings.ZILENCER_ENABLED:
        if server is not None:
            id_value[RemoteInstallationCount] = server.id
        # TODO: RemoteRealmCount logic doesn't correctly handle
        # filtering by server_id as well.
        if remote_realm_id is not None:
            id_value[RemoteRealmCount] = remote_realm_id

    for table in tables:
        data[aggregation_level[table]] = {}
        for stat in stats:
            data[aggregation_level[table]].update(
                get_time_series_by_subgroup(
                    stat,
                    table,
                    id_value[table],
                    end_times,
                    subgroup_to_label[stat],
                    include_empty_subgroups,
                )
            )

    if labels_sort_function is not None:
        data["display_order"] = labels_sort_function(data)
    else:
        data["display_order"] = None
    return json_success(request, data=data)
|
||||
|
||||
|
||||
def sort_by_totals(value_arrays: dict[str, list[int]]) -> list[str]:
|
||||
totals = sorted(((sum(values), label) for label, values in value_arrays.items()), reverse=True)
|
||||
return [label for total, label in totals]
|
||||
|
||||
|
||||
# For any given user, we want to show a fixed set of clients in the chart,
|
||||
# regardless of the time aggregation or whether we're looking at realm or
|
||||
# user data. This fixed set ideally includes the clients most important in
|
||||
# understanding the realm's traffic and the user's traffic. This function
|
||||
# tries to rank the clients so that taking the first N elements of the
|
||||
# sorted list has a reasonable chance of doing so.
|
||||
def sort_client_labels(data: dict[str, dict[str, list[int]]]) -> list[str]:
|
||||
realm_order = sort_by_totals(data["everyone"])
|
||||
user_order = sort_by_totals(data["user"])
|
||||
label_sort_values: dict[str, float] = {label: i for i, label in enumerate(realm_order)}
|
||||
for i, label in enumerate(user_order):
|
||||
label_sort_values[label] = min(i - 0.1, label_sort_values.get(label, i))
|
||||
return [label for label, sort_value in sorted(label_sort_values.items(), key=lambda x: x[1])]
|
||||
|
||||
|
||||
CountT = TypeVar("CountT", bound=BaseCount)
|
||||
|
||||
|
||||
def table_filtered_to_id(table: type[CountT], key_id: int) -> QuerySet[CountT]:
|
||||
if table == RealmCount:
|
||||
return table._default_manager.filter(realm_id=key_id)
|
||||
elif table == UserCount:
|
||||
return table._default_manager.filter(user_id=key_id)
|
||||
elif table == StreamCount:
|
||||
return table._default_manager.filter(stream_id=key_id)
|
||||
elif table == InstallationCount:
|
||||
return table._default_manager.all()
|
||||
elif settings.ZILENCER_ENABLED and table == RemoteInstallationCount:
|
||||
return table._default_manager.filter(server_id=key_id)
|
||||
elif settings.ZILENCER_ENABLED and table == RemoteRealmCount:
|
||||
return table._default_manager.filter(realm_id=key_id)
|
||||
else:
|
||||
raise AssertionError(f"Unknown table: {table}")
|
||||
|
||||
|
||||
def client_label_map(name: str) -> str:
|
||||
if name == "website":
|
||||
return "Web app"
|
||||
if name.startswith("desktop app"):
|
||||
return "Old desktop app"
|
||||
if name == "ZulipElectron":
|
||||
return "Desktop app"
|
||||
if name == "ZulipTerminal":
|
||||
return "Terminal app"
|
||||
if name == "ZulipAndroid":
|
||||
return "Ancient Android app"
|
||||
if name == "ZulipiOS":
|
||||
return "Ancient iOS app"
|
||||
if name == "ZulipMobile":
|
||||
return "Old mobile app (React Native)"
|
||||
if name in ["ZulipFlutter", "ZulipMobile/flutter"]:
|
||||
return "Mobile app (Flutter)"
|
||||
if name in ["ZulipPython", "API: Python"]:
|
||||
return "Python API"
|
||||
if name.startswith("Zulip") and name.endswith("Webhook"):
|
||||
return name.removeprefix("Zulip").removesuffix("Webhook") + " webhook"
|
||||
return name
|
||||
|
||||
|
||||
def rewrite_client_arrays(value_arrays: dict[str, list[int]]) -> dict[str, list[int]]:
|
||||
mapped_arrays: dict[str, list[int]] = {}
|
||||
for label, array in value_arrays.items():
|
||||
mapped_label = client_label_map(label)
|
||||
if mapped_label in mapped_arrays:
|
||||
for i in range(len(array)):
|
||||
mapped_arrays[mapped_label][i] += array[i]
|
||||
else:
|
||||
mapped_arrays[mapped_label] = array.copy()
|
||||
return mapped_arrays
|
||||
|
||||
|
||||
def get_time_series_by_subgroup(
|
||||
stat: CountStat,
|
||||
table: type[BaseCount],
|
||||
key_id: int,
|
||||
end_times: list[datetime],
|
||||
subgroup_to_label: dict[str | None, str],
|
||||
include_empty_subgroups: bool,
|
||||
) -> dict[str, list[int]]:
|
||||
queryset = (
|
||||
table_filtered_to_id(table, key_id)
|
||||
.filter(property=stat.property)
|
||||
.values_list("subgroup", "end_time", "value")
|
||||
)
|
||||
value_dicts: dict[str | None, dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
|
||||
for subgroup, end_time, value in queryset:
|
||||
value_dicts[subgroup][end_time] = value
|
||||
value_arrays = {}
|
||||
for subgroup, label in subgroup_to_label.items():
|
||||
if (subgroup in value_dicts) or include_empty_subgroups:
|
||||
value_arrays[label] = [value_dicts[subgroup][end_time] for end_time in end_times]
|
||||
|
||||
if stat == COUNT_STATS["messages_sent:client:day"]:
|
||||
# HACK: We rewrite these arrays to collapse the Client objects
|
||||
# with similar names into a single sum, and generally give
|
||||
# them better names
|
||||
return rewrite_client_arrays(value_arrays)
|
||||
return value_arrays
|
10
api/MANIFEST.in
Normal file
10
api/MANIFEST.in
Normal file
@@ -0,0 +1,10 @@
|
||||
recursive-include integrations *
|
||||
include README.md
|
||||
include examples/zuliprc
|
||||
include examples/send-message
|
||||
include examples/subscribe
|
||||
include examples/get-public-streams
|
||||
include examples/unsubscribe
|
||||
include examples/list-members
|
||||
include examples/list-subscriptions
|
||||
include examples/print-messages
|
106
api/README.md
Normal file
106
api/README.md
Normal file
@@ -0,0 +1,106 @@
|
||||
#### Dependencies
|
||||
|
||||
The [Zulip API](https://zulip.com/api) Python bindings require the
|
||||
following Python libraries:
|
||||
|
||||
* simplejson
|
||||
* requests (version >= 0.12.1)
|
||||
|
||||
|
||||
#### Installing
|
||||
|
||||
This package uses distutils, so you can just run:
|
||||
|
||||
python setup.py install
|
||||
|
||||
#### Using the API
|
||||
|
||||
For now, the only fully supported API operation is sending a message.
|
||||
The other API queries work, but are under active development, so
|
||||
please make sure we know you're using them so that we can notify you
|
||||
as we make any changes to them.
|
||||
|
||||
The easiest way to use these API bindings is to base your tools off
|
||||
of the example tools under examples/ in this distribution.
|
||||
|
||||
If you place your API key in the config file `~/.zuliprc` the Python
|
||||
API bindings will automatically read it in. The format of the config
|
||||
file is as follows:
|
||||
|
||||
[api]
|
||||
key=<api key from the web interface>
|
||||
email=<your email address>
|
||||
|
||||
If you are using Zulip Enterprise, you should also add
|
||||
|
||||
site=<your Zulip Enterprise server's URI>
|
||||
|
||||
Alternatively, you may explicitly use "--user" and "--api-key" in our
|
||||
examples, which is especially useful if you are running several bots
|
||||
which share a home directory. There is also a "--site" option for
|
||||
setting the Zulip Enterprise server on the command line.
|
||||
|
||||
You can obtain your Zulip API key, create bots, and manage bots all
|
||||
from your Zulip [settings page](https://zulip.com/#settings).
|
||||
|
||||
A typical simple bot sending API messages will look as follows:
|
||||
|
||||
At the top of the file:
|
||||
|
||||
# Make sure the Zulip API distribution's root directory is in sys.path, then:
|
||||
import zulip
|
||||
zulip_client = zulip.Client(email="your-bot@example.com", client="MyTestClient/0.1")
|
||||
|
||||
When you want to send a message:
|
||||
|
||||
message = {
|
||||
"type": "stream",
|
||||
"to": ["support"],
|
||||
"subject": "your subject",
|
||||
"content": "your content",
|
||||
}
|
||||
zulip_client.send_message(message)
|
||||
|
||||
Additional examples:
|
||||
|
||||
client.send_message({'type': 'stream', 'content': 'Zulip rules!',
|
||||
'subject': 'feedback', 'to': ['support']})
|
||||
client.send_message({'type': 'private', 'content': 'Zulip rules!',
|
||||
'to': ['user1@example.com', 'user2@example.com']})
|
||||
|
||||
send_message() returns a dict guaranteed to contain the following
|
||||
keys: msg, result. For successful calls, result will be "success" and
|
||||
msg will be the empty string. On error, result will be "error" and
|
||||
msg will describe what went wrong.
|
||||
|
||||
#### Logging
|
||||
The Zulip API comes with a ZulipStream class which can be used with the
|
||||
logging module:
|
||||
|
||||
```
|
||||
import zulip
|
||||
import logging
|
||||
stream = zulip.ZulipStream(type="stream", to=["support"], subject="your subject")
|
||||
logger = logging.getLogger("your_logger")
|
||||
logger.addHandler(logging.StreamHandler(stream))
|
||||
logger.setLevel(logging.DEBUG)
|
||||
logger.info("This is an INFO test.")
|
||||
logger.debug("This is a DEBUG test.")
|
||||
logger.warn("This is a WARN test.")
|
||||
logger.error("This is a ERROR test.")
|
||||
```
|
||||
|
||||
#### Sending messages
|
||||
|
||||
You can use the included `zulip-send` script to send messages via the
|
||||
API directly from existing scripts.
|
||||
|
||||
zulip-send hamlet@example.com cordelia@example.com -m \
|
||||
"Conscience doth make cowards of us all."
|
||||
|
||||
Alternatively, if you don't want to use your ~/.zuliprc file:
|
||||
|
||||
zulip-send --user shakespeare-bot@example.com \
|
||||
--api-key a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 \
|
||||
hamlet@example.com cordelia@example.com -m \
|
||||
"Conscience doth make cowards of us all."
|
126
api/bin/zulip-send
Executable file
126
api/bin/zulip-send
Executable file
@@ -0,0 +1,126 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# zulip-send -- Sends a message to the specified recipients.
|
||||
|
||||
# Copyright © 2012 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import optparse
|
||||
import logging
|
||||
|
||||
|
||||
logging.basicConfig()
|
||||
|
||||
log = logging.getLogger('zulip-send')
|
||||
|
||||
def do_send_message(client, message_data ):
|
||||
'''Sends a message and optionally prints status about the same.'''
|
||||
|
||||
if message_data['type'] == 'stream':
|
||||
log.info('Sending message to stream "%s", subject "%s"... ' % \
|
||||
(message_data['to'], message_data['subject']))
|
||||
else:
|
||||
log.info('Sending message to %s... ' % message_data['to'])
|
||||
response = client.send_message(message_data)
|
||||
if response['result'] == 'success':
|
||||
log.info('Message sent.')
|
||||
return True
|
||||
else:
|
||||
log.error(response['msg'])
|
||||
return False
|
||||
|
||||
def main(argv=None):
|
||||
if argv is None:
|
||||
argv = sys.argv
|
||||
|
||||
usage = """%prog [options] [recipient...]
|
||||
|
||||
Sends a message specified recipients.
|
||||
|
||||
Examples: %prog --stream denmark --subject castle -m "Something is rotten in the state of Denmark."
|
||||
%prog hamlet@example.com cordelia@example.com -m "Conscience doth make cowards of us all."
|
||||
|
||||
These examples assume you have a proper '~/.zuliprc'. You may also set your credentials with the
|
||||
'--user' and '--api-key' arguments.
|
||||
"""
|
||||
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
|
||||
import zulip
|
||||
|
||||
parser = optparse.OptionParser(usage=usage)
|
||||
|
||||
# Grab parser options from the API common set
|
||||
parser.add_option_group(zulip.generate_option_group(parser))
|
||||
|
||||
parser.add_option('-m', '--message',
|
||||
help='Specifies the message to send, prevents interactive prompting.')
|
||||
|
||||
group = optparse.OptionGroup(parser, 'Stream parameters')
|
||||
group.add_option('-s', '--stream',
|
||||
dest='stream',
|
||||
action='store',
|
||||
help='Allows the user to specify a stream for the message.')
|
||||
group.add_option('-S', '--subject',
|
||||
dest='subject',
|
||||
action='store',
|
||||
help='Allows the user to specify a subject for the message.')
|
||||
parser.add_option_group(group)
|
||||
|
||||
|
||||
(options, recipients) = parser.parse_args(argv[1:])
|
||||
|
||||
if options.verbose:
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
# Sanity check user data
|
||||
if len(recipients) != 0 and (options.stream or options.subject):
|
||||
parser.error('You cannot specify both a username and a stream/subject.')
|
||||
if len(recipients) == 0 and (bool(options.stream) != bool(options.subject)):
|
||||
parser.error('Stream messages must have a subject')
|
||||
if len(recipients) == 0 and not (options.stream and options.subject):
|
||||
parser.error('You must specify a stream/subject or at least one recipient.')
|
||||
|
||||
client = zulip.init_from_options(options)
|
||||
|
||||
if not options.message:
|
||||
options.message = sys.stdin.read()
|
||||
|
||||
if options.stream:
|
||||
message_data = {
|
||||
'type': 'stream',
|
||||
'content': options.message,
|
||||
'subject': options.subject,
|
||||
'to': options.stream,
|
||||
}
|
||||
else:
|
||||
message_data = {
|
||||
'type': 'private',
|
||||
'content': options.message,
|
||||
'to': recipients,
|
||||
}
|
||||
|
||||
if not do_send_message(client, message_data):
|
||||
return 1
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
54
api/examples/create-user
Executable file
54
api/examples/create-user
Executable file
@@ -0,0 +1,54 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright © 2012-2014 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import sys
|
||||
from os import path
|
||||
import optparse
|
||||
|
||||
usage = """create-user --new-email=<email address> --new-password=<password> --new-full-name=<full name> --new-short-name=<short name> [options]
|
||||
|
||||
Create a user. You must be a realm admin to use this API, and the user
|
||||
will be created in your realm.
|
||||
|
||||
Example: create-user --site=http://localhost:9991 --user=rwbarton@zulip.com --new-email=jarthur@zulip.com --new-password=random17 --new-full-name 'J. Arthur Random' --new-short-name='jarthur'
|
||||
"""
|
||||
|
||||
sys.path.append(path.join(path.dirname(__file__), '..'))
|
||||
import zulip
|
||||
|
||||
parser = optparse.OptionParser(usage=usage)
|
||||
parser.add_option_group(zulip.generate_option_group(parser))
|
||||
parser.add_option('--new-email')
|
||||
parser.add_option('--new-password')
|
||||
parser.add_option('--new-full-name')
|
||||
parser.add_option('--new-short-name')
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
client = zulip.init_from_options(options)
|
||||
|
||||
print client.create_user({
|
||||
'email': options.new_email,
|
||||
'password': options.new_password,
|
||||
'full_name': options.new_full_name,
|
||||
'short_name': options.new_short_name
|
||||
})
|
56
api/examples/edit-message
Executable file
56
api/examples/edit-message
Executable file
@@ -0,0 +1,56 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright © 2012 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import optparse
|
||||
|
||||
usage = """edit-message [options] --message=<msg_id> --subject=<new subject> --content=<new content> --user=<sender's email address> --api-key=<sender's api key>
|
||||
|
||||
Edits a message that you sent
|
||||
|
||||
Example: edit-message --message-id="348135" --subject="my subject" --content="test message" --user=othello-bot@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5
|
||||
|
||||
You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
|
||||
"""
|
||||
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
|
||||
import zulip
|
||||
|
||||
parser = optparse.OptionParser(usage=usage)
|
||||
parser.add_option('--message-id', default="")
|
||||
parser.add_option('--subject', default="")
|
||||
parser.add_option('--content', default="")
|
||||
parser.add_option_group(zulip.generate_option_group(parser))
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
client = zulip.init_from_options(options)
|
||||
|
||||
message_data = {
|
||||
"message_id": options.message_id,
|
||||
}
|
||||
if options.subject != "":
|
||||
message_data["subject"] = options.subject
|
||||
if options.content != "":
|
||||
message_data["content"] = options.content
|
||||
print client.update_message(message_data)
|
46
api/examples/get-public-streams
Executable file
46
api/examples/get-public-streams
Executable file
@@ -0,0 +1,46 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright © 2012 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import optparse
|
||||
|
||||
usage = """get-public-streams --user=<bot's email address> --api-key=<bot's api key> [options]
|
||||
|
||||
Prints out all the public streams in the realm.
|
||||
|
||||
Example: get-public-streams --user=othello-bot@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5
|
||||
|
||||
You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
|
||||
"""
|
||||
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
import zulip
|
||||
|
||||
parser = optparse.OptionParser(usage=usage)
|
||||
parser.add_option_group(zulip.generate_option_group(parser))
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
client = zulip.init_from_options(options)
|
||||
|
||||
print client.get_streams(include_public=True, include_subscribed=False)
|
45
api/examples/list-members
Executable file
45
api/examples/list-members
Executable file
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright © 2014 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import optparse
|
||||
|
||||
usage = """list-members --user=<bot's email address> --api-key=<bot's api key> [options]
|
||||
|
||||
List the names and e-mail addresses of the people in your realm.
|
||||
|
||||
You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
|
||||
"""
|
||||
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
import zulip
|
||||
|
||||
parser = optparse.OptionParser(usage=usage)
|
||||
parser.add_option_group(zulip.generate_option_group(parser))
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
client = zulip.init_from_options(options)
|
||||
|
||||
for user in client.get_members()["members"]:
|
||||
print user["full_name"], user["email"]
|
45
api/examples/list-subscriptions
Executable file
45
api/examples/list-subscriptions
Executable file
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright © 2012 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import optparse
|
||||
|
||||
usage = """list-subscriptions --user=<bot's email address> --api-key=<bot's api key> [options]
|
||||
|
||||
Prints out a list of the user's subscriptions.
|
||||
|
||||
Example: list-subscriptions --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5
|
||||
|
||||
You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
|
||||
"""
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
import zulip
|
||||
|
||||
parser = optparse.OptionParser(usage=usage)
|
||||
parser.add_option_group(zulip.generate_option_group(parser))
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
client = zulip.init_from_options(options)
|
||||
|
||||
print client.list_subscriptions()
|
49
api/examples/print-messages
Executable file
49
api/examples/print-messages
Executable file
@@ -0,0 +1,49 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright © 2012 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import optparse
|
||||
|
||||
usage = """print-messages --user=<bot's email address> --api-key=<bot's api key> [options]
|
||||
|
||||
Prints out each message received by the indicated bot or user.
|
||||
|
||||
Example: print-messages --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5
|
||||
|
||||
You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
|
||||
"""
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
import zulip
|
||||
|
||||
parser = optparse.OptionParser(usage=usage)
|
||||
parser.add_option_group(zulip.generate_option_group(parser))
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
client = zulip.init_from_options(options)
|
||||
|
||||
def print_message(message):
|
||||
print message
|
||||
|
||||
# This is a blocking call, and will continuously poll for new messages
|
||||
client.call_on_each_message(print_message)
|
45
api/examples/print-next-message
Executable file
45
api/examples/print-next-message
Executable file
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright © 2012 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import optparse
|
||||
|
||||
usage = """print-next-message --user=<bot's email address> --api-key=<bot's api key> [options]
|
||||
|
||||
Prints out the next message received by the user.
|
||||
|
||||
Example: print-next-messages --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5
|
||||
|
||||
You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
|
||||
"""
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
import zulip
|
||||
|
||||
parser = optparse.OptionParser(usage=usage)
|
||||
parser.add_option_group(zulip.generate_option_group(parser))
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
client = zulip.init_from_options(options)
|
||||
|
||||
print client.get_messages({})
|
57
api/examples/send-message
Executable file
57
api/examples/send-message
Executable file
@@ -0,0 +1,57 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright © 2012 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import optparse
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
import zulip
|
||||
|
||||
usage = """send-message --user=<bot's email address> --api-key=<bot's api key> [options] <recipients>
|
||||
|
||||
Sends a test message to the specified recipients.
|
||||
|
||||
Example: send-message --user=your-bot@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --type=stream commits --subject="my subject" --message="test message"
|
||||
Example: send-message --user=your-bot@example.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 user1@example.com user2@example.com
|
||||
|
||||
You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
|
||||
"""
|
||||
parser = optparse.OptionParser(usage=usage)
|
||||
parser.add_option('--subject', default="test")
|
||||
parser.add_option('--message', default="test message")
|
||||
parser.add_option('--type', default='private')
|
||||
parser.add_option_group(zulip.generate_option_group(parser))
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
if len(args) == 0:
|
||||
parser.error("You must specify recipients")
|
||||
|
||||
client = zulip.init_from_options(options)
|
||||
|
||||
message_data = {
|
||||
"type": options.type,
|
||||
"content": options.message,
|
||||
"subject": options.subject,
|
||||
"to": args,
|
||||
}
|
||||
print client.send_message(message_data)
|
52
api/examples/subscribe
Executable file
52
api/examples/subscribe
Executable file
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright © 2012 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import optparse
|
||||
|
||||
usage = """subscribe --user=<bot's email address> --api-key=<bot's api key> [options] --streams=<streams>
|
||||
|
||||
Ensures the user is subscribed to the listed streams.
|
||||
|
||||
Examples: subscribe --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --streams=foo
|
||||
subscribe --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --streams='foo bar'
|
||||
|
||||
You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
|
||||
"""
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
import zulip
|
||||
|
||||
parser = optparse.OptionParser(usage=usage)
|
||||
parser.add_option_group(zulip.generate_option_group(parser))
|
||||
parser.add_option('--streams', default='')
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
client = zulip.init_from_options(options)
|
||||
|
||||
if options.streams == "":
|
||||
print >>sys.stderr, "Usage:", parser.usage
|
||||
sys.exit(1)
|
||||
|
||||
print client.add_subscriptions([{"name": stream_name} for stream_name in
|
||||
options.streams.split()])
|
51
api/examples/unsubscribe
Executable file
51
api/examples/unsubscribe
Executable file
@@ -0,0 +1,51 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright © 2012 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import optparse
|
||||
|
||||
usage = """unsubscribe --user=<bot's email address> --api-key=<bot's api key> [options] --streams=<streams>
|
||||
|
||||
Ensures the user is not subscribed to the listed streams.
|
||||
|
||||
Examples: unsubscribe --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --streams=foo
|
||||
unsubscribe --user=tabbott@zulip.com --api-key=a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5 --streams='foo bar'
|
||||
|
||||
You can omit --user and --api-key arguments if you have a properly set up ~/.zuliprc
|
||||
"""
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
|
||||
import zulip
|
||||
|
||||
parser = optparse.OptionParser(usage=usage)
|
||||
parser.add_option_group(zulip.generate_option_group(parser))
|
||||
parser.add_option('--streams', default='')
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
client = zulip.init_from_options(options)
|
||||
|
||||
if options.streams == "":
|
||||
print >>sys.stderr, "Usage:", parser.usage
|
||||
sys.exit(1)
|
||||
|
||||
print client.remove_subscriptions(options.streams.split())
|
4
api/examples/zuliprc
Normal file
4
api/examples/zuliprc
Normal file
@@ -0,0 +1,4 @@
|
||||
; Save this file as ~/.zuliprc
|
||||
[api]
|
||||
key=<your bot's api key from the web interface>
|
||||
email=<your bot's email address>
|
57
api/integrations/asana/zulip_asana_config.py
Normal file
57
api/integrations/asana/zulip_asana_config.py
Normal file
@@ -0,0 +1,57 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright © 2014 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
|
||||
### REQUIRED CONFIGURATION ###
|
||||
|
||||
# Change these values to your Asana credentials.
|
||||
ASANA_API_KEY = "0123456789abcdef0123456789abcdef"
|
||||
|
||||
# Change these values to the credentials for your Asana bot.
|
||||
ZULIP_USER = "asana-bot@example.com"
|
||||
ZULIP_API_KEY = "0123456789abcdef0123456789abcdef"
|
||||
|
||||
# The Zulip stream that will receive Asana task updates.
|
||||
ZULIP_STREAM_NAME = "asana"
|
||||
|
||||
|
||||
### OPTIONAL CONFIGURATION ###
|
||||
|
||||
# Set to None for logging to stdout when testing, and to a file for
|
||||
# logging when deployed.
|
||||
#LOG_FILE = "/var/tmp/zulip_asana.log"
|
||||
LOG_FILE = None
|
||||
|
||||
# This file is used to resume this mirror in case the script shuts down.
|
||||
# It is required and needs to be writeable.
|
||||
RESUME_FILE = "/var/tmp/zulip_asana.state"
|
||||
|
||||
# When initially started, how many hours of messages to include.
|
||||
ASANA_INITIAL_HISTORY_HOURS = 1
|
||||
|
||||
# If you're using Zulip Enterprise, set this to your Zulip Enterprise server
|
||||
ZULIP_SITE = "https://api.zulip.com"
|
||||
|
||||
# If properly installed, the Zulip API should be in your import
|
||||
# path, but if not, set a custom path below
|
||||
ZULIP_API_PATH = None
|
277
api/integrations/asana/zulip_asana_mirror
Normal file
277
api/integrations/asana/zulip_asana_mirror
Normal file
@@ -0,0 +1,277 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Asana integration for Zulip
|
||||
#
|
||||
# Copyright © 2014 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
import base64
|
||||
from datetime import datetime, timedelta
|
||||
import dateutil.parser
|
||||
import dateutil.tz
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
import urllib2
|
||||
|
||||
import sys
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
import zulip_asana_config as config
|
||||
VERSION = "0.9"
|
||||
|
||||
if config.ZULIP_API_PATH is not None:
|
||||
sys.path.append(config.ZULIP_API_PATH)
|
||||
import zulip
|
||||
|
||||
if config.LOG_FILE:
|
||||
logging.basicConfig(filename=config.LOG_FILE, level=logging.WARNING)
|
||||
else:
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
client = zulip.Client(email=config.ZULIP_USER, api_key=config.ZULIP_API_KEY,
|
||||
site=config.ZULIP_SITE, client="ZulipAsana/" + VERSION)
|
||||
|
||||
def fetch_from_asana(path):
|
||||
"""
|
||||
Request a resource through the Asana API, authenticating using
|
||||
HTTP basic auth.
|
||||
"""
|
||||
auth = base64.encodestring('%s:' % (config.ASANA_API_KEY,))
|
||||
headers = {"Authorization": "Basic %s" % auth}
|
||||
|
||||
url = "https://app.asana.com/api/1.0" + path
|
||||
request = urllib2.Request(url, None, headers)
|
||||
result = urllib2.urlopen(request)
|
||||
|
||||
return json.load(result)
|
||||
|
||||
def send_zulip(topic, content):
|
||||
"""
|
||||
Send a message to Zulip using the configured stream and bot credentials.
|
||||
"""
|
||||
message = {"type": "stream",
|
||||
"sender": config.ZULIP_USER,
|
||||
"to": config.ZULIP_STREAM_NAME,
|
||||
"subject": topic,
|
||||
"content": content,
|
||||
}
|
||||
return client.send_message(message)
|
||||
|
||||
def datestring_to_datetime(datestring):
|
||||
"""
|
||||
Given an ISO 8601 datestring, return the corresponding datetime object.
|
||||
"""
|
||||
return dateutil.parser.parse(datestring).replace(
|
||||
tzinfo=dateutil.tz.gettz('Z'))
|
||||
|
||||
class TaskDict(dict):
|
||||
"""
|
||||
A helper class to turn a dictionary with task information into an
|
||||
object where each of the keys is an attribute for easy access.
|
||||
"""
|
||||
def __getattr__(self, field):
|
||||
return self.get(field)
|
||||
|
||||
def format_topic(task, projects):
|
||||
"""
|
||||
Return a string that will be the Zulip message topic for this task.
|
||||
"""
|
||||
# Tasks can be associated with multiple projects, but in practice they seem
|
||||
# to mostly be associated with one.
|
||||
project_name = projects[task.projects[0]["id"]]
|
||||
return "%s: %s" % (project_name, task.name)
|
||||
|
||||
def format_assignee(task, users):
|
||||
"""
|
||||
Return a string describing the task's assignee.
|
||||
"""
|
||||
if task.assignee:
|
||||
assignee_name = users[task.assignee["id"]]
|
||||
assignee_info = "**Assigned to**: %s (%s)" % (
|
||||
assignee_name, task.assignee_status)
|
||||
else:
|
||||
assignee_info = "**Status**: Unassigned"
|
||||
|
||||
return assignee_info
|
||||
|
||||
def format_due_date(task):
|
||||
"""
|
||||
Return a string describing the task's due date.
|
||||
"""
|
||||
if task.due_on:
|
||||
due_date_info = "**Due on**: %s" % (task.due_on,)
|
||||
else:
|
||||
due_date_info = "**Due date**: None"
|
||||
return due_date_info
|
||||
|
||||
def format_task_creation_event(task, projects, users):
|
||||
"""
|
||||
Format the topic and content for a newly-created task.
|
||||
"""
|
||||
topic = format_topic(task, projects)
|
||||
assignee_info = format_assignee(task, users)
|
||||
due_date_info = format_due_date(task)
|
||||
|
||||
content = """Task **%s** created:
|
||||
|
||||
~~~ quote
|
||||
%s
|
||||
~~~
|
||||
|
||||
%s
|
||||
%s
|
||||
""" % (task.name, task.notes, assignee_info, due_date_info)
|
||||
return topic, content
|
||||
|
||||
def format_task_completion_event(task, projects, users):
|
||||
"""
|
||||
Format the topic and content for a completed task.
|
||||
"""
|
||||
topic = format_topic(task, projects)
|
||||
assignee_info = format_assignee(task, users)
|
||||
due_date_info = format_due_date(task)
|
||||
|
||||
content = """Task **%s** completed. :white_check_mark:
|
||||
|
||||
%s
|
||||
%s
|
||||
""" % (task.name, assignee_info, due_date_info)
|
||||
return topic, content
|
||||
|
||||
def since():
|
||||
"""
|
||||
Return a newness threshold for task events to be processed.
|
||||
"""
|
||||
# If we have a record of the last event processed and it is recent, use it,
|
||||
# else process everything from ASANA_INITIAL_HISTORY_HOURS ago.
|
||||
def default_since():
|
||||
return datetime.utcnow() - timedelta(
|
||||
hours=config.ASANA_INITIAL_HISTORY_HOURS)
|
||||
|
||||
if os.path.exists(config.RESUME_FILE):
|
||||
try:
|
||||
with open(config.RESUME_FILE, "r") as f:
|
||||
datestring = f.readline().strip()
|
||||
timestamp = float(datestring)
|
||||
max_timestamp_processed = datetime.fromtimestamp(timestamp)
|
||||
logging.info("Reading from resume file: " + datestring)
|
||||
except (ValueError,IOError) as e:
|
||||
logging.warn("Could not open resume file: %s" % (
|
||||
e.message or e.strerror,))
|
||||
max_timestamp_processed = default_since()
|
||||
else:
|
||||
logging.info("No resume file, processing an initial history.")
|
||||
max_timestamp_processed = default_since()
|
||||
|
||||
# Even if we can read a timestamp from RESUME_FILE, if it is old don't use
|
||||
# it.
|
||||
return max(max_timestamp_processed, default_since())
|
||||
|
||||
def process_new_events():
|
||||
"""
|
||||
Forward new Asana task events to Zulip.
|
||||
"""
|
||||
# In task queries, Asana only exposes IDs for projects and users, so we need
|
||||
# to look up the mappings.
|
||||
projects = dict((elt["id"], elt["name"]) for elt in \
|
||||
fetch_from_asana("/projects")["data"])
|
||||
users = dict((elt["id"], elt["name"]) for elt in \
|
||||
fetch_from_asana("/users")["data"])
|
||||
|
||||
cutoff = since()
|
||||
max_timestamp_processed = cutoff
|
||||
time_operations = (("created_at", format_task_creation_event),
|
||||
("completed_at", format_task_completion_event))
|
||||
task_fields = ["assignee", "assignee_status", "created_at", "completed_at",
|
||||
"modified_at", "due_on", "name", "notes", "projects"]
|
||||
|
||||
# First, gather all of the tasks that need processing. We'll
|
||||
# process them in order.
|
||||
new_events = []
|
||||
|
||||
for project_id in projects:
|
||||
project_url = "/projects/%d/tasks?opt_fields=%s" % (
|
||||
project_id, ",".join(task_fields))
|
||||
tasks = fetch_from_asana(project_url)["data"]
|
||||
|
||||
for task in tasks:
|
||||
task = TaskDict(task)
|
||||
|
||||
for time_field, operation in time_operations:
|
||||
if task[time_field]:
|
||||
operation_time = datestring_to_datetime(task[time_field])
|
||||
if operation_time > cutoff:
|
||||
new_events.append((operation_time, time_field, operation, task))
|
||||
|
||||
new_events.sort()
|
||||
now = datetime.utcnow()
|
||||
|
||||
for operation_time, time_field, operation, task in new_events:
|
||||
# Unfortunately, creating an Asana task is not an atomic operation. If
|
||||
# the task was just created, or is missing basic information, it is
|
||||
# probably because the task is still being filled out -- wait until the
|
||||
# next round to process it.
|
||||
if (time_field == "created_at") and \
|
||||
(now - operation_time < timedelta(seconds=30)):
|
||||
# The task was just created, give the user some time to fill out
|
||||
# more information.
|
||||
return
|
||||
|
||||
if (time_field == "created_at") and (not task.name) and \
|
||||
(now - operation_time < timedelta(seconds=60)):
|
||||
# If this new task hasn't had a name for a full 30 seconds, assume
|
||||
# you don't plan on giving it one.
|
||||
return
|
||||
|
||||
topic, content = operation(task, projects, users)
|
||||
logging.info("Sending Zulip for " + topic)
|
||||
result = send_zulip(topic, content)
|
||||
|
||||
# If the Zulip wasn't sent successfully, don't update the
|
||||
# max timestamp processed so the task has another change to
|
||||
# be forwarded. Exit, giving temporary issues time to
|
||||
# resolve.
|
||||
if not result.get("result"):
|
||||
logging.warn("Malformed result, exiting:")
|
||||
logging.warn(result)
|
||||
return
|
||||
|
||||
if result["result"] != "success":
|
||||
logging.warn(result["msg"])
|
||||
return
|
||||
|
||||
if operation_time > max_timestamp_processed:
|
||||
max_timestamp_processed = operation_time
|
||||
|
||||
if max_timestamp_processed > cutoff:
|
||||
max_datestring = max_timestamp_processed.strftime("%s.%f")
|
||||
logging.info("Updating resume file: " + max_datestring)
|
||||
open(config.RESUME_FILE, 'w').write(max_datestring)
|
||||
|
||||
while True:
|
||||
try:
|
||||
process_new_events()
|
||||
time.sleep(5)
|
||||
except KeyboardInterrupt:
|
||||
logging.info("Shutting down...")
|
||||
logging.info("Set LOG_FILE to log to a file instead of stdout.")
|
||||
break
|
53
api/integrations/basecamp/zulip_basecamp_config.py
Normal file
53
api/integrations/basecamp/zulip_basecamp_config.py
Normal file
@@ -0,0 +1,53 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright © 2014 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
|
||||
|
||||
# Change these values to configure authentication for basecamp account
|
||||
BASECAMP_ACCOUNT_ID = "12345678"
|
||||
BASECAMP_USERNAME = "foo@example.com"
|
||||
BASECAMP_PASSWORD = "p455w0rd"
|
||||
|
||||
# This script will mirror this many hours of history on the first run.
|
||||
# On subsequent runs this value is ignored.
|
||||
BASECAMP_INITIAL_HISTORY_HOURS = 0
|
||||
|
||||
# Change these values to configure Zulip authentication for the plugin
|
||||
ZULIP_USER = "basecamp-bot@example.com"
|
||||
ZULIP_API_KEY = "0123456789abcdef0123456789abcdef"
|
||||
ZULIP_STREAM_NAME = "basecamp"
|
||||
|
||||
## If properly installed, the Zulip API should be in your import
|
||||
## path, but if not, set a custom path below
|
||||
ZULIP_API_PATH = None
|
||||
|
||||
# If you're using Zulip Enterprise, set this to your Zulip Enterprise server
|
||||
ZULIP_SITE = "https://api.zulip.com"
|
||||
|
||||
# If you wish to log to a file rather than stdout/stderr,
|
||||
# please fill this out your desired path
|
||||
LOG_FILE = None
|
||||
|
||||
# This file is used to resume this mirror in case the script shuts down.
|
||||
# It is required and needs to be writeable.
|
||||
RESUME_FILE = "/var/tmp/zulip_basecamp.state"
|
180
api/integrations/basecamp/zulip_basecamp_mirror
Executable file
180
api/integrations/basecamp/zulip_basecamp_mirror
Executable file
@@ -0,0 +1,180 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Zulip mirror of Basecamp activity
|
||||
# Copyright © 2014 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
#
|
||||
# The "basecamp-mirror.py" script is run continuously, possibly on a work computer
|
||||
# or preferably on a server.
|
||||
# You may need to install the python-requests library.
|
||||
|
||||
import requests
|
||||
import logging
|
||||
import time
|
||||
import re
|
||||
import sys
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
from HTMLParser import HTMLParser
|
||||
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
import zulip_basecamp_config as config
|
||||
VERSION = "0.9"
|
||||
|
||||
if config.ZULIP_API_PATH is not None:
|
||||
sys.path.append(config.ZULIP_API_PATH)
|
||||
import zulip
|
||||
|
||||
|
||||
client = zulip.Client(
|
||||
email=config.ZULIP_USER,
|
||||
site=config.ZULIP_SITE,
|
||||
api_key=config.ZULIP_API_KEY,
|
||||
client="ZulipBasecamp/" + VERSION)
|
||||
user_agent = "Basecamp To Zulip Mirroring script (support@zulip.com)"
|
||||
htmlParser = HTMLParser()
|
||||
|
||||
# find some form of JSON loader/dumper, with a preference order for speed.
|
||||
json_implementations = ['ujson', 'cjson', 'simplejson', 'json']
|
||||
|
||||
while len(json_implementations):
|
||||
try:
|
||||
json = __import__(json_implementations.pop(0))
|
||||
break
|
||||
except ImportError:
|
||||
continue
|
||||
|
||||
# void function that checks the permissions of the files this script needs.
|
||||
def check_permissions():
|
||||
# check that the log file can be written
|
||||
if config.LOG_FILE:
|
||||
try:
|
||||
open(config.LOG_FILE, "w")
|
||||
except IOError as e:
|
||||
sys.stderr("Could not open up log for writing:")
|
||||
sys.stderr(e)
|
||||
# check that the resume file can be written (this creates if it doesn't exist)
|
||||
try:
|
||||
open(config.RESUME_FILE, "a+")
|
||||
except IOError as e:
|
||||
sys.stderr("Could not open up the file %s for reading and writing" % (config.RESUME_FILE,))
|
||||
sys.stderr(e)
|
||||
|
||||
# builds the message dict for sending a message with the Zulip API
|
||||
def build_message(event):
|
||||
if not (event.has_key('bucket') and event.has_key('creator') and event.has_key('html_url')):
|
||||
logging.error("Perhaps the Basecamp API changed behavior? "
|
||||
"This event doesn't have the expected format:\n%s" %(event,))
|
||||
return None
|
||||
# adjust the topic length to be bounded to 60 characters
|
||||
topic = event['bucket']['name']
|
||||
if len(topic) > 60:
|
||||
topic = topic[0:57] + "..."
|
||||
# get the action and target values
|
||||
action = htmlParser.unescape(re.sub(r"<[^<>]+>", "", event.get('action', '')))
|
||||
target = htmlParser.unescape(event.get('target', ''))
|
||||
# Some events have "excerpts", which we blockquote
|
||||
excerpt = htmlParser.unescape(event.get('excerpt',''))
|
||||
if excerpt.strip() == "":
|
||||
message = '**%s** %s [%s](%s).' % (event['creator']['name'], action, target, event['html_url'])
|
||||
else:
|
||||
message = '**%s** %s [%s](%s).\n> %s' % (event['creator']['name'], action, target, event['html_url'], excerpt)
|
||||
# assemble the message data dict
|
||||
message_data = {
|
||||
"type": "stream",
|
||||
"to": config.ZULIP_STREAM_NAME,
|
||||
"subject": topic,
|
||||
"content": message,
|
||||
}
|
||||
return message_data
|
||||
|
||||
# the main run loop for this mirror script
|
||||
def run_mirror():
|
||||
# we should have the right (write) permissions on the resume file, as seen
|
||||
# in check_permissions, but it may still be empty or corrupted
|
||||
try:
|
||||
with open(config.RESUME_FILE) as f:
|
||||
since = f.read()
|
||||
since = re.search(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}-\d{2}:\d{2}", since)
|
||||
assert since, "resume file does not meet expected format"
|
||||
since = since.string
|
||||
except (AssertionError,IOError) as e:
|
||||
logging.warn("Could not open resume file: %s" % (e.message or e.strerror,))
|
||||
since = (datetime.utcnow() - timedelta(hours=config.BASECAMP_INITIAL_HISTORY_HOURS)).isoformat() + "-00:00"
|
||||
try:
|
||||
# we use an exponential backoff approach when we get 429 (Too Many Requests).
|
||||
sleepInterval = 1
|
||||
while 1:
|
||||
time.sleep(sleepInterval)
|
||||
response = requests.get("https://basecamp.com/%s/api/v1/events.json" % (config.BASECAMP_ACCOUNT_ID),
|
||||
params={'since': since},
|
||||
auth=(config.BASECAMP_USERNAME, config.BASECAMP_PASSWORD),
|
||||
headers = {"User-Agent": user_agent})
|
||||
if response.status_code == 200:
|
||||
sleepInterval = 1
|
||||
events = json.loads(response.text)
|
||||
if len(events):
|
||||
logging.info("Got event(s): %s" % (response.text,))
|
||||
if response.status_code >= 500:
|
||||
logging.error(response.status_code)
|
||||
continue
|
||||
if response.status_code == 429:
|
||||
# exponential backoff
|
||||
sleepInterval *= 2
|
||||
logging.error(response.status_code)
|
||||
continue
|
||||
if response.status_code == 400:
|
||||
logging.error("Something went wrong. Basecamp must be unhappy for this reason: %s" % (response.text,))
|
||||
sys.exit(-1)
|
||||
if response.status_code == 401:
|
||||
logging.error("Bad authorization from Basecamp. Please check your Basecamp login credentials")
|
||||
sys.exit(-1)
|
||||
if len(events):
|
||||
since = events[0]['created_at']
|
||||
for event in reversed(events):
|
||||
message_data = build_message(event)
|
||||
if not message_data:
|
||||
continue
|
||||
zulip_api_result = client.send_message(message_data)
|
||||
if zulip_api_result['result'] == "success":
|
||||
logging.info("sent zulip with id: %s" % (zulip_api_result['id'],))
|
||||
else:
|
||||
logging.warn("%s %s" % (zulip_api_result['result'], zulip_api_result['msg']))
|
||||
# update 'since' each time in case we get KeyboardInterrupted
|
||||
since = event['created_at']
|
||||
# avoid hitting rate-limit
|
||||
time.sleep(0.2)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
logging.info("Shutting down, please hold")
|
||||
open("events.last", 'w').write(since)
|
||||
logging.info("Done!")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if not isinstance(config.RESUME_FILE, basestring):
|
||||
sys.stderr("RESUME_FILE path not given; refusing to continue")
|
||||
check_permissions()
|
||||
if config.LOG_FILE:
|
||||
logging.basicConfig(filename=config.LOG_FILE, level=logging.INFO)
|
||||
else:
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
run_mirror()
|
62
api/integrations/codebase/zulip_codebase_config.py
Normal file
62
api/integrations/codebase/zulip_codebase_config.py
Normal file
@@ -0,0 +1,62 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright © 2014 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
|
||||
|
||||
# Change these values to configure authentication for your codebase account
|
||||
# Note that this is the Codebase API Username, found in the Settings page
|
||||
# for your account
|
||||
CODEBASE_API_USERNAME = "foo@example.com"
|
||||
CODEBASE_API_KEY = "1234561234567abcdef"
|
||||
|
||||
# The URL of your codebase setup
|
||||
CODEBASE_ROOT_URL = "https://YOUR_COMPANY.codebasehq.com"
|
||||
|
||||
# When initially started, how many hours of messages to include.
|
||||
# Note that the Codebase API only returns the 20 latest events,
|
||||
# if you have more than 20 events that fit within this window,
|
||||
# earlier ones may be lost
|
||||
CODEBASE_INITIAL_HISTORY_HOURS = 12
|
||||
|
||||
# Change these values to configure Zulip authentication for the plugin
|
||||
ZULIP_USER = "codebase-bot@example.com"
|
||||
ZULIP_API_KEY = "0123456789abcdef0123456789abcdef"
|
||||
|
||||
# The streams to send commit information and ticket information to
|
||||
ZULIP_COMMITS_STREAM_NAME = "codebase"
|
||||
ZULIP_TICKETS_STREAM_NAME = "tickets"
|
||||
|
||||
# If properly installed, the Zulip API should be in your import
|
||||
# path, but if not, set a custom path below
|
||||
ZULIP_API_PATH = None
|
||||
|
||||
# If you're using Zulip Enterprise, set this to your Zulip Enterprise server
|
||||
ZULIP_SITE = "https://api.zulip.com"
|
||||
|
||||
# If you wish to log to a file rather than stdout/stderr,
|
||||
# please fill this out your desired path
|
||||
LOG_FILE = None
|
||||
|
||||
# This file is used to resume this mirror in case the script shuts down.
|
||||
# It is required and needs to be writeable.
|
||||
RESUME_FILE = "/var/tmp/zulip_codebase.state"
|
318
api/integrations/codebase/zulip_codebase_mirror
Executable file
318
api/integrations/codebase/zulip_codebase_mirror
Executable file
@@ -0,0 +1,318 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Zulip mirror of Codebase HQ activity
|
||||
# Copyright © 2014 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
#
|
||||
# The "codebase-mirror.py" script is run continuously, possibly on a work computer
|
||||
# or preferably on a server.
|
||||
#
|
||||
# When restarted, it will attempt to pick up where it left off.
|
||||
#
|
||||
# You may need to install the python-requests library, as well as python-dateutil
|
||||
|
||||
import requests
|
||||
import logging
|
||||
import time
|
||||
import sys
|
||||
import os
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import dateutil.parser
|
||||
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
import zulip_codebase_config as config
|
||||
VERSION = "0.9"
|
||||
|
||||
if config.ZULIP_API_PATH is not None:
|
||||
sys.path.append(config.ZULIP_API_PATH)
|
||||
import zulip
|
||||
|
||||
client = zulip.Client(
|
||||
email=config.ZULIP_USER,
|
||||
site=config.ZULIP_SITE,
|
||||
api_key=config.ZULIP_API_KEY,
|
||||
client="ZulipCodebase/" + VERSION)
|
||||
user_agent = "Codebase To Zulip Mirroring script (support@zulip.com)"
|
||||
|
||||
# find some form of JSON loader/dumper, with a preference order for speed.
|
||||
json_implementations = ['ujson', 'cjson', 'simplejson', 'json']
|
||||
|
||||
while len(json_implementations):
|
||||
try:
|
||||
json = __import__(json_implementations.pop(0))
|
||||
break
|
||||
except ImportError:
|
||||
continue
|
||||
|
||||
def make_api_call(path):
|
||||
response = requests.get("https://api3.codebasehq.com/%s" % (path,),
|
||||
auth=(config.CODEBASE_API_USERNAME, config.CODEBASE_API_KEY),
|
||||
params={'raw': True},
|
||||
headers = {"User-Agent": user_agent,
|
||||
"Content-Type": "application/json",
|
||||
"Accept": "application/json"})
|
||||
if response.status_code == 200:
|
||||
return json.loads(response.text)
|
||||
|
||||
if response.status_code >= 500:
|
||||
logging.error(response.status_code)
|
||||
return None
|
||||
if response.status_code == 403:
|
||||
logging.error("Bad authorization from Codebase. Please check your credentials")
|
||||
sys.exit(-1)
|
||||
else:
|
||||
logging.warn("Found non-success response status code: %s %s" % (response.status_code, response.text))
|
||||
return None
|
||||
|
||||
def make_url(path):
|
||||
return "%s/%s" % (config.CODEBASE_ROOT_URL, path)
|
||||
|
||||
def handle_event(event):
|
||||
event = event['event']
|
||||
event_type = event['type']
|
||||
actor_name = event['actor_name']
|
||||
|
||||
raw_props = event.get('raw_properties', {})
|
||||
|
||||
project_link = raw_props.get('project_permalink')
|
||||
|
||||
subject = None
|
||||
content = None
|
||||
if event_type == 'repository_creation':
|
||||
stream = config.ZULIP_COMMITS_STREAM_NAME
|
||||
|
||||
project_name = raw_props.get('name')
|
||||
project_repo_type = raw_props.get('scm_type')
|
||||
|
||||
url = make_url("projects/%s" % project_link)
|
||||
scm = "of type %s" % (project_repo_type,) if project_repo_type else ""
|
||||
|
||||
|
||||
subject = "Repository %s Created" % (project_name,)
|
||||
content = "%s created a new repository %s [%s](%s)" % (actor_name, scm, project_name, url)
|
||||
elif event_type == 'push':
|
||||
stream = config.ZULIP_COMMITS_STREAM_NAME
|
||||
|
||||
num_commits = raw_props.get('commits_count')
|
||||
branch = raw_props.get('ref_name')
|
||||
project = raw_props.get('project_name')
|
||||
repo_link = raw_props.get('repository_permalink')
|
||||
deleted_ref = raw_props.get('deleted_ref')
|
||||
new_ref = raw_props.get('new_ref')
|
||||
|
||||
subject = "Push to %s on %s" % (branch, project)
|
||||
|
||||
if deleted_ref:
|
||||
content = "%s deleted branch %s from %s" % (actor_name, branch, project)
|
||||
else:
|
||||
if new_ref:
|
||||
branch = "new branch %s" % (branch, )
|
||||
content = "%s pushed %s commit(s) to %s in project %s:\n\n" % \
|
||||
(actor_name, num_commits, branch, project)
|
||||
for commit in raw_props.get('commits'):
|
||||
ref = commit.get('ref')
|
||||
url = make_url("projects/%s/repositories/%s/commit/%s" % (project_link, repo_link, ref))
|
||||
message = commit.get('message')
|
||||
content += "* [%s](%s): %s\n" % (ref, url, message)
|
||||
elif event_type == 'ticketing_ticket':
|
||||
stream = config.ZULIP_TICKETS_STREAM_NAME
|
||||
|
||||
num = raw_props.get('number')
|
||||
name = raw_props.get('subject')
|
||||
assignee = raw_props.get('assignee')
|
||||
priority = raw_props.get('priority')
|
||||
url = make_url("projects/%s/tickets/%s" % (project_link, num))
|
||||
|
||||
if assignee is None:
|
||||
assignee = "no one"
|
||||
subject = "#%s: %s" % (num, name)
|
||||
content = """%s created a new ticket [#%s](%s) priority **%s** assigned to %s:\n\n~~~ quote\n %s""" % \
|
||||
(actor_name, num, url, priority, assignee, name)
|
||||
elif event_type == 'ticketing_note':
|
||||
stream = config.ZULIP_TICKETS_STREAM_NAME
|
||||
|
||||
num = raw_props.get('number')
|
||||
name = raw_props.get('subject')
|
||||
body = raw_props.get('content')
|
||||
changes = raw_props.get('changes')
|
||||
|
||||
|
||||
url = make_url("projects/%s/tickets/%s" % (project_link, num))
|
||||
subject = "#%s: %s" % (num, name)
|
||||
|
||||
content = ""
|
||||
if body is not None and len(body) > 0:
|
||||
content = "%s added a comment to ticket [#%s](%s):\n\n~~~ quote\n%s\n\n" % (actor_name, num, url, body)
|
||||
|
||||
if 'status_id' in changes:
|
||||
status_change = changes.get('status_id')
|
||||
content += "Status changed from **%s** to **%s**\n\n" % (status_change[0], status_change[1])
|
||||
elif event_type == 'ticketing_milestone':
|
||||
stream = config.ZULIP_TICKETS_STREAM_NAME
|
||||
|
||||
name = raw_props.get('name')
|
||||
identifier = raw_props.get('identifier')
|
||||
url = make_url("projects/%s/milestone/%s" % (project_link, identifier))
|
||||
|
||||
subject = name
|
||||
content = "%s created a new milestone [%s](%s)" % (actor_name, name, url)
|
||||
elif event_type == 'comment':
|
||||
stream = config.ZULIP_COMMITS_STREAM_NAME
|
||||
|
||||
comment = raw_props.get('content')
|
||||
commit = raw_props.get('commit_ref')
|
||||
|
||||
# If there's a commit id, it's a comment to a commit
|
||||
if commit:
|
||||
repo_link = raw_props.get('repository_permalink')
|
||||
|
||||
url = make_url('projects/%s/repositories/%s/commit/%s' % (project_link, repo_link, commit))
|
||||
|
||||
subject = "%s commented on %s" % (actor_name, commit)
|
||||
content = "%s commented on [%s](%s):\n\n~~~ quote\n%s" % (actor_name, commit, url, comment)
|
||||
else:
|
||||
# Otherwise, this is a Discussion item, and handle it
|
||||
subj = raw_props.get("subject")
|
||||
category = raw_props.get("category")
|
||||
comment_content = raw_props.get("content")
|
||||
|
||||
subject = "Discussion: %s" % (subj,)
|
||||
|
||||
if category:
|
||||
content = "%s started a new discussion in %s:\n\n~~~ quote\n%s\n~~~" % (actor_name, category, comment_content)
|
||||
else:
|
||||
content = "%s posted:\n\n~~~ quote\n%s\n~~~" % (actor_name, comment_content)
|
||||
|
||||
elif event_type == 'deployment':
|
||||
stream = config.ZULIP_COMMITS_STREAM_NAME
|
||||
|
||||
start_ref = raw_props.get('start_ref')
|
||||
end_ref = raw_props.get('end_ref')
|
||||
environment = raw_props.get('environment')
|
||||
servers = raw_props.get('servers')
|
||||
repo_link = raw_props.get('repository_permalink')
|
||||
|
||||
start_ref_url = make_url("projects/%s/repositories/%s/commit/%s" % (project_link, repo_link, start_ref))
|
||||
end_ref_url = make_url("projects/%s/repositories/%s/commit/%s" % (project_link, repo_link, end_ref))
|
||||
between_url = make_url("projects/%s/repositories/%s/compare/%s...%s" % (project_link, repo_link, start_ref, end_ref))
|
||||
|
||||
subject = "Deployment to %s" % (environment,)
|
||||
|
||||
content = "%s deployed [%s](%s) [through](%s) [%s](%s) to the **%s** environment." % \
|
||||
(actor_name, start_ref, start_ref_url, between_url, end_ref, end_ref_url, environment)
|
||||
if servers is not None:
|
||||
content += "\n\nServers deployed to: %s" % (", ".join(["`%s`" % (server,) for server in servers]))
|
||||
|
||||
elif event_type == 'named_tree':
|
||||
# Docs say named_tree type used for new/deleting branches and tags,
|
||||
# but experimental testing showed that they were all sent as 'push' events
|
||||
pass
|
||||
elif event_type == 'wiki_page':
|
||||
logging.warn("Wiki page notifications not yet implemented")
|
||||
elif event_type == 'sprint_creation':
|
||||
logging.warn("Sprint notifications not yet implemented")
|
||||
elif event_type == 'sprint_ended':
|
||||
logging.warn("Sprint notifications not yet implemented")
|
||||
else:
|
||||
logging.info("Unknown event type %s, ignoring!" % (event_type,))
|
||||
|
||||
if subject and content:
|
||||
if len(subject) > 60:
|
||||
subject = subject[:57].rstrip() + '...'
|
||||
|
||||
res = client.send_message({"type": "stream",
|
||||
"to": stream,
|
||||
"subject": subject,
|
||||
"content": content})
|
||||
if res['result'] == 'success':
|
||||
logging.info("Successfully sent Zulip with id: %s" % (res['id']))
|
||||
else:
|
||||
logging.warn("Failed to send Zulip: %s %s" % (res['result'], res['msg']))
|
||||
|
||||
|
||||
# the main run loop for this mirror script
|
||||
def run_mirror():
|
||||
# we should have the right (write) permissions on the resume file, as seen
|
||||
# in check_permissions, but it may still be empty or corrupted
|
||||
def default_since():
|
||||
return datetime.utcnow() - timedelta(hours=config.CODEBASE_INITIAL_HISTORY_HOURS)
|
||||
|
||||
try:
|
||||
with open(config.RESUME_FILE) as f:
|
||||
timestamp = f.read()
|
||||
if timestamp == '':
|
||||
since = default_since()
|
||||
else:
|
||||
timestamp = int(timestamp, 10)
|
||||
since = datetime.fromtimestamp(timestamp)
|
||||
except (ValueError,IOError) as e:
|
||||
logging.warn("Could not open resume file: %s" % (e.message or e.strerror,))
|
||||
since = default_since()
|
||||
|
||||
try:
|
||||
sleepInterval = 1
|
||||
while 1:
|
||||
events = make_api_call("activity")[::-1]
|
||||
if events is not None:
|
||||
sleepInterval = 1
|
||||
for event in events:
|
||||
timestamp = event.get('event', {}).get('timestamp', '')
|
||||
event_date = dateutil.parser.parse(timestamp).replace(tzinfo=None)
|
||||
if event_date > since:
|
||||
handle_event(event)
|
||||
since = event_date
|
||||
else:
|
||||
# back off a bit
|
||||
if sleepInterval < 22:
|
||||
sleepInterval += 4
|
||||
time.sleep(sleepInterval)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
open(config.RESUME_FILE, 'w').write(since.strftime("%s"));
|
||||
logging.info("Shutting down Codebase mirror")
|
||||
|
||||
# void function that checks the permissions of the files this script needs.
|
||||
def check_permissions():
|
||||
# check that the log file can be written
|
||||
if config.LOG_FILE:
|
||||
try:
|
||||
open(config.LOG_FILE, "w")
|
||||
except IOError as e:
|
||||
sys.stderr("Could not open up log for writing:")
|
||||
sys.stderr(e)
|
||||
# check that the resume file can be written (this creates if it doesn't exist)
|
||||
try:
|
||||
open(config.RESUME_FILE, "a+")
|
||||
except IOError as e:
|
||||
sys.stderr("Could not open up the file %s for reading and writing" % (config.RESUME_FILE,))
|
||||
sys.stderr(e)
|
||||
|
||||
if __name__ == "__main__":
|
||||
if not isinstance(config.RESUME_FILE, basestring):
|
||||
sys.stderr("RESUME_FILE path not given; refusing to continue")
|
||||
check_permissions()
|
||||
if config.LOG_FILE:
|
||||
logging.basicConfig(filename=config.LOG_FILE, level=logging.WARNING)
|
||||
else:
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
run_mirror()
|
130
api/integrations/git/post-receive
Executable file
130
api/integrations/git/post-receive
Executable file
@@ -0,0 +1,130 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Zulip notification post-receive hook.
|
||||
# Copyright © 2012-2014 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
#
|
||||
# The "post-receive" script is run after receive-pack has accepted a pack
|
||||
# and the repository has been updated. It is passed arguments in through
|
||||
# stdin in the form
|
||||
# <oldrev> <newrev> <refname>
|
||||
# For example:
|
||||
# aa453216d1b3e49e7f6f98441fa56946ddcd6a20 68f7abf4e6f922807889f52bc043ecd31b79f814 refs/heads/master
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import os.path
|
||||
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
import zulip_git_config as config
|
||||
VERSION = "0.9"
|
||||
|
||||
if config.ZULIP_API_PATH is not None:
|
||||
sys.path.append(config.ZULIP_API_PATH)
|
||||
|
||||
import zulip
|
||||
client = zulip.Client(
|
||||
email=config.ZULIP_USER,
|
||||
site=config.ZULIP_SITE,
|
||||
api_key=config.ZULIP_API_KEY,
|
||||
client="ZulipGit/" + VERSION)
|
||||
|
||||
# check_output is backported from subprocess.py in Python 2.7
|
||||
def check_output(*popenargs, **kwargs):
|
||||
if 'stdout' in kwargs:
|
||||
raise ValueError('stdout argument not allowed, it will be overridden.')
|
||||
process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
|
||||
output, unused_err = process.communicate()
|
||||
retcode = process.poll()
|
||||
if retcode:
|
||||
cmd = kwargs.get("args")
|
||||
if cmd is None:
|
||||
cmd = popenargs[0]
|
||||
raise subprocess.CalledProcessError(retcode, cmd, output=output)
|
||||
return output
|
||||
subprocess.check_output = check_output
|
||||
|
||||
def git_repository_name():
|
||||
output = subprocess.check_output(["git", "rev-parse", "--is-bare-repository"])
|
||||
if output.strip() == "true":
|
||||
return os.path.basename(os.getcwd())[:-len(".git")]
|
||||
else:
|
||||
return os.path.basename(os.path.dirname(os.getcwd()))
|
||||
|
||||
def git_commit_range(oldrev, newrev):
|
||||
log_cmd = ["git", "log", "--reverse",
|
||||
"--pretty=%aE %H %s", "%s..%s" % (oldrev, newrev)]
|
||||
commits = ''
|
||||
for ln in subprocess.check_output(log_cmd).splitlines():
|
||||
author_email, commit_id, subject = ln.split(None, 2)
|
||||
if hasattr(config, "format_commit_message"):
|
||||
commits += config.format_commit_message(author_email, subject, commit_id)
|
||||
else:
|
||||
commits += '!avatar(%s) %s\n' % (author_email, subject)
|
||||
return commits
|
||||
|
||||
def send_bot_message(oldrev, newrev, refname):
|
||||
repo_name = git_repository_name()
|
||||
branch = refname.replace('refs/heads/', '')
|
||||
destination = config.commit_notice_destination(repo_name, branch, newrev)
|
||||
if destination is None:
|
||||
# Don't forward the notice anywhere
|
||||
return
|
||||
|
||||
new_head = newrev[:12]
|
||||
old_head = oldrev[:12]
|
||||
|
||||
if (oldrev == '0000000000000000000000000000000000000000' or
|
||||
newrev == '0000000000000000000000000000000000000000'):
|
||||
# New branch pushed or old branch removed
|
||||
added = ''
|
||||
removed = ''
|
||||
else:
|
||||
added = git_commit_range(oldrev, newrev)
|
||||
removed = git_commit_range(newrev, oldrev)
|
||||
|
||||
if oldrev == '0000000000000000000000000000000000000000':
|
||||
message = '`%s` was pushed to new branch `%s`' % (new_head, branch)
|
||||
elif newrev == '0000000000000000000000000000000000000000':
|
||||
message = 'branch `%s` was removed (was `%s`)' % (branch, old_head)
|
||||
elif removed:
|
||||
message = '`%s` was pushed to `%s`, **REMOVING**:\n\n%s' % (new_head, branch, removed)
|
||||
if added:
|
||||
message += '\n**and adding**:\n\n' + added
|
||||
message += '\n**A HISTORY REWRITE HAS OCCURRED!**'
|
||||
message += '\n@everyone: Please check your local branches to deal with this.'
|
||||
elif added:
|
||||
message = '`%s` was deployed to `%s` with:\n\n%s' % (new_head, branch, added)
|
||||
else:
|
||||
message = '`%s` was pushed to `%s`... but nothing changed?' % (new_head, branch)
|
||||
|
||||
message_data = {
|
||||
"type": "stream",
|
||||
"to": destination["stream"],
|
||||
"subject": destination["subject"],
|
||||
"content": message,
|
||||
}
|
||||
client.send_message(message_data)
|
||||
|
||||
for ln in sys.stdin:
|
||||
oldrev, newrev, refname = ln.strip().split()
|
||||
send_bot_message(oldrev, newrev, refname)
|
65
api/integrations/git/zulip_git_config.py
Normal file
65
api/integrations/git/zulip_git_config.py
Normal file
@@ -0,0 +1,65 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright © 2014 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
|
||||
|
||||
# Change these values to configure authentication for the plugin
|
||||
ZULIP_USER = "git-bot@example.com"
|
||||
ZULIP_API_KEY = "0123456789abcdef0123456789abcdef"
|
||||
|
||||
# commit_notice_destination() lets you customize where commit notices
|
||||
# are sent to with the full power of a Python function.
|
||||
#
|
||||
# It takes the following arguments:
|
||||
# * repo = the name of the git repository
|
||||
# * branch = the name of the branch that was pushed to
|
||||
# * commit = the commit id
|
||||
#
|
||||
# Returns a dictionary encoding the stream and subject to send the
|
||||
# notification to (or None to send no notification).
|
||||
#
|
||||
# The default code below will send every commit pushed to "master" to
|
||||
# * stream "commits"
|
||||
# * topic "master"
|
||||
# And similarly for branch "test-post-receive" (for use when testing).
|
||||
def commit_notice_destination(repo, branch, commit):
|
||||
if branch in ["master", "test-post-receive"]:
|
||||
return dict(stream = "commits",
|
||||
subject = u"%s" % (branch,))
|
||||
|
||||
# Return None for cases where you don't want a notice sent
|
||||
return None
|
||||
|
||||
# Modify this function to change how commits are displayed; the most
|
||||
# common customization is to include a link to the commit in your
|
||||
# graphical repository viewer, e.g.
|
||||
#
|
||||
# return '!avatar(%s) [%s](https://example.com/commits/%s)\n' % (author, subject, commit_id)
|
||||
def format_commit_message(author, subject, commit_id):
|
||||
return '!avatar(%s) %s\n' % (author, subject)
|
||||
|
||||
## If properly installed, the Zulip API should be in your import
|
||||
## path, but if not, set a custom path below
|
||||
ZULIP_API_PATH = None
|
||||
|
||||
# If you're using Zulip Enterprise, set this to your Zulip Enterprise server
|
||||
ZULIP_SITE = "https://api.zulip.com"
|
170
api/integrations/hg/zulip-changegroup.py
Executable file
170
api/integrations/hg/zulip-changegroup.py
Executable file
@@ -0,0 +1,170 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Zulip hook for Mercurial changeset pushes.
|
||||
# Copyright © 2012-2014 Zulip, Inc.
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
#
|
||||
#
|
||||
# This hook is called when changesets are pushed to the master repository (ie
|
||||
# `hg push`). See https://zulip.com/integrations for installation instructions.
|
||||
|
||||
import zulip
|
||||
|
||||
VERSION = "0.9"
|
||||
|
||||
def format_summary_line(web_url, user, base, tip, branch, node):
|
||||
"""
|
||||
Format the first line of the message, which contains summary
|
||||
information about the changeset and links to the changelog if a
|
||||
web URL has been configured:
|
||||
|
||||
Jane Doe <jane@example.com> pushed 1 commit to master (170:e494a5be3393):
|
||||
"""
|
||||
revcount = tip - base
|
||||
plural = "s" if revcount > 1 else ""
|
||||
|
||||
if web_url:
|
||||
shortlog_base_url = web_url.rstrip("/") + "/shortlog/"
|
||||
summary_url = "{shortlog}{tip}?revcount={revcount}".format(
|
||||
shortlog=shortlog_base_url, tip=tip - 1, revcount=revcount)
|
||||
formatted_commit_count = "[{revcount} commit{s}]({url})".format(
|
||||
revcount=revcount, s=plural, url=summary_url)
|
||||
else:
|
||||
formatted_commit_count = "{revcount} commit{s}".format(
|
||||
revcount=revcount, s=plural)
|
||||
|
||||
return u"**{user}** pushed {commits} to **{branch}** (`{tip}:{node}`):\n\n".format(
|
||||
user=user, commits=formatted_commit_count, branch=branch, tip=tip,
|
||||
node=node[:12])
|
||||
|
||||
def format_commit_lines(web_url, repo, base, tip):
|
||||
"""
|
||||
Format the per-commit information for the message, including the one-line
|
||||
commit summary and a link to the diff if a web URL has been configured:
|
||||
"""
|
||||
if web_url:
|
||||
rev_base_url = web_url.rstrip("/") + "/rev/"
|
||||
|
||||
commit_summaries = []
|
||||
for rev in range(base, tip):
|
||||
rev_node = repo.changelog.node(rev)
|
||||
rev_ctx = repo.changectx(rev_node)
|
||||
one_liner = rev_ctx.description().split("\n")[0]
|
||||
|
||||
if web_url:
|
||||
summary_url = rev_base_url + str(rev_ctx)
|
||||
summary = "* [{summary}]({url})".format(
|
||||
summary=one_liner, url=summary_url)
|
||||
else:
|
||||
summary = "* {summary}".format(summary=one_liner)
|
||||
|
||||
commit_summaries.append(summary)
|
||||
|
||||
return "\n".join(summary for summary in commit_summaries)
|
||||
|
||||
def send_zulip(email, api_key, site, stream, subject, content):
|
||||
"""
|
||||
Send a message to Zulip using the provided credentials, which should be for
|
||||
a bot in most cases.
|
||||
"""
|
||||
client = zulip.Client(email=email, api_key=api_key,
|
||||
site=site,
|
||||
client="ZulipMercurial/" + VERSION)
|
||||
|
||||
message_data = {
|
||||
"type": "stream",
|
||||
"to": stream,
|
||||
"subject": subject,
|
||||
"content": content,
|
||||
}
|
||||
|
||||
client.send_message(message_data)
|
||||
|
||||
def get_config(ui, item):
|
||||
try:
|
||||
# configlist returns everything in lists.
|
||||
return ui.configlist('zulip', item)[0]
|
||||
except IndexError:
|
||||
return None
|
||||
|
||||
def hook(ui, repo, **kwargs):
|
||||
"""
|
||||
Invoked by configuring a [hook] entry in .hg/hgrc.
|
||||
"""
|
||||
hooktype = kwargs["hooktype"]
|
||||
node = kwargs["node"]
|
||||
|
||||
ui.debug("Zulip: received {hooktype} event\n".format(hooktype=hooktype))
|
||||
|
||||
if hooktype != "changegroup":
|
||||
ui.warn("Zulip: {hooktype} not supported\n".format(hooktype=hooktype))
|
||||
exit(1)
|
||||
|
||||
ctx = repo.changectx(node)
|
||||
branch = ctx.branch()
|
||||
|
||||
# If `branches` isn't specified, notify on all branches.
|
||||
branch_whitelist = get_config(ui, "branches")
|
||||
branch_blacklist = get_config(ui, "ignore_branches")
|
||||
|
||||
if branch_whitelist:
|
||||
# Only send notifications on branches we are watching.
|
||||
watched_branches = [b.lower().strip() for b in branch_whitelist.split(",")]
|
||||
if branch.lower() not in watched_branches:
|
||||
ui.debug("Zulip: ignoring event for {branch}\n".format(branch=branch))
|
||||
exit(0)
|
||||
|
||||
if branch_blacklist:
|
||||
# Don't send notifications for branches we've ignored.
|
||||
ignored_branches = [b.lower().strip() for b in branch_blacklist.split(",")]
|
||||
if branch.lower() in ignored_branches:
|
||||
ui.debug("Zulip: ignoring event for {branch}\n".format(branch=branch))
|
||||
exit(0)
|
||||
|
||||
# The first and final commits in the changeset.
|
||||
base = repo[node].rev()
|
||||
tip = len(repo)
|
||||
|
||||
email = get_config(ui, "email")
|
||||
api_key = get_config(ui, "api_key")
|
||||
site = get_config(ui, "site")
|
||||
|
||||
if not (email and api_key):
|
||||
ui.warn("Zulip: missing email or api_key configurations\n")
|
||||
ui.warn("in the [zulip] section of your .hg/hgrc.\n")
|
||||
exit(1)
|
||||
|
||||
stream = get_config(ui, "stream")
|
||||
# Give a default stream if one isn't provided.
|
||||
if not stream:
|
||||
stream = "commits"
|
||||
|
||||
web_url = get_config(ui, "web_url")
|
||||
user = ctx.user()
|
||||
content = format_summary_line(web_url, user, base, tip, branch, node)
|
||||
content += format_commit_lines(web_url, repo, base, tip)
|
||||
|
||||
subject = branch
|
||||
|
||||
ui.debug("Sending to Zulip:\n")
|
||||
ui.debug(content + "\n")
|
||||
|
||||
send_zulip(email, api_key, site, stream, subject, content)
|
149
api/integrations/jira/org/humbug/jira/ZulipListener.groovy
Normal file
149
api/integrations/jira/org/humbug/jira/ZulipListener.groovy
Normal file
@@ -0,0 +1,149 @@
|
||||
/*
|
||||
* Copyright (c) 2014 Zulip, Inc
|
||||
*/
|
||||
|
||||
package org.zulip.jira
|
||||
|
||||
import static com.atlassian.jira.event.type.EventType.*
|
||||
|
||||
import com.atlassian.jira.event.issue.AbstractIssueEventListener
|
||||
import com.atlassian.jira.event.issue.IssueEvent
|
||||
|
||||
import java.util.logging.Level
|
||||
import java.util.logging.Logger
|
||||
|
||||
import org.apache.commons.httpclient.HttpClient
|
||||
import org.apache.commons.httpclient.HttpStatus;
|
||||
import org.apache.commons.httpclient.methods.PostMethod
|
||||
import org.apache.commons.httpclient.NameValuePair
|
||||
|
||||
class ZulipListener extends AbstractIssueEventListener {
    Logger LOGGER = Logger.getLogger(ZulipListener.class.getName());

    // The email address of one of the bots you created on your Zulip settings page.
    String zulipEmail = ""
    // That bot's API key.
    String zulipAPIKey = ""

    // What stream to send messages to. Must already exist.
    String zulipStream = "jira"

    // The base JIRA url for browsing
    String issueBaseUrl = "https://jira.COMPANY.com/browse/"

    // Your zulip domain, only change if you have a custom one
    String base_url = "https://api.zulip.com"

    @Override
    void workflowEvent(IssueEvent event) {
        processIssueEvent(event)
    }

    /**
     * Builds a Markdown-formatted Zulip message for the given JIRA issue
     * event and posts it to the configured stream. Event types other than
     * commented/created/assigned/deleted/resolved/closed/reopened are
     * silently ignored (returns null).
     */
    String processIssueEvent(IssueEvent event) {
        String author = event.user.displayName
        String issueId = event.issue.key
        String issueUrl = issueBaseUrl + issueId
        // Reuse issueUrl rather than recomputing issueBaseUrl + issueId.
        String issueUrlMd = String.format("[%s](%s)", issueId, issueUrl)
        String title = event.issue.summary
        // Zulip subjects are limited in length; keep them short.
        String subject = truncate(String.format("%s: %s", issueId, title), 60)
        String assignee = "no one"
        if (event.issue.assignee) {
            assignee = event.issue.assignee.name
        }
        String comment = "";
        if (event.comment) {
            comment = event.comment.body
        }

        String content;

        // Event types:
        // https://docs.atlassian.com/jira/5.0/com/atlassian/jira/event/type/EventType.html
        // Issue API:
        // https://docs.atlassian.com/jira/5.0/com/atlassian/jira/issue/Issue.html
        switch (event.getEventTypeId()) {
            case ISSUE_COMMENTED_ID:
                content = String.format("%s **updated** %s with comment:\n\n> %s",
                                        author, issueUrlMd, comment)
                break
            case ISSUE_CREATED_ID:
                content = String.format("%s **created** %s priority %s, assigned to **%s**: \n\n> %s",
                                        author, issueUrlMd, event.issue.priorityObject.name,
                                        assignee, title)
                break
            case ISSUE_ASSIGNED_ID:
                content = String.format("%s **reassigned** %s to **%s**",
                                        author, issueUrlMd, assignee)
                break
            case ISSUE_DELETED_ID:
                content = String.format("%s **deleted** %s!",
                                        author, issueUrlMd)
                break
            case ISSUE_RESOLVED_ID:
                content = String.format("%s **resolved** %s as %s:\n\n> %s",
                                        author, issueUrlMd, event.issue.resolutionObject.name,
                                        comment)
                break
            case ISSUE_CLOSED_ID:
                content = String.format("%s **closed** %s with resolution %s:\n\n> %s",
                                        author, issueUrlMd, event.issue.resolutionObject.name,
                                        comment)
                break
            case ISSUE_REOPENED_ID:
                content = String.format("%s **reopened** %s:\n\n> %s",
                                        author, issueUrlMd, comment)
                break
            default:
                // Unhandled event type: do nothing.
                return
        }

        sendStreamMessage(zulipStream, subject, content)
    }

    /**
     * POSTs the given form parameters to the Zulip API endpoint named by
     * method and returns the response body. Non-200 responses are logged
     * (including the parameters sent) but still returned; I/O failures are
     * rethrown as RuntimeException. The connection is always released.
     */
    String post(String method, NameValuePair[] parameters) {
        PostMethod post = new PostMethod(zulipUrl(method))
        post.setRequestHeader("Content-Type", post.FORM_URL_ENCODED_CONTENT_TYPE)
        // TODO: Include more useful data in the User-agent
        post.setRequestHeader("User-agent", "ZulipJira/0.1")
        try {
            post.setRequestBody(parameters)
            HttpClient client = new HttpClient()
            client.executeMethod(post)
            String response = post.getResponseBodyAsString()
            if (post.getStatusCode() != HttpStatus.SC_OK) {
                String params = ""
                for (NameValuePair pair: parameters) {
                    params += "\n" + pair.getName() + ":" + pair.getValue()
                }
                LOGGER.log(Level.SEVERE, "Error sending Zulip message:\n" + response + "\n\n" +
                                         "We sent:" + params)
            }
            return response;
        } catch (IOException e) {
            throw new RuntimeException(e)
        } finally {
            post.releaseConnection()
        }
    }

    /**
     * Returns string unchanged when it fits within length characters;
     * otherwise truncates it and appends "..." so the result is exactly
     * length characters. Uses <= so a string of exactly length characters
     * is not needlessly truncated (the previous < comparison clipped it).
     */
    String truncate(String string, int length) {
        if (string.length() <= length) {
            return string
        }
        return string.substring(0, length - 3) + "..."
    }

    /**
     * Sends a message to the given Zulip stream under the given subject,
     * authenticating as the configured bot. Returns the raw API response.
     */
    String sendStreamMessage(String stream, String subject, String message) {
        NameValuePair[] body = [new NameValuePair("api-key", zulipAPIKey),
                                new NameValuePair("email", zulipEmail),
                                new NameValuePair("type", "stream"),
                                new NameValuePair("to", stream),
                                new NameValuePair("subject", subject),
                                new NameValuePair("content", message)]
        return post("send_message", body);
    }

    // Builds the full URL for a v1 Zulip API method.
    String zulipUrl(method) {
        return base_url + "/v1/" + method
    }
}
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user