requirements: Migrate to uv.

https://docs.astral.sh/uv/

Signed-off-by: Anders Kaseorg <anders@zulip.com>
Author: Anders Kaseorg
Date: 2025-02-24 15:01:01 -08:00
Committed by: Tim Abbott
parent 72f5df2e09
commit d7556b4060
59 changed files with 5962 additions and 8166 deletions


@@ -11,9 +11,9 @@ on:
- manage.py
- pnpm-lock.yaml
- puppet/**
- requirements/**
- scripts/**
- tools/**
- uv.lock
- web/babel.config.js
- web/postcss.config.js
- web/third/**
@@ -69,7 +69,7 @@ jobs:
- name: Create cache directories
run: |
dirs=(/srv/zulip-{venv,emoji}-cache)
dirs=(/srv/zulip-emoji-cache)
sudo mkdir -p "${dirs[@]}"
sudo chown -R github "${dirs[@]}"
@@ -79,12 +79,12 @@ jobs:
path: /__w/.pnpm-store
key: v1-pnpm-store-jammy-${{ hashFiles('pnpm-lock.yaml') }}
- name: Restore python cache
- name: Restore uv cache
uses: actions/cache@v4
with:
path: /srv/zulip-venv-cache
key: v1-venv-jammy-${{ hashFiles('requirements/dev.txt') }}
restore-keys: v1-venv-jammy
path: ~/.cache/uv
key: uv-jammy-${{ hashFiles('uv.lock') }}
restore-keys: uv-jammy-
- name: Restore emoji cache
uses: actions/cache@v4
@@ -109,6 +109,9 @@ jobs:
path="$(pnpm store path)"
[[ "$path" == /__w/.pnpm-store/* ]]
- name: Minimize uv cache
run: uv cache prune --ci
- name: Generate failure report string
id: failure_report_string
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
@@ -182,7 +185,7 @@ jobs:
- name: Create cache directories
run: |
dirs=(/srv/zulip-{venv,emoji}-cache)
dirs=(/srv/zulip-emoji-cache)
sudo mkdir -p "${dirs[@]}"
sudo chown -R github "${dirs[@]}"
@@ -280,7 +283,7 @@ jobs:
- name: Create cache directories
run: |
dirs=(/srv/zulip-{venv,emoji}-cache)
dirs=(/srv/zulip-emoji-cache)
sudo mkdir -p "${dirs[@]}"
sudo chown -R github "${dirs[@]}"


@@ -66,7 +66,7 @@ jobs:
- name: Create cache directories
run: |
dirs=(/srv/zulip-{venv,emoji}-cache)
dirs=(/srv/zulip-emoji-cache)
sudo mkdir -p "${dirs[@]}"
sudo chown -R github "${dirs[@]}"
@@ -76,12 +76,12 @@ jobs:
path: /__w/.pnpm-store
key: v1-pnpm-store-${{ matrix.os }}-${{ hashFiles('pnpm-lock.yaml') }}
- name: Restore python cache
- name: Restore uv cache
uses: actions/cache@v4
with:
path: /srv/zulip-venv-cache
key: v1-venv-${{ matrix.os }}-${{ hashFiles('requirements/dev.txt') }}
restore-keys: v1-venv-${{ matrix.os }}
path: ~/.cache/uv
key: uv-${{ matrix.os }}-${{ hashFiles('uv.lock') }}
restore-keys: uv-${{ matrix.os }}-
- name: Restore emoji cache
uses: actions/cache@v4
@@ -180,11 +180,8 @@ jobs:
- name: Run miscellaneous tests
run: |
source tools/ci/activate-venv
uv lock --check
# Currently our compiled requirements files will differ for different
# Python versions, so we will run test-locked-requirements only on the
# platform with the oldest one.
# ./tools/test-locked-requirements
# ./tools/test-run-dev # https://github.com/zulip/zulip/pull/14233
#
# This test has been persistently flaky at like 1% frequency, is slow,
@@ -214,12 +211,6 @@ jobs:
exit 1
fi
- name: Test locked requirements
if: ${{ matrix.os == 'jammy' }}
run: |
source tools/ci/activate-venv
./tools/test-locked-requirements
- name: Upload coverage reports
# Only upload coverage when both frontend and backend
@@ -248,6 +239,9 @@ jobs:
path="$(pnpm store path)"
[[ "$path" == /__w/.pnpm-store/* ]]
- name: Minimize uv cache
run: uv cache prune --ci
- name: Generate failure report string
id: failure_report_string
if: ${{ failure() && github.repository == 'zulip/zulip' && github.event_name == 'push' }}
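Both uv-specific steps added in these workflows can be exercised locally as well; a minimal sketch (both are real uv subcommands, shown here outside CI):

```console
$ uv lock --check      # exits nonzero if uv.lock is out of date with pyproject.toml
$ uv cache prune --ci  # drops re-downloadable pre-built wheels, keeping wheels built from source
```

`uv lock --check` takes over the role of the removed `tools/test-locked-requirements` check, and pruning keeps `~/.cache/uv` small enough to be worth caching between runs.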

.gitignore

@@ -35,6 +35,7 @@ package-lock.json
/.dmypy.json
/.ruff_cache
/.venv
# Generated i18n data
/locale/en
@@ -57,8 +58,6 @@ zulip-git-version
## Files (or really symlinks) created in a prod deployment
/zproject/prod_settings.py
/zulip-current-venv
/zulip-py3-venv
## Files left by various editors and local environments
# (Ideally these should be in everyone's respective personal gitignore files.)


@@ -5,11 +5,15 @@ build:
os: ubuntu-22.04
tools:
python: "3.10"
jobs:
create_environment:
- asdf plugin add uv
- asdf install uv 0.6.2
- asdf global uv 0.6.2
- UV_PROJECT_ENVIRONMENT=$READTHEDOCS_VIRTUALENV_PATH uv venv
install:
- UV_PROJECT_ENVIRONMENT=$READTHEDOCS_VIRTUALENV_PATH uv sync --frozen --only-group=docs
sphinx:
configuration: docs/conf.py
fail_on_warning: true
python:
install:
- requirements: requirements/docs.txt
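The `UV_PROJECT_ENVIRONMENT` variable above redirects uv's project virtualenv to the path Read the Docs provisions; the same pattern works for a local docs-only environment, sketched here with an illustrative `.docs-venv` path:

```console
$ UV_PROJECT_ENVIRONMENT=.docs-venv uv venv
$ UV_PROJECT_ENVIRONMENT=.docs-venv uv sync --frozen --only-group=docs
```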


@@ -259,7 +259,7 @@ After running the above command, you should see something similar to:
Using `manage.py` from within the Zulip development environment:
```console
(zulip-py3-venv) vagrant@vagrant:/srv/zulip$
(zulip-server) vagrant@vagrant:/srv/zulip$
./manage.py send_webhook_fixture_message \
--fixture=zerver/webhooks/helloworld/fixtures/hello.json \
'--url=http://localhost:9991/api/v1/external/helloworld?api_key=<api_key>'
@@ -402,7 +402,7 @@ Once you have written some tests, you can run just these new tests from within
the Zulip development environment with this command:
```console
(zulip-py3-venv) vagrant@vagrant:/srv/zulip$
(zulip-server) vagrant@vagrant:/srv/zulip$
./tools/test-backend zerver/webhooks/helloworld
```
@@ -451,7 +451,7 @@ Learn how Zulip integrations work with this simple Hello World example!
environment](https://zulip.readthedocs.io/en/latest/development/overview.html):
```
(zulip-py3-venv) vagrant@vagrant:/srv/zulip$
(zulip-server) vagrant@vagrant:/srv/zulip$
./manage.py send_webhook_fixture_message \
> --fixture=zerver/tests/fixtures/helloworld/hello.json \
> '--url=http://localhost:9991/api/v1/external/helloworld?api_key=abcdefgh&stream=stream%20name;'


@@ -60,7 +60,7 @@ Once your remote dev instance is ready:
`ssh zulipdev@<username>.zulipdev.org` on the command line
(Terminal for macOS and Linux, Bash for Git on Windows).
- There is no password; your account is configured to use your SSH keys.
- Once you log in, you should see `(zulip-py3-venv) ~$`.
- Once you log in, you should see `(zulip-server) ~$`.
- To start the dev server, `cd zulip` and then run `./tools/run-dev`.
- While the dev server is running, you can see the Zulip server in your browser
at http://zulip.username.zulipdev.org:9991.


@@ -54,7 +54,7 @@ Zulip and run the following commands:
```bash
# From inside a clone of zulip.git:
./tools/provision
source /srv/zulip-py3-venv/bin/activate
source .venv/bin/activate
./tools/run-dev # starts the development server
```
@@ -212,8 +212,8 @@ expected.
1. Set the `EXTERNAL_HOST` environment variable.
```console
(zulip-py3-venv) vagrant@ubuntu-18:/srv/zulip$ export EXTERNAL_HOST="$(hostname -I | xargs):9991"
(zulip-py3-venv) vagrant@ubuntu-18:/srv/zulip$ echo $EXTERNAL_HOST
(zulip-server) vagrant@ubuntu-18:/srv/zulip$ export EXTERNAL_HOST="$(hostname -I | xargs):9991"
(zulip-server) vagrant@ubuntu-18:/srv/zulip$ echo $EXTERNAL_HOST
```
The output will be like:
@@ -234,7 +234,7 @@ expected.
1. You should now be able to start the Zulip development server.
```console
(zulip-py3-venv) vagrant@ubuntu-18:/srv/zulip$ ./tools/run-dev
(zulip-server) vagrant@ubuntu-18:/srv/zulip$ ./tools/run-dev
```
The output will look like:


@@ -293,7 +293,7 @@ simply click **Allow access**.)
$ # Install/update the Zulip development environment
$ ./tools/provision
$ # Enter the Zulip Python environment
$ source /srv/zulip-py3-venv/bin/activate
$ source .venv/bin/activate
$ # Start the development server
$ ./tools/run-dev
```
@@ -681,11 +681,11 @@ Alternatively, you can use a command to terminate/shutdown your WSL2 environment
On Windows with WSL 2, to resume developing you just need to open a new Git
BASH window. Then change into your `zulip` folder and verify the Python
environment was properly activated (you should see `(zulip-py3-venv)`). If the
`(zulip-py3-venv)` part is missing, run:
environment was properly activated (you should see `(zulip-server)`). If the
`(zulip-server)` part is missing, run:
```console
$ source /srv/zulip-py3-venv/bin/activate
$ source .venv/bin/activate
```
:::
@@ -765,7 +765,7 @@ When reporting your issue, please include the following information:
The output of `tools/diagnose` run inside the Vagrant guest is also
usually helpful.
#### Vagrant guest doesn't show (zulip-py3-venv) at start of prompt
#### Vagrant guest doesn't show (zulip-server) at start of prompt
This is caused by provisioning failing to complete successfully. You
can see the errors in `var/log/provision.log`; it should end with
@@ -1009,13 +1009,13 @@ Once you've provisioned successfully, you'll get output like this:
```console
Zulip development environment setup succeeded!
(zulip-py3-venv) vagrant@vagrant:/srv/zulip$
(zulip-server) vagrant@vagrant:/srv/zulip$
```
If the `(zulip-py3-venv)` part is missing, this is because your
If the `(zulip-server)` part is missing, this is because your
installation failed the first time before the Zulip virtualenv was
created. You can fix this by just closing the shell and running
`vagrant ssh` again, or using `source /srv/zulip-py3-venv/bin/activate`.
`vagrant ssh` again, or using `source .venv/bin/activate`.
Finally, if you encounter any issues that weren't caused by your
Internet connection, please report them! We try hard to keep Zulip


@@ -11,7 +11,7 @@ From the window where run-dev is running:
2016-05-04 18:33:13,330 INFO 127.0.0.1 GET 200 92ms /register/ (unauth@zulip via ?)
^C
KeyboardInterrupt
(zulip-py3-venv) vagrant@vagrant:/srv/zulip$ exit
(zulip-server) vagrant@vagrant:/srv/zulip$ exit
logout
Connection to 127.0.0.1 closed.
$


@@ -7,5 +7,5 @@ Zulip server:
$ vagrant up
$ vagrant ssh
(zulip-py3-venv) vagrant@vagrant:/srv/zulip$ ./tools/run-dev
(zulip-server) vagrant@vagrant:/srv/zulip$ ./tools/run-dev
```


@@ -14,14 +14,14 @@ Welcome to Ubuntu 22.04.3 LTS (GNU/Linux 5.15.0-92-generic x86_64)
Congrats, you're now inside the Zulip development environment!
You can confirm this by looking at the command prompt, which starts
with `(zulip-py3-venv)vagrant@`. If it just starts with `vagrant@`, your
with `(zulip-server) vagrant@`. If it just starts with `vagrant@`, your
provisioning failed and you should look at the
[troubleshooting section](/development/setup-recommended.md#troubleshooting-and-common-errors).
Next, start the Zulip server:
```console
(zulip-py3-venv) vagrant@vagrant:/srv/zulip$ ./tools/run-dev
(zulip-server) vagrant@vagrant:/srv/zulip$ ./tools/run-dev
```
You will see something like:


@@ -71,11 +71,10 @@ and looking at the content on the GitHub web UI, since GitHub renders
Markdown, though that won't be as faithful as the `make html`
approach or the preview build.
When editing dependencies for the Zulip documentation, you should edit
`requirements/docs.in` and then run `tools/update-locked-requirements`
which updates docs.txt file (which is used by ReadTheDocs to build the
Zulip developer documentation, without installing all of Zulip's
dependencies).
We manage Python requirements for the documentation build in the `docs` uv
[group](https://docs.astral.sh/uv/concepts/projects/dependencies/#dependency-groups),
which is used by our ReadTheDocs build configuration in
[`.readthedocs.yaml`](https://docs.readthedocs.com/platform/stable/config-file/v2.html).
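Since the `dev` dependency group includes the `docs` group (see the `pyproject.toml` added in this commit), a provisioned development environment already has these packages; a hedged sketch of a local docs build, assuming the standard Sphinx `make html` target mentioned above:

```console
$ uv sync                             # installs the dev group, which includes docs
$ uv run --no-sync make -C docs html
```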
## Core website documentation


@@ -24,7 +24,7 @@ checklist of things one must do before making a PyPI release:
[Zulip server repo][zulip-repo] to render the interactive bots on
[Zulip's integrations page](https://zulip.com/integrations/). The server
repo installs the `zulip_bots` package
[directly from the GitHub repository][requirements-link] so that this extra
directly from the GitHub repository so that this extra
content is included in its installation of the package.
3. Follow PyPI's instructions in
@@ -57,9 +57,9 @@ Now it is time to [update the dependencies](dependencies) in the
most cases.
2. Update the release tags in the Git URLs for `zulip` and `zulip_bots` in
`requirements/common.in`.
`pyproject.toml`.
3. Run `tools/update-locked-requirements` to update the rest of the requirements files.
3. Run `uv lock` to update the Python lock file.
4. Commit your changes and submit a PR! **Note**: See
[this example commit][example-zulip-commit] to get an idea of what the final change
@@ -83,7 +83,6 @@ update to dependencies required in the [Zulip server repo][zulip-repo].
[zulip-package]: https://github.com/zulip/python-zulip-api/tree/main/zulip
[zulip-bots-package]: https://github.com/zulip/python-zulip-api/tree/main/zulip_bots
[zulip-botserver-package]: https://github.com/zulip/python-zulip-api/tree/main/zulip_botserver
[requirements-link]: https://github.com/zulip/zulip/blob/main/requirements/common.in#L116
[generating-dist-archives]: https://packaging.python.org/en/latest/tutorials/packaging-projects/#generating-distribution-archives
[upload-dist-archives]: https://packaging.python.org/en/latest/tutorials/packaging-projects/#uploading-the-distribution-archives
[install-pkg]: https://packaging.python.org/en/latest/tutorials/packaging-projects/#installing-your-newly-uploaded-package
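Concretely, steps 2 and 3 above amount to bumping the release tag in the Git URLs under `[tool.uv.sources]` and re-locking; a hypothetical sketch, with an illustrative next version number:

```toml
# pyproject.toml (0.9.1 is a placeholder for the new release tag)
[tool.uv.sources]
zulip = { url = "https://github.com/zulip/python-zulip-api/archive/0.9.1.zip", subdirectory = "zulip" }
zulip-bots = { url = "https://github.com/zulip/python-zulip-api/archive/0.9.1.zip", subdirectory = "zulip_bots" }
```

followed by `uv lock` to regenerate `uv.lock`.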


@@ -125,70 +125,12 @@ Ubuntu 22.04 being the platform requiring 3.10 support. The comments
in `.github/workflows/zulip-ci.yml` document the Python versions used
by each supported platform.
We manage Python packages via the Python-standard `requirements.txt`
system and virtualenvs, but there's a number of interesting details
about how Zulip makes this system work well for us that are worth
highlighting. The system is largely managed by the code in
`scripts/lib/setup_venv.py`
We manage third-party Python packages using [uv](https://docs.astral.sh/uv/),
with our requirements listed in
[pyproject.toml](https://docs.astral.sh/uv/concepts/projects/layout/#the-pyprojecttoml),
and locked versions stored in
[`uv.lock`](https://docs.astral.sh/uv/concepts/projects/layout/#the-lockfile).
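In day-to-day terms the workflow reduces to a few uv commands; a minimal sketch (the package names are illustrative, and since Zulip declares its direct dependencies in dependency groups, `uv add` takes a `--group` flag):

```console
$ uv add --group prod some-package   # declare a new direct dependency and re-lock
$ uv lock --upgrade-package django   # re-pin a single locked dependency
$ uv sync --frozen                   # install exactly what uv.lock records
```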
- **Using `pip` to manage dependencies**. This is standard in the
Python ecosystem, and means we only need to record a list of
versions in a `requirements.txt` file to declare what we're using.
Since we have a few different installation targets, we maintain
several `requirements.txt` format files in the `requirements/`
directory (e.g., `dev.in` for development, `prod.in` for
production, `docs.in` for ReadTheDocs, `common.in` for the vast
majority of packages common to prod and development, etc.). We use
`pip install --no-deps` to ensure we only install the packages we
explicitly declare as dependencies.
- **virtualenv with pinned versions**. For a large application like
Zulip, it is important to ensure that we're always using consistent,
predictable versions of all of our Python dependencies. To ensure
this, we install our dependencies in a [virtualenv][] that contains
only the packages and versions that Zulip needs, and we always pin
exact versions of our dependencies in our `requirements.txt` files.
We pin exact versions, not minimum versions, so that installing
Zulip won't break if a dependency makes a buggy release. A side
effect is that it's easy to debug problems caused by dependency
upgrades, since we're always doing those upgrades with an explicit
commit updating the `requirements/` directory.
- **Pinning versions of indirect dependencies**. We "pin" or "lock"
the versions of our indirect dependencies files with
`tools/update-locked-requirements` (powered by `pip-compile`). What
this means is that we have some "source" requirements files, like
`requirements/common.in`, that declare the packages that Zulip
depends on directly. Those packages have their own recursive
dependencies. When adding or removing a dependency from Zulip, one
simply edits the appropriate "source" requirements files, and then
runs `tools/update-locked-requirements`. That tool will use
`pip-compile` to generate the locked requirements files like
`prod.txt`, `dev.txt` etc files that explicitly declare versions of
all of Zulip's recursive dependencies. For indirect dependencies
(i.e. dependencies not explicitly declared in the source
requirements files), it provides helpful comments explaining which
direct dependency (or dependencies) needed that indirect dependency.
The process for using this system is documented in more detail in
`requirements/README.md`.
- **Caching of virtualenvs and packages**. To make updating the
dependencies of a Zulip installation efficient, we maintain a cache
of virtualenvs named by the hash of the relevant `requirements.txt`
file (`scripts/lib/hash_reqs.py`). These caches live under
`/srv/zulip-venv-cache/<hash>`. That way, when re-provisioning a
development environment or deploying a new production version with
the same Python dependencies, no downloading or installation is
required: we just use the same virtualenv. When the only changes
are upgraded versions, we'll use [virtualenv-clone][] to clone the
most similar existing virtualenv and then just upgrade the packages
needed, making small version upgrades extremely efficient. And
finally, we use `pip`'s built-in caching to ensure that a specific
version of a specific package is only downloaded once.
- **Garbage-collecting caches**. We have a tool,
`scripts/lib/clean_venv_cache.py`, which will clean old cached
virtualenvs that are no longer in use. In production, the algorithm
preserves recent virtualenvs as well as those in use by any current
production deployment directory under `/home/zulip/deployments/`.
This helps ensure that a Zulip installation doesn't leak large
amounts of disk over time.
- **Scripts**. Often, we want a script running in production to use
the Zulip virtualenv. To make that work without a lot of duplicated
code, we have a helpful function,
@@ -203,15 +145,7 @@ highlighting. The system is largely managed by the code in
`ignore_missing_imports` for the new library. See
[our mypy docs][mypy-docs] for more details.
### Upgrading packages
See the [README][requirements-readme] file in `requirements/` directory
to learn how to upgrade a single Python package.
[mypy-docs]: ../testing/mypy.md
[requirements-readme]: https://github.com/zulip/zulip/blob/main/requirements/README.md#requirements
[stack-overflow]: https://askubuntu.com/questions/8653/how-to-keep-processes-running-after-ending-ssh-session
[caching]: https://help.github.com/en/articles/caching-your-github-password-in-git
## JavaScript and other frontend packages


@@ -15,7 +15,7 @@ new major versions of Django. Here are the steps:
much of the changes for the migration as we can independently from
the big cutover.
- Check the version support of the third-party Django packages we use
(`git grep django requirements/` to see a list), upgrade any as
(`git grep django pyproject.toml` to see a list), upgrade any as
needed and file bugs upstream for any that lack support. Look into
fixing said bugs.
- Look at the pieces of Django code that we've copied and then


@@ -124,6 +124,6 @@ would have used had the cache not existed. In practice, bugs are
always possible, so be mindful of this possibility.
A consequence of this caching is that test jobs for branches which
modify `package.json`, `requirements/`, and other key dependencies
modify `package.json`, `pyproject.toml`, and other key dependencies
will be significantly slower than normal, because they won't get to
benefit from the cache.


@@ -70,11 +70,6 @@ Additionally, Zulip also has about a dozen smaller tests suites:
- `tools/test-api`: Tests that the API documentation at `/api`
actually works; the actual code for this is defined in
`zerver/openapi/python_examples.py`.
- `test-locked-requirements`: Verifies that developers didn't forget
to run `tools/update-locked-requirements` after modifying
`requirements/*.in`. See
[our dependency documentation](../subsystems/dependencies.md) for
details on the system this is verifying.
- `tools/check-capitalization`: Checks whether translated strings (aka
user-facing strings) correctly follow Zulip's capitalization
conventions. This requires some maintenance of an exclude list


@@ -26,8 +26,6 @@ export default [
"var",
"web/generated",
"web/third",
"zulip-current-venv",
"zulip-py3-venv",
],
},
js.configs.recommended,


@@ -30,7 +30,7 @@ deploy_environment=$(crudini --get /etc/zulip/zulip.conf machine deploy_type ||
commit_count=$(git rev-list "${from}..${to}" | wc -l)
zulip_send() {
./zulip-py3-venv/bin/zulip-send \
uv run --no-sync zulip-send \
--site "$zulip_notify_server" \
--user "$zulip_notify_bot_email" \
--api-key "$zulip_api_key" \


@@ -8,7 +8,7 @@
; variables can be expanded using this syntax: "%(ENV_HOME)s".
[program:zulip-django]
command=nice -n5 /home/zulip/deployments/current/zulip-current-venv/bin/uwsgi --ini /etc/zulip/uwsgi.ini
command=nice -n5 uv run --no-sync uwsgi --ini /etc/zulip/uwsgi.ini
environment=HTTP_proxy="<%= @proxy %>",HTTPS_proxy="<%= @proxy %>"
priority=100 ; the relative start priority (default 999)
autostart=true ; start at supervisord start (default: true)
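This hunk and the previous one both rely on `uv run --no-sync`, which runs a command in the project's existing environment without first re-syncing it against the lock file; that is the right behavior in production, where the environment is built at deploy time. An illustrative invocation:

```console
$ uv run --no-sync uwsgi --version
```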


@@ -1,3 +1,326 @@
[project]
name = "zulip-server"
version = "0.1.0"
requires-python = ">=3.10"
[dependency-groups]
prod = [
# Django itself
"django[argon2]==5.1.*",
"asgiref", # https://github.com/django/asgiref/pull/494
# needed for NotRequired, ParamSpec
"typing-extensions",
# Needed for rendering backend templates
"jinja2",
# Needed for Markdown processing
"markdown",
"pygments",
"jsx-lexer",
"uri-template",
"regex",
# Needed for manage.py
"ipython",
# Needed for image processing and thumbnailing
"pyvips",
# Needed for building complex DB queries
"sqlalchemy==1.4.*",
"greenlet",
# Needed for S3 file uploads and other AWS tools
"boto3",
# The runtime-relevant part of boto3-stubs (see mypy.in)
"mypy-boto3-s3",
"mypy-boto3-ses",
"mypy-boto3-sns",
"mypy-boto3-sqs",
# Needed for integrations
"defusedxml",
# Needed for LDAP support
"python-ldap",
"django-auth-ldap",
# Django extension providing bitfield support
"django-bitfield",
# Needed for Android push notifications
"firebase-admin",
# Needed for the email mirror
"html2text",
"talon-core",
# Needed for inlining the CSS in emails
"css-inline",
# Needed for JWT-based auth
"pyjwt",
# Needed to access RabbitMQ
"pika",
# Needed to access our database
"psycopg2",
# Needed for memcached usage
"python-binary-memcached",
# Needed for compression support in memcached via python-binary-memcached
"django-bmemcached",
# Needed for zerver/tests/test_timestamp.py
"python-dateutil",
# Needed for Redis
"redis",
# Tornado used for server->client push system
"tornado",
# Fast JSON parser
"orjson",
# Needed for iOS push notifications
"aioapns",
"python-twitter",
# To parse po files
"polib",
# Needed for cloning virtual environments
"virtualenv-clone",
# Needed for link preview
"beautifulsoup4",
"pyoembed",
"python-magic",
# The Zulip API bindings, from its own repository.
"zulip",
"zulip-bots",
# Used for Hesiod lookups, etc.
"py3dns",
# Install Python Social Auth
"social-auth-app-django",
"social-auth-core[azuread,openidconnect,saml]",
"python3-saml",
# For encrypting a login token to the desktop app
"cryptography",
# Needed for messages' rendered content parsing in push notifications.
"lxml",
# Needed for 2-factor authentication
"django-two-factor-auth[call,phonenumberslite,sms]",
# Needed for processing payments (in corporate)
"stripe",
# For checking whether email of the user is from a disposable email provider.
"disposable-email-domains",
# Needed for parsing YAML with JSON references from the REST API spec files
"jsonref",
# Needed for string matching in AlertWordProcessor
"pyahocorasick",
# Needed for function decorators that don't break introspection.
# Used for rate limiting authentication.
"decorator",
# For server-side enforcement of password strength
"zxcvbn",
# Needed for sending HTTP requests
"requests[security]",
"requests-oauthlib",
# For OpenAPI schema validation.
"openapi-core",
"werkzeug<3.1.2", # https://github.com/python-openapi/openapi-core/issues/938
# For reporting errors to sentry.io
"sentry-sdk",
# For detecting URLs to link
"tlds",
# Unicode Collation Algorithm for sorting multilingual strings
"pyuca",
# Handle connection retries with exponential backoff
"backoff",
# Needed for reading bson files in rocketchat import tool
"pymongo",
# Non-backtracking regular expressions
"google-re2",
# For querying recursive group membership
"django-cte",
# SCIM integration
"django-scim2",
# Circuit-breaking for outgoing services
"circuitbreaker",
# Runtime monkeypatching of django-stubs generics
"django-stubs-ext",
# Structured data representation with parsing.
"pydantic",
"annotated-types",
# For requesting LLM API endpoints.
"litellm",
# Used for running the Zulip production Django server
"uwsgi",
# Used for monitoring memcached
"prometheus-client",
]
docs = [
# Needed to build RTD docs
"sphinx",
"sphinx-rtd-theme",
"sphinx-design",
# Needed to build Markdown docs
"myst-parser",
]
dev = [
{ include-group = "prod" },
{ include-group = "docs" },
# moto s3 mock
"moto[s3]",
# For tools/run-dev
"aiohttp",
# Needed for documentation links test
"scrapy",
# Needed to compute test coverage
"coverage",
# fake for LDAP testing
"fakeldap",
# For testing mock http requests
"responses",
# For doing highly usable Python profiling
"line-profiler",
# Python reformatter
"black",
# Python linter
"ruff",
# Needed for watching file changes
"pyinotify",
"pyasyncore", # https://github.com/seb-m/pyinotify/issues/204
# Needed to run tests in parallel
"tblib",
# For linting Git commit messages
"gitlint-core",
# Needed for visualising cProfile reports
"snakeviz",
# Needed for creating DigitalOcean droplets
"python-digitalocean",
# zulip's linting framework - zulint
"zulint",
# For type checking
"mypy[faster-cache]",
"boto3-stubs[s3,ses,sns,sqs]",
"django-stubs",
"lxml-stubs",
"SQLAlchemy[mypy]",
"types-beautifulsoup4",
"types-boto",
"types-chardet",
"types-decorator",
"types-defusedxml",
"types-jsonschema",
"types-Markdown",
"types-oauthlib",
"types-polib",
"types-pika",
"types-psycopg2",
"types-Pygments",
"types-pyOpenSSL",
"types-python-dateutil",
"types-PyYAML",
"types-redis",
"types-regex",
"types-requests",
"types-zxcvbn",
# Needed for tools/check-thirdparty
"python-debian",
# Pattern-based lint tool
"semgrep<1.80.0", # https://github.com/semgrep/semgrep/issues/10408
# For sorting versions when uploading releases
"natsort",
# For spell check linter
"codespell",
# For mocking time
"time-machine",
]
[tool.uv]
no-binary-package = ["lxml", "xmlsec"]
[tool.uv.sources]
# https://github.com/django/asgiref/pull/494
asgiref = { url = "https://github.com/andersk/asgiref/archive/8a2717c14bce1b8dd37371c675ee3728e66c3fe3.zip" }
# Forked to avoid pulling in scipy: https://github.com/mailgun/talon/pull/200
# and chardet, cchardet: https://github.com/mailgun/talon/pull/239
# and fix invalid escape sequences: https://github.com/mailgun/talon/pull/245
talon-core = { url = "https://github.com/zulip/talon/archive/e87a64dccc3c5ee1b8ea157d4b6e15ecd46f2bed.zip", subdirectory = "talon-core" }
# We integrate with these tightly, including fetching content not included in
# the official PyPI release tarballs, such as logos, assets and documentation
# files that we render on our /integrations/ page. Therefore, we need to pin the
# version from Git rather than a PyPI release. Keeping everything in one
# repository simplifies the process of implementing and documenting new bots for
# new contributors.
zulip = { url = "https://github.com/zulip/python-zulip-api/archive/0.9.0.zip", subdirectory = "zulip" }
zulip-bots = { url = "https://github.com/zulip/python-zulip-api/archive/0.9.0.zip", subdirectory = "zulip_bots" }
# zulip's linting framework - zulint
zulint = { url = "https://github.com/zulip/zulint/archive/9be0a32bf75a9d8738b005f0b880567fff64e943.zip" }
[tool.black]
line-length = 100
target-version = ["py310"]
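The `requirements/README.md` deleted below described testing custom modifications to a dependency by pointing `dev.in`/`prod.in` at a fork; the uv-era equivalent is to point the package's `[tool.uv.sources]` entry at your branch and re-lock. A hypothetical sketch (package name, account, and commit hash are placeholders):

```toml
[tool.uv.sources]
# Hypothetical fork of a dependency, for testing unreleased changes:
some-package = { url = "https://github.com/<your-github>/some-package/archive/<commit-hash>.zip" }
```

then run `uv lock` and `uv sync` to install the modified package in your dev environment.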


@@ -1,49 +0,0 @@
The dependency graph of the requirements is as follows:
```
dev +-> prod +-> common
 +
 |
 v
docs,pip
```
Of the files, only dev and prod have been used in the install
scripts directly. The rest are implicit dependencies.
Steps to update a lock file, e.g., to update ipython from 5.3.0 to latest version:
0. Remove entry for `ipython==5.3.0` in dev.txt.
1. Run `./tools/update-locked-requirements`, which will generate new entries, pinned to the latest version.
2. Increase `PROVISION_VERSION` in `version.py`.
3. Run `./tools/provision` to install the new deps and test them.
4. Commit your changes.
## Testing custom modifications of a dependency
When working on Zulip, sometimes it is necessary to also make
modifications to a dependency (either to add a feature that we will
rely on, or to fix a bug that needs to be fixed for the intended
changes to work in Zulip) - with the idea that eventually they will be
merged upstream and included in an official release.
That process can take time however, and sometimes it'd be good to be
able to create and test your Zulip changes on top of changes to the
upstream package, without waiting for the potentially lengthy code
review and release process of the upstream dependency.
You can do this forking the upstream project, making the changes on a
branch in your fork, and then replacing the package's entry in
`dev.in` or `prod.in` with an appropriate GitHub link to the branch
with your modifications. The files have various examples of how this
should be done, but essentially you will add an entry looking like
this:
```
https://github.com/<your GitHub>/<package name>/archive/<commit hash>.zip#egg=<package name>==<version>+git
```
After that, you can follow the above process involving
`./tools/update-locked-requirements` and the following steps to have
the modified package installed in your dev environment, where it can
be used for testing.


@@ -1,201 +0,0 @@
# After editing this file, you MUST afterward run
# /tools/update-locked-requirements to update requirements/dev.txt
# and requirements/prod.txt.
# See requirements/README.md for more detail.
-r pip.in
# Django itself
Django[argon2]==5.1.*
https://github.com/andersk/asgiref/archive/8a2717c14bce1b8dd37371c675ee3728e66c3fe3.zip#egg=asgiref==3.8.1+git # https://github.com/django/asgiref/pull/494
# needed for NotRequired, ParamSpec
typing-extensions
# Needed for rendering backend templates
Jinja2
# Needed for Markdown processing
Markdown
Pygments
jsx-lexer
uri-template
regex
# Needed for manage.py
ipython
# Needed for image processing and thumbnailing
pyvips
# Needed for building complex DB queries
SQLAlchemy==1.4.*
greenlet
# Needed for S3 file uploads and other AWS tools
boto3
# The runtime-relevant part of boto3-stubs (see mypy.in)
mypy-boto3-s3
mypy-boto3-ses
mypy-boto3-sns
mypy-boto3-sqs
# Needed for integrations
defusedxml
# Needed for LDAP support
python-ldap
django-auth-ldap
# Django extension providing bitfield support
django-bitfield
# Needed for Android push notifications
firebase-admin
# Needed for the email mirror
html2text
# Forked to avoid pulling in scipy: https://github.com/mailgun/talon/pull/200
# and chardet, cchardet: https://github.com/mailgun/talon/pull/239
# and fix invalid escape sequences: https://github.com/mailgun/talon/pull/245
https://github.com/zulip/talon/archive/e87a64dccc3c5ee1b8ea157d4b6e15ecd46f2bed.zip#egg=talon-core==1.6.0+git&subdirectory=talon-core
# Needed for inlining the CSS in emails
css-inline
# Needed for JWT-based auth
PyJWT
# Needed to access RabbitMQ
pika
# Needed to access our database
psycopg2
# Needed for memcached usage
python-binary-memcached
# Needed for compression support in memcached via python-binary-memcached
django-bmemcached
# Needed for zerver/tests/test_timestamp.py
python-dateutil
# Needed for Redis
redis
# Tornado used for server->client push system
tornado
# Fast JSON parser
orjson
# Needed for iOS push notifications
aioapns
python-twitter
# To parse po files
polib
# Needed for cloning virtual environments
virtualenv-clone
# Needed for link preview
beautifulsoup4
pyoembed
python-magic
# The Zulip API bindings, from its own repository. We integrate with
# these tightly, including fetching content not included in the official
# PyPI release tarballs, such as logos, assets and documentation files
# that we render on our /integrations/ page. Therefore, we need to pin
# the version from Git rather than a PyPI release. Keeping everything in
# one repository simplifies the process of implementing and documenting
# new bots for new contributors.
https://github.com/zulip/python-zulip-api/archive/0.9.0.zip#egg=zulip==0.9.0+git&subdirectory=zulip
https://github.com/zulip/python-zulip-api/archive/0.9.0.zip#egg=zulip_bots==0.9.0+git&subdirectory=zulip_bots
# Used for Hesiod lookups, etc.
py3dns
# Install Python Social Auth
social-auth-app-django
social-auth-core[azuread,openidconnect,saml]
python3-saml
--no-binary=xmlsec
# For encrypting a login token to the desktop app
cryptography
# Needed for messages' rendered content parsing in push notifications.
lxml
--no-binary=lxml
# Needed for 2-factor authentication
django-two-factor-auth[call,phonenumberslite,sms]
# Needed for processing payments (in corporate)
stripe
# For checking whether email of the user is from a disposable email provider.
disposable-email-domains
# Needed for parsing YAML with JSON references from the REST API spec files
jsonref
# Needed for string matching in AlertWordProcessor
pyahocorasick
# Needed for function decorators that don't break introspection.
# Used for rate limiting authentication.
decorator
# For server-side enforcement of password strength
zxcvbn
# Needed for sending HTTP requests
requests[security]
requests-oauthlib
# For OpenAPI schema validation.
openapi-core
werkzeug<3.1.2 # https://github.com/python-openapi/openapi-core/issues/938
# For reporting errors to sentry.io
sentry-sdk
# For detecting URLs to link
tlds
# Unicode Collation Algorithm for sorting multilingual strings
pyuca
# Handle connection retries with exponential backoff
backoff
# Needed for reading bson files in rocketchat import tool
pymongo
# Non-backtracking regular expressions
google-re2
# For querying recursive group membership
django-cte
# SCIM integration
django-scim2
# Circuit-breaking for outgoing services
circuitbreaker
# Runtime monkeypatching of django-stubs generics
django-stubs-ext
# Structured data representation with parsing.
pydantic
annotated_types
# For requesting LLM API endpoints.
litellm


@@ -1,71 +0,0 @@
# After editing this file, you MUST afterward run
# /tools/update-locked-requirements to update requirements/dev.txt.
# See requirements/README.md for more detail.
-r prod.in
-r docs.in
# moto s3 mock
moto[s3]
# For tools/run-dev
aiohttp
# Needed for documentation links test
Scrapy
# Needed to compute test coverage
coverage
# fake for LDAP testing
fakeldap
# For testing mock http requests
responses
# For doing highly usable Python profiling
line-profiler
# Python reformatter
black
# Python linter
ruff
# Needed for watching file changes
pyinotify
pyasyncore # https://github.com/seb-m/pyinotify/issues/204
# Needed to run tests in parallel
tblib
# For linting Git commit messages
gitlint-core
# Needed for visualising cProfile reports
snakeviz
# Needed for creating DigitalOcean droplets
python-digitalocean
# Needed for updating the locked pip dependencies
pip-tools<6.3.0 # https://github.com/jazzband/pip-tools/pull/1455 breaks our hack for installing specific commits from Git
# zulip's linting framework - zulint
https://github.com/zulip/zulint/archive/9be0a32bf75a9d8738b005f0b880567fff64e943.zip#egg=zulint==1.0.0+git
-r mypy.in
# Needed for tools/check-thirdparty
python-debian
# Pattern-based lint tool
semgrep<1.80.0 # https://github.com/semgrep/semgrep/issues/10408
# For sorting versions when uploading releases
natsort
# For spell check linter
codespell
# For mocking time
time-machine

File diff suppressed because it is too large.


@@ -1,16 +0,0 @@
# This is used by ReadTheDocs to install dependencies, so it's
# valuable to keep this file as just the dependencies for our
# documentation.
# After editing this file, you MUST afterward run
# /tools/update-locked-requirements to update requirements/dev.txt
# and requirements/docs.txt.
# See requirements/README.md for more detail.
# Needed to build RTD docs
sphinx
sphinx-rtd-theme
sphinx-design
# Needed to build Markdown docs
MyST-Parser


@@ -1,341 +0,0 @@
#
# This file is GENERATED. Don't edit directly.
#
# To update, edit the non-"lock" files in requirements/*.in, then:
#
# tools/update-locked-requirements
#
# For details, see requirements/README.md .
#
alabaster==1.0.0 \
--hash=sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e \
--hash=sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b
# via sphinx
babel==2.17.0 \
--hash=sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d \
--hash=sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2
# via sphinx
certifi==2025.1.31 \
--hash=sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651 \
--hash=sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe
# via requests
charset-normalizer==3.4.1 \
--hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \
--hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \
--hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \
--hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \
--hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \
--hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \
--hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \
--hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \
--hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \
--hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \
--hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \
--hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \
--hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \
--hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \
--hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \
--hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \
--hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \
--hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \
--hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \
--hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \
--hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \
--hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \
--hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \
--hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \
--hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \
--hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \
--hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \
--hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \
--hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \
--hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \
--hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \
--hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \
--hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \
--hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \
--hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \
--hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \
--hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \
--hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \
--hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \
--hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \
--hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \
--hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \
--hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \
--hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \
--hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \
--hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \
--hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \
--hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \
--hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \
--hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \
--hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \
--hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \
--hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \
--hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \
--hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \
--hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \
--hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \
--hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \
--hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \
--hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \
--hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \
--hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \
--hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \
--hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \
--hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \
--hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \
--hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \
--hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \
--hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \
--hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \
--hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \
--hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \
--hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \
--hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \
--hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \
--hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \
--hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \
--hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \
--hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \
--hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \
--hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \
--hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \
--hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \
--hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \
--hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \
--hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \
--hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \
--hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \
--hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \
--hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \
--hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \
--hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616
# via requests
docutils==0.21.2 \
--hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \
--hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2
# via
# myst-parser
# sphinx
# sphinx-rtd-theme
idna==3.10 \
--hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
--hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
# via requests
imagesize==1.4.1 \
--hash=sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b \
--hash=sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a
# via sphinx
jinja2==3.1.5 \
--hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \
--hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb
# via
# myst-parser
# sphinx
markdown-it-py==3.0.0 \
--hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \
--hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb
# via
# mdit-py-plugins
# myst-parser
markupsafe==3.0.2 \
--hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \
--hash=sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30 \
--hash=sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0 \
--hash=sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9 \
--hash=sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396 \
--hash=sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13 \
--hash=sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028 \
--hash=sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca \
--hash=sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557 \
--hash=sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832 \
--hash=sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0 \
--hash=sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b \
--hash=sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579 \
--hash=sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a \
--hash=sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c \
--hash=sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff \
--hash=sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c \
--hash=sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22 \
--hash=sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094 \
--hash=sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb \
--hash=sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e \
--hash=sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5 \
--hash=sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a \
--hash=sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d \
--hash=sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a \
--hash=sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b \
--hash=sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8 \
--hash=sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225 \
--hash=sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c \
--hash=sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144 \
--hash=sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f \
--hash=sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87 \
--hash=sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d \
--hash=sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93 \
--hash=sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf \
--hash=sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158 \
--hash=sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84 \
--hash=sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb \
--hash=sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48 \
--hash=sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171 \
--hash=sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c \
--hash=sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6 \
--hash=sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd \
--hash=sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d \
--hash=sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1 \
--hash=sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d \
--hash=sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca \
--hash=sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a \
--hash=sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29 \
--hash=sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe \
--hash=sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798 \
--hash=sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c \
--hash=sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8 \
--hash=sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f \
--hash=sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f \
--hash=sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a \
--hash=sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178 \
--hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0 \
--hash=sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79 \
--hash=sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430 \
--hash=sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50
# via jinja2
mdit-py-plugins==0.4.2 \
--hash=sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636 \
--hash=sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5
# via myst-parser
mdurl==0.1.2 \
--hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \
--hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba
# via markdown-it-py
myst-parser==4.0.1 \
--hash=sha256:5cfea715e4f3574138aecbf7d54132296bfd72bb614d31168f48c477a830a7c4 \
--hash=sha256:9134e88959ec3b5780aedf8a99680ea242869d012e8821db3126d427edc9c95d
# via -r requirements/docs.in
packaging==24.2 \
--hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \
--hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f
# via sphinx
pygments==2.19.1 \
--hash=sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f \
--hash=sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c
# via sphinx
pyyaml==6.0.2 \
--hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \
--hash=sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48 \
--hash=sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086 \
--hash=sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e \
--hash=sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133 \
--hash=sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5 \
--hash=sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484 \
--hash=sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee \
--hash=sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5 \
--hash=sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68 \
--hash=sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a \
--hash=sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf \
--hash=sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99 \
--hash=sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8 \
--hash=sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85 \
--hash=sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19 \
--hash=sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc \
--hash=sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a \
--hash=sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1 \
--hash=sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317 \
--hash=sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c \
--hash=sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631 \
--hash=sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d \
--hash=sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652 \
--hash=sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5 \
--hash=sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e \
--hash=sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b \
--hash=sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8 \
--hash=sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476 \
--hash=sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706 \
--hash=sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563 \
--hash=sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237 \
--hash=sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b \
--hash=sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083 \
--hash=sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180 \
--hash=sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425 \
--hash=sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e \
--hash=sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f \
--hash=sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725 \
--hash=sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183 \
--hash=sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab \
--hash=sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774 \
--hash=sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725 \
--hash=sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e \
--hash=sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5 \
--hash=sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d \
--hash=sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290 \
--hash=sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44 \
--hash=sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed \
--hash=sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4 \
--hash=sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba \
--hash=sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12 \
--hash=sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4
# via myst-parser
requests==2.32.3 \
--hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \
--hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
# via sphinx
snowballstemmer==2.2.0 \
--hash=sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1 \
--hash=sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a
# via sphinx
sphinx==8.1.3 \
--hash=sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2 \
--hash=sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927
# via
# -r requirements/docs.in
# myst-parser
# sphinx-design
# sphinx-rtd-theme
# sphinxcontrib-jquery
sphinx-design==0.6.1 \
--hash=sha256:b11f37db1a802a183d61b159d9a202314d4d2fe29c163437001324fe2f19549c \
--hash=sha256:b44eea3719386d04d765c1a8257caca2b3e6f8421d7b3a5e742c0fd45f84e632
# via -r requirements/docs.in
sphinx-rtd-theme==3.0.2 \
--hash=sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13 \
--hash=sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85
# via -r requirements/docs.in
sphinxcontrib-applehelp==2.0.0 \
--hash=sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1 \
--hash=sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5
# via sphinx
sphinxcontrib-devhelp==2.0.0 \
--hash=sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad \
--hash=sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2
# via sphinx
sphinxcontrib-htmlhelp==2.1.0 \
--hash=sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8 \
--hash=sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9
# via sphinx
sphinxcontrib-jquery==4.1 \
--hash=sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a \
--hash=sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae
# via sphinx-rtd-theme
sphinxcontrib-jsmath==1.0.1 \
--hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \
--hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8
# via sphinx
sphinxcontrib-qthelp==2.0.0 \
--hash=sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab \
--hash=sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb
# via sphinx
sphinxcontrib-serializinghtml==2.0.0 \
--hash=sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331 \
--hash=sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d
# via sphinx
tomli==2.0.2 \
--hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \
--hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed
# via sphinx
urllib3==2.3.0 \
--hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \
--hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d
# via requests


@@ -1,28 +0,0 @@
# After editing this file, you MUST afterward run
# /tools/update-locked-requirements to update requirements/dev.txt.
# See requirements/README.md for more detail.
mypy[faster-cache]
boto3-stubs[s3,ses,sns,sqs]
django-stubs
lxml-stubs
SQLAlchemy[mypy]
types-beautifulsoup4
types-boto
types-chardet
types-decorator
types-defusedxml
types-jsonschema
types-Markdown
types-oauthlib
types-polib
types-pika
types-psycopg2
types-Pygments
types-pyOpenSSL
types-python-dateutil
types-PyYAML
types-redis
types-regex
types-requests
types-zxcvbn


@@ -1,3 +0,0 @@
https://github.com/zulip/pip/archive/50e61dcc78d0da8a041e4fecc566f40b2b0604df.zip#egg=pip==20.3.4+git # Our hack for installing specific commits from Git requires --use-deprecated=legacy-resolver: https://github.com/pypa/pip/issues/5780
setuptools<71.0.2 # Newer setuptools fools old pip into thinking typing_extensions is already installed
wheel


@@ -1,22 +0,0 @@
#
# This file is GENERATED. Don't edit directly.
#
# To update, edit the non-"lock" files in requirements/*.in, then:
#
# tools/update-locked-requirements
#
# For details, see requirements/README.md .
#
wheel==0.45.1 \
--hash=sha256:661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729 \
--hash=sha256:708e7481cc80179af0e556bbf0cc00b8444c7321e2700b8d8580231d13017248
# via -r requirements/pip.in
# The following packages are considered to be unsafe in a requirements file:
https://github.com/zulip/pip/archive/50e61dcc78d0da8a041e4fecc566f40b2b0604df.zip#egg=pip==20.3.4+git \
--hash=sha256:35e47938b7e2a91523359b54a53c62fc2aeed5adaaba24437a33cbd9a4641574
# via -r requirements/pip.in
setuptools==71.0.0 \
--hash=sha256:98da3b8aca443b9848a209ae4165e2edede62633219afa493a58fbba57f72e2e \
--hash=sha256:f06fbe978a91819d250a30e0dc4ca79df713d909e24438a42d0ec300fc52247f
# via -r requirements/pip.in


@@ -1,9 +0,0 @@
# After editing this file, you MUST afterward run
# /tools/update-locked-requirements to update requirements/prod.txt.
# See requirements/README.md for more detail.
-r common.in
# Used for running the Zulip production Django server
uWSGI
# Used for monitoring memcached
prometheus_client

File diff suppressed because it is too large


@@ -1,4 +1,9 @@
#!/usr/bin/env python3
# TODO: After switching from pip to uv, we no longer create
# /srv/zulip-venv-cache or symlink zulip-py3-venv, so this script can be
# replaced with shutil.rmtree("/srv/zulip-venv-cache").
import argparse
import glob
import os


@@ -7,8 +7,8 @@ ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__f
if ZULIP_PATH not in sys.path:
sys.path.append(ZULIP_PATH)
from scripts.lib.setup_venv import get_venv_dependencies, setup_virtualenv
from scripts.lib.zulip_tools import os_families, overwrite_symlink, parse_os_release, run
from scripts.lib.setup_venv import get_venv_dependencies
from scripts.lib.zulip_tools import os_families, parse_os_release, run
parser = argparse.ArgumentParser(description="Create a production virtualenv with caching")
parser.add_argument("deploy_path")
@@ -27,14 +27,6 @@ else:
print("Unsupported platform: {}".format(distro_info["ID"]))
sys.exit(1)
# Set the current working directory to the Zulip checkout, so the api/
# relative path in requirements/common.in works.
os.chdir(ZULIP_PATH)
venv_name = "zulip-py3-venv"
cached_venv_path = setup_virtualenv(
os.path.join(args.deploy_path, venv_name), os.path.join(ZULIP_PATH, "requirements", "prod.txt")
)
current_venv_path = os.path.join(args.deploy_path, "zulip-current-venv")
overwrite_symlink(venv_name, current_venv_path)
run(["scripts/lib/install-uv"])
run(["uv", "sync", "--frozen", "--only-group=prod"])

scripts/lib/install-uv (new executable file)

@@ -0,0 +1,25 @@
#!/usr/bin/env bash
set -eu
version=0.6.3
arch="$(uname -m)"
tarball="uv-$arch-unknown-linux-gnu.tar.gz"
declare -A sha256=(
[aarch64]=447726788204106ffd8ecc59396fccc75fae7aca998555265b5ea6950b00160c
[x86_64]=b7a37a33d62cb7672716c695226450231e8c02a8eb2b468fa61cd28a8f86eab2
)
check_version() {
out="$(uv --version)" && [ "$out" = "uv $version" ]
}
if ! check_version 2>/dev/null; then
set -x
tmpdir="$(mktemp -d)"
trap 'rm -r "$tmpdir"' EXIT
cd "$tmpdir"
curl -fLO --retry 3 "https://github.com/astral-sh/uv/releases/download/$version/$tarball"
sha256sum -c <<<"${sha256[$arch]} $tarball"
tar -xzf "$tarball" --no-same-owner --strip-components=1 -C /usr/local/bin "uv-$arch-unknown-linux-gnu"/{uv,uvx}
check_version
fi
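A rough sketch of the script's idempotence from a hypothetical Python caller (the assertion mirrors the version= pin above; this caller is not part of this commit):

import subprocess

subprocess.check_call(["scripts/lib/install-uv"])  # downloads and installs uv 0.6.3 if missing
out = subprocess.check_output(["uv", "--version"], text=True).strip()
assert out == "uv 0.6.3"  # same string comparison check_version() performs
subprocess.check_call(["scripts/lib/install-uv"])  # no-op: check_version() already passes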


@@ -8,9 +8,15 @@ import sys
def setup_path() -> None:
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
venv = os.path.realpath(os.path.join(BASE_DIR, "zulip-py3-venv"))
venv = os.path.realpath(os.path.join(BASE_DIR, ".venv"))
if sys.prefix != venv:
sys.path = list(filter(lambda p: "/zulip-py3-venv/" not in p, sys.path))
sys.path = list(
filter(
# zulip-py3-venv was a historical virtualenv symlink
lambda p: "/zulip-py3-venv/" not in p and "/.venv/" not in p,
sys.path,
)
)
activate_this = os.path.join(venv, "bin", "activate_this.py")
activate_locals = dict(__file__=activate_this)
with open(activate_this) as f:


@@ -1,13 +1,8 @@
import logging
import os
import shutil
import subprocess
from scripts.lib.hash_reqs import expand_reqs, python_version
from scripts.lib.zulip_tools import ENDC, WARNING, os_families, run, run_as_root
from scripts.lib.zulip_tools import os_families
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
VENV_CACHE_PATH = "/srv/zulip-venv-cache"
VENV_DEPENDENCIES = [
"build-essential",
@@ -72,262 +67,3 @@ def get_venv_dependencies(vendor: str, os_version: str) -> list[str]:
return FEDORA_VENV_DEPENDENCIES
else:
raise AssertionError("Invalid vendor")
def install_venv_deps(pip: str, requirements_file: str) -> None:
pip_requirements = os.path.join(ZULIP_PATH, "requirements", "pip.txt")
run([pip, "install", "--force-reinstall", "--require-hashes", "-r", pip_requirements])
run(
[
pip,
"install",
"--use-deprecated=legacy-resolver", # https://github.com/pypa/pip/issues/5780
"--no-deps",
"--require-hashes",
"-r",
requirements_file,
]
)
def get_index_filename(venv_path: str) -> str:
return os.path.join(venv_path, "package_index")
def get_package_names(requirements_file: str) -> list[str]:
packages = expand_reqs(requirements_file)
cleaned = []
operators = ["~=", "==", "!=", "<", ">"]
for package in packages:
if package.startswith("git+https://") and "#egg=" in package:
split_package = package.split("#egg=")
if len(split_package) != 2:
raise Exception(f"Unexpected duplicate #egg in package {package}")
# Extract the package name from Git requirements entries
package = split_package[1]
for operator in operators:
if operator in package:
package = package.split(operator)[0]
package = package.strip()
if package:
cleaned.append(package.lower())
return sorted(cleaned)
def create_requirements_index_file(venv_path: str, requirements_file: str) -> str:
"""
Creates a file, called package_index, in the virtual environment
directory that contains all the PIP packages installed in the
virtual environment. This file is used to determine the packages
that can be copied to a new virtual environment.
"""
index_filename = get_index_filename(venv_path)
packages = get_package_names(requirements_file)
with open(index_filename, "w") as writer:
writer.write("\n".join(packages))
writer.write("\n")
return index_filename
def get_venv_packages(venv_path: str) -> set[str]:
"""
Returns the packages installed in the virtual environment using the
package index file.
"""
with open(get_index_filename(venv_path)) as reader:
return {p.strip() for p in reader.read().split("\n") if p.strip()}
def try_to_copy_venv(venv_path: str, new_packages: set[str]) -> bool:
"""
Tries to copy packages from an old virtual environment in the cache
to the new virtual environment. The algorithm works as follows:
1. Find a virtual environment, v, from the cache that has the
highest overlap with the new requirements such that:
a. The new requirements only add to the packages of v.
b. The new requirements only upgrade packages of v.
2. Copy the contents of v to the new virtual environment using
virtualenv-clone.
3. Delete all .pyc files in the new virtual environment.
"""
if not os.path.exists(VENV_CACHE_PATH):
return False
desired_python_version = python_version()
venv_name = os.path.basename(venv_path)
overlaps: list[tuple[int, str, set[str]]] = []
old_packages: set[str] = set()
for sha1sum in os.listdir(VENV_CACHE_PATH):
curr_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, venv_name)
if curr_venv_path == venv_path or not os.path.exists(get_index_filename(curr_venv_path)):
continue
# Check the Python version in the venv matches the version we want to use.
venv_python3 = os.path.join(curr_venv_path, "bin", "python3")
if not os.path.exists(venv_python3):
continue
venv_python_version = subprocess.check_output([venv_python3, "-VV"], text=True)
if desired_python_version != venv_python_version:
continue
old_packages = get_venv_packages(curr_venv_path)
# We only consider using old virtualenvs that only
# contain packages that we want in our new virtualenv.
if not (old_packages - new_packages):
overlap = new_packages & old_packages
overlaps.append((len(overlap), curr_venv_path, overlap))
target_log = get_logfile_name(venv_path)
source_venv_path = None
if overlaps:
# Here, we select the old virtualenv with the largest overlap
overlaps = sorted(overlaps)
_, source_venv_path, copied_packages = overlaps[-1]
print(f"Copying packages from {source_venv_path}")
clone_ve = f"{source_venv_path}/bin/virtualenv-clone"
cmd = [clone_ve, source_venv_path, venv_path]
try:
# TODO: We can probably remove this in a few months, now
# that we can expect that virtualenv-clone is present in
# all of our recent virtualenvs.
run_as_root(cmd)
except subprocess.CalledProcessError:
# Virtualenv-clone is either not installed or threw an
# error. Just return False: making a new venv is safe.
logging.warning("Error cloning virtualenv %s", source_venv_path)
return False
# virtualenv-clone, unfortunately, copies the success stamp,
# which means if the upcoming `pip install` phase were to
# fail, we'd end up with a broken half-provisioned virtualenv
# that's incorrectly tagged as properly provisioned. The
# right fix is to use
# https://github.com/edwardgeorge/virtualenv-clone/pull/38,
# but this rm is almost as good.
success_stamp_path = os.path.join(venv_path, "success-stamp")
run_as_root(["rm", "-f", success_stamp_path])
run_as_root(["chown", "-R", f"{os.getuid()}:{os.getgid()}", venv_path])
source_log = get_logfile_name(source_venv_path)
copy_parent_log(source_log, target_log)
create_log_entry(
target_log, source_venv_path, copied_packages, new_packages - copied_packages
)
return True
return False
def get_logfile_name(venv_path: str) -> str:
return f"{venv_path}/setup-venv.log"
def create_log_entry(
target_log: str,
parent: str,
copied_packages: set[str],
new_packages: set[str],
) -> None:
venv_path = os.path.dirname(target_log)
with open(target_log, "a") as writer:
writer.write(f"{venv_path}\n")
if copied_packages:
writer.write(f"Copied from {parent}:\n")
writer.write("\n".join(f"- {p}" for p in sorted(copied_packages)))
writer.write("\n")
writer.write("New packages:\n")
writer.write("\n".join(f"- {p}" for p in sorted(new_packages)))
writer.write("\n\n")
def copy_parent_log(source_log: str, target_log: str) -> None:
if os.path.exists(source_log):
shutil.copyfile(source_log, target_log)
def do_patch_activate_script(venv_path: str) -> None:
"""
Patches the bin/activate script so that the value of the environment variable VIRTUAL_ENV
is set to venv_path during the script's execution whenever it is sourced.
"""
# venv_path should be what we want to have in VIRTUAL_ENV after patching
script_path = os.path.join(venv_path, "bin", "activate")
with open(script_path) as f:
lines = f.readlines()
for i, line in enumerate(lines):
if line.startswith("VIRTUAL_ENV="):
lines[i] = f'VIRTUAL_ENV="{venv_path}"\n'
with open(script_path, "w") as f:
f.write("".join(lines))
def generate_hash(requirements_file: str) -> str:
path = os.path.join(ZULIP_PATH, "scripts", "lib", "hash_reqs.py")
output = subprocess.check_output([path, requirements_file], text=True)
return output.split()[0]
def setup_virtualenv(
target_venv_path: str | None,
requirements_file: str,
patch_activate_script: bool = False,
) -> str:
sha1sum = generate_hash(requirements_file)
# Check if a cached version already exists
if target_venv_path is None:
cached_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, "venv")
else:
cached_venv_path = os.path.join(
VENV_CACHE_PATH, sha1sum, os.path.basename(target_venv_path)
)
success_stamp = os.path.join(cached_venv_path, "success-stamp")
if not os.path.exists(success_stamp):
do_setup_virtualenv(cached_venv_path, requirements_file)
with open(success_stamp, "w") as f:
f.close()
print(f"Using cached Python venv from {cached_venv_path}")
if target_venv_path is not None:
run_as_root(["ln", "-nsf", cached_venv_path, target_venv_path])
if patch_activate_script:
do_patch_activate_script(target_venv_path)
return cached_venv_path
def do_setup_virtualenv(venv_path: str, requirements_file: str) -> None:
# Set up Python virtualenv
new_packages = set(get_package_names(requirements_file))
run_as_root(["rm", "-rf", venv_path])
if not try_to_copy_venv(venv_path, new_packages):
# Create new virtualenv.
run_as_root(["mkdir", "-p", venv_path])
run_as_root(["virtualenv", "-p", "python3", "--no-download", venv_path])
run_as_root(["chown", "-R", f"{os.getuid()}:{os.getgid()}", venv_path])
create_log_entry(get_logfile_name(venv_path), "", set(), new_packages)
create_requirements_index_file(venv_path, requirements_file)
pip = os.path.join(venv_path, "bin", "pip")
try:
install_venv_deps(pip, requirements_file)
except subprocess.CalledProcessError:
try:
# Might be a failure due to network connection issues. Retrying...
print(WARNING + "`pip install` failed; retrying..." + ENDC)
install_venv_deps(pip, requirements_file)
except BaseException as e:
# Suppress exception chaining
raise e from None
run_as_root(["chmod", "-R", "a+rX", venv_path])


@@ -76,7 +76,7 @@ mv zulip-git-version "$OUTPUT_DIR/$prefix/"
cd "$OUTPUT_DIR/$prefix"
ln -s "$BASEDIR/zulip-py3-venv" .
uv sync --frozen
# create var/log directory in the new temporary checkout
mkdir -p "var/log"


@@ -1,6 +1,6 @@
#!/usr/bin/env bash
ZULIP_PATH="$(dirname "${BASH_SOURCE[0]}")/../.."
source /srv/zulip-py3-venv/bin/activate
source "$ZULIP_PATH/.venv/bin/activate"
source "$ZULIP_PATH"/tools/python-warnings.bash
echo "Using $VIRTUAL_ENV"


@@ -64,7 +64,7 @@ def check_django() -> bool:
Make sure your shell does this at init time:
source /srv/zulip-py3-venv/bin/activate
source /srv/zulip/.venv/bin/activate
Or maybe you forgot to run this inside your VM?
"""
@@ -108,21 +108,6 @@ def test_models() -> bool:
return True
@run
def check_venv() -> bool:
path = os.path.join(ROOT_DIR, "scripts", "lib", "hash_reqs.py")
cache_dir = "/srv/zulip-venv-cache/"
for fn in ["dev.txt"]:
requirements_file = os.path.join(ROOT_DIR, "requirements", fn)
output = subprocess.check_output([path, requirements_file], text=True)
sha1sum = output.split()[0]
print(fn, "venv sha: ", sha1sum)
if not os.path.exists(os.path.join(cache_dir, sha1sum)):
print("Your venv may be improperly installed!")
return False
return True
@run
def check_migrations() -> bool:
print()


@@ -99,7 +99,7 @@ Your remote Zulip dev server has been created!
`ssh zulipdev@<username>.zulipdev.org` on the command line
(Terminal for macOS and Linux, Bash for Git on Windows).
- There is no password; your account is configured to use your SSH keys.
- Once you log in, you should see `(zulip-py3-venv) ~$`.
- Once you log in, you should see `(zulip-server) ~$`.
- To start the dev server, `cd zulip` and then run `./tools/run-dev`.
- While the dev server is running, you can see the Zulip server in your browser
at http://<username>.zulipdev.org:9991.


@@ -263,7 +263,7 @@ Your remote Zulip dev server has been created!
`ssh zulipdev@{droplet_domain_name}` on the command line
(Terminal for macOS and Linux, Bash for Git on Windows).
- There is no password; your account is configured to use your SSH keys.
- Once you log in, you should see `(zulip-py3-venv) ~$`.
- Once you log in, you should see `(zulip-server) ~$`.
- To start the dev server, `cd zulip` and then run `./tools/run-dev`.
- While the dev server is running, you can see the Zulip server in your browser at
http://{droplet_domain_name}:9991.


@@ -1,5 +1,6 @@
#!/usr/bin/env python3
import argparse
import contextlib
import hashlib
import logging
import os
@@ -23,9 +24,9 @@ from scripts.lib.zulip_tools import (
get_dev_uuid_var_path,
os_families,
parse_os_release,
run,
run_as_root,
)
from tools.setup import setup_venvs
VAR_DIR_PATH = os.path.join(ZULIP_PATH, "var")
@@ -424,7 +425,14 @@ def main(options: argparse.Namespace) -> NoReturn:
# Install tusd
run_as_root([*proxy_env, "tools/setup/install-tusd"])
setup_venvs.main()
# Install Python environment
run_as_root([*proxy_env, "scripts/lib/install-uv"])
run([*proxy_env, "uv", "sync", "--frozen"])
# Clean old symlinks used before uv migration
with contextlib.suppress(FileNotFoundError):
os.unlink("zulip-py3-venv")
if os.path.lexists("/srv/zulip-py3-venv"):
run_as_root(["rm", "/srv/zulip-py3-venv"])
run_as_root(["cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])
@@ -451,13 +459,13 @@ def main(options: argparse.Namespace) -> NoReturn:
# bad idea, and empirically it can cause Python to segfault on
# certain cffi-related imports. Instead, start a new Python
# process inside the virtualenv.
activate_this = "/srv/zulip-py3-venv/bin/activate_this.py"
provision_inner = os.path.join(ZULIP_PATH, "tools", "lib", "provision_inner.py")
with open(activate_this) as f:
exec(f.read(), dict(__file__=activate_this)) # noqa: S102
os.execvp(
provision_inner,
"uv",
[
"uv",
"run",
"--no-sync",
provision_inner,
*(["--force"] if options.is_force else []),
*(["--build-release-tarball-only"] if options.is_build_release_tarball_only else []),


@@ -10,6 +10,7 @@ import argparse
import glob
import os
import pwd
import re
import shutil
import subprocess
import sys
@@ -33,7 +34,7 @@ from scripts.lib.zulip_tools import (
from tools.setup.generate_zulip_bots_static_files import generate_zulip_bots_static_files
from version import PROVISION_VERSION
VENV_PATH = "/srv/zulip-py3-venv"
VENV_PATH = os.path.join(ZULIP_PATH, ".venv")
UUID_VAR_PATH = get_dev_uuid_var_path()
with get_tzdata_zi() as f:
@@ -117,24 +118,41 @@ def is_vagrant_or_digitalocean_instance() -> bool:
def setup_shell_profile(shell_profile: str) -> None:
shell_profile_path = os.path.expanduser(shell_profile)
if os.path.exists(shell_profile_path):
with open(shell_profile_path) as f:
code = f.read()
else:
code = ""
def write_command(command: str) -> None:
if os.path.exists(shell_profile_path):
with open(shell_profile_path) as shell_profile_file:
lines = [line.strip() for line in shell_profile_file]
if command not in lines:
with open(shell_profile_path, "a+") as shell_profile_file:
shell_profile_file.writelines(command + "\n")
else:
with open(shell_profile_path, "w") as shell_profile_file:
shell_profile_file.writelines(command + "\n")
source_activate_command = "source " + os.path.join(VENV_PATH, "bin", "activate")
# We want to activate the virtual environment for login shells only on virtualized systems.
zulip_code = ""
if is_vagrant_or_digitalocean_instance() or is_wsl_instance():
write_command(source_activate_command)
zulip_code += (
"if [ -L /srv/zulip-py3-venv ]; then\n" # For development environment downgrades
"source /srv/zulip-py3-venv/bin/activate\n" # Not indented so old versions recognize and avoid re-adding this
"else\n"
f"source {os.path.join(VENV_PATH, 'bin', 'activate')}\n"
"fi\n"
)
if os.path.exists("/srv/zulip"):
write_command("cd /srv/zulip")
zulip_code += "cd /srv/zulip\n"
if zulip_code:
zulip_code = f"\n# begin Zulip setup\n{zulip_code}# end Zulip setup\n"
def patch_code(code: str) -> str:
return re.sub(
r"\n# begin Zulip setup\n(?s:.*)# end Zulip setup\n|(?:source /srv/zulip-py3-venv/bin/activate\n|cd /srv/zulip\n)+|\Z",
lambda m: zulip_code,
code,
count=1,
)
new_code = patch_code(code)
if new_code != code:
assert patch_code(new_code) == new_code
with open(f"{shell_profile_path}.new", "w") as f:
f.write(new_code)
os.rename(f"{shell_profile_path}.new", shell_profile_path)
def setup_bash_profile() -> None:
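
To make the patch_code() regex above concrete, here is a self-contained sketch (with zulip_code hardcoded using assumed paths, and patch_code lifted out of setup_shell_profile) showing that an old profile is rewritten into the begin/end block, and that a second pass is a no-op, as the assertion in the diff checks:

import re

zulip_code = (
    "\n# begin Zulip setup\n"
    "if [ -L /srv/zulip-py3-venv ]; then\n"
    "source /srv/zulip-py3-venv/bin/activate\n"
    "else\n"
    "source /srv/zulip/.venv/bin/activate\n"
    "fi\n"
    "cd /srv/zulip\n"
    "# end Zulip setup\n"
)

def patch_code(code: str) -> str:
    return re.sub(
        r"\n# begin Zulip setup\n(?s:.*)# end Zulip setup\n"
        r"|(?:source /srv/zulip-py3-venv/bin/activate\n|cd /srv/zulip\n)+|\Z",
        lambda m: zulip_code,
        code,
        count=1,
    )

old = "source /srv/zulip-py3-venv/bin/activate\ncd /srv/zulip\n"
new = patch_code(old)
assert "# begin Zulip setup" in new
assert patch_code(new) == new  # idempotent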


@@ -4,15 +4,14 @@ import sys
def check_venv(filename: str) -> None:
if os.path.basename(sys.prefix) != "zulip-py3-venv":
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
venv = os.path.realpath(os.path.join(BASE_DIR, ".venv"))
if sys.prefix != venv:
print(f"You need to run {filename} inside a Zulip dev environment.")
user_id = os.getuid()
user_name = pwd.getpwuid(user_id).pw_name
print(
"You can `source /srv/zulip-py3-venv/bin/activate` "
"to enter the development environment."
)
print(f"You can `source {venv}/bin/activate` to enter the development environment.")
if user_name not in ("vagrant", "zulipdev"):
print()


@@ -44,19 +44,19 @@ ENDC='\033[0m'
# Make the script independent of the location from where it is executed
PARENT_PATH=$(
cd "$(dirname "${BASH_SOURCE[0]}")"
cd "$(dirname "${BASH_SOURCE[0]}")/.."
pwd -P
)
cd "$PARENT_PATH"
mkdir -p ../var/log
LOG_PATH="../var/log/provision.log"
mkdir -p var/log
LOG_PATH="var/log/provision.log"
echo "PROVISIONING STARTING." >>$LOG_PATH
# PYTHONUNBUFFERED is important to ensure that tracebacks don't get
# lost far above where they should be in the output.
export PYTHONUNBUFFERED=1
./lib/provision.py "$@" 2>&1 | tee -a "$LOG_PATH"
tools/lib/provision.py "$@" 2>&1 | tee -a "$LOG_PATH"
failed=${PIPESTATUS[0]}
if [ "$failed" -ne 0 ]; then
@@ -69,13 +69,13 @@ if [ "$failed" -ne 0 ]; then
echo "* Logs are here: zulip/var/log/provision.log"
echo -e "$ENDC"
exit "$failed"
elif [ "$VIRTUAL_ENV" != "/srv/zulip-py3-venv" ] && [ -z "${SKIP_VENV_SHELL_WARNING}" ]; then
elif [ "$VIRTUAL_ENV" != "$PARENT_PATH/.venv" ] && [ -z "${SKIP_VENV_SHELL_WARNING}" ]; then
echo -e "$WARNING"
echo "WARNING: This shell does not have the Zulip Python 3 virtualenv activated."
echo "Zulip commands will fail until you activate the virtualenv."
echo
echo "To update the shell, run:"
echo " source /srv/zulip-py3-venv/bin/activate"
echo " source $PARENT_PATH/.venv/bin/activate"
# shellcheck disable=SC2016
echo 'or just close this shell and start a new one (with Vagrant, `vagrant ssh`).'
echo -en "$ENDC"


@@ -30,22 +30,6 @@ PYTHONWARNINGS+=',default:Attempting to work in a virtualenv.:UserWarning:IPytho
PYTHONWARNINGS+=',ignore:datetime.datetime.utcfromtimestamp() is deprecated and scheduled for removal in a future version.:DeprecationWarning:onelogin.saml2.utils'
PYTHONWARNINGS+=',ignore:datetime.datetime.utcnow() is deprecated and scheduled for removal in a future version.:DeprecationWarning:onelogin.saml2.utils'
# Probably due to ancient pip
PYTHONWARNINGS+=',default:DEPRECATION::pip._internal.models.link'
PYTHONWARNINGS+=',default:Unimplemented abstract methods:DeprecationWarning:pip._internal.metadata.importlib._dists'
PYTHONWARNINGS+=',default:module '\''sre_constants'\'' is deprecated:DeprecationWarning:pip._vendor.pyparsing'
PYTHONWARNINGS+=',default:Creating a LegacyVersion has been deprecated and will be removed in the next major release:DeprecationWarning:pip._vendor.packaging.version'
PYTHONWARNINGS+=',default:path is deprecated.:DeprecationWarning:pip._vendor.certifi.core'
PYTHONWARNINGS+=',default:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning:pip._vendor.urllib3.util.ssl_'
PYTHONWARNINGS+=',default:Creating a LegacyVersion has been deprecated and will be removed in the next major release:DeprecationWarning:pip._vendor.packaging.specifiers'
PYTHONWARNINGS+=',default:path is deprecated.:DeprecationWarning:pip._vendor.pep517.wrappers'
PYTHONWARNINGS+=',default:The distutils package is deprecated and slated for removal in Python 3.12.:DeprecationWarning:pip._internal.locations'
PYTHONWARNINGS+=',default:The distutils.sysconfig module is deprecated:DeprecationWarning:pip._internal.locations'
PYTHONWARNINGS+=',default:ssl.match_hostname() is deprecated:DeprecationWarning:pip._vendor.urllib3.connection'
PYTHONWARNINGS+=',default:The distutils package is deprecated and slated for removal in Python 3.12.:DeprecationWarning:pip._internal.locations._distutils'
PYTHONWARNINGS+=',default:The distutils.sysconfig module is deprecated:DeprecationWarning:distutils.command.install'
PYTHONWARNINGS+=',default:The distutils package is deprecated and slated for removal in Python 3.12.:DeprecationWarning:pip._internal.cli.cmdoptions'
# https://github.com/python-openapi/openapi-core/issues/931
PYTHONWARNINGS+=',ignore::DeprecationWarning:openapi_core.validation.request.validators'


@@ -34,17 +34,9 @@ args = parser.parse_args()
assert_provisioning_status_ok(args.skip_provision_check)
if args.use_daemon:
command_name = "dmypy"
mypy_command = "dmypy"
else:
command_name = "mypy"
# Use zulip-py3-venv's mypy if it's available.
VENV_DIR = "/srv/zulip-py3-venv"
MYPY_VENV_PATH = os.path.join(VENV_DIR, "bin", command_name)
if os.path.exists(MYPY_VENV_PATH):
mypy_command = MYPY_VENV_PATH
else:
mypy_command = command_name
mypy_command = "mypy"
if args.version:
print("mypy command:", mypy_command)


@@ -1,27 +0,0 @@
#!/usr/bin/env python3
import os
import sys
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if ZULIP_PATH not in sys.path:
sys.path.append(ZULIP_PATH)
# unset PIP_USER if set, since it is not compatible with virtualenvs.
os.environ.pop("PIP_USER", None)
from scripts.lib.setup_venv import setup_virtualenv
from scripts.lib.zulip_tools import overwrite_symlink
VENV_PATH = "/srv/zulip-py3-venv"
DEV_REQS_FILE = os.path.join(ZULIP_PATH, "requirements", "dev.txt")
def main() -> None:
cached_venv_path = setup_virtualenv(VENV_PATH, DEV_REQS_FILE, patch_activate_script=True)
overwrite_symlink(cached_venv_path, os.path.join(ZULIP_PATH, "zulip-py3-venv"))
if __name__ == "__main__":
main()


@@ -61,8 +61,7 @@ run ./tools/test-migrations
# run ./tools/test-documentation --skip-external-links
run ./tools/test-help-documentation --skip-external-links "${forcearg[@]}"
run ./tools/test-api
# Not running requirements check locally, because slow and low-churn
# run ./tools/test-locked-requirements
run uv lock --check
# Not running run-dev tests locally; we never have
# run ./tools/test-run-dev
# Not running queue worker reload tests since it's low-churn code
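In case the uv lock --check invocation above is unfamiliar: it verifies that uv.lock is still consistent with pyproject.toml and exits nonzero if the lockfile is stale, without writing anything, which is what lets it replace tools/test-locked-requirements. A rough equivalent from Python:

import subprocess

if subprocess.run(["uv", "lock", "--check"]).returncode != 0:
    raise SystemExit("uv.lock is out of date; run `uv lock` and commit the result.")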


@@ -1,124 +0,0 @@
#!/usr/bin/env python3
import difflib
import filecmp
import glob
import hashlib
import os
import shutil
import subprocess
import sys
import tempfile
import orjson
TOOLS_DIR = os.path.abspath(os.path.dirname(__file__))
ZULIP_PATH = os.path.dirname(TOOLS_DIR)
REQS_DIR = os.path.join(ZULIP_PATH, "requirements")
CACHE_DIR = os.path.join(ZULIP_PATH, "var", "tmp")
CACHE_FILE = os.path.join(CACHE_DIR, "requirements_hashes")
def print_diff(path_file1: str, path_file2: str) -> None:
with open(path_file1) as file1, open(path_file2) as file2:
diff = difflib.unified_diff(
file1.readlines(),
file2.readlines(),
fromfile=path_file1,
tofile=path_file2,
)
sys.stdout.writelines(diff)
def test_locked_requirements(tmp_dir: str) -> bool:
# `pip-compile` tries to avoid unnecessarily updating recursive dependencies
# if lock files are present already. If we don't copy these files to the tmp
# dir then recursive dependencies will get updated to their latest version
# without any change in the input requirements file and the test will not pass.
for locked_file in glob.glob(os.path.join(REQS_DIR, "*.txt")):
fn = os.path.basename(locked_file)
locked_file = os.path.join(REQS_DIR, fn)
test_locked_file = os.path.join(tmp_dir, fn)
shutil.copyfile(locked_file, test_locked_file)
subprocess.check_call(
[os.path.join(TOOLS_DIR, "update-locked-requirements"), "--output-dir", tmp_dir]
)
same = True
for test_locked_file in glob.glob(os.path.join(tmp_dir, "*.txt")):
fn = os.path.basename(test_locked_file)
locked_file = os.path.join(REQS_DIR, fn)
same = same and filecmp.cmp(test_locked_file, locked_file, shallow=False)
return same
def get_requirements_hash(tmp_dir: str, use_test_lock_files: bool = False) -> str:
sha1 = hashlib.sha1()
reqs_files = sorted(glob.glob(os.path.join(REQS_DIR, "*.in")))
lock_files_path = REQS_DIR
if use_test_lock_files:
lock_files_path = tmp_dir
reqs_files.extend(sorted(glob.glob(os.path.join(lock_files_path, "*.txt"))))
for file_path in reqs_files:
with open(file_path, "rb") as fp:
sha1.update(fp.read())
return sha1.hexdigest()
def maybe_set_up_cache() -> None:
os.makedirs(CACHE_DIR, exist_ok=True)
if not os.path.exists(CACHE_FILE):
with open(CACHE_FILE, "wb") as fp:
fp.write(orjson.dumps([]))
def load_cache() -> list[str]:
with open(CACHE_FILE, "rb") as fp:
hash_list = orjson.loads(fp.read())
return hash_list
def update_cache(hash_list: list[str]) -> None:
# We store last 100 hash entries. Aggressive caching is
# not a problem as it is cheap to do.
if len(hash_list) > 100:
hash_list = hash_list[-100:]
with open(CACHE_FILE, "wb") as fp:
fp.write(orjson.dumps(hash_list))
def main() -> None:
maybe_set_up_cache()
hash_list = load_cache()
tmp = tempfile.TemporaryDirectory()
tmp_dir = tmp.name
curr_hash = get_requirements_hash(tmp_dir)
if curr_hash in hash_list:
# We have already checked this set of requirements and they
# were consistent so no need to check again.
return
requirements_are_consistent = test_locked_requirements(tmp_dir)
# Cache the hash so that we need not to run the `update_locked_requirements`
# tool again for checking this set of requirements.
valid_hash = get_requirements_hash(tmp_dir, use_test_lock_files=True)
update_cache([*(h for h in hash_list if h != valid_hash), valid_hash])
if not requirements_are_consistent:
for test_locked_file in glob.glob(os.path.join(tmp_dir, "*.txt")):
fn = os.path.basename(test_locked_file)
locked_file = os.path.join(REQS_DIR, fn)
print_diff(locked_file, test_locked_file)
# Flush the output to ensure we print the error at the end.
sys.stdout.flush()
raise Exception(
"It looks like you have updated some python dependencies but haven't "
"updated locked requirements files. Please update them by running "
"`tools/update-locked-requirements`. For more information please "
"refer to `requirements/README.md`."
)
if __name__ == "__main__":
main()


@@ -25,9 +25,7 @@ if __name__ == "__main__":
if args.coverage:
import coverage
cov = coverage.Coverage(
branch=True, omit=["*/zulip-venv-cache/*", os.path.join(tools_test_dir, "*")]
)
cov = coverage.Coverage(branch=True, omit=[os.path.join(tools_test_dir, "*")])
cov.start()
suite = loader.discover(start_dir=tools_test_dir, top_level_dir=root_dir)


@@ -1,23 +0,0 @@
import unittest
from unittest import mock
from scripts.lib.hash_reqs import expand_reqs, hash_deps
from tools.setup.setup_venvs import DEV_REQS_FILE
class TestHashCreation(unittest.TestCase):
def test_diff_hash_for_diff_python_version(self) -> None:
with mock.patch("scripts.lib.hash_reqs.python_version", return_value="Python 3.6.9"):
deps = expand_reqs(DEV_REQS_FILE)
hash1 = hash_deps(deps)
with mock.patch("scripts.lib.hash_reqs.python_version", return_value="Python 3.6.9"):
deps = expand_reqs(DEV_REQS_FILE)
hash2 = hash_deps(deps)
with mock.patch("scripts.lib.hash_reqs.python_version", return_value="Python 3.8.2"):
deps = expand_reqs(DEV_REQS_FILE)
hash3 = hash_deps(deps)
assert hash1 == hash2
assert hash1 != hash3


@@ -1,56 +0,0 @@
#!/usr/bin/env bash
set -e
# Make sure the Zulip dev virtualenv exists, and operate within it.
if [ ! -d /srv/zulip-py3-venv ]; then
./tools/setup/setup_venvs.py
fi
compile_requirements() {
source="$1"
output="$2"
echo "Compiling $output"
/srv/zulip-py3-venv/bin/pip-compile --quiet --allow-unsafe --generate-hashes --no-header --output-file "$output" "$source"
cat - "$output" <<EOF | sponge "$output"
#
# This file is GENERATED. Don't edit directly.
#
# To update, edit the non-"lock" files in requirements/*.in, then:
#
# tools/update-locked-requirements
#
# For details, see requirements/README.md .
#
EOF
# Work around https://github.com/jazzband/pip-tools/issues/268
chmod a+r "$output"
}
OUTPUT_BASE_DIR='requirements'
# Parse arguments.
if [ $# -gt 0 ]; then
while [ "$1" != "" ]; do
case $1 in
--output-dir)
shift
OUTPUT_BASE_DIR=$(readlink -m "$1")
;;
*)
echo "Invalid arguments passed."
echo "Usage: $0 [--output-dir <path-to-output-dir>]"
exit
;;
esac
shift
done
fi
compile_requirements requirements/dev.in "$OUTPUT_BASE_DIR/dev.txt"
for name in pip prod docs; do
cp "$OUTPUT_BASE_DIR/dev.txt" "$OUTPUT_BASE_DIR/$name.txt"
compile_requirements "requirements/$name.in" "$OUTPUT_BASE_DIR/$name.txt"
done


@@ -39,9 +39,9 @@
"help-beta",
// Skip walking large generated directories.
".venv",
"docs/_build",
"static/webpack-bundles",
"var",
"zulip-py3-venv",
],
}

uv.lock (generated, new file)

File diff suppressed because it is too large


@@ -49,4 +49,4 @@ API_FEATURE_LEVEL = 356 # Last bumped for adding `include_can_access_content` t
# historical commits sharing the same major version, in which case a
# minor version bump suffices.
PROVISION_VERSION = (316, 1) # bumped 2025-02-21 to patch talon
PROVISION_VERSION = (317, 0) # bumped 2025-02-24 to migrate to uv


@@ -57,7 +57,7 @@ VERBOSE_MESSAGE_ABOUT_HASH_TRANSITION = """
def migration_paths() -> list[str]:
return [
*glob.glob("*/migrations/*.py"),
"requirements/dev.txt",
"uv.lock",
]


@@ -20,7 +20,7 @@ integration](/api/incoming-webhooks-walkthrough).
environment](https://zulip.readthedocs.io/en/latest/development/overview.html):
```
(zulip-py3-venv) vagrant@vagrant:/srv/zulip$
(zulip-server) vagrant@vagrant:/srv/zulip$
./manage.py send_webhook_fixture_message \
> --fixture=zerver/tests/fixtures/helloworld/hello.json \
> '--url=http://localhost:9991/api/v1/external/helloworld?api_key=abcdefgh&stream=channel%20name;'