python: Reformat with Black, except quotes.
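
A reformat like this can usually be reproduced by running Black with string normalization disabled, which leaves existing quote characters untouched. The exact invocation and line-length setting below are assumptions for illustration, not taken from this commit:

    # Sketch: reformat the tree in place while preserving string quotes
    black --skip-string-normalization --line-length=100 .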

Signed-off-by: Anders Kaseorg <anders@zulip.com>
Anders Kaseorg authored 2021-02-11 23:19:30 -08:00, committed by Tim Abbott
parent 5028c081cb
commit 11741543da
817 changed files with 44952 additions and 24860 deletions


@@ -13,30 +13,28 @@ VENV_CACHE_PATH = "/srv/zulip-venv-cache"
 VENV_DEPENDENCIES = [
     "build-essential",
     "libffi-dev",
-    "libfreetype6-dev", # Needed for image types with Pillow
-    "zlib1g-dev", # Needed to handle compressed PNGs with Pillow
-    "libjpeg-dev", # Needed to handle JPEGs with Pillow
+    "libfreetype6-dev",  # Needed for image types with Pillow
+    "zlib1g-dev",  # Needed to handle compressed PNGs with Pillow
+    "libjpeg-dev",  # Needed to handle JPEGs with Pillow
     "libldap2-dev",
-    "python3-dev", # Needed to install typed-ast dependency of mypy
+    "python3-dev",  # Needed to install typed-ast dependency of mypy
     "python3-pip",
     "virtualenv",
-    "libxml2-dev", # Used for installing talon and python-xmlsec
-    "libxslt1-dev", # Used for installing talon
-    "libpq-dev", # Needed by psycopg2
-    "libssl-dev", # Needed to build pycurl and other libraries
-    "libmagic1", # Used for install python-magic
-    "libyaml-dev", # For fast YAML parsing in PyYAML
+    "libxml2-dev",  # Used for installing talon and python-xmlsec
+    "libxslt1-dev",  # Used for installing talon
+    "libpq-dev",  # Needed by psycopg2
+    "libssl-dev",  # Needed to build pycurl and other libraries
+    "libmagic1",  # Used for install python-magic
+    "libyaml-dev",  # For fast YAML parsing in PyYAML
     # Needed by python-xmlsec:
     "libxmlsec1-dev",
     "pkg-config",
     # This is technically a node dependency, but we add it here
     # because we don't have another place that we install apt packages
     # on upgrade of a production server, and it's not worth adding
     # another call to `apt install` for.
-    "jq", # Used by scripts/lib/install-node to check yarn version
-    "libsasl2-dev", # For building python-ldap from source
+    "jq",  # Used by scripts/lib/install-node to check yarn version
+    "libsasl2-dev",  # For building python-ldap from source
 ]
 COMMON_YUM_VENV_DEPENDENCIES = [
@@ -89,6 +87,7 @@ YUM_THUMBOR_VENV_DEPENDENCIES = [
     "gifsicle",
 ]
 
+
 def get_venv_dependencies(vendor: str, os_version: str) -> List[str]:
     if "debian" in os_families():
         return VENV_DEPENDENCIES
@@ -99,14 +98,17 @@ def get_venv_dependencies(vendor: str, os_version: str) -> List[str]:
     else:
         raise AssertionError("Invalid vendor")
 
+
 def install_venv_deps(pip: str, requirements_file: str) -> None:
     pip_requirements = os.path.join(ZULIP_PATH, "requirements", "pip.txt")
     run([pip, "install", "--force-reinstall", "--require-hashes", "-r", pip_requirements])
     run([pip, "install", "--no-deps", "--require-hashes", "-r", requirements_file])
 
+
 def get_index_filename(venv_path: str) -> str:
     return os.path.join(venv_path, 'package_index')
 
+
 def get_package_names(requirements_file: str) -> List[str]:
     packages = expand_reqs(requirements_file)
     cleaned = []
@@ -129,6 +131,7 @@ def get_package_names(requirements_file: str) -> List[str]:
     return sorted(cleaned)
 
+
 def create_requirements_index_file(venv_path: str, requirements_file: str) -> str:
     """
     Creates a file, called package_index, in the virtual environment
@@ -144,6 +147,7 @@ def create_requirements_index_file(venv_path: str, requirements_file: str) -> st
     return index_filename
 
+
 def get_venv_packages(venv_path: str) -> Set[str]:
     """
     Returns the packages installed in the virtual environment using the
@@ -152,6 +156,7 @@ def get_venv_packages(venv_path: str) -> Set[str]:
     with open(get_index_filename(venv_path)) as reader:
         return {p.strip() for p in reader.read().split('\n') if p.strip()}
 
+
 def try_to_copy_venv(venv_path: str, new_packages: Set[str]) -> bool:
     """
     Tries to copy packages from an old virtual environment in the cache
@@ -174,16 +179,16 @@ def try_to_copy_venv(venv_path: str, new_packages: Set[str]) -> bool:
     old_packages = set()  # type: Set[str]
     for sha1sum in os.listdir(VENV_CACHE_PATH):
         curr_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, venv_name)
-        if (curr_venv_path == venv_path or
-                not os.path.exists(get_index_filename(curr_venv_path))):
+        if curr_venv_path == venv_path or not os.path.exists(get_index_filename(curr_venv_path)):
             continue
         # Check the Python version in the venv matches the version we want to use.
         venv_python3 = os.path.join(curr_venv_path, "bin", "python3")
         if not os.path.exists(venv_python3):
             continue
-        venv_python_version = subprocess.check_output([
-            venv_python3, "-VV"], universal_newlines=True)
+        venv_python_version = subprocess.check_output(
+            [venv_python3, "-VV"], universal_newlines=True
+        )
         if desired_python_version != venv_python_version:
             continue
@@ -225,29 +230,33 @@ def try_to_copy_venv(venv_path: str, new_packages: Set[str]) -> bool:
         success_stamp_path = os.path.join(venv_path, 'success-stamp')
         run_as_root(["rm", "-f", success_stamp_path])
-        run_as_root(["chown", "-R",
-                     "{}:{}".format(os.getuid(), os.getgid()), venv_path])
+        run_as_root(["chown", "-R", "{}:{}".format(os.getuid(), os.getgid()), venv_path])
         source_log = get_logfile_name(source_venv_path)
         copy_parent_log(source_log, target_log)
-        create_log_entry(target_log, source_venv_path, copied_packages,
-                         new_packages - copied_packages)
+        create_log_entry(
+            target_log, source_venv_path, copied_packages, new_packages - copied_packages
+        )
         return True
     return False
 
+
 def get_logfile_name(venv_path: str) -> str:
     return "{}/setup-venv.log".format(venv_path)
 
+
 def create_log_entry(
-    target_log: str, parent: str, copied_packages: Set[str], new_packages: Set[str],
+    target_log: str,
+    parent: str,
+    copied_packages: Set[str],
+    new_packages: Set[str],
 ) -> None:
     venv_path = os.path.dirname(target_log)
     with open(target_log, 'a') as writer:
         writer.write("{}\n".format(venv_path))
         if copied_packages:
-            writer.write(
-                "Copied from {}:\n".format(parent))
+            writer.write("Copied from {}:\n".format(parent))
             writer.write("\n".join('- {}'.format(p) for p in sorted(copied_packages)))
             writer.write("\n")
@@ -255,10 +264,12 @@ def create_log_entry(
             writer.write("\n".join('- {}'.format(p) for p in sorted(new_packages)))
             writer.write("\n\n")
 
+
 def copy_parent_log(source_log: str, target_log: str) -> None:
     if os.path.exists(source_log):
         shutil.copyfile(source_log, target_log)
 
+
 def do_patch_activate_script(venv_path: str) -> None:
     """
     Patches the bin/activate script so that the value of the environment variable VIRTUAL_ENV
@@ -276,11 +287,13 @@ def do_patch_activate_script(venv_path: str) -> None:
     with open(script_path, 'w') as f:
         f.write("".join(lines))
 
+
 def generate_hash(requirements_file: str) -> str:
     path = os.path.join(ZULIP_PATH, 'scripts', 'lib', 'hash_reqs.py')
     output = subprocess.check_output([path, requirements_file], universal_newlines=True)
     return output.split()[0]
 
+
 def setup_virtualenv(
     target_venv_path: Optional[str],
     requirements_file: str,
@@ -292,7 +305,9 @@ def setup_virtualenv(
     if target_venv_path is None:
         cached_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, 'venv')
     else:
-        cached_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, os.path.basename(target_venv_path))
+        cached_venv_path = os.path.join(
+            VENV_CACHE_PATH, sha1sum, os.path.basename(target_venv_path)
+        )
     success_stamp = os.path.join(cached_venv_path, "success-stamp")
     if not os.path.exists(success_stamp):
         do_setup_virtualenv(cached_venv_path, requirements_file)
@@ -306,12 +321,14 @@ def setup_virtualenv(
         do_patch_activate_script(target_venv_path)
     return cached_venv_path
 
+
 def add_cert_to_pipconf() -> None:
     conffile = os.path.expanduser("~/.pip/pip.conf")
     confdir = os.path.expanduser("~/.pip/")
     os.makedirs(confdir, exist_ok=True)
     run(["crudini", "--set", conffile, "global", "cert", os.environ["CUSTOM_CA_CERTIFICATES"]])
 
+
 def do_setup_virtualenv(venv_path: str, requirements_file: str) -> None:
     # Setup Python virtualenv
@@ -322,8 +339,7 @@ def do_setup_virtualenv(venv_path: str, requirements_file: str) -> None:
     # Create new virtualenv.
     run_as_root(["mkdir", "-p", venv_path])
     run_as_root(["virtualenv", "-p", "python3", venv_path])
-    run_as_root(["chown", "-R",
-                 "{}:{}".format(os.getuid(), os.getgid()), venv_path])
+    run_as_root(["chown", "-R", "{}:{}".format(os.getuid(), os.getgid()), venv_path])
 
     create_log_entry(get_logfile_name(venv_path), "", set(), new_packages)
     create_requirements_index_file(venv_path, requirements_file)