digest refactor: Clean up names and comments.
We now use `extra_strings` instead of `package_versions` to allow for more generic digests to be built (without naming confusion).
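For context, a typical caller of the renamed helpers might look like the sketch below. This is illustrative only: the caller name, input file path, and settings dict are hypothetical, and the import path assumes the helpers live in scripts/lib/zulip_tools.py; it is not part of this commit.

# Hypothetical caller sketch; names and paths are assumptions, not from this diff.
import json

from scripts.lib.zulip_tools import is_digest_obsolete, write_new_digest

def maybe_rebuild_cache(settings: dict) -> None:
    filenames = ['tools/setup/some_input_file.json']  # hypothetical input file
    # extra_strings can be any values whose change should force a rebuild:
    # pip package versions, or settings serialized deterministically as JSON.
    extra_strings = [json.dumps(settings, sort_keys=True)]
    if is_digest_obsolete('some_cache_hash', filenames, extra_strings):
        ...  # run the expensive build step here
        write_new_digest('some_cache_hash', filenames, extra_strings)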
@@ -379,26 +379,40 @@ def os_families() -> Set[str]:
     distro_info = parse_os_release()
     return {distro_info["ID"], *distro_info.get("ID_LIKE", "").split()}
 
-def path_version_digest(paths: List[str],
-                        package_versions: List[str]) -> str:
+def files_and_string_digest(filenames: List[str],
+                            extra_strings: List[str]) -> str:
     # see is_digest_obsolete for more context
     sha1sum = hashlib.sha1()
-    for path in paths:
-        with open(path, 'rb') as file_to_hash:
+    for fn in filenames:
+        with open(fn, 'rb') as file_to_hash:
             sha1sum.update(file_to_hash.read())
 
-    # The output of tools like build_pygments_data depends
-    # on the version of some pip packages as well.
-    for package_version in package_versions:
-        sha1sum.update(package_version.encode("utf-8"))
+    for extra_string in extra_strings:
+        sha1sum.update(extra_string.encode("utf-8"))
 
     return sha1sum.hexdigest()
 
 def is_digest_obsolete(hash_name: str,
-                       paths: List[str],
-                       package_versions: List[str]=[]) -> bool:
-    # Check whether the `paths` contents or
-    # `package_versions` have changed.
+                       filenames: List[str],
+                       extra_strings: List[str]=[]) -> bool:
+    '''
+    In order to determine if we need to run some
+    process, we calculate a digest of the important
+    files and strings whose respective contents
+    or values may indicate such a need.
+
+        filenames = files we should hash the contents of
+        extra_strings = strings we should hash directly
+
+    Grep for callers to see examples of how this is used.
+
+    To elaborate on extra_strings, they will typically
+    be things like:
+
+        - package versions (that we import)
+        - settings values (that we stringify with
+          json, deterministically)
+    '''
     last_hash_path = os.path.join(get_dev_uuid_var_path(), hash_name)
     try:
         with open(last_hash_path) as f:
@@ -408,15 +422,15 @@ def is_digest_obsolete(hash_name: str,
         # digest is an obsolete digest.
         return True
 
-    new_hash = path_version_digest(paths, package_versions)
+    new_hash = files_and_string_digest(filenames, extra_strings)
 
     return new_hash != old_hash
 
 def write_new_digest(hash_name: str,
-                     paths: List[str],
-                     package_versions: List[str]=[]) -> None:
+                     filenames: List[str],
+                     extra_strings: List[str]=[]) -> None:
     hash_path = os.path.join(get_dev_uuid_var_path(), hash_name)
-    new_hash = path_version_digest(paths, package_versions)
+    new_hash = files_and_string_digest(filenames, extra_strings)
     with open(hash_path, 'w') as f:
         f.write(new_hash)
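On the docstring's point about stringifying settings values "with json, deterministically": serializing with a stable key order means two equal settings dicts always feed identical bytes into the digest. A minimal standalone illustration (the setting names here are made up, not from the codebase):

import hashlib
import json

# Hypothetical settings; same key/value pairs, different insertion order.
settings_a = {'EMOJI_VERSION': 5, 'NODE_VERSION': '10.16'}
settings_b = {'NODE_VERSION': '10.16', 'EMOJI_VERSION': 5}

# sort_keys=True makes the JSON order-independent, so equal settings always
# hash the same and never spuriously invalidate a cached digest.
a = json.dumps(settings_a, sort_keys=True)
b = json.dumps(settings_b, sort_keys=True)
assert a == b
print(hashlib.sha1(a.encode("utf-8")).hexdigest())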