digest refactor: Clean up names and comments.
We now use `extra_strings` instead of `package_versions` to allow for more generic digests to be built (without naming confusion).
@@ -379,26 +379,40 @@ def os_families() -> Set[str]:
     distro_info = parse_os_release()
     return {distro_info["ID"], *distro_info.get("ID_LIKE", "").split()}
 
-def path_version_digest(paths: List[str],
-                        package_versions: List[str]) -> str:
-
+def files_and_string_digest(filenames: List[str],
+                            extra_strings: List[str]) -> str:
+    # see is_digest_obsolete for more context
     sha1sum = hashlib.sha1()
-    for path in paths:
-        with open(path, 'rb') as file_to_hash:
+    for fn in filenames:
+        with open(fn, 'rb') as file_to_hash:
             sha1sum.update(file_to_hash.read())
 
-    # The output of tools like build_pygments_data depends
-    # on the version of some pip packages as well.
-    for package_version in package_versions:
-        sha1sum.update(package_version.encode("utf-8"))
+    for extra_string in extra_strings:
+        sha1sum.update(extra_string.encode("utf-8"))
 
     return sha1sum.hexdigest()
 
 def is_digest_obsolete(hash_name: str,
-                       paths: List[str],
-                       package_versions: List[str]=[]) -> bool:
-    # Check whether the `paths` contents or
-    # `package_versions` have changed.
+                       filenames: List[str],
+                       extra_strings: List[str]=[]) -> bool:
+    '''
+    In order to determine if we need to run some
+    process, we calculate a digest of the important
+    files and strings whose respective contents
+    or values may indicate such a need.
+
+        filenames = files we should hash the contents of
+        extra_strings = strings we should hash directly
+
+    Grep for callers to see examples of how this is used.
+
+    To elaborate on extra_strings, they will typically
+    be things like:
+
+        - package versions (that we import)
+        - settings values (that we stringify with
+          json, deterministically)
+    '''
     last_hash_path = os.path.join(get_dev_uuid_var_path(), hash_name)
     try:
         with open(last_hash_path) as f:
@@ -408,15 +422,15 @@ def is_digest_obsolete(hash_name: str,
         # digest is an obsolete digest.
         return True
 
-    new_hash = path_version_digest(paths, package_versions)
+    new_hash = files_and_string_digest(filenames, extra_strings)
 
     return new_hash != old_hash
 
 def write_new_digest(hash_name: str,
-                     paths: List[str],
-                     package_versions: List[str]=[]) -> None:
+                     filenames: List[str],
+                     extra_strings: List[str]=[]) -> None:
     hash_path = os.path.join(get_dev_uuid_var_path(), hash_name)
-    new_hash = path_version_digest(paths, package_versions)
+    new_hash = files_and_string_digest(filenames, extra_strings)
     with open(hash_path, 'w') as f:
         f.write(new_hash)
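
To make the renamed API concrete, here is a minimal caller sketch. It is not an actual Zulip call site: the hash name, file path, and settings dict are hypothetical examples, and `is_digest_obsolete`/`write_new_digest` are assumed to be importable from the module being refactored.

    import json

    import pygments

    # Hypothetical inputs: a file whose contents matter, plus strings
    # whose values should also invalidate the cached digest.
    filenames = ['tools/setup/build_pygments_data']
    extra_strings = [
        pygments.__version__,  # a package version that we import
        json.dumps({'theme': 'default'}, sort_keys=True),  # settings, stringified deterministically
    ]

    if is_digest_obsolete('pygments_data_hash', filenames, extra_strings):
        ...  # run the expensive process (e.g. rebuild generated data)
        write_new_digest('pygments_data_hash', filenames, extra_strings)

Note that the caller writes the new digest only after the expensive step succeeds, so a crash mid-process leaves the old digest in place and the work is retried on the next run.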