python: Convert json.loads(f.read()) to json.load(f).

Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
Anders Kaseorg authored on 2020-03-19 18:12:02 -07:00; committed by Tim Abbott
parent 328961a4fc
commit 39f9abeb3f
12 changed files with 27 additions and 26 deletions
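
Note: the conversion applied throughout this commit relies on json.load (and ujson.load) accepting any object with a .read() method, so there is no need to slurp the file into a string first. A minimal before/after sketch of the pattern (the file name is a hypothetical stand-in):

    import json

    # Before: read the whole file into a string, then parse the string.
    with open("settings.json") as f:  # "settings.json" is illustrative only
        config = json.loads(f.read())

    # After: let the parser consume the file object directly.
    with open("settings.json") as f:
        config = json.load(f)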

View File

@@ -40,7 +40,7 @@ def generate_sha1sum_node_modules(setup_dir=None, production=DEFAULT_PRODUCTION)
         # For backwards compatibility, we can't assume yarn.lock exists
         sha1sum.update(subprocess_text_output(['cat', YARN_LOCK_FILE_PATH]).encode('utf8'))
     with open(YARN_PACKAGE_JSON, "r") as f:
-        yarn_version = json.loads(f.read())['version']
+        yarn_version = json.load(f)['version']
     sha1sum.update(yarn_version.encode("utf8"))
     sha1sum.update(subprocess_text_output(['node', '--version']).encode('utf8'))
     yarn_args = get_yarn_args(production=production)

View File

@@ -45,7 +45,7 @@ def user_exists(username):
     user_api_url = "https://api.github.com/users/{0}".format(username)
     try:
         response = urllib.request.urlopen(user_api_url)
-        json.loads(response.read().decode())
+        json.load(response)
         print("...user exists!")
         return True
     except urllib.error.HTTPError as err:
@@ -59,7 +59,7 @@ def get_keys(username):
     apiurl_keys = "https://api.github.com/users/{0}/keys".format(username)
     try:
         response = urllib.request.urlopen(apiurl_keys)
-        userkeys = json.loads(response.read().decode())
+        userkeys = json.load(response)
         if not userkeys:
             print("No keys found. Has user {0} added ssh keys to their github account?".format(username))
             sys.exit(1)
@@ -76,7 +76,7 @@ def fork_exists(username):
     apiurl_fork = "https://api.github.com/repos/{0}/zulip".format(username)
     try:
         response = urllib.request.urlopen(apiurl_fork)
-        json.loads(response.read().decode())
+        json.load(response)
         print("...fork found!")
         return True
     except urllib.error.HTTPError as err:
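
Note: json.load is not limited to files on disk; it accepts any object with a .read() method, including the response object returned by urllib.request.urlopen. That is why json.loads(response.read().decode()) collapses to json.load(response) in the three hunks above. Since Python 3.6 the json module also parses UTF-8 bytes directly, so the explicit .decode() is redundant. A sketch (the endpoint is illustrative only):

    import json
    import urllib.request

    # The parser calls response.read() itself and accepts the UTF-8 bytes.
    with urllib.request.urlopen("https://api.github.com/users/octocat") as response:
        user = json.load(response)
    print(user["login"])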

View File

@@ -115,7 +115,7 @@ def run_production() -> None:
     # remove duplicate contributions count
     # find commits at the time of split and subtract from zulip-server
     with open(duplicate_commits_file, 'r') as f:
-        duplicate_commits = json.loads(f.read())
+        duplicate_commits = json.load(f)
     for committer in duplicate_commits:
         if committer in contribs_list and contribs_list[committer].get('server'):
             total_commits = contribs_list[committer]['server']

View File

@@ -954,7 +954,7 @@ def export_usermessages_batch(input_path: Path, output_path: Path,
     objects. (This is called by the export_usermessage_batch
     management command)."""
     with open(input_path, "r") as input_file:
-        output = ujson.loads(input_file.read())
+        output = ujson.load(input_file)
     message_ids = [item['id'] for item in output['zerver_message']]
     user_profile_ids = set(output['zerver_userprofile_ids'])
     del output['zerver_userprofile_ids']
@@ -1429,8 +1429,7 @@ def do_write_stats_file_for_realm_export(output_dir: Path) -> None:
         for fn in fns:
             f.write(os.path.basename(fn) + '\n')
             with open(fn, 'r') as filename:
-                payload = filename.read()
-                data = ujson.loads(payload)
+                data = ujson.load(filename)
             for k in sorted(data):
                 f.write('%5d %s\n' % (len(data[k]), k))
             f.write('\n')
@@ -1441,8 +1440,7 @@ def do_write_stats_file_for_realm_export(output_dir: Path) -> None:
         for fn in [avatar_file, uploads_file]:
             f.write(fn+'\n')
             with open(fn, 'r') as filename:
-                payload = filename.read()
-                data = ujson.loads(payload)
+                data = ujson.load(filename)
             f.write('%5d records\n' % (len(data),))
             f.write('\n')
 

View File

@@ -603,7 +603,7 @@ def import_uploads(realm: Realm, import_dir: Path, processes: int, processing_av
     records_filename = os.path.join(import_dir, "records.json")
     with open(records_filename) as records_file:
-        records = ujson.loads(records_file.read())  # type: List[Dict[str, Any]]
+        records = ujson.load(records_file)  # type: List[Dict[str, Any]]
 
     timestamp = datetime_to_timestamp(timezone_now())
 
     re_map_foreign_keys_internal(records, 'records', 'realm_id', related_table="realm",

View File

@@ -91,4 +91,5 @@ approach shown above.
         return os.path.exists(fixture_path)
 
     def _get_fixture_as_json(self, fixture_path: str) -> str:
-        return ujson.dumps(ujson.loads(open(fixture_path).read()))
+        with open(fixture_path) as f:
+            return ujson.dumps(ujson.load(f))
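
Note: besides the loads-to-load conversion, this hunk wraps the bare open() call in a with statement, so the file handle is closed deterministically instead of waiting for garbage collection (CPython emits a ResourceWarning for handles leaked that way). A minimal sketch contrasting the two shapes (the path is a hypothetical stand-in):

    import json

    # Old shape: the file object is never explicitly closed.
    data = json.loads(open("fixture.json").read())

    # New shape: closed deterministically, parsed straight from the handle.
    with open("fixture.json") as f:
        data = json.load(f)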

View File

@@ -49,7 +49,7 @@ def ensure_users(ids_list: List[int], user_names: List[str]) -> None:
 def load_api_fixtures():
     # type: () -> Dict[str, Any]
     with open(FIXTURE_PATH, 'r') as fp:
-        json_dict = json.loads(fp.read())
+        json_dict = json.load(fp)
     return json_dict
 
 FIXTURES = load_api_fixtures()

View File

@@ -357,7 +357,7 @@ class BugdownTest(ZulipTestCase):
     def load_bugdown_tests(self) -> Tuple[Dict[str, Any], List[List[str]]]:
         test_fixtures = {}
         with open(os.path.join(os.path.dirname(__file__), 'fixtures/markdown_test_cases.json'), 'r') as f:
-            data = ujson.loads('\n'.join(f.readlines()))
+            data = ujson.load(f)
 
         for test in data['regular_tests']:
             test_fixtures[test['name']] = test
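
Note: the removed line here was a more roundabout variant of the same pattern. f.readlines() keeps each line's trailing newline, so joining on '\n' actually doubles the line breaks; JSON treats that extra whitespace as insignificant, which is why ujson.load(f) is a safe drop-in. A small equivalence sketch, assuming the ujson package is installed and the fixture path (hypothetical here) exists:

    import ujson

    path = "markdown_test_cases.json"  # hypothetical fixture path

    with open(path) as f:
        roundabout = ujson.loads('\n'.join(f.readlines()))  # doubled newlines
    with open(path) as f:
        direct = ujson.load(f)

    assert roundabout == direct  # the extra whitespace never mattered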

View File

@@ -237,7 +237,7 @@ class TestSendWebhookFixtureMessage(TestCase):
                                          ujson_mock: MagicMock,
                                          client_mock: MagicMock,
                                          os_path_exists_mock: MagicMock) -> None:
-        ujson_mock.loads.return_value = {}
+        ujson_mock.load.return_value = {}
         ujson_mock.dumps.return_value = "{}"
         os_path_exists_mock.return_value = True
 
@@ -246,7 +246,7 @@ class TestSendWebhookFixtureMessage(TestCase):
         with self.assertRaises(CommandError):
             call_command(self.COMMAND_NAME, fixture=self.fixture_path, url=self.url)
         self.assertTrue(ujson_mock.dumps.called)
-        self.assertTrue(ujson_mock.loads.called)
+        self.assertTrue(ujson_mock.load.called)
         self.assertTrue(open_mock.called)
         client.post.assert_called_once_with(self.url, "{}", content_type="application/json",
                                             HTTP_HOST="zulip.testserver")
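
Note: because the production code now calls ujson.load rather than ujson.loads, the mocked attribute and the assertion must be renamed in lockstep, or the test would assert on a function the code no longer calls. A self-contained sketch of the idea, assuming the ujson package is installed (read_fixture is a hypothetical stand-in for the command under test):

    from unittest import mock

    import ujson

    def read_fixture(path: str) -> dict:
        # Stand-in for the fixture-loading code being tested.
        with open(path) as f:
            return ujson.load(f)

    # Mock and assert on ujson.load, not ujson.loads.
    with mock.patch("ujson.load", return_value={}) as load_mock, \
            mock.patch("builtins.open", mock.mock_open(read_data="{}")):
        assert read_fixture("fixture.json") == {}
        assert load_mock.called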

View File

@@ -457,7 +457,8 @@ class NarrowLibraryTest(TestCase):
     def test_build_narrow_filter(self) -> None:
         fixtures_path = os.path.join(os.path.dirname(__file__),
                                      'fixtures/narrow.json')
-        scenarios = ujson.loads(open(fixtures_path, 'r').read())
+        with open(fixtures_path, 'r') as f:
+            scenarios = ujson.load(f)
         self.assertTrue(len(scenarios) == 9)
         for scenario in scenarios:
             narrow = scenario['narrow']

View File

@@ -26,7 +26,7 @@ class SlackMessageConversion(ZulipTestCase):
         test_fixtures = {}
         with open(os.path.join(os.path.dirname(__file__),
                                'fixtures/slack_message_conversion.json'), 'r') as f:
-            data = ujson.loads('\n'.join(f.readlines()))
+            data = ujson.load(f)
 
         for test in data['regular_tests']:
             test_fixtures[test['name']] = test

View File

@@ -460,19 +460,20 @@ def load_event_queues(port: int) -> None:
     global clients
     start = time.time()
 
-    # ujson chokes on bad input pretty easily. We separate out the actual
-    # file reading from the loading so that we don't silently fail if we get
-    # bad input.
     try:
         with open(persistent_queue_filename(port), "r") as stored_queues:
-            json_data = stored_queues.read()
+            data = ujson.load(stored_queues)
+    except FileNotFoundError:
+        pass
+    except ValueError:
+        logging.exception("Tornado %d could not deserialize event queues" % (port,))
+    else:
         try:
-            clients = dict((qid, ClientDescriptor.from_dict(client))
-                           for (qid, client) in ujson.loads(json_data))
+            clients = {
+                qid: ClientDescriptor.from_dict(client) for (qid, client) in data
+            }
         except Exception:
             logging.exception("Tornado %d could not deserialize event queues" % (port,))
-    except (IOError, EOFError):
-        pass
 
     for client in clients.values():
         # Put code for migrations due to event queue data format changes here
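
Note: this last hunk does more than switch to ujson.load; it also narrows the error handling. Only the read-and-parse step stays inside the try, FileNotFoundError replaces the broad (IOError, EOFError) catch, and the else clause separates a parse failure from a failure while reconstructing the ClientDescriptor map. A standalone sketch of that shape (load_state is a hypothetical stand-in for load_event_queues):

    import json
    import logging

    def load_state(filename: str) -> dict:
        state = {}
        try:
            with open(filename) as f:
                data = json.load(f)
        except FileNotFoundError:
            pass  # no saved state yet; start fresh
        except ValueError:
            # json.JSONDecodeError subclasses ValueError.
            logging.exception("could not deserialize %s", filename)
        else:
            # Runs only when the file parsed cleanly, so a bug in this
            # post-processing step is never misattributed to bad JSON.
            state = dict(data)
        return state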