We recently added a really nice feature, called `--interactive`, which lets you loop through Casper tests without having to restart the test script every time. I am renaming it to `--loop` and adding a few features:

- The first loop runs without you having to tell it to start. (This means you don't have to sit there waiting for webpack to finish and for the server to start just to launch the tests again.)
- You specify how many loops you want to run, so in the success case it won't keep going forever; it eventually stops, giving you an opportunity to refine the test further without re-launching.
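For example (mirroring the usage text in the script below), a five-pass loop over the navigation test looks like:

    test-js-with-casper --loop 5 09

The first pass starts on its own once the test server is up; after the last pass (or after a failure) the script prompts you to enter `q` to quit or a new number of loops.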
189 lines · 6.5 KiB · Python · Executable File
#!/usr/bin/env python3
import argparse
import subprocess
import sys
import os
import glob
import shlex

#
# In order to use remote casperjs debugging, pass the --remote-debug flag.
# This will start a remote debugging session listening on port 7777.
#
# See https://zulip.readthedocs.io/en/latest/testing/testing-with-casper.html
# for more information on how to use remote debugging.
#

ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

os.environ["CASPER_TESTS"] = "1"
os.environ["PHANTOMJS_EXECUTABLE"] = os.path.join(ZULIP_PATH, "node_modules/.bin/phantomjs")
os.environ.pop("http_proxy", "")
os.environ.pop("https_proxy", "")

usage = """

test-js-with-casper                           # Run all test files
test-js-with-casper 09-navigation.js          # Run a single test file
test-js-with-casper 09                        # Run a single test file, 09-navigation.js
test-js-with-casper 01-login.js 03-narrow.js  # Run a few test files
test-js-with-casper 01 03                     # Run a few test files, 01-login.js and 03-narrow.js here

Using loops:

test-js-with-casper --loop 5 07               # Run 5 loops of test 07
---
"""
parser = argparse.ArgumentParser(usage=usage)

parser.add_argument('--skip-flaky-tests', dest='skip_flaky',
                    action="store_true",
                    default=False, help='Skip flaky tests')
parser.add_argument('--loop', dest='loop', nargs=1,
                    action="store", type=int,
                    default=None, help='Run tests in a loop.')
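# nargs=1 makes options.loop a one-element list (e.g. [5]); the loop count is
# read below as options.loop[0].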
parser.add_argument('--force', dest='force',
                    action="store_true",
                    default=False, help='Run tests despite possible problems.')
parser.add_argument('--verbose',
                    help='Whether or not to enable verbose mode',
                    action="store_true",
                    default=False)
parser.add_argument('--remote-debug',
                    help='Whether or not to enable remote debugging on port 7777',
                    action="store_true",
                    default=False)
parser.add_argument('--xunit-export', dest='xunit_export',
                    action="store_true",
                    default=False, help='Export the results of the test suite to an XUnit XML file.')
parser.add_argument('tests', nargs=argparse.REMAINDER,
                    help='Specific tests to run; by default, runs all tests')
options = parser.parse_args()

sys.path.insert(0, ZULIP_PATH)

# check for the venv
from tools.lib import sanity_check
sanity_check.check_venv(__file__)

from tools.lib.test_script import assert_provisioning_status_ok
from tools.lib.test_server import test_server_running

from typing import Iterable, List

assert_provisioning_status_ok(options.force)

os.chdir(ZULIP_PATH)

subprocess.check_call(['node', 'node_modules/phantomjs-prebuilt/install.js'])

os.makedirs('var/casper', exist_ok=True)

for f in glob.glob('var/casper/casper-failure*.png'):
    os.remove(f)

def reset_database() -> None:
    from zerver.lib.test_helpers import reset_emails_in_zulip_realm
    reset_emails_in_zulip_realm()

def run_tests(files: Iterable[str], external_host: str) -> None:
    test_dir = os.path.join(ZULIP_PATH, 'frontend_tests/casper_tests')
    test_files = []
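    # Expand each requested test: a prefix such as '09' is matched against the
    # file names in test_dir, and bare file names are resolved relative to
    # test_dir before being added as absolute paths.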
    for file in files:
        for file_name in os.listdir(test_dir):
            if file_name.startswith(file):
                file = file_name
                break
        if not os.path.exists(file):
            file = os.path.join(test_dir, file)
        test_files.append(os.path.abspath(file))

    if not test_files:
        test_files = sorted(glob.glob(os.path.join(test_dir, '*.js')))

    # 10-admin.js is too flaky!
    if options.skip_flaky:
        test_files = [fn for fn in test_files if '10-admin' not in fn]

    if options.loop:
        loop_cnt = options.loop[0]
        print('\n\nWe will use loop mode for these tests:\n')
        for test_file in test_files:
            print('    ' + os.path.basename(test_file))
        print('\nnumber of loops: {}\n'.format(loop_cnt))
        print()
    else:
        loop_cnt = None

    remote_debug = []  # type: List[str]
    if options.remote_debug:
        remote_debug = ["--remote-debugger-port=7777", "--remote-debugger-autorun=yes"]

    verbose = []  # type: List[str]
    if options.verbose:
        verbose = ["--verbose", "--log-level=debug"]

    xunit_export = []  # type: List[str]
    if options.xunit_export:
        xunit_export = ["--xunit=var/xunit-test-results/casper/result.xml"]

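    # Run each selected test file once, stopping at the first failure;
    # returns 0 if every file passed, otherwise casperjs's nonzero exit code.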
    def run_tests() -> int:
        ret = 1
        for test_file in test_files:
            test_name = os.path.basename(test_file)
            cmd = ["node_modules/.bin/casperjs"] + remote_debug + verbose + xunit_export + ["test", test_file]
            print("\n\n===================== %s\nRunning %s\n\n" % (test_name, " ".join(map(shlex.quote, cmd))), flush=True)
            ret = subprocess.call(cmd)
            if ret != 0:
                return ret
        return 0

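    # Loop mode: run the whole selection loop_cnt times, stopping the batch
    # early if a trial fails, then prompt for a new loop count or 'q' to quit.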
    def run_loops(loop_cnt: int) -> None:
        while True:
            for trial in range(1, loop_cnt + 1):
                print('\n\n\nSTARTING TRIAL {} / {}\n'.format(trial, loop_cnt))
                ret = run_tests()
                if ret == 0:
                    print('\n\nSUCCESS! trial #{}\n\n'.format(trial))
                else:
                    print('\n\nFAIL! trial #{}\n'.format(trial))
                    break

            while True:
                response = input('Press "q" to quit or enter number of loops: ')
                if response == 'q':
                    return
                try:
                    loop_cnt = int(response)
                    break
                except ValueError:
                    continue

    with test_server_running(options.force, external_host):
        # Important: do next things inside the `with` block, when Django
        # will be pointing at the test database.
        reset_database()
        subprocess.check_call('tools/setup/generate-test-credentials')

        # RUN THE TESTS!!!
        if loop_cnt:
            run_loops(loop_cnt)
            ret = 0
        else:
            ret = run_tests()

    if ret != 0:
        print("""
The Casper frontend tests failed! For help debugging, read:
  https://zulip.readthedocs.io/en/latest/testing/testing-with-casper.html""", file=sys.stderr)
        if os.environ.get("CIRCLECI"):
            print("", file=sys.stderr)
            print("In CircleCI, the Artifacts tab contains screenshots of the failure.", file=sys.stderr)
            print("", file=sys.stderr)

        sys.exit(ret)

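# Host:port for the Casper test server; passed through run_tests() to
# test_server_running().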
external_host = "zulipdev.com:9981"
run_tests(options.tests, external_host)
sys.exit(0)