With the new portico work we've done, the help documentation does somewhat depend on the database if you're logged in. So it's best to just require it for these tests.
46 lines
1.4 KiB
Python
Executable File
#!/usr/bin/env python3
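#
# Crawl the help and API documentation served by a local Zulip test
# server, failing if either crawler reports an error. Assuming the usual
# checkout layout (this script lives in tools/), it is invoked from the
# repository root as ./tools/test-help-documentation.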

import argparse
import os
import subprocess
import sys
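
# ZULIP_PATH is the repository root: this file lives in tools/, so its
# grandparent directory is the root of the checkout.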
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Check for the venv before importing anything that depends on it.
from lib import sanity_check
sanity_check.check_venv(__file__)

parser = argparse.ArgumentParser()
parser.add_argument('--force', default=False,
                    action="store_true",
                    help='Run tests despite possible problems.')
options = parser.parse_args()
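
# Run from the repository root so the relative paths below resolve, and
# put the root on sys.path so tools.lib is importable.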
os.chdir(ZULIP_PATH)
sys.path.insert(0, ZULIP_PATH)
from tools.lib.test_server import test_server_running
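
# Make sure the directory for the test server's log file exists.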
subprocess.check_call(['mkdir', '-p', 'var/help-documentation'])

LOG_FILE = 'var/help-documentation/server.log'
external_host = "localhost:9981"
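
# Run both documentation crawlers against a development test server.
# use_db=True because, with the portico changes, logged-in help pages
# depend on the database (see the commit message above).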
with test_server_running(options.force, external_host, log_file=LOG_FILE,
                         dots=True, use_db=True):
    ret_help_doc = subprocess.call(('scrapy', 'crawl_with_status', 'help_documentation_crawler'),
                                   cwd='tools/documentation_crawler')
    ret_api_doc = subprocess.call(('scrapy', 'crawl_with_status', 'api_documentation_crawler'),
                                  cwd='tools/documentation_crawler')
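
# Report the combined result, colorized with ANSI escape codes
# (91 = bright red, 92 = bright green).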
if ret_help_doc != 0 or ret_api_doc != 0:
    print("\033[0;91m")
    print("Failed")
    print("\033[0m")
else:
    print("\033[0;92m")
    print("Passed!")
    print("\033[0m")

sys.exit(ret_help_doc or ret_api_doc)