ruff: Fix UP006 Use list instead of List for type annotation.
Signed-off-by: Anders Kaseorg <anders@zulip.com>
Committed by: Tim Abbott
parent c2214b3904
commit e08a24e47f
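Ruff's UP006 rule (from the pyupgrade family) flags annotations that use the deprecated typing aliases such as List and Set and rewrites them to the PEP 585 builtin generics. A minimal sketch of the pattern, assuming Python 3.9+; the function and parameter names below are illustrative and not taken from this diff:

    from typing import Optional

    # Before (flagged by UP006):
    #   from typing import List
    #   def filter_urls(urls: List[str]) -> List[str]: ...

    # After: the builtin list is subscriptable in annotations on Python 3.9+.
    def filter_urls(urls: list[str], limit: Optional[int] = None) -> list[str]:
        # Keep at most `limit` entries; None keeps everything.
        return urls[:limit]

    print(filter_urls(["http://localhost:9981/help/", "http://localhost:9981/api"], limit=1))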
@@ -1,12 +1,12 @@
 import optparse
-from typing import List, Union
+from typing import Union
 
 from scrapy.commands import crawl
 from scrapy.crawler import Crawler
 
 
 class Command(crawl.Command):
-    def run(self, args: List[str], opts: optparse.Values) -> None:
+    def run(self, args: list[str], opts: optparse.Values) -> None:
         crawlers = []
         real_create_crawler = self.crawler_process.create_crawler
 
@@ -1,11 +1,10 @@
 import os
 import pathlib
-from typing import List
 
 from .common.spiders import BaseDocumentationSpider
 
 
-def get_start_url() -> List[str]:
+def get_start_url() -> list[str]:
     # Get index.html file as start URL and convert it to file URI
     dir_path = os.path.dirname(os.path.realpath(__file__))
     start_file = os.path.join(
@@ -1,6 +1,6 @@
 import os
 from posixpath import basename
-from typing import Any, List, Set
+from typing import Any
 from urllib.parse import urlsplit
 
 from typing_extensions import override
@@ -20,7 +20,7 @@ class UnusedImagesLinterSpider(BaseDocumentationSpider):
 
     def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)
-        self.static_images: Set[str] = set()
+        self.static_images: set[str] = set()
         self.images_static_dir: str = get_images_dir(self.images_path)
 
     @override
@@ -45,7 +45,7 @@ class UnusedImagesLinterSpider(BaseDocumentationSpider):
 class HelpDocumentationSpider(UnusedImagesLinterSpider):
     name = "help_documentation_crawler"
     start_urls = ["http://localhost:9981/help/"]
-    deny_domains: List[str] = []
+    deny_domains: list[str] = []
     deny = ["/policies/privacy"]
     images_path = "static/images/help"
 
@@ -53,7 +53,7 @@ class HelpDocumentationSpider(UnusedImagesLinterSpider):
 class APIDocumentationSpider(UnusedImagesLinterSpider):
     name = "api_documentation_crawler"
     start_urls = ["http://localhost:9981/api"]
-    deny_domains: List[str] = []
+    deny_domains: list[str] = []
     images_path = "static/images/api"
 
 
@@ -84,4 +84,4 @@ class PorticoDocumentationSpider(BaseDocumentationSpider):
         "http://localhost:9981/for/research/",
         "http://localhost:9981/security/",
     ]
-    deny_domains: List[str] = []
+    deny_domains: list[str] = []
@@ -1,7 +1,7 @@
 import json
 import os
 import re
-from typing import Callable, Iterator, List, Optional, Union
+from typing import Callable, Iterator, Optional, Union
 from urllib.parse import urlsplit
 
 import scrapy
@@ -60,10 +60,10 @@ ZULIP_SERVER_GITHUB_DIRECTORY_PATH_PREFIX = "/zulip/zulip/tree/main"
 class BaseDocumentationSpider(scrapy.Spider):
     name: Optional[str] = None
     # Exclude domain address.
-    deny_domains: List[str] = []
-    start_urls: List[str] = []
-    deny: List[str] = []
-    file_extensions: List[str] = ["." + ext for ext in IGNORED_EXTENSIONS]
+    deny_domains: list[str] = []
+    start_urls: list[str] = []
+    deny: list[str] = []
+    file_extensions: list[str] = ["." + ext for ext in IGNORED_EXTENSIONS]
     tags = ("a", "area", "img")
     attrs = ("href", "src")
 
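For context, a hedged sketch (not part of this commit) of why the rewrite needs nothing from typing: PEP 585, implemented in Python 3.9, made builtins like list and set subscriptable at runtime, so class-level and function-level annotations of the kind changed above evaluate without the typing aliases. The names here are illustrative and do not come from the Zulip crawler code:

    # Minimal sketch, assuming Python 3.9+.
    deny_domains: list[str] = []

    def collect(urls: list[str]) -> set[str]:
        # Keep every URL that does not match a denied domain (simplified check).
        return {url for url in urls if not any(d in url for d in deny_domains)}

    print(collect(["http://localhost:9981/help/", "http://localhost:9981/api"]))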