ruff: Fix UP007 Use X | Y for type annotations.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
commit 531b34cb4c
parent e08a24e47f
Author: Anders Kaseorg
Date: 2024-07-11 17:30:23 -07:00
Committed by: Tim Abbott

355 changed files with 2759 additions and 3029 deletions
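
UP007 is the Ruff rule (inherited from pyupgrade) that rewrites typing.Union[X, Y] as X | Y and typing.Optional[X] as X | None, per PEP 604. A minimal before/after sketch of the rewrite (hypothetical function, not from this commit):

# Before: both spellings need imports from typing.
from typing import Optional, Union

def lookup(key: Union[int, str]) -> Optional[str]: ...

# After UP007: the PEP 604 | operator expresses the same types with no
# typing import (Python 3.10+, or earlier releases when the module uses
# `from __future__ import annotations`).
def lookup(key: int | str) -> str | None: ...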

@@ -1,5 +1,4 @@
 import optparse
-from typing import Union
 
 from scrapy.commands import crawl
 from scrapy.crawler import Crawler
@@ -10,7 +9,7 @@ class Command(crawl.Command):
         crawlers = []
         real_create_crawler = self.crawler_process.create_crawler
 
-        def create_crawler(crawler_or_spidercls: Union[Crawler, str]) -> Crawler:
+        def create_crawler(crawler_or_spidercls: Crawler | str) -> Crawler:
             crawler = real_create_crawler(crawler_or_spidercls)
             crawlers.append(crawler)
             return crawler

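The annotation Crawler | str is evaluated when the function is defined, which is safe here because on Python 3.10+ the | operator on classes builds a types.UnionType object. A short sketch of that runtime behavior (illustrative, independent of the commit):

# Assumes Python 3.10+: | on two types yields a types.UnionType, which
# compares equal to the typing.Union spelling and works with isinstance().
import types
from typing import Union

pair = int | str
assert isinstance(pair, types.UnionType)
assert pair == Union[int, str]
assert isinstance("x", pair) and isinstance(3, pair)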

@@ -1,7 +1,7 @@
 import json
 import os
 import re
-from typing import Callable, Iterator, Optional, Union
+from typing import Callable, Iterator
 from urllib.parse import urlsplit
 
 import scrapy
@@ -58,7 +58,7 @@ ZULIP_SERVER_GITHUB_DIRECTORY_PATH_PREFIX = "/zulip/zulip/tree/main"
 
 
 class BaseDocumentationSpider(scrapy.Spider):
-    name: Optional[str] = None
+    name: str | None = None
     # Exclude domain address.
     deny_domains: list[str] = []
     start_urls: list[str] = []
@@ -155,7 +155,7 @@ class BaseDocumentationSpider(scrapy.Spider):
         if url.startswith("http://localhost:9981/plans"):
             return
 
-        callback: Callable[[Response], Optional[Iterator[Request]]] = self.parse
+        callback: Callable[[Response], Iterator[Request] | None] = self.parse
         dont_filter = False
         method = "GET"
         if self._is_external_url(url):
@@ -233,7 +233,7 @@ class BaseDocumentationSpider(scrapy.Spider):
             request.dont_filter = True
             yield request
 
-    def error_callback(self, failure: Failure) -> Optional[Union[Failure, Iterator[Request]]]:
+    def error_callback(self, failure: Failure) -> Failure | Iterator[Request] | None:
         if isinstance(failure.value, HttpError):
             response = failure.value.response
             # Hack: The filtering above does not catch this URL,
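
The error_callback hunk also shows that an Optional wrapped around a Union flattens into a single | chain, since Optional[X] is simply Union[X, None]. A quick sketch of the equivalence (illustrative, not part of the commit):

# Python 3.10+: Optional[Union[A, B]] and A | B | None denote one union.
from typing import Optional, Union

assert Optional[Union[int, str]] == (int | str | None)
assert (int | str | None).__args__ == (int, str, type(None))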