diff --git a/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py b/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py
index 20e7f8e889..555df8affb 100644
--- a/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py
+++ b/tools/documentation_crawler/documentation_crawler/spiders/common/spiders.py
@@ -184,7 +184,7 @@ class BaseDocumentationSpider(scrapy.Spider):
                         "There is no local directory associated with the GitHub URL: %s", url
                     )
                     return
-        elif not getattr(self, "skip_check_fragment", False) and split_url.fragment != "":
+        elif split_url.fragment != "":
             dont_filter = True
             callback = self.check_fragment
         if getattr(self, "skip_external", False) and self._is_external_link(url):
diff --git a/tools/test-help-documentation b/tools/test-help-documentation
index c9a231eb09..a3a93cc0e1 100755
--- a/tools/test-help-documentation
+++ b/tools/test-help-documentation
@@ -75,14 +75,7 @@ with (
     ),
 ):
     ret_help_doc = subprocess.call(
-        [
-            "scrapy",
-            "crawl_with_status",
-            *extra_args,
-            "-a",
-            "skip_check_fragment=set",
-            "help_documentation_crawler",
-        ],
+        ["scrapy", "crawl_with_status", *extra_args, "help_documentation_crawler"],
         cwd="tools/documentation_crawler",
     )
     extra_args += ["-a", "validate_html=set"]