diff --git a/scrapy_zyte_api/_middlewares.py b/scrapy_zyte_api/_middlewares.py
index 629aed4f..7dd20008 100644
--- a/scrapy_zyte_api/_middlewares.py
+++ b/scrapy_zyte_api/_middlewares.py
@@ -108,7 +108,9 @@ def process_request(self, request, spider):
             slot.delay = 0
 
         if self._max_requests_reached(downloader):
-            self._crawler.engine.close_spider(spider, "closespider_max_zapi_requests")
+            self._crawler.engine.close_spider(
+                spider, f"closespider_max_{self._max_requests}_zapi_requests"
+            )
             raise IgnoreRequest(
                 f"The request {request} is skipped as {self._max_requests} max "
                 f"Zyte API requests have been reached."
diff --git a/tests/test_middlewares.py b/tests/test_middlewares.py
index 2305d22a..f287c6b9 100644
--- a/tests/test_middlewares.py
+++ b/tests/test_middlewares.py
@@ -133,7 +133,10 @@ def parse(self, response):
     assert crawler.stats.get_value("scrapy-zyte-api/success") <= zapi_max_requests
     assert crawler.stats.get_value("scrapy-zyte-api/processed") == zapi_max_requests
     assert crawler.stats.get_value("item_scraped_count") == zapi_max_requests + 6
-    assert crawler.stats.get_value("finish_reason") == "closespider_max_zapi_requests"
+    assert (
+        crawler.stats.get_value("finish_reason")
+        == f"closespider_max_{zapi_max_requests}_zapi_requests"
+    )
     assert (
         crawler.stats.get_value(
             "downloader/exception_type_count/scrapy.exceptions.IgnoreRequest"