By default, the crawler deduplicates requests by their unique_key, which is derived from the URL, so the same page is only processed once. If you want the crawler to crawl the same page multiple times, you must give each request its own unique_key. For example:
import asyncio

from crawlee import Request
from crawlee.crawlers import BeautifulSoupCrawler, BeautifulSoupCrawlingContext


async def main() -> None:
    crawler = BeautifulSoupCrawler()

    @crawler.router.default_handler
    async def request_handler(context: BeautifulSoupCrawlingContext) -> None:
        context.log.info(f'Processing {context.request.url} ...')

    # The URL is the same for all three requests, so each one needs its
    # own unique_key; otherwise deduplication keeps only the first.
    request_1 = Request.from_url('https://httpbin.org/get', unique_key='1')
    request_2 = Request.from_url('https://httpbin.org/get', unique_key='2')
    request_3 = Request.from_url('https://httpbin.org/get', unique_key='3')

    await crawler.run([request_1, request_2, request_3])


if __name__ == '__main__':
    asyncio.run(main())
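
If you need more than a handful of requests to the same URL, the same idea scales with a small helper. This is just a sketch; make_requests is a hypothetical name, not part of the Crawlee API:

from crawlee import Request


def make_requests(url: str, count: int) -> list[Request]:
    # Give every request its own unique_key (here just the index as a
    # string) so none of them are deduplicated against the others.
    return [Request.from_url(url, unique_key=str(i)) for i in range(count)]


# Usage inside main():
#     await crawler.run(make_requests('https://httpbin.org/get', 10))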