async def main():
    """Apify Actor entry point.

    Reads a list of website URLs from the Actor input, scrapes every site
    concurrently for e-mail addresses, and pushes each e-mail found into an
    Apify dataset, printing progress along the way.
    """
    async with Actor:
        input_data = await Actor.get_input()
        # NOTE(review): raises KeyError when the Actor input has no
        # 'websites' field — consider input_data.get('websites', []).
        urls = input_data['websites']
        print(urls)

        # Scrape all sites concurrently, sharing one HTTP session.
        async with aiohttp.ClientSession() as session:
            tasks = [fetch_emails(session, website) for website in urls]
            results = await asyncio.gather(*tasks)
        print('here')

        # NOTE(review): `results` is never consumed; the code instead reads
        # the module-level `website_list` (and `invalid_url` below), which
        # `fetch_emails` presumably populates as a side effect — confirm,
        # and prefer iterating `results` directly.
        dataset = apify_client.dataset()
        for email in website_list:
            print(email)
            dataset.push_data({"email": email})

        print(website_list)
        print(f"tried to scrape {len(urls)}")
        print(f"there is {invalid_url}")
        print(f'successfully scraped {len(website_list)} emails ! ')


if __name__ == '__main__':
    # Guard the entry point so importing this module does not start the Actor.
    asyncio.run(main())