There was an uncaught exception during the run of the Actor and it was not handled.
const crawler = new PuppeteerCrawler({
    launchContext: {
        launchOptions: {
            headless: true,
            args: [
                "--no-sandbox", // Mitigates the "sandboxed" process issue in Docker containers
                "--ignore-certificate-errors",
                "--disable-dev-shm-usage",
                "--disable-infobars",
                "--disable-extensions",
                "--disable-setuid-sandbox",
                "--disable-gpu", // Mitigates the "crashing GPU process" issue in Docker containers
            ],
        },
    },
    maxRequestRetries: 1,
    navigationTimeoutSecs: 60,
    autoscaledPoolOptions: { minConcurrency: 30 },
    maxSessionRotations: 5,
    preNavigationHooks: [
        async ({ blockRequests }, goToOptions) => {
            if (goToOptions) goToOptions.waitUntil = "domcontentloaded"; // Set waitUntil here
            await blockRequests({
                urlPatterns: [ ... ],
            });
        },
    ],
    proxyConfiguration,
    requestHandler: router,
});

await crawler.run(startUrls);
await Actor.exit();
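The "uncaught exception" message usually means an error thrown in the main script escaped without being handled. A minimal sketch of making the failure explicit, assuming the crawler and startUrls objects from the snippet above, is to wrap the run in a try/catch and finish the run through Actor.fail() with a readable message:

import { Actor } from 'apify';

try {
    // Errors thrown during setup, or outside the crawler's own retry handling,
    // surface here instead of crashing the process unhandled.
    await crawler.run(startUrls);
    await Actor.exit();
} catch (err) {
    // Log the error and mark the run as FAILED with a readable message.
    Actor.log.error('Crawler run failed', { message: err.message });
    await Actor.fail(err.message);
}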
The stringList editor doesn't work in the web console.

{
    "title": "Test",
    "type": "object",
    "schemaVersion": 1,
    "properties": {
        "search.location": { "title": "Locations #1", "type": "array", "description": "", "editor": "stringList", "prefill": ["Bandung"] },   ### <-- Problem
        "search_location": { "title": "Locations #2", "type": "array", "description": "", "editor": "stringList", "prefill": ["Bandung"] }
    }
}
The +Add button doesn't work for it either. When edited using the Bulk button, the resulting JSON is weird: it automatically becomes an object structure, which is a nice effect. I'm not sure whether this is really a bug or a new feature.

{
"actor-start": {
"eventTitle": "Price for Actor start",
"eventDescription": "Flat fee for starting an Actor run.",
"eventPriceUsd": 0.1
},
"task-completed": {
"eventTitle": "Price for completing the task",
"eventDescription": "Flat fee for completing the task.",
"eventPriceUsd": 0.4
}
}
from apify import Actor


async def main():
    """Runs the AI Travel Planner workflow."""
    # TravelState, travel_workflow and save_report come from the project's own modules.
    async with Actor:
        # Charge the flat fee for starting the run (pay-per-event pricing).
        await Actor.charge('actor-start')

        actor_input = await Actor.get_input() or {}
        Actor.log.info(f"Received input: {actor_input}")
        travel_query = TravelState(**actor_input)

        # Execute workflow
        final_state = travel_workflow.invoke(travel_query)
        Actor.log.info(f"Workflow completed. Final state: {final_state}")
        await Actor.charge('task-completed')

        # Save the final report
        await save_report(final_state)
2025-03-07T21:22:12.478Z ACTOR: Pulling Docker image of build aJ5w2MnrBdaZRxGeA from repository.
2025-03-07T21:22:13.611Z ACTOR: Creating Docker container.
2025-03-07T21:22:13.835Z ACTOR: Starting Docker container.
2025-03-07T21:22:14.208Z Starting X virtual framebuffer using: Xvfb :99 -ac -screen 0 1920x1080x24+32 -nolisten tcp
2025-03-07T21:22:14.210Z Executing main command
2025-03-07T21:22:15.368Z INFO  System info {"apifyVersion":"3.3.2","apifyClientVersion":"2.12.0","crawleeVersion":"3.13.0","osType":"Linux","nodeVersion":"v20.18.3"}
2025-03-07T21:22:15.498Z INFO  Starting the crawl process {"startUrls":[{"url":"https://salesblaster.ai"}],"maxRequestsPerCrawl":100,"datasetName":"default"}
2025-03-07T21:22:15.905Z ERROR Error running scraper: {"error":"Request options are not valid, provide either a URL or an object with 'url' property (but without 'id' property), or an object with 'requestsFromUrl' property. Input: {\n url: { url: 'https://salesblaster.ai' },\n userData: {\n datasetName: 'default',\n initialUrl: { url: 'https://salesblaster.ai' }\n }\n}"}
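The final ERROR line shows why the request is rejected: each item handed to the crawler has the shape { url: { url: 'https://salesblaster.ai' } }, i.e. the whole start-URL object was nested under url instead of a plain URL string. A minimal sketch of normalizing the input before calling crawler.run(), assuming the startUrls and datasetName fields shown in the log (the variable names are illustrative):

const { startUrls = [], datasetName = 'default' } = (await Actor.getInput()) ?? {};

// Unwrap { url: 'https://...' } items so that `url` is always a plain string.
const requests = startUrls.map((item) => {
    const url = typeof item === 'string' ? item : item.url;
    return {
        url,
        userData: { datasetName, initialUrl: url },
    };
});

await crawler.run(requests);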
ERROR PlaywrightCrawler: Request failed and reached maximum retries. ApifyApiError: Dataset was not found
2025-03-06T17:37:21.112Z   clientMethod: DatasetClient.pushItems
2025-03-06T17:37:21.113Z   statusCode: 404
2025-03-06T17:37:21.115Z   type: record-not-found
2025-03-06T17:37:21.119Z   httpMethod: post
2025-03-06T17:37:21.120Z   path: /v2/datasets/<redacted>/items
2025-03-06T17:37:21.122Z   stack:
2025-03-06T17:37:21.124Z     at makeRequest (/home/myuser/node_modules/apify-client/dist/http_client.js:187:30)
2025-03-06T17:37:21.125Z     at process.processTicksAndRejections (node:internal/process/task_queues:95:5)
2025-03-06T17:37:21.127Z     at async DatasetClient.pushItems (/home/myuser/node_modules/apify-client/dist/resource_clients/dataset.js:104:9)
2025-03-06T17:37:21.129Z     at async processSingleReviewDetails (file:///home/myuser/dist/helperfunctions.js:365:5)
2025-03-06T17:37:21.131Z     at async Module.processReviews (file:///home/myuser/dist/helperfunctions.js:379:13)
2025-03-06T17:37:21.133Z     at async getReviews (file:///home/myuser/dist/main.js:37:5)
2025-03-06T17:37:21.135Z     at async PlaywrightCrawler.requestHandler [as userProvidedRequestHandler] (file:///home/myuser/dist/main.js:98:13)
2025-03-06T17:37:21.137Z     at async wrap (/home/myuser/node_modules/@apify/timeout/cjs/index.cjs:54:21)
2025-03-06T17:37:21.139Z   data: undefined
{"id":"<redacted>","url":"<redacted>?sort=recency&languages=all","method":"GET","uniqueKey":"https://www.trustpilot.com/review/<redacted>?languages=all&sort=recency"}
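A 404 with type record-not-found from DatasetClient.pushItems usually means the dataset id or name being pushed to no longer exists, for example a stale id saved from a previous run. One hedged workaround is to open the dataset through the SDK right before pushing, because Actor.openDataset() creates a named dataset when it is missing; the dataset name and the reviewItems variable below are illustrative:

// Open (and create, if necessary) a named dataset, then push the scraped reviews.
const dataset = await Actor.openDataset('trustpilot-reviews');
await dataset.pushData(reviewItems);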
"proxy": { "useApifyProxy": true, "apifyProxyGroups": ["RESIDENTIAL"], "countryCode": "FR", "apifyProxyCountry": "FR" }, "proxyConfiguration": { "useApifyProxy": true, "apifyProxyGroups": ["RESIDENTIAL"], "countryCode": "FR", "apifyProxyCountry": "FR" },