```js
import { ProxyConfiguration } from 'crawlee';

const proxyConfiguration = new ProxyConfiguration({
    tieredProxyUrls: [
        [null],          // first tier: no proxy
        ['proxyTier1'],  // second tier
        ['proxyTier2'],  // third tier
    ],
});
```
This would be a solution where the crawler always starts scraping without a proxy, but I don't want other requests to be scraped without proxy usage either.

With `newUrlFunction`, you can decide per request whether a proxy is used:

```js
const proxyConfiguration = new ProxyConfiguration({
    newUrlFunction: (sessionId, { request }) => {
        if (request?.url.includes('crawlee.dev')) {
            return null; // for crawlee.dev, we don't use a proxy
        }
        return 'http://proxy-1.com'; // for all other URLs, we use this proxy
    },
});
```
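For illustration, here is a minimal sketch of how such a configuration could be wired into a crawler. The `CheerioCrawler`, the target URLs, and the proxy URL are assumptions for the example, not part of the original question:

```js
import { CheerioCrawler, ProxyConfiguration } from 'crawlee';

// Assumed setup: crawlee.dev requests bypass the proxy,
// every other request goes through a hypothetical proxy URL.
const proxyConfiguration = new ProxyConfiguration({
    newUrlFunction: (sessionId, { request }) => {
        if (request?.url.includes('crawlee.dev')) {
            return null; // no proxy for crawlee.dev
        }
        return 'http://proxy-1.com'; // hypothetical proxy for everything else
    },
});

const crawler = new CheerioCrawler({
    proxyConfiguration,
    async requestHandler({ request, proxyInfo }) {
        // proxyInfo should be undefined when the request was made without a proxy
        console.log(`${request.url} -> ${proxyInfo?.url ?? 'no proxy'}`);
    },
});

await crawler.run(['https://crawlee.dev', 'https://example.com']);
```

This keeps the "no proxy" behavior scoped to specific URLs instead of making a whole proxy tier proxy-less.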