broad-emerald
Apify & Crawlee2y ago
1 reply
broad-emerald

Cookies and other inputs

Hello everyone,
I am new to crawlee.

I used the Apify API version, and now I want to apply the same logic with the Python version.
My pain point is the input values from the API version: I couldn't figure out where I should write them in the Python version.
Inputs are :
{
"startUrls": [
{
"url": "https://celo.org"
}
],
"useSitemaps": false,
"crawlerType": "playwright:firefox",
"includeUrlGlobs": [],
"excludeUrlGlobs": [],
"ignoreCanonicalUrl": false,
"maxCrawlDepth": 20,
"maxCrawlPages": 9999999,
"initialConcurrency": 0,
"maxConcurrency": 200,
"initialCookies": [],
"proxyConfiguration": {
"useApifyProxy": true
},
"maxSessionRotations": 10,
"maxRequestRetries": 5,
"requestTimeoutSecs": 60,
"minFileDownloadSpeedKBps": 128,
"dynamicContentWaitSecs": 10,
"waitForSelector": "",
"maxScrollHeightPixels": 5000,
"removeElementsCssSelector": "nav, footer, script, style, noscript, svg,\n[role="alert"],\n[role="banner"],\n[role="dialog"],\n[role="alertdialog"],\n[role="region"][aria-label*="skip" i],\n[aria-modal="true"]",
"removeCookieWarnings": true,
"expandIframes": true,
"clickElementsCssSelector": "[aria-expanded="false"]",
"htmlTransformer": "readableText",
"readableTextCharThreshold": 100,
"aggressivePrune": false,
"debugMode": false,
"debugLog": false,
"saveHtml": false,
"saveHtmlAsFile": false,
"saveMarkdown": true,
"saveFiles": false,
"saveScreenshots": false,
"maxResults": 9999999,
"clientSideMinChangePercentage": 15,
"renderingTypeDetectionPercentage": 10
}


Thanks in advance!
Was this page helpful?