TargetClosedError
I always get this error after about 5 minutes of scraping (it works normally until the error occurs).
import { PlaywrightCrawler, ProxyConfiguration } from "crawlee"
const crawler = new PlaywrightCrawler({
proxyConfiguration: new ProxyConfiguration({
proxyUrls: [
...
],
}),
async requestHandler({ request, page, enqueueLinks, pushData, log }) {
const title = await page.title()
const content = await page.content()
log.info(`URL: ${request.loadedUrl} || TITLE: '${title}'`)
const links = await page.$$eval(
"a.button.button-join.is-discord",
(links) => links.map((link) => link.getAttribute("href"))
)
await pushData({
title,
url: request.loadedUrl,
content,
links,
})
await enqueueLinks()
},
})
// Start the crawl with the seed URL(s) (placeholder from the question).
// FIX: an accidental second copy of the entire script was pasted and fused
// onto this line — it redeclared `const crawler` and produced a syntax error
// (`...run([...])import { ... }`). Only the single run call was intended.
await crawler.run(["....."])