nodejs-web-scraper
Comparing version 6.0.2 to 6.1.0
{
  "name": "nodejs-web-scraper",
- "version": "6.0.2",
+ "version": "6.1.0",
  "description": "A web scraper for NodeJs",
@@ -5,0 +5,0 @@ "main": "index.js",
@@ -475,3 +475,4 @@ nodejs-web-scraper is a simple tool for scraping/crawling server-side rendered pages.
    proxy: null, //Use a proxy. Pass a full proxy URL, including the protocol and the port.
-   showConsoleLogs: true //Set to false if you want to disable the messages
+   showConsoleLogs: true, //Set to false if you want to disable the messages
+   onError: null //Callback function that is called whenever an error occurs - signature is: onError(errorString) => {}
}
@@ -632,3 +633,5 @@ ```
+ Alternatively, use the `onError` callback function in the scraper's global config.
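A minimal sketch of how the new option can be wired up, assuming the `Scraper`/`Root` entry points shown elsewhere in the package README; the site URLs and the `collectedErrors` array are illustrative only:

```javascript
const { Scraper, Root } = require('nodejs-web-scraper');

const collectedErrors = []; // illustrative sink for the error strings

const config = {
    baseSiteUrl: 'https://example.com/',        // hypothetical target site
    startUrl: 'https://example.com/articles/',  // hypothetical start page
    showConsoleLogs: false,
    onError: (errorString) => {                 // matches the onError(errorString) => {} signature
        collectedErrors.push(errorString);
    }
};

(async () => {
    const scraper = new Scraper(config);
    await scraper.scrape(new Root());
    console.log(`Scrape finished with ${collectedErrors.length} error(s).`);
})();
```

Passing a handler here surfaces errors as they happen, rather than only in the final logs described below.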
## Automatic logs
@@ -635,0 +638,0 @@ If a logPath was provided, the scraper will create a log for each operation object you create, and also the following ones: "log.json" (a summary of the entire scraping tree) and "finalErrors.json" (an array of all FINAL errors encountered). I really recommend using this feature, alongside your own hooks and data handling.
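For reference, a hedged sketch of enabling these logs through the global config's `logPath` option (the directory path and site URLs are illustrative):

```javascript
const { Scraper, Root } = require('nodejs-web-scraper');

const scraper = new Scraper({
    baseSiteUrl: 'https://example.com/',        // hypothetical target site
    startUrl: 'https://example.com/articles/',  // hypothetical start page
    logPath: './logs/'  // the scraper writes log.json and finalErrors.json here
});

(async () => {
    await scraper.scrape(new Root());
})();
```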
@@ -11,2 +11,6 @@
+ /**
+  * @callback errorCallback
+  * @param {string} errorString
+  */
@@ -32,2 +36,3 @@
   * @param {string} [globalConfig.proxy = null]
+  * @param {Function} [globalConfig.onError = null]
   */
@@ -62,3 +67,4 @@
      ...globalConfig.puppeteerConfig
-   }
+   },
+   onError: null // Callback runs whenever any error occurs during scraping
  }
@@ -207,2 +213,3 @@ // this.state = new State();
    this.state.failedScrapingIterations.push(errorString);
+   if (this.config.onError) this.config.onError(errorString);
  }
@@ -209,0 +216,0 @@