diff --git a/package-lock.json b/package-lock.json
index aef552c..9985a1f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -13,7 +13,7 @@
         "dotenv": "^8.6.0",
         "express": "^4.19.2",
         "got": "^11.8.0",
-        "hpagent": "^1.2.0",
+        "https-proxy-agent": "^7.0.5",
         "metascraper": "^5.14.18",
         "metascraper-description": "^5.14.18",
         "metascraper-image": "^5.14.18",
@@ -1247,15 +1247,6 @@
         "node": ">= 0.4"
       }
     },
-    "node_modules/hpagent": {
-      "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/hpagent/-/hpagent-1.2.0.tgz",
-      "integrity": "sha512-A91dYTeIB6NoXG+PxTQpCCDDnfHsW9kc06Lvpu1TEe9gnd6ZFeiBoRO9JvzEv6xK7EX97/dUE8g/vBMTqTS3CA==",
-      "license": "MIT",
-      "engines": {
-        "node": ">=14"
-      }
-    },
     "node_modules/html-encoding-sniffer": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz",
diff --git a/package.json b/package.json
index 8c94bc7..7b538cb 100644
--- a/package.json
+++ b/package.json
@@ -13,7 +13,7 @@
     "dotenv": "^8.6.0",
     "express": "^4.19.2",
     "got": "^11.8.0",
-    "hpagent": "^1.2.0",
+    "https-proxy-agent": "^7.0.5",
     "metascraper": "^5.14.18",
     "metascraper-description": "^5.14.18",
     "metascraper-image": "^5.14.18",
diff --git a/src/proxyConfig.js b/src/proxyConfig.js
index 5e46ce9..6760814 100644
--- a/src/proxyConfig.js
+++ b/src/proxyConfig.js
@@ -1,6 +1,6 @@
 // proxyConfig.js
 const path = require("path");
-const { HttpsProxyAgent, HttpProxyAgent } = require("hpagent");
+const { HttpsProxyAgent } = require("https-proxy-agent");
 
 // Fake user agents array - you can expand this list
 const FAKE_USER_AGENTS = [
@@ -34,15 +34,15 @@ function getProxyUrl() {
 
 // Main function to get axios config for scraping
 async function getScrapingConfig() {
+  const httpsProxyAgent = new HttpsProxyAgent(getProxyUrl());
   return {
     headers: {
       "User-Agent":
         FAKE_USER_AGENTS[Math.floor(Math.random() * FAKE_USER_AGENTS.length)],
     },
     agent: {
-      http: new HttpProxyAgent({
-        http: getProxyUrl(),
-      }),
+      https: httpsProxyAgent,
+      http: httpsProxyAgent,
     },
   };
 }
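
Note on usage: despite the "axios config" comment in src/proxyConfig.js, the agent: { http, https } shape matches got's options (got ^11.8.0 is in the dependency list; axios expects httpAgent/httpsAgent instead). Below is a minimal sketch of how the returned config might be consumed. It is an assumption, not shown in the diff, that getScrapingConfig is exported from src/proxyConfig.js; the file name and target URL are illustrative.

// usage-sketch.js (hypothetical; assumes getScrapingConfig is exported
// from src/proxyConfig.js, which the diff does not show)
const got = require("got");
const { getScrapingConfig } = require("./src/proxyConfig");

async function fetchPage(url) {
  const config = await getScrapingConfig();
  // got accepts { headers, agent: { http, https } }; both agent keys point
  // at the same HttpsProxyAgent, so http:// and https:// requests are
  // tunneled through the proxy via an HTTP CONNECT request.
  const response = await got(url, config);
  return response.body;
}

// Example call against an illustrative URL.
fetchPage("https://example.com")
  .then((html) => console.log(`fetched ${html.length} characters`))
  .catch((err) => console.error(err.message));

Reusing one HttpsProxyAgent for both keys works because https-proxy-agent always tunnels with CONNECT, which handles plain-HTTP and HTTPS origins alike; the separate http-proxy-agent package would be the choice if http:// requests should instead be forwarded through the proxy without a tunnel.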