diff --git a/collector/processLink/convert/generic.js b/collector/processLink/convert/generic.js
index 64fc0a0b7c4bfa7e913fa8b63448db235389819c..c12d79ade5856b819879786137638b3240d16a8e 100644
--- a/collector/processLink/convert/generic.js
+++ b/collector/processLink/convert/generic.js
@@ -28,7 +28,7 @@ async function scrapeGenericUrl(link, textOnly = false) {
   const url = new URL(link);
   const decodedPathname = decodeURIComponent(url.pathname);
-  const filename = `${url.hostname}${decodedPathname.replace(/\//g, '_')}`;
+  const filename = `${url.hostname}${decodedPathname.replace(/\//g, "_")}`;

   const data = {
     id: v4(),
diff --git a/collector/utils/extensions/WebsiteDepth/index.js b/collector/utils/extensions/WebsiteDepth/index.js
index e7d26d99a769e93e716657ccdaea202f4e14f30f..d8b23144dc2a37cff4e1971f53468d440d4be792 100644
--- a/collector/utils/extensions/WebsiteDepth/index.js
+++ b/collector/utils/extensions/WebsiteDepth/index.js
@@ -109,7 +109,7 @@ async function bulkScrapePages(links, outFolderPath) {
      const url = new URL(link);
      const decodedPathname = decodeURIComponent(url.pathname);
-      const filename = `${url.hostname}${decodedPathname.replace(/\//g, '_')}`;
+      const filename = `${url.hostname}${decodedPathname.replace(/\//g, "_")}`;

      const data = {
        id: v4(),