Fix multiple calls to parseInt

This commit is contained in:
Ilango 2024-01-22 08:30:36 +05:30
parent 62ec17d684
commit bf60a1a2a9
3 changed files with 18 additions and 18 deletions

View File

@@ -91,7 +91,7 @@ class Cheerio_DocumentLoaders implements INode {
         const metadata = nodeData.inputs?.metadata
         const relativeLinksMethod = nodeData.inputs?.relativeLinksMethod as string
         const selectedLinks = nodeData.inputs?.selectedLinks as string[]
-        let limit = nodeData.inputs?.limit as string
+        let limit = parseInt(nodeData.inputs?.limit as string)
         let url = nodeData.inputs?.url as string
         url = url.trim()
@@ -125,14 +125,14 @@ class Cheerio_DocumentLoaders implements INode {
         let docs = []
         if (relativeLinksMethod) {
             if (process.env.DEBUG === 'true') console.info(`Start ${relativeLinksMethod}`)
-            if (!limit) limit = '10'
-            else if (parseInt(limit) < 0) throw new Error('Limit cannot be less than 0')
+            if (!limit) limit = 10
+            else if (limit < 0) throw new Error('Limit cannot be less than 0')
             const pages: string[] =
                 selectedLinks && selectedLinks.length > 0
-                    ? selectedLinks.slice(0, parseInt(limit))
+                    ? selectedLinks.slice(0, limit)
                     : relativeLinksMethod === 'webCrawl'
-                    ? await webCrawl(url, parseInt(limit))
-                    : await xmlScrape(url, parseInt(limit))
+                    ? await webCrawl(url, limit)
+                    : await xmlScrape(url, limit)
             if (process.env.DEBUG === 'true') console.info(`pages: ${JSON.stringify(pages)}, length: ${pages.length}`)
             if (!pages || pages.length === 0) throw new Error('No relative links found')
             for (const page of pages) {

View File

@@ -119,7 +119,7 @@ class Playwright_DocumentLoaders implements INode {
         const metadata = nodeData.inputs?.metadata
         const relativeLinksMethod = nodeData.inputs?.relativeLinksMethod as string
         const selectedLinks = nodeData.inputs?.selectedLinks as string[]
-        let limit = nodeData.inputs?.limit as string
+        let limit = parseInt(nodeData.inputs?.limit as string)
         let waitUntilGoToOption = nodeData.inputs?.waitUntilGoToOption as 'load' | 'domcontentloaded' | 'networkidle' | 'commit' | undefined
         let waitForSelector = nodeData.inputs?.waitForSelector as string
@@ -166,14 +166,14 @@ class Playwright_DocumentLoaders implements INode {
         let docs = []
         if (relativeLinksMethod) {
             if (process.env.DEBUG === 'true') console.info(`Start ${relativeLinksMethod}`)
-            if (!limit) limit = '10'
-            else if (parseInt(limit) < 0) throw new Error('Limit cannot be less than 0')
+            if (!limit) limit = 10
+            else if (limit < 0) throw new Error('Limit cannot be less than 0')
             const pages: string[] =
                 selectedLinks && selectedLinks.length > 0
-                    ? selectedLinks.slice(0, parseInt(limit))
+                    ? selectedLinks.slice(0, limit)
                     : relativeLinksMethod === 'webCrawl'
-                    ? await webCrawl(url, parseInt(limit))
-                    : await xmlScrape(url, parseInt(limit))
+                    ? await webCrawl(url, limit)
+                    : await xmlScrape(url, limit)
             if (process.env.DEBUG === 'true') console.info(`pages: ${JSON.stringify(pages)}, length: ${pages.length}`)
             if (!pages || pages.length === 0) throw new Error('No relative links found')
             for (const page of pages) {

View File

@@ -120,7 +120,7 @@ class Puppeteer_DocumentLoaders implements INode {
         const metadata = nodeData.inputs?.metadata
         const relativeLinksMethod = nodeData.inputs?.relativeLinksMethod as string
         const selectedLinks = nodeData.inputs?.selectedLinks as string[]
-        let limit = nodeData.inputs?.limit as string
+        let limit = parseInt(nodeData.inputs?.limit as string)
         let waitUntilGoToOption = nodeData.inputs?.waitUntilGoToOption as PuppeteerLifeCycleEvent
         let waitForSelector = nodeData.inputs?.waitForSelector as string
@@ -167,14 +167,14 @@ class Puppeteer_DocumentLoaders implements INode {
         let docs = []
         if (relativeLinksMethod) {
             if (process.env.DEBUG === 'true') console.info(`Start ${relativeLinksMethod}`)
-            if (!limit) limit = '10'
-            else if (parseInt(limit) < 0) throw new Error('Limit cannot be less than 0')
+            if (!limit) limit = 10
+            else if (limit < 0) throw new Error('Limit cannot be less than 0')
             const pages: string[] =
                 selectedLinks && selectedLinks.length > 0
-                    ? selectedLinks.slice(0, parseInt(limit))
+                    ? selectedLinks.slice(0, limit)
                     : relativeLinksMethod === 'webCrawl'
-                    ? await webCrawl(url, parseInt(limit))
-                    : await xmlScrape(url, parseInt(limit))
+                    ? await webCrawl(url, limit)
+                    : await xmlScrape(url, limit)
             if (process.env.DEBUG === 'true') console.info(`pages: ${JSON.stringify(pages)}, length: ${pages.length}`)
             if (!pages || pages.length === 0) throw new Error('No relative links found')
             for (const page of pages) {