modify playwright web crawl

chungyau97 2023-07-05 17:17:13 +08:00
parent c18e98761a
commit 607d4a3394
1 changed file with 28 additions and 20 deletions

@@ -2,7 +2,7 @@ import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { TextSplitter } from 'langchain/text_splitter'
 import { PlaywrightWebBaseLoader } from 'langchain/document_loaders/web/playwright'
 import { test } from 'linkifyjs'
-import { getAvailableURLs } from '../../../src'
+import { webCrawl } from '../../../src'
 
 class Playwright_DocumentLoaders implements INode {
     label: string
@@ -35,19 +35,20 @@ class Playwright_DocumentLoaders implements INode {
                 optional: true
             },
             {
-                label: 'Web Scrap for Relative Links',
-                name: 'webScrap',
+                label: 'Web Crawl for Relative Links',
+                name: 'boolWebCrawl',
                 type: 'boolean',
                 optional: true,
                 additionalParams: true
             },
             {
-                label: 'Web Scrap Links Limit',
+                label: 'Web Crawl Links Limit',
                 name: 'limit',
                 type: 'number',
                 default: 10,
                 optional: true,
-                additionalParams: true
+                additionalParams: true,
+                description: 'Set 0 to crawl all relative links'
             },
             {
                 label: 'Metadata',
@@ -62,7 +63,7 @@ class Playwright_DocumentLoaders implements INode {
     async init(nodeData: INodeData): Promise<any> {
         const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
         const metadata = nodeData.inputs?.metadata
-        const webScrap = nodeData.inputs?.webScrap as boolean
+        const boolWebCrawl = nodeData.inputs?.boolWebCrawl as boolean
         let limit = nodeData.inputs?.limit as string
         let url = nodeData.inputs?.url as string
 
@@ -71,25 +72,32 @@
             throw new Error('Invalid URL')
         }
 
-        const playwrightLoader = async (url: string): Promise<any> => {
-            let docs = []
-            const loader = new PlaywrightWebBaseLoader(url)
-            if (textSplitter) {
-                docs = await loader.loadAndSplit(textSplitter)
-            } else {
-                docs = await loader.load()
-            }
-            return docs
-        }
+        async function playwrightLoader(url: string): Promise<any> {
+            try {
+                let docs = []
+                const loader = new PlaywrightWebBaseLoader(url)
+                if (textSplitter) {
+                    docs = await loader.loadAndSplit(textSplitter)
+                } else {
+                    docs = await loader.load()
+                }
+                return docs
+            } catch (err) {
+                if (process.env.DEBUG === 'true') console.error(`error in PlaywrightWebBaseLoader: ${err.message}, on page: ${url}`)
+            }
+        }
 
-        let availableUrls: string[]
         let docs = []
-        if (webScrap) {
-            if (!limit) limit = '10'
-            availableUrls = await getAvailableURLs(url, parseInt(limit))
-            for (let i = 0; i < availableUrls.length; i++) {
-                docs.push(...(await playwrightLoader(availableUrls[i])))
+        if (boolWebCrawl) {
+            if (process.env.DEBUG === 'true') console.info('Start Web Crawl')
+            if (!limit) throw new Error('Please set a limit to crawl')
+            else if (parseInt(limit) < 0) throw new Error('Limit cannot be less than 0')
+            const pages: string[] = await webCrawl(url, parseInt(limit))
+            if (process.env.DEBUG === 'true') console.info(`pages: ${JSON.stringify(pages)}, length: ${pages.length}`)
+            for (const page of pages) {
+                docs.push(...(await playwrightLoader(page)))
             }
+            if (process.env.DEBUG === 'true') console.info('Finish Web Crawl')
         } else {
             docs = await playwrightLoader(url)
         }
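
The webCrawl helper imported from '../../../src' is not part of this diff; only its call site appears above. For orientation, a minimal sketch of a helper matching that call site -- webCrawl(url, limit) resolving to a string[] of crawled page URLs, where a limit of 0 crawls all relative links per the new input description -- might look as follows. The BFS queue, fetch-based page loading, and href-regex link extraction are illustrative assumptions, not the actual Flowise implementation:

// Hypothetical sketch only, inferred from the call site above:
// webCrawl(url, limit) -> Promise<string[]>, where limit = 0 crawls everything.
export async function webCrawl(stringURL: string, limit: number): Promise<string[]> {
    const baseURL = new URL(stringURL)
    const visited = new Set<string>([baseURL.href])
    const queue: string[] = [baseURL.href]
    const pages: string[] = []

    while (queue.length > 0) {
        // limit = 0 means "crawl all relative links" (see the input description)
        if (limit !== 0 && pages.length >= limit) break
        const current = queue.shift() as string
        try {
            const res = await fetch(current)
            const html = await res.text()
            pages.push(current)
            // queue unvisited same-origin links found in href attributes
            for (const match of html.matchAll(/href="([^"]+)"/g)) {
                try {
                    const next = new URL(match[1], current)
                    next.hash = '' // ignore fragments when deduplicating
                    if (next.origin === baseURL.origin && !visited.has(next.href)) {
                        visited.add(next.href)
                        queue.push(next.href)
                    }
                } catch {
                    // ignore malformed hrefs
                }
            }
        } catch {
            // skip pages that fail to load instead of failing the whole crawl
        }
    }
    return pages
}

Marking URLs as visited when they are queued, rather than when they are fetched, prevents the same page from being enqueued twice; the limit is checked against pages.length so it caps the number of pages actually loaded.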