Merge pull request #399 from apify/main
feat: Add document loader for Apify Website Content Crawler
This commit is contained in:
commit b4f3d70d89
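For context, the loader added below collects its UI inputs into a single actor-input object before calling the Website Content Crawler. A minimal sketch of that merged payload, built from the node's default values (illustrative only, not part of the commit; any keys supplied under 'Additional input' are spread over these fields):

// Illustrative sketch of the actor input assembled in init() below,
// using the node's defaults.
const input = {
    startUrls: [{ url: 'https://js.langchain.com/docs/' }],
    crawlerType: 'playwright:firefox',
    maxCrawlDepth: 1,
    maxCrawlPages: 3
}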
@@ -0,0 +1,26 @@
import { INodeParams, INodeCredential } from '../src/Interface'

class ApifyApiCredential implements INodeCredential {
    label: string
    name: string
    version: number
    description: string
    inputs: INodeParams[]

    constructor() {
        this.label = 'Apify API'
        this.name = 'apifyApi'
        this.version = 1.0
        this.description =
            'You can find the Apify API token on your <a target="_blank" href="https://console.apify.com/account#/integrations">Apify account</a> page.'
        this.inputs = [
            {
                label: 'Apify API',
                name: 'apifyApiToken',
                type: 'password'
            }
        ]
    }
}

module.exports = { credClass: ApifyApiCredential }
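This class satisfies the INodeCredential contract imported from ../src/Interface. A rough sketch of what that contract requires, inferred from the fields declared above (the real interface may carry additional members):

// Inferred sketch only; the actual INodeCredential lives in ../src/Interface.
interface INodeCredentialSketch {
    label: string
    name: string
    version: number
    description: string
    inputs: INodeParams[]
}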
@@ -0,0 +1,139 @@
import { INode, INodeData, INodeParams, ICommonObject } from '../../../src/Interface'
import { getCredentialData, getCredentialParam } from '../../../src/utils'
import { TextSplitter } from 'langchain/text_splitter'
import { ApifyDatasetLoader } from 'langchain/document_loaders/web/apify_dataset'
import { Document } from 'langchain/document'

class ApifyWebsiteContentCrawler_DocumentLoaders implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    version: number
    category: string
    baseClasses: string[]
    inputs: INodeParams[]
    credential: INodeParams

    constructor() {
        this.label = 'Apify Website Content Crawler'
        this.name = 'apifyWebsiteContentCrawler'
        this.type = 'Document'
        this.icon = 'apify-symbol-transparent.svg'
        this.version = 1.0
        this.category = 'Document Loaders'
        this.description = 'Load data from Apify Website Content Crawler'
        this.baseClasses = [this.type]
        this.inputs = [
            {
                label: 'Start URLs',
                name: 'urls',
                type: 'string',
                description: 'One or more URLs of pages where the crawler will start, separated by commas.',
                placeholder: 'https://js.langchain.com/docs/'
            },
            {
                label: 'Crawler type',
                type: 'options',
                name: 'crawlerType',
                options: [
                    {
                        label: 'Headless web browser (Chrome+Playwright)',
                        name: 'playwright:chrome'
                    },
                    {
                        label: 'Stealthy web browser (Firefox+Playwright)',
                        name: 'playwright:firefox'
                    },
                    {
                        label: 'Raw HTTP client (Cheerio)',
                        name: 'cheerio'
                    },
                    {
                        label: 'Raw HTTP client with JavaScript execution (JSDOM) [experimental]',
                        name: 'jsdom'
                    }
                ],
                description:
                    'Select the crawling engine, see <a target="_blank" href="https://apify.com/apify/website-content-crawler#crawling">documentation</a> for additional information.',
                default: 'playwright:firefox'
            },
            {
                label: 'Max crawling depth',
                name: 'maxCrawlDepth',
                type: 'number',
                optional: true,
                default: 1
            },
            {
                label: 'Max crawl pages',
                name: 'maxCrawlPages',
                type: 'number',
                optional: true,
                default: 3
            },
            {
                label: 'Additional input',
                name: 'additionalInput',
                type: 'json',
                default: JSON.stringify({}),
                description:
                    'For additional input options for the crawler see <a target="_blank" href="https://apify.com/apify/website-content-crawler/input-schema">documentation</a>.',
                optional: true
            },
            {
                label: 'Text Splitter',
                name: 'textSplitter',
                type: 'TextSplitter',
                optional: true
            }
        ]
        this.credential = {
            label: 'Connect Apify API',
            name: 'credential',
            type: 'credential',
            credentialNames: ['apifyApi']
        }
    }

    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const textSplitter = nodeData.inputs?.textSplitter as TextSplitter

        // Get input options and merge with additional input
        const urls = nodeData.inputs?.urls as string
        const crawlerType = nodeData.inputs?.crawlerType as string
        const maxCrawlDepth = nodeData.inputs?.maxCrawlDepth as string
        const maxCrawlPages = nodeData.inputs?.maxCrawlPages as string
        const additionalInput =
            typeof nodeData.inputs?.additionalInput === 'object'
                ? nodeData.inputs?.additionalInput
                : JSON.parse(nodeData.inputs?.additionalInput as string)
        const input = {
            startUrls: urls.split(',').map((url) => ({ url: url.trim() })),
            crawlerType,
            maxCrawlDepth: parseInt(maxCrawlDepth, 10),
            maxCrawlPages: parseInt(maxCrawlPages, 10),
            ...additionalInput
        }

        // Get Apify API token from credential data
        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const apifyApiToken = getCredentialParam('apifyApiToken', credentialData, nodeData)

        const loader = await ApifyDatasetLoader.fromActorCall('apify/website-content-crawler', input, {
            datasetMappingFunction: (item) =>
                new Document({
                    pageContent: (item.text || '') as string,
                    metadata: { source: item.url }
                }),
            clientOptions: {
                token: apifyApiToken
            }
        })

        return textSplitter ? loader.loadAndSplit(textSplitter) : loader.load()
    }
}

module.exports = { nodeClass: ApifyWebsiteContentCrawler_DocumentLoaders }
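A hedged usage sketch of the node outside the Flowise runtime. The nodeData and options arguments are simplified assumptions here; in the app, the flow engine builds them and resolves the stored credential to the Apify API token:

// Hypothetical standalone call, not how Flowise invokes the node.
// Casts stand in for the engine-built INodeData/ICommonObject objects.
const node = new ApifyWebsiteContentCrawler_DocumentLoaders()
const docs = await node.init(
    {
        inputs: {
            urls: 'https://js.langchain.com/docs/, https://docs.apify.com/',
            crawlerType: 'cheerio',
            maxCrawlDepth: '1',
            maxCrawlPages: '3',
            additionalInput: '{}'
        }
    } as unknown as INodeData,
    '',
    {} as ICommonObject
)
// docs: Document[] — each page's extracted text in pageContent, its URL in metadata.source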
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><defs><style>.cls-1{fill:none;}.cls-2{fill:#97d700;}.cls-3{fill:#71c5e8;}.cls-4{fill:#ff9013;}</style></defs><g id="Trmplate"><rect class="cls-1" width="512" height="512"/><path class="cls-2" d="M163.14,152.65a36.06,36.06,0,0,0-30.77,40.67v0l21.34,152.33,89.74-204.23Z"/><path class="cls-3" d="M379.69,279.56l-8.38-117.1a36.12,36.12,0,0,0-38.53-33.36,17.61,17.61,0,0,0-2.4.26l-34.63,4.79,76.08,170.57A35.94,35.94,0,0,0,379.69,279.56Z"/><path class="cls-4" d="M186.43,382.69a35.88,35.88,0,0,0,18-2.63l130.65-55.13L273,185.65Z"/></g></svg>
@@ -26,6 +26,7 @@
 "@qdrant/js-client-rest": "^1.2.2",
 "@supabase/supabase-js": "^2.29.0",
 "@types/js-yaml": "^4.0.5",
+"apify-client": "^2.7.1",
 "@types/jsdom": "^21.1.1",
 "axios": "^0.27.2",
 "cheerio": "^1.0.0-rc.12",
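For reference, the ApifyDatasetLoader used by the node wraps this new apify-client dependency. Roughly, the loader's fromActorCall amounts to the following sketch (a simplification of langchain's internals, using apify-client's ActorClient.call and DatasetClient.listItems):

import { ApifyClient } from 'apify-client'

// Simplified sketch: run the actor, wait for it to finish,
// then read its default dataset.
const client = new ApifyClient({ token: apifyApiToken })
const run = await client.actor('apify/website-content-crawler').call(input)
const { items } = await client.dataset(run.defaultDatasetId).listItems()
// each item then goes through datasetMappingFunction to become a Document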