Bugfix: Check for relative path when saving file, to prevent unauthorised writes (#3172)

* Check for relative path when saving file, to prevent unauthorised writes
* Prevent relative paths for all modes (s3/local)
* Update storageUtils.ts
* Change the code to sanitize filenames
* Fix lock file

---------

Co-authored-by: Henry Heng <henryheng@flowiseai.com>
Co-authored-by: Henry <hzj94@hotmail.com>
This commit is contained in:
parent 0420ff2af3
commit 8bd3de4153
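The underlying issue: user-controlled filenames were joined directly into storage paths and S3 keys, so a name containing '../' segments could steer a write outside the intended directory. The fix routes every such name through sanitize-filename before the path or key is built. A minimal standalone sketch of the idea (the storage root and chatflow id below are invented for illustration, not taken from the codebase):

import path from 'node:path'
import sanitize from 'sanitize-filename'

// A hostile "filename" that tries to climb out of the storage directory.
const userSuppliedName = '../../outside/evil.txt'
const storageRoot = '/var/flowise/storage' // invented storage root
const chatflowid = 'abc123'                // invented chatflow id

// Before the fix: path.join resolves the '..' segments, so the write
// would land outside <storageRoot>/<chatflowid>.
const unsafePath = path.join(storageRoot, chatflowid, userSuppliedName)
console.log(unsafePath) // -> /var/flowise/outside/evil.txt

// After the fix: sanitize() strips the path separators, and the new helper
// in this commit additionally removes leading dots, so the name can no
// longer introduce extra path segments.
const safeName = sanitize(userSuppliedName).replace(/^\.+/, '')
const safePath = path.join(storageRoot, chatflowid, safeName)
console.log(safePath) // -> /var/flowise/storage/abc123/outsideevil.txt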
@@ -111,6 +111,7 @@
         "pyodide": ">=0.21.0-alpha.2",
         "redis": "^4.6.7",
         "replicate": "^0.31.1",
+        "sanitize-filename": "^1.6.3",
         "socket.io": "^4.6.1",
         "srt-parser-2": "^1.2.3",
         "typeorm": "^0.3.6",
@@ -10,6 +10,7 @@ import {
 } from '@aws-sdk/client-s3'
 import { Readable } from 'node:stream'
 import { getUserHome } from './utils'
+import sanitize from 'sanitize-filename'

 export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: string, fileNames: string[]) => {
     const storageType = getStorageType()
@@ -21,7 +22,9 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st
         const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
         const mime = splitDataURI[0].split(':')[1].split(';')[0]

-        const Key = chatflowid + '/' + filename
+        const sanitizedFilename = _sanitizeFilename(filename)
+
+        const Key = chatflowid + '/' + sanitizedFilename
         const putObjCmd = new PutObjectCommand({
             Bucket,
             Key,
@@ -31,7 +34,7 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st
         })
         await s3Client.send(putObjCmd)

-        fileNames.push(filename)
+        fileNames.push(sanitizedFilename)
         return 'FILE-STORAGE::' + JSON.stringify(fileNames)
     } else {
         const dir = path.join(getStoragePath(), chatflowid)
@@ -42,20 +45,23 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st
         const splitDataURI = fileBase64.split(',')
         const filename = splitDataURI.pop()?.split(':')[1] ?? ''
         const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
+        const sanitizedFilename = _sanitizeFilename(filename)
+
-        const filePath = path.join(dir, filename)
+        const filePath = path.join(dir, sanitizedFilename)
         fs.writeFileSync(filePath, bf)
-        fileNames.push(filename)
+        fileNames.push(sanitizedFilename)
         return 'FILE-STORAGE::' + JSON.stringify(fileNames)
     }
 }

 export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName: string, fileNames: string[], ...paths: string[]) => {
     const storageType = getStorageType()

+    const sanitizedFilename = _sanitizeFilename(fileName)
     if (storageType === 's3') {
         const { s3Client, Bucket } = getS3Config()

-        let Key = paths.reduce((acc, cur) => acc + '/' + cur, '') + '/' + fileName
+        let Key = paths.reduce((acc, cur) => acc + '/' + cur, '') + '/' + sanitizedFilename
         if (Key.startsWith('/')) {
             Key = Key.substring(1)
         }
@@ -68,27 +74,28 @@ export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName:
             Body: bf
         })
         await s3Client.send(putObjCmd)
-        fileNames.push(fileName)
+        fileNames.push(sanitizedFilename)
         return 'FILE-STORAGE::' + JSON.stringify(fileNames)
     } else {
         const dir = path.join(getStoragePath(), ...paths)
         if (!fs.existsSync(dir)) {
             fs.mkdirSync(dir, { recursive: true })
         }

-        const filePath = path.join(dir, fileName)
+        const filePath = path.join(dir, sanitizedFilename)
         fs.writeFileSync(filePath, bf)
-        fileNames.push(fileName)
+        fileNames.push(sanitizedFilename)
         return 'FILE-STORAGE::' + JSON.stringify(fileNames)
     }
 }

 export const addSingleFileToStorage = async (mime: string, bf: Buffer, fileName: string, ...paths: string[]) => {
     const storageType = getStorageType()
+    const sanitizedFilename = _sanitizeFilename(fileName)

     if (storageType === 's3') {
         const { s3Client, Bucket } = getS3Config()

-        let Key = paths.reduce((acc, cur) => acc + '/' + cur, '') + '/' + fileName
+        let Key = paths.reduce((acc, cur) => acc + '/' + cur, '') + '/' + sanitizedFilename
         if (Key.startsWith('/')) {
             Key = Key.substring(1)
         }
@@ -101,16 +108,15 @@ export const addSingleFileToStorage = async (mime: string, bf: Buffer, fileName:
             Body: bf
         })
         await s3Client.send(putObjCmd)
-        return 'FILE-STORAGE::' + fileName
+        return 'FILE-STORAGE::' + sanitizedFilename
     } else {
         const dir = path.join(getStoragePath(), ...paths)
         if (!fs.existsSync(dir)) {
             fs.mkdirSync(dir, { recursive: true })
         }

-        const filePath = path.join(dir, fileName)
+        const filePath = path.join(dir, sanitizedFilename)
         fs.writeFileSync(filePath, bf)
-        return 'FILE-STORAGE::' + fileName
+        return 'FILE-STORAGE::' + sanitizedFilename
     }
 }

@@ -185,6 +191,11 @@ export const removeSpecificFileFromStorage = async (...paths: string[]) => {
         }
         await _deleteS3Folder(Key)
     } else {
+        const fileName = paths.pop()
+        if (fileName) {
+            const sanitizedFilename = _sanitizeFilename(fileName)
+            paths.push(sanitizedFilename)
+        }
         const file = path.join(getStoragePath(), ...paths)
         fs.unlinkSync(file)
     }
@@ -282,10 +293,11 @@ export const streamStorageFile = async (
     fileName: string
 ): Promise<fs.ReadStream | Buffer | undefined> => {
     const storageType = getStorageType()
+    const sanitizedFilename = sanitize(fileName)
     if (storageType === 's3') {
         const { s3Client, Bucket } = getS3Config()

-        const Key = chatflowId + '/' + chatId + '/' + fileName
+        const Key = chatflowId + '/' + chatId + '/' + sanitizedFilename
         const getParams = {
             Bucket,
             Key
@@ -297,7 +309,7 @@ export const streamStorageFile = async (
             return Buffer.from(blob)
         }
     } else {
-        const filePath = path.join(getStoragePath(), chatflowId, chatId, fileName)
+        const filePath = path.join(getStoragePath(), chatflowId, chatId, sanitizedFilename)
         //raise error if file path is not absolute
         if (!path.isAbsolute(filePath)) throw new Error(`Invalid file path`)
         //raise error if file path contains '..'
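The hunk above is cut off just before the existing '..' guard, so that check is not shown in this diff. For context, a containment check of this kind typically resolves the final path and verifies it still sits under the storage root; a hypothetical standalone sketch (the helper name and storage root are invented for illustration, not Flowise APIs):

import path from 'node:path'

// Hypothetical guard illustrating the kind of check the comments above hint
// at: reject any resolved path that escapes the storage root.
const assertInsideStorage = (storageRoot: string, filePath: string): void => {
    if (!path.isAbsolute(filePath)) throw new Error('Invalid file path')
    // A relative path that starts with '..' means the target is outside the root.
    const rel = path.relative(path.resolve(storageRoot), path.resolve(filePath))
    if (rel.startsWith('..') || path.isAbsolute(rel)) throw new Error('Invalid file path')
}

// Passes: a normal chat upload inside the storage root.
assertInsideStorage('/var/flowise/storage', '/var/flowise/storage/flow1/chat1/report.pdf')
// Throws: a traversal attempt that resolves outside the root.
// assertInsideStorage('/var/flowise/storage', '/var/flowise/storage/flow1/../../secrets')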
@@ -339,3 +351,12 @@ export const getS3Config = () => {
     })
     return { s3Client, Bucket }
 }
+
+const _sanitizeFilename = (filename: string): string => {
+    if (filename) {
+        let sanitizedFilename = sanitize(filename)
+        // remove all leading .
+        return sanitizedFilename.replace(/^\.+/, '')
+    }
+    return ''
+}
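The new _sanitizeFilename helper pairs sanitize-filename, which strips path separators and other characters that are illegal in filenames, with removal of leading dots, so traversal-style names collapse to harmless basenames. A standalone sketch of the expected behaviour (the helper is copied here so the snippet runs on its own; outputs reflect sanitize-filename's documented rules):

import sanitize from 'sanitize-filename'

const _sanitizeFilename = (filename: string): string => {
    if (filename) {
        let sanitizedFilename = sanitize(filename)
        // remove all leading .
        return sanitizedFilename.replace(/^\.+/, '')
    }
    return ''
}

// sanitize() drops the '/' and '\' separators, the helper then strips leading dots.
console.log(_sanitizeFilename('../../etc/passwd'))   // expected: 'etcpasswd'
console.log(_sanitizeFilename('..\\..\\secret.txt')) // expected: 'secret.txt'
console.log(_sanitizeFilename('report.pdf'))         // unchanged: 'report.pdf'
console.log(_sanitizeFilename(''))                   // empty input stays empty: ''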
@@ -374,6 +374,9 @@ importers:
       replicate:
         specifier: ^0.31.1
         version: 0.31.1
+      sanitize-filename:
+        specifier: ^1.6.3
+        version: 1.6.3
       socket.io:
         specifier: ^4.6.1
         version: 4.7.4(bufferutil@4.0.8)(utf-8-validate@6.0.4)
@@ -14616,6 +14619,9 @@ packages:
   safer-buffer@2.1.2:
     resolution: { integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== }

+  sanitize-filename@1.6.3:
+    resolution: { integrity: sha512-y/52Mcy7aw3gRm7IrcGDFx/bCk4AhRh2eI9luHOQM86nZsqwiRkkq2GekHXBBD+SmPidc8i2PqtYZl+pWJ8Oeg== }
+
   sanitize-html@2.12.1:
     resolution: { integrity: sha512-Plh+JAn0UVDpBRP/xEjsk+xDCoOvMBwQUf/K+/cBAVuTbtX8bj2VB7S1sL1dssVpykqp0/KPSesHrqXtokVBpA== }
@@ -15733,6 +15739,9 @@ packages:
   trough@2.2.0:
     resolution: { integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw== }

+  truncate-utf8-bytes@1.0.2:
+    resolution: { integrity: sha512-95Pu1QXQvruGEhv62XCMO3Mm90GscOCClvrIUwCM0PYOXK3kaF3l3sIHxx71ThJfcbM2O5Au6SO3AWCSEfW4mQ== }
+
   tryer@1.0.1:
     resolution: { integrity: sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== }
@@ -16232,6 +16241,9 @@ packages:
     resolution: { integrity: sha512-xu9GQDeFp+eZ6LnCywXN/zBancWvOpUMzgjLPSjy4BRHSmTelvn2E0DG0o1sTiw5hkCKBHo8rwSKncfRfv2EEQ== }
     engines: { node: '>=6.14.2' }

+  utf8-byte-length@1.0.5:
+    resolution: { integrity: sha512-Xn0w3MtiQ6zoz2vFyUVruaCL53O/DwUvkEeOvj+uulMm0BkUGYWmBYVyElqZaSLhY6ZD0ulfU3aBra2aVT4xfA== }
+
   util-deprecate@1.0.2:
     resolution: { integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== }
@@ -35448,6 +35460,10 @@ snapshots:

   safer-buffer@2.1.2: {}

+  sanitize-filename@1.6.3:
+    dependencies:
+      truncate-utf8-bytes: 1.0.2
+
   sanitize-html@2.12.1:
     dependencies:
       deepmerge: 4.3.1
@@ -36815,6 +36831,10 @@ snapshots:

   trough@2.2.0: {}

+  truncate-utf8-bytes@1.0.2:
+    dependencies:
+      utf8-byte-length: 1.0.5
+
   tryer@1.0.1: {}

   ts-api-utils@1.3.0(typescript@5.5.2):
@@ -37330,6 +37350,8 @@ snapshots:
       node-gyp-build: 4.8.1
     optional: true

+  utf8-byte-length@1.0.5: {}
+
   util-deprecate@1.0.2: {}

   util.promisify@1.0.1: