Bugfix: Check for relative path when saving file, to prevent unauthorised writes (#3172)

* Check for relative path when saving file, to prevent unauthorised writes

* preventing relative paths for all modes (s3/local)

* preventing relative paths for all modes (s3/local)

* Update storageUtils.ts

* Changed the code to sanitize filenames.

* Fixed the pnpm lock file.

---------

Co-authored-by: Henry Heng <henryheng@flowiseai.com>
Co-authored-by: Henry <hzj94@hotmail.com>
This commit is contained in:
Vinod Kiran 2024-09-14 18:28:52 +05:30 committed by GitHub
parent 0420ff2af3
commit 8bd3de4153
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 60 additions and 16 deletions

View File

@ -111,6 +111,7 @@
"pyodide": ">=0.21.0-alpha.2", "pyodide": ">=0.21.0-alpha.2",
"redis": "^4.6.7", "redis": "^4.6.7",
"replicate": "^0.31.1", "replicate": "^0.31.1",
"sanitize-filename": "^1.6.3",
"socket.io": "^4.6.1", "socket.io": "^4.6.1",
"srt-parser-2": "^1.2.3", "srt-parser-2": "^1.2.3",
"typeorm": "^0.3.6", "typeorm": "^0.3.6",

View File

@ -10,6 +10,7 @@ import {
} from '@aws-sdk/client-s3' } from '@aws-sdk/client-s3'
import { Readable } from 'node:stream' import { Readable } from 'node:stream'
import { getUserHome } from './utils' import { getUserHome } from './utils'
import sanitize from 'sanitize-filename'
export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: string, fileNames: string[]) => { export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: string, fileNames: string[]) => {
const storageType = getStorageType() const storageType = getStorageType()
@ -21,7 +22,9 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st
const bf = Buffer.from(splitDataURI.pop() || '', 'base64') const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
const mime = splitDataURI[0].split(':')[1].split(';')[0] const mime = splitDataURI[0].split(':')[1].split(';')[0]
const Key = chatflowid + '/' + filename const sanitizedFilename = _sanitizeFilename(filename)
const Key = chatflowid + '/' + sanitizedFilename
const putObjCmd = new PutObjectCommand({ const putObjCmd = new PutObjectCommand({
Bucket, Bucket,
Key, Key,
@ -31,7 +34,7 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st
}) })
await s3Client.send(putObjCmd) await s3Client.send(putObjCmd)
fileNames.push(filename) fileNames.push(sanitizedFilename)
return 'FILE-STORAGE::' + JSON.stringify(fileNames) return 'FILE-STORAGE::' + JSON.stringify(fileNames)
} else { } else {
const dir = path.join(getStoragePath(), chatflowid) const dir = path.join(getStoragePath(), chatflowid)
@ -42,20 +45,23 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st
const splitDataURI = fileBase64.split(',') const splitDataURI = fileBase64.split(',')
const filename = splitDataURI.pop()?.split(':')[1] ?? '' const filename = splitDataURI.pop()?.split(':')[1] ?? ''
const bf = Buffer.from(splitDataURI.pop() || '', 'base64') const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
const sanitizedFilename = _sanitizeFilename(filename)
const filePath = path.join(dir, filename) const filePath = path.join(dir, sanitizedFilename)
fs.writeFileSync(filePath, bf) fs.writeFileSync(filePath, bf)
fileNames.push(filename) fileNames.push(sanitizedFilename)
return 'FILE-STORAGE::' + JSON.stringify(fileNames) return 'FILE-STORAGE::' + JSON.stringify(fileNames)
} }
} }
export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName: string, fileNames: string[], ...paths: string[]) => { export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName: string, fileNames: string[], ...paths: string[]) => {
const storageType = getStorageType() const storageType = getStorageType()
const sanitizedFilename = _sanitizeFilename(fileName)
if (storageType === 's3') { if (storageType === 's3') {
const { s3Client, Bucket } = getS3Config() const { s3Client, Bucket } = getS3Config()
let Key = paths.reduce((acc, cur) => acc + '/' + cur, '') + '/' + fileName let Key = paths.reduce((acc, cur) => acc + '/' + cur, '') + '/' + sanitizedFilename
if (Key.startsWith('/')) { if (Key.startsWith('/')) {
Key = Key.substring(1) Key = Key.substring(1)
} }
@ -68,27 +74,28 @@ export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName:
Body: bf Body: bf
}) })
await s3Client.send(putObjCmd) await s3Client.send(putObjCmd)
fileNames.push(fileName) fileNames.push(sanitizedFilename)
return 'FILE-STORAGE::' + JSON.stringify(fileNames) return 'FILE-STORAGE::' + JSON.stringify(fileNames)
} else { } else {
const dir = path.join(getStoragePath(), ...paths) const dir = path.join(getStoragePath(), ...paths)
if (!fs.existsSync(dir)) { if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true }) fs.mkdirSync(dir, { recursive: true })
} }
const filePath = path.join(dir, sanitizedFilename)
const filePath = path.join(dir, fileName)
fs.writeFileSync(filePath, bf) fs.writeFileSync(filePath, bf)
fileNames.push(fileName) fileNames.push(sanitizedFilename)
return 'FILE-STORAGE::' + JSON.stringify(fileNames) return 'FILE-STORAGE::' + JSON.stringify(fileNames)
} }
} }
export const addSingleFileToStorage = async (mime: string, bf: Buffer, fileName: string, ...paths: string[]) => { export const addSingleFileToStorage = async (mime: string, bf: Buffer, fileName: string, ...paths: string[]) => {
const storageType = getStorageType() const storageType = getStorageType()
const sanitizedFilename = _sanitizeFilename(fileName)
if (storageType === 's3') { if (storageType === 's3') {
const { s3Client, Bucket } = getS3Config() const { s3Client, Bucket } = getS3Config()
let Key = paths.reduce((acc, cur) => acc + '/' + cur, '') + '/' + fileName let Key = paths.reduce((acc, cur) => acc + '/' + cur, '') + '/' + sanitizedFilename
if (Key.startsWith('/')) { if (Key.startsWith('/')) {
Key = Key.substring(1) Key = Key.substring(1)
} }
@ -101,16 +108,15 @@ export const addSingleFileToStorage = async (mime: string, bf: Buffer, fileName:
Body: bf Body: bf
}) })
await s3Client.send(putObjCmd) await s3Client.send(putObjCmd)
return 'FILE-STORAGE::' + fileName return 'FILE-STORAGE::' + sanitizedFilename
} else { } else {
const dir = path.join(getStoragePath(), ...paths) const dir = path.join(getStoragePath(), ...paths)
if (!fs.existsSync(dir)) { if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true }) fs.mkdirSync(dir, { recursive: true })
} }
const filePath = path.join(dir, sanitizedFilename)
const filePath = path.join(dir, fileName)
fs.writeFileSync(filePath, bf) fs.writeFileSync(filePath, bf)
return 'FILE-STORAGE::' + fileName return 'FILE-STORAGE::' + sanitizedFilename
} }
} }
@ -185,6 +191,11 @@ export const removeSpecificFileFromStorage = async (...paths: string[]) => {
} }
await _deleteS3Folder(Key) await _deleteS3Folder(Key)
} else { } else {
const fileName = paths.pop()
if (fileName) {
const sanitizedFilename = _sanitizeFilename(fileName)
paths.push(sanitizedFilename)
}
const file = path.join(getStoragePath(), ...paths) const file = path.join(getStoragePath(), ...paths)
fs.unlinkSync(file) fs.unlinkSync(file)
} }
@ -282,10 +293,11 @@ export const streamStorageFile = async (
fileName: string fileName: string
): Promise<fs.ReadStream | Buffer | undefined> => { ): Promise<fs.ReadStream | Buffer | undefined> => {
const storageType = getStorageType() const storageType = getStorageType()
const sanitizedFilename = sanitize(fileName)
if (storageType === 's3') { if (storageType === 's3') {
const { s3Client, Bucket } = getS3Config() const { s3Client, Bucket } = getS3Config()
const Key = chatflowId + '/' + chatId + '/' + fileName const Key = chatflowId + '/' + chatId + '/' + sanitizedFilename
const getParams = { const getParams = {
Bucket, Bucket,
Key Key
@ -297,7 +309,7 @@ export const streamStorageFile = async (
return Buffer.from(blob) return Buffer.from(blob)
} }
} else { } else {
const filePath = path.join(getStoragePath(), chatflowId, chatId, fileName) const filePath = path.join(getStoragePath(), chatflowId, chatId, sanitizedFilename)
//raise error if file path is not absolute //raise error if file path is not absolute
if (!path.isAbsolute(filePath)) throw new Error(`Invalid file path`) if (!path.isAbsolute(filePath)) throw new Error(`Invalid file path`)
//raise error if file path contains '..' //raise error if file path contains '..'
@ -339,3 +351,12 @@ export const getS3Config = () => {
}) })
return { s3Client, Bucket } return { s3Client, Bucket }
} }
const _sanitizeFilename = (filename: string): string => {
if (filename) {
let sanitizedFilename = sanitize(filename)
// remove all leading .
return sanitizedFilename.replace(/^\.+/, '')
}
return ''
}

View File

@ -374,6 +374,9 @@ importers:
replicate: replicate:
specifier: ^0.31.1 specifier: ^0.31.1
version: 0.31.1 version: 0.31.1
sanitize-filename:
specifier: ^1.6.3
version: 1.6.3
socket.io: socket.io:
specifier: ^4.6.1 specifier: ^4.6.1
version: 4.7.4(bufferutil@4.0.8)(utf-8-validate@6.0.4) version: 4.7.4(bufferutil@4.0.8)(utf-8-validate@6.0.4)
@ -14616,6 +14619,9 @@ packages:
safer-buffer@2.1.2: safer-buffer@2.1.2:
resolution: { integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== } resolution: { integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== }
sanitize-filename@1.6.3:
resolution: { integrity: sha512-y/52Mcy7aw3gRm7IrcGDFx/bCk4AhRh2eI9luHOQM86nZsqwiRkkq2GekHXBBD+SmPidc8i2PqtYZl+pWJ8Oeg== }
sanitize-html@2.12.1: sanitize-html@2.12.1:
resolution: { integrity: sha512-Plh+JAn0UVDpBRP/xEjsk+xDCoOvMBwQUf/K+/cBAVuTbtX8bj2VB7S1sL1dssVpykqp0/KPSesHrqXtokVBpA== } resolution: { integrity: sha512-Plh+JAn0UVDpBRP/xEjsk+xDCoOvMBwQUf/K+/cBAVuTbtX8bj2VB7S1sL1dssVpykqp0/KPSesHrqXtokVBpA== }
@ -15733,6 +15739,9 @@ packages:
trough@2.2.0: trough@2.2.0:
resolution: { integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw== } resolution: { integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw== }
truncate-utf8-bytes@1.0.2:
resolution: { integrity: sha512-95Pu1QXQvruGEhv62XCMO3Mm90GscOCClvrIUwCM0PYOXK3kaF3l3sIHxx71ThJfcbM2O5Au6SO3AWCSEfW4mQ== }
tryer@1.0.1: tryer@1.0.1:
resolution: { integrity: sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== } resolution: { integrity: sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA== }
@ -16232,6 +16241,9 @@ packages:
resolution: { integrity: sha512-xu9GQDeFp+eZ6LnCywXN/zBancWvOpUMzgjLPSjy4BRHSmTelvn2E0DG0o1sTiw5hkCKBHo8rwSKncfRfv2EEQ== } resolution: { integrity: sha512-xu9GQDeFp+eZ6LnCywXN/zBancWvOpUMzgjLPSjy4BRHSmTelvn2E0DG0o1sTiw5hkCKBHo8rwSKncfRfv2EEQ== }
engines: { node: '>=6.14.2' } engines: { node: '>=6.14.2' }
utf8-byte-length@1.0.5:
resolution: { integrity: sha512-Xn0w3MtiQ6zoz2vFyUVruaCL53O/DwUvkEeOvj+uulMm0BkUGYWmBYVyElqZaSLhY6ZD0ulfU3aBra2aVT4xfA== }
util-deprecate@1.0.2: util-deprecate@1.0.2:
resolution: { integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== } resolution: { integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== }
@ -35448,6 +35460,10 @@ snapshots:
safer-buffer@2.1.2: {} safer-buffer@2.1.2: {}
sanitize-filename@1.6.3:
dependencies:
truncate-utf8-bytes: 1.0.2
sanitize-html@2.12.1: sanitize-html@2.12.1:
dependencies: dependencies:
deepmerge: 4.3.1 deepmerge: 4.3.1
@ -36815,6 +36831,10 @@ snapshots:
trough@2.2.0: {} trough@2.2.0: {}
truncate-utf8-bytes@1.0.2:
dependencies:
utf8-byte-length: 1.0.5
tryer@1.0.1: {} tryer@1.0.1: {}
ts-api-utils@1.3.0(typescript@5.5.2): ts-api-utils@1.3.0(typescript@5.5.2):
@ -37330,6 +37350,8 @@ snapshots:
node-gyp-build: 4.8.1 node-gyp-build: 4.8.1
optional: true optional: true
utf8-byte-length@1.0.5: {}
util-deprecate@1.0.2: {} util-deprecate@1.0.2: {}
util.promisify@1.0.1: util.promisify@1.0.1: