Improve chunk upload code security

David Bomba 2025-05-04 09:51:33 +10:00
parent 08f6e641e4
commit 02151b570b
1 changed file with 117 additions and 44 deletions


@@ -109,73 +109,146 @@ class ImportJsonController extends BaseController
     private function handleChunkedUpload(ImportJsonRequest $request)
     {
         $metadata = json_decode($request->metadata, true);
-        $chunk = $request->file('file');
-        $tempPath = sys_get_temp_dir()."/{$metadata['fileHash']}/app/chunks/";
 
-        if (!is_dir($tempPath)) {
-            mkdir($tempPath, 0777, true);
+        // Validate metadata structure
+        if (!isset($metadata['fileHash'], $metadata['fileName'], $metadata['totalChunks'], $metadata['currentChunk'])) {
+            throw new \InvalidArgumentException('Invalid metadata structure');
         }
 
-        $chunkPath = $tempPath . '/' . $metadata['currentChunk'];
+        // Sanitize and validate file hash (should be alphanumeric)
+        if (!preg_match('/^[a-zA-Z0-9]+$/', $metadata['fileHash'])) {
+            throw new \InvalidArgumentException('Invalid file hash format');
+        }
 
-        file_put_contents($chunkPath, file_get_contents($chunk));
+        // Sanitize and validate filename
+        $safeFileName = basename($metadata['fileName']);
+        if ($safeFileName !== $metadata['fileName']) {
+            throw new \InvalidArgumentException('Invalid filename');
+        }
+
+        // Validate chunk number format
+        if (!is_numeric($metadata['currentChunk']) || $metadata['currentChunk'] < 0) {
+            throw new \InvalidArgumentException('Invalid chunk number');
+        }
+
+        // Validate total chunks
+        if (!is_numeric($metadata['totalChunks']) || $metadata['totalChunks'] <= 0 || $metadata['totalChunks'] > 1000) {
+            throw new \InvalidArgumentException('Invalid total chunks');
+        }
+
+        // Validate file type
+        $chunk = $request->file('file');
+        if (!$chunk || !$chunk->isValid()) {
+            throw new \InvalidArgumentException('Invalid file chunk');
+        }
+
+        // Create a secure base directory path
+        $baseDir = storage_path('app/tmp/uploads');
+        $tempPath = $baseDir . '/' . $metadata['fileHash'] . '/chunks';
+
+        // Secure directory creation
+        if (!is_dir($tempPath)) {
+            if (!mkdir($tempPath, 0755, true)) {
+                throw new \RuntimeException('Failed to create directory');
+            }
+        }
+
+        // Secure path for chunk
+        $chunkPath = $tempPath . '/' . (int)$metadata['currentChunk'];
+
+        // Validate file size before saving
+        $maxChunkSize = 5 * 1024 * 1024; // 5MB
+        if ($chunk->getSize() > $maxChunkSize) {
+            throw new \InvalidArgumentException('Chunk size exceeds limit');
+        }
+
+        // Save chunk securely
+        if (!$chunk->move($tempPath, (string)$metadata['currentChunk'])) {
+            throw new \RuntimeException('Failed to save chunk');
+        }
 
         $uploadedChunks = count(glob($tempPath . '/*'));
 
         if ($uploadedChunks >= $metadata['totalChunks']) {
-            // Combine all chunks
-            $tempFilePath = $tempPath . $metadata['fileName'];
+            try {
+                // Combine chunks securely
+                $outputPath = $baseDir . '/' . $metadata['fileHash'] . '/' . $safeFileName;
+                $outputDir = dirname($outputPath);
 
-            $handle = fopen($tempFilePath, 'wb');
+                if (!is_dir($outputDir)) {
+                    mkdir($outputDir, 0755, true);
+                }
 
-            for ($i = 0; $i < $metadata['totalChunks']; $i++) {
-                $chunkContent = file_get_contents($tempPath . '/' . $i);
-                fwrite($handle, $chunkContent);
-            }
+                $handle = fopen($outputPath, 'wb');
+                if ($handle === false) {
+                    throw new \RuntimeException('Failed to create output file');
+                }
 
-            fclose($handle);
+                // Combine chunks with validation
+                for ($i = 0; $i < $metadata['totalChunks']; $i++) {
+                    $chunkFile = $tempPath . '/' . $i;
+                    if (!file_exists($chunkFile)) {
+                        throw new \RuntimeException("Missing chunk: {$i}");
+                    }
 
-            $disk = Ninja::isHosted() ? 'backup' : config('filesystems.default');
+                    $chunkContent = file_get_contents($chunkFile);
+                    if ($chunkContent === false) {
+                        throw new \RuntimeException("Failed to read chunk: {$i}");
+                    }
 
-            Storage::disk($disk)->put(
-                'migrations/'.$metadata['fileName'],
-                file_get_contents($tempFilePath),
-                ['visibility' => 'private']
-            );
+                    if (fwrite($handle, $chunkContent) === false) {
+                        throw new \RuntimeException("Failed to write chunk: {$i}");
+                    }
+                }
 
-            $this->deleteDirectory(sys_get_temp_dir()."/{$metadata['fileHash']}");
-            Storage::deleteDirectory(sys_get_temp_dir()."/{$metadata['fileHash']}");
+                fclose($handle);
 
-            $metadata['uploaded_filepath'] = 'migrations/'.$metadata['fileName'];
+                // Store in final location
+                $disk = Ninja::isHosted() ? 'backup' : config('filesystems.default');
+                $finalPath = 'migrations/' . $safeFileName;
 
-            return $metadata;
+                Storage::disk($disk)->put(
+                    $finalPath,
+                    file_get_contents($outputPath),
+                    ['visibility' => 'private']
+                );
+
+                // Clean up
+                $this->secureDeleteDirectory($baseDir . '/' . $metadata['fileHash']);
+
+                $metadata['uploaded_filepath'] = $finalPath;
+
+                return $metadata;
+            } catch (\Exception $e) {
+                // Clean up on error
+                $this->secureDeleteDirectory($baseDir . '/' . $metadata['fileHash']);
+                throw $e;
+            }
         }
 
         return $metadata;
     }
 
-    private function deleteDirectory($dir)
+    /**
+     * Securely delete a directory and its contents
+     */
+    private function secureDeleteDirectory(string $dir): void
    {
+        if (!is_dir($dir)) {
+            return;
+        }
+
         $files = new \RecursiveIteratorIterator(
             new \RecursiveDirectoryIterator($dir, \RecursiveDirectoryIterator::SKIP_DOTS),
             \RecursiveIteratorIterator::CHILD_FIRST
         );
 
-        foreach ($files as $file) {
-            if ($file->isDir()) {
-                rmdir($file->getRealPath());
-            } else {
-                unlink($file->getRealPath());
-            }
+        foreach ($files as $fileinfo) {
+            $todo = ($fileinfo->isDir() ? 'rmdir' : 'unlink');
+            $todo($fileinfo->getRealPath());
         }
 
-        return rmdir($dir);
+        rmdir($dir);
     }
 }
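
For illustration, here is a minimal client-side sketch of the chunked upload flow this controller now enforces: the archive is split into chunks no larger than the 5 MB server-side limit and each chunk is POSTed with the metadata and file fields that the new validation checks. The Guzzle HTTP client, the /api/v1/import_json route, the X-Api-Token header, and the local file path are assumptions made for the sketch, not part of this commit.

<?php

require 'vendor/autoload.php';

use GuzzleHttp\Client;

// Split a local backup into chunks that stay under the 5 MB server-side limit
// and send each one with the metadata fields the controller validates.
$file        = '/path/to/backup.zip';          // illustrative path
$chunkSize   = 5 * 1024 * 1024;                // matches $maxChunkSize in the controller
$fileHash    = md5_file($file);                // alphanumeric, passes the hash format check
$totalChunks = (int) ceil(filesize($file) / $chunkSize);

// Hypothetical base URI and route; adjust to the actual deployment.
$client = new Client(['base_uri' => 'https://ninja.example.test']);

$handle = fopen($file, 'rb');

for ($i = 0; $i < $totalChunks; $i++) {
    $metadata = [
        'fileHash'     => $fileHash,
        'fileName'     => basename($file),     // no path separators, so it survives the basename() check
        'totalChunks'  => $totalChunks,
        'currentChunk' => $i,
    ];

    $client->request('POST', '/api/v1/import_json', [
        'headers'   => ['X-Api-Token' => getenv('NINJA_TOKEN')], // assumed auth header
        'multipart' => [
            ['name' => 'metadata', 'contents' => json_encode($metadata)],
            ['name' => 'file', 'contents' => fread($handle, $chunkSize), 'filename' => 'chunk'],
        ],
    ]);
}

fclose($handle);

Once the final chunk arrives, the controller reassembles the file under storage_path('app/tmp/uploads'), moves it to migrations/ on the configured disk, and returns the metadata with uploaded_filepath set.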