import util from 'util'
import Busboy from 'busboy'
import { WriteStream } from 'fs-capacitor'
import createError from 'http-errors'
import objectPath from 'object-path'
import { SPEC_URL } from './constants'
import { ignoreStream } from './ignoreStream'
import { isEnumerableObject } from './isEnumerableObject'
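// Holds a promise that resolves with the file once its multipart part
// arrives, plus the resolve/reject handles the parser uses to settle it.
// The promise is pre-caught so a rejected, never-consumed upload doesn't
// trigger an unhandled promise rejection.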
class Upload {
  constructor() {
    this.promise = new Promise((resolve, reject) => {
      this.resolve = file => {
        this.file = file
        resolve(file)
      }
      this.reject = reject
    })
    this.promise.catch(() => {})
  }
}
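// Processes an incoming GraphQL multipart request (operations, map, then
// files), resolving the parsed operations with each mapped path replaced by
// a promise for a file upload.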
export const processRequest = (
  request,
  response,
  { maxFieldSize = 1000000, maxFileSize = Infinity, maxFiles = Infinity } = {}
) =>
  new Promise((resolve, reject) => {
    let released
    let exitError
    let currentStream
    let operations
    let operationsPath
    let map
    const parser = new Busboy({
      headers: request.headers,
      limits: {
        fieldSize: maxFieldSize,
        fields: 2,
        fileSize: maxFileSize,
        files: maxFiles
      }
    })
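    // Exits request processing with an error: rejects the main promise and
    // any uploads still waiting for a file, destroys the parser and the
    // stream currently being parsed, then unpipes and drains the rest of the
    // request.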
    const exit = error => {
      if (exitError) return
      exitError = error
      reject(exitError)
      parser.destroy()
      if (currentStream) currentStream.destroy(exitError)
      if (map)
        for (const upload of map.values())
          if (!upload.file) upload.reject(exitError)
      request.unpipe(parser)
      setImmediate(() => {
        request.resume()
      })
    }
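    // Frees resources once the response is done by destroying the capacitor
    // streams of uploads that received a file.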
    const release = () => {
      // istanbul ignore next
      if (released) return
      released = true
      if (map)
        for (const upload of map.values())
          if (upload.file) upload.file.capacitor.destroy()
    }
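    // Exits with a 499 error if the client disconnects before the multipart
    // data has been fully parsed.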
    const abort = () => {
      exit(
        createError(
          499,
          'Request disconnected during file upload stream parsing.'
        )
      )
    }
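    // Parses the 'operations' and 'map' JSON fields, validates them, and
    // wires a pending Upload promise into every mapped path within the
    // operations.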
    parser.on(
      'field',
      (fieldName, value, fieldNameTruncated, valueTruncated) => {
        if (exitError) return
        if (valueTruncated)
          return exit(
            createError(
              413,
              `The ${fieldName} multipart field value exceeds the ${maxFieldSize} byte size limit.`
            )
          )
        switch (fieldName) {
          case 'operations':
            try {
              operations = JSON.parse(value)
            } catch (error) {
              return exit(
                createError(
                  400,
                  `Invalid JSON in the operations multipart field (${SPEC_URL}).`
                )
              )
            }
            if (!isEnumerableObject(operations) && !Array.isArray(operations))
              return exit(
                createError(
                  400,
                  `Invalid type for the operations multipart field (${SPEC_URL}).`
                )
              )
            operationsPath = objectPath(operations)
            break
          case 'map': {
            if (!operations)
              return exit(
                createError(
                  400,
                  `Misordered multipart fields; map should follow operations (${SPEC_URL}).`
                )
              )
            let parsedMap
            try {
              parsedMap = JSON.parse(value)
            } catch (error) {
              return exit(
                createError(
                  400,
                  `Invalid JSON in the map multipart field (${SPEC_URL}).`
                )
              )
            }
            if (!isEnumerableObject(parsedMap))
              return exit(
                createError(
                  400,
                  `Invalid type for the map multipart field (${SPEC_URL}).`
                )
              )
            const mapEntries = Object.entries(parsedMap)
            if (mapEntries.length > maxFiles)
              return exit(
                createError(413, `${maxFiles} max file uploads exceeded.`)
              )
            map = new Map()
            for (const [fieldName, paths] of mapEntries) {
              if (!Array.isArray(paths))
                return exit(
                  createError(
                    400,
                    `Invalid type for the map multipart field entry key ${fieldName} array (${SPEC_URL}).`
                  )
                )
              map.set(fieldName, new Upload())
              for (const [index, path] of paths.entries()) {
                if (typeof path !== 'string')
                  return exit(
                    createError(
                      400,
                      `Invalid type for the map multipart field entry key ${fieldName} array index ${index} value (${SPEC_URL}).`
                    )
                  )
                try {
                  operationsPath.set(path, map.get(fieldName).promise)
                } catch (error) {
                  return exit(
                    createError(
                      400,
                      `Invalid object path for the map multipart field entry key ${fieldName} array index ${index} value ${path} (${SPEC_URL}).`
                    )
                  )
                }
              }
            }
            resolve(operations)
          }
        }
      }
    )
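    // Buffers each mapped file into an fs-capacitor WriteStream so it can be
    // read via createReadStream() even after parsing moves on, then resolves
    // the matching Upload with the file metadata; files without a map entry
    // are ignored.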
    parser.on('file', (fieldName, stream, filename, encoding, mimetype) => {
      if (exitError) {
        ignoreStream(stream)
        return
      }
      if (!map) {
        ignoreStream(stream)
        return exit(
          createError(
            400,
            `Misordered multipart fields; files should follow map (${SPEC_URL}).`
          )
        )
      }
      currentStream = stream
      stream.on('end', () => {
        currentStream = null
      })
      const upload = map.get(fieldName)
      if (!upload) {
        ignoreStream(stream)
        return
      }
      const capacitor = new WriteStream()
      capacitor.on('error', () => {
        stream.unpipe()
        stream.resume()
      })
      stream.on('limit', () => {
        stream.unpipe()
        capacitor.destroy(
          createError(
            413,
            `File truncated as it exceeds the ${maxFileSize} byte size limit.`
          )
        )
      })
      stream.on('error', error => {
        stream.unpipe()
        // istanbul ignore next
        capacitor.destroy(exitError || error)
      })
      stream.pipe(capacitor)
      const file = {
        filename,
        mimetype,
        encoding,
        createReadStream() {
          const error = capacitor.error || (released ? exitError : null)
          if (error) throw error
          return capacitor.createReadStream()
        }
      }
      let capacitorStream
      Object.defineProperty(file, 'stream', {
        get: util.deprecate(function() {
          if (!capacitorStream) capacitorStream = this.createReadStream()
          return capacitorStream
        }, 'File upload property stream is deprecated. Use createReadStream() instead.')
      })
      Object.defineProperty(file, 'capacitor', {
        value: capacitor
      })
      upload.resolve(file)
    })
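    // Emitted by Busboy once more file parts arrive than maxFiles allows.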
    parser.once('filesLimit', () =>
      exit(createError(413, `${maxFiles} max file uploads exceeded.`))
    )
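    // Parsing is complete: ensure the required fields arrived and reject any
    // uploads whose files never did.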
    parser.once('finish', () => {
      request.unpipe(parser)
      request.resume()
      if (!operations)
        return exit(
          createError(
            400,
            `Missing multipart field operations (${SPEC_URL}).`
          )
        )
      if (!map)
        return exit(
          createError(400, `Missing multipart field map (${SPEC_URL}).`)
        )
      for (const upload of map.values())
        if (!upload.file)
          upload.reject(createError(400, 'File missing in the request.'))
    })
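    // Exit on parser errors, release capacitors when the response finishes
    // or closes, and treat a request 'close' before 'end' as a client
    // disconnect.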
    parser.once('error', exit)
    response.once('finish', release)
    response.once('close', release)
    request.once('close', abort)
    request.once('end', () => {
      request.removeListener('close', abort)
    })
    request.pipe(parser)
  })