Initial Save

This commit is contained in:
jackbeeby
2025-03-28 12:30:19 +11:00
parent e381994f19
commit d8773925e8
9910 changed files with 982718 additions and 0 deletions

View File

@@ -0,0 +1,21 @@
'use strict'
exports.__esModule = true
exports.GraphQLUpload = void 0
var _graphql = require('graphql')
/**
 * A GraphQL scalar for file uploads. Client-supplied variable values pass
 * through unchanged (they become upload promises set by processRequest);
 * inline literals and serialization are rejected.
 */
const GraphQLUpload = new _graphql.GraphQLScalarType({
  name: 'Upload',
  description: 'The `Upload` scalar type represents a file upload.',
  parseValue(value) {
    return value
  },
  parseLiteral() {
    throw new Error('Upload scalar literal unsupported.')
  },
  serialize() {
    throw new Error('Upload scalar serialization unsupported.')
  }
})
exports.GraphQLUpload = GraphQLUpload

View File

@@ -0,0 +1,14 @@
import { GraphQLScalarType } from 'graphql'
/**
 * A GraphQL scalar for file uploads. Client-supplied variable values pass
 * through unchanged (they become upload promises set by processRequest);
 * inline literals and serialization are rejected.
 */
export const GraphQLUpload = new GraphQLScalarType({
  name: 'Upload',
  description: 'The `Upload` scalar type represents a file upload.',
  parseValue(value) {
    return value
  },
  parseLiteral() {
    throw new Error('Upload scalar literal unsupported.')
  },
  serialize() {
    throw new Error('Upload scalar serialization unsupported.')
  }
})

View File

@@ -0,0 +1,6 @@
'use strict'
exports.__esModule = true
exports.SPEC_URL = void 0
// URL of the GraphQL multipart request spec; interpolated into error messages.
const SPEC_URL = 'https://github.com/jaydenseric/graphql-multipart-request-spec'
exports.SPEC_URL = SPEC_URL

View File

@@ -0,0 +1,2 @@
// URL of the GraphQL multipart request spec; interpolated into error messages.
export const SPEC_URL =
  'https://github.com/jaydenseric/graphql-multipart-request-spec'

View File

@@ -0,0 +1,34 @@
'use strict'
exports.__esModule = true
exports.graphqlUploadExpress = void 0
var _processRequest = require('./processRequest')
/**
 * Creates Express middleware that parses GraphQL multipart requests and
 * places the result on `request.body`. Non-multipart requests pass through
 * untouched.
 * @param {object} [options] `processRequest` override plus any options
 *   forwarded to it (e.g. maxFieldSize, maxFileSize, maxFiles).
 * @returns {Function} Express middleware.
 */
const graphqlUploadExpress = (options = {}) => {
  const {
    processRequest = _processRequest.processRequest,
    ...processRequestOptions
  } = options
  return (request, response, next) => {
    if (!request.is('multipart/form-data')) return next()
    // Resolves once the raw request stream has fully ended.
    const finished = new Promise(resolve => request.on('end', resolve))
    const { send } = response
    // Delay sending the response until the request stream has ended, then
    // restore the original send and forward the call.
    response.send = (...args) => {
      finished.then(() => {
        response.send = send
        response.send(...args)
      })
    }
    processRequest(request, response, processRequestOptions)
      .then(body => {
        request.body = body
        next()
      })
      .catch(error => {
        // Expose the HTTP status of client-safe errors on the response.
        if (error.status && error.expose) response.status(error.status)
        next(error)
      })
  }
}
exports.graphqlUploadExpress = graphqlUploadExpress

View File

@@ -0,0 +1,26 @@
import { processRequest as defaultProcessRequest } from './processRequest'
/**
 * Creates Express middleware that parses GraphQL multipart requests and
 * places the result on `request.body`. Non-multipart requests pass through
 * untouched.
 * @param {object} [options] `processRequest` override plus any options
 *   forwarded to it (e.g. maxFieldSize, maxFileSize, maxFiles).
 * @returns {Function} Express middleware.
 */
export const graphqlUploadExpress = (options = {}) => {
  const {
    processRequest = defaultProcessRequest,
    ...processRequestOptions
  } = options
  return (request, response, next) => {
    if (!request.is('multipart/form-data')) return next()
    // Resolves once the raw request stream has fully ended.
    const finished = new Promise(resolve => request.on('end', resolve))
    const { send } = response
    // Delay sending the response until the request stream has ended, then
    // restore the original send and forward the call.
    response.send = (...args) => {
      finished.then(() => {
        response.send = send
        response.send(...args)
      })
    }
    processRequest(request, response, processRequestOptions)
      .then(body => {
        request.body = body
        next()
      })
      .catch(error => {
        // Expose the HTTP status of client-safe errors on the response.
        if (error.status && error.expose) response.status(error.status)
        next(error)
      })
  }
}

View File

@@ -0,0 +1,27 @@
'use strict'
exports.__esModule = true
exports.graphqlUploadKoa = void 0
var _processRequest = require('./processRequest')
/**
 * Creates Koa middleware that parses GraphQL multipart requests and places
 * the result on `ctx.request.body`. Non-multipart requests pass through
 * untouched.
 * @param {object} [options] `processRequest` override plus any options
 *   forwarded to it (e.g. maxFieldSize, maxFileSize, maxFiles).
 * @returns {Function} Koa middleware.
 */
const graphqlUploadKoa = (options = {}) => {
  const {
    processRequest = _processRequest.processRequest,
    ...processRequestOptions
  } = options
  return async (ctx, next) => {
    if (!ctx.request.is('multipart/form-data')) return next()
    // Resolves once the raw request stream has fully ended.
    const finished = new Promise(resolve => ctx.req.on('end', resolve))
    try {
      ctx.request.body = await processRequest(
        ctx.req,
        ctx.res,
        processRequestOptions
      )
      await next()
    } finally {
      // Hold the response until the request has been entirely consumed.
      await finished
    }
  }
}
exports.graphqlUploadKoa = graphqlUploadKoa

View File

@@ -0,0 +1,19 @@
import { processRequest as defaultProcessRequest } from './processRequest'
/**
 * Creates Koa middleware that parses GraphQL multipart requests and places
 * the result on `ctx.request.body`. Non-multipart requests pass through
 * untouched.
 * @param {object} [options] `processRequest` override plus any options
 *   forwarded to it (e.g. maxFieldSize, maxFileSize, maxFiles).
 * @returns {Function} Koa middleware.
 */
export const graphqlUploadKoa = (options = {}) => {
  const {
    processRequest = defaultProcessRequest,
    ...processRequestOptions
  } = options
  return async (ctx, next) => {
    if (!ctx.request.is('multipart/form-data')) return next()
    // Resolves once the raw request stream has fully ended.
    const finished = new Promise(resolve => ctx.req.on('end', resolve))
    try {
      ctx.request.body = await processRequest(
        ctx.req,
        ctx.res,
        processRequestOptions
      )
      await next()
    } finally {
      // Hold the response until the request has been entirely consumed.
      await finished
    }
  }
}

View File

@@ -0,0 +1,11 @@
'use strict'
exports.__esModule = true
exports.ignoreStream = void 0
// Safely discards a stream: swallow errors and drain the data so an
// unconsumed multipart part can never stall or crash request parsing.
const ignoreStream = stream => {
  const noop = () => {}
  stream.on('error', noop)
  stream.resume()
}
exports.ignoreStream = ignoreStream

View File

@@ -0,0 +1,4 @@
// Safely discards a stream: swallow errors and drain the data so an
// unconsumed multipart part can never stall or crash request parsing.
export const ignoreStream = stream => {
  const noop = () => {}
  stream.on('error', noop)
  stream.resume()
}

View File

@@ -0,0 +1,20 @@
'use strict'
exports.__esModule = true
exports.graphqlUploadExpress = exports.graphqlUploadKoa = exports.processRequest = exports.GraphQLUpload = void 0
// Re-export the public API, preserving the original require order.
exports.GraphQLUpload = require('./GraphQLUpload').GraphQLUpload
exports.processRequest = require('./processRequest').processRequest
exports.graphqlUploadKoa = require('./graphqlUploadKoa').graphqlUploadKoa
exports.graphqlUploadExpress = require('./graphqlUploadExpress').graphqlUploadExpress

View File

@@ -0,0 +1,4 @@
// Public entry point: the Upload scalar, the multipart request processor,
// and middleware adapters for Koa and Express.
export { GraphQLUpload } from './GraphQLUpload'
export { processRequest } from './processRequest'
export { graphqlUploadKoa } from './graphqlUploadKoa'
export { graphqlUploadExpress } from './graphqlUploadExpress'

View File

@@ -0,0 +1,9 @@
'use strict'
exports.__esModule = true
exports.isEnumerableObject = void 0
// True when the value is an object that is neither null nor an array.
const isEnumerableObject = value =>
  value !== null && !Array.isArray(value) && typeof value === 'object'
exports.isEnumerableObject = isEnumerableObject

View File

@@ -0,0 +1,2 @@
// True when the value is an object that is neither null nor an array.
export const isEnumerableObject = value =>
  value !== null && !Array.isArray(value) && typeof value === 'object'

View File

@@ -0,0 +1,318 @@
'use strict'
exports.__esModule = true
exports.processRequest = void 0
var _util = _interopRequireDefault(require('util'))
var _busboy = _interopRequireDefault(require('busboy'))
var _fsCapacitor = require('fs-capacitor')
var _httpErrors = _interopRequireDefault(require('http-errors'))
var _objectPath = _interopRequireDefault(require('object-path'))
var _constants = require('./constants')
var _ignoreStream = require('./ignoreStream')
var _isEnumerableObject = require('./isEnumerableObject')
// istanbul ignore next
// Wraps a CommonJS module so it can be consumed like a Babel ES module:
// modules flagged __esModule pass through untouched; anything else is
// boxed as the `default` export.
function _interopRequireDefault(obj) {
  if (obj && obj.__esModule) return obj
  return { default: obj }
}
/**
 * A deferred file upload: `promise` resolves with the file once the
 * corresponding multipart section has been parsed; `file` is also stored
 * on the instance at that point.
 */
class Upload {
  constructor() {
    this.promise = new Promise((resolve, reject) => {
      // Expose the settlement functions so the multipart parser can settle
      // this upload from outside the executor.
      this.reject = reject
      this.resolve = file => {
        this.file = file
        resolve(file)
      }
    })
    // Prevent unhandled rejection warnings when the promise is never awaited.
    this.promise.catch(() => {})
  }
}
/**
 * Processes an incoming GraphQL multipart request following the GraphQL
 * multipart request spec: parses the `operations` and `map` fields, then
 * buffers each file part into an fs-capacitor WriteStream so resolvers can
 * read it (possibly multiple times).
 * @param {object} request Node.js HTTP request (readable stream).
 * @param {object} response Node.js HTTP response; its finish/close events
 *   trigger cleanup of the buffered files.
 * @param {object} [options] Limits: maxFieldSize (bytes, default 1000000),
 *   maxFileSize (bytes, default Infinity), maxFiles (default Infinity).
 * @returns {Promise<object>} Resolves the parsed GraphQL operation(s) with
 *   each mapped file value replaced by a promise of the file.
 */
const processRequest = (
  request,
  response,
  { maxFieldSize = 1000000, maxFileSize = Infinity, maxFiles = Infinity } = {}
) =>
  new Promise((resolve, reject) => {
    // Set once the response is done and buffered files have been destroyed.
    let released
    // First fatal error; later errors are ignored.
    let exitError
    // The file stream currently being buffered, if any.
    let currentStream
    // Parsed value of the `operations` multipart field.
    let operations
    // object-path wrapper over `operations`, used to inject upload promises.
    let operationsPath
    // Multipart field name -> Upload, built from the `map` field.
    let map
    const parser = new _busboy.default({
      headers: request.headers,
      limits: {
        fieldSize: maxFieldSize,
        // Only the `operations` and `map` fields are expected.
        fields: 2,
        fileSize: maxFileSize,
        files: maxFiles
      }
    })
    // Rejects the returned promise with `error` and aborts all parsing.
    const exit = error => {
      // Only the first error counts.
      if (exitError) return
      exitError = error
      reject(exitError)
      parser.destroy()
      if (currentStream) currentStream.destroy(exitError)
      // Reject every upload that never received its file.
      if (map)
        for (const upload of map.values())
          if (!upload.file) upload.reject(exitError)
      request.unpipe(parser)
      // Deferred so other request listeners can observe the error first.
      setImmediate(() => {
        request.resume()
      })
    }
    // Destroys the buffered files once the response no longer needs them.
    const release = () => {
      // istanbul ignore next
      if (released) return
      released = true
      if (map)
        for (const upload of map.values())
          if (upload.file) upload.file.capacitor.destroy()
    }
    // Handles a client disconnect mid-parse with an HTTP 499 error.
    const abort = () => {
      exit(
        (0, _httpErrors.default)(
          499,
          'Request disconnected during file upload stream parsing.'
        )
      )
    }
    parser.on(
      'field',
      (fieldName, value, fieldNameTruncated, valueTruncated) => {
        if (exitError) return
        if (valueTruncated)
          return exit(
            (0, _httpErrors.default)(
              413,
              `The ${fieldName} multipart field value exceeds the ${maxFieldSize} byte size limit.`
            )
          )
        switch (fieldName) {
          case 'operations':
            try {
              operations = JSON.parse(value)
            } catch (error) {
              return exit(
                (0, _httpErrors.default)(
                  400,
                  `Invalid JSON in the operations multipart field (${_constants.SPEC_URL}).`
                )
              )
            }
            // Operations may be a single object or a batched array.
            if (
              !(0, _isEnumerableObject.isEnumerableObject)(operations) &&
              !Array.isArray(operations)
            )
              return exit(
                (0, _httpErrors.default)(
                  400,
                  `Invalid type for the operations multipart field (${_constants.SPEC_URL}).`
                )
              )
            operationsPath = (0, _objectPath.default)(operations)
            break
          case 'map': {
            // The spec requires `map` to follow `operations`.
            if (!operations)
              return exit(
                (0, _httpErrors.default)(
                  400,
                  `Misordered multipart fields; map should follow operations (${_constants.SPEC_URL}).`
                )
              )
            let parsedMap
            try {
              parsedMap = JSON.parse(value)
            } catch (error) {
              return exit(
                (0, _httpErrors.default)(
                  400,
                  `Invalid JSON in the map multipart field (${_constants.SPEC_URL}).`
                )
              )
            }
            if (!(0, _isEnumerableObject.isEnumerableObject)(parsedMap))
              return exit(
                (0, _httpErrors.default)(
                  400,
                  `Invalid type for the map multipart field (${_constants.SPEC_URL}).`
                )
              )
            const mapEntries = Object.entries(parsedMap)
            if (mapEntries.length > maxFiles)
              return exit(
                (0, _httpErrors.default)(
                  413,
                  `${maxFiles} max file uploads exceeded.`
                )
              )
            map = new Map()
            for (const [fieldName, paths] of mapEntries) {
              if (!Array.isArray(paths))
                return exit(
                  (0, _httpErrors.default)(
                    400,
                    `Invalid type for the map multipart field entry key ${fieldName} array (${_constants.SPEC_URL}).`
                  )
                )
              map.set(fieldName, new Upload())
              // Inject the upload promise at every mapped operations path.
              for (const [index, path] of paths.entries()) {
                if (typeof path !== 'string')
                  return exit(
                    (0, _httpErrors.default)(
                      400,
                      `Invalid type for the map multipart field entry key ${fieldName} array index ${index} value (${_constants.SPEC_URL}).`
                    )
                  )
                try {
                  operationsPath.set(path, map.get(fieldName).promise)
                } catch (error) {
                  return exit(
                    (0, _httpErrors.default)(
                      400,
                      `Invalid object path for the map multipart field entry key ${fieldName} array index ${index} value ${path} (${_constants.SPEC_URL}).`
                    )
                  )
                }
              }
            }
            // Resolve early so the request can be handled while files are
            // still streaming in.
            resolve(operations)
          }
        }
      }
    )
    parser.on('file', (fieldName, stream, filename, encoding, mimetype) => {
      if (exitError) {
        ;(0, _ignoreStream.ignoreStream)(stream)
        return
      }
      // The spec requires file parts to come after the `map` field.
      if (!map) {
        ;(0, _ignoreStream.ignoreStream)(stream)
        return exit(
          (0, _httpErrors.default)(
            400,
            `Misordered multipart fields; files should follow map (${_constants.SPEC_URL}).`
          )
        )
      }
      currentStream = stream
      stream.on('end', () => {
        currentStream = null
      })
      const upload = map.get(fieldName)
      // A file not referenced in the map is silently discarded.
      if (!upload) {
        ;(0, _ignoreStream.ignoreStream)(stream)
        return
      }
      // Buffer the file so it can be read after the request has ended.
      const capacitor = new _fsCapacitor.WriteStream()
      capacitor.on('error', () => {
        stream.unpipe()
        stream.resume()
      })
      // Emitted when the configured maxFileSize is exceeded; truncate.
      stream.on('limit', () => {
        stream.unpipe()
        capacitor.destroy(
          (0, _httpErrors.default)(
            413,
            `File truncated as it exceeds the ${maxFileSize} byte size limit.`
          )
        )
      })
      stream.on('error', error => {
        stream.unpipe() // istanbul ignore next
        capacitor.destroy(exitError || error)
      })
      stream.pipe(capacitor)
      const file = {
        filename,
        mimetype,
        encoding,
        // Throws if the upload errored or the buffer was already released.
        createReadStream() {
          const error = capacitor.error || (released ? exitError : null)
          if (error) throw error
          return capacitor.createReadStream()
        }
      }
      let capacitorStream
      // Legacy `stream` property; lazily creates one shared read stream.
      Object.defineProperty(file, 'stream', {
        get: _util.default.deprecate(function() {
          if (!capacitorStream) capacitorStream = this.createReadStream()
          return capacitorStream
        }, 'File upload property stream is deprecated. Use createReadStream() instead.')
      })
      // Not enumerable, so consumers don't see the internal capacitor.
      Object.defineProperty(file, 'capacitor', {
        value: capacitor
      })
      upload.resolve(file)
    })
    parser.once('filesLimit', () =>
      exit(
        (0, _httpErrors.default)(413, `${maxFiles} max file uploads exceeded.`)
      )
    )
    parser.once('finish', () => {
      request.unpipe(parser)
      request.resume()
      if (!operations)
        return exit(
          (0, _httpErrors.default)(
            400,
            `Missing multipart field operations (${_constants.SPEC_URL}).`
          )
        )
      if (!map)
        return exit(
          (0, _httpErrors.default)(
            400,
            `Missing multipart field map (${_constants.SPEC_URL}).`
          )
        )
      // Any mapped upload that never received a file is an error.
      for (const upload of map.values())
        if (!upload.file)
          upload.reject(
            (0, _httpErrors.default)(400, 'File missing in the request.')
          )
    })
    parser.once('error', exit)
    // Release buffered files once the response is done with them.
    response.once('finish', release)
    response.once('close', release)
    // A request close before its end means the client aborted.
    request.once('close', abort)
    request.once('end', () => {
      request.removeListener('close', abort)
    })
    request.pipe(parser)
  })
exports.processRequest = processRequest

View File

@@ -0,0 +1,286 @@
import util from 'util'
import Busboy from 'busboy'
import { WriteStream } from 'fs-capacitor'
import createError from 'http-errors'
import objectPath from 'object-path'
import { SPEC_URL } from './constants'
import { ignoreStream } from './ignoreStream'
import { isEnumerableObject } from './isEnumerableObject'
/**
 * A deferred file upload: `promise` resolves with the file once the
 * corresponding multipart section has been parsed; `file` is also stored
 * on the instance at that point.
 */
class Upload {
  constructor() {
    this.promise = new Promise((resolve, reject) => {
      // Expose the settlement functions so the multipart parser can settle
      // this upload from outside the executor.
      this.reject = reject
      this.resolve = file => {
        this.file = file
        resolve(file)
      }
    })
    // Prevent unhandled rejection warnings when the promise is never awaited.
    this.promise.catch(() => {})
  }
}
/**
 * Processes an incoming GraphQL multipart request following the GraphQL
 * multipart request spec: parses the `operations` and `map` fields, then
 * buffers each file part into an fs-capacitor WriteStream so resolvers can
 * read it (possibly multiple times).
 * @param {object} request Node.js HTTP request (readable stream).
 * @param {object} response Node.js HTTP response; its finish/close events
 *   trigger cleanup of the buffered files.
 * @param {object} [options] Limits: maxFieldSize (bytes, default 1000000),
 *   maxFileSize (bytes, default Infinity), maxFiles (default Infinity).
 * @returns {Promise<object>} Resolves the parsed GraphQL operation(s) with
 *   each mapped file value replaced by a promise of the file.
 */
export const processRequest = (
  request,
  response,
  { maxFieldSize = 1000000, maxFileSize = Infinity, maxFiles = Infinity } = {}
) =>
  new Promise((resolve, reject) => {
    // Set once the response is done and buffered files have been destroyed.
    let released
    // First fatal error; later errors are ignored.
    let exitError
    // The file stream currently being buffered, if any.
    let currentStream
    // Parsed value of the `operations` multipart field.
    let operations
    // object-path wrapper over `operations`, used to inject upload promises.
    let operationsPath
    // Multipart field name -> Upload, built from the `map` field.
    let map
    const parser = new Busboy({
      headers: request.headers,
      limits: {
        fieldSize: maxFieldSize,
        // Only the `operations` and `map` fields are expected.
        fields: 2,
        fileSize: maxFileSize,
        files: maxFiles
      }
    })
    // Rejects the returned promise with `error` and aborts all parsing.
    const exit = error => {
      // Only the first error counts.
      if (exitError) return
      exitError = error
      reject(exitError)
      parser.destroy()
      if (currentStream) currentStream.destroy(exitError)
      // Reject every upload that never received its file.
      if (map)
        for (const upload of map.values())
          if (!upload.file) upload.reject(exitError)
      request.unpipe(parser)
      // Deferred so other request listeners can observe the error first.
      setImmediate(() => {
        request.resume()
      })
    }
    // Destroys the buffered files once the response no longer needs them.
    const release = () => {
      // istanbul ignore next
      if (released) return
      released = true
      if (map)
        for (const upload of map.values())
          if (upload.file) upload.file.capacitor.destroy()
    }
    // Handles a client disconnect mid-parse with an HTTP 499 error.
    const abort = () => {
      exit(
        createError(
          499,
          'Request disconnected during file upload stream parsing.'
        )
      )
    }
    parser.on(
      'field',
      (fieldName, value, fieldNameTruncated, valueTruncated) => {
        if (exitError) return
        if (valueTruncated)
          return exit(
            createError(
              413,
              `The ${fieldName} multipart field value exceeds the ${maxFieldSize} byte size limit.`
            )
          )
        switch (fieldName) {
          case 'operations':
            try {
              operations = JSON.parse(value)
            } catch (error) {
              return exit(
                createError(
                  400,
                  `Invalid JSON in the operations multipart field (${SPEC_URL}).`
                )
              )
            }
            // Operations may be a single object or a batched array.
            if (!isEnumerableObject(operations) && !Array.isArray(operations))
              return exit(
                createError(
                  400,
                  `Invalid type for the operations multipart field (${SPEC_URL}).`
                )
              )
            operationsPath = objectPath(operations)
            break
          case 'map': {
            // The spec requires `map` to follow `operations`.
            if (!operations)
              return exit(
                createError(
                  400,
                  `Misordered multipart fields; map should follow operations (${SPEC_URL}).`
                )
              )
            let parsedMap
            try {
              parsedMap = JSON.parse(value)
            } catch (error) {
              return exit(
                createError(
                  400,
                  `Invalid JSON in the map multipart field (${SPEC_URL}).`
                )
              )
            }
            if (!isEnumerableObject(parsedMap))
              return exit(
                createError(
                  400,
                  `Invalid type for the map multipart field (${SPEC_URL}).`
                )
              )
            const mapEntries = Object.entries(parsedMap)
            if (mapEntries.length > maxFiles)
              return exit(
                createError(413, `${maxFiles} max file uploads exceeded.`)
              )
            map = new Map()
            for (const [fieldName, paths] of mapEntries) {
              if (!Array.isArray(paths))
                return exit(
                  createError(
                    400,
                    `Invalid type for the map multipart field entry key ${fieldName} array (${SPEC_URL}).`
                  )
                )
              map.set(fieldName, new Upload())
              // Inject the upload promise at every mapped operations path.
              for (const [index, path] of paths.entries()) {
                if (typeof path !== 'string')
                  return exit(
                    createError(
                      400,
                      `Invalid type for the map multipart field entry key ${fieldName} array index ${index} value (${SPEC_URL}).`
                    )
                  )
                try {
                  operationsPath.set(path, map.get(fieldName).promise)
                } catch (error) {
                  return exit(
                    createError(
                      400,
                      `Invalid object path for the map multipart field entry key ${fieldName} array index ${index} value ${path} (${SPEC_URL}).`
                    )
                  )
                }
              }
            }
            // Resolve early so the request can be handled while files are
            // still streaming in.
            resolve(operations)
          }
        }
      }
    )
    parser.on('file', (fieldName, stream, filename, encoding, mimetype) => {
      if (exitError) {
        ignoreStream(stream)
        return
      }
      // The spec requires file parts to come after the `map` field.
      if (!map) {
        ignoreStream(stream)
        return exit(
          createError(
            400,
            `Misordered multipart fields; files should follow map (${SPEC_URL}).`
          )
        )
      }
      currentStream = stream
      stream.on('end', () => {
        currentStream = null
      })
      const upload = map.get(fieldName)
      // A file not referenced in the map is silently discarded.
      if (!upload) {
        ignoreStream(stream)
        return
      }
      // Buffer the file so it can be read after the request has ended.
      const capacitor = new WriteStream()
      capacitor.on('error', () => {
        stream.unpipe()
        stream.resume()
      })
      // Emitted when the configured maxFileSize is exceeded; truncate.
      stream.on('limit', () => {
        stream.unpipe()
        capacitor.destroy(
          createError(
            413,
            `File truncated as it exceeds the ${maxFileSize} byte size limit.`
          )
        )
      })
      stream.on('error', error => {
        stream.unpipe() // istanbul ignore next
        capacitor.destroy(exitError || error)
      })
      stream.pipe(capacitor)
      const file = {
        filename,
        mimetype,
        encoding,
        // Throws if the upload errored or the buffer was already released.
        createReadStream() {
          const error = capacitor.error || (released ? exitError : null)
          if (error) throw error
          return capacitor.createReadStream()
        }
      }
      let capacitorStream
      // Legacy `stream` property; lazily creates one shared read stream.
      Object.defineProperty(file, 'stream', {
        get: util.deprecate(function() {
          if (!capacitorStream) capacitorStream = this.createReadStream()
          return capacitorStream
        }, 'File upload property stream is deprecated. Use createReadStream() instead.')
      })
      // Not enumerable, so consumers don't see the internal capacitor.
      Object.defineProperty(file, 'capacitor', {
        value: capacitor
      })
      upload.resolve(file)
    })
    parser.once('filesLimit', () =>
      exit(createError(413, `${maxFiles} max file uploads exceeded.`))
    )
    parser.once('finish', () => {
      request.unpipe(parser)
      request.resume()
      if (!operations)
        return exit(
          createError(
            400,
            `Missing multipart field operations (${SPEC_URL}).`
          )
        )
      if (!map)
        return exit(
          createError(400, `Missing multipart field map (${SPEC_URL}).`)
        )
      // Any mapped upload that never received a file is an error.
      for (const upload of map.values())
        if (!upload.file)
          upload.reject(createError(400, 'File missing in the request.'))
    })
    parser.once('error', exit)
    // Release buffered files once the response is done with them.
    response.once('finish', release)
    response.once('close', release)
    // A request close before its end means the client aborted.
    request.once('close', abort)
    request.once('end', () => {
      request.removeListener('close', abort)
    })
    request.pipe(parser)
  })