Initial Save

This commit is contained in:
jackbeeby
2025-03-28 12:30:19 +11:00
parent e381994f19
commit d8773925e8
9910 changed files with 982718 additions and 0 deletions

View File

@@ -0,0 +1,15 @@
# @apollographql/graphql-upload-8-fork changelog

See [the upstream changelog](https://github.com/jaydenseric/graphql-upload/blob/master/changelog.md) for more details.

## 8.1.3

Depend on the `@types` packages required to use the typings from this package. (Because this fork is intended only for use by the TypeScript project Apollo Server, this seems reasonable.)

## 8.1.2

Incorporate typings from DefinitelyTyped.

## 8.1.1

Add v15 to the graphql peerDependencies.

View File

@@ -0,0 +1,48 @@
// Type definitions for graphql-upload 8.0
// Project: https://github.com/jaydenseric/graphql-upload#readme
// Definitions by: Mike Marcacci <https://github.com/mike-marcacci>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 3.3

/* tslint:disable:no-unnecessary-generics */

import { IncomingMessage, ServerResponse } from 'http'
import { GraphQLScalarType } from 'graphql'
import { RequestHandler } from 'express'
import { Middleware } from 'koa'
import { ReadStream } from 'fs-capacitor'

export interface UploadOptions {
  maxFieldSize?: number
  maxFileSize?: number
  maxFiles?: number
}

export interface GraphQLOperation {
  query: string
  operationName?: null | string
  variables?: null | unknown
}

export function processRequest(
  request: IncomingMessage,
  response: ServerResponse,
  uploadOptions?: UploadOptions
): Promise<GraphQLOperation | GraphQLOperation[]>

export function graphqlUploadExpress(
  uploadOptions?: UploadOptions
): RequestHandler

export function graphqlUploadKoa<StateT = any, CustomT = {}>(
  uploadOptions?: UploadOptions
): Middleware<StateT, CustomT>

export const GraphQLUpload: GraphQLScalarType

export interface FileUpload {
  filename: string
  mimetype: string
  encoding: string
  createReadStream(): ReadStream
}

View File

@@ -0,0 +1,21 @@
'use strict'

exports.__esModule = true
exports.GraphQLUpload = void 0

var _graphql = require('graphql')

const GraphQLUpload = new _graphql.GraphQLScalarType({
  name: 'Upload',
  description: 'The `Upload` scalar type represents a file upload.',
  parseValue: value => value,
  parseLiteral() {
    throw new Error('Upload scalar literal unsupported.')
  },
  serialize() {
    throw new Error('Upload scalar serialization unsupported.')
  }
})

exports.GraphQLUpload = GraphQLUpload

View File

@@ -0,0 +1,14 @@
import { GraphQLScalarType } from 'graphql'

export const GraphQLUpload = new GraphQLScalarType({
  name: 'Upload',
  description: 'The `Upload` scalar type represents a file upload.',
  parseValue: value => value,
  parseLiteral() {
    throw new Error('Upload scalar literal unsupported.')
  },
  serialize() {
    throw new Error('Upload scalar serialization unsupported.')
  }
})

View File

@@ -0,0 +1,6 @@
'use strict'
exports.__esModule = true
exports.SPEC_URL = void 0
const SPEC_URL = 'https://github.com/jaydenseric/graphql-multipart-request-spec'
exports.SPEC_URL = SPEC_URL

View File

@@ -0,0 +1,2 @@
export const SPEC_URL =
  'https://github.com/jaydenseric/graphql-multipart-request-spec'

View File

@@ -0,0 +1,34 @@
'use strict'

exports.__esModule = true
exports.graphqlUploadExpress = void 0

var _processRequest = require('./processRequest')

const graphqlUploadExpress = ({
  processRequest = _processRequest.processRequest,
  ...processRequestOptions
} = {}) => (request, response, next) => {
  if (!request.is('multipart/form-data')) return next()
  const finished = new Promise(resolve => request.on('end', resolve))
  const { send } = response

  response.send = (...args) => {
    finished.then(() => {
      response.send = send
      response.send(...args)
    })
  }

  processRequest(request, response, processRequestOptions)
    .then(body => {
      request.body = body
      next()
    })
    .catch(error => {
      if (error.status && error.expose) response.status(error.status)
      next(error)
    })
}

exports.graphqlUploadExpress = graphqlUploadExpress

View File

@@ -0,0 +1,26 @@
import { processRequest as defaultProcessRequest } from './processRequest'

export const graphqlUploadExpress = ({
  processRequest = defaultProcessRequest,
  ...processRequestOptions
} = {}) => (request, response, next) => {
  if (!request.is('multipart/form-data')) return next()
  const finished = new Promise(resolve => request.on('end', resolve))
  const { send } = response

  response.send = (...args) => {
    finished.then(() => {
      response.send = send
      response.send(...args)
    })
  }

  processRequest(request, response, processRequestOptions)
    .then(body => {
      request.body = body
      next()
    })
    .catch(error => {
      if (error.status && error.expose) response.status(error.status)
      next(error)
    })
}

View File

@@ -0,0 +1,27 @@
'use strict'

exports.__esModule = true
exports.graphqlUploadKoa = void 0

var _processRequest = require('./processRequest')

const graphqlUploadKoa = ({
  processRequest = _processRequest.processRequest,
  ...processRequestOptions
} = {}) => async (ctx, next) => {
  if (!ctx.request.is('multipart/form-data')) return next()
  const finished = new Promise(resolve => ctx.req.on('end', resolve))

  try {
    ctx.request.body = await processRequest(
      ctx.req,
      ctx.res,
      processRequestOptions
    )
    await next()
  } finally {
    await finished
  }
}

exports.graphqlUploadKoa = graphqlUploadKoa

View File

@@ -0,0 +1,19 @@
import { processRequest as defaultProcessRequest } from './processRequest'

export const graphqlUploadKoa = ({
  processRequest = defaultProcessRequest,
  ...processRequestOptions
} = {}) => async (ctx, next) => {
  if (!ctx.request.is('multipart/form-data')) return next()
  const finished = new Promise(resolve => ctx.req.on('end', resolve))

  try {
    ctx.request.body = await processRequest(
      ctx.req,
      ctx.res,
      processRequestOptions
    )
    await next()
  } finally {
    await finished
  }
}

View File

@@ -0,0 +1,11 @@
'use strict'

exports.__esModule = true
exports.ignoreStream = void 0

const ignoreStream = stream => {
  stream.on('error', () => {})
  stream.resume()
}

exports.ignoreStream = ignoreStream

View File

@@ -0,0 +1,4 @@
export const ignoreStream = stream => {
  stream.on('error', () => {})
  stream.resume()
}

View File

@@ -0,0 +1,20 @@
'use strict'

exports.__esModule = true
exports.graphqlUploadExpress = exports.graphqlUploadKoa = exports.processRequest = exports.GraphQLUpload = void 0

var _GraphQLUpload = require('./GraphQLUpload')

exports.GraphQLUpload = _GraphQLUpload.GraphQLUpload

var _processRequest = require('./processRequest')

exports.processRequest = _processRequest.processRequest

var _graphqlUploadKoa = require('./graphqlUploadKoa')

exports.graphqlUploadKoa = _graphqlUploadKoa.graphqlUploadKoa

var _graphqlUploadExpress = require('./graphqlUploadExpress')

exports.graphqlUploadExpress = _graphqlUploadExpress.graphqlUploadExpress

View File

@@ -0,0 +1,4 @@
export { GraphQLUpload } from './GraphQLUpload'
export { processRequest } from './processRequest'
export { graphqlUploadKoa } from './graphqlUploadKoa'
export { graphqlUploadExpress } from './graphqlUploadExpress'

View File

@@ -0,0 +1,9 @@
'use strict'

exports.__esModule = true
exports.isEnumerableObject = void 0

const isEnumerableObject = value =>
  typeof value === 'object' && value !== null && !Array.isArray(value)

exports.isEnumerableObject = isEnumerableObject

View File

@@ -0,0 +1,2 @@
export const isEnumerableObject = value =>
  typeof value === 'object' && value !== null && !Array.isArray(value)

View File

@@ -0,0 +1,318 @@
'use strict'

exports.__esModule = true
exports.processRequest = void 0

var _util = _interopRequireDefault(require('util'))

var _busboy = _interopRequireDefault(require('busboy'))

var _fsCapacitor = require('fs-capacitor')

var _httpErrors = _interopRequireDefault(require('http-errors'))

var _objectPath = _interopRequireDefault(require('object-path'))

var _constants = require('./constants')

var _ignoreStream = require('./ignoreStream')

var _isEnumerableObject = require('./isEnumerableObject')

// istanbul ignore next
function _interopRequireDefault(obj) {
  return obj && obj.__esModule ? obj : { default: obj }
}

class Upload {
  constructor() {
    this.promise = new Promise((resolve, reject) => {
      this.resolve = file => {
        this.file = file
        resolve(file)
      }
      this.reject = reject
    })
    this.promise.catch(() => {})
  }
}

const processRequest = (
  request,
  response,
  { maxFieldSize = 1000000, maxFileSize = Infinity, maxFiles = Infinity } = {}
) =>
  new Promise((resolve, reject) => {
    let released
    let exitError
    let currentStream
    let operations
    let operationsPath
    let map

    const parser = new _busboy.default({
      headers: request.headers,
      limits: {
        fieldSize: maxFieldSize,
        fields: 2,
        fileSize: maxFileSize,
        files: maxFiles
      }
    })

    const exit = error => {
      if (exitError) return
      exitError = error
      reject(exitError)
      parser.destroy()
      if (currentStream) currentStream.destroy(exitError)
      if (map)
        for (const upload of map.values())
          if (!upload.file) upload.reject(exitError)
      request.unpipe(parser)
      setImmediate(() => {
        request.resume()
      })
    }

    const release = () => {
      // istanbul ignore next
      if (released) return
      released = true
      if (map)
        for (const upload of map.values())
          if (upload.file) upload.file.capacitor.destroy()
    }

    const abort = () => {
      exit(
        (0, _httpErrors.default)(
          499,
          'Request disconnected during file upload stream parsing.'
        )
      )
    }

    parser.on(
      'field',
      (fieldName, value, fieldNameTruncated, valueTruncated) => {
        if (exitError) return
        if (valueTruncated)
          return exit(
            (0, _httpErrors.default)(
              413,
              `The ${fieldName} multipart field value exceeds the ${maxFieldSize} byte size limit.`
            )
          )

        switch (fieldName) {
          case 'operations':
            try {
              operations = JSON.parse(value)
            } catch (error) {
              return exit(
                (0, _httpErrors.default)(
                  400,
                  `Invalid JSON in the operations multipart field (${_constants.SPEC_URL}).`
                )
              )
            }

            if (
              !(0, _isEnumerableObject.isEnumerableObject)(operations) &&
              !Array.isArray(operations)
            )
              return exit(
                (0, _httpErrors.default)(
                  400,
                  `Invalid type for the operations multipart field (${_constants.SPEC_URL}).`
                )
              )
            operationsPath = (0, _objectPath.default)(operations)
            break

          case 'map': {
            if (!operations)
              return exit(
                (0, _httpErrors.default)(
                  400,
                  `Misordered multipart fields; map should follow operations (${_constants.SPEC_URL}).`
                )
              )
            let parsedMap

            try {
              parsedMap = JSON.parse(value)
            } catch (error) {
              return exit(
                (0, _httpErrors.default)(
                  400,
                  `Invalid JSON in the map multipart field (${_constants.SPEC_URL}).`
                )
              )
            }

            if (!(0, _isEnumerableObject.isEnumerableObject)(parsedMap))
              return exit(
                (0, _httpErrors.default)(
                  400,
                  `Invalid type for the map multipart field (${_constants.SPEC_URL}).`
                )
              )
            const mapEntries = Object.entries(parsedMap)
            if (mapEntries.length > maxFiles)
              return exit(
                (0, _httpErrors.default)(
                  413,
                  `${maxFiles} max file uploads exceeded.`
                )
              )
            map = new Map()

            for (const [fieldName, paths] of mapEntries) {
              if (!Array.isArray(paths))
                return exit(
                  (0, _httpErrors.default)(
                    400,
                    `Invalid type for the map multipart field entry key ${fieldName} array (${_constants.SPEC_URL}).`
                  )
                )
              map.set(fieldName, new Upload())

              for (const [index, path] of paths.entries()) {
                if (typeof path !== 'string')
                  return exit(
                    (0, _httpErrors.default)(
                      400,
                      `Invalid type for the map multipart field entry key ${fieldName} array index ${index} value (${_constants.SPEC_URL}).`
                    )
                  )

                try {
                  operationsPath.set(path, map.get(fieldName).promise)
                } catch (error) {
                  return exit(
                    (0, _httpErrors.default)(
                      400,
                      `Invalid object path for the map multipart field entry key ${fieldName} array index ${index} value ${path} (${_constants.SPEC_URL}).`
                    )
                  )
                }
              }
            }

            resolve(operations)
          }
        }
      }
    )

    parser.on('file', (fieldName, stream, filename, encoding, mimetype) => {
      if (exitError) {
        ;(0, _ignoreStream.ignoreStream)(stream)
        return
      }

      if (!map) {
        ;(0, _ignoreStream.ignoreStream)(stream)
        return exit(
          (0, _httpErrors.default)(
            400,
            `Misordered multipart fields; files should follow map (${_constants.SPEC_URL}).`
          )
        )
      }

      currentStream = stream
      stream.on('end', () => {
        currentStream = null
      })

      const upload = map.get(fieldName)
      if (!upload) {
        ;(0, _ignoreStream.ignoreStream)(stream)
        return
      }

      const capacitor = new _fsCapacitor.WriteStream()
      capacitor.on('error', () => {
        stream.unpipe()
        stream.resume()
      })

      stream.on('limit', () => {
        stream.unpipe()
        capacitor.destroy(
          (0, _httpErrors.default)(
            413,
            `File truncated as it exceeds the ${maxFileSize} byte size limit.`
          )
        )
      })

      stream.on('error', error => {
        stream.unpipe() // istanbul ignore next
        capacitor.destroy(exitError || error)
      })

      stream.pipe(capacitor)

      const file = {
        filename,
        mimetype,
        encoding,
        createReadStream() {
          const error = capacitor.error || (released ? exitError : null)
          if (error) throw error
          return capacitor.createReadStream()
        }
      }

      let capacitorStream
      Object.defineProperty(file, 'stream', {
        get: _util.default.deprecate(function() {
          if (!capacitorStream) capacitorStream = this.createReadStream()
          return capacitorStream
        }, 'File upload property stream is deprecated. Use createReadStream() instead.')
      })
      Object.defineProperty(file, 'capacitor', {
        value: capacitor
      })

      upload.resolve(file)
    })

    parser.once('filesLimit', () =>
      exit(
        (0, _httpErrors.default)(413, `${maxFiles} max file uploads exceeded.`)
      )
    )

    parser.once('finish', () => {
      request.unpipe(parser)
      request.resume()

      if (!operations)
        return exit(
          (0, _httpErrors.default)(
            400,
            `Missing multipart field operations (${_constants.SPEC_URL}).`
          )
        )

      if (!map)
        return exit(
          (0, _httpErrors.default)(
            400,
            `Missing multipart field map (${_constants.SPEC_URL}).`
          )
        )

      for (const upload of map.values())
        if (!upload.file)
          upload.reject(
            (0, _httpErrors.default)(400, 'File missing in the request.')
          )
    })

    parser.once('error', exit)

    response.once('finish', release)
    response.once('close', release)

    request.once('close', abort)
    request.once('end', () => {
      request.removeListener('close', abort)
    })

    request.pipe(parser)
  })

exports.processRequest = processRequest

View File

@@ -0,0 +1,286 @@
import util from 'util'
import Busboy from 'busboy'
import { WriteStream } from 'fs-capacitor'
import createError from 'http-errors'
import objectPath from 'object-path'
import { SPEC_URL } from './constants'
import { ignoreStream } from './ignoreStream'
import { isEnumerableObject } from './isEnumerableObject'

class Upload {
  constructor() {
    this.promise = new Promise((resolve, reject) => {
      this.resolve = file => {
        this.file = file
        resolve(file)
      }
      this.reject = reject
    })
    this.promise.catch(() => {})
  }
}

export const processRequest = (
  request,
  response,
  { maxFieldSize = 1000000, maxFileSize = Infinity, maxFiles = Infinity } = {}
) =>
  new Promise((resolve, reject) => {
    let released
    let exitError
    let currentStream
    let operations
    let operationsPath
    let map

    const parser = new Busboy({
      headers: request.headers,
      limits: {
        fieldSize: maxFieldSize,
        fields: 2,
        fileSize: maxFileSize,
        files: maxFiles
      }
    })

    const exit = error => {
      if (exitError) return
      exitError = error
      reject(exitError)
      parser.destroy()
      if (currentStream) currentStream.destroy(exitError)
      if (map)
        for (const upload of map.values())
          if (!upload.file) upload.reject(exitError)
      request.unpipe(parser)
      setImmediate(() => {
        request.resume()
      })
    }

    const release = () => {
      // istanbul ignore next
      if (released) return
      released = true
      if (map)
        for (const upload of map.values())
          if (upload.file) upload.file.capacitor.destroy()
    }

    const abort = () => {
      exit(
        createError(
          499,
          'Request disconnected during file upload stream parsing.'
        )
      )
    }

    parser.on(
      'field',
      (fieldName, value, fieldNameTruncated, valueTruncated) => {
        if (exitError) return
        if (valueTruncated)
          return exit(
            createError(
              413,
              `The ${fieldName} multipart field value exceeds the ${maxFieldSize} byte size limit.`
            )
          )

        switch (fieldName) {
          case 'operations':
            try {
              operations = JSON.parse(value)
            } catch (error) {
              return exit(
                createError(
                  400,
                  `Invalid JSON in the operations multipart field (${SPEC_URL}).`
                )
              )
            }

            if (!isEnumerableObject(operations) && !Array.isArray(operations))
              return exit(
                createError(
                  400,
                  `Invalid type for the operations multipart field (${SPEC_URL}).`
                )
              )
            operationsPath = objectPath(operations)
            break

          case 'map': {
            if (!operations)
              return exit(
                createError(
                  400,
                  `Misordered multipart fields; map should follow operations (${SPEC_URL}).`
                )
              )
            let parsedMap

            try {
              parsedMap = JSON.parse(value)
            } catch (error) {
              return exit(
                createError(
                  400,
                  `Invalid JSON in the map multipart field (${SPEC_URL}).`
                )
              )
            }

            if (!isEnumerableObject(parsedMap))
              return exit(
                createError(
                  400,
                  `Invalid type for the map multipart field (${SPEC_URL}).`
                )
              )
            const mapEntries = Object.entries(parsedMap)
            if (mapEntries.length > maxFiles)
              return exit(
                createError(413, `${maxFiles} max file uploads exceeded.`)
              )
            map = new Map()

            for (const [fieldName, paths] of mapEntries) {
              if (!Array.isArray(paths))
                return exit(
                  createError(
                    400,
                    `Invalid type for the map multipart field entry key ${fieldName} array (${SPEC_URL}).`
                  )
                )
              map.set(fieldName, new Upload())

              for (const [index, path] of paths.entries()) {
                if (typeof path !== 'string')
                  return exit(
                    createError(
                      400,
                      `Invalid type for the map multipart field entry key ${fieldName} array index ${index} value (${SPEC_URL}).`
                    )
                  )

                try {
                  operationsPath.set(path, map.get(fieldName).promise)
                } catch (error) {
                  return exit(
                    createError(
                      400,
                      `Invalid object path for the map multipart field entry key ${fieldName} array index ${index} value ${path} (${SPEC_URL}).`
                    )
                  )
                }
              }
            }

            resolve(operations)
          }
        }
      }
    )

    parser.on('file', (fieldName, stream, filename, encoding, mimetype) => {
      if (exitError) {
        ignoreStream(stream)
        return
      }

      if (!map) {
        ignoreStream(stream)
        return exit(
          createError(
            400,
            `Misordered multipart fields; files should follow map (${SPEC_URL}).`
          )
        )
      }

      currentStream = stream
      stream.on('end', () => {
        currentStream = null
      })

      const upload = map.get(fieldName)
      if (!upload) {
        ignoreStream(stream)
        return
      }

      const capacitor = new WriteStream()
      capacitor.on('error', () => {
        stream.unpipe()
        stream.resume()
      })

      stream.on('limit', () => {
        stream.unpipe()
        capacitor.destroy(
          createError(
            413,
            `File truncated as it exceeds the ${maxFileSize} byte size limit.`
          )
        )
      })

      stream.on('error', error => {
        stream.unpipe() // istanbul ignore next
        capacitor.destroy(exitError || error)
      })

      stream.pipe(capacitor)

      const file = {
        filename,
        mimetype,
        encoding,
        createReadStream() {
          const error = capacitor.error || (released ? exitError : null)
          if (error) throw error
          return capacitor.createReadStream()
        }
      }

      let capacitorStream
      Object.defineProperty(file, 'stream', {
        get: util.deprecate(function() {
          if (!capacitorStream) capacitorStream = this.createReadStream()
          return capacitorStream
        }, 'File upload property stream is deprecated. Use createReadStream() instead.')
      })
      Object.defineProperty(file, 'capacitor', {
        value: capacitor
      })

      upload.resolve(file)
    })

    parser.once('filesLimit', () =>
      exit(createError(413, `${maxFiles} max file uploads exceeded.`))
    )

    parser.once('finish', () => {
      request.unpipe(parser)
      request.resume()

      if (!operations)
        return exit(
          createError(
            400,
            `Missing multipart field operations (${SPEC_URL}).`
          )
        )

      if (!map)
        return exit(
          createError(400, `Missing multipart field map (${SPEC_URL}).`)
        )

      for (const upload of map.values())
        if (!upload.file)
          upload.reject(createError(400, 'File missing in the request.'))
    })

    parser.once('error', exit)

    response.once('finish', release)
    response.once('close', release)

    request.once('close', abort)
    request.once('end', () => {
      request.removeListener('close', abort)
    })

    request.pipe(parser)
  })

View File

@@ -0,0 +1,81 @@
{
  "name": "@apollographql/graphql-upload-8-fork",
  "version": "8.1.4",
  "description": "Fork of graphql-upload@8 that works with graphql@15 for compatibility with apollo-server@2",
  "license": "MIT",
  "author": "Apollo <opensource@apollographql.com>",
  "repository": "github:apollographql/graphql-upload",
  "homepage": "https://github.com/apollographql/graphql-upload#readme",
  "keywords": [
    "graphql",
    "upload",
    "file",
    "multipart",
    "server",
    "koa",
    "express",
    "apollo",
    "esm",
    "mjs"
  ],
  "files": [
    "lib",
    "index.d.ts",
    "!*.test.*",
    "!test-helpers"
  ],
  "main": "lib",
  "types": "index.d.ts",
  "engines": {
    "node": ">=8.5"
  },
  "browserslist": "node >= 8.5",
  "peerDependencies": {
    "graphql": "0.13.1 - 15"
  },
  "dependencies": {
    "@types/express": "*",
    "@types/fs-capacitor": "^2.0.0",
    "@types/koa": "*",
    "busboy": "^0.3.1",
    "fs-capacitor": "^2.0.4",
    "http-errors": "^1.7.3",
    "object-path": "^0.11.4"
  },
  "devDependencies": {
    "@babel/cli": "^7.6.3",
    "@babel/core": "^7.6.3",
    "@babel/preset-env": "^7.6.3",
    "babel-eslint": "^10.0.3",
    "eslint": "^6.5.1",
    "eslint-config-env": "^9.1.0",
    "eslint-config-prettier": "^6.4.0",
    "eslint-plugin-import": "^2.18.2",
    "eslint-plugin-import-order-alphabetical": "^1.0.0",
    "eslint-plugin-jsdoc": "^15.9.10",
    "eslint-plugin-node": "^10.0.0",
    "eslint-plugin-prettier": "^3.1.1",
    "express": "^4.17.1",
    "express-async-handler": "^1.1.4",
    "form-data": "^2.5.1",
    "graphql": "^14.5.8",
    "husky": "^3.0.8",
    "koa": "^2.8.2",
    "lint-staged": "^9.4.2",
    "node-fetch": "^2.6.0",
    "prettier": "^1.18.2",
    "tap": "^14.6.9"
  },
  "scripts": {
    "prepare": "npm run prepare:clean && npm run prepare:mjs && npm run prepare:js && npm run prepare:prettier",
    "prepare:clean": "rm -rf lib",
    "prepare:mjs": "BABEL_ESM=1 babel src -d lib --keep-file-extension",
    "prepare:js": "babel src -d lib",
    "prepare:prettier": "prettier 'lib/**/*.{mjs,js}' readme.md --write",
    "test": "npm run test:eslint && npm run test:prettier && npm run test:tap",
    "test:eslint": "eslint . --ext mjs,js",
    "test:prettier": "prettier '**/*.{json,yml,md}' -l",
    "test:tap": "tap --test-ignore=src",
    "prepublishOnly": "npm test"
  }
}

View File

@@ -0,0 +1,11 @@
This is a fork of [graphql-upload](https://github.com/jaydenseric/graphql-upload) by Jayden Seric which Apollo has created purely for the internal use of [Apollo Server](https://github.com/apollographql/apollo-server).

Apollo Server v2 depends on `graphql-upload` so that you can accept file uploads in your servers without having to depend on `graphql-upload` yourself. It currently depends on v8 of `graphql-upload`.

`graphql-upload` made backwards-incompatible changes after v8, such as changing which Node versions are supported and removing the deprecated `stream` property in favor of `createReadStream`. Because of this, we cannot upgrade the version of `graphql-upload` used by Apollo Server past v8 without potentially breaking users.

However, the latest release of `graphql-upload@8` (8.1.0) declares peer dependencies on `graphql` that do not include `graphql@15`. We want users of Apollo Server v2 to be able to use `graphql` v15 without getting peer dependency warnings (or errors, if they are using npm v7), so we have forked `graphql-upload` v8 just to extend the peer dependency range.

We do not recommend that you depend on this fork directly. Our recommendation is that if you want to use uploads in your GraphQL server, you should consider disabling Apollo Server's built-in `graphql-upload` integration by passing `uploads: false` to `new ApolloServer` and using `graphql-upload` directly. That way, you can use the latest and greatest version of `graphql-upload`. We currently intend to remove the integration from Apollo Server v3.
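
By way of illustration only (this sketch is not part of the upstream documentation), here is roughly what that recommendation looks like with `apollo-server-express` v2 and a standalone `graphql-upload` dependency; the schema, resolver, limits, and port below are hypothetical placeholders:

```ts
import express from 'express'
import { ApolloServer, gql } from 'apollo-server-express'
import { GraphQLUpload, graphqlUploadExpress } from 'graphql-upload'

// Hypothetical schema: one mutation that accepts the Upload scalar.
const typeDefs = gql`
  scalar Upload
  type Query {
    ok: Boolean
  }
  type Mutation {
    singleUpload(file: Upload!): String
  }
`

const resolvers = {
  // Map the schema's Upload scalar to the standalone package's implementation.
  Upload: GraphQLUpload,
  Query: { ok: () => true },
  Mutation: {
    singleUpload: async (_parent: unknown, { file }: { file: any }) => {
      const { filename, createReadStream } = await file
      createReadStream().resume() // drain the stream; a real server would store it
      return filename
    }
  }
}

const app = express()

// Let graphql-upload itself parse multipart requests…
app.use(graphqlUploadExpress({ maxFileSize: 10000000, maxFiles: 5 }))

// …and turn off Apollo Server v2's built-in integration.
const server = new ApolloServer({ typeDefs, resolvers, uploads: false })
server.applyMiddleware({ app })

app.listen(4000)
```

With `uploads: false`, Apollo Server no longer injects its own copy of the scalar or the middleware, so the `graphql-upload` version in play is entirely under your control.
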
This fork also contains the TypeScript typings from [DefinitelyTyped](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/graphql-upload), so you don't have to try to combine this fork with `@types/graphql-upload`.
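
As a small sketch of what those bundled typings enable (the resolver name and destination path below are hypothetical), a TypeScript resolver can consume an upload through the `FileUpload` interface and `createReadStream()`:

```ts
import { createWriteStream } from 'fs'
import { FileUpload } from '@apollographql/graphql-upload-8-fork'

// Hypothetical resolver: the Upload scalar resolves to a promise for a
// FileUpload once processRequest has parsed the multipart request.
export async function singleUpload(
  _parent: unknown,
  { file }: { file: Promise<FileUpload> }
): Promise<string> {
  const { filename, mimetype, createReadStream } = await file

  // Stream the upload to a hypothetical local path.
  await new Promise<void>((resolve, reject) =>
    createReadStream()
      .pipe(createWriteStream(`/tmp/${filename}`))
      .on('finish', () => resolve())
      .on('error', reject)
  )

  return `${filename} (${mimetype})`
}
```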