Initial Save

jackbeeby
2025-03-28 12:30:19 +11:00
parent e381994f19
commit d8773925e8
9910 changed files with 982718 additions and 0 deletions

21
node_modules/@apollographql/apollo-tools/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2016 Meteor Development Group, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -0,0 +1,13 @@
import { GraphQLSchema, DocumentNode, GraphQLError } from "graphql";
import { GraphQLResolverMap } from "./schema/resolverMap";
export interface GraphQLSchemaModule {
typeDefs: DocumentNode;
resolvers?: GraphQLResolverMap<any>;
}
interface GraphQLServiceDefinition {
schema?: GraphQLSchema;
errors?: GraphQLError[];
}
export declare function buildServiceDefinition(modules: (GraphQLSchemaModule | DocumentNode)[]): GraphQLServiceDefinition;
export {};
//# sourceMappingURL=buildServiceDefinition.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"buildServiceDefinition.d.ts","sourceRoot":"","sources":["../src/buildServiceDefinition.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,aAAa,EACb,YAAY,EAMZ,YAAY,EAQb,MAAM,SAAS,CAAC;AAEjB,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAG1D,MAAM,WAAW,mBAAmB;IAClC,QAAQ,EAAE,YAAY,CAAC;IACvB,SAAS,CAAC,EAAE,kBAAkB,CAAC,GAAG,CAAC,CAAC;CACrC;AAED,UAAU,wBAAwB;IAChC,MAAM,CAAC,EAAE,aAAa,CAAC;IACvB,MAAM,CAAC,EAAE,YAAY,EAAE,CAAC;CACzB;AAMD,wBAAgB,sBAAsB,CACpC,OAAO,EAAE,CAAC,mBAAmB,GAAG,YAAY,CAAC,EAAE,GAC9C,wBAAwB,CA+K1B"}


@@ -0,0 +1,168 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.buildServiceDefinition = void 0;
const graphql_1 = require("graphql");
const graphql_2 = require("./utilities/graphql");
const predicates_1 = require("./utilities/predicates");
function flattened(arr) {
return new Array().concat(...arr);
}
function buildServiceDefinition(modules) {
const errors = [];
const typeDefinitionsMap = Object.create(null);
const typeExtensionsMap = Object.create(null);
const directivesMap = Object.create(null);
const schemaDefinitions = [];
const schemaExtensions = [];
for (let module of modules) {
if ((0, graphql_2.isNode)(module) && (0, graphql_2.isDocumentNode)(module)) {
module = { typeDefs: module };
}
for (const definition of module.typeDefs.definitions) {
if ((0, graphql_1.isTypeDefinitionNode)(definition)) {
const typeName = definition.name.value;
if (typeDefinitionsMap[typeName]) {
typeDefinitionsMap[typeName].push(definition);
}
else {
typeDefinitionsMap[typeName] = [definition];
}
}
else if ((0, graphql_1.isTypeExtensionNode)(definition)) {
const typeName = definition.name.value;
if (typeExtensionsMap[typeName]) {
typeExtensionsMap[typeName].push(definition);
}
else {
typeExtensionsMap[typeName] = [definition];
}
}
else if (definition.kind === graphql_1.Kind.DIRECTIVE_DEFINITION) {
const directiveName = definition.name.value;
if (directivesMap[directiveName]) {
directivesMap[directiveName].push(definition);
}
else {
directivesMap[directiveName] = [definition];
}
}
else if (definition.kind === graphql_1.Kind.SCHEMA_DEFINITION) {
schemaDefinitions.push(definition);
}
else if (definition.kind === graphql_1.Kind.SCHEMA_EXTENSION) {
schemaExtensions.push(definition);
}
}
}
for (const [typeName, typeDefinitions] of Object.entries(typeDefinitionsMap)) {
if (typeDefinitions.length > 1) {
errors.push(new graphql_1.GraphQLError(`Type "${typeName}" was defined more than once.`, typeDefinitions));
}
}
for (const [directiveName, directives] of Object.entries(directivesMap)) {
if (directives.length > 1) {
errors.push(new graphql_1.GraphQLError(`Directive "${directiveName}" was defined more than once.`, directives));
}
}
let operationTypeMap;
if (schemaDefinitions.length > 0 || schemaExtensions.length > 0) {
operationTypeMap = {};
const schemaDefinition = schemaDefinitions[schemaDefinitions.length - 1];
const operationTypes = flattened([schemaDefinition, ...schemaExtensions]
.map((node) => node.operationTypes)
.filter(predicates_1.isNotNullOrUndefined));
for (const operationType of operationTypes) {
const typeName = operationType.type.name.value;
const operation = operationType.operation;
if (operationTypeMap[operation]) {
throw new graphql_1.GraphQLError(`Must provide only one ${operation} type in schema.`, [schemaDefinition]);
}
if (!(typeDefinitionsMap[typeName] || typeExtensionsMap[typeName])) {
throw new graphql_1.GraphQLError(`Specified ${operation} type "${typeName}" not found in document.`, [schemaDefinition]);
}
operationTypeMap[operation] = typeName;
}
}
else {
operationTypeMap = {
query: "Query",
mutation: "Mutation",
subscription: "Subscription",
};
}
for (const [typeName, typeExtensions] of Object.entries(typeExtensionsMap)) {
if (!typeDefinitionsMap[typeName]) {
if (Object.values(operationTypeMap).includes(typeName)) {
typeDefinitionsMap[typeName] = [
{
kind: graphql_1.Kind.OBJECT_TYPE_DEFINITION,
name: {
kind: graphql_1.Kind.NAME,
value: typeName,
},
},
];
}
else {
errors.push(new graphql_1.GraphQLError(`Cannot extend type "${typeName}" because it does not exist in the existing schema.`, typeExtensions));
}
}
}
if (errors.length > 0) {
return { errors };
}
try {
const typeDefinitions = flattened(Object.values(typeDefinitionsMap));
const directives = flattened(Object.values(directivesMap));
let schema = (0, graphql_1.buildASTSchema)({
kind: graphql_1.Kind.DOCUMENT,
definitions: [...typeDefinitions, ...directives],
});
const typeExtensions = flattened(Object.values(typeExtensionsMap));
if (typeExtensions.length > 0) {
schema = (0, graphql_1.extendSchema)(schema, {
kind: graphql_1.Kind.DOCUMENT,
definitions: typeExtensions,
});
}
for (const module of modules) {
if ("kind" in module || !module.resolvers)
continue;
addResolversToSchema(schema, module.resolvers);
}
return { schema };
}
catch (error) {
return { errors: [error] };
}
}
exports.buildServiceDefinition = buildServiceDefinition;
function addResolversToSchema(schema, resolvers) {
for (const [typeName, fieldConfigs] of Object.entries(resolvers)) {
const type = schema.getType(typeName);
if (!(0, graphql_1.isObjectType)(type))
continue;
const fieldMap = type.getFields();
for (const [fieldName, fieldConfig] of Object.entries(fieldConfigs)) {
if (fieldName.startsWith("__")) {
type[fieldName.substring(2)] = fieldConfig;
continue;
}
const field = fieldMap[fieldName];
if (!field)
continue;
if (typeof fieldConfig === "function") {
field.resolve = fieldConfig;
}
else {
if (fieldConfig.resolve) {
field.resolve = fieldConfig.resolve;
}
if (fieldConfig.subscribe) {
field.subscribe = fieldConfig.subscribe;
}
}
}
}
}
//# sourceMappingURL=buildServiceDefinition.js.map

File diff suppressed because one or more lines are too long


@@ -0,0 +1,4 @@
export * from "./utilities";
export * from "./schema";
export * from "./buildServiceDefinition";
//# sourceMappingURL=index.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAC;AAE5B,cAAc,UAAU,CAAC;AACzB,cAAc,0BAA0B,CAAC"}

20
node_modules/@apollographql/apollo-tools/lib/index.js generated vendored Normal file

@@ -0,0 +1,20 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
__exportStar(require("./utilities"), exports);
__exportStar(require("./schema"), exports);
__exportStar(require("./buildServiceDefinition"), exports);
//# sourceMappingURL=index.js.map


@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,8CAA4B;AAE5B,2CAAyB;AACzB,2DAAyC"}


@@ -0,0 +1,3 @@
export * from "./resolverMap";
export * from "./resolveObject";
//# sourceMappingURL=index.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/schema/index.ts"],"names":[],"mappings":"AAAA,cAAc,eAAe,CAAC;AAC9B,cAAc,iBAAiB,CAAC"}


@@ -0,0 +1,19 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
__exportStar(require("./resolverMap"), exports);
__exportStar(require("./resolveObject"), exports);
//# sourceMappingURL=index.js.map


@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/schema/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,gDAA8B;AAC9B,kDAAgC"}


@@ -0,0 +1,11 @@
import { GraphQLResolveInfo, FieldNode } from "graphql";
export declare type GraphQLObjectResolver<TSource, TContext> = (source: TSource, fields: Record<string, ReadonlyArray<FieldNode>>, context: TContext, info: GraphQLResolveInfo) => any;
declare module "graphql/type/definition" {
interface GraphQLObjectType {
resolveObject?: GraphQLObjectResolver<any, any>;
}
interface GraphQLObjectTypeConfig<TSource, TContext> {
resolveObject?: GraphQLObjectResolver<TSource, TContext>;
}
}
//# sourceMappingURL=resolveObject.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"resolveObject.d.ts","sourceRoot":"","sources":["../../src/schema/resolveObject.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,SAAS,EAAE,MAAM,SAAS,CAAC;AAExD,oBAAY,qBAAqB,CAAC,OAAO,EAAE,QAAQ,IAAI,CACrD,MAAM,EAAE,OAAO,EACf,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,aAAa,CAAC,SAAS,CAAC,CAAC,EAChD,OAAO,EAAE,QAAQ,EACjB,IAAI,EAAE,kBAAkB,KACrB,GAAG,CAAC;AAET,OAAO,QAAQ,yBAAyB,CAAC;IACvC,UAAU,iBAAiB;QACzB,aAAa,CAAC,EAAE,qBAAqB,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;KACjD;IAED,UAAU,uBAAuB,CAAC,OAAO,EAAE,QAAQ;QACjD,aAAa,CAAC,EAAE,qBAAqB,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;KAC1D;CACF"}


@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=resolveObject.js.map


@@ -0,0 +1 @@
{"version":3,"file":"resolveObject.js","sourceRoot":"","sources":["../../src/schema/resolveObject.ts"],"names":[],"mappings":""}


@@ -0,0 +1,19 @@
import { GraphQLFieldResolver } from "graphql";
export interface GraphQLResolverMap<TContext> {
[typeName: string]: {
[fieldName: string]: GraphQLFieldResolver<any, TContext> | {
requires?: string;
resolve: GraphQLFieldResolver<any, TContext>;
subscribe?: undefined;
} | {
requires?: string;
resolve?: undefined;
subscribe: GraphQLFieldResolver<any, TContext>;
} | {
requires?: string;
resolve: GraphQLFieldResolver<any, TContext>;
subscribe: GraphQLFieldResolver<any, TContext>;
};
};
}
//# sourceMappingURL=resolverMap.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"resolverMap.d.ts","sourceRoot":"","sources":["../../src/schema/resolverMap.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAE,MAAM,SAAS,CAAC;AAE/C,MAAM,WAAW,kBAAkB,CAAC,QAAQ;IAC1C,CAAC,QAAQ,EAAE,MAAM,GAAG;QAClB,CAAC,SAAS,EAAE,MAAM,GACd,oBAAoB,CAAC,GAAG,EAAE,QAAQ,CAAC,GACnC;YACE,QAAQ,CAAC,EAAE,MAAM,CAAC;YAClB,OAAO,EAAE,oBAAoB,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;YAC7C,SAAS,CAAC,EAAE,SAAS,CAAC;SACvB,GACD;YACE,QAAQ,CAAC,EAAE,MAAM,CAAC;YAClB,OAAO,CAAC,EAAE,SAAS,CAAC;YACpB,SAAS,EAAE,oBAAoB,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;SAChD,GACD;YACE,QAAQ,CAAC,EAAE,MAAM,CAAC;YAClB,OAAO,EAAE,oBAAoB,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;YAC7C,SAAS,EAAE,oBAAoB,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;SAChD,CAAC;KACP,CAAC;CACH"}


@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=resolverMap.js.map


@@ -0,0 +1 @@
{"version":3,"file":"resolverMap.js","sourceRoot":"","sources":["../../src/schema/resolverMap.ts"],"names":[],"mappings":""}


@@ -0,0 +1,8 @@
import { ASTNode, TypeDefinitionNode, TypeExtensionNode, DocumentNode } from "graphql";
declare module "graphql/language/predicates" {
function isTypeDefinitionNode(node: ASTNode): node is TypeDefinitionNode;
function isTypeExtensionNode(node: ASTNode): node is TypeExtensionNode;
}
export declare function isNode(maybeNode: any): maybeNode is ASTNode;
export declare function isDocumentNode(node: ASTNode): node is DocumentNode;
//# sourceMappingURL=graphql.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"graphql.d.ts","sourceRoot":"","sources":["../../src/utilities/graphql.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,OAAO,EACP,kBAAkB,EAClB,iBAAiB,EACjB,YAAY,EAEb,MAAM,SAAS,CAAC;AAIjB,OAAO,QAAQ,6BAA6B,CAAC;IAC3C,SAAS,oBAAoB,CAAC,IAAI,EAAE,OAAO,GAAG,IAAI,IAAI,kBAAkB,CAAC;IACzE,SAAS,mBAAmB,CAAC,IAAI,EAAE,OAAO,GAAG,IAAI,IAAI,iBAAiB,CAAC;CACxE;AAED,wBAAgB,MAAM,CAAC,SAAS,EAAE,GAAG,GAAG,SAAS,IAAI,OAAO,CAE3D;AAED,wBAAgB,cAAc,CAAC,IAAI,EAAE,OAAO,GAAG,IAAI,IAAI,YAAY,CAElE"}


@@ -0,0 +1,13 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isDocumentNode = exports.isNode = void 0;
const graphql_1 = require("graphql");
function isNode(maybeNode) {
return maybeNode && typeof maybeNode.kind === "string";
}
exports.isNode = isNode;
function isDocumentNode(node) {
return isNode(node) && node.kind === graphql_1.Kind.DOCUMENT;
}
exports.isDocumentNode = isDocumentNode;
//# sourceMappingURL=graphql.js.map


@@ -0,0 +1 @@
{"version":3,"file":"graphql.js","sourceRoot":"","sources":["../../src/utilities/graphql.ts"],"names":[],"mappings":";;;AAAA,qCAMiB;AASjB,SAAgB,MAAM,CAAC,SAAc;IACnC,OAAO,SAAS,IAAI,OAAO,SAAS,CAAC,IAAI,KAAK,QAAQ,CAAC;AACzD,CAAC;AAFD,wBAEC;AAED,SAAgB,cAAc,CAAC,IAAa;IAC1C,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,KAAK,cAAI,CAAC,QAAQ,CAAC;AACrD,CAAC;AAFD,wCAEC"}


@@ -0,0 +1,4 @@
export * from "./invariant";
export * from "./predicates";
export * from "./graphql";
//# sourceMappingURL=index.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/utilities/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAC;AAC5B,cAAc,cAAc,CAAC;AAC7B,cAAc,WAAW,CAAC"}


@@ -0,0 +1,20 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", { value: true });
__exportStar(require("./invariant"), exports);
__exportStar(require("./predicates"), exports);
__exportStar(require("./graphql"), exports);
//# sourceMappingURL=index.js.map


@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/utilities/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;AAAA,8CAA4B;AAC5B,+CAA6B;AAC7B,4CAA0B"}


@@ -0,0 +1,2 @@
export declare function invariant(condition: any, message: string): void;
//# sourceMappingURL=invariant.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"invariant.d.ts","sourceRoot":"","sources":["../../src/utilities/invariant.ts"],"names":[],"mappings":"AAAA,wBAAgB,SAAS,CAAC,SAAS,EAAE,GAAG,EAAE,OAAO,EAAE,MAAM,QAIxD"}


@@ -0,0 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.invariant = void 0;
function invariant(condition, message) {
if (!condition) {
throw new Error(message);
}
}
exports.invariant = invariant;
//# sourceMappingURL=invariant.js.map


@@ -0,0 +1 @@
{"version":3,"file":"invariant.js","sourceRoot":"","sources":["../../src/utilities/invariant.ts"],"names":[],"mappings":";;;AAAA,SAAgB,SAAS,CAAC,SAAc,EAAE,OAAe;IACvD,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC;KAC1B;AACH,CAAC;AAJD,8BAIC"}


@@ -0,0 +1,2 @@
export declare function isNotNullOrUndefined<T>(value: T | null | undefined): value is T;
//# sourceMappingURL=predicates.d.ts.map


@@ -0,0 +1 @@
{"version":3,"file":"predicates.d.ts","sourceRoot":"","sources":["../../src/utilities/predicates.ts"],"names":[],"mappings":"AAAA,wBAAgB,oBAAoB,CAAC,CAAC,EACpC,KAAK,EAAE,CAAC,GAAG,IAAI,GAAG,SAAS,GAC1B,KAAK,IAAI,CAAC,CAEZ"}


@@ -0,0 +1,8 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isNotNullOrUndefined = void 0;
function isNotNullOrUndefined(value) {
return value !== null && typeof value !== "undefined";
}
exports.isNotNullOrUndefined = isNotNullOrUndefined;
//# sourceMappingURL=predicates.js.map


@@ -0,0 +1 @@
{"version":3,"file":"predicates.js","sourceRoot":"","sources":["../../src/utilities/predicates.ts"],"names":[],"mappings":";;;AAAA,SAAgB,oBAAoB,CAClC,KAA2B;IAE3B,OAAO,KAAK,KAAK,IAAI,IAAI,OAAO,KAAK,KAAK,WAAW,CAAC;AACxD,CAAC;AAJD,oDAIC"}

49
node_modules/@apollographql/apollo-tools/package.json generated vendored Normal file

@@ -0,0 +1,49 @@
{
"name": "@apollographql/apollo-tools",
"version": "0.5.4",
"author": "Apollo GraphQL <packages@apollographql.com>",
"license": "MIT",
"repository": {
"type": "git",
"url": "git+https://github.com/apollographql/apollo-tooling.git"
},
"homepage": "https://github.com/apollographql/apollo-tooling",
"bugs": "https://github.com/apollographql/apollo-tooling/issues",
"main": "lib/index.js",
"types": "lib/index.d.ts",
"engines": {
"node": ">=8",
"npm": ">=6"
},
"peerDependencies": {
"graphql": "^14.2.1 || ^15.0.0 || ^16.0.0"
},
"jest": {
"preset": "ts-jest",
"testEnvironment": "node",
"testMatch": null,
"testRegex": "/__tests__/.*\\.test\\.(js|ts)$",
"testPathIgnorePatterns": [
"<rootDir>/node_modules/",
"<rootDir>/lib/"
],
"moduleFileExtensions": [
"ts",
"js"
],
"transformIgnorePatterns": [
"/node_modules/"
],
"snapshotSerializers": [
"<rootDir>/src/__tests__/snapshotSerializers/astSerializer.ts",
"<rootDir>/src/__tests__/snapshotSerializers/graphQLTypeSerializer.ts"
],
"globals": {
"ts-jest": {
"tsconfig": "<rootDir>/tsconfig.test.json",
"diagnostics": false
}
}
},
"gitHead": "58b96377de23b35f31264fda805d967a63a800c7"
}


@@ -0,0 +1,246 @@
import {
GraphQLSchema,
DocumentNode,
TypeDefinitionNode,
DirectiveDefinitionNode,
isTypeDefinitionNode,
TypeExtensionNode,
isTypeExtensionNode,
GraphQLError,
buildASTSchema,
Kind,
extendSchema,
isObjectType,
SchemaDefinitionNode,
OperationTypeNode,
SchemaExtensionNode,
} from "graphql";
import { isNode, isDocumentNode } from "./utilities/graphql";
import { GraphQLResolverMap } from "./schema/resolverMap";
import { isNotNullOrUndefined } from "./utilities/predicates";
export interface GraphQLSchemaModule {
typeDefs: DocumentNode;
resolvers?: GraphQLResolverMap<any>;
}
interface GraphQLServiceDefinition {
schema?: GraphQLSchema;
errors?: GraphQLError[];
}
function flattened<T>(arr: ReadonlyArray<ReadonlyArray<T>>): ReadonlyArray<T> {
return new Array<T>().concat(...arr);
}
export function buildServiceDefinition(
modules: (GraphQLSchemaModule | DocumentNode)[]
): GraphQLServiceDefinition {
const errors: GraphQLError[] = [];
const typeDefinitionsMap: {
[name: string]: TypeDefinitionNode[];
} = Object.create(null);
const typeExtensionsMap: {
[name: string]: TypeExtensionNode[];
} = Object.create(null);
const directivesMap: {
[name: string]: DirectiveDefinitionNode[];
} = Object.create(null);
const schemaDefinitions: SchemaDefinitionNode[] = [];
const schemaExtensions: SchemaExtensionNode[] = [];
for (let module of modules) {
if (isNode(module) && isDocumentNode(module)) {
module = { typeDefs: module };
}
for (const definition of module.typeDefs.definitions) {
if (isTypeDefinitionNode(definition)) {
const typeName = definition.name.value;
if (typeDefinitionsMap[typeName]) {
typeDefinitionsMap[typeName].push(definition);
} else {
typeDefinitionsMap[typeName] = [definition];
}
} else if (isTypeExtensionNode(definition)) {
const typeName = definition.name.value;
if (typeExtensionsMap[typeName]) {
typeExtensionsMap[typeName].push(definition);
} else {
typeExtensionsMap[typeName] = [definition];
}
} else if (definition.kind === Kind.DIRECTIVE_DEFINITION) {
const directiveName = definition.name.value;
if (directivesMap[directiveName]) {
directivesMap[directiveName].push(definition);
} else {
directivesMap[directiveName] = [definition];
}
} else if (definition.kind === Kind.SCHEMA_DEFINITION) {
schemaDefinitions.push(definition);
} else if (definition.kind === Kind.SCHEMA_EXTENSION) {
schemaExtensions.push(definition);
}
}
}
for (const [typeName, typeDefinitions] of Object.entries(
typeDefinitionsMap
)) {
if (typeDefinitions.length > 1) {
errors.push(
new GraphQLError(
`Type "${typeName}" was defined more than once.`,
typeDefinitions
)
);
}
}
for (const [directiveName, directives] of Object.entries(directivesMap)) {
if (directives.length > 1) {
errors.push(
new GraphQLError(
`Directive "${directiveName}" was defined more than once.`,
directives
)
);
}
}
let operationTypeMap: { [operation in OperationTypeNode]?: string };
if (schemaDefinitions.length > 0 || schemaExtensions.length > 0) {
operationTypeMap = {};
// We should report an error if more than one schema definition is included,
// but this matches the current 'last definition wins' behavior of `buildASTSchema`.
const schemaDefinition = schemaDefinitions[schemaDefinitions.length - 1];
const operationTypes = flattened(
[schemaDefinition, ...schemaExtensions]
.map((node) => node.operationTypes)
.filter(isNotNullOrUndefined)
);
for (const operationType of operationTypes) {
const typeName = operationType.type.name.value;
const operation = operationType.operation;
if (operationTypeMap[operation]) {
throw new GraphQLError(
`Must provide only one ${operation} type in schema.`,
[schemaDefinition]
);
}
if (!(typeDefinitionsMap[typeName] || typeExtensionsMap[typeName])) {
throw new GraphQLError(
`Specified ${operation} type "${typeName}" not found in document.`,
[schemaDefinition]
);
}
operationTypeMap[operation] = typeName;
}
} else {
operationTypeMap = {
query: "Query",
mutation: "Mutation",
subscription: "Subscription",
};
}
for (const [typeName, typeExtensions] of Object.entries(typeExtensionsMap)) {
if (!typeDefinitionsMap[typeName]) {
if (Object.values(operationTypeMap).includes(typeName)) {
typeDefinitionsMap[typeName] = [
{
kind: Kind.OBJECT_TYPE_DEFINITION,
name: {
kind: Kind.NAME,
value: typeName,
},
},
];
} else {
errors.push(
new GraphQLError(
`Cannot extend type "${typeName}" because it does not exist in the existing schema.`,
typeExtensions
)
);
}
}
}
if (errors.length > 0) {
return { errors };
}
try {
const typeDefinitions = flattened(Object.values(typeDefinitionsMap));
const directives = flattened(Object.values(directivesMap));
let schema = buildASTSchema({
kind: Kind.DOCUMENT,
definitions: [...typeDefinitions, ...directives],
});
const typeExtensions = flattened(Object.values(typeExtensionsMap));
if (typeExtensions.length > 0) {
schema = extendSchema(schema, {
kind: Kind.DOCUMENT,
definitions: typeExtensions,
});
}
for (const module of modules) {
if ("kind" in module || !module.resolvers) continue;
addResolversToSchema(schema, module.resolvers);
}
return { schema };
} catch (error) {
return { errors: [error] };
}
}
function addResolversToSchema(
schema: GraphQLSchema,
resolvers: GraphQLResolverMap<any>
) {
for (const [typeName, fieldConfigs] of Object.entries(resolvers)) {
const type = schema.getType(typeName);
if (!isObjectType(type)) continue;
const fieldMap = type.getFields();
for (const [fieldName, fieldConfig] of Object.entries(fieldConfigs)) {
if (fieldName.startsWith("__")) {
(type as any)[fieldName.substring(2)] = fieldConfig;
continue;
}
const field = fieldMap[fieldName];
if (!field) continue;
if (typeof fieldConfig === "function") {
field.resolve = fieldConfig;
} else {
if (fieldConfig.resolve) {
field.resolve = fieldConfig.resolve;
}
if (fieldConfig.subscribe) {
field.subscribe = fieldConfig.subscribe;
}
}
}
}
}


@@ -0,0 +1,4 @@
export * from "./utilities";
export * from "./schema";
export * from "./buildServiceDefinition";


@@ -0,0 +1,2 @@
export * from "./resolverMap";
export * from "./resolveObject";


@@ -0,0 +1,18 @@
import { GraphQLResolveInfo, FieldNode } from "graphql";
export type GraphQLObjectResolver<TSource, TContext> = (
source: TSource,
fields: Record<string, ReadonlyArray<FieldNode>>,
context: TContext,
info: GraphQLResolveInfo
) => any;
declare module "graphql/type/definition" {
interface GraphQLObjectType {
resolveObject?: GraphQLObjectResolver<any, any>;
}
interface GraphQLObjectTypeConfig<TSource, TContext> {
resolveObject?: GraphQLObjectResolver<TSource, TContext>;
}
}


@@ -0,0 +1,23 @@
import { GraphQLFieldResolver } from "graphql";
export interface GraphQLResolverMap<TContext> {
[typeName: string]: {
[fieldName: string]:
| GraphQLFieldResolver<any, TContext>
| {
requires?: string;
resolve: GraphQLFieldResolver<any, TContext>;
subscribe?: undefined;
}
| {
requires?: string;
resolve?: undefined;
subscribe: GraphQLFieldResolver<any, TContext>;
}
| {
requires?: string;
resolve: GraphQLFieldResolver<any, TContext>;
subscribe: GraphQLFieldResolver<any, TContext>;
};
};
}


@@ -0,0 +1,22 @@
import {
ASTNode,
TypeDefinitionNode,
TypeExtensionNode,
DocumentNode,
Kind,
} from "graphql";
// FIXME: We should add proper type guards for these predicate functions
// to `@types/graphql`.
declare module "graphql/language/predicates" {
function isTypeDefinitionNode(node: ASTNode): node is TypeDefinitionNode;
function isTypeExtensionNode(node: ASTNode): node is TypeExtensionNode;
}
export function isNode(maybeNode: any): maybeNode is ASTNode {
return maybeNode && typeof maybeNode.kind === "string";
}
export function isDocumentNode(node: ASTNode): node is DocumentNode {
return isNode(node) && node.kind === Kind.DOCUMENT;
}


@@ -0,0 +1,3 @@
export * from "./invariant";
export * from "./predicates";
export * from "./graphql";


@@ -0,0 +1,5 @@
export function invariant(condition: any, message: string) {
if (!condition) {
throw new Error(message);
}
}


@@ -0,0 +1,5 @@
export function isNotNullOrUndefined<T>(
value: T | null | undefined
): value is T {
return value !== null && typeof value !== "undefined";
}

File diff suppressed because one or more lines are too long


@@ -0,0 +1,5 @@
# @apollographql/graphql-playground-html
**NOTE:** This is a fork of [`graphql-playground-html`](https://npm.im/graphql-playground-html) which is meant to be used by Apollo Server and only by Apollo Server. It is not intended to be used directly. Those looking to use GraphQL Playground directly can refer to [the upstream repository](https://github.com/prisma-labs/graphql-playground) for usage instructions.
> **SECURITY WARNING:** Via the upstream fork, this package had a severe XSS Reflection attack vulnerability until version `1.6.25` of this package. **While we have published a fix, users were only affected if they were using `@apollographql/graphql-playground-html` directly as their own custom middleware.** The direct usage of this package was never recommended as it provided no advantage over the upstream package in that regard. Users of Apollo Server who leverage this package automatically through the dependency declared within Apollo Server were not affected, since Apollo Server never provided dynamic facilities to customize playground options per request. Users of Apollo Server would have had to statically embed very explicit vulnerabilities (e.g., using malicious, unescaped code, `<script>` tags, etc.).


@@ -0,0 +1,5 @@
declare const getLoadingMarkup: () => {
script: string;
container: string;
};
export default getLoadingMarkup;

File diff suppressed because one or more lines are too long


@@ -0,0 +1 @@
{"version":3,"file":"get-loading-markup.js","sourceRoot":"","sources":["../src/get-loading-markup.ts"],"names":[],"mappings":";;AAAA,IAAM,gBAAgB,GAAG,cAAM,OAAA,CAAC;IAC9B,MAAM,EAAE,kKAKL;IACH,SAAS,EAAE,wofA4cZ;CACA,CAAC,EApd6B,CAod7B,CAAA;AAEF,kBAAe,gBAAgB,CAAA"}


@@ -0,0 +1 @@
export { renderPlaygroundPage, MiddlewareOptions, RenderPageOptions, } from './render-playground-page';


@@ -0,0 +1,5 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var render_playground_page_1 = require("./render-playground-page");
Object.defineProperty(exports, "renderPlaygroundPage", { enumerable: true, get: function () { return render_playground_page_1.renderPlaygroundPage; } });
//# sourceMappingURL=index.js.map


@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAAA,mEAIiC;AAH/B,8HAAA,oBAAoB,OAAA"}


@@ -0,0 +1,71 @@
export interface MiddlewareOptions {
endpoint?: string;
subscriptionEndpoint?: string;
workspaceName?: string;
env?: any;
config?: any;
settings?: ISettings;
schema?: IntrospectionResult;
tabs?: Tab[];
codeTheme?: EditorColours;
}
export declare type CursorShape = 'line' | 'block' | 'underline';
export declare type Theme = 'dark' | 'light';
export interface ISettings {
'general.betaUpdates': boolean;
'editor.cursorShape': CursorShape;
'editor.theme': Theme;
'editor.reuseHeaders': boolean;
'tracing.hideTracingResponse': boolean;
'queryPlan.hideQueryPlanResponse'?: boolean;
'editor.fontSize': number;
'editor.fontFamily': string;
'request.credentials': string;
'schema.polling.enable': boolean;
'schema.polling.endpointFilter': string;
'schema.polling.interval': number;
}
export interface EditorColours {
property: string;
comment: string;
punctuation: string;
keyword: string;
def: string;
qualifier: string;
attribute: string;
number: string;
string: string;
builtin: string;
string2: string;
variable: string;
meta: string;
atom: string;
ws: string;
selection: string;
cursorColor: string;
editorBackground: string;
resultBackground: string;
leftDrawerBackground: string;
rightDrawerBackground: string;
}
export interface IntrospectionResult {
__schema: any;
}
export interface RenderPageOptions extends MiddlewareOptions {
version?: string;
cdnUrl?: string;
env?: any;
title?: string;
faviconUrl?: string | null;
}
export interface Tab {
endpoint: string;
query: string;
name?: string;
variables?: string;
responses?: string[];
headers?: {
[key: string]: string;
};
}
export declare function renderPlaygroundPage(options: RenderPageOptions): string;


@@ -0,0 +1,59 @@
"use strict";
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.renderPlaygroundPage = void 0;
var xss_1 = require("xss");
var get_loading_markup_1 = require("./get-loading-markup");
var filter = function (val) {
return xss_1.filterXSS(val, {
// @ts-ignore
whiteList: [],
stripIgnoreTag: true,
stripIgnoreTagBody: ["script"]
});
};
var loading = get_loading_markup_1.default();
var reactPackageName = '@apollographql/graphql-playground-react';
var getCdnMarkup = function (_a) {
var version = _a.version, _b = _a.cdnUrl, cdnUrl = _b === void 0 ? '//cdn.jsdelivr.net/npm' : _b, faviconUrl = _a.faviconUrl;
var buildCDNUrl = function (packageName, suffix) { return filter(cdnUrl + "/" + packageName + (version ? "@" + version : '') + "/" + suffix || ''); };
return "\n <link\n rel=\"stylesheet\"\n href=\"" + buildCDNUrl(reactPackageName, 'build/static/css/index.css') + "\"\n />\n " + (typeof faviconUrl === 'string' ? "<link rel=\"shortcut icon\" href=\"" + filter(faviconUrl || '') + "\" />" : '') + "\n " + (faviconUrl === undefined ? "<link rel=\"shortcut icon\" href=\"" + buildCDNUrl(reactPackageName, 'build/favicon.png') + "\" />" : '') + "\n <script\n src=\"" + buildCDNUrl(reactPackageName, 'build/static/js/middleware.js') + "\"\n ></script>\n";
};
var renderConfig = function (config) {
return '<div id="playground-config" style="display: none;">' + xss_1.filterXSS(JSON.stringify(config), {
// @ts-ignore
whiteList: [],
}) + '</div>';
};
function renderPlaygroundPage(options) {
var extendedOptions = __assign(__assign({}, options), { canSaveConfig: false });
// for compatibility
if (options.subscriptionsEndpoint) {
extendedOptions.subscriptionEndpoint = filter(options.subscriptionsEndpoint || '');
}
if (options.config) {
extendedOptions.configString = JSON.stringify(options.config, null, 2);
}
if (!extendedOptions.endpoint && !extendedOptions.configString) {
/* tslint:disable-next-line */
console.warn("WARNING: You didn't provide an endpoint and don't have a .graphqlconfig. Make sure you have at least one of them.");
}
else if (extendedOptions.endpoint) {
extendedOptions.endpoint = filter(extendedOptions.endpoint || '');
}
return "\n <!DOCTYPE html>\n <html>\n <head>\n <meta charset=utf-8 />\n <meta name=\"viewport\" content=\"user-scalable=no, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0, minimal-ui\">\n <link href=\"https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700|Source+Code+Pro:400,700\" rel=\"stylesheet\">\n <title>" + (filter(extendedOptions.title) || 'GraphQL Playground') + "</title>\n " + (extendedOptions.env === 'react' || extendedOptions.env === 'electron'
? ''
: getCdnMarkup(extendedOptions)) + "\n </head>\n <body>\n <style type=\"text/css\">\n html {\n font-family: \"Open Sans\", sans-serif;\n overflow: hidden;\n }\n \n body {\n margin: 0;\n background: #172a3a;\n }\n \n .playgroundIn {\n -webkit-animation: playgroundIn 0.5s ease-out forwards;\n animation: playgroundIn 0.5s ease-out forwards;\n }\n \n @-webkit-keyframes playgroundIn {\n from {\n opacity: 0;\n -webkit-transform: translateY(10px);\n -ms-transform: translateY(10px);\n transform: translateY(10px);\n }\n to {\n opacity: 1;\n -webkit-transform: translateY(0);\n -ms-transform: translateY(0);\n transform: translateY(0);\n }\n }\n \n @keyframes playgroundIn {\n from {\n opacity: 0;\n -webkit-transform: translateY(10px);\n -ms-transform: translateY(10px);\n transform: translateY(10px);\n }\n to {\n opacity: 1;\n -webkit-transform: translateY(0);\n -ms-transform: translateY(0);\n transform: translateY(0);\n }\n }\n </style>\n " + loading.container + "\n " + renderConfig(extendedOptions) + "\n <div id=\"root\" />\n <script type=\"text/javascript\">\n window.addEventListener('load', function (event) {\n " + loading.script + "\n \n const root = document.getElementById('root');\n root.classList.add('playgroundIn');\n const configText = document.getElementById('playground-config').innerText\n if(configText && configText.length) {\n try {\n GraphQLPlayground.init(root, JSON.parse(configText))\n }\n catch(err) {\n console.error(\"could not find config\")\n }\n }\n })\n </script>\n </body>\n </html>\n";
}
exports.renderPlaygroundPage = renderPlaygroundPage;
//# sourceMappingURL=render-playground-page.js.map


@@ -0,0 +1 @@
{"version":3,"file":"render-playground-page.js","sourceRoot":"","sources":["../src/render-playground-page.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,2BAAgC;AAEhC,2DAAmD;AA6EnD,IAAM,MAAM,GAAG,UAAC,GAAG;IACjB,OAAO,eAAS,CAAC,GAAG,EAAE;QACpB,aAAa;QACb,SAAS,EAAE,EAAE;QACb,cAAc,EAAE,IAAI;QACpB,kBAAkB,EAAE,CAAC,QAAQ,CAAC;KAC/B,CAAC,CAAA;AACJ,CAAC,CAAA;AAGD,IAAM,OAAO,GAAG,4BAAgB,EAAE,CAAA;AAElC,IAAM,gBAAgB,GAAG,yCAAyC,CAAC;AACnE,IAAM,YAAY,GAAG,UAAC,EAA0D;QAAxD,OAAO,aAAA,EAAE,cAAiC,EAAjC,MAAM,mBAAG,wBAAwB,KAAA,EAAE,UAAU,gBAAA;IAC5E,IAAM,WAAW,GAAG,UAAC,WAAmB,EAAE,MAAc,IAAK,OAAA,MAAM,CAAI,MAAM,SAAI,WAAW,IAAG,OAAO,CAAC,CAAC,CAAC,MAAI,OAAS,CAAC,CAAC,CAAC,EAAE,UAAI,MAAQ,IAAI,EAAE,CAAC,EAAjF,CAAiF,CAAA;IAC9I,OAAO,yDAGK,WAAW,CAAC,gBAAgB,EAAE,4BAA4B,CAAC,yBAEnE,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,wCAAmC,MAAM,CAAC,UAAU,IAAI,EAAE,CAAC,UAAM,CAAC,CAAC,CAAC,EAAE,gBACvG,UAAU,KAAK,SAAS,CAAC,CAAC,CAAC,wCAAmC,WAAW,CAAC,gBAAgB,EAAE,mBAAmB,CAAC,UAAM,CAAC,CAAC,CAAC,EAAE,oCAEpH,WAAW,CAAC,gBAAgB,EAAE,+BAA+B,CAAC,yBAE1E,CAAA;AAAA,CAAC,CAAA;AAGF,IAAM,YAAY,GAAG,UAAC,MAAM;IAC1B,OAAO,qDAAqD,GAAG,eAAS,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,EAAE;QAC/F,aAAa;QACb,SAAS,EAAE,EAAE;KACd,CAAC,GAAG,QAAQ,CAAC;AAChB,CAAC,CAAA;AAED,SAAgB,oBAAoB,CAAC,OAA0B;IAC7D,IAAM,eAAe,yBAChB,OAAO,KACV,aAAa,EAAE,KAAK,GACrB,CAAA;IACD,oBAAoB;IACpB,IAAK,OAAe,CAAC,qBAAqB,EAAE;QAC1C,eAAe,CAAC,oBAAoB,GAAG,MAAM,CAAE,OAAe,CAAC,qBAAqB,IAAI,EAAE,CAAC,CAAA;KAC5F;IACD,IAAI,OAAO,CAAC,MAAM,EAAE;QAClB,eAAe,CAAC,YAAY,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC,CAAA;KACvE;IACD,IAAI,CAAC,eAAe,CAAC,QAAQ,IAAI,CAAC,eAAe,CAAC,YAAY,EAAE;QAC9D,8BAA8B;QAC9B,OAAO,CAAC,IAAI,CACV,mHAAmH,CACpH,CAAA;KACF;SACI,IAAI,eAAe,CAAC,QAAQ,EAAE;QACjC,eAAe,CAAC,QAAQ,GAAG,MAAM,CAAC,eAAe,CAAC,QAAQ,IAAI,EAAE,CAAC,CAAA;KAClE;IAED,OAAO,wVAOI,MAAM,CAAC,eAAe,CAAC,KAAK,CAAC,IAAI,oBAAoB,wBAE9D,eAAe,CAAC,GAAG,KAAK,OAAO,IAAI,eAAe,CAAC,GAAG,KAAK,UAAU;QACnE,CAAC,CAAC,EAAE;QACJ,CAAC,CAAC,YAAY,CAAC,eAAe,CAAC,qsCAkD/B,OAAO,CAAC,SAAS,cACjB,YAAY,CAAC,eAAe,CAAC,4IAIzB,OAAO,CAAC,MAAM,seAiBvB,CAAA;AACD,CAAC;AA1GD,oDA0GC"}


@@ -0,0 +1,33 @@
{
"name": "@apollographql/graphql-playground-html",
"version": "1.6.27",
"homepage": "https://github.com/graphcool/graphql-playground/tree/master/packages/graphql-playground-html",
"description": "GraphQL IDE for better development workflows (GraphQL Subscriptions, interactive docs & collaboration).",
"contributors": [
"Tim Suchanek <tim@graph.cool>",
"Johannes Schickling <johannes@graph.cool>",
"Mohammad Rajabifard <mo.rajbi@gmail.com>"
],
"repository": "http://github.com/graphcool/graphql-playground.git",
"license": "MIT",
"main": "dist/index.js",
"files": [
"dist"
],
"scripts": {
"build": "rimraf dist && tsc",
"prepare": "npm run build"
},
"devDependencies": {
"@types/node": "12.12.34",
"rimraf": "3.0.2",
"typescript": "3.9.5"
},
"typings": "dist/index.d.ts",
"typescript": {
"definition": "dist/index.d.ts"
},
"dependencies": {
"xss": "^1.0.8"
}
}


@@ -0,0 +1,15 @@
# @apollographql/graphql-upload-8-fork changelog
See [the upstream changelog](https://github.com/jaydenseric/graphql-upload/blob/master/changelog.md) for more details.
## 8.1.3
Depend on the `@types` packages required to use the typings from this package. (Because this fork is intended only for use by the TypeScript project Apollo Server, this seems reasonable.)
## 8.1.2
Incorporate typings from DefinitelyTyped.
## 8.1.1
Add v15 to the graphql peerDependencies.


@@ -0,0 +1,48 @@
// Type definitions for graphql-upload 8.0
// Project: https://github.com/jaydenseric/graphql-upload#readme
// Definitions by: Mike Marcacci <https://github.com/mike-marcacci>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 3.3
/* tslint:disable:no-unnecessary-generics */
import { IncomingMessage, ServerResponse } from 'http'
import { GraphQLScalarType } from 'graphql'
import { RequestHandler } from 'express'
import { Middleware } from 'koa'
import { ReadStream } from 'fs-capacitor'
export interface UploadOptions {
maxFieldSize?: number
maxFileSize?: number
maxFiles?: number
}
export interface GraphQLOperation {
query: string
operationName?: null | string
variables?: null | unknown
}
export function processRequest(
request: IncomingMessage,
response: ServerResponse,
uploadOptions?: UploadOptions
): Promise<GraphQLOperation | GraphQLOperation[]>
export function graphqlUploadExpress(
uploadOptions?: UploadOptions
): RequestHandler
export function graphqlUploadKoa<StateT = any, CustomT = {}>(
uploadOptions?: UploadOptions
): Middleware<StateT, CustomT>
export const GraphQLUpload: GraphQLScalarType
export interface FileUpload {
filename: string
mimetype: string
encoding: string
createReadStream(): ReadStream
}


@@ -0,0 +1,21 @@
'use strict'
exports.__esModule = true
exports.GraphQLUpload = void 0
var _graphql = require('graphql')
const GraphQLUpload = new _graphql.GraphQLScalarType({
name: 'Upload',
description: 'The `Upload` scalar type represents a file upload.',
parseValue: value => value,
parseLiteral() {
throw new Error('Upload scalar literal unsupported.')
},
serialize() {
throw new Error('Upload scalar serialization unsupported.')
}
})
exports.GraphQLUpload = GraphQLUpload


@@ -0,0 +1,14 @@
import { GraphQLScalarType } from 'graphql'
export const GraphQLUpload = new GraphQLScalarType({
name: 'Upload',
description: 'The `Upload` scalar type represents a file upload.',
parseValue: value => value,
parseLiteral() {
throw new Error('Upload scalar literal unsupported.')
},
serialize() {
throw new Error('Upload scalar serialization unsupported.')
}
})


@@ -0,0 +1,6 @@
'use strict'
exports.__esModule = true
exports.SPEC_URL = void 0
const SPEC_URL = 'https://github.com/jaydenseric/graphql-multipart-request-spec'
exports.SPEC_URL = SPEC_URL


@@ -0,0 +1,2 @@
export const SPEC_URL =
'https://github.com/jaydenseric/graphql-multipart-request-spec'


@@ -0,0 +1,34 @@
'use strict'
exports.__esModule = true
exports.graphqlUploadExpress = void 0
var _processRequest = require('./processRequest')
const graphqlUploadExpress = ({
processRequest = _processRequest.processRequest,
...processRequestOptions
} = {}) => (request, response, next) => {
if (!request.is('multipart/form-data')) return next()
const finished = new Promise(resolve => request.on('end', resolve))
const { send } = response
response.send = (...args) => {
finished.then(() => {
response.send = send
response.send(...args)
})
}
processRequest(request, response, processRequestOptions)
.then(body => {
request.body = body
next()
})
.catch(error => {
if (error.status && error.expose) response.status(error.status)
next(error)
})
}
exports.graphqlUploadExpress = graphqlUploadExpress


@@ -0,0 +1,26 @@
import { processRequest as defaultProcessRequest } from './processRequest'
export const graphqlUploadExpress = ({
processRequest = defaultProcessRequest,
...processRequestOptions
} = {}) => (request, response, next) => {
if (!request.is('multipart/form-data')) return next()
const finished = new Promise(resolve => request.on('end', resolve))
const { send } = response
response.send = (...args) => {
finished.then(() => {
response.send = send
response.send(...args)
})
}
processRequest(request, response, processRequestOptions)
.then(body => {
request.body = body
next()
})
.catch(error => {
if (error.status && error.expose) response.status(error.status)
next(error)
})
}


@@ -0,0 +1,27 @@
'use strict'
exports.__esModule = true
exports.graphqlUploadKoa = void 0
var _processRequest = require('./processRequest')
const graphqlUploadKoa = ({
processRequest = _processRequest.processRequest,
...processRequestOptions
} = {}) => async (ctx, next) => {
if (!ctx.request.is('multipart/form-data')) return next()
const finished = new Promise(resolve => ctx.req.on('end', resolve))
try {
ctx.request.body = await processRequest(
ctx.req,
ctx.res,
processRequestOptions
)
await next()
} finally {
await finished
}
}
exports.graphqlUploadKoa = graphqlUploadKoa


@@ -0,0 +1,19 @@
import { processRequest as defaultProcessRequest } from './processRequest'
export const graphqlUploadKoa = ({
processRequest = defaultProcessRequest,
...processRequestOptions
} = {}) => async (ctx, next) => {
if (!ctx.request.is('multipart/form-data')) return next()
const finished = new Promise(resolve => ctx.req.on('end', resolve))
try {
ctx.request.body = await processRequest(
ctx.req,
ctx.res,
processRequestOptions
)
await next()
} finally {
await finished
}
}


@@ -0,0 +1,11 @@
'use strict'
exports.__esModule = true
exports.ignoreStream = void 0
const ignoreStream = stream => {
stream.on('error', () => {})
stream.resume()
}
exports.ignoreStream = ignoreStream


@@ -0,0 +1,4 @@
export const ignoreStream = stream => {
stream.on('error', () => {})
stream.resume()
}


@@ -0,0 +1,20 @@
'use strict'
exports.__esModule = true
exports.graphqlUploadExpress = exports.graphqlUploadKoa = exports.processRequest = exports.GraphQLUpload = void 0
var _GraphQLUpload = require('./GraphQLUpload')
exports.GraphQLUpload = _GraphQLUpload.GraphQLUpload
var _processRequest = require('./processRequest')
exports.processRequest = _processRequest.processRequest
var _graphqlUploadKoa = require('./graphqlUploadKoa')
exports.graphqlUploadKoa = _graphqlUploadKoa.graphqlUploadKoa
var _graphqlUploadExpress = require('./graphqlUploadExpress')
exports.graphqlUploadExpress = _graphqlUploadExpress.graphqlUploadExpress


@@ -0,0 +1,4 @@
export { GraphQLUpload } from './GraphQLUpload'
export { processRequest } from './processRequest'
export { graphqlUploadKoa } from './graphqlUploadKoa'
export { graphqlUploadExpress } from './graphqlUploadExpress'


@@ -0,0 +1,9 @@
'use strict'
exports.__esModule = true
exports.isEnumerableObject = void 0
const isEnumerableObject = value =>
typeof value === 'object' && value !== null && !Array.isArray(value)
exports.isEnumerableObject = isEnumerableObject


@@ -0,0 +1,2 @@
export const isEnumerableObject = value =>
typeof value === 'object' && value !== null && !Array.isArray(value)


@@ -0,0 +1,318 @@
'use strict'
exports.__esModule = true
exports.processRequest = void 0
var _util = _interopRequireDefault(require('util'))
var _busboy = _interopRequireDefault(require('busboy'))
var _fsCapacitor = require('fs-capacitor')
var _httpErrors = _interopRequireDefault(require('http-errors'))
var _objectPath = _interopRequireDefault(require('object-path'))
var _constants = require('./constants')
var _ignoreStream = require('./ignoreStream')
var _isEnumerableObject = require('./isEnumerableObject')
// istanbul ignore next
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : { default: obj }
}
class Upload {
constructor() {
this.promise = new Promise((resolve, reject) => {
this.resolve = file => {
this.file = file
resolve(file)
}
this.reject = reject
})
this.promise.catch(() => {})
}
}
const processRequest = (
request,
response,
{ maxFieldSize = 1000000, maxFileSize = Infinity, maxFiles = Infinity } = {}
) =>
new Promise((resolve, reject) => {
let released
let exitError
let currentStream
let operations
let operationsPath
let map
const parser = new _busboy.default({
headers: request.headers,
limits: {
fieldSize: maxFieldSize,
fields: 2,
fileSize: maxFileSize,
files: maxFiles
}
})
const exit = error => {
if (exitError) return
exitError = error
reject(exitError)
parser.destroy()
if (currentStream) currentStream.destroy(exitError)
if (map)
for (const upload of map.values())
if (!upload.file) upload.reject(exitError)
request.unpipe(parser)
setImmediate(() => {
request.resume()
})
}
const release = () => {
// istanbul ignore next
if (released) return
released = true
if (map)
for (const upload of map.values())
if (upload.file) upload.file.capacitor.destroy()
}
const abort = () => {
exit(
(0, _httpErrors.default)(
499,
'Request disconnected during file upload stream parsing.'
)
)
}
parser.on(
'field',
(fieldName, value, fieldNameTruncated, valueTruncated) => {
if (exitError) return
if (valueTruncated)
return exit(
(0, _httpErrors.default)(
413,
`The ${fieldName} multipart field value exceeds the ${maxFieldSize} byte size limit.`
)
)
switch (fieldName) {
case 'operations':
try {
operations = JSON.parse(value)
} catch (error) {
return exit(
(0, _httpErrors.default)(
400,
`Invalid JSON in the operations multipart field (${_constants.SPEC_URL}).`
)
)
}
if (
!(0, _isEnumerableObject.isEnumerableObject)(operations) &&
!Array.isArray(operations)
)
return exit(
(0, _httpErrors.default)(
400,
`Invalid type for the operations multipart field (${_constants.SPEC_URL}).`
)
)
operationsPath = (0, _objectPath.default)(operations)
break
case 'map': {
if (!operations)
return exit(
(0, _httpErrors.default)(
400,
`Misordered multipart fields; map should follow operations (${_constants.SPEC_URL}).`
)
)
let parsedMap
try {
parsedMap = JSON.parse(value)
} catch (error) {
return exit(
(0, _httpErrors.default)(
400,
`Invalid JSON in the map multipart field (${_constants.SPEC_URL}).`
)
)
}
if (!(0, _isEnumerableObject.isEnumerableObject)(parsedMap))
return exit(
(0, _httpErrors.default)(
400,
`Invalid type for the map multipart field (${_constants.SPEC_URL}).`
)
)
const mapEntries = Object.entries(parsedMap)
if (mapEntries.length > maxFiles)
return exit(
(0, _httpErrors.default)(
413,
`${maxFiles} max file uploads exceeded.`
)
)
map = new Map()
for (const [fieldName, paths] of mapEntries) {
if (!Array.isArray(paths))
return exit(
(0, _httpErrors.default)(
400,
`Invalid type for the map multipart field entry key ${fieldName} array (${_constants.SPEC_URL}).`
)
)
map.set(fieldName, new Upload())
for (const [index, path] of paths.entries()) {
if (typeof path !== 'string')
return exit(
(0, _httpErrors.default)(
400,
`Invalid type for the map multipart field entry key ${fieldName} array index ${index} value (${_constants.SPEC_URL}).`
)
)
try {
operationsPath.set(path, map.get(fieldName).promise)
} catch (error) {
return exit(
(0, _httpErrors.default)(
400,
`Invalid object path for the map multipart field entry key ${fieldName} array index ${index} value ${path} (${_constants.SPEC_URL}).`
)
)
}
}
}
resolve(operations)
}
}
}
)
parser.on('file', (fieldName, stream, filename, encoding, mimetype) => {
if (exitError) {
;(0, _ignoreStream.ignoreStream)(stream)
return
}
if (!map) {
;(0, _ignoreStream.ignoreStream)(stream)
return exit(
(0, _httpErrors.default)(
400,
`Misordered multipart fields; files should follow map (${_constants.SPEC_URL}).`
)
)
}
currentStream = stream
stream.on('end', () => {
currentStream = null
})
const upload = map.get(fieldName)
if (!upload) {
;(0, _ignoreStream.ignoreStream)(stream)
return
}
const capacitor = new _fsCapacitor.WriteStream()
capacitor.on('error', () => {
stream.unpipe()
stream.resume()
})
stream.on('limit', () => {
stream.unpipe()
capacitor.destroy(
(0, _httpErrors.default)(
413,
`File truncated as it exceeds the ${maxFileSize} byte size limit.`
)
)
})
stream.on('error', error => {
stream.unpipe() // istanbul ignore next
capacitor.destroy(exitError || error)
})
stream.pipe(capacitor)
const file = {
filename,
mimetype,
encoding,
createReadStream() {
const error = capacitor.error || (released ? exitError : null)
if (error) throw error
return capacitor.createReadStream()
}
}
let capacitorStream
Object.defineProperty(file, 'stream', {
get: _util.default.deprecate(function() {
if (!capacitorStream) capacitorStream = this.createReadStream()
return capacitorStream
}, 'File upload property stream is deprecated. Use createReadStream() instead.')
})
Object.defineProperty(file, 'capacitor', {
value: capacitor
})
upload.resolve(file)
})
parser.once('filesLimit', () =>
exit(
(0, _httpErrors.default)(413, `${maxFiles} max file uploads exceeded.`)
)
)
parser.once('finish', () => {
request.unpipe(parser)
request.resume()
if (!operations)
return exit(
(0, _httpErrors.default)(
400,
`Missing multipart field operations (${_constants.SPEC_URL}).`
)
)
if (!map)
return exit(
(0, _httpErrors.default)(
400,
`Missing multipart field map (${_constants.SPEC_URL}).`
)
)
for (const upload of map.values())
if (!upload.file)
upload.reject(
(0, _httpErrors.default)(400, 'File missing in the request.')
)
})
parser.once('error', exit)
response.once('finish', release)
response.once('close', release)
request.once('close', abort)
request.once('end', () => {
request.removeListener('close', abort)
})
request.pipe(parser)
})
exports.processRequest = processRequest


@@ -0,0 +1,286 @@
import util from 'util'
import Busboy from 'busboy'
import { WriteStream } from 'fs-capacitor'
import createError from 'http-errors'
import objectPath from 'object-path'
import { SPEC_URL } from './constants'
import { ignoreStream } from './ignoreStream'
import { isEnumerableObject } from './isEnumerableObject'
class Upload {
constructor() {
this.promise = new Promise((resolve, reject) => {
this.resolve = file => {
this.file = file
resolve(file)
}
this.reject = reject
})
this.promise.catch(() => {})
}
}
export const processRequest = (
request,
response,
{ maxFieldSize = 1000000, maxFileSize = Infinity, maxFiles = Infinity } = {}
) =>
new Promise((resolve, reject) => {
let released
let exitError
let currentStream
let operations
let operationsPath
let map
const parser = new Busboy({
headers: request.headers,
limits: {
fieldSize: maxFieldSize,
fields: 2,
fileSize: maxFileSize,
files: maxFiles
}
})
const exit = error => {
if (exitError) return
exitError = error
reject(exitError)
parser.destroy()
if (currentStream) currentStream.destroy(exitError)
if (map)
for (const upload of map.values())
if (!upload.file) upload.reject(exitError)
request.unpipe(parser)
setImmediate(() => {
request.resume()
})
}
const release = () => {
// istanbul ignore next
if (released) return
released = true
if (map)
for (const upload of map.values())
if (upload.file) upload.file.capacitor.destroy()
}
const abort = () => {
exit(
createError(
499,
'Request disconnected during file upload stream parsing.'
)
)
}
parser.on(
'field',
(fieldName, value, fieldNameTruncated, valueTruncated) => {
if (exitError) return
if (valueTruncated)
return exit(
createError(
413,
`The ${fieldName} multipart field value exceeds the ${maxFieldSize} byte size limit.`
)
)
switch (fieldName) {
case 'operations':
try {
operations = JSON.parse(value)
} catch (error) {
return exit(
createError(
400,
`Invalid JSON in the operations multipart field (${SPEC_URL}).`
)
)
}
if (!isEnumerableObject(operations) && !Array.isArray(operations))
return exit(
createError(
400,
`Invalid type for the operations multipart field (${SPEC_URL}).`
)
)
operationsPath = objectPath(operations)
break
case 'map': {
if (!operations)
return exit(
createError(
400,
`Misordered multipart fields; map should follow operations (${SPEC_URL}).`
)
)
let parsedMap
try {
parsedMap = JSON.parse(value)
} catch (error) {
return exit(
createError(
400,
`Invalid JSON in the map multipart field (${SPEC_URL}).`
)
)
}
if (!isEnumerableObject(parsedMap))
return exit(
createError(
400,
`Invalid type for the map multipart field (${SPEC_URL}).`
)
)
const mapEntries = Object.entries(parsedMap)
if (mapEntries.length > maxFiles)
return exit(
createError(413, `${maxFiles} max file uploads exceeded.`)
)
map = new Map()
for (const [fieldName, paths] of mapEntries) {
if (!Array.isArray(paths))
return exit(
createError(
400,
`Invalid type for the map multipart field entry key ${fieldName} array (${SPEC_URL}).`
)
)
map.set(fieldName, new Upload())
for (const [index, path] of paths.entries()) {
if (typeof path !== 'string')
return exit(
createError(
400,
`Invalid type for the map multipart field entry key ${fieldName} array index ${index} value (${SPEC_URL}).`
)
)
try {
operationsPath.set(path, map.get(fieldName).promise)
} catch (error) {
return exit(
createError(
400,
`Invalid object path for the map multipart field entry key ${fieldName} array index ${index} value ${path} (${SPEC_URL}).`
)
)
}
}
}
resolve(operations)
}
}
}
)
parser.on('file', (fieldName, stream, filename, encoding, mimetype) => {
if (exitError) {
ignoreStream(stream)
return
}
if (!map) {
ignoreStream(stream)
return exit(
createError(
400,
`Misordered multipart fields; files should follow map (${SPEC_URL}).`
)
)
}
currentStream = stream
stream.on('end', () => {
currentStream = null
})
const upload = map.get(fieldName)
if (!upload) {
ignoreStream(stream)
return
}
const capacitor = new WriteStream()
capacitor.on('error', () => {
stream.unpipe()
stream.resume()
})
stream.on('limit', () => {
stream.unpipe()
capacitor.destroy(
createError(
413,
`File truncated as it exceeds the ${maxFileSize} byte size limit.`
)
)
})
stream.on('error', error => {
stream.unpipe() // istanbul ignore next
capacitor.destroy(exitError || error)
})
stream.pipe(capacitor)
const file = {
filename,
mimetype,
encoding,
createReadStream() {
const error = capacitor.error || (released ? exitError : null)
if (error) throw error
return capacitor.createReadStream()
}
}
let capacitorStream
Object.defineProperty(file, 'stream', {
get: util.deprecate(function() {
if (!capacitorStream) capacitorStream = this.createReadStream()
return capacitorStream
}, 'File upload property stream is deprecated. Use createReadStream() instead.')
})
Object.defineProperty(file, 'capacitor', {
value: capacitor
})
upload.resolve(file)
})
parser.once('filesLimit', () =>
exit(createError(413, `${maxFiles} max file uploads exceeded.`))
)
parser.once('finish', () => {
request.unpipe(parser)
request.resume()
if (!operations)
return exit(
createError(
400,
`Missing multipart field operations (${SPEC_URL}).`
)
)
if (!map)
return exit(
createError(400, `Missing multipart field map (${SPEC_URL}).`)
)
for (const upload of map.values())
if (!upload.file)
upload.reject(createError(400, 'File missing in the request.'))
})
parser.once('error', exit)
response.once('finish', release)
response.once('close', release)
request.once('close', abort)
request.once('end', () => {
request.removeListener('close', abort)
})
request.pipe(parser)
})


@@ -0,0 +1,81 @@
{
"name": "@apollographql/graphql-upload-8-fork",
"version": "8.1.4",
"description": "Fork of graphql-upload@8 that works with graphql@15 for compatibility with apollo-server@2",
"license": "MIT",
"author": "Apollo <opensource@apollographql.com>",
"repository": "github:apollographql/graphql-upload",
"homepage": "https://github.com/apollographql/graphql-upload#readme",
"keywords": [
"graphql",
"upload",
"file",
"multipart",
"server",
"koa",
"express",
"apollo",
"esm",
"mjs"
],
"files": [
"lib",
"index.d.ts",
"!*.test.*",
"!test-helpers"
],
"main": "lib",
"types": "index.d.ts",
"engines": {
"node": ">=8.5"
},
"browserslist": "node >= 8.5",
"peerDependencies": {
"graphql": "0.13.1 - 15"
},
"dependencies": {
"@types/express": "*",
"@types/fs-capacitor": "^2.0.0",
"@types/koa": "*",
"busboy": "^0.3.1",
"fs-capacitor": "^2.0.4",
"http-errors": "^1.7.3",
"object-path": "^0.11.4"
},
"devDependencies": {
"@babel/cli": "^7.6.3",
"@babel/core": "^7.6.3",
"@babel/preset-env": "^7.6.3",
"babel-eslint": "^10.0.3",
"eslint": "^6.5.1",
"eslint-config-env": "^9.1.0",
"eslint-config-prettier": "^6.4.0",
"eslint-plugin-import": "^2.18.2",
"eslint-plugin-import-order-alphabetical": "^1.0.0",
"eslint-plugin-jsdoc": "^15.9.10",
"eslint-plugin-node": "^10.0.0",
"eslint-plugin-prettier": "^3.1.1",
"express": "^4.17.1",
"express-async-handler": "^1.1.4",
"form-data": "^2.5.1",
"graphql": "^14.5.8",
"husky": "^3.0.8",
"koa": "^2.8.2",
"lint-staged": "^9.4.2",
"node-fetch": "^2.6.0",
"prettier": "^1.18.2",
"tap": "^14.6.9"
},
"scripts": {
"prepare": "npm run prepare:clean && npm run prepare:mjs && npm run prepare:js && npm run prepare:prettier",
"prepare:clean": "rm -rf lib",
"prepare:mjs": "BABEL_ESM=1 babel src -d lib --keep-file-extension",
"prepare:js": "babel src -d lib",
"prepare:prettier": "prettier 'lib/**/*.{mjs,js}' readme.md --write",
"test": "npm run test:eslint && npm run test:prettier && npm run test:tap",
"test:eslint": "eslint . --ext mjs,js",
"test:prettier": "prettier '**/*.{json,yml,md}' -l",
"test:tap": "tap --test-ignore=src",
"prepublishOnly": "npm test"
}
}


@@ -0,0 +1,11 @@
This is a fork of [graphql-upload](https://github.com/jaydenseric/graphql-upload) by Jayden Seric which Apollo has created purely for the internal use of [Apollo Server](https://github.com/apollographql/apollo-server).
Apollo Server v2 depends on `graphql-upload` to provide an easy-to-use way to integrate `graphql-upload` into your servers without having to depend on `graphql-upload` yourself. It currently depends on v8 of `graphql-upload`.
`graphql-upload` made backwards-incompatible changes after v8, such as changing which Node versions are supported and removing the deprecated `stream` property in favor of `createReadStream`. Because of this, we cannot upgrade the version of `graphql-upload` used by Apollo Server past v8 without potentially breaking users.
However, the latest release of `graphql-upload@8` (8.1.0) declares a `graphql` peer dependency range that does not include `graphql@15`. We want users of Apollo Server v2 to be able to use graphql v15 without getting peer dependency warnings (or errors, when using npm v7), so we have forked `graphql-upload` v8 just to extend the peer dependency range.
We do not recommend that you depend on this fork directly. Our recommendation is that if you want to use uploads in your GraphQL server, you should consider disabling Apollo Server's built-in `graphql-upload` integration by passing `uploads: false` to `new ApolloServer` and using `graphql-upload` directly. That way, you can use the latest and greatest version of `graphql-upload`. We currently intend to remove the integration from Apollo Server v3.
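To make that recommendation concrete, here is a minimal sketch of such a setup (not taken from this repository). It assumes `apollo-server-express` v2, an Express app, and a `graphql-upload` release that still exposes `graphqlUploadExpress` and `GraphQLUpload` from its main entry point, as the v8 code vendored here does; newer `graphql-upload` releases may expose them from dedicated module paths instead.

```js
const express = require('express')
const { ApolloServer, gql } = require('apollo-server-express')
const { GraphQLUpload, graphqlUploadExpress } = require('graphql-upload')

// With the built-in integration disabled, the Upload scalar must be wired up manually.
const typeDefs = gql`
  scalar Upload
  type Query { ok: Boolean! }
  type Mutation { singleUpload(file: Upload!): String! }
`

const resolvers = {
  Upload: GraphQLUpload,
  Query: { ok: () => true },
  Mutation: {
    singleUpload: async (_parent, { file }) => {
      // `file` resolves to { filename, mimetype, encoding, createReadStream }.
      const { filename, createReadStream } = await file
      createReadStream().resume() // consume (or pipe) the upload stream
      return filename
    },
  },
}

// `uploads: false` turns off Apollo Server 2's bundled graphql-upload integration.
const server = new ApolloServer({ typeDefs, resolvers, uploads: false })

const app = express()
// Mount graphql-upload's own middleware ahead of the GraphQL endpoint instead.
app.use(graphqlUploadExpress({ maxFileSize: 10000000, maxFiles: 10 }))
server.applyMiddleware({ app })

app.listen(4000)
```

The `maxFileSize` and `maxFiles` limits correspond to the `UploadOptions` accepted by `processRequest` in the typings above.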
This fork also contains the TypeScript typings from [DefinitelyTyped](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/graphql-upload), so you don't have to try to combine this fork with `@types/graphql-upload`.