Initial Save
This commit is contained in:
573
node_modules/graphql/language/ast.d.ts
generated
vendored
Normal file
573
node_modules/graphql/language/ast.d.ts
generated
vendored
Normal file
@@ -0,0 +1,573 @@
|
||||
import { Source } from './source';
|
||||
import { TokenKindEnum } from './tokenKind';
|
||||
|
||||
/**
|
||||
* Contains a range of UTF-8 character offsets and token references that
|
||||
* identify the region of the source from which the AST derived.
|
||||
*/
|
||||
export interface Location {
|
||||
/**
|
||||
* The character offset at which this Node begins.
|
||||
*/
|
||||
readonly start: number;
|
||||
|
||||
/**
|
||||
* The character offset at which this Node ends.
|
||||
*/
|
||||
readonly end: number;
|
||||
|
||||
/**
|
||||
* The Token at which this Node begins.
|
||||
*/
|
||||
readonly startToken: Token;
|
||||
|
||||
/**
|
||||
* The Token at which this Node ends.
|
||||
*/
|
||||
readonly endToken: Token;
|
||||
|
||||
/**
|
||||
* The Source document the AST represents.
|
||||
*/
|
||||
readonly source: Source;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a range of characters represented by a lexical token
|
||||
* within a Source.
|
||||
*/
|
||||
export interface Token {
|
||||
/**
|
||||
* The kind of Token.
|
||||
*/
|
||||
readonly kind: TokenKindEnum;
|
||||
|
||||
/**
|
||||
* The character offset at which this Node begins.
|
||||
*/
|
||||
readonly start: number;
|
||||
|
||||
/**
|
||||
* The character offset at which this Node ends.
|
||||
*/
|
||||
readonly end: number;
|
||||
|
||||
/**
|
||||
* The 1-indexed line number on which this Token appears.
|
||||
*/
|
||||
readonly line: number;
|
||||
|
||||
/**
|
||||
* The 1-indexed column number at which this Token begins.
|
||||
*/
|
||||
readonly column: number;
|
||||
|
||||
/**
|
||||
* For non-punctuation tokens, represents the interpreted value of the token.
|
||||
*/
|
||||
readonly value: string | undefined;
|
||||
|
||||
/**
|
||||
* Tokens exist as nodes in a double-linked-list amongst all tokens
|
||||
* including ignored tokens. <SOF> is always the first node and <EOF>
|
||||
* the last.
|
||||
*/
|
||||
readonly prev: Token | null;
|
||||
readonly next: Token | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* The list of all possible AST node types.
|
||||
*/
|
||||
export type ASTNode =
|
||||
| NameNode
|
||||
| DocumentNode
|
||||
| OperationDefinitionNode
|
||||
| VariableDefinitionNode
|
||||
| VariableNode
|
||||
| SelectionSetNode
|
||||
| FieldNode
|
||||
| ArgumentNode
|
||||
| FragmentSpreadNode
|
||||
| InlineFragmentNode
|
||||
| FragmentDefinitionNode
|
||||
| IntValueNode
|
||||
| FloatValueNode
|
||||
| StringValueNode
|
||||
| BooleanValueNode
|
||||
| NullValueNode
|
||||
| EnumValueNode
|
||||
| ListValueNode
|
||||
| ObjectValueNode
|
||||
| ObjectFieldNode
|
||||
| DirectiveNode
|
||||
| NamedTypeNode
|
||||
| ListTypeNode
|
||||
| NonNullTypeNode
|
||||
| SchemaDefinitionNode
|
||||
| OperationTypeDefinitionNode
|
||||
| ScalarTypeDefinitionNode
|
||||
| ObjectTypeDefinitionNode
|
||||
| FieldDefinitionNode
|
||||
| InputValueDefinitionNode
|
||||
| InterfaceTypeDefinitionNode
|
||||
| UnionTypeDefinitionNode
|
||||
| EnumTypeDefinitionNode
|
||||
| EnumValueDefinitionNode
|
||||
| InputObjectTypeDefinitionNode
|
||||
| DirectiveDefinitionNode
|
||||
| SchemaExtensionNode
|
||||
| ScalarTypeExtensionNode
|
||||
| ObjectTypeExtensionNode
|
||||
| InterfaceTypeExtensionNode
|
||||
| UnionTypeExtensionNode
|
||||
| EnumTypeExtensionNode
|
||||
| InputObjectTypeExtensionNode;
|
||||
|
||||
/**
|
||||
* Utility type listing all nodes indexed by their kind.
|
||||
*/
|
||||
export interface ASTKindToNode {
|
||||
Name: NameNode;
|
||||
Document: DocumentNode;
|
||||
OperationDefinition: OperationDefinitionNode;
|
||||
VariableDefinition: VariableDefinitionNode;
|
||||
Variable: VariableNode;
|
||||
SelectionSet: SelectionSetNode;
|
||||
Field: FieldNode;
|
||||
Argument: ArgumentNode;
|
||||
FragmentSpread: FragmentSpreadNode;
|
||||
InlineFragment: InlineFragmentNode;
|
||||
FragmentDefinition: FragmentDefinitionNode;
|
||||
IntValue: IntValueNode;
|
||||
FloatValue: FloatValueNode;
|
||||
StringValue: StringValueNode;
|
||||
BooleanValue: BooleanValueNode;
|
||||
NullValue: NullValueNode;
|
||||
EnumValue: EnumValueNode;
|
||||
ListValue: ListValueNode;
|
||||
ObjectValue: ObjectValueNode;
|
||||
ObjectField: ObjectFieldNode;
|
||||
Directive: DirectiveNode;
|
||||
NamedType: NamedTypeNode;
|
||||
ListType: ListTypeNode;
|
||||
NonNullType: NonNullTypeNode;
|
||||
SchemaDefinition: SchemaDefinitionNode;
|
||||
OperationTypeDefinition: OperationTypeDefinitionNode;
|
||||
ScalarTypeDefinition: ScalarTypeDefinitionNode;
|
||||
ObjectTypeDefinition: ObjectTypeDefinitionNode;
|
||||
FieldDefinition: FieldDefinitionNode;
|
||||
InputValueDefinition: InputValueDefinitionNode;
|
||||
InterfaceTypeDefinition: InterfaceTypeDefinitionNode;
|
||||
UnionTypeDefinition: UnionTypeDefinitionNode;
|
||||
EnumTypeDefinition: EnumTypeDefinitionNode;
|
||||
EnumValueDefinition: EnumValueDefinitionNode;
|
||||
InputObjectTypeDefinition: InputObjectTypeDefinitionNode;
|
||||
DirectiveDefinition: DirectiveDefinitionNode;
|
||||
SchemaExtension: SchemaExtensionNode;
|
||||
ScalarTypeExtension: ScalarTypeExtensionNode;
|
||||
ObjectTypeExtension: ObjectTypeExtensionNode;
|
||||
InterfaceTypeExtension: InterfaceTypeExtensionNode;
|
||||
UnionTypeExtension: UnionTypeExtensionNode;
|
||||
EnumTypeExtension: EnumTypeExtensionNode;
|
||||
InputObjectTypeExtension: InputObjectTypeExtensionNode;
|
||||
}
|
||||
|
||||
// Name
|
||||
|
||||
export interface NameNode {
|
||||
readonly kind: 'Name';
|
||||
readonly loc?: Location;
|
||||
readonly value: string;
|
||||
}
|
||||
|
||||
// Document
|
||||
|
||||
export interface DocumentNode {
|
||||
readonly kind: 'Document';
|
||||
readonly loc?: Location;
|
||||
readonly definitions: ReadonlyArray<DefinitionNode>;
|
||||
}
|
||||
|
||||
export type DefinitionNode =
|
||||
| ExecutableDefinitionNode
|
||||
| TypeSystemDefinitionNode
|
||||
| TypeSystemExtensionNode;
|
||||
|
||||
export type ExecutableDefinitionNode =
|
||||
| OperationDefinitionNode
|
||||
| FragmentDefinitionNode;
|
||||
|
||||
export interface OperationDefinitionNode {
|
||||
readonly kind: 'OperationDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly operation: OperationTypeNode;
|
||||
readonly name?: NameNode;
|
||||
readonly variableDefinitions?: ReadonlyArray<VariableDefinitionNode>;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly selectionSet: SelectionSetNode;
|
||||
}
|
||||
|
||||
export type OperationTypeNode = 'query' | 'mutation' | 'subscription';
|
||||
|
||||
export interface VariableDefinitionNode {
|
||||
readonly kind: 'VariableDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly variable: VariableNode;
|
||||
readonly type: TypeNode;
|
||||
readonly defaultValue?: ValueNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
}
|
||||
|
||||
export interface VariableNode {
|
||||
readonly kind: 'Variable';
|
||||
readonly loc?: Location;
|
||||
readonly name: NameNode;
|
||||
}
|
||||
|
||||
export interface SelectionSetNode {
|
||||
kind: 'SelectionSet';
|
||||
loc?: Location;
|
||||
selections: ReadonlyArray<SelectionNode>;
|
||||
}
|
||||
|
||||
export type SelectionNode = FieldNode | FragmentSpreadNode | InlineFragmentNode;
|
||||
|
||||
export interface FieldNode {
|
||||
readonly kind: 'Field';
|
||||
readonly loc?: Location;
|
||||
readonly alias?: NameNode;
|
||||
readonly name: NameNode;
|
||||
readonly arguments?: ReadonlyArray<ArgumentNode>;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly selectionSet?: SelectionSetNode;
|
||||
}
|
||||
|
||||
export interface ArgumentNode {
|
||||
readonly kind: 'Argument';
|
||||
readonly loc?: Location;
|
||||
readonly name: NameNode;
|
||||
readonly value: ValueNode;
|
||||
}
|
||||
|
||||
// Fragments
|
||||
|
||||
export interface FragmentSpreadNode {
|
||||
readonly kind: 'FragmentSpread';
|
||||
readonly loc?: Location;
|
||||
readonly name: NameNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
}
|
||||
|
||||
export interface InlineFragmentNode {
|
||||
readonly kind: 'InlineFragment';
|
||||
readonly loc?: Location;
|
||||
readonly typeCondition?: NamedTypeNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly selectionSet: SelectionSetNode;
|
||||
}
|
||||
|
||||
export interface FragmentDefinitionNode {
|
||||
readonly kind: 'FragmentDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly name: NameNode;
|
||||
// Note: fragment variable definitions are experimental and may be changed
|
||||
// or removed in the future.
|
||||
readonly variableDefinitions?: ReadonlyArray<VariableDefinitionNode>;
|
||||
readonly typeCondition: NamedTypeNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly selectionSet: SelectionSetNode;
|
||||
}
|
||||
|
||||
// Values
|
||||
|
||||
export type ValueNode =
|
||||
| VariableNode
|
||||
| IntValueNode
|
||||
| FloatValueNode
|
||||
| StringValueNode
|
||||
| BooleanValueNode
|
||||
| NullValueNode
|
||||
| EnumValueNode
|
||||
| ListValueNode
|
||||
| ObjectValueNode;
|
||||
|
||||
export interface IntValueNode {
|
||||
readonly kind: 'IntValue';
|
||||
readonly loc?: Location;
|
||||
readonly value: string;
|
||||
}
|
||||
|
||||
export interface FloatValueNode {
|
||||
readonly kind: 'FloatValue';
|
||||
readonly loc?: Location;
|
||||
readonly value: string;
|
||||
}
|
||||
|
||||
export interface StringValueNode {
|
||||
readonly kind: 'StringValue';
|
||||
readonly loc?: Location;
|
||||
readonly value: string;
|
||||
readonly block?: boolean;
|
||||
}
|
||||
|
||||
export interface BooleanValueNode {
|
||||
readonly kind: 'BooleanValue';
|
||||
readonly loc?: Location;
|
||||
readonly value: boolean;
|
||||
}
|
||||
|
||||
export interface NullValueNode {
|
||||
readonly kind: 'NullValue';
|
||||
readonly loc?: Location;
|
||||
}
|
||||
|
||||
export interface EnumValueNode {
|
||||
readonly kind: 'EnumValue';
|
||||
readonly loc?: Location;
|
||||
readonly value: string;
|
||||
}
|
||||
|
||||
export interface ListValueNode {
|
||||
readonly kind: 'ListValue';
|
||||
readonly loc?: Location;
|
||||
readonly values: ReadonlyArray<ValueNode>;
|
||||
}
|
||||
|
||||
export interface ObjectValueNode {
|
||||
readonly kind: 'ObjectValue';
|
||||
readonly loc?: Location;
|
||||
readonly fields: ReadonlyArray<ObjectFieldNode>;
|
||||
}
|
||||
|
||||
export interface ObjectFieldNode {
|
||||
readonly kind: 'ObjectField';
|
||||
readonly loc?: Location;
|
||||
readonly name: NameNode;
|
||||
readonly value: ValueNode;
|
||||
}
|
||||
|
||||
// Directives
|
||||
|
||||
export interface DirectiveNode {
|
||||
readonly kind: 'Directive';
|
||||
readonly loc?: Location;
|
||||
readonly name: NameNode;
|
||||
readonly arguments?: ReadonlyArray<ArgumentNode>;
|
||||
}
|
||||
|
||||
// Type Reference
|
||||
|
||||
export type TypeNode = NamedTypeNode | ListTypeNode | NonNullTypeNode;
|
||||
|
||||
export interface NamedTypeNode {
|
||||
readonly kind: 'NamedType';
|
||||
readonly loc?: Location;
|
||||
readonly name: NameNode;
|
||||
}
|
||||
|
||||
export interface ListTypeNode {
|
||||
readonly kind: 'ListType';
|
||||
readonly loc?: Location;
|
||||
readonly type: TypeNode;
|
||||
}
|
||||
|
||||
export interface NonNullTypeNode {
|
||||
readonly kind: 'NonNullType';
|
||||
readonly loc?: Location;
|
||||
readonly type: NamedTypeNode | ListTypeNode;
|
||||
}
|
||||
|
||||
// Type System Definition
|
||||
|
||||
export type TypeSystemDefinitionNode =
|
||||
| SchemaDefinitionNode
|
||||
| TypeDefinitionNode
|
||||
| DirectiveDefinitionNode;
|
||||
|
||||
export interface SchemaDefinitionNode {
|
||||
readonly kind: 'SchemaDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly operationTypes: ReadonlyArray<OperationTypeDefinitionNode>;
|
||||
}
|
||||
|
||||
export interface OperationTypeDefinitionNode {
|
||||
readonly kind: 'OperationTypeDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly operation: OperationTypeNode;
|
||||
readonly type: NamedTypeNode;
|
||||
}
|
||||
|
||||
// Type Definition
|
||||
|
||||
export type TypeDefinitionNode =
|
||||
| ScalarTypeDefinitionNode
|
||||
| ObjectTypeDefinitionNode
|
||||
| InterfaceTypeDefinitionNode
|
||||
| UnionTypeDefinitionNode
|
||||
| EnumTypeDefinitionNode
|
||||
| InputObjectTypeDefinitionNode;
|
||||
|
||||
export interface ScalarTypeDefinitionNode {
|
||||
readonly kind: 'ScalarTypeDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly description?: StringValueNode;
|
||||
readonly name: NameNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
}
|
||||
|
||||
export interface ObjectTypeDefinitionNode {
|
||||
readonly kind: 'ObjectTypeDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly description?: StringValueNode;
|
||||
readonly name: NameNode;
|
||||
readonly interfaces?: ReadonlyArray<NamedTypeNode>;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly fields?: ReadonlyArray<FieldDefinitionNode>;
|
||||
}
|
||||
|
||||
export interface FieldDefinitionNode {
|
||||
readonly kind: 'FieldDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly description?: StringValueNode;
|
||||
readonly name: NameNode;
|
||||
readonly arguments?: ReadonlyArray<InputValueDefinitionNode>;
|
||||
readonly type: TypeNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
}
|
||||
|
||||
export interface InputValueDefinitionNode {
|
||||
readonly kind: 'InputValueDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly description?: StringValueNode;
|
||||
readonly name: NameNode;
|
||||
readonly type: TypeNode;
|
||||
readonly defaultValue?: ValueNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
}
|
||||
|
||||
export interface InterfaceTypeDefinitionNode {
|
||||
readonly kind: 'InterfaceTypeDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly description?: StringValueNode;
|
||||
readonly name: NameNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly fields?: ReadonlyArray<FieldDefinitionNode>;
|
||||
}
|
||||
|
||||
export interface UnionTypeDefinitionNode {
|
||||
readonly kind: 'UnionTypeDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly description?: StringValueNode;
|
||||
readonly name: NameNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly types?: ReadonlyArray<NamedTypeNode>;
|
||||
}
|
||||
|
||||
export interface EnumTypeDefinitionNode {
|
||||
readonly kind: 'EnumTypeDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly description?: StringValueNode;
|
||||
readonly name: NameNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly values?: ReadonlyArray<EnumValueDefinitionNode>;
|
||||
}
|
||||
|
||||
export interface EnumValueDefinitionNode {
|
||||
readonly kind: 'EnumValueDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly description?: StringValueNode;
|
||||
readonly name: NameNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
}
|
||||
|
||||
export interface InputObjectTypeDefinitionNode {
|
||||
readonly kind: 'InputObjectTypeDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly description?: StringValueNode;
|
||||
readonly name: NameNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly fields?: ReadonlyArray<InputValueDefinitionNode>;
|
||||
}
|
||||
|
||||
// Directive Definitions
|
||||
|
||||
export interface DirectiveDefinitionNode {
|
||||
readonly kind: 'DirectiveDefinition';
|
||||
readonly loc?: Location;
|
||||
readonly description?: StringValueNode;
|
||||
readonly name: NameNode;
|
||||
readonly arguments?: ReadonlyArray<InputValueDefinitionNode>;
|
||||
readonly repeatable: boolean;
|
||||
readonly locations: ReadonlyArray<NameNode>;
|
||||
}
|
||||
|
||||
// Type System Extensions
|
||||
|
||||
export type TypeSystemExtensionNode = SchemaExtensionNode | TypeExtensionNode;
|
||||
|
||||
export type SchemaExtensionNode = {
|
||||
readonly kind: 'SchemaExtension';
|
||||
readonly loc?: Location;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly operationTypes?: ReadonlyArray<OperationTypeDefinitionNode>;
|
||||
};
|
||||
|
||||
// Type Extensions
|
||||
|
||||
export type TypeExtensionNode =
|
||||
| ScalarTypeExtensionNode
|
||||
| ObjectTypeExtensionNode
|
||||
| InterfaceTypeExtensionNode
|
||||
| UnionTypeExtensionNode
|
||||
| EnumTypeExtensionNode
|
||||
| InputObjectTypeExtensionNode;
|
||||
|
||||
export interface ScalarTypeExtensionNode {
|
||||
readonly kind: 'ScalarTypeExtension';
|
||||
readonly loc?: Location;
|
||||
readonly name: NameNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
}
|
||||
|
||||
export interface ObjectTypeExtensionNode {
|
||||
readonly kind: 'ObjectTypeExtension';
|
||||
readonly loc?: Location;
|
||||
readonly name: NameNode;
|
||||
readonly interfaces?: ReadonlyArray<NamedTypeNode>;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly fields?: ReadonlyArray<FieldDefinitionNode>;
|
||||
}
|
||||
|
||||
export interface InterfaceTypeExtensionNode {
|
||||
readonly kind: 'InterfaceTypeExtension';
|
||||
readonly loc?: Location;
|
||||
readonly name: NameNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly fields?: ReadonlyArray<FieldDefinitionNode>;
|
||||
}
|
||||
|
||||
export interface UnionTypeExtensionNode {
|
||||
readonly kind: 'UnionTypeExtension';
|
||||
readonly loc?: Location;
|
||||
readonly name: NameNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly types?: ReadonlyArray<NamedTypeNode>;
|
||||
}
|
||||
|
||||
export interface EnumTypeExtensionNode {
|
||||
readonly kind: 'EnumTypeExtension';
|
||||
readonly loc?: Location;
|
||||
readonly name: NameNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly values?: ReadonlyArray<EnumValueDefinitionNode>;
|
||||
}
|
||||
|
||||
export interface InputObjectTypeExtensionNode {
|
||||
readonly kind: 'InputObjectTypeExtension';
|
||||
readonly loc?: Location;
|
||||
readonly name: NameNode;
|
||||
readonly directives?: ReadonlyArray<DirectiveNode>;
|
||||
readonly fields?: ReadonlyArray<InputValueDefinitionNode>;
|
||||
}
|
||||
1
node_modules/graphql/language/ast.js
generated
vendored
Normal file
1
node_modules/graphql/language/ast.js
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
"use strict";
|
||||
622
node_modules/graphql/language/ast.js.flow
generated
vendored
Normal file
622
node_modules/graphql/language/ast.js.flow
generated
vendored
Normal file
@@ -0,0 +1,622 @@
|
||||
// @flow strict
|
||||
|
||||
import { type Source } from './source';
|
||||
import { type TokenKindEnum } from './tokenKind';
|
||||
|
||||
/**
|
||||
* Contains a range of UTF-8 character offsets and token references that
|
||||
* identify the region of the source from which the AST derived.
|
||||
*/
|
||||
export type Location = {
|
||||
/**
|
||||
* The character offset at which this Node begins.
|
||||
*/
|
||||
+start: number,
|
||||
|
||||
/**
|
||||
* The character offset at which this Node ends.
|
||||
*/
|
||||
+end: number,
|
||||
|
||||
/**
|
||||
* The Token at which this Node begins.
|
||||
*/
|
||||
+startToken: Token,
|
||||
|
||||
/**
|
||||
* The Token at which this Node ends.
|
||||
*/
|
||||
+endToken: Token,
|
||||
|
||||
/**
|
||||
* The Source document the AST represents.
|
||||
*/
|
||||
+source: Source,
|
||||
|
||||
...
|
||||
};
|
||||
|
||||
/**
|
||||
* Represents a range of characters represented by a lexical token
|
||||
* within a Source.
|
||||
*/
|
||||
export type Token = {
|
||||
/**
|
||||
* The kind of Token.
|
||||
*/
|
||||
+kind: TokenKindEnum,
|
||||
|
||||
/**
|
||||
* The character offset at which this Node begins.
|
||||
*/
|
||||
+start: number,
|
||||
|
||||
/**
|
||||
* The character offset at which this Node ends.
|
||||
*/
|
||||
+end: number,
|
||||
|
||||
/**
|
||||
* The 1-indexed line number on which this Token appears.
|
||||
*/
|
||||
+line: number,
|
||||
|
||||
/**
|
||||
* The 1-indexed column number at which this Token begins.
|
||||
*/
|
||||
+column: number,
|
||||
|
||||
/**
|
||||
* For non-punctuation tokens, represents the interpreted value of the token.
|
||||
*/
|
||||
+value: string | void,
|
||||
|
||||
/**
|
||||
* Tokens exist as nodes in a double-linked-list amongst all tokens
|
||||
* including ignored tokens. <SOF> is always the first node and <EOF>
|
||||
* the last.
|
||||
*/
|
||||
+prev: Token | null,
|
||||
+next: Token | null,
|
||||
|
||||
...
|
||||
};
|
||||
|
||||
/**
|
||||
* The list of all possible AST node types.
|
||||
*/
|
||||
export type ASTNode =
|
||||
| NameNode
|
||||
| DocumentNode
|
||||
| OperationDefinitionNode
|
||||
| VariableDefinitionNode
|
||||
| VariableNode
|
||||
| SelectionSetNode
|
||||
| FieldNode
|
||||
| ArgumentNode
|
||||
| FragmentSpreadNode
|
||||
| InlineFragmentNode
|
||||
| FragmentDefinitionNode
|
||||
| IntValueNode
|
||||
| FloatValueNode
|
||||
| StringValueNode
|
||||
| BooleanValueNode
|
||||
| NullValueNode
|
||||
| EnumValueNode
|
||||
| ListValueNode
|
||||
| ObjectValueNode
|
||||
| ObjectFieldNode
|
||||
| DirectiveNode
|
||||
| NamedTypeNode
|
||||
| ListTypeNode
|
||||
| NonNullTypeNode
|
||||
| SchemaDefinitionNode
|
||||
| OperationTypeDefinitionNode
|
||||
| ScalarTypeDefinitionNode
|
||||
| ObjectTypeDefinitionNode
|
||||
| FieldDefinitionNode
|
||||
| InputValueDefinitionNode
|
||||
| InterfaceTypeDefinitionNode
|
||||
| UnionTypeDefinitionNode
|
||||
| EnumTypeDefinitionNode
|
||||
| EnumValueDefinitionNode
|
||||
| InputObjectTypeDefinitionNode
|
||||
| DirectiveDefinitionNode
|
||||
| SchemaExtensionNode
|
||||
| ScalarTypeExtensionNode
|
||||
| ObjectTypeExtensionNode
|
||||
| InterfaceTypeExtensionNode
|
||||
| UnionTypeExtensionNode
|
||||
| EnumTypeExtensionNode
|
||||
| InputObjectTypeExtensionNode;
|
||||
|
||||
/**
|
||||
* Utility type listing all nodes indexed by their kind.
|
||||
*/
|
||||
export type ASTKindToNode = {|
|
||||
Name: NameNode,
|
||||
Document: DocumentNode,
|
||||
OperationDefinition: OperationDefinitionNode,
|
||||
VariableDefinition: VariableDefinitionNode,
|
||||
Variable: VariableNode,
|
||||
SelectionSet: SelectionSetNode,
|
||||
Field: FieldNode,
|
||||
Argument: ArgumentNode,
|
||||
FragmentSpread: FragmentSpreadNode,
|
||||
InlineFragment: InlineFragmentNode,
|
||||
FragmentDefinition: FragmentDefinitionNode,
|
||||
IntValue: IntValueNode,
|
||||
FloatValue: FloatValueNode,
|
||||
StringValue: StringValueNode,
|
||||
BooleanValue: BooleanValueNode,
|
||||
NullValue: NullValueNode,
|
||||
EnumValue: EnumValueNode,
|
||||
ListValue: ListValueNode,
|
||||
ObjectValue: ObjectValueNode,
|
||||
ObjectField: ObjectFieldNode,
|
||||
Directive: DirectiveNode,
|
||||
NamedType: NamedTypeNode,
|
||||
ListType: ListTypeNode,
|
||||
NonNullType: NonNullTypeNode,
|
||||
SchemaDefinition: SchemaDefinitionNode,
|
||||
OperationTypeDefinition: OperationTypeDefinitionNode,
|
||||
ScalarTypeDefinition: ScalarTypeDefinitionNode,
|
||||
ObjectTypeDefinition: ObjectTypeDefinitionNode,
|
||||
FieldDefinition: FieldDefinitionNode,
|
||||
InputValueDefinition: InputValueDefinitionNode,
|
||||
InterfaceTypeDefinition: InterfaceTypeDefinitionNode,
|
||||
UnionTypeDefinition: UnionTypeDefinitionNode,
|
||||
EnumTypeDefinition: EnumTypeDefinitionNode,
|
||||
EnumValueDefinition: EnumValueDefinitionNode,
|
||||
InputObjectTypeDefinition: InputObjectTypeDefinitionNode,
|
||||
DirectiveDefinition: DirectiveDefinitionNode,
|
||||
SchemaExtension: SchemaExtensionNode,
|
||||
ScalarTypeExtension: ScalarTypeExtensionNode,
|
||||
ObjectTypeExtension: ObjectTypeExtensionNode,
|
||||
InterfaceTypeExtension: InterfaceTypeExtensionNode,
|
||||
UnionTypeExtension: UnionTypeExtensionNode,
|
||||
EnumTypeExtension: EnumTypeExtensionNode,
|
||||
InputObjectTypeExtension: InputObjectTypeExtensionNode,
|
||||
|};
|
||||
|
||||
// Name
|
||||
|
||||
export type NameNode = {
|
||||
+kind: 'Name',
|
||||
+loc?: Location,
|
||||
+value: string,
|
||||
...
|
||||
};
|
||||
|
||||
// Document
|
||||
|
||||
export type DocumentNode = {
|
||||
+kind: 'Document',
|
||||
+loc?: Location,
|
||||
+definitions: $ReadOnlyArray<DefinitionNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type DefinitionNode =
|
||||
| ExecutableDefinitionNode
|
||||
| TypeSystemDefinitionNode
|
||||
| TypeSystemExtensionNode;
|
||||
|
||||
export type ExecutableDefinitionNode =
|
||||
| OperationDefinitionNode
|
||||
| FragmentDefinitionNode;
|
||||
|
||||
export type OperationDefinitionNode = {
|
||||
+kind: 'OperationDefinition',
|
||||
+loc?: Location,
|
||||
+operation: OperationTypeNode,
|
||||
+name?: NameNode,
|
||||
+variableDefinitions?: $ReadOnlyArray<VariableDefinitionNode>,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+selectionSet: SelectionSetNode,
|
||||
...
|
||||
};
|
||||
|
||||
export type OperationTypeNode = 'query' | 'mutation' | 'subscription';
|
||||
|
||||
export type VariableDefinitionNode = {
|
||||
+kind: 'VariableDefinition',
|
||||
+loc?: Location,
|
||||
+variable: VariableNode,
|
||||
+type: TypeNode,
|
||||
+defaultValue?: ValueNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type VariableNode = {
|
||||
+kind: 'Variable',
|
||||
+loc?: Location,
|
||||
+name: NameNode,
|
||||
...
|
||||
};
|
||||
|
||||
export type SelectionSetNode = {
|
||||
kind: 'SelectionSet',
|
||||
loc?: Location,
|
||||
selections: $ReadOnlyArray<SelectionNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type SelectionNode = FieldNode | FragmentSpreadNode | InlineFragmentNode;
|
||||
|
||||
export type FieldNode = {
|
||||
+kind: 'Field',
|
||||
+loc?: Location,
|
||||
+alias?: NameNode,
|
||||
+name: NameNode,
|
||||
+arguments?: $ReadOnlyArray<ArgumentNode>,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+selectionSet?: SelectionSetNode,
|
||||
...
|
||||
};
|
||||
|
||||
export type ArgumentNode = {
|
||||
+kind: 'Argument',
|
||||
+loc?: Location,
|
||||
+name: NameNode,
|
||||
+value: ValueNode,
|
||||
...
|
||||
};
|
||||
|
||||
// Fragments
|
||||
|
||||
export type FragmentSpreadNode = {
|
||||
+kind: 'FragmentSpread',
|
||||
+loc?: Location,
|
||||
+name: NameNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type InlineFragmentNode = {
|
||||
+kind: 'InlineFragment',
|
||||
+loc?: Location,
|
||||
+typeCondition?: NamedTypeNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+selectionSet: SelectionSetNode,
|
||||
...
|
||||
};
|
||||
|
||||
export type FragmentDefinitionNode = {
|
||||
+kind: 'FragmentDefinition',
|
||||
+loc?: Location,
|
||||
+name: NameNode,
|
||||
// Note: fragment variable definitions are experimental and may be changed
|
||||
// or removed in the future.
|
||||
+variableDefinitions?: $ReadOnlyArray<VariableDefinitionNode>,
|
||||
+typeCondition: NamedTypeNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+selectionSet: SelectionSetNode,
|
||||
...
|
||||
};
|
||||
|
||||
// Values
|
||||
|
||||
export type ValueNode =
|
||||
| VariableNode
|
||||
| IntValueNode
|
||||
| FloatValueNode
|
||||
| StringValueNode
|
||||
| BooleanValueNode
|
||||
| NullValueNode
|
||||
| EnumValueNode
|
||||
| ListValueNode
|
||||
| ObjectValueNode;
|
||||
|
||||
export type IntValueNode = {
|
||||
+kind: 'IntValue',
|
||||
+loc?: Location,
|
||||
+value: string,
|
||||
...
|
||||
};
|
||||
|
||||
export type FloatValueNode = {
|
||||
+kind: 'FloatValue',
|
||||
+loc?: Location,
|
||||
+value: string,
|
||||
...
|
||||
};
|
||||
|
||||
export type StringValueNode = {
|
||||
+kind: 'StringValue',
|
||||
+loc?: Location,
|
||||
+value: string,
|
||||
+block?: boolean,
|
||||
...
|
||||
};
|
||||
|
||||
export type BooleanValueNode = {
|
||||
+kind: 'BooleanValue',
|
||||
+loc?: Location,
|
||||
+value: boolean,
|
||||
...
|
||||
};
|
||||
|
||||
export type NullValueNode = {
|
||||
+kind: 'NullValue',
|
||||
+loc?: Location,
|
||||
...
|
||||
};
|
||||
|
||||
export type EnumValueNode = {
|
||||
+kind: 'EnumValue',
|
||||
+loc?: Location,
|
||||
+value: string,
|
||||
...
|
||||
};
|
||||
|
||||
export type ListValueNode = {
|
||||
+kind: 'ListValue',
|
||||
+loc?: Location,
|
||||
+values: $ReadOnlyArray<ValueNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type ObjectValueNode = {
|
||||
+kind: 'ObjectValue',
|
||||
+loc?: Location,
|
||||
+fields: $ReadOnlyArray<ObjectFieldNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type ObjectFieldNode = {
|
||||
+kind: 'ObjectField',
|
||||
+loc?: Location,
|
||||
+name: NameNode,
|
||||
+value: ValueNode,
|
||||
...
|
||||
};
|
||||
|
||||
// Directives
|
||||
|
||||
export type DirectiveNode = {
|
||||
+kind: 'Directive',
|
||||
+loc?: Location,
|
||||
+name: NameNode,
|
||||
+arguments?: $ReadOnlyArray<ArgumentNode>,
|
||||
...
|
||||
};
|
||||
|
||||
// Type Reference
|
||||
|
||||
export type TypeNode = NamedTypeNode | ListTypeNode | NonNullTypeNode;
|
||||
|
||||
export type NamedTypeNode = {
|
||||
+kind: 'NamedType',
|
||||
+loc?: Location,
|
||||
+name: NameNode,
|
||||
...
|
||||
};
|
||||
|
||||
export type ListTypeNode = {
|
||||
+kind: 'ListType',
|
||||
+loc?: Location,
|
||||
+type: TypeNode,
|
||||
...
|
||||
};
|
||||
|
||||
export type NonNullTypeNode = {
|
||||
+kind: 'NonNullType',
|
||||
+loc?: Location,
|
||||
+type: NamedTypeNode | ListTypeNode,
|
||||
...
|
||||
};
|
||||
|
||||
// Type System Definition
|
||||
|
||||
export type TypeSystemDefinitionNode =
|
||||
| SchemaDefinitionNode
|
||||
| TypeDefinitionNode
|
||||
| DirectiveDefinitionNode;
|
||||
|
||||
export type SchemaDefinitionNode = {
|
||||
+kind: 'SchemaDefinition',
|
||||
+loc?: Location,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+operationTypes: $ReadOnlyArray<OperationTypeDefinitionNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type OperationTypeDefinitionNode = {
|
||||
+kind: 'OperationTypeDefinition',
|
||||
+loc?: Location,
|
||||
+operation: OperationTypeNode,
|
||||
+type: NamedTypeNode,
|
||||
...
|
||||
};
|
||||
|
||||
// Type Definition
|
||||
|
||||
export type TypeDefinitionNode =
|
||||
| ScalarTypeDefinitionNode
|
||||
| ObjectTypeDefinitionNode
|
||||
| InterfaceTypeDefinitionNode
|
||||
| UnionTypeDefinitionNode
|
||||
| EnumTypeDefinitionNode
|
||||
| InputObjectTypeDefinitionNode;
|
||||
|
||||
export type ScalarTypeDefinitionNode = {
|
||||
+kind: 'ScalarTypeDefinition',
|
||||
+loc?: Location,
|
||||
+description?: StringValueNode,
|
||||
+name: NameNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type ObjectTypeDefinitionNode = {
|
||||
+kind: 'ObjectTypeDefinition',
|
||||
+loc?: Location,
|
||||
+description?: StringValueNode,
|
||||
+name: NameNode,
|
||||
+interfaces?: $ReadOnlyArray<NamedTypeNode>,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+fields?: $ReadOnlyArray<FieldDefinitionNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type FieldDefinitionNode = {
|
||||
+kind: 'FieldDefinition',
|
||||
+loc?: Location,
|
||||
+description?: StringValueNode,
|
||||
+name: NameNode,
|
||||
+arguments?: $ReadOnlyArray<InputValueDefinitionNode>,
|
||||
+type: TypeNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type InputValueDefinitionNode = {
|
||||
+kind: 'InputValueDefinition',
|
||||
+loc?: Location,
|
||||
+description?: StringValueNode,
|
||||
+name: NameNode,
|
||||
+type: TypeNode,
|
||||
+defaultValue?: ValueNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type InterfaceTypeDefinitionNode = {
|
||||
+kind: 'InterfaceTypeDefinition',
|
||||
+loc?: Location,
|
||||
+description?: StringValueNode,
|
||||
+name: NameNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+fields?: $ReadOnlyArray<FieldDefinitionNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type UnionTypeDefinitionNode = {
|
||||
+kind: 'UnionTypeDefinition',
|
||||
+loc?: Location,
|
||||
+description?: StringValueNode,
|
||||
+name: NameNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+types?: $ReadOnlyArray<NamedTypeNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type EnumTypeDefinitionNode = {
|
||||
+kind: 'EnumTypeDefinition',
|
||||
+loc?: Location,
|
||||
+description?: StringValueNode,
|
||||
+name: NameNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+values?: $ReadOnlyArray<EnumValueDefinitionNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type EnumValueDefinitionNode = {
|
||||
+kind: 'EnumValueDefinition',
|
||||
+loc?: Location,
|
||||
+description?: StringValueNode,
|
||||
+name: NameNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type InputObjectTypeDefinitionNode = {
|
||||
+kind: 'InputObjectTypeDefinition',
|
||||
+loc?: Location,
|
||||
+description?: StringValueNode,
|
||||
+name: NameNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+fields?: $ReadOnlyArray<InputValueDefinitionNode>,
|
||||
...
|
||||
};
|
||||
|
||||
// Directive Definitions
|
||||
|
||||
export type DirectiveDefinitionNode = {
|
||||
+kind: 'DirectiveDefinition',
|
||||
+loc?: Location,
|
||||
+description?: StringValueNode,
|
||||
+name: NameNode,
|
||||
+arguments?: $ReadOnlyArray<InputValueDefinitionNode>,
|
||||
+repeatable: boolean,
|
||||
+locations: $ReadOnlyArray<NameNode>,
|
||||
...
|
||||
};
|
||||
|
||||
// Type System Extensions
|
||||
|
||||
export type TypeSystemExtensionNode = SchemaExtensionNode | TypeExtensionNode;
|
||||
|
||||
export type SchemaExtensionNode = {
|
||||
+kind: 'SchemaExtension',
|
||||
+loc?: Location,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+operationTypes?: $ReadOnlyArray<OperationTypeDefinitionNode>,
|
||||
...
|
||||
};
|
||||
|
||||
// Type Extensions
|
||||
|
||||
export type TypeExtensionNode =
|
||||
| ScalarTypeExtensionNode
|
||||
| ObjectTypeExtensionNode
|
||||
| InterfaceTypeExtensionNode
|
||||
| UnionTypeExtensionNode
|
||||
| EnumTypeExtensionNode
|
||||
| InputObjectTypeExtensionNode;
|
||||
|
||||
export type ScalarTypeExtensionNode = {
|
||||
+kind: 'ScalarTypeExtension',
|
||||
+loc?: Location,
|
||||
+name: NameNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type ObjectTypeExtensionNode = {
|
||||
+kind: 'ObjectTypeExtension',
|
||||
+loc?: Location,
|
||||
+name: NameNode,
|
||||
+interfaces?: $ReadOnlyArray<NamedTypeNode>,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+fields?: $ReadOnlyArray<FieldDefinitionNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type InterfaceTypeExtensionNode = {
|
||||
+kind: 'InterfaceTypeExtension',
|
||||
+loc?: Location,
|
||||
+name: NameNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+fields?: $ReadOnlyArray<FieldDefinitionNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type UnionTypeExtensionNode = {
|
||||
+kind: 'UnionTypeExtension',
|
||||
+loc?: Location,
|
||||
+name: NameNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+types?: $ReadOnlyArray<NamedTypeNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type EnumTypeExtensionNode = {
|
||||
+kind: 'EnumTypeExtension',
|
||||
+loc?: Location,
|
||||
+name: NameNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+values?: $ReadOnlyArray<EnumValueDefinitionNode>,
|
||||
...
|
||||
};
|
||||
|
||||
export type InputObjectTypeExtensionNode = {
|
||||
+kind: 'InputObjectTypeExtension',
|
||||
+loc?: Location,
|
||||
+name: NameNode,
|
||||
+directives?: $ReadOnlyArray<DirectiveNode>,
|
||||
+fields?: $ReadOnlyArray<InputValueDefinitionNode>,
|
||||
...
|
||||
};
|
||||
1
node_modules/graphql/language/ast.mjs
generated
vendored
Normal file
1
node_modules/graphql/language/ast.mjs
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
21
node_modules/graphql/language/blockString.d.ts
generated
vendored
Normal file
21
node_modules/graphql/language/blockString.d.ts
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
/**
|
||||
* Produces the value of a block string from its parsed raw value, similar to
|
||||
* Coffeescript's block string, Python's docstring trim or Ruby's strip_heredoc.
|
||||
*
|
||||
* This implements the GraphQL spec's BlockStringValue() static algorithm.
|
||||
*/
|
||||
export function dedentBlockStringValue(rawString: string): string;
|
||||
|
||||
// @internal
|
||||
export function getBlockStringIndentation(lines: ReadonlyArray<string>): number;
|
||||
|
||||
/**
|
||||
* Print a block string in the indented block form by adding a leading and
|
||||
* trailing blank line. However, if a block string starts with whitespace and is
|
||||
* a single-line, adding a leading blank line would strip that whitespace.
|
||||
*/
|
||||
export function printBlockString(
|
||||
value: string,
|
||||
indentation?: string,
|
||||
preferMultipleLines?: boolean,
|
||||
): string;
|
||||
105
node_modules/graphql/language/blockString.js
generated
vendored
Normal file
105
node_modules/graphql/language/blockString.js
generated
vendored
Normal file
@@ -0,0 +1,105 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.dedentBlockStringValue = dedentBlockStringValue;
|
||||
exports.getBlockStringIndentation = getBlockStringIndentation;
|
||||
exports.printBlockString = printBlockString;
|
||||
|
||||
/**
|
||||
* Produces the value of a block string from its parsed raw value, similar to
|
||||
* CoffeeScript's block string, Python's docstring trim or Ruby's strip_heredoc.
|
||||
*
|
||||
* This implements the GraphQL spec's BlockStringValue() static algorithm.
|
||||
*/
|
||||
function dedentBlockStringValue(rawString) {
|
||||
// Expand a block string's raw value into independent lines.
|
||||
var lines = rawString.split(/\r\n|[\n\r]/g); // Remove common indentation from all lines but first.
|
||||
|
||||
var commonIndent = getBlockStringIndentation(lines);
|
||||
|
||||
if (commonIndent !== 0) {
|
||||
for (var i = 1; i < lines.length; i++) {
|
||||
lines[i] = lines[i].slice(commonIndent);
|
||||
}
|
||||
} // Remove leading and trailing blank lines.
|
||||
|
||||
|
||||
while (lines.length > 0 && isBlank(lines[0])) {
|
||||
lines.shift();
|
||||
}
|
||||
|
||||
while (lines.length > 0 && isBlank(lines[lines.length - 1])) {
|
||||
lines.pop();
|
||||
} // Return a string of the lines joined with U+000A.
|
||||
|
||||
|
||||
return lines.join('\n');
|
||||
} // @internal
|
||||
|
||||
|
||||
function getBlockStringIndentation(lines) {
|
||||
var commonIndent = null;
|
||||
|
||||
for (var i = 1; i < lines.length; i++) {
|
||||
var line = lines[i];
|
||||
var indent = leadingWhitespace(line);
|
||||
|
||||
if (indent === line.length) {
|
||||
continue; // skip empty lines
|
||||
}
|
||||
|
||||
if (commonIndent === null || indent < commonIndent) {
|
||||
commonIndent = indent;
|
||||
|
||||
if (commonIndent === 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return commonIndent === null ? 0 : commonIndent;
|
||||
}
|
||||
|
||||
function leadingWhitespace(str) {
|
||||
var i = 0;
|
||||
|
||||
while (i < str.length && (str[i] === ' ' || str[i] === '\t')) {
|
||||
i++;
|
||||
}
|
||||
|
||||
return i;
|
||||
}
|
||||
|
||||
function isBlank(str) {
|
||||
return leadingWhitespace(str) === str.length;
|
||||
}
|
||||
/**
|
||||
* Print a block string in the indented block form by adding a leading and
|
||||
* trailing blank line. However, if a block string starts with whitespace and is
|
||||
* a single-line, adding a leading blank line would strip that whitespace.
|
||||
*/
|
||||
|
||||
|
||||
function printBlockString(value) {
|
||||
var indentation = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '';
|
||||
var preferMultipleLines = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
|
||||
var isSingleLine = value.indexOf('\n') === -1;
|
||||
var hasLeadingSpace = value[0] === ' ' || value[0] === '\t';
|
||||
var hasTrailingQuote = value[value.length - 1] === '"';
|
||||
var printAsMultipleLines = !isSingleLine || hasTrailingQuote || preferMultipleLines;
|
||||
var result = ''; // Format a multi-line block quote to account for leading space.
|
||||
|
||||
if (printAsMultipleLines && !(isSingleLine && hasLeadingSpace)) {
|
||||
result += '\n' + indentation;
|
||||
}
|
||||
|
||||
result += indentation ? value.replace(/\n/g, '\n' + indentation) : value;
|
||||
|
||||
if (printAsMultipleLines) {
|
||||
result += '\n';
|
||||
}
|
||||
|
||||
return '"""' + result.replace(/"""/g, '\\"""') + '"""';
|
||||
}
|
||||
97
node_modules/graphql/language/blockString.js.flow
generated
vendored
Normal file
97
node_modules/graphql/language/blockString.js.flow
generated
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
// @flow strict
|
||||
|
||||
/**
|
||||
* Produces the value of a block string from its parsed raw value, similar to
|
||||
* CoffeeScript's block string, Python's docstring trim or Ruby's strip_heredoc.
|
||||
*
|
||||
* This implements the GraphQL spec's BlockStringValue() static algorithm.
|
||||
*/
|
||||
export function dedentBlockStringValue(rawString: string): string {
|
||||
// Expand a block string's raw value into independent lines.
|
||||
const lines = rawString.split(/\r\n|[\n\r]/g);
|
||||
|
||||
// Remove common indentation from all lines but first.
|
||||
const commonIndent = getBlockStringIndentation(lines);
|
||||
|
||||
if (commonIndent !== 0) {
|
||||
for (let i = 1; i < lines.length; i++) {
|
||||
lines[i] = lines[i].slice(commonIndent);
|
||||
}
|
||||
}
|
||||
|
||||
// Remove leading and trailing blank lines.
|
||||
while (lines.length > 0 && isBlank(lines[0])) {
|
||||
lines.shift();
|
||||
}
|
||||
while (lines.length > 0 && isBlank(lines[lines.length - 1])) {
|
||||
lines.pop();
|
||||
}
|
||||
|
||||
// Return a string of the lines joined with U+000A.
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
// @internal
|
||||
export function getBlockStringIndentation(
|
||||
lines: $ReadOnlyArray<string>,
|
||||
): number {
|
||||
let commonIndent = null;
|
||||
|
||||
for (let i = 1; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
const indent = leadingWhitespace(line);
|
||||
if (indent === line.length) {
|
||||
continue; // skip empty lines
|
||||
}
|
||||
|
||||
if (commonIndent === null || indent < commonIndent) {
|
||||
commonIndent = indent;
|
||||
if (commonIndent === 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return commonIndent === null ? 0 : commonIndent;
|
||||
}
|
||||
|
||||
function leadingWhitespace(str) {
|
||||
let i = 0;
|
||||
while (i < str.length && (str[i] === ' ' || str[i] === '\t')) {
|
||||
i++;
|
||||
}
|
||||
return i;
|
||||
}
|
||||
|
||||
function isBlank(str) {
|
||||
return leadingWhitespace(str) === str.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Print a block string in the indented block form by adding a leading and
|
||||
* trailing blank line. However, if a block string starts with whitespace and is
|
||||
* a single-line, adding a leading blank line would strip that whitespace.
|
||||
*/
|
||||
export function printBlockString(
|
||||
value: string,
|
||||
indentation?: string = '',
|
||||
preferMultipleLines?: boolean = false,
|
||||
): string {
|
||||
const isSingleLine = value.indexOf('\n') === -1;
|
||||
const hasLeadingSpace = value[0] === ' ' || value[0] === '\t';
|
||||
const hasTrailingQuote = value[value.length - 1] === '"';
|
||||
const printAsMultipleLines =
|
||||
!isSingleLine || hasTrailingQuote || preferMultipleLines;
|
||||
|
||||
let result = '';
|
||||
// Format a multi-line block quote to account for leading space.
|
||||
if (printAsMultipleLines && !(isSingleLine && hasLeadingSpace)) {
|
||||
result += '\n' + indentation;
|
||||
}
|
||||
result += indentation ? value.replace(/\n/g, '\n' + indentation) : value;
|
||||
if (printAsMultipleLines) {
|
||||
result += '\n';
|
||||
}
|
||||
|
||||
return '"""' + result.replace(/"""/g, '\\"""') + '"""';
|
||||
}
|
||||
95
node_modules/graphql/language/blockString.mjs
generated
vendored
Normal file
95
node_modules/graphql/language/blockString.mjs
generated
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
/**
|
||||
* Produces the value of a block string from its parsed raw value, similar to
|
||||
* CoffeeScript's block string, Python's docstring trim or Ruby's strip_heredoc.
|
||||
*
|
||||
* This implements the GraphQL spec's BlockStringValue() static algorithm.
|
||||
*/
|
||||
export function dedentBlockStringValue(rawString) {
|
||||
// Expand a block string's raw value into independent lines.
|
||||
var lines = rawString.split(/\r\n|[\n\r]/g); // Remove common indentation from all lines but first.
|
||||
|
||||
var commonIndent = getBlockStringIndentation(lines);
|
||||
|
||||
if (commonIndent !== 0) {
|
||||
for (var i = 1; i < lines.length; i++) {
|
||||
lines[i] = lines[i].slice(commonIndent);
|
||||
}
|
||||
} // Remove leading and trailing blank lines.
|
||||
|
||||
|
||||
while (lines.length > 0 && isBlank(lines[0])) {
|
||||
lines.shift();
|
||||
}
|
||||
|
||||
while (lines.length > 0 && isBlank(lines[lines.length - 1])) {
|
||||
lines.pop();
|
||||
} // Return a string of the lines joined with U+000A.
|
||||
|
||||
|
||||
return lines.join('\n');
|
||||
} // @internal
|
||||
|
||||
export function getBlockStringIndentation(lines) {
|
||||
var commonIndent = null;
|
||||
|
||||
for (var i = 1; i < lines.length; i++) {
|
||||
var line = lines[i];
|
||||
var indent = leadingWhitespace(line);
|
||||
|
||||
if (indent === line.length) {
|
||||
continue; // skip empty lines
|
||||
}
|
||||
|
||||
if (commonIndent === null || indent < commonIndent) {
|
||||
commonIndent = indent;
|
||||
|
||||
if (commonIndent === 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return commonIndent === null ? 0 : commonIndent;
|
||||
}
|
||||
|
||||
function leadingWhitespace(str) {
|
||||
var i = 0;
|
||||
|
||||
while (i < str.length && (str[i] === ' ' || str[i] === '\t')) {
|
||||
i++;
|
||||
}
|
||||
|
||||
return i;
|
||||
}
|
||||
|
||||
function isBlank(str) {
|
||||
return leadingWhitespace(str) === str.length;
|
||||
}
|
||||
/**
|
||||
* Print a block string in the indented block form by adding a leading and
|
||||
* trailing blank line. However, if a block string starts with whitespace and is
|
||||
* a single-line, adding a leading blank line would strip that whitespace.
|
||||
*/
|
||||
|
||||
|
||||
export function printBlockString(value) {
|
||||
var indentation = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '';
|
||||
var preferMultipleLines = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
|
||||
var isSingleLine = value.indexOf('\n') === -1;
|
||||
var hasLeadingSpace = value[0] === ' ' || value[0] === '\t';
|
||||
var hasTrailingQuote = value[value.length - 1] === '"';
|
||||
var printAsMultipleLines = !isSingleLine || hasTrailingQuote || preferMultipleLines;
|
||||
var result = ''; // Format a multi-line block quote to account for leading space.
|
||||
|
||||
if (printAsMultipleLines && !(isSingleLine && hasLeadingSpace)) {
|
||||
result += '\n' + indentation;
|
||||
}
|
||||
|
||||
result += indentation ? value.replace(/\n/g, '\n' + indentation) : value;
|
||||
|
||||
if (printAsMultipleLines) {
|
||||
result += '\n';
|
||||
}
|
||||
|
||||
return '"""' + result.replace(/"""/g, '\\"""') + '"""';
|
||||
}
|
||||
35
node_modules/graphql/language/directiveLocation.d.ts
generated
vendored
Normal file
35
node_modules/graphql/language/directiveLocation.d.ts
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
/**
|
||||
* The set of allowed directive location values.
|
||||
*/
|
||||
export const DirectiveLocation: _DirectiveLocation;
|
||||
|
||||
// @internal
|
||||
type _DirectiveLocation = {
|
||||
// Request Definitions
|
||||
QUERY: 'QUERY';
|
||||
MUTATION: 'MUTATION';
|
||||
SUBSCRIPTION: 'SUBSCRIPTION';
|
||||
FIELD: 'FIELD';
|
||||
FRAGMENT_DEFINITION: 'FRAGMENT_DEFINITION';
|
||||
FRAGMENT_SPREAD: 'FRAGMENT_SPREAD';
|
||||
INLINE_FRAGMENT: 'INLINE_FRAGMENT';
|
||||
VARIABLE_DEFINITION: 'VARIABLE_DEFINITION';
|
||||
|
||||
// Type System Definitions
|
||||
SCHEMA: 'SCHEMA';
|
||||
SCALAR: 'SCALAR';
|
||||
OBJECT: 'OBJECT';
|
||||
FIELD_DEFINITION: 'FIELD_DEFINITION';
|
||||
ARGUMENT_DEFINITION: 'ARGUMENT_DEFINITION';
|
||||
INTERFACE: 'INTERFACE';
|
||||
UNION: 'UNION';
|
||||
ENUM: 'ENUM';
|
||||
ENUM_VALUE: 'ENUM_VALUE';
|
||||
INPUT_OBJECT: 'INPUT_OBJECT';
|
||||
INPUT_FIELD_DEFINITION: 'INPUT_FIELD_DEFINITION';
|
||||
};
|
||||
|
||||
/**
|
||||
* The enum type representing the directive location values.
|
||||
*/
|
||||
export type DirectiveLocationEnum = _DirectiveLocation[keyof _DirectiveLocation];
|
||||
38
node_modules/graphql/language/directiveLocation.js
generated
vendored
Normal file
38
node_modules/graphql/language/directiveLocation.js
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.DirectiveLocation = void 0;
|
||||
|
||||
/**
|
||||
* The set of allowed directive location values.
|
||||
*/
|
||||
var DirectiveLocation = Object.freeze({
|
||||
// Request Definitions
|
||||
QUERY: 'QUERY',
|
||||
MUTATION: 'MUTATION',
|
||||
SUBSCRIPTION: 'SUBSCRIPTION',
|
||||
FIELD: 'FIELD',
|
||||
FRAGMENT_DEFINITION: 'FRAGMENT_DEFINITION',
|
||||
FRAGMENT_SPREAD: 'FRAGMENT_SPREAD',
|
||||
INLINE_FRAGMENT: 'INLINE_FRAGMENT',
|
||||
VARIABLE_DEFINITION: 'VARIABLE_DEFINITION',
|
||||
// Type System Definitions
|
||||
SCHEMA: 'SCHEMA',
|
||||
SCALAR: 'SCALAR',
|
||||
OBJECT: 'OBJECT',
|
||||
FIELD_DEFINITION: 'FIELD_DEFINITION',
|
||||
ARGUMENT_DEFINITION: 'ARGUMENT_DEFINITION',
|
||||
INTERFACE: 'INTERFACE',
|
||||
UNION: 'UNION',
|
||||
ENUM: 'ENUM',
|
||||
ENUM_VALUE: 'ENUM_VALUE',
|
||||
INPUT_OBJECT: 'INPUT_OBJECT',
|
||||
INPUT_FIELD_DEFINITION: 'INPUT_FIELD_DEFINITION'
|
||||
});
|
||||
/**
|
||||
* The enum type representing the directive location values.
|
||||
*/
|
||||
|
||||
exports.DirectiveLocation = DirectiveLocation;
|
||||
33
node_modules/graphql/language/directiveLocation.js.flow
generated
vendored
Normal file
33
node_modules/graphql/language/directiveLocation.js.flow
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
// @flow strict
|
||||
|
||||
/**
|
||||
* The set of allowed directive location values.
|
||||
*/
|
||||
export const DirectiveLocation = Object.freeze({
|
||||
// Request Definitions
|
||||
QUERY: 'QUERY',
|
||||
MUTATION: 'MUTATION',
|
||||
SUBSCRIPTION: 'SUBSCRIPTION',
|
||||
FIELD: 'FIELD',
|
||||
FRAGMENT_DEFINITION: 'FRAGMENT_DEFINITION',
|
||||
FRAGMENT_SPREAD: 'FRAGMENT_SPREAD',
|
||||
INLINE_FRAGMENT: 'INLINE_FRAGMENT',
|
||||
VARIABLE_DEFINITION: 'VARIABLE_DEFINITION',
|
||||
// Type System Definitions
|
||||
SCHEMA: 'SCHEMA',
|
||||
SCALAR: 'SCALAR',
|
||||
OBJECT: 'OBJECT',
|
||||
FIELD_DEFINITION: 'FIELD_DEFINITION',
|
||||
ARGUMENT_DEFINITION: 'ARGUMENT_DEFINITION',
|
||||
INTERFACE: 'INTERFACE',
|
||||
UNION: 'UNION',
|
||||
ENUM: 'ENUM',
|
||||
ENUM_VALUE: 'ENUM_VALUE',
|
||||
INPUT_OBJECT: 'INPUT_OBJECT',
|
||||
INPUT_FIELD_DEFINITION: 'INPUT_FIELD_DEFINITION',
|
||||
});
|
||||
|
||||
/**
|
||||
* The enum type representing the directive location values.
|
||||
*/
|
||||
export type DirectiveLocationEnum = $Values<typeof DirectiveLocation>;
|
||||
29
node_modules/graphql/language/directiveLocation.mjs
generated
vendored
Normal file
29
node_modules/graphql/language/directiveLocation.mjs
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
/**
|
||||
* The set of allowed directive location values.
|
||||
*/
|
||||
export var DirectiveLocation = Object.freeze({
|
||||
// Request Definitions
|
||||
QUERY: 'QUERY',
|
||||
MUTATION: 'MUTATION',
|
||||
SUBSCRIPTION: 'SUBSCRIPTION',
|
||||
FIELD: 'FIELD',
|
||||
FRAGMENT_DEFINITION: 'FRAGMENT_DEFINITION',
|
||||
FRAGMENT_SPREAD: 'FRAGMENT_SPREAD',
|
||||
INLINE_FRAGMENT: 'INLINE_FRAGMENT',
|
||||
VARIABLE_DEFINITION: 'VARIABLE_DEFINITION',
|
||||
// Type System Definitions
|
||||
SCHEMA: 'SCHEMA',
|
||||
SCALAR: 'SCALAR',
|
||||
OBJECT: 'OBJECT',
|
||||
FIELD_DEFINITION: 'FIELD_DEFINITION',
|
||||
ARGUMENT_DEFINITION: 'ARGUMENT_DEFINITION',
|
||||
INTERFACE: 'INTERFACE',
|
||||
UNION: 'UNION',
|
||||
ENUM: 'ENUM',
|
||||
ENUM_VALUE: 'ENUM_VALUE',
|
||||
INPUT_OBJECT: 'INPUT_OBJECT',
|
||||
INPUT_FIELD_DEFINITION: 'INPUT_FIELD_DEFINITION'
|
||||
});
|
||||
/**
|
||||
* The enum type representing the directive location values.
|
||||
*/
|
||||
96
node_modules/graphql/language/index.d.ts
generated
vendored
Normal file
96
node_modules/graphql/language/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,96 @@
|
||||
export { Source } from './source';
|
||||
export { getLocation, SourceLocation } from './location';
|
||||
|
||||
export { printLocation, printSourceLocation } from './printLocation';
|
||||
|
||||
export { Kind, KindEnum } from './kinds';
|
||||
export { TokenKind, TokenKindEnum } from './tokenKind';
|
||||
export { createLexer, Lexer } from './lexer';
|
||||
export { parse, parseValue, parseType, ParseOptions } from './parser';
|
||||
export { print } from './printer';
|
||||
export {
|
||||
visit,
|
||||
visitInParallel,
|
||||
visitWithTypeInfo,
|
||||
getVisitFn,
|
||||
BREAK,
|
||||
ASTVisitor,
|
||||
Visitor,
|
||||
VisitFn,
|
||||
VisitorKeyMap,
|
||||
} from './visitor';
|
||||
|
||||
export {
|
||||
Location,
|
||||
Token,
|
||||
ASTNode,
|
||||
ASTKindToNode,
|
||||
// Each kind of AST node
|
||||
NameNode,
|
||||
DocumentNode,
|
||||
DefinitionNode,
|
||||
ExecutableDefinitionNode,
|
||||
OperationDefinitionNode,
|
||||
OperationTypeNode,
|
||||
VariableDefinitionNode,
|
||||
VariableNode,
|
||||
SelectionSetNode,
|
||||
SelectionNode,
|
||||
FieldNode,
|
||||
ArgumentNode,
|
||||
FragmentSpreadNode,
|
||||
InlineFragmentNode,
|
||||
FragmentDefinitionNode,
|
||||
ValueNode,
|
||||
IntValueNode,
|
||||
FloatValueNode,
|
||||
StringValueNode,
|
||||
BooleanValueNode,
|
||||
NullValueNode,
|
||||
EnumValueNode,
|
||||
ListValueNode,
|
||||
ObjectValueNode,
|
||||
ObjectFieldNode,
|
||||
DirectiveNode,
|
||||
TypeNode,
|
||||
NamedTypeNode,
|
||||
ListTypeNode,
|
||||
NonNullTypeNode,
|
||||
TypeSystemDefinitionNode,
|
||||
SchemaDefinitionNode,
|
||||
OperationTypeDefinitionNode,
|
||||
TypeDefinitionNode,
|
||||
ScalarTypeDefinitionNode,
|
||||
ObjectTypeDefinitionNode,
|
||||
FieldDefinitionNode,
|
||||
InputValueDefinitionNode,
|
||||
InterfaceTypeDefinitionNode,
|
||||
UnionTypeDefinitionNode,
|
||||
EnumTypeDefinitionNode,
|
||||
EnumValueDefinitionNode,
|
||||
InputObjectTypeDefinitionNode,
|
||||
DirectiveDefinitionNode,
|
||||
TypeSystemExtensionNode,
|
||||
SchemaExtensionNode,
|
||||
TypeExtensionNode,
|
||||
ScalarTypeExtensionNode,
|
||||
ObjectTypeExtensionNode,
|
||||
InterfaceTypeExtensionNode,
|
||||
UnionTypeExtensionNode,
|
||||
EnumTypeExtensionNode,
|
||||
InputObjectTypeExtensionNode,
|
||||
} from './ast';
|
||||
|
||||
export {
|
||||
isDefinitionNode,
|
||||
isExecutableDefinitionNode,
|
||||
isSelectionNode,
|
||||
isValueNode,
|
||||
isTypeNode,
|
||||
isTypeSystemDefinitionNode,
|
||||
isTypeDefinitionNode,
|
||||
isTypeSystemExtensionNode,
|
||||
isTypeExtensionNode,
|
||||
} from './predicates';
|
||||
|
||||
export { DirectiveLocation, DirectiveLocationEnum } from './directiveLocation';
|
||||
183
node_modules/graphql/language/index.js
generated
vendored
Normal file
183
node_modules/graphql/language/index.js
generated
vendored
Normal file
@@ -0,0 +1,183 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
Object.defineProperty(exports, "Source", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _source.Source;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "getLocation", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _location.getLocation;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "printLocation", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _printLocation.printLocation;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "printSourceLocation", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _printLocation.printSourceLocation;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "Kind", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _kinds.Kind;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "TokenKind", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _tokenKind.TokenKind;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "createLexer", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _lexer.createLexer;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "parse", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _parser.parse;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "parseValue", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _parser.parseValue;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "parseType", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _parser.parseType;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "print", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _printer.print;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "visit", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _visitor.visit;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "visitInParallel", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _visitor.visitInParallel;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "visitWithTypeInfo", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _visitor.visitWithTypeInfo;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "getVisitFn", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _visitor.getVisitFn;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "BREAK", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _visitor.BREAK;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "isDefinitionNode", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _predicates.isDefinitionNode;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "isExecutableDefinitionNode", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _predicates.isExecutableDefinitionNode;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "isSelectionNode", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _predicates.isSelectionNode;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "isValueNode", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _predicates.isValueNode;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "isTypeNode", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _predicates.isTypeNode;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "isTypeSystemDefinitionNode", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _predicates.isTypeSystemDefinitionNode;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "isTypeDefinitionNode", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _predicates.isTypeDefinitionNode;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "isTypeSystemExtensionNode", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _predicates.isTypeSystemExtensionNode;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "isTypeExtensionNode", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _predicates.isTypeExtensionNode;
|
||||
}
|
||||
});
|
||||
Object.defineProperty(exports, "DirectiveLocation", {
|
||||
enumerable: true,
|
||||
get: function get() {
|
||||
return _directiveLocation.DirectiveLocation;
|
||||
}
|
||||
});
|
||||
|
||||
var _source = require("./source");
|
||||
|
||||
var _location = require("./location");
|
||||
|
||||
var _printLocation = require("./printLocation");
|
||||
|
||||
var _kinds = require("./kinds");
|
||||
|
||||
var _tokenKind = require("./tokenKind");
|
||||
|
||||
var _lexer = require("./lexer");
|
||||
|
||||
var _parser = require("./parser");
|
||||
|
||||
var _printer = require("./printer");
|
||||
|
||||
var _visitor = require("./visitor");
|
||||
|
||||
var _predicates = require("./predicates");
|
||||
|
||||
var _directiveLocation = require("./directiveLocation");
|
||||
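A minimal sketch (an illustration, not part of this commit) of the lazy re-export pattern used throughout index.js above: each named export is exposed as an enumerable getter that defers to the required module.

const sourceModule = { parse: (body: string) => ({ body }) }; // hypothetical stand-in for './parser'
const reExports: Record<string, unknown> = {};

Object.defineProperty(reExports, 'parse', {
  enumerable: true,
  get() {
    // Resolved on each access, mirroring the generated getters above.
    return sourceModule.parse;
  },
});

console.log(typeof reExports.parse); // 'function'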
107
node_modules/graphql/language/index.js.flow
generated
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
// @flow strict
|
||||
|
||||
export { Source } from './source';
|
||||
|
||||
export { getLocation } from './location';
|
||||
export type { SourceLocation } from './location';
|
||||
|
||||
export { printLocation, printSourceLocation } from './printLocation';
|
||||
|
||||
export { Kind } from './kinds';
|
||||
export type { KindEnum } from './kinds';
|
||||
|
||||
export { TokenKind } from './tokenKind';
|
||||
export type { TokenKindEnum } from './tokenKind';
|
||||
|
||||
export { createLexer } from './lexer';
|
||||
export type { Lexer } from './lexer';
|
||||
|
||||
export { parse, parseValue, parseType } from './parser';
|
||||
export type { ParseOptions } from './parser';
|
||||
|
||||
export { print } from './printer';
|
||||
|
||||
export {
|
||||
visit,
|
||||
visitInParallel,
|
||||
visitWithTypeInfo,
|
||||
getVisitFn,
|
||||
BREAK,
|
||||
} from './visitor';
|
||||
export type { ASTVisitor, Visitor, VisitFn, VisitorKeyMap } from './visitor';
|
||||
|
||||
export type {
|
||||
Location,
|
||||
Token,
|
||||
ASTNode,
|
||||
ASTKindToNode,
|
||||
// Each kind of AST node
|
||||
NameNode,
|
||||
DocumentNode,
|
||||
DefinitionNode,
|
||||
ExecutableDefinitionNode,
|
||||
OperationDefinitionNode,
|
||||
OperationTypeNode,
|
||||
VariableDefinitionNode,
|
||||
VariableNode,
|
||||
SelectionSetNode,
|
||||
SelectionNode,
|
||||
FieldNode,
|
||||
ArgumentNode,
|
||||
FragmentSpreadNode,
|
||||
InlineFragmentNode,
|
||||
FragmentDefinitionNode,
|
||||
ValueNode,
|
||||
IntValueNode,
|
||||
FloatValueNode,
|
||||
StringValueNode,
|
||||
BooleanValueNode,
|
||||
NullValueNode,
|
||||
EnumValueNode,
|
||||
ListValueNode,
|
||||
ObjectValueNode,
|
||||
ObjectFieldNode,
|
||||
DirectiveNode,
|
||||
TypeNode,
|
||||
NamedTypeNode,
|
||||
ListTypeNode,
|
||||
NonNullTypeNode,
|
||||
TypeSystemDefinitionNode,
|
||||
SchemaDefinitionNode,
|
||||
OperationTypeDefinitionNode,
|
||||
TypeDefinitionNode,
|
||||
ScalarTypeDefinitionNode,
|
||||
ObjectTypeDefinitionNode,
|
||||
FieldDefinitionNode,
|
||||
InputValueDefinitionNode,
|
||||
InterfaceTypeDefinitionNode,
|
||||
UnionTypeDefinitionNode,
|
||||
EnumTypeDefinitionNode,
|
||||
EnumValueDefinitionNode,
|
||||
InputObjectTypeDefinitionNode,
|
||||
DirectiveDefinitionNode,
|
||||
TypeSystemExtensionNode,
|
||||
SchemaExtensionNode,
|
||||
TypeExtensionNode,
|
||||
ScalarTypeExtensionNode,
|
||||
ObjectTypeExtensionNode,
|
||||
InterfaceTypeExtensionNode,
|
||||
UnionTypeExtensionNode,
|
||||
EnumTypeExtensionNode,
|
||||
InputObjectTypeExtensionNode,
|
||||
} from './ast';
|
||||
|
||||
export {
|
||||
isDefinitionNode,
|
||||
isExecutableDefinitionNode,
|
||||
isSelectionNode,
|
||||
isValueNode,
|
||||
isTypeNode,
|
||||
isTypeSystemDefinitionNode,
|
||||
isTypeDefinitionNode,
|
||||
isTypeSystemExtensionNode,
|
||||
isTypeExtensionNode,
|
||||
} from './predicates';
|
||||
|
||||
export { DirectiveLocation } from './directiveLocation';
|
||||
export type { DirectiveLocationEnum } from './directiveLocation';
|
||||
11
node_modules/graphql/language/index.mjs
generated
vendored
Normal file
@@ -0,0 +1,11 @@
export { Source } from './source';
export { getLocation } from './location';
export { printLocation, printSourceLocation } from './printLocation';
export { Kind } from './kinds';
export { TokenKind } from './tokenKind';
export { createLexer } from './lexer';
export { parse, parseValue, parseType } from './parser';
export { print } from './printer';
export { visit, visitInParallel, visitWithTypeInfo, getVisitFn, BREAK } from './visitor';
export { isDefinitionNode, isExecutableDefinitionNode, isSelectionNode, isValueNode, isTypeNode, isTypeSystemDefinitionNode, isTypeDefinitionNode, isTypeSystemExtensionNode, isTypeExtensionNode } from './predicates';
export { DirectiveLocation } from './directiveLocation';
77
node_modules/graphql/language/kinds.d.ts
generated
vendored
Normal file
@@ -0,0 +1,77 @@
|
||||
/**
|
||||
* The set of allowed kind values for AST nodes.
|
||||
*/
|
||||
export const Kind: _Kind;
|
||||
|
||||
// @internal
|
||||
type _Kind = {
|
||||
// Name
|
||||
NAME: 'Name';
|
||||
|
||||
// Document
|
||||
DOCUMENT: 'Document';
|
||||
OPERATION_DEFINITION: 'OperationDefinition';
|
||||
VARIABLE_DEFINITION: 'VariableDefinition';
|
||||
SELECTION_SET: 'SelectionSet';
|
||||
FIELD: 'Field';
|
||||
ARGUMENT: 'Argument';
|
||||
|
||||
// Fragments
|
||||
FRAGMENT_SPREAD: 'FragmentSpread';
|
||||
INLINE_FRAGMENT: 'InlineFragment';
|
||||
FRAGMENT_DEFINITION: 'FragmentDefinition';
|
||||
|
||||
// Values
|
||||
VARIABLE: 'Variable';
|
||||
INT: 'IntValue';
|
||||
FLOAT: 'FloatValue';
|
||||
STRING: 'StringValue';
|
||||
BOOLEAN: 'BooleanValue';
|
||||
NULL: 'NullValue';
|
||||
ENUM: 'EnumValue';
|
||||
LIST: 'ListValue';
|
||||
OBJECT: 'ObjectValue';
|
||||
OBJECT_FIELD: 'ObjectField';
|
||||
|
||||
// Directives
|
||||
DIRECTIVE: 'Directive';
|
||||
|
||||
// Types
|
||||
NAMED_TYPE: 'NamedType';
|
||||
LIST_TYPE: 'ListType';
|
||||
NON_NULL_TYPE: 'NonNullType';
|
||||
|
||||
// Type System Definitions
|
||||
SCHEMA_DEFINITION: 'SchemaDefinition';
|
||||
OPERATION_TYPE_DEFINITION: 'OperationTypeDefinition';
|
||||
|
||||
// Type Definitions
|
||||
SCALAR_TYPE_DEFINITION: 'ScalarTypeDefinition';
|
||||
OBJECT_TYPE_DEFINITION: 'ObjectTypeDefinition';
|
||||
FIELD_DEFINITION: 'FieldDefinition';
|
||||
INPUT_VALUE_DEFINITION: 'InputValueDefinition';
|
||||
INTERFACE_TYPE_DEFINITION: 'InterfaceTypeDefinition';
|
||||
UNION_TYPE_DEFINITION: 'UnionTypeDefinition';
|
||||
ENUM_TYPE_DEFINITION: 'EnumTypeDefinition';
|
||||
ENUM_VALUE_DEFINITION: 'EnumValueDefinition';
|
||||
INPUT_OBJECT_TYPE_DEFINITION: 'InputObjectTypeDefinition';
|
||||
|
||||
// Directive Definitions
|
||||
DIRECTIVE_DEFINITION: 'DirectiveDefinition';
|
||||
|
||||
// Type System Extensions
|
||||
SCHEMA_EXTENSION: 'SchemaExtension';
|
||||
|
||||
// Type Extensions
|
||||
SCALAR_TYPE_EXTENSION: 'ScalarTypeExtension';
|
||||
OBJECT_TYPE_EXTENSION: 'ObjectTypeExtension';
|
||||
INTERFACE_TYPE_EXTENSION: 'InterfaceTypeExtension';
|
||||
UNION_TYPE_EXTENSION: 'UnionTypeExtension';
|
||||
ENUM_TYPE_EXTENSION: 'EnumTypeExtension';
|
||||
INPUT_OBJECT_TYPE_EXTENSION: 'InputObjectTypeExtension';
|
||||
};
|
||||
|
||||
/**
|
||||
* The enum type representing the possible kind values of AST nodes.
|
||||
*/
|
||||
export type KindEnum = _Kind[keyof _Kind];
|
||||
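A small sketch (an assumption, not from this commit) of how the Kind constants declared above are used to discriminate AST nodes by their kind field:

import { parse, Kind, ASTNode } from 'graphql/language';

// Kind.FIELD is the literal string 'Field', so it narrows on node.kind.
function isFieldNode(node: ASTNode): boolean {
  return node.kind === Kind.FIELD;
}

const doc = parse('{ viewer { id } }');
console.log(doc.kind === Kind.DOCUMENT); // true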
71
node_modules/graphql/language/kinds.js
generated
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.Kind = void 0;
|
||||
|
||||
/**
|
||||
* The set of allowed kind values for AST nodes.
|
||||
*/
|
||||
var Kind = Object.freeze({
|
||||
// Name
|
||||
NAME: 'Name',
|
||||
// Document
|
||||
DOCUMENT: 'Document',
|
||||
OPERATION_DEFINITION: 'OperationDefinition',
|
||||
VARIABLE_DEFINITION: 'VariableDefinition',
|
||||
SELECTION_SET: 'SelectionSet',
|
||||
FIELD: 'Field',
|
||||
ARGUMENT: 'Argument',
|
||||
// Fragments
|
||||
FRAGMENT_SPREAD: 'FragmentSpread',
|
||||
INLINE_FRAGMENT: 'InlineFragment',
|
||||
FRAGMENT_DEFINITION: 'FragmentDefinition',
|
||||
// Values
|
||||
VARIABLE: 'Variable',
|
||||
INT: 'IntValue',
|
||||
FLOAT: 'FloatValue',
|
||||
STRING: 'StringValue',
|
||||
BOOLEAN: 'BooleanValue',
|
||||
NULL: 'NullValue',
|
||||
ENUM: 'EnumValue',
|
||||
LIST: 'ListValue',
|
||||
OBJECT: 'ObjectValue',
|
||||
OBJECT_FIELD: 'ObjectField',
|
||||
// Directives
|
||||
DIRECTIVE: 'Directive',
|
||||
// Types
|
||||
NAMED_TYPE: 'NamedType',
|
||||
LIST_TYPE: 'ListType',
|
||||
NON_NULL_TYPE: 'NonNullType',
|
||||
// Type System Definitions
|
||||
SCHEMA_DEFINITION: 'SchemaDefinition',
|
||||
OPERATION_TYPE_DEFINITION: 'OperationTypeDefinition',
|
||||
// Type Definitions
|
||||
SCALAR_TYPE_DEFINITION: 'ScalarTypeDefinition',
|
||||
OBJECT_TYPE_DEFINITION: 'ObjectTypeDefinition',
|
||||
FIELD_DEFINITION: 'FieldDefinition',
|
||||
INPUT_VALUE_DEFINITION: 'InputValueDefinition',
|
||||
INTERFACE_TYPE_DEFINITION: 'InterfaceTypeDefinition',
|
||||
UNION_TYPE_DEFINITION: 'UnionTypeDefinition',
|
||||
ENUM_TYPE_DEFINITION: 'EnumTypeDefinition',
|
||||
ENUM_VALUE_DEFINITION: 'EnumValueDefinition',
|
||||
INPUT_OBJECT_TYPE_DEFINITION: 'InputObjectTypeDefinition',
|
||||
// Directive Definitions
|
||||
DIRECTIVE_DEFINITION: 'DirectiveDefinition',
|
||||
// Type System Extensions
|
||||
SCHEMA_EXTENSION: 'SchemaExtension',
|
||||
// Type Extensions
|
||||
SCALAR_TYPE_EXTENSION: 'ScalarTypeExtension',
|
||||
OBJECT_TYPE_EXTENSION: 'ObjectTypeExtension',
|
||||
INTERFACE_TYPE_EXTENSION: 'InterfaceTypeExtension',
|
||||
UNION_TYPE_EXTENSION: 'UnionTypeExtension',
|
||||
ENUM_TYPE_EXTENSION: 'EnumTypeExtension',
|
||||
INPUT_OBJECT_TYPE_EXTENSION: 'InputObjectTypeExtension'
|
||||
});
|
||||
/**
|
||||
* The enum type representing the possible kind values of AST nodes.
|
||||
*/
|
||||
|
||||
exports.Kind = Kind;
|
||||
76
node_modules/graphql/language/kinds.js.flow
generated
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
// @flow strict
|
||||
|
||||
/**
|
||||
* The set of allowed kind values for AST nodes.
|
||||
*/
|
||||
export const Kind = Object.freeze({
|
||||
// Name
|
||||
NAME: 'Name',
|
||||
|
||||
// Document
|
||||
DOCUMENT: 'Document',
|
||||
OPERATION_DEFINITION: 'OperationDefinition',
|
||||
VARIABLE_DEFINITION: 'VariableDefinition',
|
||||
SELECTION_SET: 'SelectionSet',
|
||||
FIELD: 'Field',
|
||||
ARGUMENT: 'Argument',
|
||||
|
||||
// Fragments
|
||||
FRAGMENT_SPREAD: 'FragmentSpread',
|
||||
INLINE_FRAGMENT: 'InlineFragment',
|
||||
FRAGMENT_DEFINITION: 'FragmentDefinition',
|
||||
|
||||
// Values
|
||||
VARIABLE: 'Variable',
|
||||
INT: 'IntValue',
|
||||
FLOAT: 'FloatValue',
|
||||
STRING: 'StringValue',
|
||||
BOOLEAN: 'BooleanValue',
|
||||
NULL: 'NullValue',
|
||||
ENUM: 'EnumValue',
|
||||
LIST: 'ListValue',
|
||||
OBJECT: 'ObjectValue',
|
||||
OBJECT_FIELD: 'ObjectField',
|
||||
|
||||
// Directives
|
||||
DIRECTIVE: 'Directive',
|
||||
|
||||
// Types
|
||||
NAMED_TYPE: 'NamedType',
|
||||
LIST_TYPE: 'ListType',
|
||||
NON_NULL_TYPE: 'NonNullType',
|
||||
|
||||
// Type System Definitions
|
||||
SCHEMA_DEFINITION: 'SchemaDefinition',
|
||||
OPERATION_TYPE_DEFINITION: 'OperationTypeDefinition',
|
||||
|
||||
// Type Definitions
|
||||
SCALAR_TYPE_DEFINITION: 'ScalarTypeDefinition',
|
||||
OBJECT_TYPE_DEFINITION: 'ObjectTypeDefinition',
|
||||
FIELD_DEFINITION: 'FieldDefinition',
|
||||
INPUT_VALUE_DEFINITION: 'InputValueDefinition',
|
||||
INTERFACE_TYPE_DEFINITION: 'InterfaceTypeDefinition',
|
||||
UNION_TYPE_DEFINITION: 'UnionTypeDefinition',
|
||||
ENUM_TYPE_DEFINITION: 'EnumTypeDefinition',
|
||||
ENUM_VALUE_DEFINITION: 'EnumValueDefinition',
|
||||
INPUT_OBJECT_TYPE_DEFINITION: 'InputObjectTypeDefinition',
|
||||
|
||||
// Directive Definitions
|
||||
DIRECTIVE_DEFINITION: 'DirectiveDefinition',
|
||||
|
||||
// Type System Extensions
|
||||
SCHEMA_EXTENSION: 'SchemaExtension',
|
||||
|
||||
// Type Extensions
|
||||
SCALAR_TYPE_EXTENSION: 'ScalarTypeExtension',
|
||||
OBJECT_TYPE_EXTENSION: 'ObjectTypeExtension',
|
||||
INTERFACE_TYPE_EXTENSION: 'InterfaceTypeExtension',
|
||||
UNION_TYPE_EXTENSION: 'UnionTypeExtension',
|
||||
ENUM_TYPE_EXTENSION: 'EnumTypeExtension',
|
||||
INPUT_OBJECT_TYPE_EXTENSION: 'InputObjectTypeExtension',
|
||||
});
|
||||
|
||||
/**
|
||||
* The enum type representing the possible kind values of AST nodes.
|
||||
*/
|
||||
export type KindEnum = $Values<typeof Kind>;
|
||||
62
node_modules/graphql/language/kinds.mjs
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
/**
|
||||
* The set of allowed kind values for AST nodes.
|
||||
*/
|
||||
export var Kind = Object.freeze({
|
||||
// Name
|
||||
NAME: 'Name',
|
||||
// Document
|
||||
DOCUMENT: 'Document',
|
||||
OPERATION_DEFINITION: 'OperationDefinition',
|
||||
VARIABLE_DEFINITION: 'VariableDefinition',
|
||||
SELECTION_SET: 'SelectionSet',
|
||||
FIELD: 'Field',
|
||||
ARGUMENT: 'Argument',
|
||||
// Fragments
|
||||
FRAGMENT_SPREAD: 'FragmentSpread',
|
||||
INLINE_FRAGMENT: 'InlineFragment',
|
||||
FRAGMENT_DEFINITION: 'FragmentDefinition',
|
||||
// Values
|
||||
VARIABLE: 'Variable',
|
||||
INT: 'IntValue',
|
||||
FLOAT: 'FloatValue',
|
||||
STRING: 'StringValue',
|
||||
BOOLEAN: 'BooleanValue',
|
||||
NULL: 'NullValue',
|
||||
ENUM: 'EnumValue',
|
||||
LIST: 'ListValue',
|
||||
OBJECT: 'ObjectValue',
|
||||
OBJECT_FIELD: 'ObjectField',
|
||||
// Directives
|
||||
DIRECTIVE: 'Directive',
|
||||
// Types
|
||||
NAMED_TYPE: 'NamedType',
|
||||
LIST_TYPE: 'ListType',
|
||||
NON_NULL_TYPE: 'NonNullType',
|
||||
// Type System Definitions
|
||||
SCHEMA_DEFINITION: 'SchemaDefinition',
|
||||
OPERATION_TYPE_DEFINITION: 'OperationTypeDefinition',
|
||||
// Type Definitions
|
||||
SCALAR_TYPE_DEFINITION: 'ScalarTypeDefinition',
|
||||
OBJECT_TYPE_DEFINITION: 'ObjectTypeDefinition',
|
||||
FIELD_DEFINITION: 'FieldDefinition',
|
||||
INPUT_VALUE_DEFINITION: 'InputValueDefinition',
|
||||
INTERFACE_TYPE_DEFINITION: 'InterfaceTypeDefinition',
|
||||
UNION_TYPE_DEFINITION: 'UnionTypeDefinition',
|
||||
ENUM_TYPE_DEFINITION: 'EnumTypeDefinition',
|
||||
ENUM_VALUE_DEFINITION: 'EnumValueDefinition',
|
||||
INPUT_OBJECT_TYPE_DEFINITION: 'InputObjectTypeDefinition',
|
||||
// Directive Definitions
|
||||
DIRECTIVE_DEFINITION: 'DirectiveDefinition',
|
||||
// Type System Extensions
|
||||
SCHEMA_EXTENSION: 'SchemaExtension',
|
||||
// Type Extensions
|
||||
SCALAR_TYPE_EXTENSION: 'ScalarTypeExtension',
|
||||
OBJECT_TYPE_EXTENSION: 'ObjectTypeExtension',
|
||||
INTERFACE_TYPE_EXTENSION: 'InterfaceTypeExtension',
|
||||
UNION_TYPE_EXTENSION: 'UnionTypeExtension',
|
||||
ENUM_TYPE_EXTENSION: 'EnumTypeExtension',
|
||||
INPUT_OBJECT_TYPE_EXTENSION: 'InputObjectTypeExtension'
|
||||
});
|
||||
/**
|
||||
* The enum type representing the possible kind values of AST nodes.
|
||||
*/
|
||||
60
node_modules/graphql/language/lexer.d.ts
generated
vendored
Normal file
@@ -0,0 +1,60 @@
import { syntaxError } from '../error';
import { Token } from './ast';
import { Source } from './source';

/**
 * Given a Source object, this returns a Lexer for that source.
 * A Lexer is a stateful stream generator in that every time
 * it is advanced, it returns the next token in the Source. Assuming the
 * source lexes, the final Token emitted by the lexer will be of kind
 * EOF, after which the lexer will repeatedly return the same EOF token
 * whenever called.
 */
export function createLexer<TOptions>(
  source: Source,
  options: TOptions,
): Lexer<TOptions>;

/**
 * The return type of createLexer.
 */
export interface Lexer<TOptions> {
  source: Source;
  options: TOptions;

  /**
   * The previously focused non-ignored token.
   */
  lastToken: Token;

  /**
   * The currently focused non-ignored token.
   */
  token: Token;

  /**
   * The (1-indexed) line containing the current token.
   */
  line: number;

  /**
   * The character offset at which the current line begins.
   */
  lineStart: number;

  /**
   * Advances the token stream to the next non-ignored token.
   */
  advance(): Token;

  /**
   * Looks ahead and returns the next non-ignored token, but does not change
   * the Lexer's state.
   */
  lookahead(): Token;
}

/**
 * @internal
 */
export function isPunctuatorToken(token: Token): boolean;
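A usage sketch (assumed consumer code, not part of this commit) exercising the Lexer interface declared above; lookahead peeks without consuming, advance consumes:

import { createLexer, Source, TokenKind } from 'graphql/language';

const lexer = createLexer(new Source('query { id }'), {});

const peeked = lexer.lookahead(); // inspects the next token without moving the lexer
const first = lexer.advance();    // consumes it
console.log(peeked === first);    // true: the same Token object
console.log(first.kind === TokenKind.NAME, first.value); // true 'query'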
627
node_modules/graphql/language/lexer.js
generated
vendored
Normal file
@@ -0,0 +1,627 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.createLexer = createLexer;
|
||||
exports.isPunctuatorToken = isPunctuatorToken;
|
||||
|
||||
var _defineToJSON = _interopRequireDefault(require("../jsutils/defineToJSON"));
|
||||
|
||||
var _syntaxError = require("../error/syntaxError");
|
||||
|
||||
var _blockString = require("./blockString");
|
||||
|
||||
var _tokenKind = require("./tokenKind");
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* Given a Source object, this returns a Lexer for that source.
|
||||
* A Lexer is a stateful stream generator in that every time
|
||||
* it is advanced, it returns the next token in the Source. Assuming the
|
||||
* source lexes, the final Token emitted by the lexer will be of kind
|
||||
* EOF, after which the lexer will repeatedly return the same EOF token
|
||||
* whenever called.
|
||||
*/
|
||||
function createLexer(source, options) {
|
||||
var startOfFileToken = new Tok(_tokenKind.TokenKind.SOF, 0, 0, 0, 0, null);
|
||||
var lexer = {
|
||||
source: source,
|
||||
options: options,
|
||||
lastToken: startOfFileToken,
|
||||
token: startOfFileToken,
|
||||
line: 1,
|
||||
lineStart: 0,
|
||||
advance: advanceLexer,
|
||||
lookahead: lookahead
|
||||
};
|
||||
return lexer;
|
||||
}
|
||||
|
||||
function advanceLexer() {
|
||||
this.lastToken = this.token;
|
||||
var token = this.token = this.lookahead();
|
||||
return token;
|
||||
}
|
||||
|
||||
function lookahead() {
|
||||
var token = this.token;
|
||||
|
||||
if (token.kind !== _tokenKind.TokenKind.EOF) {
|
||||
do {
|
||||
// Note: next is only mutable during parsing, so we cast to allow this.
|
||||
token = token.next || (token.next = readToken(this, token));
|
||||
} while (token.kind === _tokenKind.TokenKind.COMMENT);
|
||||
}
|
||||
|
||||
return token;
|
||||
}
|
||||
/**
|
||||
* The return type of createLexer.
|
||||
*/
|
||||
|
||||
|
||||
// @internal
|
||||
function isPunctuatorToken(token) {
|
||||
var kind = token.kind;
|
||||
return kind === _tokenKind.TokenKind.BANG || kind === _tokenKind.TokenKind.DOLLAR || kind === _tokenKind.TokenKind.AMP || kind === _tokenKind.TokenKind.PAREN_L || kind === _tokenKind.TokenKind.PAREN_R || kind === _tokenKind.TokenKind.SPREAD || kind === _tokenKind.TokenKind.COLON || kind === _tokenKind.TokenKind.EQUALS || kind === _tokenKind.TokenKind.AT || kind === _tokenKind.TokenKind.BRACKET_L || kind === _tokenKind.TokenKind.BRACKET_R || kind === _tokenKind.TokenKind.BRACE_L || kind === _tokenKind.TokenKind.PIPE || kind === _tokenKind.TokenKind.BRACE_R;
|
||||
}
|
||||
/**
|
||||
* Helper function for constructing the Token object.
|
||||
*/
|
||||
|
||||
|
||||
function Tok(kind, start, end, line, column, prev, value) {
|
||||
this.kind = kind;
|
||||
this.start = start;
|
||||
this.end = end;
|
||||
this.line = line;
|
||||
this.column = column;
|
||||
this.value = value;
|
||||
this.prev = prev;
|
||||
this.next = null;
|
||||
} // Print a simplified form when appearing in JSON/util.inspect.
|
||||
|
||||
|
||||
(0, _defineToJSON.default)(Tok, function () {
|
||||
return {
|
||||
kind: this.kind,
|
||||
value: this.value,
|
||||
line: this.line,
|
||||
column: this.column
|
||||
};
|
||||
});
|
||||
|
||||
function printCharCode(code) {
|
||||
return (// NaN/undefined represents access beyond the end of the file.
|
||||
isNaN(code) ? _tokenKind.TokenKind.EOF : // Trust JSON for ASCII.
|
||||
code < 0x007f ? JSON.stringify(String.fromCharCode(code)) : // Otherwise print the escaped form.
|
||||
"\"\\u".concat(('00' + code.toString(16).toUpperCase()).slice(-4), "\"")
|
||||
);
|
||||
}
|
||||
/**
|
||||
* Gets the next token from the source starting at the given position.
|
||||
*
|
||||
* This skips over whitespace until it finds the next lexable token, then lexes
|
||||
* punctuators immediately or calls the appropriate helper function for more
|
||||
* complicated tokens.
|
||||
*/
|
||||
|
||||
|
||||
function readToken(lexer, prev) {
|
||||
var source = lexer.source;
|
||||
var body = source.body;
|
||||
var bodyLength = body.length;
|
||||
var pos = positionAfterWhitespace(body, prev.end, lexer);
|
||||
var line = lexer.line;
|
||||
var col = 1 + pos - lexer.lineStart;
|
||||
|
||||
if (pos >= bodyLength) {
|
||||
return new Tok(_tokenKind.TokenKind.EOF, bodyLength, bodyLength, line, col, prev);
|
||||
}
|
||||
|
||||
var code = body.charCodeAt(pos); // SourceCharacter
|
||||
|
||||
switch (code) {
|
||||
// !
|
||||
case 33:
|
||||
return new Tok(_tokenKind.TokenKind.BANG, pos, pos + 1, line, col, prev);
|
||||
// #
|
||||
|
||||
case 35:
|
||||
return readComment(source, pos, line, col, prev);
|
||||
// $
|
||||
|
||||
case 36:
|
||||
return new Tok(_tokenKind.TokenKind.DOLLAR, pos, pos + 1, line, col, prev);
|
||||
// &
|
||||
|
||||
case 38:
|
||||
return new Tok(_tokenKind.TokenKind.AMP, pos, pos + 1, line, col, prev);
|
||||
// (
|
||||
|
||||
case 40:
|
||||
return new Tok(_tokenKind.TokenKind.PAREN_L, pos, pos + 1, line, col, prev);
|
||||
// )
|
||||
|
||||
case 41:
|
||||
return new Tok(_tokenKind.TokenKind.PAREN_R, pos, pos + 1, line, col, prev);
|
||||
// .
|
||||
|
||||
case 46:
|
||||
if (body.charCodeAt(pos + 1) === 46 && body.charCodeAt(pos + 2) === 46) {
|
||||
return new Tok(_tokenKind.TokenKind.SPREAD, pos, pos + 3, line, col, prev);
|
||||
}
|
||||
|
||||
break;
|
||||
// :
|
||||
|
||||
case 58:
|
||||
return new Tok(_tokenKind.TokenKind.COLON, pos, pos + 1, line, col, prev);
|
||||
// =
|
||||
|
||||
case 61:
|
||||
return new Tok(_tokenKind.TokenKind.EQUALS, pos, pos + 1, line, col, prev);
|
||||
// @
|
||||
|
||||
case 64:
|
||||
return new Tok(_tokenKind.TokenKind.AT, pos, pos + 1, line, col, prev);
|
||||
// [
|
||||
|
||||
case 91:
|
||||
return new Tok(_tokenKind.TokenKind.BRACKET_L, pos, pos + 1, line, col, prev);
|
||||
// ]
|
||||
|
||||
case 93:
|
||||
return new Tok(_tokenKind.TokenKind.BRACKET_R, pos, pos + 1, line, col, prev);
|
||||
// {
|
||||
|
||||
case 123:
|
||||
return new Tok(_tokenKind.TokenKind.BRACE_L, pos, pos + 1, line, col, prev);
|
||||
// |
|
||||
|
||||
case 124:
|
||||
return new Tok(_tokenKind.TokenKind.PIPE, pos, pos + 1, line, col, prev);
|
||||
// }
|
||||
|
||||
case 125:
|
||||
return new Tok(_tokenKind.TokenKind.BRACE_R, pos, pos + 1, line, col, prev);
|
||||
// A-Z _ a-z
|
||||
|
||||
case 65:
|
||||
case 66:
|
||||
case 67:
|
||||
case 68:
|
||||
case 69:
|
||||
case 70:
|
||||
case 71:
|
||||
case 72:
|
||||
case 73:
|
||||
case 74:
|
||||
case 75:
|
||||
case 76:
|
||||
case 77:
|
||||
case 78:
|
||||
case 79:
|
||||
case 80:
|
||||
case 81:
|
||||
case 82:
|
||||
case 83:
|
||||
case 84:
|
||||
case 85:
|
||||
case 86:
|
||||
case 87:
|
||||
case 88:
|
||||
case 89:
|
||||
case 90:
|
||||
case 95:
|
||||
case 97:
|
||||
case 98:
|
||||
case 99:
|
||||
case 100:
|
||||
case 101:
|
||||
case 102:
|
||||
case 103:
|
||||
case 104:
|
||||
case 105:
|
||||
case 106:
|
||||
case 107:
|
||||
case 108:
|
||||
case 109:
|
||||
case 110:
|
||||
case 111:
|
||||
case 112:
|
||||
case 113:
|
||||
case 114:
|
||||
case 115:
|
||||
case 116:
|
||||
case 117:
|
||||
case 118:
|
||||
case 119:
|
||||
case 120:
|
||||
case 121:
|
||||
case 122:
|
||||
return readName(source, pos, line, col, prev);
|
||||
// - 0-9
|
||||
|
||||
case 45:
|
||||
case 48:
|
||||
case 49:
|
||||
case 50:
|
||||
case 51:
|
||||
case 52:
|
||||
case 53:
|
||||
case 54:
|
||||
case 55:
|
||||
case 56:
|
||||
case 57:
|
||||
return readNumber(source, pos, code, line, col, prev);
|
||||
// "
|
||||
|
||||
case 34:
|
||||
if (body.charCodeAt(pos + 1) === 34 && body.charCodeAt(pos + 2) === 34) {
|
||||
return readBlockString(source, pos, line, col, prev, lexer);
|
||||
}
|
||||
|
||||
return readString(source, pos, line, col, prev);
|
||||
}
|
||||
|
||||
throw (0, _syntaxError.syntaxError)(source, pos, unexpectedCharacterMessage(code));
|
||||
}
|
||||
/**
|
||||
* Report a message that an unexpected character was encountered.
|
||||
*/
|
||||
|
||||
|
||||
function unexpectedCharacterMessage(code) {
|
||||
if (code < 0x0020 && code !== 0x0009 && code !== 0x000a && code !== 0x000d) {
|
||||
return "Cannot contain the invalid character ".concat(printCharCode(code), ".");
|
||||
}
|
||||
|
||||
if (code === 39) {
|
||||
// '
|
||||
return 'Unexpected single quote character (\'), did you mean to use a double quote (")?';
|
||||
}
|
||||
|
||||
return "Cannot parse the unexpected character ".concat(printCharCode(code), ".");
|
||||
}
|
||||
/**
|
||||
* Reads from body starting at startPosition until it finds a non-whitespace
|
||||
* character, then returns the position of that character for lexing.
|
||||
*/
|
||||
|
||||
|
||||
function positionAfterWhitespace(body, startPosition, lexer) {
|
||||
var bodyLength = body.length;
|
||||
var position = startPosition;
|
||||
|
||||
while (position < bodyLength) {
|
||||
var code = body.charCodeAt(position); // tab | space | comma | BOM
|
||||
|
||||
if (code === 9 || code === 32 || code === 44 || code === 0xfeff) {
|
||||
++position;
|
||||
} else if (code === 10) {
|
||||
// new line
|
||||
++position;
|
||||
++lexer.line;
|
||||
lexer.lineStart = position;
|
||||
} else if (code === 13) {
|
||||
// carriage return
|
||||
if (body.charCodeAt(position + 1) === 10) {
|
||||
position += 2;
|
||||
} else {
|
||||
++position;
|
||||
}
|
||||
|
||||
++lexer.line;
|
||||
lexer.lineStart = position;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return position;
|
||||
}
|
||||
/**
|
||||
* Reads a comment token from the source file.
|
||||
*
|
||||
* #[\u0009\u0020-\uFFFF]*
|
||||
*/
|
||||
|
||||
|
||||
function readComment(source, start, line, col, prev) {
|
||||
var body = source.body;
|
||||
var code;
|
||||
var position = start;
|
||||
|
||||
do {
|
||||
code = body.charCodeAt(++position);
|
||||
} while (!isNaN(code) && ( // SourceCharacter but not LineTerminator
|
||||
code > 0x001f || code === 0x0009));
|
||||
|
||||
return new Tok(_tokenKind.TokenKind.COMMENT, start, position, line, col, prev, body.slice(start + 1, position));
|
||||
}
|
||||
/**
|
||||
* Reads a number token from the source file, either a float
|
||||
* or an int depending on whether a decimal point appears.
|
||||
*
|
||||
* Int: -?(0|[1-9][0-9]*)
|
||||
* Float: -?(0|[1-9][0-9]*)(\.[0-9]+)?((E|e)(+|-)?[0-9]+)?
|
||||
*/
|
||||
|
||||
|
||||
function readNumber(source, start, firstCode, line, col, prev) {
|
||||
var body = source.body;
|
||||
var code = firstCode;
|
||||
var position = start;
|
||||
var isFloat = false;
|
||||
|
||||
if (code === 45) {
|
||||
// -
|
||||
code = body.charCodeAt(++position);
|
||||
}
|
||||
|
||||
if (code === 48) {
|
||||
// 0
|
||||
code = body.charCodeAt(++position);
|
||||
|
||||
if (code >= 48 && code <= 57) {
|
||||
throw (0, _syntaxError.syntaxError)(source, position, "Invalid number, unexpected digit after 0: ".concat(printCharCode(code), "."));
|
||||
}
|
||||
} else {
|
||||
position = readDigits(source, position, code);
|
||||
code = body.charCodeAt(position);
|
||||
}
|
||||
|
||||
if (code === 46) {
|
||||
// .
|
||||
isFloat = true;
|
||||
code = body.charCodeAt(++position);
|
||||
position = readDigits(source, position, code);
|
||||
code = body.charCodeAt(position);
|
||||
}
|
||||
|
||||
if (code === 69 || code === 101) {
|
||||
// E e
|
||||
isFloat = true;
|
||||
code = body.charCodeAt(++position);
|
||||
|
||||
if (code === 43 || code === 45) {
|
||||
// + -
|
||||
code = body.charCodeAt(++position);
|
||||
}
|
||||
|
||||
position = readDigits(source, position, code);
|
||||
code = body.charCodeAt(position);
|
||||
} // Numbers cannot be followed by . or e
|
||||
|
||||
|
||||
if (code === 46 || code === 69 || code === 101) {
|
||||
throw (0, _syntaxError.syntaxError)(source, position, "Invalid number, expected digit but got: ".concat(printCharCode(code), "."));
|
||||
}
|
||||
|
||||
return new Tok(isFloat ? _tokenKind.TokenKind.FLOAT : _tokenKind.TokenKind.INT, start, position, line, col, prev, body.slice(start, position));
|
||||
}
|
||||
/**
|
||||
* Returns the new position in the source after reading digits.
|
||||
*/
|
||||
|
||||
|
||||
function readDigits(source, start, firstCode) {
|
||||
var body = source.body;
|
||||
var position = start;
|
||||
var code = firstCode;
|
||||
|
||||
if (code >= 48 && code <= 57) {
|
||||
// 0 - 9
|
||||
do {
|
||||
code = body.charCodeAt(++position);
|
||||
} while (code >= 48 && code <= 57); // 0 - 9
|
||||
|
||||
|
||||
return position;
|
||||
}
|
||||
|
||||
throw (0, _syntaxError.syntaxError)(source, position, "Invalid number, expected digit but got: ".concat(printCharCode(code), "."));
|
||||
}
|
||||
/**
|
||||
* Reads a string token from the source file.
|
||||
*
|
||||
* "([^"\\\u000A\u000D]|(\\(u[0-9a-fA-F]{4}|["\\/bfnrt])))*"
|
||||
*/
|
||||
|
||||
|
||||
function readString(source, start, line, col, prev) {
|
||||
var body = source.body;
|
||||
var position = start + 1;
|
||||
var chunkStart = position;
|
||||
var code = 0;
|
||||
var value = '';
|
||||
|
||||
while (position < body.length && !isNaN(code = body.charCodeAt(position)) && // not LineTerminator
|
||||
code !== 0x000a && code !== 0x000d) {
|
||||
// Closing Quote (")
|
||||
if (code === 34) {
|
||||
value += body.slice(chunkStart, position);
|
||||
return new Tok(_tokenKind.TokenKind.STRING, start, position + 1, line, col, prev, value);
|
||||
} // SourceCharacter
|
||||
|
||||
|
||||
if (code < 0x0020 && code !== 0x0009) {
|
||||
throw (0, _syntaxError.syntaxError)(source, position, "Invalid character within String: ".concat(printCharCode(code), "."));
|
||||
}
|
||||
|
||||
++position;
|
||||
|
||||
if (code === 92) {
|
||||
// \
|
||||
value += body.slice(chunkStart, position - 1);
|
||||
code = body.charCodeAt(position);
|
||||
|
||||
switch (code) {
|
||||
case 34:
|
||||
value += '"';
|
||||
break;
|
||||
|
||||
case 47:
|
||||
value += '/';
|
||||
break;
|
||||
|
||||
case 92:
|
||||
value += '\\';
|
||||
break;
|
||||
|
||||
case 98:
|
||||
value += '\b';
|
||||
break;
|
||||
|
||||
case 102:
|
||||
value += '\f';
|
||||
break;
|
||||
|
||||
case 110:
|
||||
value += '\n';
|
||||
break;
|
||||
|
||||
case 114:
|
||||
value += '\r';
|
||||
break;
|
||||
|
||||
case 116:
|
||||
value += '\t';
|
||||
break;
|
||||
|
||||
case 117:
|
||||
{
|
||||
// uXXXX
|
||||
var charCode = uniCharCode(body.charCodeAt(position + 1), body.charCodeAt(position + 2), body.charCodeAt(position + 3), body.charCodeAt(position + 4));
|
||||
|
||||
if (charCode < 0) {
|
||||
var invalidSequence = body.slice(position + 1, position + 5);
|
||||
throw (0, _syntaxError.syntaxError)(source, position, "Invalid character escape sequence: \\u".concat(invalidSequence, "."));
|
||||
}
|
||||
|
||||
value += String.fromCharCode(charCode);
|
||||
position += 4;
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
throw (0, _syntaxError.syntaxError)(source, position, "Invalid character escape sequence: \\".concat(String.fromCharCode(code), "."));
|
||||
}
|
||||
|
||||
++position;
|
||||
chunkStart = position;
|
||||
}
|
||||
}
|
||||
|
||||
throw (0, _syntaxError.syntaxError)(source, position, 'Unterminated string.');
|
||||
}
|
||||
/**
|
||||
* Reads a block string token from the source file.
|
||||
*
|
||||
* """("?"?(\\"""|\\(?!=""")|[^"\\]))*"""
|
||||
*/
|
||||
|
||||
|
||||
function readBlockString(source, start, line, col, prev, lexer) {
|
||||
var body = source.body;
|
||||
var position = start + 3;
|
||||
var chunkStart = position;
|
||||
var code = 0;
|
||||
var rawValue = '';
|
||||
|
||||
while (position < body.length && !isNaN(code = body.charCodeAt(position))) {
|
||||
// Closing Triple-Quote (""")
|
||||
if (code === 34 && body.charCodeAt(position + 1) === 34 && body.charCodeAt(position + 2) === 34) {
|
||||
rawValue += body.slice(chunkStart, position);
|
||||
return new Tok(_tokenKind.TokenKind.BLOCK_STRING, start, position + 3, line, col, prev, (0, _blockString.dedentBlockStringValue)(rawValue));
|
||||
} // SourceCharacter
|
||||
|
||||
|
||||
if (code < 0x0020 && code !== 0x0009 && code !== 0x000a && code !== 0x000d) {
|
||||
throw (0, _syntaxError.syntaxError)(source, position, "Invalid character within String: ".concat(printCharCode(code), "."));
|
||||
}
|
||||
|
||||
if (code === 10) {
|
||||
// new line
|
||||
++position;
|
||||
++lexer.line;
|
||||
lexer.lineStart = position;
|
||||
} else if (code === 13) {
|
||||
// carriage return
|
||||
if (body.charCodeAt(position + 1) === 10) {
|
||||
position += 2;
|
||||
} else {
|
||||
++position;
|
||||
}
|
||||
|
||||
++lexer.line;
|
||||
lexer.lineStart = position;
|
||||
} else if ( // Escape Triple-Quote (\""")
|
||||
code === 92 && body.charCodeAt(position + 1) === 34 && body.charCodeAt(position + 2) === 34 && body.charCodeAt(position + 3) === 34) {
|
||||
rawValue += body.slice(chunkStart, position) + '"""';
|
||||
position += 4;
|
||||
chunkStart = position;
|
||||
} else {
|
||||
++position;
|
||||
}
|
||||
}
|
||||
|
||||
throw (0, _syntaxError.syntaxError)(source, position, 'Unterminated string.');
|
||||
}
|
||||
/**
|
||||
* Converts four hexadecimal chars to the integer that the
|
||||
* string represents. For example, uniCharCode('0','0','0','f')
|
||||
* will return 15, and uniCharCode('0','0','f','f') returns 255.
|
||||
*
|
||||
* Returns a negative number on error, if a char was invalid.
|
||||
*
|
||||
* This is implemented by noting that char2hex() returns -1 on error,
|
||||
* which means the result of ORing the char2hex() will also be negative.
|
||||
*/
|
||||
|
||||
|
||||
function uniCharCode(a, b, c, d) {
|
||||
return char2hex(a) << 12 | char2hex(b) << 8 | char2hex(c) << 4 | char2hex(d);
|
||||
}
|
||||
/**
|
||||
* Converts a hex character to its integer value.
|
||||
* '0' becomes 0, '9' becomes 9
|
||||
* 'A' becomes 10, 'F' becomes 15
|
||||
* 'a' becomes 10, 'f' becomes 15
|
||||
*
|
||||
* Returns -1 on error.
|
||||
*/
|
||||
|
||||
|
||||
function char2hex(a) {
|
||||
return a >= 48 && a <= 57 ? a - 48 // 0-9
|
||||
: a >= 65 && a <= 70 ? a - 55 // A-F
|
||||
: a >= 97 && a <= 102 ? a - 87 // a-f
|
||||
: -1;
|
||||
}
|
||||
/**
|
||||
* Reads an alphanumeric + underscore name from the source.
|
||||
*
|
||||
* [_A-Za-z][_0-9A-Za-z]*
|
||||
*/
|
||||
|
||||
|
||||
function readName(source, start, line, col, prev) {
|
||||
var body = source.body;
|
||||
var bodyLength = body.length;
|
||||
var position = start + 1;
|
||||
var code = 0;
|
||||
|
||||
while (position !== bodyLength && !isNaN(code = body.charCodeAt(position)) && (code === 95 || // _
|
||||
code >= 48 && code <= 57 || // 0-9
|
||||
code >= 65 && code <= 90 || // A-Z
|
||||
code >= 97 && code <= 122) // a-z
|
||||
) {
|
||||
++position;
|
||||
}
|
||||
|
||||
return new Tok(_tokenKind.TokenKind.NAME, start, position, line, col, prev, body.slice(start, position));
|
||||
}
|
||||
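A hedged sketch (assumption, not part of this commit) of the behaviour implemented above: positionAfterWhitespace treats commas and the BOM as insignificant, and lookahead skips Comment tokens, so neither produces a token for callers.

import { createLexer, Source, TokenKind } from 'graphql/language';

const lexer = createLexer(new Source('# greeting\n{ a, b }'), {});
const kinds: string[] = [];
let tok = lexer.advance();
while (tok.kind !== TokenKind.EOF) {
  kinds.push(tok.kind);
  tok = lexer.advance();
}
console.log(kinds); // ['{', 'Name', 'Name', '}'] (the comment and comma yield no tokens)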
740
node_modules/graphql/language/lexer.js.flow
generated
vendored
Normal file
@@ -0,0 +1,740 @@
|
||||
// @flow strict
|
||||
|
||||
import defineToJSON from '../jsutils/defineToJSON';
|
||||
|
||||
import { syntaxError } from '../error/syntaxError';
|
||||
|
||||
import { type Token } from './ast';
|
||||
import { type Source } from './source';
|
||||
import { dedentBlockStringValue } from './blockString';
|
||||
import { type TokenKindEnum, TokenKind } from './tokenKind';
|
||||
|
||||
/**
|
||||
* Given a Source object, this returns a Lexer for that source.
|
||||
* A Lexer is a stateful stream generator in that every time
|
||||
* it is advanced, it returns the next token in the Source. Assuming the
|
||||
* source lexes, the final Token emitted by the lexer will be of kind
|
||||
* EOF, after which the lexer will repeatedly return the same EOF token
|
||||
* whenever called.
|
||||
*/
|
||||
export function createLexer<TOptions>(
|
||||
source: Source,
|
||||
options: TOptions,
|
||||
): Lexer<TOptions> {
|
||||
const startOfFileToken = new Tok(TokenKind.SOF, 0, 0, 0, 0, null);
|
||||
const lexer: Lexer<TOptions> = {
|
||||
source,
|
||||
options,
|
||||
lastToken: startOfFileToken,
|
||||
token: startOfFileToken,
|
||||
line: 1,
|
||||
lineStart: 0,
|
||||
advance: advanceLexer,
|
||||
lookahead,
|
||||
};
|
||||
return lexer;
|
||||
}
|
||||
|
||||
function advanceLexer() {
|
||||
this.lastToken = this.token;
|
||||
const token = (this.token = this.lookahead());
|
||||
return token;
|
||||
}
|
||||
|
||||
function lookahead() {
|
||||
let token = this.token;
|
||||
if (token.kind !== TokenKind.EOF) {
|
||||
do {
|
||||
// Note: next is only mutable during parsing, so we cast to allow this.
|
||||
token = token.next || ((token: any).next = readToken(this, token));
|
||||
} while (token.kind === TokenKind.COMMENT);
|
||||
}
|
||||
return token;
|
||||
}
|
||||
|
||||
/**
|
||||
* The return type of createLexer.
|
||||
*/
|
||||
export type Lexer<TOptions> = {
|
||||
source: Source,
|
||||
options: TOptions,
|
||||
|
||||
/**
|
||||
* The previously focused non-ignored token.
|
||||
*/
|
||||
lastToken: Token,
|
||||
|
||||
/**
|
||||
* The currently focused non-ignored token.
|
||||
*/
|
||||
token: Token,
|
||||
|
||||
/**
|
||||
* The (1-indexed) line containing the current token.
|
||||
*/
|
||||
line: number,
|
||||
|
||||
/**
|
||||
* The character offset at which the current line begins.
|
||||
*/
|
||||
lineStart: number,
|
||||
|
||||
/**
|
||||
* Advances the token stream to the next non-ignored token.
|
||||
*/
|
||||
advance(): Token,
|
||||
|
||||
/**
|
||||
* Looks ahead and returns the next non-ignored token, but does not change
|
||||
* the Lexer's state.
|
||||
*/
|
||||
lookahead(): Token,
|
||||
|
||||
...
|
||||
};
|
||||
|
||||
// @internal
|
||||
export function isPunctuatorToken(token: Token) {
|
||||
const kind = token.kind;
|
||||
return (
|
||||
kind === TokenKind.BANG ||
|
||||
kind === TokenKind.DOLLAR ||
|
||||
kind === TokenKind.AMP ||
|
||||
kind === TokenKind.PAREN_L ||
|
||||
kind === TokenKind.PAREN_R ||
|
||||
kind === TokenKind.SPREAD ||
|
||||
kind === TokenKind.COLON ||
|
||||
kind === TokenKind.EQUALS ||
|
||||
kind === TokenKind.AT ||
|
||||
kind === TokenKind.BRACKET_L ||
|
||||
kind === TokenKind.BRACKET_R ||
|
||||
kind === TokenKind.BRACE_L ||
|
||||
kind === TokenKind.PIPE ||
|
||||
kind === TokenKind.BRACE_R
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function for constructing the Token object.
|
||||
*/
|
||||
function Tok(
|
||||
kind: TokenKindEnum,
|
||||
start: number,
|
||||
end: number,
|
||||
line: number,
|
||||
column: number,
|
||||
prev: Token | null,
|
||||
value?: string,
|
||||
) {
|
||||
this.kind = kind;
|
||||
this.start = start;
|
||||
this.end = end;
|
||||
this.line = line;
|
||||
this.column = column;
|
||||
this.value = value;
|
||||
this.prev = prev;
|
||||
this.next = null;
|
||||
}
|
||||
|
||||
// Print a simplified form when appearing in JSON/util.inspect.
|
||||
defineToJSON(Tok, function() {
|
||||
return {
|
||||
kind: this.kind,
|
||||
value: this.value,
|
||||
line: this.line,
|
||||
column: this.column,
|
||||
};
|
||||
});
|
||||
|
||||
function printCharCode(code) {
|
||||
return (
|
||||
// NaN/undefined represents access beyond the end of the file.
|
||||
isNaN(code)
|
||||
? TokenKind.EOF
|
||||
: // Trust JSON for ASCII.
|
||||
code < 0x007f
|
||||
? JSON.stringify(String.fromCharCode(code))
|
||||
: // Otherwise print the escaped form.
|
||||
`"\\u${('00' + code.toString(16).toUpperCase()).slice(-4)}"`
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the next token from the source starting at the given position.
|
||||
*
|
||||
* This skips over whitespace until it finds the next lexable token, then lexes
|
||||
* punctuators immediately or calls the appropriate helper function for more
|
||||
* complicated tokens.
|
||||
*/
|
||||
function readToken(lexer: Lexer<mixed>, prev: Token): Token {
|
||||
const source = lexer.source;
|
||||
const body = source.body;
|
||||
const bodyLength = body.length;
|
||||
|
||||
const pos = positionAfterWhitespace(body, prev.end, lexer);
|
||||
const line = lexer.line;
|
||||
const col = 1 + pos - lexer.lineStart;
|
||||
|
||||
if (pos >= bodyLength) {
|
||||
return new Tok(TokenKind.EOF, bodyLength, bodyLength, line, col, prev);
|
||||
}
|
||||
|
||||
const code = body.charCodeAt(pos);
|
||||
|
||||
// SourceCharacter
|
||||
switch (code) {
|
||||
// !
|
||||
case 33:
|
||||
return new Tok(TokenKind.BANG, pos, pos + 1, line, col, prev);
|
||||
// #
|
||||
case 35:
|
||||
return readComment(source, pos, line, col, prev);
|
||||
// $
|
||||
case 36:
|
||||
return new Tok(TokenKind.DOLLAR, pos, pos + 1, line, col, prev);
|
||||
// &
|
||||
case 38:
|
||||
return new Tok(TokenKind.AMP, pos, pos + 1, line, col, prev);
|
||||
// (
|
||||
case 40:
|
||||
return new Tok(TokenKind.PAREN_L, pos, pos + 1, line, col, prev);
|
||||
// )
|
||||
case 41:
|
||||
return new Tok(TokenKind.PAREN_R, pos, pos + 1, line, col, prev);
|
||||
// .
|
||||
case 46:
|
||||
if (body.charCodeAt(pos + 1) === 46 && body.charCodeAt(pos + 2) === 46) {
|
||||
return new Tok(TokenKind.SPREAD, pos, pos + 3, line, col, prev);
|
||||
}
|
||||
break;
|
||||
// :
|
||||
case 58:
|
||||
return new Tok(TokenKind.COLON, pos, pos + 1, line, col, prev);
|
||||
// =
|
||||
case 61:
|
||||
return new Tok(TokenKind.EQUALS, pos, pos + 1, line, col, prev);
|
||||
// @
|
||||
case 64:
|
||||
return new Tok(TokenKind.AT, pos, pos + 1, line, col, prev);
|
||||
// [
|
||||
case 91:
|
||||
return new Tok(TokenKind.BRACKET_L, pos, pos + 1, line, col, prev);
|
||||
// ]
|
||||
case 93:
|
||||
return new Tok(TokenKind.BRACKET_R, pos, pos + 1, line, col, prev);
|
||||
// {
|
||||
case 123:
|
||||
return new Tok(TokenKind.BRACE_L, pos, pos + 1, line, col, prev);
|
||||
// |
|
||||
case 124:
|
||||
return new Tok(TokenKind.PIPE, pos, pos + 1, line, col, prev);
|
||||
// }
|
||||
case 125:
|
||||
return new Tok(TokenKind.BRACE_R, pos, pos + 1, line, col, prev);
|
||||
// A-Z _ a-z
|
||||
case 65:
|
||||
case 66:
|
||||
case 67:
|
||||
case 68:
|
||||
case 69:
|
||||
case 70:
|
||||
case 71:
|
||||
case 72:
|
||||
case 73:
|
||||
case 74:
|
||||
case 75:
|
||||
case 76:
|
||||
case 77:
|
||||
case 78:
|
||||
case 79:
|
||||
case 80:
|
||||
case 81:
|
||||
case 82:
|
||||
case 83:
|
||||
case 84:
|
||||
case 85:
|
||||
case 86:
|
||||
case 87:
|
||||
case 88:
|
||||
case 89:
|
||||
case 90:
|
||||
case 95:
|
||||
case 97:
|
||||
case 98:
|
||||
case 99:
|
||||
case 100:
|
||||
case 101:
|
||||
case 102:
|
||||
case 103:
|
||||
case 104:
|
||||
case 105:
|
||||
case 106:
|
||||
case 107:
|
||||
case 108:
|
||||
case 109:
|
||||
case 110:
|
||||
case 111:
|
||||
case 112:
|
||||
case 113:
|
||||
case 114:
|
||||
case 115:
|
||||
case 116:
|
||||
case 117:
|
||||
case 118:
|
||||
case 119:
|
||||
case 120:
|
||||
case 121:
|
||||
case 122:
|
||||
return readName(source, pos, line, col, prev);
|
||||
// - 0-9
|
||||
case 45:
|
||||
case 48:
|
||||
case 49:
|
||||
case 50:
|
||||
case 51:
|
||||
case 52:
|
||||
case 53:
|
||||
case 54:
|
||||
case 55:
|
||||
case 56:
|
||||
case 57:
|
||||
return readNumber(source, pos, code, line, col, prev);
|
||||
// "
|
||||
case 34:
|
||||
if (body.charCodeAt(pos + 1) === 34 && body.charCodeAt(pos + 2) === 34) {
|
||||
return readBlockString(source, pos, line, col, prev, lexer);
|
||||
}
|
||||
return readString(source, pos, line, col, prev);
|
||||
}
|
||||
|
||||
throw syntaxError(source, pos, unexpectedCharacterMessage(code));
|
||||
}
|
||||
|
||||
/**
|
||||
* Report a message that an unexpected character was encountered.
|
||||
*/
|
||||
function unexpectedCharacterMessage(code) {
|
||||
if (code < 0x0020 && code !== 0x0009 && code !== 0x000a && code !== 0x000d) {
|
||||
return `Cannot contain the invalid character ${printCharCode(code)}.`;
|
||||
}
|
||||
|
||||
if (code === 39) {
|
||||
// '
|
||||
return 'Unexpected single quote character (\'), did you mean to use a double quote (")?';
|
||||
}
|
||||
|
||||
return `Cannot parse the unexpected character ${printCharCode(code)}.`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads from body starting at startPosition until it finds a non-whitespace
|
||||
* character, then returns the position of that character for lexing.
|
||||
*/
|
||||
function positionAfterWhitespace(
|
||||
body: string,
|
||||
startPosition: number,
|
||||
lexer: Lexer<mixed>,
|
||||
): number {
|
||||
const bodyLength = body.length;
|
||||
let position = startPosition;
|
||||
while (position < bodyLength) {
|
||||
const code = body.charCodeAt(position);
|
||||
// tab | space | comma | BOM
|
||||
if (code === 9 || code === 32 || code === 44 || code === 0xfeff) {
|
||||
++position;
|
||||
} else if (code === 10) {
|
||||
// new line
|
||||
++position;
|
||||
++lexer.line;
|
||||
lexer.lineStart = position;
|
||||
} else if (code === 13) {
|
||||
// carriage return
|
||||
if (body.charCodeAt(position + 1) === 10) {
|
||||
position += 2;
|
||||
} else {
|
||||
++position;
|
||||
}
|
||||
++lexer.line;
|
||||
lexer.lineStart = position;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return position;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a comment token from the source file.
|
||||
*
|
||||
* #[\u0009\u0020-\uFFFF]*
|
||||
*/
|
||||
function readComment(source, start, line, col, prev): Token {
|
||||
const body = source.body;
|
||||
let code;
|
||||
let position = start;
|
||||
|
||||
do {
|
||||
code = body.charCodeAt(++position);
|
||||
} while (
|
||||
!isNaN(code) &&
|
||||
// SourceCharacter but not LineTerminator
|
||||
(code > 0x001f || code === 0x0009)
|
||||
);
|
||||
|
||||
return new Tok(
|
||||
TokenKind.COMMENT,
|
||||
start,
|
||||
position,
|
||||
line,
|
||||
col,
|
||||
prev,
|
||||
body.slice(start + 1, position),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a number token from the source file, either a float
|
||||
* or an int depending on whether a decimal point appears.
|
||||
*
|
||||
* Int: -?(0|[1-9][0-9]*)
|
||||
* Float: -?(0|[1-9][0-9]*)(\.[0-9]+)?((E|e)(+|-)?[0-9]+)?
|
||||
*/
|
||||
function readNumber(source, start, firstCode, line, col, prev): Token {
|
||||
const body = source.body;
|
||||
let code = firstCode;
|
||||
let position = start;
|
||||
let isFloat = false;
|
||||
|
||||
if (code === 45) {
|
||||
// -
|
||||
code = body.charCodeAt(++position);
|
||||
}
|
||||
|
||||
if (code === 48) {
|
||||
// 0
|
||||
code = body.charCodeAt(++position);
|
||||
if (code >= 48 && code <= 57) {
|
||||
throw syntaxError(
|
||||
source,
|
||||
position,
|
||||
`Invalid number, unexpected digit after 0: ${printCharCode(code)}.`,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
position = readDigits(source, position, code);
|
||||
code = body.charCodeAt(position);
|
||||
}
|
||||
|
||||
if (code === 46) {
|
||||
// .
|
||||
isFloat = true;
|
||||
|
||||
code = body.charCodeAt(++position);
|
||||
position = readDigits(source, position, code);
|
||||
code = body.charCodeAt(position);
|
||||
}
|
||||
|
||||
if (code === 69 || code === 101) {
|
||||
// E e
|
||||
isFloat = true;
|
||||
|
||||
code = body.charCodeAt(++position);
|
||||
if (code === 43 || code === 45) {
|
||||
// + -
|
||||
code = body.charCodeAt(++position);
|
||||
}
|
||||
position = readDigits(source, position, code);
|
||||
code = body.charCodeAt(position);
|
||||
}
|
||||
|
||||
// Numbers cannot be followed by . or e
|
||||
if (code === 46 || code === 69 || code === 101) {
|
||||
throw syntaxError(
|
||||
source,
|
||||
position,
|
||||
`Invalid number, expected digit but got: ${printCharCode(code)}.`,
|
||||
);
|
||||
}
|
||||
|
||||
return new Tok(
|
||||
isFloat ? TokenKind.FLOAT : TokenKind.INT,
|
||||
start,
|
||||
position,
|
||||
line,
|
||||
col,
|
||||
prev,
|
||||
body.slice(start, position),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the new position in the source after reading digits.
|
||||
*/
|
||||
function readDigits(source, start, firstCode) {
|
||||
const body = source.body;
|
||||
let position = start;
|
||||
let code = firstCode;
|
||||
if (code >= 48 && code <= 57) {
|
||||
// 0 - 9
|
||||
do {
|
||||
code = body.charCodeAt(++position);
|
||||
} while (code >= 48 && code <= 57); // 0 - 9
|
||||
return position;
|
||||
}
|
||||
throw syntaxError(
|
||||
source,
|
||||
position,
|
||||
`Invalid number, expected digit but got: ${printCharCode(code)}.`,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a string token from the source file.
|
||||
*
|
||||
* "([^"\\\u000A\u000D]|(\\(u[0-9a-fA-F]{4}|["\\/bfnrt])))*"
|
||||
*/
|
||||
function readString(source, start, line, col, prev): Token {
|
||||
const body = source.body;
|
||||
let position = start + 1;
|
||||
let chunkStart = position;
|
||||
let code = 0;
|
||||
let value = '';
|
||||
|
||||
while (
|
||||
position < body.length &&
|
||||
!isNaN((code = body.charCodeAt(position))) &&
|
||||
// not LineTerminator
|
||||
code !== 0x000a &&
|
||||
code !== 0x000d
|
||||
) {
|
||||
// Closing Quote (")
|
||||
if (code === 34) {
|
||||
value += body.slice(chunkStart, position);
|
||||
return new Tok(
|
||||
TokenKind.STRING,
|
||||
start,
|
||||
position + 1,
|
||||
line,
|
||||
col,
|
||||
prev,
|
||||
value,
|
||||
);
|
||||
}
|
||||
|
||||
// SourceCharacter
|
||||
if (code < 0x0020 && code !== 0x0009) {
|
||||
throw syntaxError(
|
||||
source,
|
||||
position,
|
||||
`Invalid character within String: ${printCharCode(code)}.`,
|
||||
);
|
||||
}
|
||||
|
||||
++position;
|
||||
if (code === 92) {
|
||||
// \
|
||||
value += body.slice(chunkStart, position - 1);
|
||||
code = body.charCodeAt(position);
|
||||
switch (code) {
|
||||
case 34:
|
||||
value += '"';
|
||||
break;
|
||||
case 47:
|
||||
value += '/';
|
||||
break;
|
||||
case 92:
|
||||
value += '\\';
|
||||
break;
|
||||
case 98:
|
||||
value += '\b';
|
||||
break;
|
||||
case 102:
|
||||
value += '\f';
|
||||
break;
|
||||
case 110:
|
||||
value += '\n';
|
||||
break;
|
||||
case 114:
|
||||
value += '\r';
|
||||
break;
|
||||
case 116:
|
||||
value += '\t';
|
||||
break;
|
||||
case 117: {
|
||||
// uXXXX
|
||||
const charCode = uniCharCode(
|
||||
body.charCodeAt(position + 1),
|
||||
body.charCodeAt(position + 2),
|
||||
body.charCodeAt(position + 3),
|
||||
body.charCodeAt(position + 4),
|
||||
);
|
||||
if (charCode < 0) {
|
||||
const invalidSequence = body.slice(position + 1, position + 5);
|
||||
throw syntaxError(
|
||||
source,
|
||||
position,
|
||||
`Invalid character escape sequence: \\u${invalidSequence}.`,
|
||||
);
|
||||
}
|
||||
value += String.fromCharCode(charCode);
|
||||
position += 4;
|
||||
break;
|
||||
}
|
||||
default:
|
||||
throw syntaxError(
|
||||
source,
|
||||
position,
|
||||
`Invalid character escape sequence: \\${String.fromCharCode(
|
||||
code,
|
||||
)}.`,
|
||||
);
|
||||
}
|
||||
++position;
|
||||
chunkStart = position;
|
||||
}
|
||||
}
|
||||
|
||||
throw syntaxError(source, position, 'Unterminated string.');
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads a block string token from the source file.
|
||||
*
|
||||
* """("?"?(\\"""|\\(?!=""")|[^"\\]))*"""
|
||||
*/
|
||||
function readBlockString(source, start, line, col, prev, lexer): Token {
|
||||
const body = source.body;
|
||||
let position = start + 3;
|
||||
let chunkStart = position;
|
||||
let code = 0;
|
||||
let rawValue = '';
|
||||
|
||||
while (position < body.length && !isNaN((code = body.charCodeAt(position)))) {
|
||||
// Closing Triple-Quote (""")
|
||||
if (
|
||||
code === 34 &&
|
||||
body.charCodeAt(position + 1) === 34 &&
|
||||
body.charCodeAt(position + 2) === 34
|
||||
) {
|
||||
rawValue += body.slice(chunkStart, position);
|
||||
return new Tok(
|
||||
TokenKind.BLOCK_STRING,
|
||||
start,
|
||||
position + 3,
|
||||
line,
|
||||
col,
|
||||
prev,
|
||||
dedentBlockStringValue(rawValue),
|
||||
);
|
||||
}
|
||||
|
||||
// SourceCharacter
|
||||
if (
|
||||
code < 0x0020 &&
|
||||
code !== 0x0009 &&
|
||||
code !== 0x000a &&
|
||||
code !== 0x000d
|
||||
) {
|
||||
throw syntaxError(
|
||||
source,
|
||||
position,
|
||||
`Invalid character within String: ${printCharCode(code)}.`,
|
||||
);
|
||||
}
|
||||
|
||||
if (code === 10) {
|
||||
// new line
|
||||
++position;
|
||||
++lexer.line;
|
||||
lexer.lineStart = position;
|
||||
} else if (code === 13) {
|
||||
// carriage return
|
||||
if (body.charCodeAt(position + 1) === 10) {
|
||||
position += 2;
|
||||
} else {
|
||||
++position;
|
||||
}
|
||||
++lexer.line;
|
||||
lexer.lineStart = position;
|
||||
} else if (
|
||||
// Escape Triple-Quote (\""")
|
||||
code === 92 &&
|
||||
body.charCodeAt(position + 1) === 34 &&
|
||||
body.charCodeAt(position + 2) === 34 &&
|
||||
body.charCodeAt(position + 3) === 34
|
||||
) {
|
||||
rawValue += body.slice(chunkStart, position) + '"""';
|
||||
position += 4;
|
||||
chunkStart = position;
|
||||
} else {
|
||||
++position;
|
||||
}
|
||||
}
|
||||
|
||||
throw syntaxError(source, position, 'Unterminated string.');
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts four hexadecimal chars to the integer that the
|
||||
* string represents. For example, uniCharCode('0','0','0','f')
|
||||
* will return 15, and uniCharCode('0','0','f','f') returns 255.
|
||||
*
|
||||
* Returns a negative number on error, if a char was invalid.
|
||||
*
|
||||
* This is implemented by noting that char2hex() returns -1 on error,
|
||||
* which means the result of ORing the char2hex() will also be negative.
|
||||
*/
|
||||
function uniCharCode(a, b, c, d) {
|
||||
return (
|
||||
(char2hex(a) << 12) | (char2hex(b) << 8) | (char2hex(c) << 4) | char2hex(d)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a hex character to its integer value.
|
||||
* '0' becomes 0, '9' becomes 9
|
||||
* 'A' becomes 10, 'F' becomes 15
|
||||
* 'a' becomes 10, 'f' becomes 15
|
||||
*
|
||||
* Returns -1 on error.
|
||||
*/
|
||||
function char2hex(a) {
|
||||
return a >= 48 && a <= 57
|
||||
? a - 48 // 0-9
|
||||
: a >= 65 && a <= 70
|
||||
? a - 55 // A-F
|
||||
: a >= 97 && a <= 102
|
||||
? a - 87 // a-f
|
||||
: -1;
|
||||
}
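// Worked example (annotation, not part of the original file): for the escape
// \u00ff the four char codes are '0', '0', 'f', 'f'; char2hex gives 0, 0, 15, 15,
// so uniCharCode returns (0 << 12) | (0 << 8) | (15 << 4) | 15 === 255.
// Any non-hex character yields -1, which makes the whole ORed result negative.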
|
||||
|
||||
/**
|
||||
* Reads an alphanumeric + underscore name from the source.
|
||||
*
|
||||
* [_A-Za-z][_0-9A-Za-z]*
|
||||
*/
|
||||
function readName(source, start, line, col, prev): Token {
|
||||
const body = source.body;
|
||||
const bodyLength = body.length;
|
||||
let position = start + 1;
|
||||
let code = 0;
|
||||
while (
|
||||
position !== bodyLength &&
|
||||
!isNaN((code = body.charCodeAt(position))) &&
|
||||
(code === 95 || // _
|
||||
(code >= 48 && code <= 57) || // 0-9
|
||||
(code >= 65 && code <= 90) || // A-Z
|
||||
(code >= 97 && code <= 122)) // a-z
|
||||
) {
|
||||
++position;
|
||||
}
|
||||
return new Tok(
|
||||
TokenKind.NAME,
|
||||
start,
|
||||
position,
|
||||
line,
|
||||
col,
|
||||
prev,
|
||||
body.slice(start, position),
|
||||
);
|
||||
}
|
||||
613
node_modules/graphql/language/lexer.mjs
generated
vendored
Normal file
613
node_modules/graphql/language/lexer.mjs
generated
vendored
Normal file
@@ -0,0 +1,613 @@
import defineToJSON from '../jsutils/defineToJSON';
import { syntaxError } from '../error/syntaxError';
import { dedentBlockStringValue } from './blockString';
import { TokenKind } from './tokenKind';

/**
 * Given a Source object, this returns a Lexer for that source.
 * A Lexer is a stateful stream generator in that every time
 * it is advanced, it returns the next token in the Source. Assuming the
 * source lexes, the final Token emitted by the lexer will be of kind
 * EOF, after which the lexer will repeatedly return the same EOF token
 * whenever called.
 */
export function createLexer(source, options) {
  var startOfFileToken = new Tok(TokenKind.SOF, 0, 0, 0, 0, null);
  var lexer = {
    source: source,
    options: options,
    lastToken: startOfFileToken,
    token: startOfFileToken,
    line: 1,
    lineStart: 0,
    advance: advanceLexer,
    lookahead: lookahead
  };
  return lexer;
}

function advanceLexer() {
  this.lastToken = this.token;
  var token = this.token = this.lookahead();
  return token;
}

function lookahead() {
  var token = this.token;

  if (token.kind !== TokenKind.EOF) {
    do {
      // Note: next is only mutable during parsing, so we cast to allow this.
      token = token.next || (token.next = readToken(this, token));
    } while (token.kind === TokenKind.COMMENT);
  }

  return token;
}
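// Usage sketch (annotation, not part of the original file):
//   import { Source } from './source';
//   const lexer = createLexer(new Source('{ id }'));
//   lexer.advance(); // BRACE_L
//   lexer.advance(); // NAME "id"
//   lexer.advance(); // BRACE_R
//   lexer.advance(); // EOF (every further advance() returns this same token)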
|
||||
/**
|
||||
* The return type of createLexer.
|
||||
*/
|
||||
|
||||
|
||||
// @internal
|
||||
export function isPunctuatorToken(token) {
|
||||
var kind = token.kind;
|
||||
return kind === TokenKind.BANG || kind === TokenKind.DOLLAR || kind === TokenKind.AMP || kind === TokenKind.PAREN_L || kind === TokenKind.PAREN_R || kind === TokenKind.SPREAD || kind === TokenKind.COLON || kind === TokenKind.EQUALS || kind === TokenKind.AT || kind === TokenKind.BRACKET_L || kind === TokenKind.BRACKET_R || kind === TokenKind.BRACE_L || kind === TokenKind.PIPE || kind === TokenKind.BRACE_R;
|
||||
}
|
||||
/**
|
||||
* Helper function for constructing the Token object.
|
||||
*/
|
||||
|
||||
function Tok(kind, start, end, line, column, prev, value) {
|
||||
this.kind = kind;
|
||||
this.start = start;
|
||||
this.end = end;
|
||||
this.line = line;
|
||||
this.column = column;
|
||||
this.value = value;
|
||||
this.prev = prev;
|
||||
this.next = null;
|
||||
} // Print a simplified form when appearing in JSON/util.inspect.
|
||||
|
||||
|
||||
defineToJSON(Tok, function () {
|
||||
return {
|
||||
kind: this.kind,
|
||||
value: this.value,
|
||||
line: this.line,
|
||||
column: this.column
|
||||
};
|
||||
});
|
||||
|
||||
function printCharCode(code) {
|
||||
return (// NaN/undefined represents access beyond the end of the file.
|
||||
isNaN(code) ? TokenKind.EOF : // Trust JSON for ASCII.
|
||||
code < 0x007f ? JSON.stringify(String.fromCharCode(code)) : // Otherwise print the escaped form.
|
||||
"\"\\u".concat(('00' + code.toString(16).toUpperCase()).slice(-4), "\"")
|
||||
);
|
||||
}
|
||||
/**
|
||||
* Gets the next token from the source starting at the given position.
|
||||
*
|
||||
* This skips over whitespace until it finds the next lexable token, then lexes
|
||||
* punctuators immediately or calls the appropriate helper function for more
|
||||
* complicated tokens.
|
||||
*/
|
||||
|
||||
|
||||
function readToken(lexer, prev) {
|
||||
var source = lexer.source;
|
||||
var body = source.body;
|
||||
var bodyLength = body.length;
|
||||
var pos = positionAfterWhitespace(body, prev.end, lexer);
|
||||
var line = lexer.line;
|
||||
var col = 1 + pos - lexer.lineStart;
|
||||
|
||||
if (pos >= bodyLength) {
|
||||
return new Tok(TokenKind.EOF, bodyLength, bodyLength, line, col, prev);
|
||||
}
|
||||
|
||||
var code = body.charCodeAt(pos); // SourceCharacter
|
||||
|
||||
switch (code) {
|
||||
// !
|
||||
case 33:
|
||||
return new Tok(TokenKind.BANG, pos, pos + 1, line, col, prev);
|
||||
// #
|
||||
|
||||
case 35:
|
||||
return readComment(source, pos, line, col, prev);
|
||||
// $
|
||||
|
||||
case 36:
|
||||
return new Tok(TokenKind.DOLLAR, pos, pos + 1, line, col, prev);
|
||||
// &
|
||||
|
||||
case 38:
|
||||
return new Tok(TokenKind.AMP, pos, pos + 1, line, col, prev);
|
||||
// (
|
||||
|
||||
case 40:
|
||||
return new Tok(TokenKind.PAREN_L, pos, pos + 1, line, col, prev);
|
||||
// )
|
||||
|
||||
case 41:
|
||||
return new Tok(TokenKind.PAREN_R, pos, pos + 1, line, col, prev);
|
||||
// .
|
||||
|
||||
case 46:
|
||||
if (body.charCodeAt(pos + 1) === 46 && body.charCodeAt(pos + 2) === 46) {
|
||||
return new Tok(TokenKind.SPREAD, pos, pos + 3, line, col, prev);
|
||||
}
|
||||
|
||||
break;
|
||||
// :
|
||||
|
||||
case 58:
|
||||
return new Tok(TokenKind.COLON, pos, pos + 1, line, col, prev);
|
||||
// =
|
||||
|
||||
case 61:
|
||||
return new Tok(TokenKind.EQUALS, pos, pos + 1, line, col, prev);
|
||||
// @
|
||||
|
||||
case 64:
|
||||
return new Tok(TokenKind.AT, pos, pos + 1, line, col, prev);
|
||||
// [
|
||||
|
||||
case 91:
|
||||
return new Tok(TokenKind.BRACKET_L, pos, pos + 1, line, col, prev);
|
||||
// ]
|
||||
|
||||
case 93:
|
||||
return new Tok(TokenKind.BRACKET_R, pos, pos + 1, line, col, prev);
|
||||
// {
|
||||
|
||||
case 123:
|
||||
return new Tok(TokenKind.BRACE_L, pos, pos + 1, line, col, prev);
|
||||
// |
|
||||
|
||||
case 124:
|
||||
return new Tok(TokenKind.PIPE, pos, pos + 1, line, col, prev);
|
||||
// }
|
||||
|
||||
case 125:
|
||||
return new Tok(TokenKind.BRACE_R, pos, pos + 1, line, col, prev);
|
||||
// A-Z _ a-z
|
||||
|
||||
case 65:
|
||||
case 66:
|
||||
case 67:
|
||||
case 68:
|
||||
case 69:
|
||||
case 70:
|
||||
case 71:
|
||||
case 72:
|
||||
case 73:
|
||||
case 74:
|
||||
case 75:
|
||||
case 76:
|
||||
case 77:
|
||||
case 78:
|
||||
case 79:
|
||||
case 80:
|
||||
case 81:
|
||||
case 82:
|
||||
case 83:
|
||||
case 84:
|
||||
case 85:
|
||||
case 86:
|
||||
case 87:
|
||||
case 88:
|
||||
case 89:
|
||||
case 90:
|
||||
case 95:
|
||||
case 97:
|
||||
case 98:
|
||||
case 99:
|
||||
case 100:
|
||||
case 101:
|
||||
case 102:
|
||||
case 103:
|
||||
case 104:
|
||||
case 105:
|
||||
case 106:
|
||||
case 107:
|
||||
case 108:
|
||||
case 109:
|
||||
case 110:
|
||||
case 111:
|
||||
case 112:
|
||||
case 113:
|
||||
case 114:
|
||||
case 115:
|
||||
case 116:
|
||||
case 117:
|
||||
case 118:
|
||||
case 119:
|
||||
case 120:
|
||||
case 121:
|
||||
case 122:
|
||||
return readName(source, pos, line, col, prev);
|
||||
// - 0-9
|
||||
|
||||
case 45:
|
||||
case 48:
|
||||
case 49:
|
||||
case 50:
|
||||
case 51:
|
||||
case 52:
|
||||
case 53:
|
||||
case 54:
|
||||
case 55:
|
||||
case 56:
|
||||
case 57:
|
||||
return readNumber(source, pos, code, line, col, prev);
|
||||
// "
|
||||
|
||||
case 34:
|
||||
if (body.charCodeAt(pos + 1) === 34 && body.charCodeAt(pos + 2) === 34) {
|
||||
return readBlockString(source, pos, line, col, prev, lexer);
|
||||
}
|
||||
|
||||
return readString(source, pos, line, col, prev);
|
||||
}
|
||||
|
||||
throw syntaxError(source, pos, unexpectedCharacterMessage(code));
|
||||
}
|
||||
/**
|
||||
* Report a message that an unexpected character was encountered.
|
||||
*/
|
||||
|
||||
|
||||
function unexpectedCharacterMessage(code) {
|
||||
if (code < 0x0020 && code !== 0x0009 && code !== 0x000a && code !== 0x000d) {
|
||||
return "Cannot contain the invalid character ".concat(printCharCode(code), ".");
|
||||
}
|
||||
|
||||
if (code === 39) {
|
||||
// '
|
||||
return 'Unexpected single quote character (\'), did you mean to use a double quote (")?';
|
||||
}
|
||||
|
||||
return "Cannot parse the unexpected character ".concat(printCharCode(code), ".");
|
||||
}
|
||||
/**
|
||||
* Reads from body starting at startPosition until it finds a non-whitespace
|
||||
* character, then returns the position of that character for lexing.
|
||||
*/
|
||||
|
||||
|
||||
function positionAfterWhitespace(body, startPosition, lexer) {
|
||||
var bodyLength = body.length;
|
||||
var position = startPosition;
|
||||
|
||||
while (position < bodyLength) {
|
||||
var code = body.charCodeAt(position); // tab | space | comma | BOM
|
||||
|
||||
if (code === 9 || code === 32 || code === 44 || code === 0xfeff) {
|
||||
++position;
|
||||
} else if (code === 10) {
|
||||
// new line
|
||||
++position;
|
||||
++lexer.line;
|
||||
lexer.lineStart = position;
|
||||
} else if (code === 13) {
|
||||
// carriage return
|
||||
if (body.charCodeAt(position + 1) === 10) {
|
||||
position += 2;
|
||||
} else {
|
||||
++position;
|
||||
}
|
||||
|
||||
++lexer.line;
|
||||
lexer.lineStart = position;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return position;
|
||||
}
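// Illustrative note (annotation, not part of the original file): commas, tabs,
// spaces and the BOM are all "ignored" characters here, so a document such as
// ",,,{ a , b },,," lexes to the same token stream as "{ a b }".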
|
||||
/**
|
||||
* Reads a comment token from the source file.
|
||||
*
|
||||
* #[\u0009\u0020-\uFFFF]*
|
||||
*/
|
||||
|
||||
|
||||
function readComment(source, start, line, col, prev) {
|
||||
var body = source.body;
|
||||
var code;
|
||||
var position = start;
|
||||
|
||||
do {
|
||||
code = body.charCodeAt(++position);
|
||||
} while (!isNaN(code) && ( // SourceCharacter but not LineTerminator
|
||||
code > 0x001f || code === 0x0009));
|
||||
|
||||
return new Tok(TokenKind.COMMENT, start, position, line, col, prev, body.slice(start + 1, position));
|
||||
}
|
||||
/**
|
||||
* Reads a number token from the source file, either a float
|
||||
* or an int depending on whether a decimal point appears.
|
||||
*
|
||||
* Int: -?(0|[1-9][0-9]*)
|
||||
* Float: -?(0|[1-9][0-9]*)(\.[0-9]+)?((E|e)(+|-)?[0-9]+)?
|
||||
*/
|
||||
|
||||
|
||||
function readNumber(source, start, firstCode, line, col, prev) {
|
||||
var body = source.body;
|
||||
var code = firstCode;
|
||||
var position = start;
|
||||
var isFloat = false;
|
||||
|
||||
if (code === 45) {
|
||||
// -
|
||||
code = body.charCodeAt(++position);
|
||||
}
|
||||
|
||||
if (code === 48) {
|
||||
// 0
|
||||
code = body.charCodeAt(++position);
|
||||
|
||||
if (code >= 48 && code <= 57) {
|
||||
throw syntaxError(source, position, "Invalid number, unexpected digit after 0: ".concat(printCharCode(code), "."));
|
||||
}
|
||||
} else {
|
||||
position = readDigits(source, position, code);
|
||||
code = body.charCodeAt(position);
|
||||
}
|
||||
|
||||
if (code === 46) {
|
||||
// .
|
||||
isFloat = true;
|
||||
code = body.charCodeAt(++position);
|
||||
position = readDigits(source, position, code);
|
||||
code = body.charCodeAt(position);
|
||||
}
|
||||
|
||||
if (code === 69 || code === 101) {
|
||||
// E e
|
||||
isFloat = true;
|
||||
code = body.charCodeAt(++position);
|
||||
|
||||
if (code === 43 || code === 45) {
|
||||
// + -
|
||||
code = body.charCodeAt(++position);
|
||||
}
|
||||
|
||||
position = readDigits(source, position, code);
|
||||
code = body.charCodeAt(position);
|
||||
} // Numbers cannot be followed by . or e
|
||||
|
||||
|
||||
if (code === 46 || code === 69 || code === 101) {
|
||||
throw syntaxError(source, position, "Invalid number, expected digit but got: ".concat(printCharCode(code), "."));
|
||||
}
|
||||
|
||||
return new Tok(isFloat ? TokenKind.FLOAT : TokenKind.INT, start, position, line, col, prev, body.slice(start, position));
|
||||
}
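// Examples of the grammar above (annotation, not part of the original file):
// "4" and "-90" lex as INT; "4.123", "0.5e3" and "-1.2E-3" lex as FLOAT;
// "01" and "1.2e" are rejected with "Invalid number ..." syntax errors.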
|
||||
/**
|
||||
* Returns the new position in the source after reading digits.
|
||||
*/
|
||||
|
||||
|
||||
function readDigits(source, start, firstCode) {
|
||||
var body = source.body;
|
||||
var position = start;
|
||||
var code = firstCode;
|
||||
|
||||
if (code >= 48 && code <= 57) {
|
||||
// 0 - 9
|
||||
do {
|
||||
code = body.charCodeAt(++position);
|
||||
} while (code >= 48 && code <= 57); // 0 - 9
|
||||
|
||||
|
||||
return position;
|
||||
}
|
||||
|
||||
throw syntaxError(source, position, "Invalid number, expected digit but got: ".concat(printCharCode(code), "."));
|
||||
}
|
||||
/**
|
||||
* Reads a string token from the source file.
|
||||
*
|
||||
* "([^"\\\u000A\u000D]|(\\(u[0-9a-fA-F]{4}|["\\/bfnrt])))*"
|
||||
*/
|
||||
|
||||
|
||||
function readString(source, start, line, col, prev) {
|
||||
var body = source.body;
|
||||
var position = start + 1;
|
||||
var chunkStart = position;
|
||||
var code = 0;
|
||||
var value = '';
|
||||
|
||||
while (position < body.length && !isNaN(code = body.charCodeAt(position)) && // not LineTerminator
|
||||
code !== 0x000a && code !== 0x000d) {
|
||||
// Closing Quote (")
|
||||
if (code === 34) {
|
||||
value += body.slice(chunkStart, position);
|
||||
return new Tok(TokenKind.STRING, start, position + 1, line, col, prev, value);
|
||||
} // SourceCharacter
|
||||
|
||||
|
||||
if (code < 0x0020 && code !== 0x0009) {
|
||||
throw syntaxError(source, position, "Invalid character within String: ".concat(printCharCode(code), "."));
|
||||
}
|
||||
|
||||
++position;
|
||||
|
||||
if (code === 92) {
|
||||
// \
|
||||
value += body.slice(chunkStart, position - 1);
|
||||
code = body.charCodeAt(position);
|
||||
|
||||
switch (code) {
|
||||
case 34:
|
||||
value += '"';
|
||||
break;
|
||||
|
||||
case 47:
|
||||
value += '/';
|
||||
break;
|
||||
|
||||
case 92:
|
||||
value += '\\';
|
||||
break;
|
||||
|
||||
case 98:
|
||||
value += '\b';
|
||||
break;
|
||||
|
||||
case 102:
|
||||
value += '\f';
|
||||
break;
|
||||
|
||||
case 110:
|
||||
value += '\n';
|
||||
break;
|
||||
|
||||
case 114:
|
||||
value += '\r';
|
||||
break;
|
||||
|
||||
case 116:
|
||||
value += '\t';
|
||||
break;
|
||||
|
||||
case 117:
|
||||
{
|
||||
// uXXXX
|
||||
var charCode = uniCharCode(body.charCodeAt(position + 1), body.charCodeAt(position + 2), body.charCodeAt(position + 3), body.charCodeAt(position + 4));
|
||||
|
||||
if (charCode < 0) {
|
||||
var invalidSequence = body.slice(position + 1, position + 5);
|
||||
throw syntaxError(source, position, "Invalid character escape sequence: \\u".concat(invalidSequence, "."));
|
||||
}
|
||||
|
||||
value += String.fromCharCode(charCode);
|
||||
position += 4;
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
throw syntaxError(source, position, "Invalid character escape sequence: \\".concat(String.fromCharCode(code), "."));
|
||||
}
|
||||
|
||||
++position;
|
||||
chunkStart = position;
|
||||
}
|
||||
}
|
||||
|
||||
throw syntaxError(source, position, 'Unterminated string.');
|
||||
}
|
||||
/**
|
||||
* Reads a block string token from the source file.
|
||||
*
|
||||
* """("?"?(\\"""|\\(?!=""")|[^"\\]))*"""
|
||||
*/
|
||||
|
||||
|
||||
function readBlockString(source, start, line, col, prev, lexer) {
|
||||
var body = source.body;
|
||||
var position = start + 3;
|
||||
var chunkStart = position;
|
||||
var code = 0;
|
||||
var rawValue = '';
|
||||
|
||||
while (position < body.length && !isNaN(code = body.charCodeAt(position))) {
|
||||
// Closing Triple-Quote (""")
|
||||
if (code === 34 && body.charCodeAt(position + 1) === 34 && body.charCodeAt(position + 2) === 34) {
|
||||
rawValue += body.slice(chunkStart, position);
|
||||
return new Tok(TokenKind.BLOCK_STRING, start, position + 3, line, col, prev, dedentBlockStringValue(rawValue));
|
||||
} // SourceCharacter
|
||||
|
||||
|
||||
if (code < 0x0020 && code !== 0x0009 && code !== 0x000a && code !== 0x000d) {
|
||||
throw syntaxError(source, position, "Invalid character within String: ".concat(printCharCode(code), "."));
|
||||
}
|
||||
|
||||
if (code === 10) {
|
||||
// new line
|
||||
++position;
|
||||
++lexer.line;
|
||||
lexer.lineStart = position;
|
||||
} else if (code === 13) {
|
||||
// carriage return
|
||||
if (body.charCodeAt(position + 1) === 10) {
|
||||
position += 2;
|
||||
} else {
|
||||
++position;
|
||||
}
|
||||
|
||||
++lexer.line;
|
||||
lexer.lineStart = position;
|
||||
} else if ( // Escape Triple-Quote (\""")
|
||||
code === 92 && body.charCodeAt(position + 1) === 34 && body.charCodeAt(position + 2) === 34 && body.charCodeAt(position + 3) === 34) {
|
||||
rawValue += body.slice(chunkStart, position) + '"""';
|
||||
position += 4;
|
||||
chunkStart = position;
|
||||
} else {
|
||||
++position;
|
||||
}
|
||||
}
|
||||
|
||||
throw syntaxError(source, position, 'Unterminated string.');
|
||||
}
|
||||
/**
|
||||
* Converts four hexadecimal chars to the integer that the
|
||||
* string represents. For example, uniCharCode('0','0','0','f')
|
||||
* will return 15, and uniCharCode('0','0','f','f') returns 255.
|
||||
*
|
||||
* Returns a negative number on error, if a char was invalid.
|
||||
*
|
||||
* This is implemented by noting that char2hex() returns -1 on error,
|
||||
* which means the result of ORing the char2hex() will also be negative.
|
||||
*/
|
||||
|
||||
|
||||
function uniCharCode(a, b, c, d) {
|
||||
return char2hex(a) << 12 | char2hex(b) << 8 | char2hex(c) << 4 | char2hex(d);
|
||||
}
|
||||
/**
|
||||
* Converts a hex character to its integer value.
|
||||
* '0' becomes 0, '9' becomes 9
|
||||
* 'A' becomes 10, 'F' becomes 15
|
||||
* 'a' becomes 10, 'f' becomes 15
|
||||
*
|
||||
* Returns -1 on error.
|
||||
*/
|
||||
|
||||
|
||||
function char2hex(a) {
|
||||
return a >= 48 && a <= 57 ? a - 48 // 0-9
|
||||
: a >= 65 && a <= 70 ? a - 55 // A-F
|
||||
: a >= 97 && a <= 102 ? a - 87 // a-f
|
||||
: -1;
|
||||
}
|
||||
/**
|
||||
* Reads an alphanumeric + underscore name from the source.
|
||||
*
|
||||
* [_A-Za-z][_0-9A-Za-z]*
|
||||
*/
|
||||
|
||||
|
||||
function readName(source, start, line, col, prev) {
|
||||
var body = source.body;
|
||||
var bodyLength = body.length;
|
||||
var position = start + 1;
|
||||
var code = 0;
|
||||
|
||||
while (position !== bodyLength && !isNaN(code = body.charCodeAt(position)) && (code === 95 || // _
|
||||
code >= 48 && code <= 57 || // 0-9
|
||||
code >= 65 && code <= 90 || // A-Z
|
||||
code >= 97 && code <= 122) // a-z
|
||||
) {
|
||||
++position;
|
||||
}
|
||||
|
||||
return new Tok(TokenKind.NAME, start, position, line, col, prev, body.slice(start, position));
|
||||
}
|
||||
15
node_modules/graphql/language/location.d.ts
generated
vendored
Normal file
15
node_modules/graphql/language/location.d.ts
generated
vendored
Normal file
@@ -0,0 +1,15 @@
import { Source } from './source';

/**
 * Represents a location in a Source.
 */
export interface SourceLocation {
  readonly line: number;
  readonly column: number;
}

/**
 * Takes a Source and a UTF-8 character offset, and returns the corresponding
 * line and column as a SourceLocation.
 */
export function getLocation(source: Source, position: number): SourceLocation;
31
node_modules/graphql/language/location.js
generated
vendored
Normal file
31
node_modules/graphql/language/location.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.getLocation = getLocation;

/**
 * Represents a location in a Source.
 */

/**
 * Takes a Source and a UTF-8 character offset, and returns the corresponding
 * line and column as a SourceLocation.
 */
function getLocation(source, position) {
  var lineRegexp = /\r\n|[\n\r]/g;
  var line = 1;
  var column = position + 1;
  var match;

  while ((match = lineRegexp.exec(source.body)) && match.index < position) {
    line += 1;
    column = position + 1 - (match.index + match[0].length);
  }

  return {
    line: line,
    column: column
  };
}
27
node_modules/graphql/language/location.js.flow
generated
vendored
Normal file
27
node_modules/graphql/language/location.js.flow
generated
vendored
Normal file
@@ -0,0 +1,27 @@
// @flow strict

import { type Source } from './source';

/**
 * Represents a location in a Source.
 */
export type SourceLocation = {|
  +line: number,
  +column: number,
|};

/**
 * Takes a Source and a UTF-8 character offset, and returns the corresponding
 * line and column as a SourceLocation.
 */
export function getLocation(source: Source, position: number): SourceLocation {
  const lineRegexp = /\r\n|[\n\r]/g;
  let line = 1;
  let column = position + 1;
  let match;
  while ((match = lineRegexp.exec(source.body)) && match.index < position) {
    line += 1;
    column = position + 1 - (match.index + match[0].length);
  }
  return { line, column };
}
24
node_modules/graphql/language/location.mjs
generated
vendored
Normal file
24
node_modules/graphql/language/location.mjs
generated
vendored
Normal file
@@ -0,0 +1,24 @@
/**
 * Represents a location in a Source.
 */

/**
 * Takes a Source and a UTF-8 character offset, and returns the corresponding
 * line and column as a SourceLocation.
 */
export function getLocation(source, position) {
  var lineRegexp = /\r\n|[\n\r]/g;
  var line = 1;
  var column = position + 1;
  var match;

  while ((match = lineRegexp.exec(source.body)) && match.index < position) {
    line += 1;
    column = position + 1 - (match.index + match[0].length);
  }

  return {
    line: line,
    column: column
  };
}
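A quick sanity check of the function above (illustrative sketch; the body string and offset are made up):

import { Source } from './source';
import { getLocation } from './location';

// Offsets are 0-indexed characters; lines and columns are 1-indexed.
const body = 'type Query {\n  field: Int\n}';
getLocation(new Source(body), 15); // { line: 2, column: 3 } -- the "f" of "field"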
89
node_modules/graphql/language/parser.d.ts
generated
vendored
Normal file
89
node_modules/graphql/language/parser.d.ts
generated
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
import { Source } from './source';
|
||||
import { Lexer } from './lexer';
|
||||
import { NamedTypeNode, TypeNode, ValueNode, DocumentNode } from './ast';
|
||||
|
||||
/**
|
||||
* Configuration options to control parser behavior
|
||||
*/
|
||||
export interface ParseOptions {
|
||||
/**
|
||||
* By default, the parser creates AST nodes that know the location
|
||||
* in the source that they correspond to. This configuration flag
|
||||
* disables that behavior for performance or testing.
|
||||
*/
|
||||
noLocation?: boolean;
|
||||
|
||||
/**
|
||||
 * If enabled, the parser will parse empty field sets in the Schema
|
||||
* Definition Language. Otherwise, the parser will follow the current
|
||||
* specification.
|
||||
*
|
||||
* This option is provided to ease adoption of the final SDL specification
|
||||
* and will be removed in v16.
|
||||
*/
|
||||
allowLegacySDLEmptyFields?: boolean;
|
||||
|
||||
/**
|
||||
* If enabled, the parser will parse implemented interfaces with no `&`
|
||||
* character between each interface. Otherwise, the parser will follow the
|
||||
* current specification.
|
||||
*
|
||||
* This option is provided to ease adoption of the final SDL specification
|
||||
* and will be removed in v16.
|
||||
*/
|
||||
allowLegacySDLImplementsInterfaces?: boolean;
|
||||
|
||||
/**
|
||||
* EXPERIMENTAL:
|
||||
*
|
||||
* If enabled, the parser will understand and parse variable definitions
|
||||
* contained in a fragment definition. They'll be represented in the
|
||||
* `variableDefinitions` field of the FragmentDefinitionNode.
|
||||
*
|
||||
* The syntax is identical to normal, query-defined variables. For example:
|
||||
*
|
||||
* fragment A($var: Boolean = false) on T {
|
||||
* ...
|
||||
* }
|
||||
*
|
||||
* Note: this feature is experimental and may change or be removed in the
|
||||
* future.
|
||||
*/
|
||||
experimentalFragmentVariables?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a GraphQL source, parses it into a Document.
|
||||
* Throws GraphQLError if a syntax error is encountered.
|
||||
*/
|
||||
export function parse(
|
||||
source: string | Source,
|
||||
options?: ParseOptions,
|
||||
): DocumentNode;
|
||||
|
||||
/**
|
||||
* Given a string containing a GraphQL value, parse the AST for that value.
|
||||
* Throws GraphQLError if a syntax error is encountered.
|
||||
*
|
||||
* This is useful within tools that operate upon GraphQL Values directly and
|
||||
* in isolation of complete GraphQL documents.
|
||||
*/
|
||||
export function parseValue(
|
||||
source: string | Source,
|
||||
options?: ParseOptions,
|
||||
): ValueNode;
|
||||
|
||||
/**
|
||||
* Given a string containing a GraphQL Type (ex. `[Int!]`), parse the AST for
|
||||
* that type.
|
||||
* Throws GraphQLError if a syntax error is encountered.
|
||||
*
|
||||
* This is useful within tools that operate upon GraphQL Types directly and
|
||||
* in isolation of complete GraphQL documents.
|
||||
*
|
||||
* Consider providing the results to the utility function: typeFromAST().
|
||||
*/
|
||||
export function parseType(
|
||||
source: string | Source,
|
||||
options?: ParseOptions,
|
||||
): TypeNode;
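A small usage sketch for the three entry points declared above (illustrative, not part of the vendored typings):

import { parse, parseValue, parseType } from 'graphql';

const doc = parse('{ user { id } }', { noLocation: true }); // DocumentNode
const value = parseValue('[123, "abc"]'); // ValueNode (a ListValueNode)
const type = parseType('[Int!]'); // TypeNode (a ListTypeNode wrapping a NonNullType)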
|
||||
1549
node_modules/graphql/language/parser.js
generated
vendored
Normal file
1549
node_modules/graphql/language/parser.js
generated
vendored
Normal file
File diff suppressed because it is too large
1561
node_modules/graphql/language/parser.js.flow
generated
vendored
Normal file
1561
node_modules/graphql/language/parser.js.flow
generated
vendored
Normal file
File diff suppressed because it is too large
1528
node_modules/graphql/language/parser.mjs
generated
vendored
Normal file
1528
node_modules/graphql/language/parser.mjs
generated
vendored
Normal file
File diff suppressed because it is too large
36
node_modules/graphql/language/predicates.d.ts
generated
vendored
Normal file
36
node_modules/graphql/language/predicates.d.ts
generated
vendored
Normal file
@@ -0,0 +1,36 @@
import {
  ASTNode,
  DefinitionNode,
  ExecutableDefinitionNode,
  SelectionNode,
  ValueNode,
  TypeNode,
  TypeSystemDefinitionNode,
  TypeDefinitionNode,
  TypeSystemExtensionNode,
  TypeExtensionNode,
} from './ast';

export function isDefinitionNode(node: ASTNode): node is DefinitionNode;

export function isExecutableDefinitionNode(
  node: ASTNode,
): node is ExecutableDefinitionNode;

export function isSelectionNode(node: ASTNode): node is SelectionNode;

export function isValueNode(node: ASTNode): node is ValueNode;

export function isTypeNode(node: ASTNode): node is TypeNode;

export function isTypeSystemDefinitionNode(
  node: ASTNode,
): node is TypeSystemDefinitionNode;

export function isTypeDefinitionNode(node: ASTNode): node is TypeDefinitionNode;

export function isTypeSystemExtensionNode(
  node: ASTNode,
): node is TypeSystemExtensionNode;

export function isTypeExtensionNode(node: ASTNode): node is TypeExtensionNode;
52
node_modules/graphql/language/predicates.js
generated
vendored
Normal file
52
node_modules/graphql/language/predicates.js
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.isDefinitionNode = isDefinitionNode;
|
||||
exports.isExecutableDefinitionNode = isExecutableDefinitionNode;
|
||||
exports.isSelectionNode = isSelectionNode;
|
||||
exports.isValueNode = isValueNode;
|
||||
exports.isTypeNode = isTypeNode;
|
||||
exports.isTypeSystemDefinitionNode = isTypeSystemDefinitionNode;
|
||||
exports.isTypeDefinitionNode = isTypeDefinitionNode;
|
||||
exports.isTypeSystemExtensionNode = isTypeSystemExtensionNode;
|
||||
exports.isTypeExtensionNode = isTypeExtensionNode;
|
||||
|
||||
var _kinds = require("./kinds");
|
||||
|
||||
function isDefinitionNode(node) {
|
||||
return isExecutableDefinitionNode(node) || isTypeSystemDefinitionNode(node) || isTypeSystemExtensionNode(node);
|
||||
}
|
||||
|
||||
function isExecutableDefinitionNode(node) {
|
||||
return node.kind === _kinds.Kind.OPERATION_DEFINITION || node.kind === _kinds.Kind.FRAGMENT_DEFINITION;
|
||||
}
|
||||
|
||||
function isSelectionNode(node) {
|
||||
return node.kind === _kinds.Kind.FIELD || node.kind === _kinds.Kind.FRAGMENT_SPREAD || node.kind === _kinds.Kind.INLINE_FRAGMENT;
|
||||
}
|
||||
|
||||
function isValueNode(node) {
|
||||
return node.kind === _kinds.Kind.VARIABLE || node.kind === _kinds.Kind.INT || node.kind === _kinds.Kind.FLOAT || node.kind === _kinds.Kind.STRING || node.kind === _kinds.Kind.BOOLEAN || node.kind === _kinds.Kind.NULL || node.kind === _kinds.Kind.ENUM || node.kind === _kinds.Kind.LIST || node.kind === _kinds.Kind.OBJECT;
|
||||
}
|
||||
|
||||
function isTypeNode(node) {
|
||||
return node.kind === _kinds.Kind.NAMED_TYPE || node.kind === _kinds.Kind.LIST_TYPE || node.kind === _kinds.Kind.NON_NULL_TYPE;
|
||||
}
|
||||
|
||||
function isTypeSystemDefinitionNode(node) {
|
||||
return node.kind === _kinds.Kind.SCHEMA_DEFINITION || isTypeDefinitionNode(node) || node.kind === _kinds.Kind.DIRECTIVE_DEFINITION;
|
||||
}
|
||||
|
||||
function isTypeDefinitionNode(node) {
|
||||
return node.kind === _kinds.Kind.SCALAR_TYPE_DEFINITION || node.kind === _kinds.Kind.OBJECT_TYPE_DEFINITION || node.kind === _kinds.Kind.INTERFACE_TYPE_DEFINITION || node.kind === _kinds.Kind.UNION_TYPE_DEFINITION || node.kind === _kinds.Kind.ENUM_TYPE_DEFINITION || node.kind === _kinds.Kind.INPUT_OBJECT_TYPE_DEFINITION;
|
||||
}
|
||||
|
||||
function isTypeSystemExtensionNode(node) {
|
||||
return node.kind === _kinds.Kind.SCHEMA_EXTENSION || isTypeExtensionNode(node);
|
||||
}
|
||||
|
||||
function isTypeExtensionNode(node) {
|
||||
return node.kind === _kinds.Kind.SCALAR_TYPE_EXTENSION || node.kind === _kinds.Kind.OBJECT_TYPE_EXTENSION || node.kind === _kinds.Kind.INTERFACE_TYPE_EXTENSION || node.kind === _kinds.Kind.UNION_TYPE_EXTENSION || node.kind === _kinds.Kind.ENUM_TYPE_EXTENSION || node.kind === _kinds.Kind.INPUT_OBJECT_TYPE_EXTENSION;
|
||||
}
|
||||
83
node_modules/graphql/language/predicates.js.flow
generated
vendored
Normal file
83
node_modules/graphql/language/predicates.js.flow
generated
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
// @flow strict
|
||||
|
||||
import { Kind } from './kinds';
|
||||
import { type ASTNode } from './ast';
|
||||
|
||||
export function isDefinitionNode(node: ASTNode): boolean %checks {
|
||||
return (
|
||||
isExecutableDefinitionNode(node) ||
|
||||
isTypeSystemDefinitionNode(node) ||
|
||||
isTypeSystemExtensionNode(node)
|
||||
);
|
||||
}
|
||||
|
||||
export function isExecutableDefinitionNode(node: ASTNode): boolean %checks {
|
||||
return (
|
||||
node.kind === Kind.OPERATION_DEFINITION ||
|
||||
node.kind === Kind.FRAGMENT_DEFINITION
|
||||
);
|
||||
}
|
||||
|
||||
export function isSelectionNode(node: ASTNode): boolean %checks {
|
||||
return (
|
||||
node.kind === Kind.FIELD ||
|
||||
node.kind === Kind.FRAGMENT_SPREAD ||
|
||||
node.kind === Kind.INLINE_FRAGMENT
|
||||
);
|
||||
}
|
||||
|
||||
export function isValueNode(node: ASTNode): boolean %checks {
|
||||
return (
|
||||
node.kind === Kind.VARIABLE ||
|
||||
node.kind === Kind.INT ||
|
||||
node.kind === Kind.FLOAT ||
|
||||
node.kind === Kind.STRING ||
|
||||
node.kind === Kind.BOOLEAN ||
|
||||
node.kind === Kind.NULL ||
|
||||
node.kind === Kind.ENUM ||
|
||||
node.kind === Kind.LIST ||
|
||||
node.kind === Kind.OBJECT
|
||||
);
|
||||
}
|
||||
|
||||
export function isTypeNode(node: ASTNode): boolean %checks {
|
||||
return (
|
||||
node.kind === Kind.NAMED_TYPE ||
|
||||
node.kind === Kind.LIST_TYPE ||
|
||||
node.kind === Kind.NON_NULL_TYPE
|
||||
);
|
||||
}
|
||||
|
||||
export function isTypeSystemDefinitionNode(node: ASTNode): boolean %checks {
|
||||
return (
|
||||
node.kind === Kind.SCHEMA_DEFINITION ||
|
||||
isTypeDefinitionNode(node) ||
|
||||
node.kind === Kind.DIRECTIVE_DEFINITION
|
||||
);
|
||||
}
|
||||
|
||||
export function isTypeDefinitionNode(node: ASTNode): boolean %checks {
|
||||
return (
|
||||
node.kind === Kind.SCALAR_TYPE_DEFINITION ||
|
||||
node.kind === Kind.OBJECT_TYPE_DEFINITION ||
|
||||
node.kind === Kind.INTERFACE_TYPE_DEFINITION ||
|
||||
node.kind === Kind.UNION_TYPE_DEFINITION ||
|
||||
node.kind === Kind.ENUM_TYPE_DEFINITION ||
|
||||
node.kind === Kind.INPUT_OBJECT_TYPE_DEFINITION
|
||||
);
|
||||
}
|
||||
|
||||
export function isTypeSystemExtensionNode(node: ASTNode): boolean %checks {
|
||||
return node.kind === Kind.SCHEMA_EXTENSION || isTypeExtensionNode(node);
|
||||
}
|
||||
|
||||
export function isTypeExtensionNode(node: ASTNode): boolean %checks {
|
||||
return (
|
||||
node.kind === Kind.SCALAR_TYPE_EXTENSION ||
|
||||
node.kind === Kind.OBJECT_TYPE_EXTENSION ||
|
||||
node.kind === Kind.INTERFACE_TYPE_EXTENSION ||
|
||||
node.kind === Kind.UNION_TYPE_EXTENSION ||
|
||||
node.kind === Kind.ENUM_TYPE_EXTENSION ||
|
||||
node.kind === Kind.INPUT_OBJECT_TYPE_EXTENSION
|
||||
);
|
||||
}
|
||||
28
node_modules/graphql/language/predicates.mjs
generated
vendored
Normal file
28
node_modules/graphql/language/predicates.mjs
generated
vendored
Normal file
@@ -0,0 +1,28 @@
import { Kind } from './kinds';
export function isDefinitionNode(node) {
  return isExecutableDefinitionNode(node) || isTypeSystemDefinitionNode(node) || isTypeSystemExtensionNode(node);
}
export function isExecutableDefinitionNode(node) {
  return node.kind === Kind.OPERATION_DEFINITION || node.kind === Kind.FRAGMENT_DEFINITION;
}
export function isSelectionNode(node) {
  return node.kind === Kind.FIELD || node.kind === Kind.FRAGMENT_SPREAD || node.kind === Kind.INLINE_FRAGMENT;
}
export function isValueNode(node) {
  return node.kind === Kind.VARIABLE || node.kind === Kind.INT || node.kind === Kind.FLOAT || node.kind === Kind.STRING || node.kind === Kind.BOOLEAN || node.kind === Kind.NULL || node.kind === Kind.ENUM || node.kind === Kind.LIST || node.kind === Kind.OBJECT;
}
export function isTypeNode(node) {
  return node.kind === Kind.NAMED_TYPE || node.kind === Kind.LIST_TYPE || node.kind === Kind.NON_NULL_TYPE;
}
export function isTypeSystemDefinitionNode(node) {
  return node.kind === Kind.SCHEMA_DEFINITION || isTypeDefinitionNode(node) || node.kind === Kind.DIRECTIVE_DEFINITION;
}
export function isTypeDefinitionNode(node) {
  return node.kind === Kind.SCALAR_TYPE_DEFINITION || node.kind === Kind.OBJECT_TYPE_DEFINITION || node.kind === Kind.INTERFACE_TYPE_DEFINITION || node.kind === Kind.UNION_TYPE_DEFINITION || node.kind === Kind.ENUM_TYPE_DEFINITION || node.kind === Kind.INPUT_OBJECT_TYPE_DEFINITION;
}
export function isTypeSystemExtensionNode(node) {
  return node.kind === Kind.SCHEMA_EXTENSION || isTypeExtensionNode(node);
}
export function isTypeExtensionNode(node) {
  return node.kind === Kind.SCALAR_TYPE_EXTENSION || node.kind === Kind.OBJECT_TYPE_EXTENSION || node.kind === Kind.INTERFACE_TYPE_EXTENSION || node.kind === Kind.UNION_TYPE_EXTENSION || node.kind === Kind.ENUM_TYPE_EXTENSION || node.kind === Kind.INPUT_OBJECT_TYPE_EXTENSION;
}
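A brief sketch of how these predicates are typically used to narrow a node by its kind (illustrative only):

import { parse } from 'graphql';
import { isExecutableDefinitionNode, isTypeDefinitionNode } from './predicates';

const doc = parse('query Q { me { id } } type User { id: ID }');
isExecutableDefinitionNode(doc.definitions[0]); // true  (OperationDefinition)
isTypeDefinitionNode(doc.definitions[1]);       // true  (ObjectTypeDefinition)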
16
node_modules/graphql/language/printLocation.d.ts
generated
vendored
Normal file
16
node_modules/graphql/language/printLocation.d.ts
generated
vendored
Normal file
@@ -0,0 +1,16 @@
import { Location } from '../language/ast';
import { Source } from '../language/source';
import { SourceLocation } from '../language/location';

/**
 * Render a helpful description of the location in the GraphQL Source document.
 */
export function printLocation(location: Location): string;

/**
 * Render a helpful description of the location in the GraphQL Source document.
 */
export function printSourceLocation(
  source: Source,
  sourceLocation: SourceLocation,
): string;
75
node_modules/graphql/language/printLocation.js
generated
vendored
Normal file
75
node_modules/graphql/language/printLocation.js
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.printLocation = printLocation;
|
||||
exports.printSourceLocation = printSourceLocation;
|
||||
|
||||
var _location = require("../language/location");
|
||||
|
||||
/**
|
||||
* Render a helpful description of the location in the GraphQL Source document.
|
||||
*/
|
||||
function printLocation(location) {
|
||||
return printSourceLocation(location.source, (0, _location.getLocation)(location.source, location.start));
|
||||
}
|
||||
/**
|
||||
* Render a helpful description of the location in the GraphQL Source document.
|
||||
*/
|
||||
|
||||
|
||||
function printSourceLocation(source, sourceLocation) {
|
||||
var firstLineColumnOffset = source.locationOffset.column - 1;
|
||||
var body = whitespace(firstLineColumnOffset) + source.body;
|
||||
var lineIndex = sourceLocation.line - 1;
|
||||
var lineOffset = source.locationOffset.line - 1;
|
||||
var lineNum = sourceLocation.line + lineOffset;
|
||||
var columnOffset = sourceLocation.line === 1 ? firstLineColumnOffset : 0;
|
||||
var columnNum = sourceLocation.column + columnOffset;
|
||||
var locationStr = "".concat(source.name, ":").concat(lineNum, ":").concat(columnNum, "\n");
|
||||
var lines = body.split(/\r\n|[\n\r]/g);
|
||||
var locationLine = lines[lineIndex]; // Special case for minified documents
|
||||
|
||||
if (locationLine.length > 120) {
|
||||
var sublineIndex = Math.floor(columnNum / 80);
|
||||
var sublineColumnNum = columnNum % 80;
|
||||
var sublines = [];
|
||||
|
||||
for (var i = 0; i < locationLine.length; i += 80) {
|
||||
sublines.push(locationLine.slice(i, i + 80));
|
||||
}
|
||||
|
||||
return locationStr + printPrefixedLines([["".concat(lineNum), sublines[0]]].concat(sublines.slice(1, sublineIndex + 1).map(function (subline) {
|
||||
return ['', subline];
|
||||
}), [[' ', whitespace(sublineColumnNum - 1) + '^'], ['', sublines[sublineIndex + 1]]]));
|
||||
}
|
||||
|
||||
return locationStr + printPrefixedLines([// Lines specified like this: ["prefix", "string"],
|
||||
["".concat(lineNum - 1), lines[lineIndex - 1]], ["".concat(lineNum), locationLine], ['', whitespace(columnNum - 1) + '^'], ["".concat(lineNum + 1), lines[lineIndex + 1]]]);
|
||||
}
|
||||
|
||||
function printPrefixedLines(lines) {
|
||||
var existingLines = lines.filter(function (_ref) {
|
||||
var _ = _ref[0],
|
||||
line = _ref[1];
|
||||
return line !== undefined;
|
||||
});
|
||||
var padLen = Math.max.apply(Math, existingLines.map(function (_ref2) {
|
||||
var prefix = _ref2[0];
|
||||
return prefix.length;
|
||||
}));
|
||||
return existingLines.map(function (_ref3) {
|
||||
var prefix = _ref3[0],
|
||||
line = _ref3[1];
|
||||
return lpad(padLen, prefix) + (line ? ' | ' + line : ' |');
|
||||
}).join('\n');
|
||||
}
|
||||
|
||||
function whitespace(len) {
|
||||
return Array(len + 1).join(' ');
|
||||
}
|
||||
|
||||
function lpad(len, str) {
|
||||
return whitespace(len - str.length) + str;
|
||||
}
|
||||
87
node_modules/graphql/language/printLocation.js.flow
generated
vendored
Normal file
87
node_modules/graphql/language/printLocation.js.flow
generated
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
// @flow strict
|
||||
|
||||
import { type Location } from '../language/ast';
|
||||
import { type Source } from '../language/source';
|
||||
import { type SourceLocation, getLocation } from '../language/location';
|
||||
|
||||
/**
|
||||
* Render a helpful description of the location in the GraphQL Source document.
|
||||
*/
|
||||
export function printLocation(location: Location): string {
|
||||
return printSourceLocation(
|
||||
location.source,
|
||||
getLocation(location.source, location.start),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Render a helpful description of the location in the GraphQL Source document.
|
||||
*/
|
||||
export function printSourceLocation(
|
||||
source: Source,
|
||||
sourceLocation: SourceLocation,
|
||||
): string {
|
||||
const firstLineColumnOffset = source.locationOffset.column - 1;
|
||||
const body = whitespace(firstLineColumnOffset) + source.body;
|
||||
|
||||
const lineIndex = sourceLocation.line - 1;
|
||||
const lineOffset = source.locationOffset.line - 1;
|
||||
const lineNum = sourceLocation.line + lineOffset;
|
||||
|
||||
const columnOffset = sourceLocation.line === 1 ? firstLineColumnOffset : 0;
|
||||
const columnNum = sourceLocation.column + columnOffset;
|
||||
const locationStr = `${source.name}:${lineNum}:${columnNum}\n`;
|
||||
|
||||
const lines = body.split(/\r\n|[\n\r]/g);
|
||||
const locationLine = lines[lineIndex];
|
||||
|
||||
// Special case for minified documents
|
||||
if (locationLine.length > 120) {
|
||||
const sublineIndex = Math.floor(columnNum / 80);
|
||||
const sublineColumnNum = columnNum % 80;
|
||||
const sublines = [];
|
||||
for (let i = 0; i < locationLine.length; i += 80) {
|
||||
sublines.push(locationLine.slice(i, i + 80));
|
||||
}
|
||||
|
||||
return (
|
||||
locationStr +
|
||||
printPrefixedLines([
|
||||
[`${lineNum}`, sublines[0]],
|
||||
...sublines.slice(1, sublineIndex + 1).map(subline => ['', subline]),
|
||||
[' ', whitespace(sublineColumnNum - 1) + '^'],
|
||||
['', sublines[sublineIndex + 1]],
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
locationStr +
|
||||
printPrefixedLines([
|
||||
// Lines specified like this: ["prefix", "string"],
|
||||
[`${lineNum - 1}`, lines[lineIndex - 1]],
|
||||
[`${lineNum}`, locationLine],
|
||||
['', whitespace(columnNum - 1) + '^'],
|
||||
[`${lineNum + 1}`, lines[lineIndex + 1]],
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
function printPrefixedLines(lines: $ReadOnlyArray<[string, string]>): string {
|
||||
const existingLines = lines.filter(([_, line]) => line !== undefined);
|
||||
|
||||
const padLen = Math.max(...existingLines.map(([prefix]) => prefix.length));
|
||||
return existingLines
|
||||
.map(
|
||||
([prefix, line]) => lpad(padLen, prefix) + (line ? ' | ' + line : ' |'),
|
||||
)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
function whitespace(len: number): string {
|
||||
return Array(len + 1).join(' ');
|
||||
}
|
||||
|
||||
function lpad(len: number, str: string): string {
|
||||
return whitespace(len - str.length) + str;
|
||||
}
|
||||
66
node_modules/graphql/language/printLocation.mjs
generated
vendored
Normal file
66
node_modules/graphql/language/printLocation.mjs
generated
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
import { getLocation } from '../language/location';
|
||||
/**
|
||||
* Render a helpful description of the location in the GraphQL Source document.
|
||||
*/
|
||||
|
||||
export function printLocation(location) {
|
||||
return printSourceLocation(location.source, getLocation(location.source, location.start));
|
||||
}
|
||||
/**
|
||||
* Render a helpful description of the location in the GraphQL Source document.
|
||||
*/
|
||||
|
||||
export function printSourceLocation(source, sourceLocation) {
|
||||
var firstLineColumnOffset = source.locationOffset.column - 1;
|
||||
var body = whitespace(firstLineColumnOffset) + source.body;
|
||||
var lineIndex = sourceLocation.line - 1;
|
||||
var lineOffset = source.locationOffset.line - 1;
|
||||
var lineNum = sourceLocation.line + lineOffset;
|
||||
var columnOffset = sourceLocation.line === 1 ? firstLineColumnOffset : 0;
|
||||
var columnNum = sourceLocation.column + columnOffset;
|
||||
var locationStr = "".concat(source.name, ":").concat(lineNum, ":").concat(columnNum, "\n");
|
||||
var lines = body.split(/\r\n|[\n\r]/g);
|
||||
var locationLine = lines[lineIndex]; // Special case for minified documents
|
||||
|
||||
if (locationLine.length > 120) {
|
||||
var sublineIndex = Math.floor(columnNum / 80);
|
||||
var sublineColumnNum = columnNum % 80;
|
||||
var sublines = [];
|
||||
|
||||
for (var i = 0; i < locationLine.length; i += 80) {
|
||||
sublines.push(locationLine.slice(i, i + 80));
|
||||
}
|
||||
|
||||
return locationStr + printPrefixedLines([["".concat(lineNum), sublines[0]]].concat(sublines.slice(1, sublineIndex + 1).map(function (subline) {
|
||||
return ['', subline];
|
||||
}), [[' ', whitespace(sublineColumnNum - 1) + '^'], ['', sublines[sublineIndex + 1]]]));
|
||||
}
|
||||
|
||||
return locationStr + printPrefixedLines([// Lines specified like this: ["prefix", "string"],
|
||||
["".concat(lineNum - 1), lines[lineIndex - 1]], ["".concat(lineNum), locationLine], ['', whitespace(columnNum - 1) + '^'], ["".concat(lineNum + 1), lines[lineIndex + 1]]]);
|
||||
}
|
||||
|
||||
function printPrefixedLines(lines) {
|
||||
var existingLines = lines.filter(function (_ref) {
|
||||
var _ = _ref[0],
|
||||
line = _ref[1];
|
||||
return line !== undefined;
|
||||
});
|
||||
var padLen = Math.max.apply(Math, existingLines.map(function (_ref2) {
|
||||
var prefix = _ref2[0];
|
||||
return prefix.length;
|
||||
}));
|
||||
return existingLines.map(function (_ref3) {
|
||||
var prefix = _ref3[0],
|
||||
line = _ref3[1];
|
||||
return lpad(padLen, prefix) + (line ? ' | ' + line : ' |');
|
||||
}).join('\n');
|
||||
}
|
||||
|
||||
function whitespace(len) {
|
||||
return Array(len + 1).join(' ');
|
||||
}
|
||||
|
||||
function lpad(len, str) {
|
||||
return whitespace(len - str.length) + str;
|
||||
}
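Assuming a three-line document and an error at line 2, column 3, printSourceLocation renders output shaped roughly like the following sketch ("GraphQL request" is the default Source name; exact values here are illustrative):

GraphQL request:2:3
1 | {
2 |   field
  |   ^
3 | }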
|
||||
7
node_modules/graphql/language/printer.d.ts
generated
vendored
Normal file
7
node_modules/graphql/language/printer.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
import { ASTNode } from './ast';

/**
 * Converts an AST into a string, using one set of reasonable
 * formatting rules.
 */
export function print(ast: ASTNode): string;
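A minimal round-trip sketch using the declaration above (illustrative):

import { parse, print } from 'graphql';

const ast = parse('query  {  user( id:4 ) { name } }');
print(ast); // re-serializes the AST with the printer's canonical formatting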
309
node_modules/graphql/language/printer.js
generated
vendored
Normal file
309
node_modules/graphql/language/printer.js
generated
vendored
Normal file
@@ -0,0 +1,309 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.print = print;
|
||||
|
||||
var _visitor = require("./visitor");
|
||||
|
||||
var _blockString = require("./blockString");
|
||||
|
||||
/**
|
||||
* Converts an AST into a string, using one set of reasonable
|
||||
* formatting rules.
|
||||
*/
|
||||
function print(ast) {
|
||||
return (0, _visitor.visit)(ast, {
|
||||
leave: printDocASTReducer
|
||||
});
|
||||
} // TODO: provide better type coverage in future
|
||||
|
||||
|
||||
var printDocASTReducer = {
|
||||
Name: function Name(node) {
|
||||
return node.value;
|
||||
},
|
||||
Variable: function Variable(node) {
|
||||
return '$' + node.name;
|
||||
},
|
||||
// Document
|
||||
Document: function Document(node) {
|
||||
return join(node.definitions, '\n\n') + '\n';
|
||||
},
|
||||
OperationDefinition: function OperationDefinition(node) {
|
||||
var op = node.operation;
|
||||
var name = node.name;
|
||||
var varDefs = wrap('(', join(node.variableDefinitions, ', '), ')');
|
||||
var directives = join(node.directives, ' ');
|
||||
var selectionSet = node.selectionSet; // Anonymous queries with no directives or variable definitions can use
|
||||
// the query short form.
|
||||
|
||||
return !name && !directives && !varDefs && op === 'query' ? selectionSet : join([op, join([name, varDefs]), directives, selectionSet], ' ');
|
||||
},
|
||||
VariableDefinition: function VariableDefinition(_ref) {
|
||||
var variable = _ref.variable,
|
||||
type = _ref.type,
|
||||
defaultValue = _ref.defaultValue,
|
||||
directives = _ref.directives;
|
||||
return variable + ': ' + type + wrap(' = ', defaultValue) + wrap(' ', join(directives, ' '));
|
||||
},
|
||||
SelectionSet: function SelectionSet(_ref2) {
|
||||
var selections = _ref2.selections;
|
||||
return block(selections);
|
||||
},
|
||||
Field: function Field(_ref3) {
|
||||
var alias = _ref3.alias,
|
||||
name = _ref3.name,
|
||||
args = _ref3.arguments,
|
||||
directives = _ref3.directives,
|
||||
selectionSet = _ref3.selectionSet;
|
||||
return join([wrap('', alias, ': ') + name + wrap('(', join(args, ', '), ')'), join(directives, ' '), selectionSet], ' ');
|
||||
},
|
||||
Argument: function Argument(_ref4) {
|
||||
var name = _ref4.name,
|
||||
value = _ref4.value;
|
||||
return name + ': ' + value;
|
||||
},
|
||||
// Fragments
|
||||
FragmentSpread: function FragmentSpread(_ref5) {
|
||||
var name = _ref5.name,
|
||||
directives = _ref5.directives;
|
||||
return '...' + name + wrap(' ', join(directives, ' '));
|
||||
},
|
||||
InlineFragment: function InlineFragment(_ref6) {
|
||||
var typeCondition = _ref6.typeCondition,
|
||||
directives = _ref6.directives,
|
||||
selectionSet = _ref6.selectionSet;
|
||||
return join(['...', wrap('on ', typeCondition), join(directives, ' '), selectionSet], ' ');
|
||||
},
|
||||
FragmentDefinition: function FragmentDefinition(_ref7) {
|
||||
var name = _ref7.name,
|
||||
typeCondition = _ref7.typeCondition,
|
||||
variableDefinitions = _ref7.variableDefinitions,
|
||||
directives = _ref7.directives,
|
||||
selectionSet = _ref7.selectionSet;
|
||||
return (// Note: fragment variable definitions are experimental and may be changed
|
||||
// or removed in the future.
|
||||
"fragment ".concat(name).concat(wrap('(', join(variableDefinitions, ', '), ')'), " ") + "on ".concat(typeCondition, " ").concat(wrap('', join(directives, ' '), ' ')) + selectionSet
|
||||
);
|
||||
},
|
||||
// Value
|
||||
IntValue: function IntValue(_ref8) {
|
||||
var value = _ref8.value;
|
||||
return value;
|
||||
},
|
||||
FloatValue: function FloatValue(_ref9) {
|
||||
var value = _ref9.value;
|
||||
return value;
|
||||
},
|
||||
StringValue: function StringValue(_ref10, key) {
|
||||
var value = _ref10.value,
|
||||
isBlockString = _ref10.block;
|
||||
return isBlockString ? (0, _blockString.printBlockString)(value, key === 'description' ? '' : ' ') : JSON.stringify(value);
|
||||
},
|
||||
BooleanValue: function BooleanValue(_ref11) {
|
||||
var value = _ref11.value;
|
||||
return value ? 'true' : 'false';
|
||||
},
|
||||
NullValue: function NullValue() {
|
||||
return 'null';
|
||||
},
|
||||
EnumValue: function EnumValue(_ref12) {
|
||||
var value = _ref12.value;
|
||||
return value;
|
||||
},
|
||||
ListValue: function ListValue(_ref13) {
|
||||
var values = _ref13.values;
|
||||
return '[' + join(values, ', ') + ']';
|
||||
},
|
||||
ObjectValue: function ObjectValue(_ref14) {
|
||||
var fields = _ref14.fields;
|
||||
return '{' + join(fields, ', ') + '}';
|
||||
},
|
||||
ObjectField: function ObjectField(_ref15) {
|
||||
var name = _ref15.name,
|
||||
value = _ref15.value;
|
||||
return name + ': ' + value;
|
||||
},
|
||||
// Directive
|
||||
Directive: function Directive(_ref16) {
|
||||
var name = _ref16.name,
|
||||
args = _ref16.arguments;
|
||||
return '@' + name + wrap('(', join(args, ', '), ')');
|
||||
},
|
||||
// Type
|
||||
NamedType: function NamedType(_ref17) {
|
||||
var name = _ref17.name;
|
||||
return name;
|
||||
},
|
||||
ListType: function ListType(_ref18) {
|
||||
var type = _ref18.type;
|
||||
return '[' + type + ']';
|
||||
},
|
||||
NonNullType: function NonNullType(_ref19) {
|
||||
var type = _ref19.type;
|
||||
return type + '!';
|
||||
},
|
||||
// Type System Definitions
|
||||
SchemaDefinition: function SchemaDefinition(_ref20) {
|
||||
var directives = _ref20.directives,
|
||||
operationTypes = _ref20.operationTypes;
|
||||
return join(['schema', join(directives, ' '), block(operationTypes)], ' ');
|
||||
},
|
||||
OperationTypeDefinition: function OperationTypeDefinition(_ref21) {
|
||||
var operation = _ref21.operation,
|
||||
type = _ref21.type;
|
||||
return operation + ': ' + type;
|
||||
},
|
||||
ScalarTypeDefinition: addDescription(function (_ref22) {
|
||||
var name = _ref22.name,
|
||||
directives = _ref22.directives;
|
||||
return join(['scalar', name, join(directives, ' ')], ' ');
|
||||
}),
|
||||
ObjectTypeDefinition: addDescription(function (_ref23) {
|
||||
var name = _ref23.name,
|
||||
interfaces = _ref23.interfaces,
|
||||
directives = _ref23.directives,
|
||||
fields = _ref23.fields;
|
||||
return join(['type', name, wrap('implements ', join(interfaces, ' & ')), join(directives, ' '), block(fields)], ' ');
|
||||
}),
|
||||
FieldDefinition: addDescription(function (_ref24) {
|
||||
var name = _ref24.name,
|
||||
args = _ref24.arguments,
|
||||
type = _ref24.type,
|
||||
directives = _ref24.directives;
|
||||
return name + (hasMultilineItems(args) ? wrap('(\n', indent(join(args, '\n')), '\n)') : wrap('(', join(args, ', '), ')')) + ': ' + type + wrap(' ', join(directives, ' '));
|
||||
}),
|
||||
InputValueDefinition: addDescription(function (_ref25) {
|
||||
var name = _ref25.name,
|
||||
type = _ref25.type,
|
||||
defaultValue = _ref25.defaultValue,
|
||||
directives = _ref25.directives;
|
||||
return join([name + ': ' + type, wrap('= ', defaultValue), join(directives, ' ')], ' ');
|
||||
}),
|
||||
InterfaceTypeDefinition: addDescription(function (_ref26) {
|
||||
var name = _ref26.name,
|
||||
directives = _ref26.directives,
|
||||
fields = _ref26.fields;
|
||||
return join(['interface', name, join(directives, ' '), block(fields)], ' ');
|
||||
}),
|
||||
UnionTypeDefinition: addDescription(function (_ref27) {
|
||||
var name = _ref27.name,
|
||||
directives = _ref27.directives,
|
||||
types = _ref27.types;
|
||||
return join(['union', name, join(directives, ' '), types && types.length !== 0 ? '= ' + join(types, ' | ') : ''], ' ');
|
||||
}),
|
||||
EnumTypeDefinition: addDescription(function (_ref28) {
|
||||
var name = _ref28.name,
|
||||
directives = _ref28.directives,
|
||||
values = _ref28.values;
|
||||
return join(['enum', name, join(directives, ' '), block(values)], ' ');
|
||||
}),
|
||||
EnumValueDefinition: addDescription(function (_ref29) {
|
||||
var name = _ref29.name,
|
||||
directives = _ref29.directives;
|
||||
return join([name, join(directives, ' ')], ' ');
|
||||
}),
|
||||
InputObjectTypeDefinition: addDescription(function (_ref30) {
|
||||
var name = _ref30.name,
|
||||
directives = _ref30.directives,
|
||||
fields = _ref30.fields;
|
||||
return join(['input', name, join(directives, ' '), block(fields)], ' ');
|
||||
}),
|
||||
DirectiveDefinition: addDescription(function (_ref31) {
|
||||
var name = _ref31.name,
|
||||
args = _ref31.arguments,
|
||||
repeatable = _ref31.repeatable,
|
||||
locations = _ref31.locations;
|
||||
return 'directive @' + name + (hasMultilineItems(args) ? wrap('(\n', indent(join(args, '\n')), '\n)') : wrap('(', join(args, ', '), ')')) + (repeatable ? ' repeatable' : '') + ' on ' + join(locations, ' | ');
|
||||
}),
|
||||
SchemaExtension: function SchemaExtension(_ref32) {
|
||||
var directives = _ref32.directives,
|
||||
operationTypes = _ref32.operationTypes;
|
||||
return join(['extend schema', join(directives, ' '), block(operationTypes)], ' ');
|
||||
},
|
||||
ScalarTypeExtension: function ScalarTypeExtension(_ref33) {
|
||||
var name = _ref33.name,
|
||||
directives = _ref33.directives;
|
||||
return join(['extend scalar', name, join(directives, ' ')], ' ');
|
||||
},
|
||||
ObjectTypeExtension: function ObjectTypeExtension(_ref34) {
|
||||
var name = _ref34.name,
|
||||
interfaces = _ref34.interfaces,
|
||||
directives = _ref34.directives,
|
||||
fields = _ref34.fields;
|
||||
return join(['extend type', name, wrap('implements ', join(interfaces, ' & ')), join(directives, ' '), block(fields)], ' ');
|
||||
},
|
||||
InterfaceTypeExtension: function InterfaceTypeExtension(_ref35) {
|
||||
var name = _ref35.name,
|
||||
directives = _ref35.directives,
|
||||
fields = _ref35.fields;
|
||||
return join(['extend interface', name, join(directives, ' '), block(fields)], ' ');
|
||||
},
|
||||
UnionTypeExtension: function UnionTypeExtension(_ref36) {
|
||||
var name = _ref36.name,
|
||||
directives = _ref36.directives,
|
||||
types = _ref36.types;
|
||||
return join(['extend union', name, join(directives, ' '), types && types.length !== 0 ? '= ' + join(types, ' | ') : ''], ' ');
|
||||
},
|
||||
EnumTypeExtension: function EnumTypeExtension(_ref37) {
|
||||
var name = _ref37.name,
|
||||
directives = _ref37.directives,
|
||||
values = _ref37.values;
|
||||
return join(['extend enum', name, join(directives, ' '), block(values)], ' ');
|
||||
},
|
||||
InputObjectTypeExtension: function InputObjectTypeExtension(_ref38) {
|
||||
var name = _ref38.name,
|
||||
directives = _ref38.directives,
|
||||
fields = _ref38.fields;
|
||||
return join(['extend input', name, join(directives, ' '), block(fields)], ' ');
|
||||
}
|
||||
};
|
||||
|
||||
function addDescription(cb) {
|
||||
return function (node) {
|
||||
return join([node.description, cb(node)], '\n');
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Given maybeArray, print an empty string if it is null or empty, otherwise
|
||||
* print all items together separated by separator if provided
|
||||
*/
|
||||
|
||||
|
||||
function join(maybeArray, separator) {
|
||||
return maybeArray ? maybeArray.filter(function (x) {
|
||||
return x;
|
||||
}).join(separator || '') : '';
|
||||
}
|
||||
/**
|
||||
* Given array, print each item on its own line, wrapped in an
|
||||
* indented "{ }" block.
|
||||
*/
|
||||
|
||||
|
||||
function block(array) {
|
||||
return array && array.length !== 0 ? '{\n' + indent(join(array, '\n')) + '\n}' : '';
|
||||
}
|
||||
/**
|
||||
* If maybeString is not null or empty, then wrap with start and end, otherwise
|
||||
* print an empty string.
|
||||
*/
|
||||
|
||||
|
||||
function wrap(start, maybeString, end) {
|
||||
return maybeString ? start + maybeString + (end || '') : '';
|
||||
}
|
||||
|
||||
function indent(maybeString) {
|
||||
return maybeString && ' ' + maybeString.replace(/\n/g, '\n ');
|
||||
}
|
||||
|
||||
function isMultiline(string) {
|
||||
return string.indexOf('\n') !== -1;
|
||||
}
|
||||
|
||||
function hasMultilineItems(maybeArray) {
|
||||
return maybeArray && maybeArray.some(isMultiline);
|
||||
}
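// Illustrative sketch of what the helpers above produce (inputs are made up,
// shown only to make the printing rules concrete):
//
//   join(['query', '', 'MyOp'], ' ')        // -> 'query MyOp' (falsy items dropped)
//   wrap('(', join(['a', 'b'], ', '), ')')  // -> '(a, b)'
//   wrap('(', '', ')')                      // -> '' (nothing to wrap)
//   block(['id', 'name'])                   // -> '{\n  id\n  name\n}'
//   indent('id\nname')                      // -> '  id\n  name'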
|
||||
270
node_modules/graphql/language/printer.js.flow
generated
vendored
Normal file
@@ -0,0 +1,270 @@
|
||||
// @flow strict
|
||||
|
||||
import { visit } from './visitor';
|
||||
import { type ASTNode } from './ast';
|
||||
import { printBlockString } from './blockString';
|
||||
|
||||
/**
|
||||
* Converts an AST into a string, using one set of reasonable
|
||||
* formatting rules.
|
||||
*/
|
||||
export function print(ast: ASTNode): string {
|
||||
return visit(ast, { leave: printDocASTReducer });
|
||||
}
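// Usage sketch: round-tripping a document through the printer. Assumes `parse`
// from './parser' is imported where this is used; the query text is made up.
//
//   const ast = parse('query GetUser { user(id: 4) { name } }');
//   print(ast);
//   // 'query GetUser {\n  user(id: 4) {\n    name\n  }\n}\n'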
|
||||
|
||||
// TODO: provide better type coverage in future
|
||||
const printDocASTReducer: any = {
|
||||
Name: node => node.value,
|
||||
Variable: node => '$' + node.name,
|
||||
|
||||
// Document
|
||||
|
||||
Document: node => join(node.definitions, '\n\n') + '\n',
|
||||
|
||||
OperationDefinition(node) {
|
||||
const op = node.operation;
|
||||
const name = node.name;
|
||||
const varDefs = wrap('(', join(node.variableDefinitions, ', '), ')');
|
||||
const directives = join(node.directives, ' ');
|
||||
const selectionSet = node.selectionSet;
|
||||
// Anonymous queries with no directives or variable definitions can use
|
||||
// the query short form.
|
||||
return !name && !directives && !varDefs && op === 'query'
|
||||
? selectionSet
|
||||
: join([op, join([name, varDefs]), directives, selectionSet], ' ');
|
||||
},
|
||||
|
||||
VariableDefinition: ({ variable, type, defaultValue, directives }) =>
|
||||
variable +
|
||||
': ' +
|
||||
type +
|
||||
wrap(' = ', defaultValue) +
|
||||
wrap(' ', join(directives, ' ')),
|
||||
SelectionSet: ({ selections }) => block(selections),
|
||||
|
||||
Field: ({ alias, name, arguments: args, directives, selectionSet }) =>
|
||||
join(
|
||||
[
|
||||
wrap('', alias, ': ') + name + wrap('(', join(args, ', '), ')'),
|
||||
join(directives, ' '),
|
||||
selectionSet,
|
||||
],
|
||||
' ',
|
||||
),
|
||||
|
||||
Argument: ({ name, value }) => name + ': ' + value,
|
||||
|
||||
// Fragments
|
||||
|
||||
FragmentSpread: ({ name, directives }) =>
|
||||
'...' + name + wrap(' ', join(directives, ' ')),
|
||||
|
||||
InlineFragment: ({ typeCondition, directives, selectionSet }) =>
|
||||
join(
|
||||
['...', wrap('on ', typeCondition), join(directives, ' '), selectionSet],
|
||||
' ',
|
||||
),
|
||||
|
||||
FragmentDefinition: ({
|
||||
name,
|
||||
typeCondition,
|
||||
variableDefinitions,
|
||||
directives,
|
||||
selectionSet,
|
||||
}) =>
|
||||
// Note: fragment variable definitions are experimental and may be changed
|
||||
// or removed in the future.
|
||||
`fragment ${name}${wrap('(', join(variableDefinitions, ', '), ')')} ` +
|
||||
`on ${typeCondition} ${wrap('', join(directives, ' '), ' ')}` +
|
||||
selectionSet,
|
||||
|
||||
// Value
|
||||
|
||||
IntValue: ({ value }) => value,
|
||||
FloatValue: ({ value }) => value,
|
||||
StringValue: ({ value, block: isBlockString }, key) =>
|
||||
isBlockString
|
||||
? printBlockString(value, key === 'description' ? '' : ' ')
|
||||
: JSON.stringify(value),
|
||||
BooleanValue: ({ value }) => (value ? 'true' : 'false'),
|
||||
NullValue: () => 'null',
|
||||
EnumValue: ({ value }) => value,
|
||||
ListValue: ({ values }) => '[' + join(values, ', ') + ']',
|
||||
ObjectValue: ({ fields }) => '{' + join(fields, ', ') + '}',
|
||||
ObjectField: ({ name, value }) => name + ': ' + value,
|
||||
|
||||
// Directive
|
||||
|
||||
Directive: ({ name, arguments: args }) =>
|
||||
'@' + name + wrap('(', join(args, ', '), ')'),
|
||||
|
||||
// Type
|
||||
|
||||
NamedType: ({ name }) => name,
|
||||
ListType: ({ type }) => '[' + type + ']',
|
||||
NonNullType: ({ type }) => type + '!',
|
||||
|
||||
// Type System Definitions
|
||||
|
||||
SchemaDefinition: ({ directives, operationTypes }) =>
|
||||
join(['schema', join(directives, ' '), block(operationTypes)], ' '),
|
||||
|
||||
OperationTypeDefinition: ({ operation, type }) => operation + ': ' + type,
|
||||
|
||||
ScalarTypeDefinition: addDescription(({ name, directives }) =>
|
||||
join(['scalar', name, join(directives, ' ')], ' '),
|
||||
),
|
||||
|
||||
ObjectTypeDefinition: addDescription(
|
||||
({ name, interfaces, directives, fields }) =>
|
||||
join(
|
||||
[
|
||||
'type',
|
||||
name,
|
||||
wrap('implements ', join(interfaces, ' & ')),
|
||||
join(directives, ' '),
|
||||
block(fields),
|
||||
],
|
||||
' ',
|
||||
),
|
||||
),
|
||||
|
||||
FieldDefinition: addDescription(
|
||||
({ name, arguments: args, type, directives }) =>
|
||||
name +
|
||||
(hasMultilineItems(args)
|
||||
? wrap('(\n', indent(join(args, '\n')), '\n)')
|
||||
: wrap('(', join(args, ', '), ')')) +
|
||||
': ' +
|
||||
type +
|
||||
wrap(' ', join(directives, ' ')),
|
||||
),
|
||||
|
||||
InputValueDefinition: addDescription(
|
||||
({ name, type, defaultValue, directives }) =>
|
||||
join(
|
||||
[name + ': ' + type, wrap('= ', defaultValue), join(directives, ' ')],
|
||||
' ',
|
||||
),
|
||||
),
|
||||
|
||||
InterfaceTypeDefinition: addDescription(({ name, directives, fields }) =>
|
||||
join(['interface', name, join(directives, ' '), block(fields)], ' '),
|
||||
),
|
||||
|
||||
UnionTypeDefinition: addDescription(({ name, directives, types }) =>
|
||||
join(
|
||||
[
|
||||
'union',
|
||||
name,
|
||||
join(directives, ' '),
|
||||
types && types.length !== 0 ? '= ' + join(types, ' | ') : '',
|
||||
],
|
||||
' ',
|
||||
),
|
||||
),
|
||||
|
||||
EnumTypeDefinition: addDescription(({ name, directives, values }) =>
|
||||
join(['enum', name, join(directives, ' '), block(values)], ' '),
|
||||
),
|
||||
|
||||
EnumValueDefinition: addDescription(({ name, directives }) =>
|
||||
join([name, join(directives, ' ')], ' '),
|
||||
),
|
||||
|
||||
InputObjectTypeDefinition: addDescription(({ name, directives, fields }) =>
|
||||
join(['input', name, join(directives, ' '), block(fields)], ' '),
|
||||
),
|
||||
|
||||
DirectiveDefinition: addDescription(
|
||||
({ name, arguments: args, repeatable, locations }) =>
|
||||
'directive @' +
|
||||
name +
|
||||
(hasMultilineItems(args)
|
||||
? wrap('(\n', indent(join(args, '\n')), '\n)')
|
||||
: wrap('(', join(args, ', '), ')')) +
|
||||
(repeatable ? ' repeatable' : '') +
|
||||
' on ' +
|
||||
join(locations, ' | '),
|
||||
),
|
||||
|
||||
SchemaExtension: ({ directives, operationTypes }) =>
|
||||
join(['extend schema', join(directives, ' '), block(operationTypes)], ' '),
|
||||
|
||||
ScalarTypeExtension: ({ name, directives }) =>
|
||||
join(['extend scalar', name, join(directives, ' ')], ' '),
|
||||
|
||||
ObjectTypeExtension: ({ name, interfaces, directives, fields }) =>
|
||||
join(
|
||||
[
|
||||
'extend type',
|
||||
name,
|
||||
wrap('implements ', join(interfaces, ' & ')),
|
||||
join(directives, ' '),
|
||||
block(fields),
|
||||
],
|
||||
' ',
|
||||
),
|
||||
|
||||
InterfaceTypeExtension: ({ name, directives, fields }) =>
|
||||
join(['extend interface', name, join(directives, ' '), block(fields)], ' '),
|
||||
|
||||
UnionTypeExtension: ({ name, directives, types }) =>
|
||||
join(
|
||||
[
|
||||
'extend union',
|
||||
name,
|
||||
join(directives, ' '),
|
||||
types && types.length !== 0 ? '= ' + join(types, ' | ') : '',
|
||||
],
|
||||
' ',
|
||||
),
|
||||
|
||||
EnumTypeExtension: ({ name, directives, values }) =>
|
||||
join(['extend enum', name, join(directives, ' '), block(values)], ' '),
|
||||
|
||||
InputObjectTypeExtension: ({ name, directives, fields }) =>
|
||||
join(['extend input', name, join(directives, ' '), block(fields)], ' '),
|
||||
};
|
||||
|
||||
function addDescription(cb) {
|
||||
return node => join([node.description, cb(node)], '\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Given maybeArray, print an empty string if it is null or empty, otherwise
|
||||
* print all items together separated by separator if provided
|
||||
*/
|
||||
function join(maybeArray, separator) {
|
||||
return maybeArray ? maybeArray.filter(x => x).join(separator || '') : '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Given array, print each item on its own line, wrapped in an
|
||||
* indented "{ }" block.
|
||||
*/
|
||||
function block(array) {
|
||||
return array && array.length !== 0
|
||||
? '{\n' + indent(join(array, '\n')) + '\n}'
|
||||
: '';
|
||||
}
|
||||
|
||||
/**
|
||||
* If maybeString is not null or empty, then wrap with start and end, otherwise
|
||||
* print an empty string.
|
||||
*/
|
||||
function wrap(start, maybeString, end) {
|
||||
return maybeString ? start + maybeString + (end || '') : '';
|
||||
}
|
||||
|
||||
function indent(maybeString) {
|
||||
return maybeString && ' ' + maybeString.replace(/\n/g, '\n ');
|
||||
}
|
||||
|
||||
function isMultiline(string) {
|
||||
return string.indexOf('\n') !== -1;
|
||||
}
|
||||
|
||||
function hasMultilineItems(maybeArray) {
|
||||
return maybeArray && maybeArray.some(isMultiline);
|
||||
}
|
||||
300
node_modules/graphql/language/printer.mjs
generated
vendored
Normal file
@@ -0,0 +1,300 @@
|
||||
import { visit } from './visitor';
|
||||
import { printBlockString } from './blockString';
|
||||
/**
|
||||
* Converts an AST into a string, using one set of reasonable
|
||||
* formatting rules.
|
||||
*/
|
||||
|
||||
export function print(ast) {
|
||||
return visit(ast, {
|
||||
leave: printDocASTReducer
|
||||
});
|
||||
} // TODO: provide better type coverage in future
|
||||
|
||||
var printDocASTReducer = {
|
||||
Name: function Name(node) {
|
||||
return node.value;
|
||||
},
|
||||
Variable: function Variable(node) {
|
||||
return '$' + node.name;
|
||||
},
|
||||
// Document
|
||||
Document: function Document(node) {
|
||||
return join(node.definitions, '\n\n') + '\n';
|
||||
},
|
||||
OperationDefinition: function OperationDefinition(node) {
|
||||
var op = node.operation;
|
||||
var name = node.name;
|
||||
var varDefs = wrap('(', join(node.variableDefinitions, ', '), ')');
|
||||
var directives = join(node.directives, ' ');
|
||||
var selectionSet = node.selectionSet; // Anonymous queries with no directives or variable definitions can use
|
||||
// the query short form.
|
||||
|
||||
return !name && !directives && !varDefs && op === 'query' ? selectionSet : join([op, join([name, varDefs]), directives, selectionSet], ' ');
|
||||
},
|
||||
VariableDefinition: function VariableDefinition(_ref) {
|
||||
var variable = _ref.variable,
|
||||
type = _ref.type,
|
||||
defaultValue = _ref.defaultValue,
|
||||
directives = _ref.directives;
|
||||
return variable + ': ' + type + wrap(' = ', defaultValue) + wrap(' ', join(directives, ' '));
|
||||
},
|
||||
SelectionSet: function SelectionSet(_ref2) {
|
||||
var selections = _ref2.selections;
|
||||
return block(selections);
|
||||
},
|
||||
Field: function Field(_ref3) {
|
||||
var alias = _ref3.alias,
|
||||
name = _ref3.name,
|
||||
args = _ref3.arguments,
|
||||
directives = _ref3.directives,
|
||||
selectionSet = _ref3.selectionSet;
|
||||
return join([wrap('', alias, ': ') + name + wrap('(', join(args, ', '), ')'), join(directives, ' '), selectionSet], ' ');
|
||||
},
|
||||
Argument: function Argument(_ref4) {
|
||||
var name = _ref4.name,
|
||||
value = _ref4.value;
|
||||
return name + ': ' + value;
|
||||
},
|
||||
// Fragments
|
||||
FragmentSpread: function FragmentSpread(_ref5) {
|
||||
var name = _ref5.name,
|
||||
directives = _ref5.directives;
|
||||
return '...' + name + wrap(' ', join(directives, ' '));
|
||||
},
|
||||
InlineFragment: function InlineFragment(_ref6) {
|
||||
var typeCondition = _ref6.typeCondition,
|
||||
directives = _ref6.directives,
|
||||
selectionSet = _ref6.selectionSet;
|
||||
return join(['...', wrap('on ', typeCondition), join(directives, ' '), selectionSet], ' ');
|
||||
},
|
||||
FragmentDefinition: function FragmentDefinition(_ref7) {
|
||||
var name = _ref7.name,
|
||||
typeCondition = _ref7.typeCondition,
|
||||
variableDefinitions = _ref7.variableDefinitions,
|
||||
directives = _ref7.directives,
|
||||
selectionSet = _ref7.selectionSet;
|
||||
return (// Note: fragment variable definitions are experimental and may be changed
|
||||
// or removed in the future.
|
||||
"fragment ".concat(name).concat(wrap('(', join(variableDefinitions, ', '), ')'), " ") + "on ".concat(typeCondition, " ").concat(wrap('', join(directives, ' '), ' ')) + selectionSet
|
||||
);
|
||||
},
|
||||
// Value
|
||||
IntValue: function IntValue(_ref8) {
|
||||
var value = _ref8.value;
|
||||
return value;
|
||||
},
|
||||
FloatValue: function FloatValue(_ref9) {
|
||||
var value = _ref9.value;
|
||||
return value;
|
||||
},
|
||||
StringValue: function StringValue(_ref10, key) {
|
||||
var value = _ref10.value,
|
||||
isBlockString = _ref10.block;
|
||||
return isBlockString ? printBlockString(value, key === 'description' ? '' : ' ') : JSON.stringify(value);
|
||||
},
|
||||
BooleanValue: function BooleanValue(_ref11) {
|
||||
var value = _ref11.value;
|
||||
return value ? 'true' : 'false';
|
||||
},
|
||||
NullValue: function NullValue() {
|
||||
return 'null';
|
||||
},
|
||||
EnumValue: function EnumValue(_ref12) {
|
||||
var value = _ref12.value;
|
||||
return value;
|
||||
},
|
||||
ListValue: function ListValue(_ref13) {
|
||||
var values = _ref13.values;
|
||||
return '[' + join(values, ', ') + ']';
|
||||
},
|
||||
ObjectValue: function ObjectValue(_ref14) {
|
||||
var fields = _ref14.fields;
|
||||
return '{' + join(fields, ', ') + '}';
|
||||
},
|
||||
ObjectField: function ObjectField(_ref15) {
|
||||
var name = _ref15.name,
|
||||
value = _ref15.value;
|
||||
return name + ': ' + value;
|
||||
},
|
||||
// Directive
|
||||
Directive: function Directive(_ref16) {
|
||||
var name = _ref16.name,
|
||||
args = _ref16.arguments;
|
||||
return '@' + name + wrap('(', join(args, ', '), ')');
|
||||
},
|
||||
// Type
|
||||
NamedType: function NamedType(_ref17) {
|
||||
var name = _ref17.name;
|
||||
return name;
|
||||
},
|
||||
ListType: function ListType(_ref18) {
|
||||
var type = _ref18.type;
|
||||
return '[' + type + ']';
|
||||
},
|
||||
NonNullType: function NonNullType(_ref19) {
|
||||
var type = _ref19.type;
|
||||
return type + '!';
|
||||
},
|
||||
// Type System Definitions
|
||||
SchemaDefinition: function SchemaDefinition(_ref20) {
|
||||
var directives = _ref20.directives,
|
||||
operationTypes = _ref20.operationTypes;
|
||||
return join(['schema', join(directives, ' '), block(operationTypes)], ' ');
|
||||
},
|
||||
OperationTypeDefinition: function OperationTypeDefinition(_ref21) {
|
||||
var operation = _ref21.operation,
|
||||
type = _ref21.type;
|
||||
return operation + ': ' + type;
|
||||
},
|
||||
ScalarTypeDefinition: addDescription(function (_ref22) {
|
||||
var name = _ref22.name,
|
||||
directives = _ref22.directives;
|
||||
return join(['scalar', name, join(directives, ' ')], ' ');
|
||||
}),
|
||||
ObjectTypeDefinition: addDescription(function (_ref23) {
|
||||
var name = _ref23.name,
|
||||
interfaces = _ref23.interfaces,
|
||||
directives = _ref23.directives,
|
||||
fields = _ref23.fields;
|
||||
return join(['type', name, wrap('implements ', join(interfaces, ' & ')), join(directives, ' '), block(fields)], ' ');
|
||||
}),
|
||||
FieldDefinition: addDescription(function (_ref24) {
|
||||
var name = _ref24.name,
|
||||
args = _ref24.arguments,
|
||||
type = _ref24.type,
|
||||
directives = _ref24.directives;
|
||||
return name + (hasMultilineItems(args) ? wrap('(\n', indent(join(args, '\n')), '\n)') : wrap('(', join(args, ', '), ')')) + ': ' + type + wrap(' ', join(directives, ' '));
|
||||
}),
|
||||
InputValueDefinition: addDescription(function (_ref25) {
|
||||
var name = _ref25.name,
|
||||
type = _ref25.type,
|
||||
defaultValue = _ref25.defaultValue,
|
||||
directives = _ref25.directives;
|
||||
return join([name + ': ' + type, wrap('= ', defaultValue), join(directives, ' ')], ' ');
|
||||
}),
|
||||
InterfaceTypeDefinition: addDescription(function (_ref26) {
|
||||
var name = _ref26.name,
|
||||
directives = _ref26.directives,
|
||||
fields = _ref26.fields;
|
||||
return join(['interface', name, join(directives, ' '), block(fields)], ' ');
|
||||
}),
|
||||
UnionTypeDefinition: addDescription(function (_ref27) {
|
||||
var name = _ref27.name,
|
||||
directives = _ref27.directives,
|
||||
types = _ref27.types;
|
||||
return join(['union', name, join(directives, ' '), types && types.length !== 0 ? '= ' + join(types, ' | ') : ''], ' ');
|
||||
}),
|
||||
EnumTypeDefinition: addDescription(function (_ref28) {
|
||||
var name = _ref28.name,
|
||||
directives = _ref28.directives,
|
||||
values = _ref28.values;
|
||||
return join(['enum', name, join(directives, ' '), block(values)], ' ');
|
||||
}),
|
||||
EnumValueDefinition: addDescription(function (_ref29) {
|
||||
var name = _ref29.name,
|
||||
directives = _ref29.directives;
|
||||
return join([name, join(directives, ' ')], ' ');
|
||||
}),
|
||||
InputObjectTypeDefinition: addDescription(function (_ref30) {
|
||||
var name = _ref30.name,
|
||||
directives = _ref30.directives,
|
||||
fields = _ref30.fields;
|
||||
return join(['input', name, join(directives, ' '), block(fields)], ' ');
|
||||
}),
|
||||
DirectiveDefinition: addDescription(function (_ref31) {
|
||||
var name = _ref31.name,
|
||||
args = _ref31.arguments,
|
||||
repeatable = _ref31.repeatable,
|
||||
locations = _ref31.locations;
|
||||
return 'directive @' + name + (hasMultilineItems(args) ? wrap('(\n', indent(join(args, '\n')), '\n)') : wrap('(', join(args, ', '), ')')) + (repeatable ? ' repeatable' : '') + ' on ' + join(locations, ' | ');
|
||||
}),
|
||||
SchemaExtension: function SchemaExtension(_ref32) {
|
||||
var directives = _ref32.directives,
|
||||
operationTypes = _ref32.operationTypes;
|
||||
return join(['extend schema', join(directives, ' '), block(operationTypes)], ' ');
|
||||
},
|
||||
ScalarTypeExtension: function ScalarTypeExtension(_ref33) {
|
||||
var name = _ref33.name,
|
||||
directives = _ref33.directives;
|
||||
return join(['extend scalar', name, join(directives, ' ')], ' ');
|
||||
},
|
||||
ObjectTypeExtension: function ObjectTypeExtension(_ref34) {
|
||||
var name = _ref34.name,
|
||||
interfaces = _ref34.interfaces,
|
||||
directives = _ref34.directives,
|
||||
fields = _ref34.fields;
|
||||
return join(['extend type', name, wrap('implements ', join(interfaces, ' & ')), join(directives, ' '), block(fields)], ' ');
|
||||
},
|
||||
InterfaceTypeExtension: function InterfaceTypeExtension(_ref35) {
|
||||
var name = _ref35.name,
|
||||
directives = _ref35.directives,
|
||||
fields = _ref35.fields;
|
||||
return join(['extend interface', name, join(directives, ' '), block(fields)], ' ');
|
||||
},
|
||||
UnionTypeExtension: function UnionTypeExtension(_ref36) {
|
||||
var name = _ref36.name,
|
||||
directives = _ref36.directives,
|
||||
types = _ref36.types;
|
||||
return join(['extend union', name, join(directives, ' '), types && types.length !== 0 ? '= ' + join(types, ' | ') : ''], ' ');
|
||||
},
|
||||
EnumTypeExtension: function EnumTypeExtension(_ref37) {
|
||||
var name = _ref37.name,
|
||||
directives = _ref37.directives,
|
||||
values = _ref37.values;
|
||||
return join(['extend enum', name, join(directives, ' '), block(values)], ' ');
|
||||
},
|
||||
InputObjectTypeExtension: function InputObjectTypeExtension(_ref38) {
|
||||
var name = _ref38.name,
|
||||
directives = _ref38.directives,
|
||||
fields = _ref38.fields;
|
||||
return join(['extend input', name, join(directives, ' '), block(fields)], ' ');
|
||||
}
|
||||
};
|
||||
|
||||
function addDescription(cb) {
|
||||
return function (node) {
|
||||
return join([node.description, cb(node)], '\n');
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Given maybeArray, print an empty string if it is null or empty, otherwise
|
||||
* print all items together separated by separator if provided
|
||||
*/
|
||||
|
||||
|
||||
function join(maybeArray, separator) {
|
||||
return maybeArray ? maybeArray.filter(function (x) {
|
||||
return x;
|
||||
}).join(separator || '') : '';
|
||||
}
|
||||
/**
|
||||
* Given array, print each item on its own line, wrapped in an
|
||||
* indented "{ }" block.
|
||||
*/
|
||||
|
||||
|
||||
function block(array) {
|
||||
return array && array.length !== 0 ? '{\n' + indent(join(array, '\n')) + '\n}' : '';
|
||||
}
|
||||
/**
|
||||
* If maybeString is not null or empty, then wrap with start and end, otherwise
|
||||
* print an empty string.
|
||||
*/
|
||||
|
||||
|
||||
function wrap(start, maybeString, end) {
|
||||
return maybeString ? start + maybeString + (end || '') : '';
|
||||
}
|
||||
|
||||
function indent(maybeString) {
|
||||
return maybeString && ' ' + maybeString.replace(/\n/g, '\n ');
|
||||
}
|
||||
|
||||
function isMultiline(string) {
|
||||
return string.indexOf('\n') !== -1;
|
||||
}
|
||||
|
||||
function hasMultilineItems(maybeArray) {
|
||||
return maybeArray && maybeArray.some(isMultiline);
|
||||
}
|
||||
19
node_modules/graphql/language/source.d.ts
generated
vendored
Normal file
@@ -0,0 +1,19 @@
|
||||
interface Location {
|
||||
line: number;
|
||||
column: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* A representation of source input to GraphQL.
|
||||
* `name` and `locationOffset` are optional. They are useful for clients who
|
||||
* store GraphQL documents in source files; for example, if the GraphQL input
|
||||
* starts at line 40 in a file named Foo.graphql, it might be useful for name to
|
||||
* be "Foo.graphql" and location to be `{ line: 40, column: 0 }`.
|
||||
* line and column in locationOffset are 1-indexed
|
||||
*/
|
||||
export class Source {
|
||||
body: string;
|
||||
name: string;
|
||||
locationOffset: Location;
|
||||
constructor(body: string, name?: string, locationOffset?: Location);
|
||||
}
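// Usage sketch (hypothetical host file): locationOffset describes where the
// GraphQL text begins inside Foo.graphql, so errors can be reported against it.
//
//   const source = new Source('{ me { name } }', 'Foo.graphql', { line: 40, column: 1 });
//   source.name;           // 'Foo.graphql'
//   source.locationOffset; // { line: 40, column: 1 }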
|
||||
35
node_modules/graphql/language/source.js
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.Source = void 0;
|
||||
|
||||
var _devAssert = _interopRequireDefault(require("../jsutils/devAssert"));
|
||||
|
||||
var _defineToStringTag = _interopRequireDefault(require("../jsutils/defineToStringTag"));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
/**
|
||||
* A representation of source input to GraphQL.
|
||||
* `name` and `locationOffset` are optional. They are useful for clients who
|
||||
* store GraphQL documents in source files; for example, if the GraphQL input
|
||||
* starts at line 40 in a file named Foo.graphql, it might be useful for name to
|
||||
* be "Foo.graphql" and location to be `{ line: 40, column: 0 }`.
|
||||
* line and column in locationOffset are 1-indexed
|
||||
*/
|
||||
var Source = function Source(body, name, locationOffset) {
|
||||
this.body = body;
|
||||
this.name = name || 'GraphQL request';
|
||||
this.locationOffset = locationOffset || {
|
||||
line: 1,
|
||||
column: 1
|
||||
};
|
||||
this.locationOffset.line > 0 || (0, _devAssert.default)(0, 'line in locationOffset is 1-indexed and must be positive');
|
||||
this.locationOffset.column > 0 || (0, _devAssert.default)(0, 'column in locationOffset is 1-indexed and must be positive');
|
||||
}; // Conditionally apply `[Symbol.toStringTag]` if `Symbol`s are supported
|
||||
|
||||
|
||||
exports.Source = Source;
|
||||
(0, _defineToStringTag.default)(Source);
|
||||
40
node_modules/graphql/language/source.js.flow
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
// @flow strict
|
||||
|
||||
import devAssert from '../jsutils/devAssert';
|
||||
import defineToStringTag from '../jsutils/defineToStringTag';
|
||||
|
||||
type Location = {|
|
||||
line: number,
|
||||
column: number,
|
||||
|};
|
||||
|
||||
/**
|
||||
* A representation of source input to GraphQL.
|
||||
* `name` and `locationOffset` are optional. They are useful for clients who
|
||||
* store GraphQL documents in source files; for example, if the GraphQL input
|
||||
* starts at line 40 in a file named Foo.graphql, it might be useful for name to
|
||||
* be "Foo.graphql" and location to be `{ line: 40, column: 0 }`.
|
||||
* line and column in locationOffset are 1-indexed
|
||||
*/
|
||||
export class Source {
|
||||
body: string;
|
||||
name: string;
|
||||
locationOffset: Location;
|
||||
|
||||
constructor(body: string, name?: string, locationOffset?: Location): void {
|
||||
this.body = body;
|
||||
this.name = name || 'GraphQL request';
|
||||
this.locationOffset = locationOffset || { line: 1, column: 1 };
|
||||
devAssert(
|
||||
this.locationOffset.line > 0,
|
||||
'line in locationOffset is 1-indexed and must be positive',
|
||||
);
|
||||
devAssert(
|
||||
this.locationOffset.column > 0,
|
||||
'column in locationOffset is 1-indexed and must be positive',
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Conditionally apply `[Symbol.toStringTag]` if `Symbol`s are supported
|
||||
defineToStringTag(Source);
|
||||
23
node_modules/graphql/language/source.mjs
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
import devAssert from '../jsutils/devAssert';
|
||||
import defineToStringTag from '../jsutils/defineToStringTag';
|
||||
|
||||
/**
|
||||
* A representation of source input to GraphQL.
|
||||
* `name` and `locationOffset` are optional. They are useful for clients who
|
||||
* store GraphQL documents in source files; for example, if the GraphQL input
|
||||
* starts at line 40 in a file named Foo.graphql, it might be useful for name to
|
||||
* be "Foo.graphql" and location to be `{ line: 40, column: 0 }`.
|
||||
* line and column in locationOffset are 1-indexed
|
||||
*/
|
||||
export var Source = function Source(body, name, locationOffset) {
|
||||
this.body = body;
|
||||
this.name = name || 'GraphQL request';
|
||||
this.locationOffset = locationOffset || {
|
||||
line: 1,
|
||||
column: 1
|
||||
};
|
||||
this.locationOffset.line > 0 || devAssert(0, 'line in locationOffset is 1-indexed and must be positive');
|
||||
this.locationOffset.column > 0 || devAssert(0, 'column in locationOffset is 1-indexed and must be positive');
|
||||
}; // Conditionally apply `[Symbol.toStringTag]` if `Symbol`s are supported
|
||||
|
||||
defineToStringTag(Source);
|
||||
35
node_modules/graphql/language/tokenKind.d.ts
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
/**
|
||||
* An exported enum describing the different kinds of tokens that the
|
||||
* lexer emits.
|
||||
*/
|
||||
export const TokenKind: _TokenKind;
|
||||
|
||||
type _TokenKind = {
|
||||
SOF: '<SOF>';
|
||||
EOF: '<EOF>';
|
||||
BANG: '!';
|
||||
DOLLAR: '$';
|
||||
AMP: '&';
|
||||
PAREN_L: '(';
|
||||
PAREN_R: ')';
|
||||
SPREAD: '...';
|
||||
COLON: ':';
|
||||
EQUALS: '=';
|
||||
AT: '@';
|
||||
BRACKET_L: '[';
|
||||
BRACKET_R: ']';
|
||||
BRACE_L: '{';
|
||||
PIPE: '|';
|
||||
BRACE_R: '}';
|
||||
NAME: 'Name';
|
||||
INT: 'Int';
|
||||
FLOAT: 'Float';
|
||||
STRING: 'String';
|
||||
BLOCK_STRING: 'BlockString';
|
||||
COMMENT: 'Comment';
|
||||
};
|
||||
|
||||
/**
|
||||
* The enum type representing the token kinds values.
|
||||
*/
|
||||
export type TokenKindEnum = _TokenKind[keyof _TokenKind];
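// Illustrative only: TokenKindEnum resolves to the union of the literal values
// above, so a token's `kind` can be checked against the TokenKind constants.
//
//   const kind: TokenKindEnum = TokenKind.NAME;  // OK: 'Name'
//   // const bad: TokenKindEnum = 'Whitespace';  // type error: not a token kind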
|
||||
40
node_modules/graphql/language/tokenKind.js
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.TokenKind = void 0;
|
||||
|
||||
/**
|
||||
* An exported enum describing the different kinds of tokens that the
|
||||
* lexer emits.
|
||||
*/
|
||||
var TokenKind = Object.freeze({
|
||||
SOF: '<SOF>',
|
||||
EOF: '<EOF>',
|
||||
BANG: '!',
|
||||
DOLLAR: '$',
|
||||
AMP: '&',
|
||||
PAREN_L: '(',
|
||||
PAREN_R: ')',
|
||||
SPREAD: '...',
|
||||
COLON: ':',
|
||||
EQUALS: '=',
|
||||
AT: '@',
|
||||
BRACKET_L: '[',
|
||||
BRACKET_R: ']',
|
||||
BRACE_L: '{',
|
||||
PIPE: '|',
|
||||
BRACE_R: '}',
|
||||
NAME: 'Name',
|
||||
INT: 'Int',
|
||||
FLOAT: 'Float',
|
||||
STRING: 'String',
|
||||
BLOCK_STRING: 'BlockString',
|
||||
COMMENT: 'Comment'
|
||||
});
|
||||
/**
|
||||
* The enum type representing the token kinds values.
|
||||
*/
|
||||
|
||||
exports.TokenKind = TokenKind;
|
||||
35
node_modules/graphql/language/tokenKind.js.flow
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
// @flow strict
|
||||
|
||||
/**
|
||||
* An exported enum describing the different kinds of tokens that the
|
||||
* lexer emits.
|
||||
*/
|
||||
export const TokenKind = Object.freeze({
|
||||
SOF: '<SOF>',
|
||||
EOF: '<EOF>',
|
||||
BANG: '!',
|
||||
DOLLAR: '$',
|
||||
AMP: '&',
|
||||
PAREN_L: '(',
|
||||
PAREN_R: ')',
|
||||
SPREAD: '...',
|
||||
COLON: ':',
|
||||
EQUALS: '=',
|
||||
AT: '@',
|
||||
BRACKET_L: '[',
|
||||
BRACKET_R: ']',
|
||||
BRACE_L: '{',
|
||||
PIPE: '|',
|
||||
BRACE_R: '}',
|
||||
NAME: 'Name',
|
||||
INT: 'Int',
|
||||
FLOAT: 'Float',
|
||||
STRING: 'String',
|
||||
BLOCK_STRING: 'BlockString',
|
||||
COMMENT: 'Comment',
|
||||
});
|
||||
|
||||
/**
|
||||
* The enum type representing the token kinds values.
|
||||
*/
|
||||
export type TokenKindEnum = $Values<typeof TokenKind>;
|
||||
31
node_modules/graphql/language/tokenKind.mjs
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
/**
|
||||
* An exported enum describing the different kinds of tokens that the
|
||||
* lexer emits.
|
||||
*/
|
||||
export var TokenKind = Object.freeze({
|
||||
SOF: '<SOF>',
|
||||
EOF: '<EOF>',
|
||||
BANG: '!',
|
||||
DOLLAR: '$',
|
||||
AMP: '&',
|
||||
PAREN_L: '(',
|
||||
PAREN_R: ')',
|
||||
SPREAD: '...',
|
||||
COLON: ':',
|
||||
EQUALS: '=',
|
||||
AT: '@',
|
||||
BRACKET_L: '[',
|
||||
BRACKET_R: ']',
|
||||
BRACE_L: '{',
|
||||
PIPE: '|',
|
||||
BRACE_R: '}',
|
||||
NAME: 'Name',
|
||||
INT: 'Int',
|
||||
FLOAT: 'Float',
|
||||
STRING: 'String',
|
||||
BLOCK_STRING: 'BlockString',
|
||||
COMMENT: 'Comment'
|
||||
});
|
||||
/**
|
||||
* The enum type representing the token kinds values.
|
||||
*/
|
||||
264
node_modules/graphql/language/visitor.d.ts
generated
vendored
Normal file
@@ -0,0 +1,264 @@
|
||||
import Maybe from '../tsutils/Maybe';
|
||||
import { TypeInfo } from '../utilities/TypeInfo';
|
||||
import { ASTNode, ASTKindToNode } from './ast';
|
||||
|
||||
/**
|
||||
 * A visitor is provided to visit; it contains the collection of
|
||||
* relevant functions to be called during the visitor's traversal.
|
||||
*/
|
||||
export type ASTVisitor = Visitor<ASTKindToNode>;
|
||||
export type Visitor<KindToNode, Nodes = KindToNode[keyof KindToNode]> =
|
||||
| EnterLeaveVisitor<KindToNode, Nodes>
|
||||
| ShapeMapVisitor<KindToNode, Nodes>;
|
||||
|
||||
interface EnterLeave<T> {
|
||||
readonly enter?: T;
|
||||
readonly leave?: T;
|
||||
}
|
||||
|
||||
type EnterLeaveVisitor<KindToNode, Nodes> = EnterLeave<
|
||||
VisitFn<Nodes> | { [K in keyof KindToNode]?: VisitFn<Nodes, KindToNode[K]> }
|
||||
>;
|
||||
|
||||
type ShapeMapVisitor<KindToNode, Nodes> = {
|
||||
[K in keyof KindToNode]?:
|
||||
| VisitFn<Nodes, KindToNode[K]>
|
||||
| EnterLeave<VisitFn<Nodes, KindToNode[K]>>;
|
||||
};
|
||||
|
||||
/**
|
||||
* A visitor is comprised of visit functions, which are called on each node
|
||||
* during the visitor's traversal.
|
||||
*/
|
||||
export type VisitFn<TAnyNode, TVisitedNode = TAnyNode> = (
|
||||
/** The current node being visited. */
|
||||
node: TVisitedNode,
|
||||
/** The index or key to this node from the parent node or Array. */
|
||||
key: string | number | undefined,
|
||||
/** The parent immediately above this node, which may be an Array. */
|
||||
parent: TAnyNode | ReadonlyArray<TAnyNode> | undefined,
|
||||
/** The key path to get to this node from the root node. */
|
||||
path: ReadonlyArray<string | number>,
|
||||
/** All nodes and Arrays visited before reaching parent of this node.
|
||||
* These correspond to array indices in `path`.
|
||||
* Note: ancestors includes arrays which contain the parent of visited node.
|
||||
*/
|
||||
ancestors: ReadonlyArray<TAnyNode | ReadonlyArray<TAnyNode>>,
|
||||
) => any;
|
||||
|
||||
/**
|
||||
 * A KeyMap describes the traversable properties of each kind of node.
|
||||
*/
|
||||
export type VisitorKeyMap<T> = { [P in keyof T]: ReadonlyArray<keyof T[P]> };
|
||||
|
||||
// TODO: Should be `[]`, but that requires TypeScript@3
|
||||
type EmptyTuple = never[];
|
||||
|
||||
export const QueryDocumentKeys: {
|
||||
Name: EmptyTuple;
|
||||
|
||||
Document: ['definitions'];
|
||||
// Prettier forces trailing commas, but TS pre 3.2 doesn't allow them.
|
||||
// prettier-ignore
|
||||
OperationDefinition: [
|
||||
'name',
|
||||
'variableDefinitions',
|
||||
'directives',
|
||||
'selectionSet'
|
||||
];
|
||||
VariableDefinition: ['variable', 'type', 'defaultValue', 'directives'];
|
||||
Variable: ['name'];
|
||||
SelectionSet: ['selections'];
|
||||
Field: ['alias', 'name', 'arguments', 'directives', 'selectionSet'];
|
||||
Argument: ['name', 'value'];
|
||||
|
||||
FragmentSpread: ['name', 'directives'];
|
||||
InlineFragment: ['typeCondition', 'directives', 'selectionSet'];
|
||||
// prettier-ignore
|
||||
FragmentDefinition: [
|
||||
'name',
|
||||
// Note: fragment variable definitions are experimental and may be changed
|
||||
// or removed in the future.
|
||||
'variableDefinitions',
|
||||
'typeCondition',
|
||||
'directives',
|
||||
'selectionSet'
|
||||
];
|
||||
|
||||
IntValue: EmptyTuple;
|
||||
FloatValue: EmptyTuple;
|
||||
StringValue: EmptyTuple;
|
||||
BooleanValue: EmptyTuple;
|
||||
NullValue: EmptyTuple;
|
||||
EnumValue: EmptyTuple;
|
||||
ListValue: ['values'];
|
||||
ObjectValue: ['fields'];
|
||||
ObjectField: ['name', 'value'];
|
||||
|
||||
Directive: ['name', 'arguments'];
|
||||
|
||||
NamedType: ['name'];
|
||||
ListType: ['type'];
|
||||
NonNullType: ['type'];
|
||||
|
||||
SchemaDefinition: ['directives', 'operationTypes'];
|
||||
OperationTypeDefinition: ['type'];
|
||||
|
||||
ScalarTypeDefinition: ['description', 'name', 'directives'];
|
||||
// prettier-ignore
|
||||
ObjectTypeDefinition: [
|
||||
'description',
|
||||
'name',
|
||||
'interfaces',
|
||||
'directives',
|
||||
'fields'
|
||||
];
|
||||
FieldDefinition: ['description', 'name', 'arguments', 'type', 'directives'];
|
||||
// prettier-ignore
|
||||
InputValueDefinition: [
|
||||
'description',
|
||||
'name',
|
||||
'type',
|
||||
'defaultValue',
|
||||
'directives'
|
||||
];
|
||||
InterfaceTypeDefinition: ['description', 'name', 'directives', 'fields'];
|
||||
UnionTypeDefinition: ['description', 'name', 'directives', 'types'];
|
||||
EnumTypeDefinition: ['description', 'name', 'directives', 'values'];
|
||||
EnumValueDefinition: ['description', 'name', 'directives'];
|
||||
InputObjectTypeDefinition: ['description', 'name', 'directives', 'fields'];
|
||||
|
||||
DirectiveDefinition: ['description', 'name', 'arguments', 'locations'];
|
||||
|
||||
SchemaExtension: ['directives', 'operationTypes'];
|
||||
|
||||
ScalarTypeExtension: ['name', 'directives'];
|
||||
ObjectTypeExtension: ['name', 'interfaces', 'directives', 'fields'];
|
||||
InterfaceTypeExtension: ['name', 'directives', 'fields'];
|
||||
UnionTypeExtension: ['name', 'directives', 'types'];
|
||||
EnumTypeExtension: ['name', 'directives', 'values'];
|
||||
InputObjectTypeExtension: ['name', 'directives', 'fields'];
|
||||
};
|
||||
|
||||
export const BREAK: any;
|
||||
|
||||
/**
|
||||
* visit() will walk through an AST using a depth first traversal, calling
|
||||
* the visitor's enter function at each node in the traversal, and calling the
|
||||
* leave function after visiting that node and all of its child nodes.
|
||||
*
|
||||
* By returning different values from the enter and leave functions, the
|
||||
* behavior of the visitor can be altered, including skipping over a sub-tree of
|
||||
* the AST (by returning false), editing the AST by returning a value or null
|
||||
 * to remove the value, or stopping the whole traversal by returning BREAK.
|
||||
*
|
||||
* When using visit() to edit an AST, the original AST will not be modified, and
|
||||
* a new version of the AST with the changes applied will be returned from the
|
||||
* visit function.
|
||||
*
|
||||
* const editedAST = visit(ast, {
|
||||
* enter(node, key, parent, path, ancestors) {
|
||||
* // @return
|
||||
* // undefined: no action
|
||||
* // false: skip visiting this node
|
||||
* // visitor.BREAK: stop visiting altogether
|
||||
* // null: delete this node
|
||||
* // any value: replace this node with the returned value
|
||||
* },
|
||||
* leave(node, key, parent, path, ancestors) {
|
||||
* // @return
|
||||
* // undefined: no action
|
||||
* // false: no action
|
||||
* // visitor.BREAK: stop visiting altogether
|
||||
* // null: delete this node
|
||||
* // any value: replace this node with the returned value
|
||||
* }
|
||||
* });
|
||||
*
|
||||
 * As an alternative to providing enter() and leave() functions, a visitor can
|
||||
* instead provide functions named the same as the kinds of AST nodes, or
|
||||
* enter/leave visitors at a named key, leading to four permutations of
|
||||
* visitor API:
|
||||
*
|
||||
 * 1) Named visitors triggered when entering a node of a specific kind.
|
||||
*
|
||||
* visit(ast, {
|
||||
* Kind(node) {
|
||||
* // enter the "Kind" node
|
||||
* }
|
||||
* })
|
||||
*
|
||||
* 2) Named visitors that trigger upon entering and leaving a node of
|
||||
* a specific kind.
|
||||
*
|
||||
* visit(ast, {
|
||||
* Kind: {
|
||||
* enter(node) {
|
||||
* // enter the "Kind" node
|
||||
* }
|
||||
* leave(node) {
|
||||
* // leave the "Kind" node
|
||||
* }
|
||||
* }
|
||||
* })
|
||||
*
|
||||
* 3) Generic visitors that trigger upon entering and leaving any node.
|
||||
*
|
||||
* visit(ast, {
|
||||
* enter(node) {
|
||||
* // enter any node
|
||||
* },
|
||||
* leave(node) {
|
||||
* // leave any node
|
||||
* }
|
||||
* })
|
||||
*
|
||||
* 4) Parallel visitors for entering and leaving nodes of a specific kind.
|
||||
*
|
||||
* visit(ast, {
|
||||
* enter: {
|
||||
* Kind(node) {
|
||||
* // enter the "Kind" node
|
||||
* }
|
||||
* },
|
||||
* leave: {
|
||||
* Kind(node) {
|
||||
* // leave the "Kind" node
|
||||
* }
|
||||
* }
|
||||
* })
|
||||
*/
|
||||
export function visit(
|
||||
root: ASTNode,
|
||||
visitor: Visitor<ASTKindToNode>,
|
||||
visitorKeys?: VisitorKeyMap<ASTKindToNode>, // default: QueryDocumentKeys
|
||||
): any;
|
||||
|
||||
/**
|
||||
* Creates a new visitor instance which delegates to many visitors to run in
|
||||
 * parallel. Each visitor will be called for each node before moving on.
|
||||
*
|
||||
* If a prior visitor edits a node, no following visitors will see that node.
|
||||
*/
|
||||
export function visitInParallel(
|
||||
visitors: ReadonlyArray<Visitor<ASTKindToNode>>,
|
||||
): Visitor<ASTKindToNode>;
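// Usage sketch (both visitors and `documentAST` are hypothetical): each visitor
// sees every node during a single traversal instead of walking the AST twice.
//
//   const merged = visitInParallel([collectFragmentNames, collectVariableUsages]);
//   visit(documentAST, merged);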
|
||||
|
||||
/**
|
||||
* Creates a new visitor instance which maintains a provided TypeInfo instance
|
||||
 * along with the visitor it wraps.
|
||||
*/
|
||||
export function visitWithTypeInfo(
|
||||
typeInfo: TypeInfo,
|
||||
visitor: Visitor<ASTKindToNode>,
|
||||
): Visitor<ASTKindToNode>;
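// Usage sketch (assumes a GraphQLSchema instance `schema` and a `documentAST`):
// the TypeInfo is kept in sync with the traversal, so type information is
// available inside the callbacks.
//
//   const typeInfo = new TypeInfo(schema);
//   visit(documentAST, visitWithTypeInfo(typeInfo, {
//     Field(node) {
//       // typeInfo.getFieldDef() and typeInfo.getType() now describe `node`
//     },
//   }));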
|
||||
|
||||
/**
|
||||
 * Given a visitor instance, whether it is leaving or not, and a node kind, return
|
||||
* the function the visitor runtime should call.
|
||||
*/
|
||||
export function getVisitFn(
|
||||
visitor: Visitor<any>,
|
||||
kind: string,
|
||||
isLeaving: boolean,
|
||||
): Maybe<VisitFn<any>>;
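// Usage sketch: resolves which callback, if any, a visitor supplies for a kind.
//
//   getVisitFn({ Field(node) {} }, 'Field', false);            // the Field function
//   getVisitFn({ leave: { Field(node) {} } }, 'Field', true);  // the leave.Field function
//   getVisitFn({}, 'Field', true);                              // undefined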
|
||||
441
node_modules/graphql/language/visitor.js
generated
vendored
Normal file
@@ -0,0 +1,441 @@
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", {
|
||||
value: true
|
||||
});
|
||||
exports.visit = visit;
|
||||
exports.visitInParallel = visitInParallel;
|
||||
exports.visitWithTypeInfo = visitWithTypeInfo;
|
||||
exports.getVisitFn = getVisitFn;
|
||||
exports.BREAK = exports.QueryDocumentKeys = void 0;
|
||||
|
||||
var _inspect = _interopRequireDefault(require("../jsutils/inspect"));
|
||||
|
||||
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
|
||||
|
||||
var QueryDocumentKeys = {
|
||||
Name: [],
|
||||
Document: ['definitions'],
|
||||
OperationDefinition: ['name', 'variableDefinitions', 'directives', 'selectionSet'],
|
||||
VariableDefinition: ['variable', 'type', 'defaultValue', 'directives'],
|
||||
Variable: ['name'],
|
||||
SelectionSet: ['selections'],
|
||||
Field: ['alias', 'name', 'arguments', 'directives', 'selectionSet'],
|
||||
Argument: ['name', 'value'],
|
||||
FragmentSpread: ['name', 'directives'],
|
||||
InlineFragment: ['typeCondition', 'directives', 'selectionSet'],
|
||||
FragmentDefinition: ['name', // Note: fragment variable definitions are experimental and may be changed
|
||||
// or removed in the future.
|
||||
'variableDefinitions', 'typeCondition', 'directives', 'selectionSet'],
|
||||
IntValue: [],
|
||||
FloatValue: [],
|
||||
StringValue: [],
|
||||
BooleanValue: [],
|
||||
NullValue: [],
|
||||
EnumValue: [],
|
||||
ListValue: ['values'],
|
||||
ObjectValue: ['fields'],
|
||||
ObjectField: ['name', 'value'],
|
||||
Directive: ['name', 'arguments'],
|
||||
NamedType: ['name'],
|
||||
ListType: ['type'],
|
||||
NonNullType: ['type'],
|
||||
SchemaDefinition: ['directives', 'operationTypes'],
|
||||
OperationTypeDefinition: ['type'],
|
||||
ScalarTypeDefinition: ['description', 'name', 'directives'],
|
||||
ObjectTypeDefinition: ['description', 'name', 'interfaces', 'directives', 'fields'],
|
||||
FieldDefinition: ['description', 'name', 'arguments', 'type', 'directives'],
|
||||
InputValueDefinition: ['description', 'name', 'type', 'defaultValue', 'directives'],
|
||||
InterfaceTypeDefinition: ['description', 'name', 'directives', 'fields'],
|
||||
UnionTypeDefinition: ['description', 'name', 'directives', 'types'],
|
||||
EnumTypeDefinition: ['description', 'name', 'directives', 'values'],
|
||||
EnumValueDefinition: ['description', 'name', 'directives'],
|
||||
InputObjectTypeDefinition: ['description', 'name', 'directives', 'fields'],
|
||||
DirectiveDefinition: ['description', 'name', 'arguments', 'locations'],
|
||||
SchemaExtension: ['directives', 'operationTypes'],
|
||||
ScalarTypeExtension: ['name', 'directives'],
|
||||
ObjectTypeExtension: ['name', 'interfaces', 'directives', 'fields'],
|
||||
InterfaceTypeExtension: ['name', 'directives', 'fields'],
|
||||
UnionTypeExtension: ['name', 'directives', 'types'],
|
||||
EnumTypeExtension: ['name', 'directives', 'values'],
|
||||
InputObjectTypeExtension: ['name', 'directives', 'fields']
|
||||
};
|
||||
exports.QueryDocumentKeys = QueryDocumentKeys;
|
||||
var BREAK = Object.freeze({});
|
||||
/**
|
||||
* visit() will walk through an AST using a depth first traversal, calling
|
||||
* the visitor's enter function at each node in the traversal, and calling the
|
||||
* leave function after visiting that node and all of its child nodes.
|
||||
*
|
||||
* By returning different values from the enter and leave functions, the
|
||||
* behavior of the visitor can be altered, including skipping over a sub-tree of
|
||||
* the AST (by returning false), editing the AST by returning a value or null
|
||||
 * to remove the value, or stopping the whole traversal by returning BREAK.
|
||||
*
|
||||
* When using visit() to edit an AST, the original AST will not be modified, and
|
||||
* a new version of the AST with the changes applied will be returned from the
|
||||
* visit function.
|
||||
*
|
||||
* const editedAST = visit(ast, {
|
||||
* enter(node, key, parent, path, ancestors) {
|
||||
* // @return
|
||||
* // undefined: no action
|
||||
* // false: skip visiting this node
|
||||
* // visitor.BREAK: stop visiting altogether
|
||||
* // null: delete this node
|
||||
* // any value: replace this node with the returned value
|
||||
* },
|
||||
* leave(node, key, parent, path, ancestors) {
|
||||
* // @return
|
||||
* // undefined: no action
|
||||
* // false: no action
|
||||
* // visitor.BREAK: stop visiting altogether
|
||||
* // null: delete this node
|
||||
* // any value: replace this node with the returned value
|
||||
* }
|
||||
* });
|
||||
*
|
||||
 * As an alternative to providing enter() and leave() functions, a visitor can
|
||||
* instead provide functions named the same as the kinds of AST nodes, or
|
||||
* enter/leave visitors at a named key, leading to four permutations of
|
||||
* visitor API:
|
||||
*
|
||||
 * 1) Named visitors triggered when entering a node of a specific kind.
|
||||
*
|
||||
* visit(ast, {
|
||||
* Kind(node) {
|
||||
* // enter the "Kind" node
|
||||
* }
|
||||
* })
|
||||
*
|
||||
* 2) Named visitors that trigger upon entering and leaving a node of
|
||||
* a specific kind.
|
||||
*
|
||||
* visit(ast, {
|
||||
* Kind: {
|
||||
* enter(node) {
|
||||
* // enter the "Kind" node
|
||||
* }
|
||||
* leave(node) {
|
||||
* // leave the "Kind" node
|
||||
* }
|
||||
* }
|
||||
* })
|
||||
*
|
||||
* 3) Generic visitors that trigger upon entering and leaving any node.
|
||||
*
|
||||
* visit(ast, {
|
||||
* enter(node) {
|
||||
* // enter any node
|
||||
* },
|
||||
* leave(node) {
|
||||
* // leave any node
|
||||
* }
|
||||
* })
|
||||
*
|
||||
* 4) Parallel visitors for entering and leaving nodes of a specific kind.
|
||||
*
|
||||
* visit(ast, {
|
||||
* enter: {
|
||||
* Kind(node) {
|
||||
* // enter the "Kind" node
|
||||
* }
|
||||
* },
|
||||
* leave: {
|
||||
* Kind(node) {
|
||||
* // leave the "Kind" node
|
||||
* }
|
||||
* }
|
||||
* })
|
||||
*/
|
||||
|
||||
exports.BREAK = BREAK;
|
||||
|
||||
function visit(root, visitor) {
|
||||
var visitorKeys = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : QueryDocumentKeys;
|
||||
|
||||
/* eslint-disable no-undef-init */
|
||||
var stack = undefined;
|
||||
var inArray = Array.isArray(root);
|
||||
var keys = [root];
|
||||
var index = -1;
|
||||
var edits = [];
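// `edits` collects [key, value] replacements returned by visitor callbacks for
// entries under the node whose keys are currently being traversed; they are
// applied to a shallow copy when that node is left, so the original AST is
// never mutated.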
|
||||
var node = undefined;
|
||||
var key = undefined;
|
||||
var parent = undefined;
|
||||
var path = [];
|
||||
var ancestors = [];
|
||||
var newRoot = root;
|
||||
/* eslint-enable no-undef-init */
|
||||
|
||||
do {
|
||||
index++;
|
||||
var isLeaving = index === keys.length;
|
||||
var isEdited = isLeaving && edits.length !== 0;
|
||||
|
||||
if (isLeaving) {
|
||||
key = ancestors.length === 0 ? undefined : path[path.length - 1];
|
||||
node = parent;
|
||||
parent = ancestors.pop();
|
||||
|
||||
if (isEdited) {
|
||||
if (inArray) {
|
||||
node = node.slice();
|
||||
} else {
|
||||
var clone = {};
|
||||
|
||||
for (var _i2 = 0, _Object$keys2 = Object.keys(node); _i2 < _Object$keys2.length; _i2++) {
|
||||
var k = _Object$keys2[_i2];
|
||||
clone[k] = node[k];
|
||||
}
|
||||
|
||||
node = clone;
|
||||
}
|
||||
|
||||
var editOffset = 0;
|
||||
|
||||
for (var ii = 0; ii < edits.length; ii++) {
|
||||
var editKey = edits[ii][0];
|
||||
var editValue = edits[ii][1];
|
||||
|
||||
if (inArray) {
|
||||
editKey -= editOffset;
|
||||
}
|
||||
|
||||
if (inArray && editValue === null) {
|
||||
node.splice(editKey, 1);
|
||||
editOffset++;
|
||||
} else {
|
||||
node[editKey] = editValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
index = stack.index;
|
||||
keys = stack.keys;
|
||||
edits = stack.edits;
|
||||
inArray = stack.inArray;
|
||||
stack = stack.prev;
|
||||
} else {
|
||||
key = parent ? inArray ? index : keys[index] : undefined;
|
||||
node = parent ? parent[key] : newRoot;
|
||||
|
||||
if (node === null || node === undefined) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (parent) {
|
||||
path.push(key);
|
||||
}
|
||||
}
|
||||
|
||||
var result = void 0;
|
||||
|
||||
if (!Array.isArray(node)) {
|
||||
if (!isNode(node)) {
|
||||
throw new Error('Invalid AST Node: ' + (0, _inspect.default)(node));
|
||||
}
|
||||
|
||||
var visitFn = getVisitFn(visitor, node.kind, isLeaving);
|
||||
|
||||
if (visitFn) {
|
||||
result = visitFn.call(visitor, node, key, parent, path, ancestors);
|
||||
|
||||
if (result === BREAK) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (result === false) {
|
||||
if (!isLeaving) {
|
||||
path.pop();
|
||||
continue;
|
||||
}
|
||||
} else if (result !== undefined) {
|
||||
edits.push([key, result]);
|
||||
|
||||
if (!isLeaving) {
|
||||
if (isNode(result)) {
|
||||
node = result;
|
||||
} else {
|
||||
path.pop();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (result === undefined && isEdited) {
|
||||
edits.push([key, node]);
|
||||
}
|
||||
|
||||
if (isLeaving) {
|
||||
path.pop();
|
||||
} else {
|
||||
stack = {
|
||||
inArray: inArray,
|
||||
index: index,
|
||||
keys: keys,
|
||||
edits: edits,
|
||||
prev: stack
|
||||
};
|
||||
inArray = Array.isArray(node);
|
||||
keys = inArray ? node : visitorKeys[node.kind] || [];
|
||||
index = -1;
|
||||
edits = [];
|
||||
|
||||
if (parent) {
|
||||
ancestors.push(parent);
|
||||
}
|
||||
|
||||
parent = node;
|
||||
}
|
||||
} while (stack !== undefined);
|
||||
|
||||
if (edits.length !== 0) {
|
||||
newRoot = edits[edits.length - 1][1];
|
||||
}
|
||||
|
||||
return newRoot;
|
||||
}
|
||||
|
||||
function isNode(maybeNode) {
|
||||
return Boolean(maybeNode && typeof maybeNode.kind === 'string');
|
||||
}
|
||||
/**
|
||||
* Creates a new visitor instance which delegates to many visitors to run in
|
||||
 * parallel. Each visitor will be called for each node before moving on.
|
||||
*
|
||||
* If a prior visitor edits a node, no following visitors will see that node.
|
||||
*/
|
||||
|
||||
|
||||
function visitInParallel(visitors) {
|
||||
var skipping = new Array(visitors.length);
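// Per-visitor skip state: a slot holds the node whose subtree that visitor
// declined (it returned false on enter), or BREAK once that visitor has halted.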
|
||||
return {
|
||||
enter: function enter(node) {
|
||||
for (var i = 0; i < visitors.length; i++) {
|
||||
if (!skipping[i]) {
|
||||
var fn = getVisitFn(visitors[i], node.kind,
|
||||
/* isLeaving */
|
||||
false);
|
||||
|
||||
if (fn) {
|
||||
var result = fn.apply(visitors[i], arguments);
|
||||
|
||||
if (result === false) {
|
||||
skipping[i] = node;
|
||||
} else if (result === BREAK) {
|
||||
skipping[i] = BREAK;
|
||||
} else if (result !== undefined) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
leave: function leave(node) {
|
||||
for (var i = 0; i < visitors.length; i++) {
|
||||
if (!skipping[i]) {
|
||||
var fn = getVisitFn(visitors[i], node.kind,
|
||||
/* isLeaving */
|
||||
true);
|
||||
|
||||
if (fn) {
|
||||
var result = fn.apply(visitors[i], arguments);
|
||||
|
||||
if (result === BREAK) {
|
||||
skipping[i] = BREAK;
|
||||
} else if (result !== undefined && result !== false) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
} else if (skipping[i] === node) {
|
||||
skipping[i] = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Creates a new visitor instance which maintains a provided TypeInfo instance
|
||||
 * along with the visitor it wraps.
|
||||
*/
|
||||
|
||||
|
||||
function visitWithTypeInfo(typeInfo, visitor) {
|
||||
return {
|
||||
enter: function enter(node) {
|
||||
typeInfo.enter(node);
|
||||
var fn = getVisitFn(visitor, node.kind,
|
||||
/* isLeaving */
|
||||
false);
|
||||
|
||||
if (fn) {
|
||||
var result = fn.apply(visitor, arguments);
|
||||
|
||||
if (result !== undefined) {
|
||||
typeInfo.leave(node);
|
||||
|
||||
if (isNode(result)) {
|
||||
typeInfo.enter(result);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
},
|
||||
leave: function leave(node) {
|
||||
var fn = getVisitFn(visitor, node.kind,
|
||||
/* isLeaving */
|
||||
true);
|
||||
var result;
|
||||
|
||||
if (fn) {
|
||||
result = fn.apply(visitor, arguments);
|
||||
}
|
||||
|
||||
typeInfo.leave(node);
|
||||
return result;
|
||||
}
|
||||
};
|
||||
}
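// Editor's note: a hedged sketch, not part of this module, of pairing a
// TypeInfo instance with a visitor via visitWithTypeInfo. The schema SDL and
// query below are made up for illustration; TypeInfo, buildSchema, parse and
// visit are public exports of the 'graphql' package.
//
//   var graphql = require('graphql');
//   var schema = graphql.buildSchema('type Query { user: User } type User { id: ID }');
//   var typeInfo = new graphql.TypeInfo(schema);
//   graphql.visit(graphql.parse('{ user { id } }'), graphql.visitWithTypeInfo(typeInfo, {
//     Field: function (node) {
//       // typeInfo is advanced in lock-step with the traversal, so getType()
//       // reflects the type of the field currently being visited.
//       console.log(node.name.value, String(typeInfo.getType()));
//     }
//   }));
//   // Logs roughly: "user User", then "id ID"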
|
||||
/**
|
||||
* Given a visitor instance, a node kind, and whether it is entering or leaving, return
|
||||
* the function the visitor runtime should call.
|
||||
*/
|
||||
|
||||
|
||||
function getVisitFn(visitor, kind, isLeaving) {
|
||||
var kindVisitor = visitor[kind];
|
||||
|
||||
if (kindVisitor) {
|
||||
if (!isLeaving && typeof kindVisitor === 'function') {
|
||||
// { Kind() {} }
|
||||
return kindVisitor;
|
||||
}
|
||||
|
||||
var kindSpecificVisitor = isLeaving ? kindVisitor.leave : kindVisitor.enter;
|
||||
|
||||
if (typeof kindSpecificVisitor === 'function') {
|
||||
// { Kind: { enter() {}, leave() {} } }
|
||||
return kindSpecificVisitor;
|
||||
}
|
||||
} else {
|
||||
var specificVisitor = isLeaving ? visitor.leave : visitor.enter;
|
||||
|
||||
if (specificVisitor) {
|
||||
if (typeof specificVisitor === 'function') {
|
||||
// { enter() {}, leave() {} }
|
||||
return specificVisitor;
|
||||
}
|
||||
|
||||
var specificKindVisitor = specificVisitor[kind];
|
||||
|
||||
if (typeof specificKindVisitor === 'function') {
|
||||
// { enter: { Kind() {} }, leave: { Kind() {} } }
|
||||
return specificKindVisitor;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
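// Editor's note: an illustrative sketch, not part of this module, of the four
// visitor shapes getVisitFn resolves. The visitor objects are assumptions made
// up for the example.
//
//   var byKind        = { Field: function enterField() {} };
//   var byKindPair    = { Field: { leave: function leaveField() {} } };
//   var generic       = { enter: function enterAny() {} };
//   var genericByKind = { leave: { Field: function leaveFieldOnly() {} } };
//
//   getVisitFn(byKind, 'Field', false);        // -> enterField
//   getVisitFn(byKindPair, 'Field', true);     // -> leaveField
//   getVisitFn(generic, 'Field', false);       // -> enterAny
//   getVisitFn(genericByKind, 'Field', true);  // -> leaveFieldOnly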
|
||||
472
node_modules/graphql/language/visitor.js.flow
generated
vendored
Normal file
@@ -0,0 +1,472 @@
|
||||
// @flow strict
|
||||
|
||||
import inspect from '../jsutils/inspect';
|
||||
|
||||
import { type TypeInfo } from '../utilities/TypeInfo';
|
||||
|
||||
import { type ASTNode, type ASTKindToNode } from './ast';
|
||||
|
||||
/**
|
||||
* A visitor is provided to visit(); it contains the collection of
|
||||
* relevant functions to be called during the visitor's traversal.
|
||||
*/
|
||||
export type ASTVisitor = Visitor<ASTKindToNode>;
|
||||
export type Visitor<KindToNode, Nodes = $Values<KindToNode>> =
|
||||
| EnterLeave<
|
||||
| VisitFn<Nodes>
|
||||
| ShapeMap<KindToNode, <Node>(Node) => VisitFn<Nodes, Node>>,
|
||||
>
|
||||
| ShapeMap<
|
||||
KindToNode,
|
||||
<Node>(Node) => VisitFn<Nodes, Node> | EnterLeave<VisitFn<Nodes, Node>>,
|
||||
>;
|
||||
type EnterLeave<T> = {| +enter?: T, +leave?: T |};
|
||||
type ShapeMap<O, F> = $Shape<$ObjMap<O, F>>;
|
||||
|
||||
/**
|
||||
* A visitor is composed of visit functions, which are called on each node
|
||||
* during the visitor's traversal.
|
||||
*/
|
||||
export type VisitFn<TAnyNode, TVisitedNode: TAnyNode = TAnyNode> = (
|
||||
// The current node being visited.
|
||||
node: TVisitedNode,
|
||||
// The index or key to this node from the parent node or Array.
|
||||
key: string | number | void,
|
||||
// The parent immediately above this node, which may be an Array.
|
||||
parent: TAnyNode | $ReadOnlyArray<TAnyNode> | void,
|
||||
// The key path to get to this node from the root node.
|
||||
path: $ReadOnlyArray<string | number>,
|
||||
// All nodes and Arrays visited before reaching the parent of this node.
|
||||
// These correspond to array indices in `path`.
|
||||
// Note: ancestors includes arrays which contain the parent of the visited node.
|
||||
ancestors: $ReadOnlyArray<TAnyNode | $ReadOnlyArray<TAnyNode>>,
|
||||
) => any;
|
||||
|
||||
/**
|
||||
* A KeyMap describes the traversable properties of each kind of node.
|
||||
*/
|
||||
export type VisitorKeyMap<KindToNode> = $ObjMap<
|
||||
KindToNode,
|
||||
<T>(T) => $ReadOnlyArray<$Keys<T>>,
|
||||
>;
|
||||
|
||||
export const QueryDocumentKeys = {
|
||||
Name: [],
|
||||
|
||||
Document: ['definitions'],
|
||||
OperationDefinition: [
|
||||
'name',
|
||||
'variableDefinitions',
|
||||
'directives',
|
||||
'selectionSet',
|
||||
],
|
||||
VariableDefinition: ['variable', 'type', 'defaultValue', 'directives'],
|
||||
Variable: ['name'],
|
||||
SelectionSet: ['selections'],
|
||||
Field: ['alias', 'name', 'arguments', 'directives', 'selectionSet'],
|
||||
Argument: ['name', 'value'],
|
||||
|
||||
FragmentSpread: ['name', 'directives'],
|
||||
InlineFragment: ['typeCondition', 'directives', 'selectionSet'],
|
||||
FragmentDefinition: [
|
||||
'name',
|
||||
// Note: fragment variable definitions are experimental and may be changed
|
||||
// or removed in the future.
|
||||
'variableDefinitions',
|
||||
'typeCondition',
|
||||
'directives',
|
||||
'selectionSet',
|
||||
],
|
||||
|
||||
IntValue: [],
|
||||
FloatValue: [],
|
||||
StringValue: [],
|
||||
BooleanValue: [],
|
||||
NullValue: [],
|
||||
EnumValue: [],
|
||||
ListValue: ['values'],
|
||||
ObjectValue: ['fields'],
|
||||
ObjectField: ['name', 'value'],
|
||||
|
||||
Directive: ['name', 'arguments'],
|
||||
|
||||
NamedType: ['name'],
|
||||
ListType: ['type'],
|
||||
NonNullType: ['type'],
|
||||
|
||||
SchemaDefinition: ['directives', 'operationTypes'],
|
||||
OperationTypeDefinition: ['type'],
|
||||
|
||||
ScalarTypeDefinition: ['description', 'name', 'directives'],
|
||||
ObjectTypeDefinition: [
|
||||
'description',
|
||||
'name',
|
||||
'interfaces',
|
||||
'directives',
|
||||
'fields',
|
||||
],
|
||||
FieldDefinition: ['description', 'name', 'arguments', 'type', 'directives'],
|
||||
InputValueDefinition: [
|
||||
'description',
|
||||
'name',
|
||||
'type',
|
||||
'defaultValue',
|
||||
'directives',
|
||||
],
|
||||
InterfaceTypeDefinition: ['description', 'name', 'directives', 'fields'],
|
||||
UnionTypeDefinition: ['description', 'name', 'directives', 'types'],
|
||||
EnumTypeDefinition: ['description', 'name', 'directives', 'values'],
|
||||
EnumValueDefinition: ['description', 'name', 'directives'],
|
||||
InputObjectTypeDefinition: ['description', 'name', 'directives', 'fields'],
|
||||
|
||||
DirectiveDefinition: ['description', 'name', 'arguments', 'locations'],
|
||||
|
||||
SchemaExtension: ['directives', 'operationTypes'],
|
||||
|
||||
ScalarTypeExtension: ['name', 'directives'],
|
||||
ObjectTypeExtension: ['name', 'interfaces', 'directives', 'fields'],
|
||||
InterfaceTypeExtension: ['name', 'directives', 'fields'],
|
||||
UnionTypeExtension: ['name', 'directives', 'types'],
|
||||
EnumTypeExtension: ['name', 'directives', 'values'],
|
||||
InputObjectTypeExtension: ['name', 'directives', 'fields'],
|
||||
};
|
||||
|
||||
export const BREAK = Object.freeze({});
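// Editor's note: a small sketch, not part of this module, of using BREAK to
// abandon a traversal once the answer is known. The 'graphql' entry point and
// the query are illustrative assumptions.
//
//   const { parse, visit, BREAK } = require('graphql');
//   let usesSkip = false;
//   visit(parse('{ a @skip(if: true) b c }'), {
//     Directive(node) {
//       if (node.name.value === 'skip') {
//         usesSkip = true;
//         return BREAK; // stops the whole traversal immediately
//       }
//     },
//   });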
|
||||
|
||||
/**
|
||||
* visit() will walk through an AST using a depth first traversal, calling
|
||||
* the visitor's enter function at each node in the traversal, and calling the
|
||||
* leave function after visiting that node and all of its child nodes.
|
||||
*
|
||||
* By returning different values from the enter and leave functions, the
|
||||
* behavior of the visitor can be altered, including skipping over a sub-tree of
|
||||
* the AST (by returning false), editing the AST by returning a value or null
|
||||
* to remove the value, or stopping the whole traversal by returning BREAK.
|
||||
*
|
||||
* When using visit() to edit an AST, the original AST will not be modified, and
|
||||
* a new version of the AST with the changes applied will be returned from the
|
||||
* visit function.
|
||||
*
|
||||
* const editedAST = visit(ast, {
|
||||
* enter(node, key, parent, path, ancestors) {
|
||||
* // @return
|
||||
* // undefined: no action
|
||||
* // false: skip visiting this node
|
||||
* // visitor.BREAK: stop visiting altogether
|
||||
* // null: delete this node
|
||||
* // any value: replace this node with the returned value
|
||||
* },
|
||||
* leave(node, key, parent, path, ancestors) {
|
||||
* // @return
|
||||
* // undefined: no action
|
||||
* // false: no action
|
||||
* // visitor.BREAK: stop visiting altogether
|
||||
* // null: delete this node
|
||||
* // any value: replace this node with the returned value
|
||||
* }
|
||||
* });
|
||||
*
|
||||
* As an alternative to providing enter() and leave() functions, a visitor can
|
||||
* instead provide functions named the same as the kinds of AST nodes, or
|
||||
* enter/leave visitors at a named key, leading to four permutations of
|
||||
* visitor API:
|
||||
*
|
||||
* 1) Named visitors triggered when entering a node of a specific kind.
|
||||
*
|
||||
* visit(ast, {
|
||||
* Kind(node) {
|
||||
* // enter the "Kind" node
|
||||
* }
|
||||
* })
|
||||
*
|
||||
* 2) Named visitors that trigger upon entering and leaving a node of
|
||||
* a specific kind.
|
||||
*
|
||||
* visit(ast, {
|
||||
* Kind: {
|
||||
* enter(node) {
|
||||
* // enter the "Kind" node
|
||||
* }
|
||||
* leave(node) {
|
||||
* // leave the "Kind" node
|
||||
* }
|
||||
* }
|
||||
* })
|
||||
*
|
||||
* 3) Generic visitors that trigger upon entering and leaving any node.
|
||||
*
|
||||
* visit(ast, {
|
||||
* enter(node) {
|
||||
* // enter any node
|
||||
* },
|
||||
* leave(node) {
|
||||
* // leave any node
|
||||
* }
|
||||
* })
|
||||
*
|
||||
* 4) Parallel visitors for entering and leaving nodes of a specific kind.
|
||||
*
|
||||
* visit(ast, {
|
||||
* enter: {
|
||||
* Kind(node) {
|
||||
* // enter the "Kind" node
|
||||
* }
|
||||
* },
|
||||
* leave: {
|
||||
* Kind(node) {
|
||||
* // leave the "Kind" node
|
||||
* }
|
||||
* }
|
||||
* })
|
||||
*/
|
||||
export function visit(
|
||||
root: ASTNode,
|
||||
visitor: Visitor<ASTKindToNode>,
|
||||
visitorKeys: VisitorKeyMap<ASTKindToNode> = QueryDocumentKeys,
|
||||
): any {
|
||||
/* eslint-disable no-undef-init */
|
||||
let stack: any = undefined;
|
||||
let inArray = Array.isArray(root);
|
||||
let keys: any = [root];
|
||||
let index = -1;
|
||||
let edits = [];
|
||||
let node: any = undefined;
|
||||
let key: any = undefined;
|
||||
let parent: any = undefined;
|
||||
const path: any = [];
|
||||
const ancestors = [];
|
||||
let newRoot = root;
|
||||
/* eslint-enable no-undef-init */
|
||||
|
||||
do {
|
||||
index++;
|
||||
const isLeaving = index === keys.length;
|
||||
const isEdited = isLeaving && edits.length !== 0;
|
||||
if (isLeaving) {
|
||||
key = ancestors.length === 0 ? undefined : path[path.length - 1];
|
||||
node = parent;
|
||||
parent = ancestors.pop();
|
||||
if (isEdited) {
|
||||
if (inArray) {
|
||||
node = node.slice();
|
||||
} else {
|
||||
const clone = {};
|
||||
for (const k of Object.keys(node)) {
|
||||
clone[k] = node[k];
|
||||
}
|
||||
node = clone;
|
||||
}
|
||||
let editOffset = 0;
|
||||
for (let ii = 0; ii < edits.length; ii++) {
|
||||
let editKey: any = edits[ii][0];
|
||||
const editValue = edits[ii][1];
|
||||
if (inArray) {
|
||||
editKey -= editOffset;
|
||||
}
|
||||
if (inArray && editValue === null) {
|
||||
node.splice(editKey, 1);
|
||||
editOffset++;
|
||||
} else {
|
||||
node[editKey] = editValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
index = stack.index;
|
||||
keys = stack.keys;
|
||||
edits = stack.edits;
|
||||
inArray = stack.inArray;
|
||||
stack = stack.prev;
|
||||
} else {
|
||||
key = parent ? (inArray ? index : keys[index]) : undefined;
|
||||
node = parent ? parent[key] : newRoot;
|
||||
if (node === null || node === undefined) {
|
||||
continue;
|
||||
}
|
||||
if (parent) {
|
||||
path.push(key);
|
||||
}
|
||||
}
|
||||
|
||||
let result;
|
||||
if (!Array.isArray(node)) {
|
||||
if (!isNode(node)) {
|
||||
throw new Error('Invalid AST Node: ' + inspect(node));
|
||||
}
|
||||
const visitFn = getVisitFn(visitor, node.kind, isLeaving);
|
||||
if (visitFn) {
|
||||
result = visitFn.call(visitor, node, key, parent, path, ancestors);
|
||||
|
||||
if (result === BREAK) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (result === false) {
|
||||
if (!isLeaving) {
|
||||
path.pop();
|
||||
continue;
|
||||
}
|
||||
} else if (result !== undefined) {
|
||||
edits.push([key, result]);
|
||||
if (!isLeaving) {
|
||||
if (isNode(result)) {
|
||||
node = result;
|
||||
} else {
|
||||
path.pop();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (result === undefined && isEdited) {
|
||||
edits.push([key, node]);
|
||||
}
|
||||
|
||||
if (isLeaving) {
|
||||
path.pop();
|
||||
} else {
|
||||
stack = { inArray, index, keys, edits, prev: stack };
|
||||
inArray = Array.isArray(node);
|
||||
keys = inArray ? node : visitorKeys[node.kind] || [];
|
||||
index = -1;
|
||||
edits = [];
|
||||
if (parent) {
|
||||
ancestors.push(parent);
|
||||
}
|
||||
parent = node;
|
||||
}
|
||||
} while (stack !== undefined);
|
||||
|
||||
if (edits.length !== 0) {
|
||||
newRoot = edits[edits.length - 1][1];
|
||||
}
|
||||
|
||||
return newRoot;
|
||||
}
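// Editor's note: a minimal editing sketch, not part of this module, assuming
// the public 'graphql' entry point and an ad-hoc query. Returning null from a
// visit function deletes the node in the returned copy; the original AST is
// left untouched.
//
//   const { parse, print, visit } = require('graphql');
//   const ast = parse('{ user { id secret } }');
//   const edited = visit(ast, {
//     Field(node) {
//       // Drop any field literally named "secret" from the edited copy.
//       if (node.name.value === 'secret') {
//         return null;
//       }
//     },
//   });
//   print(edited); // reprints the query with the "secret" field removed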
|
||||
|
||||
function isNode(maybeNode): boolean %checks {
|
||||
return Boolean(maybeNode && typeof maybeNode.kind === 'string');
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new visitor instance which delegates to many visitors to run in
|
||||
* parallel. Each visitor's functions will be called for each node before moving on.
|
||||
*
|
||||
* If a prior visitor edits a node, no following visitors will see that node.
|
||||
*/
|
||||
export function visitInParallel(
|
||||
visitors: $ReadOnlyArray<Visitor<ASTKindToNode>>,
|
||||
): Visitor<ASTKindToNode> {
|
||||
const skipping = new Array(visitors.length);
|
||||
|
||||
return {
|
||||
enter(node) {
|
||||
for (let i = 0; i < visitors.length; i++) {
|
||||
if (!skipping[i]) {
|
||||
const fn = getVisitFn(visitors[i], node.kind, /* isLeaving */ false);
|
||||
if (fn) {
|
||||
const result = fn.apply(visitors[i], arguments);
|
||||
if (result === false) {
|
||||
skipping[i] = node;
|
||||
} else if (result === BREAK) {
|
||||
skipping[i] = BREAK;
|
||||
} else if (result !== undefined) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
leave(node) {
|
||||
for (let i = 0; i < visitors.length; i++) {
|
||||
if (!skipping[i]) {
|
||||
const fn = getVisitFn(visitors[i], node.kind, /* isLeaving */ true);
|
||||
if (fn) {
|
||||
const result = fn.apply(visitors[i], arguments);
|
||||
if (result === BREAK) {
|
||||
skipping[i] = BREAK;
|
||||
} else if (result !== undefined && result !== false) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
} else if (skipping[i] === node) {
|
||||
skipping[i] = null;
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new visitor instance which maintains a provided TypeInfo instance
|
||||
* while delegating to the provided visitor.
|
||||
*/
|
||||
export function visitWithTypeInfo(
|
||||
typeInfo: TypeInfo,
|
||||
visitor: Visitor<ASTKindToNode>,
|
||||
): Visitor<ASTKindToNode> {
|
||||
return {
|
||||
enter(node) {
|
||||
typeInfo.enter(node);
|
||||
const fn = getVisitFn(visitor, node.kind, /* isLeaving */ false);
|
||||
if (fn) {
|
||||
const result = fn.apply(visitor, arguments);
|
||||
if (result !== undefined) {
|
||||
typeInfo.leave(node);
|
||||
if (isNode(result)) {
|
||||
typeInfo.enter(result);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
},
|
||||
leave(node) {
|
||||
const fn = getVisitFn(visitor, node.kind, /* isLeaving */ true);
|
||||
let result;
|
||||
if (fn) {
|
||||
result = fn.apply(visitor, arguments);
|
||||
}
|
||||
typeInfo.leave(node);
|
||||
return result;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a visitor instance, a node kind, and whether it is entering or leaving, return
|
||||
* the function the visitor runtime should call.
|
||||
*/
|
||||
export function getVisitFn(
|
||||
visitor: Visitor<any>,
|
||||
kind: string,
|
||||
isLeaving: boolean,
|
||||
): ?VisitFn<any> {
|
||||
const kindVisitor = visitor[kind];
|
||||
if (kindVisitor) {
|
||||
if (!isLeaving && typeof kindVisitor === 'function') {
|
||||
// { Kind() {} }
|
||||
return kindVisitor;
|
||||
}
|
||||
const kindSpecificVisitor = isLeaving
|
||||
? kindVisitor.leave
|
||||
: kindVisitor.enter;
|
||||
if (typeof kindSpecificVisitor === 'function') {
|
||||
// { Kind: { enter() {}, leave() {} } }
|
||||
return kindSpecificVisitor;
|
||||
}
|
||||
} else {
|
||||
const specificVisitor = isLeaving ? visitor.leave : visitor.enter;
|
||||
if (specificVisitor) {
|
||||
if (typeof specificVisitor === 'function') {
|
||||
// { enter() {}, leave() {} }
|
||||
return specificVisitor;
|
||||
}
|
||||
const specificKindVisitor = specificVisitor[kind];
|
||||
if (typeof specificKindVisitor === 'function') {
|
||||
// { enter: { Kind() {} }, leave: { Kind() {} } }
|
||||
return specificKindVisitor;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
422
node_modules/graphql/language/visitor.mjs
generated
vendored
Normal file
@@ -0,0 +1,422 @@
|
||||
import inspect from '../jsutils/inspect';
|
||||
export var QueryDocumentKeys = {
|
||||
Name: [],
|
||||
Document: ['definitions'],
|
||||
OperationDefinition: ['name', 'variableDefinitions', 'directives', 'selectionSet'],
|
||||
VariableDefinition: ['variable', 'type', 'defaultValue', 'directives'],
|
||||
Variable: ['name'],
|
||||
SelectionSet: ['selections'],
|
||||
Field: ['alias', 'name', 'arguments', 'directives', 'selectionSet'],
|
||||
Argument: ['name', 'value'],
|
||||
FragmentSpread: ['name', 'directives'],
|
||||
InlineFragment: ['typeCondition', 'directives', 'selectionSet'],
|
||||
FragmentDefinition: ['name', // Note: fragment variable definitions are experimental and may be changed
|
||||
// or removed in the future.
|
||||
'variableDefinitions', 'typeCondition', 'directives', 'selectionSet'],
|
||||
IntValue: [],
|
||||
FloatValue: [],
|
||||
StringValue: [],
|
||||
BooleanValue: [],
|
||||
NullValue: [],
|
||||
EnumValue: [],
|
||||
ListValue: ['values'],
|
||||
ObjectValue: ['fields'],
|
||||
ObjectField: ['name', 'value'],
|
||||
Directive: ['name', 'arguments'],
|
||||
NamedType: ['name'],
|
||||
ListType: ['type'],
|
||||
NonNullType: ['type'],
|
||||
SchemaDefinition: ['directives', 'operationTypes'],
|
||||
OperationTypeDefinition: ['type'],
|
||||
ScalarTypeDefinition: ['description', 'name', 'directives'],
|
||||
ObjectTypeDefinition: ['description', 'name', 'interfaces', 'directives', 'fields'],
|
||||
FieldDefinition: ['description', 'name', 'arguments', 'type', 'directives'],
|
||||
InputValueDefinition: ['description', 'name', 'type', 'defaultValue', 'directives'],
|
||||
InterfaceTypeDefinition: ['description', 'name', 'directives', 'fields'],
|
||||
UnionTypeDefinition: ['description', 'name', 'directives', 'types'],
|
||||
EnumTypeDefinition: ['description', 'name', 'directives', 'values'],
|
||||
EnumValueDefinition: ['description', 'name', 'directives'],
|
||||
InputObjectTypeDefinition: ['description', 'name', 'directives', 'fields'],
|
||||
DirectiveDefinition: ['description', 'name', 'arguments', 'locations'],
|
||||
SchemaExtension: ['directives', 'operationTypes'],
|
||||
ScalarTypeExtension: ['name', 'directives'],
|
||||
ObjectTypeExtension: ['name', 'interfaces', 'directives', 'fields'],
|
||||
InterfaceTypeExtension: ['name', 'directives', 'fields'],
|
||||
UnionTypeExtension: ['name', 'directives', 'types'],
|
||||
EnumTypeExtension: ['name', 'directives', 'values'],
|
||||
InputObjectTypeExtension: ['name', 'directives', 'fields']
|
||||
};
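// Editor's note: an illustrative sketch, not part of this module, of the
// optional third argument to visit(). Passing a custom key map restricts which
// child properties are traversed; the narrowed map below is an assumption
// built from QueryDocumentKeys, and parse is imported from the public
// 'graphql' entry point.
//
//   import { parse } from 'graphql';
//
//   const narrowKeys = { ...QueryDocumentKeys, Field: ['selectionSet'] };
//   const ast = parse('{ user(id: 1) { name } }');
//   // Arguments are never descended into, so this visitor never fires.
//   visit(ast, { Argument() { throw new Error('unreachable'); } }, narrowKeys);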
|
||||
export var BREAK = Object.freeze({});
|
||||
/**
|
||||
* visit() will walk through an AST using a depth first traversal, calling
|
||||
* the visitor's enter function at each node in the traversal, and calling the
|
||||
* leave function after visiting that node and all of its child nodes.
|
||||
*
|
||||
* By returning different values from the enter and leave functions, the
|
||||
* behavior of the visitor can be altered, including skipping over a sub-tree of
|
||||
* the AST (by returning false), editing the AST by returning a value or null
|
||||
* to remove the value, or stopping the whole traversal by returning BREAK.
|
||||
*
|
||||
* When using visit() to edit an AST, the original AST will not be modified, and
|
||||
* a new version of the AST with the changes applied will be returned from the
|
||||
* visit function.
|
||||
*
|
||||
* const editedAST = visit(ast, {
|
||||
* enter(node, key, parent, path, ancestors) {
|
||||
* // @return
|
||||
* // undefined: no action
|
||||
* // false: skip visiting this node
|
||||
* // visitor.BREAK: stop visiting altogether
|
||||
* // null: delete this node
|
||||
* // any value: replace this node with the returned value
|
||||
* },
|
||||
* leave(node, key, parent, path, ancestors) {
|
||||
* // @return
|
||||
* // undefined: no action
|
||||
* // false: no action
|
||||
* // visitor.BREAK: stop visiting altogether
|
||||
* // null: delete this node
|
||||
* // any value: replace this node with the returned value
|
||||
* }
|
||||
* });
|
||||
*
|
||||
* As an alternative to providing enter() and leave() functions, a visitor can
|
||||
* instead provide functions named the same as the kinds of AST nodes, or
|
||||
* enter/leave visitors at a named key, leading to four permutations of
|
||||
* visitor API:
|
||||
*
|
||||
* 1) Named visitors triggered when entering a node of a specific kind.
|
||||
*
|
||||
* visit(ast, {
|
||||
* Kind(node) {
|
||||
* // enter the "Kind" node
|
||||
* }
|
||||
* })
|
||||
*
|
||||
* 2) Named visitors that trigger upon entering and leaving a node of
|
||||
* a specific kind.
|
||||
*
|
||||
* visit(ast, {
|
||||
* Kind: {
|
||||
* enter(node) {
|
||||
* // enter the "Kind" node
|
||||
* }
|
||||
* leave(node) {
|
||||
* // leave the "Kind" node
|
||||
* }
|
||||
* }
|
||||
* })
|
||||
*
|
||||
* 3) Generic visitors that trigger upon entering and leaving any node.
|
||||
*
|
||||
* visit(ast, {
|
||||
* enter(node) {
|
||||
* // enter any node
|
||||
* },
|
||||
* leave(node) {
|
||||
* // leave any node
|
||||
* }
|
||||
* })
|
||||
*
|
||||
* 4) Parallel visitors for entering and leaving nodes of a specific kind.
|
||||
*
|
||||
* visit(ast, {
|
||||
* enter: {
|
||||
* Kind(node) {
|
||||
* // enter the "Kind" node
|
||||
* }
|
||||
* },
|
||||
* leave: {
|
||||
* Kind(node) {
|
||||
* // leave the "Kind" node
|
||||
* }
|
||||
* }
|
||||
* })
|
||||
*/
|
||||
|
||||
export function visit(root, visitor) {
|
||||
var visitorKeys = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : QueryDocumentKeys;
|
||||
|
||||
/* eslint-disable no-undef-init */
|
||||
var stack = undefined;
|
||||
var inArray = Array.isArray(root);
|
||||
var keys = [root];
|
||||
var index = -1;
|
||||
var edits = [];
|
||||
var node = undefined;
|
||||
var key = undefined;
|
||||
var parent = undefined;
|
||||
var path = [];
|
||||
var ancestors = [];
|
||||
var newRoot = root;
|
||||
/* eslint-enable no-undef-init */
|
||||
|
||||
do {
|
||||
index++;
|
||||
var isLeaving = index === keys.length;
|
||||
var isEdited = isLeaving && edits.length !== 0;
|
||||
|
||||
if (isLeaving) {
|
||||
key = ancestors.length === 0 ? undefined : path[path.length - 1];
|
||||
node = parent;
|
||||
parent = ancestors.pop();
|
||||
|
||||
if (isEdited) {
|
||||
if (inArray) {
|
||||
node = node.slice();
|
||||
} else {
|
||||
var clone = {};
|
||||
|
||||
for (var _i2 = 0, _Object$keys2 = Object.keys(node); _i2 < _Object$keys2.length; _i2++) {
|
||||
var k = _Object$keys2[_i2];
|
||||
clone[k] = node[k];
|
||||
}
|
||||
|
||||
node = clone;
|
||||
}
|
||||
|
||||
var editOffset = 0;
|
||||
|
||||
for (var ii = 0; ii < edits.length; ii++) {
|
||||
var editKey = edits[ii][0];
|
||||
var editValue = edits[ii][1];
|
||||
|
||||
if (inArray) {
|
||||
editKey -= editOffset;
|
||||
}
|
||||
|
||||
if (inArray && editValue === null) {
|
||||
node.splice(editKey, 1);
|
||||
editOffset++;
|
||||
} else {
|
||||
node[editKey] = editValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
index = stack.index;
|
||||
keys = stack.keys;
|
||||
edits = stack.edits;
|
||||
inArray = stack.inArray;
|
||||
stack = stack.prev;
|
||||
} else {
|
||||
key = parent ? inArray ? index : keys[index] : undefined;
|
||||
node = parent ? parent[key] : newRoot;
|
||||
|
||||
if (node === null || node === undefined) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (parent) {
|
||||
path.push(key);
|
||||
}
|
||||
}
|
||||
|
||||
var result = void 0;
|
||||
|
||||
if (!Array.isArray(node)) {
|
||||
if (!isNode(node)) {
|
||||
throw new Error('Invalid AST Node: ' + inspect(node));
|
||||
}
|
||||
|
||||
var visitFn = getVisitFn(visitor, node.kind, isLeaving);
|
||||
|
||||
if (visitFn) {
|
||||
result = visitFn.call(visitor, node, key, parent, path, ancestors);
|
||||
|
||||
if (result === BREAK) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (result === false) {
|
||||
if (!isLeaving) {
|
||||
path.pop();
|
||||
continue;
|
||||
}
|
||||
} else if (result !== undefined) {
|
||||
edits.push([key, result]);
|
||||
|
||||
if (!isLeaving) {
|
||||
if (isNode(result)) {
|
||||
node = result;
|
||||
} else {
|
||||
path.pop();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (result === undefined && isEdited) {
|
||||
edits.push([key, node]);
|
||||
}
|
||||
|
||||
if (isLeaving) {
|
||||
path.pop();
|
||||
} else {
|
||||
stack = {
|
||||
inArray: inArray,
|
||||
index: index,
|
||||
keys: keys,
|
||||
edits: edits,
|
||||
prev: stack
|
||||
};
|
||||
inArray = Array.isArray(node);
|
||||
keys = inArray ? node : visitorKeys[node.kind] || [];
|
||||
index = -1;
|
||||
edits = [];
|
||||
|
||||
if (parent) {
|
||||
ancestors.push(parent);
|
||||
}
|
||||
|
||||
parent = node;
|
||||
}
|
||||
} while (stack !== undefined);
|
||||
|
||||
if (edits.length !== 0) {
|
||||
newRoot = edits[edits.length - 1][1];
|
||||
}
|
||||
|
||||
return newRoot;
|
||||
}
|
||||
|
||||
function isNode(maybeNode) {
|
||||
return Boolean(maybeNode && typeof maybeNode.kind === 'string');
|
||||
}
|
||||
/**
|
||||
* Creates a new visitor instance which delegates to many visitors to run in
|
||||
* parallel. Each visitor's functions will be called for each node before moving on.
|
||||
*
|
||||
* If a prior visitor edits a node, no following visitors will see that node.
|
||||
*/
|
||||
|
||||
|
||||
export function visitInParallel(visitors) {
|
||||
var skipping = new Array(visitors.length);
|
||||
return {
|
||||
enter: function enter(node) {
|
||||
for (var i = 0; i < visitors.length; i++) {
|
||||
if (!skipping[i]) {
|
||||
var fn = getVisitFn(visitors[i], node.kind,
|
||||
/* isLeaving */
|
||||
false);
|
||||
|
||||
if (fn) {
|
||||
var result = fn.apply(visitors[i], arguments);
|
||||
|
||||
if (result === false) {
|
||||
skipping[i] = node;
|
||||
} else if (result === BREAK) {
|
||||
skipping[i] = BREAK;
|
||||
} else if (result !== undefined) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
leave: function leave(node) {
|
||||
for (var i = 0; i < visitors.length; i++) {
|
||||
if (!skipping[i]) {
|
||||
var fn = getVisitFn(visitors[i], node.kind,
|
||||
/* isLeaving */
|
||||
true);
|
||||
|
||||
if (fn) {
|
||||
var result = fn.apply(visitors[i], arguments);
|
||||
|
||||
if (result === BREAK) {
|
||||
skipping[i] = BREAK;
|
||||
} else if (result !== undefined && result !== false) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
} else if (skipping[i] === node) {
|
||||
skipping[i] = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Creates a new visitor instance which maintains a provided TypeInfo instance
|
||||
* while delegating to the provided visitor.
|
||||
*/
|
||||
|
||||
export function visitWithTypeInfo(typeInfo, visitor) {
|
||||
return {
|
||||
enter: function enter(node) {
|
||||
typeInfo.enter(node);
|
||||
var fn = getVisitFn(visitor, node.kind,
|
||||
/* isLeaving */
|
||||
false);
|
||||
|
||||
if (fn) {
|
||||
var result = fn.apply(visitor, arguments);
|
||||
|
||||
if (result !== undefined) {
|
||||
typeInfo.leave(node);
|
||||
|
||||
if (isNode(result)) {
|
||||
typeInfo.enter(result);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
},
|
||||
leave: function leave(node) {
|
||||
var fn = getVisitFn(visitor, node.kind,
|
||||
/* isLeaving */
|
||||
true);
|
||||
var result;
|
||||
|
||||
if (fn) {
|
||||
result = fn.apply(visitor, arguments);
|
||||
}
|
||||
|
||||
typeInfo.leave(node);
|
||||
return result;
|
||||
}
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Given a visitor instance, a node kind, and whether it is entering or leaving, return
|
||||
* the function the visitor runtime should call.
|
||||
*/
|
||||
|
||||
export function getVisitFn(visitor, kind, isLeaving) {
|
||||
var kindVisitor = visitor[kind];
|
||||
|
||||
if (kindVisitor) {
|
||||
if (!isLeaving && typeof kindVisitor === 'function') {
|
||||
// { Kind() {} }
|
||||
return kindVisitor;
|
||||
}
|
||||
|
||||
var kindSpecificVisitor = isLeaving ? kindVisitor.leave : kindVisitor.enter;
|
||||
|
||||
if (typeof kindSpecificVisitor === 'function') {
|
||||
// { Kind: { enter() {}, leave() {} } }
|
||||
return kindSpecificVisitor;
|
||||
}
|
||||
} else {
|
||||
var specificVisitor = isLeaving ? visitor.leave : visitor.enter;
|
||||
|
||||
if (specificVisitor) {
|
||||
if (typeof specificVisitor === 'function') {
|
||||
// { enter() {}, leave() {} }
|
||||
return specificVisitor;
|
||||
}
|
||||
|
||||
var specificKindVisitor = specificVisitor[kind];
|
||||
|
||||
if (typeof specificKindVisitor === 'function') {
|
||||
// { enter: { Kind() {} }, leave: { Kind() {} } }
|
||||
return specificKindVisitor;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||