Move TokenKind into separate file to solve cycle import (#1981)
IvanGoncharov committed Jun 13, 2019
1 parent 2787f27 commit 3a71d3e
Showing 9 changed files with 50 additions and 48 deletions.
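The cycle being removed is visible in the hunks below: src/language/lexer.js defined TokenKind but imports the Token type from ./ast, while src/language/ast.js imported TokenKindEnum back from ./lexer. Moving the enum into a dependency-free src/language/tokenKind.js turns that loop into simple fan-in. A minimal sketch of the before/after import graph (simplified Flow modules for illustration, not verbatim excerpts; the real Token is a class with more fields):

```js
// Before: lexer.js imports from ast.js, and ast.js imports from lexer.js  (cycle)
// After:  both import from tokenKind.js, which imports nothing            (no cycle)

// tokenKind.js: leaf module, no local imports
export const TokenKind = Object.freeze({ NAME: 'Name', INT: 'Int' /* ... */ });
export type TokenKindEnum = $Values<typeof TokenKind>;

// ast.js: only needs the type
import { type TokenKindEnum } from './tokenKind';
export type Token = { +kind: TokenKindEnum, +value: string /* ... */ };

// lexer.js: needs both the values and the type, plus Token from ast.js
import { type TokenKindEnum, TokenKind } from './tokenKind';
import { type Token } from './ast'; // no longer points back at a module that imports lexer.js
```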
src/language/__tests__/lexer-test.js (3 changes: 2 additions & 1 deletion)
@@ -8,7 +8,8 @@ import dedent from '../../jsutils/dedent';
 import inspect from '../../jsutils/inspect';
 import { GraphQLError } from '../../error';
 import { Source } from '../source';
-import { createLexer, TokenKind, isPunctuatorToken } from '../lexer';
+import { TokenKind } from '../tokenKind';
+import { createLexer, isPunctuatorToken } from '../lexer';
 
 function lexOne(str) {
   const lexer = createLexer(new Source(str));
src/language/__tests__/parser-test.js (2 changes: 1 addition & 1 deletion)
@@ -5,7 +5,7 @@ import { inspect as nodeInspect } from 'util';
 import { expect } from 'chai';
 import { describe, it } from 'mocha';
 import { Kind } from '../kinds';
-import { TokenKind } from '../lexer';
+import { TokenKind } from '../tokenKind';
 import { parse, parseValue, parseType } from '../parser';
 import { Source } from '../source';
 import dedent from '../../jsutils/dedent';
src/language/ast.js (2 changes: 1 addition & 1 deletion)
@@ -1,7 +1,7 @@
 // @flow strict
 
 import { type Source } from './source';
-import { type TokenKindEnum } from './lexer';
+import { type TokenKindEnum } from './tokenKind';
 
 /**
  * Contains a range of UTF-8 character offsets and token references that
src/language/index.js (7 changes: 5 additions & 2 deletions)
@@ -8,8 +8,11 @@ export type { SourceLocation } from './location';
 export { Kind } from './kinds';
 export type { KindEnum } from './kinds';
 
-export { createLexer, TokenKind } from './lexer';
-export type { Lexer, TokenKindEnum } from './lexer';
+export { TokenKind } from './tokenKind';
+export type { TokenKindEnum } from './tokenKind';
+
+export { createLexer } from './lexer';
+export type { Lexer } from './lexer';
 
 export { parse, parseValue, parseType } from './parser';
 export type { ParseOptions } from './parser';
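Because the public entry point above still re-exports TokenKind and TokenKindEnum (now sourced from ./tokenKind), downstream imports through the package should be unaffected by the move. A short sketch of unchanged consumer code, assuming the usual graphql/language entry point:

```js
import { Source, TokenKind, createLexer } from 'graphql/language';

const lexer = createLexer(new Source('{ hello }'));
lexer.advance(); // step past the initial <SOF> token
console.log(lexer.token.kind === TokenKind.BRACE_L); // true: the first real token is '{'
```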
src/language/lexer.js (35 changes: 1 addition & 34 deletions)
@@ -3,6 +3,7 @@
 import defineToJSON from '../jsutils/defineToJSON';
 import { type Token } from './ast';
 import { type Source } from './source';
+import { type TokenKindEnum, TokenKind } from './tokenKind';
 import { syntaxError } from '../error/syntaxError';
 import { dedentBlockStringValue } from './blockString';
 
@@ -90,40 +91,6 @@ export type Lexer<TOptions> = {
   ...
 };
 
-/**
- * An exported enum describing the different kinds of tokens that the
- * lexer emits.
- */
-export const TokenKind = Object.freeze({
-  SOF: '<SOF>',
-  EOF: '<EOF>',
-  BANG: '!',
-  DOLLAR: '$',
-  AMP: '&',
-  PAREN_L: '(',
-  PAREN_R: ')',
-  SPREAD: '...',
-  COLON: ':',
-  EQUALS: '=',
-  AT: '@',
-  BRACKET_L: '[',
-  BRACKET_R: ']',
-  BRACE_L: '{',
-  PIPE: '|',
-  BRACE_R: '}',
-  NAME: 'Name',
-  INT: 'Int',
-  FLOAT: 'Float',
-  STRING: 'String',
-  BLOCK_STRING: 'BlockString',
-  COMMENT: 'Comment',
-});
-
-/**
- * The enum type representing the token kinds values.
- */
-export type TokenKindEnum = $Values<typeof TokenKind>;
-
 // @internal
 export function isPunctuatorToken(token: Token) {
   const kind = token.kind;
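The hunk above cuts off inside isPunctuatorToken, which stays in lexer.js and now reads TokenKind from the new module. As a rough idea of how that internal helper consumes the enum, here is a hedged sketch, assuming it simply compares token.kind against the punctuator values; it is not a verbatim excerpt of the truncated function:

```js
// Sketch only: true when the token is one of the punctuators
// (or the '...' spread), as opposed to names, values, or comments.
export function isPunctuatorToken(token: Token) {
  const kind = token.kind;
  return (
    kind === TokenKind.BANG ||
    kind === TokenKind.DOLLAR ||
    kind === TokenKind.AMP ||
    kind === TokenKind.PAREN_L ||
    kind === TokenKind.PAREN_R ||
    kind === TokenKind.SPREAD ||
    kind === TokenKind.COLON ||
    kind === TokenKind.EQUALS ||
    kind === TokenKind.AT ||
    kind === TokenKind.BRACKET_L ||
    kind === TokenKind.BRACKET_R ||
    kind === TokenKind.BRACE_L ||
    kind === TokenKind.PIPE ||
    kind === TokenKind.BRACE_R
  );
}
```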
src/language/parser.js (9 changes: 2 additions & 7 deletions)
@@ -5,13 +5,8 @@ import defineToJSON from '../jsutils/defineToJSON';
 import { Source } from './source';
 import { type GraphQLError } from '../error/GraphQLError';
 import { syntaxError } from '../error/syntaxError';
-import {
-  type Lexer,
-  type TokenKindEnum,
-  TokenKind,
-  getTokenDesc,
-  createLexer,
-} from './lexer';
+import { type TokenKindEnum, TokenKind } from './tokenKind';
+import { type Lexer, getTokenDesc, createLexer } from './lexer';
 import {
   type Location,
   type Token,
src/language/tokenKind.js (35 changes: 35 additions & 0 deletions)
@@ -0,0 +1,35 @@
+// @flow strict
+
+/**
+ * An exported enum describing the different kinds of tokens that the
+ * lexer emits.
+ */
+export const TokenKind = Object.freeze({
+  SOF: '<SOF>',
+  EOF: '<EOF>',
+  BANG: '!',
+  DOLLAR: '$',
+  AMP: '&',
+  PAREN_L: '(',
+  PAREN_R: ')',
+  SPREAD: '...',
+  COLON: ':',
+  EQUALS: '=',
+  AT: '@',
+  BRACKET_L: '[',
+  BRACKET_R: ']',
+  BRACE_L: '{',
+  PIPE: '|',
+  BRACE_R: '}',
+  NAME: 'Name',
+  INT: 'Int',
+  FLOAT: 'Float',
+  STRING: 'String',
+  BLOCK_STRING: 'BlockString',
+  COMMENT: 'Comment',
+});
+
+/**
+ * The enum type representing the token kinds values.
+ */
+export type TokenKindEnum = $Values<typeof TokenKind>;
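The new module keeps the library's existing enum pattern: a frozen plain object provides the runtime values, and the Flow type $Values<typeof TokenKind> is the union of those string literals, so annotated code only accepts listed kinds. A small usage sketch follows (the helper name is illustrative, not part of the library):

```js
// @flow strict
import { TokenKind, type TokenKindEnum } from './tokenKind';

// Hypothetical helper: `kind` must be one of the frozen values
// ('<SOF>' | '!' | ... | 'Comment'); any other string is a Flow error.
function isStringValueToken(kind: TokenKindEnum): boolean {
  return kind === TokenKind.STRING || kind === TokenKind.BLOCK_STRING;
}

isStringValueToken(TokenKind.BLOCK_STRING); // true
// isStringValueToken('Whitespace');        // Flow error: string is incompatible with TokenKindEnum
```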
src/utilities/buildASTSchema.js (2 changes: 1 addition & 1 deletion)
@@ -9,7 +9,7 @@ import { type ObjMap } from '../jsutils/ObjMap';
 import { valueFromAST } from './valueFromAST';
 import { assertValidSDL } from '../validation/validate';
 import { dedentBlockStringValue } from '../language/blockString';
-import { TokenKind } from '../language/lexer';
+import { TokenKind } from '../language/tokenKind';
 import { type ParseOptions, parse } from '../language/parser';
 import { type Source } from '../language/source';
 import { getDirectiveValues } from '../execution/values';
src/utilities/stripIgnoredCharacters.js (3 changes: 2 additions & 1 deletion)
@@ -2,7 +2,8 @@
 
 import inspect from '../jsutils/inspect';
 import { Source } from '../language/source';
-import { createLexer, TokenKind, isPunctuatorToken } from '../language/lexer';
+import { TokenKind } from '../language/tokenKind';
+import { createLexer, isPunctuatorToken } from '../language/lexer';
 import {
   dedentBlockStringValue,
   getBlockStringIndentation,
