Commit: first
27 node_modules/@babel/parser/lib/tokenizer/context.js (generated, vendored, normal file)
@@ -0,0 +1,27 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.types = exports.TokContext = void 0;
class TokContext {
  constructor(token, preserveSpace) {
    this.token = void 0;
    this.preserveSpace = void 0;
    this.token = token;
    this.preserveSpace = !!preserveSpace;
  }
}
exports.TokContext = TokContext;
const types = {
  brace: new TokContext("{"),
  j_oTag: new TokContext("<tag"),
  j_cTag: new TokContext("</tag"),
  j_expr: new TokContext("<tag>...</tag>", true)
};
exports.types = types;
{
  types.template = new TokContext("`", true);
}

//# sourceMappingURL=context.js.map
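Note: this file defines the tokenizer's context stack. `TokContext` is a tiny value object and `types` enumerates the contexts the tokenizer can sit in. A minimal sketch of how that stack behaves; the driver below is hypothetical (the real pushes happen in the `updateContext` hooks in types.js, later in this diff), and only `TokContext` and `types` come from the vendored file above:

// Sketch: the context stack distinguishes JSX from plain JS and templates.
const { types } = require("./context");

const stack = [types.brace];             // State starts with one plain-JS context
stack.push(types.j_expr, types.j_oTag);  // what jsxTagStart's updateContext does
console.log(stack[stack.length - 1].preserveSpace); // false: inside an opening tag
stack.pop();                             // closing the tag leaves j_expr on top
console.log(stack[stack.length - 1].preserveSpace); // true: JSX children keep whitespace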
1 node_modules/@babel/parser/lib/tokenizer/context.js.map (generated, vendored, normal file)
@@ -0,0 +1 @@
{"version":3,"names":["TokContext","constructor","token","preserveSpace","exports","types","brace","j_oTag","j_cTag","j_expr","template"],"sources":["../../src/tokenizer/context.ts"],"sourcesContent":["// The token context is used in JSX plugin to track\n// jsx tag / jsx text / normal JavaScript expression\n\nexport class TokContext {\n constructor(token: string, preserveSpace?: boolean) {\n this.token = token;\n this.preserveSpace = !!preserveSpace;\n }\n\n token: string;\n preserveSpace: boolean;\n}\n\nconst types: {\n [key: string]: TokContext;\n} = {\n brace: new TokContext(\"{\"), // normal JavaScript expression\n j_oTag: new TokContext(\"<tag\"), // JSX opening tag\n j_cTag: new TokContext(\"</tag\"), // JSX closing tag\n j_expr: new TokContext(\"<tag>...</tag>\", true), // JSX expressions\n};\n\nif (!process.env.BABEL_8_BREAKING) {\n types.template = new TokContext(\"`\", true);\n}\n\nexport { types };\n"],"mappings":";;;;;;AAGO,MAAMA,UAAU,CAAC;EACtBC,WAAWA,CAACC,KAAa,EAAEC,aAAuB,EAAE;IAAA,KAKpDD,KAAK;IAAA,KACLC,aAAa;IALX,IAAI,CAACD,KAAK,GAAGA,KAAK;IAClB,IAAI,CAACC,aAAa,GAAG,CAAC,CAACA,aAAa;EACtC;AAIF;AAACC,OAAA,CAAAJ,UAAA,GAAAA,UAAA;AAED,MAAMK,KAEL,GAAG;EACFC,KAAK,EAAE,IAAIN,UAAU,CAAC,GAAG,CAAC;EAC1BO,MAAM,EAAE,IAAIP,UAAU,CAAC,MAAM,CAAC;EAC9BQ,MAAM,EAAE,IAAIR,UAAU,CAAC,OAAO,CAAC;EAC/BS,MAAM,EAAE,IAAIT,UAAU,CAAC,gBAAgB,EAAE,IAAI;AAC/C,CAAC;AAACI,OAAA,CAAAC,KAAA,GAAAA,KAAA;AAEiC;EACjCA,KAAK,CAACK,QAAQ,GAAG,IAAIV,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC;AAC5C"}
1119 node_modules/@babel/parser/lib/tokenizer/index.js (generated, vendored, normal file)
File diff suppressed because it is too large
1 node_modules/@babel/parser/lib/tokenizer/index.js.map (generated, vendored, normal file)
File diff suppressed because one or more lines are too long
82 node_modules/@babel/parser/lib/tokenizer/state.js (generated, vendored, normal file)
@@ -0,0 +1,82 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.default = void 0;
var _location = require("../util/location");
var _context = require("./context");
var _types = require("./types");
class State {
  constructor() {
    this.strict = void 0;
    this.curLine = void 0;
    this.lineStart = void 0;
    this.startLoc = void 0;
    this.endLoc = void 0;
    this.errors = [];
    this.potentialArrowAt = -1;
    this.noArrowAt = [];
    this.noArrowParamsConversionAt = [];
    this.maybeInArrowParameters = false;
    this.inType = false;
    this.noAnonFunctionType = false;
    this.hasFlowComment = false;
    this.isAmbientContext = false;
    this.inAbstractClass = false;
    this.inDisallowConditionalTypesContext = false;
    this.topicContext = {
      maxNumOfResolvableTopics: 0,
      maxTopicIndex: null
    };
    this.soloAwait = false;
    this.inFSharpPipelineDirectBody = false;
    this.labels = [];
    this.comments = [];
    this.commentStack = [];
    this.pos = 0;
    this.type = 137;
    this.value = null;
    this.start = 0;
    this.end = 0;
    this.lastTokEndLoc = null;
    this.lastTokStartLoc = null;
    this.lastTokStart = 0;
    this.context = [_context.types.brace];
    this.canStartJSXElement = true;
    this.containsEsc = false;
    this.firstInvalidTemplateEscapePos = null;
    this.strictErrors = new Map();
    this.tokensLength = 0;
  }
  init({
    strictMode,
    sourceType,
    startLine,
    startColumn
  }) {
    this.strict = strictMode === false ? false : strictMode === true ? true : sourceType === "module";
    this.curLine = startLine;
    this.lineStart = -startColumn;
    this.startLoc = this.endLoc = new _location.Position(startLine, startColumn, 0);
  }
  curPosition() {
    return new _location.Position(this.curLine, this.pos - this.lineStart, this.pos);
  }
  clone(skipArrays) {
    const state = new State();
    const keys = Object.keys(this);
    for (let i = 0, length = keys.length; i < length; i++) {
      const key = keys[i];
      let val = this[key];
      if (!skipArrays && Array.isArray(val)) {
        val = val.slice();
      }
      state[key] = val;
    }
    return state;
  }
}
exports.default = State;

//# sourceMappingURL=state.js.map
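Note: state.js above keeps every piece of mutable tokenizer/parser state on one class so `clone()` can snapshot it for backtracking; arrays are sliced so later pushes don't leak into the snapshot. A minimal sketch of that use, assuming only the vendored exports above (the require path mirrors the files in this commit):

// Sketch: State.clone gives the parser cheap backtracking snapshots.
const State = require("./state").default;

const state = new State();
state.init({ strictMode: null, sourceType: "module", startLine: 1, startColumn: 0 });
console.log(state.strict);           // true: "module" sourceType implies strict mode

const snapshot = state.clone();      // arrays are copied with slice()
state.labels.push({ kind: "loop" }); // mutate the live state...
console.log(snapshot.labels.length); // 0: ...the snapshot is unaffected
console.log(state.curPosition());    // Position { line: 1, column: 0, index: 0 }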
1 node_modules/@babel/parser/lib/tokenizer/state.js.map (generated, vendored, normal file)
File diff suppressed because one or more lines are too long
586 node_modules/@babel/parser/lib/tokenizer/types.js (generated, vendored, normal file)
@@ -0,0 +1,586 @@
"use strict";

Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ExportedTokenType = void 0;
exports.getExportedToken = getExportedToken;
exports.isTokenType = isTokenType;
exports.keywords = void 0;
exports.tokenCanStartExpression = tokenCanStartExpression;
exports.tokenComesBeforeExpression = tokenComesBeforeExpression;
exports.tokenIsAssignment = tokenIsAssignment;
exports.tokenIsBinaryOperator = tokenIsBinaryOperator;
exports.tokenIsFlowInterfaceOrTypeOrOpaque = tokenIsFlowInterfaceOrTypeOrOpaque;
exports.tokenIsIdentifier = tokenIsIdentifier;
exports.tokenIsKeyword = tokenIsKeyword;
exports.tokenIsKeywordOrIdentifier = tokenIsKeywordOrIdentifier;
exports.tokenIsLiteralPropertyName = tokenIsLiteralPropertyName;
exports.tokenIsLoop = tokenIsLoop;
exports.tokenIsOperator = tokenIsOperator;
exports.tokenIsPostfix = tokenIsPostfix;
exports.tokenIsPrefix = tokenIsPrefix;
exports.tokenIsRightAssociative = tokenIsRightAssociative;
exports.tokenIsTSDeclarationStart = tokenIsTSDeclarationStart;
exports.tokenIsTSTypeOperator = tokenIsTSTypeOperator;
exports.tokenIsTemplate = tokenIsTemplate;
exports.tokenKeywordOrIdentifierIsKeyword = tokenKeywordOrIdentifierIsKeyword;
exports.tokenLabelName = tokenLabelName;
exports.tokenOperatorPrecedence = tokenOperatorPrecedence;
exports.tt = exports.tokenTypes = void 0;
var _context = require("./context");
const beforeExpr = true;
const startsExpr = true;
const isLoop = true;
const isAssign = true;
const prefix = true;
const postfix = true;
class ExportedTokenType {
  constructor(label, conf = {}) {
    this.label = void 0;
    this.keyword = void 0;
    this.beforeExpr = void 0;
    this.startsExpr = void 0;
    this.rightAssociative = void 0;
    this.isLoop = void 0;
    this.isAssign = void 0;
    this.prefix = void 0;
    this.postfix = void 0;
    this.binop = void 0;
    this.label = label;
    this.keyword = conf.keyword;
    this.beforeExpr = !!conf.beforeExpr;
    this.startsExpr = !!conf.startsExpr;
    this.rightAssociative = !!conf.rightAssociative;
    this.isLoop = !!conf.isLoop;
    this.isAssign = !!conf.isAssign;
    this.prefix = !!conf.prefix;
    this.postfix = !!conf.postfix;
    this.binop = conf.binop != null ? conf.binop : null;
    {
      this.updateContext = null;
    }
  }
}
exports.ExportedTokenType = ExportedTokenType;
const keywords = new Map();
exports.keywords = keywords;
function createKeyword(name, options = {}) {
  options.keyword = name;
  const token = createToken(name, options);
  keywords.set(name, token);
  return token;
}
function createBinop(name, binop) {
  return createToken(name, {
    beforeExpr,
    binop
  });
}
let tokenTypeCounter = -1;
const tokenTypes = [];
exports.tokenTypes = tokenTypes;
const tokenLabels = [];
const tokenBinops = [];
const tokenBeforeExprs = [];
const tokenStartsExprs = [];
const tokenPrefixes = [];
function createToken(name, options = {}) {
  var _options$binop, _options$beforeExpr, _options$startsExpr, _options$prefix;
  ++tokenTypeCounter;
  tokenLabels.push(name);
  tokenBinops.push((_options$binop = options.binop) != null ? _options$binop : -1);
  tokenBeforeExprs.push((_options$beforeExpr = options.beforeExpr) != null ? _options$beforeExpr : false);
  tokenStartsExprs.push((_options$startsExpr = options.startsExpr) != null ? _options$startsExpr : false);
  tokenPrefixes.push((_options$prefix = options.prefix) != null ? _options$prefix : false);
  tokenTypes.push(new ExportedTokenType(name, options));
  return tokenTypeCounter;
}
function createKeywordLike(name, options = {}) {
  var _options$binop2, _options$beforeExpr2, _options$startsExpr2, _options$prefix2;
  ++tokenTypeCounter;
  keywords.set(name, tokenTypeCounter);
  tokenLabels.push(name);
  tokenBinops.push((_options$binop2 = options.binop) != null ? _options$binop2 : -1);
  tokenBeforeExprs.push((_options$beforeExpr2 = options.beforeExpr) != null ? _options$beforeExpr2 : false);
  tokenStartsExprs.push((_options$startsExpr2 = options.startsExpr) != null ? _options$startsExpr2 : false);
  tokenPrefixes.push((_options$prefix2 = options.prefix) != null ? _options$prefix2 : false);
  tokenTypes.push(new ExportedTokenType("name", options));
  return tokenTypeCounter;
}
const tt = {
  bracketL: createToken("[", {
    beforeExpr,
    startsExpr
  }),
  bracketHashL: createToken("#[", {
    beforeExpr,
    startsExpr
  }),
  bracketBarL: createToken("[|", {
    beforeExpr,
    startsExpr
  }),
  bracketR: createToken("]"),
  bracketBarR: createToken("|]"),
  braceL: createToken("{", {
    beforeExpr,
    startsExpr
  }),
  braceBarL: createToken("{|", {
    beforeExpr,
    startsExpr
  }),
  braceHashL: createToken("#{", {
    beforeExpr,
    startsExpr
  }),
  braceR: createToken("}"),
  braceBarR: createToken("|}"),
  parenL: createToken("(", {
    beforeExpr,
    startsExpr
  }),
  parenR: createToken(")"),
  comma: createToken(",", {
    beforeExpr
  }),
  semi: createToken(";", {
    beforeExpr
  }),
  colon: createToken(":", {
    beforeExpr
  }),
  doubleColon: createToken("::", {
    beforeExpr
  }),
  dot: createToken("."),
  question: createToken("?", {
    beforeExpr
  }),
  questionDot: createToken("?."),
  arrow: createToken("=>", {
    beforeExpr
  }),
  template: createToken("template"),
  ellipsis: createToken("...", {
    beforeExpr
  }),
  backQuote: createToken("`", {
    startsExpr
  }),
  dollarBraceL: createToken("${", {
    beforeExpr,
    startsExpr
  }),
  templateTail: createToken("...`", {
    startsExpr
  }),
  templateNonTail: createToken("...${", {
    beforeExpr,
    startsExpr
  }),
  at: createToken("@"),
  hash: createToken("#", {
    startsExpr
  }),
  interpreterDirective: createToken("#!..."),
  eq: createToken("=", {
    beforeExpr,
    isAssign
  }),
  assign: createToken("_=", {
    beforeExpr,
    isAssign
  }),
  slashAssign: createToken("_=", {
    beforeExpr,
    isAssign
  }),
  xorAssign: createToken("_=", {
    beforeExpr,
    isAssign
  }),
  moduloAssign: createToken("_=", {
    beforeExpr,
    isAssign
  }),
  incDec: createToken("++/--", {
    prefix,
    postfix,
    startsExpr
  }),
  bang: createToken("!", {
    beforeExpr,
    prefix,
    startsExpr
  }),
  tilde: createToken("~", {
    beforeExpr,
    prefix,
    startsExpr
  }),
  doubleCaret: createToken("^^", {
    startsExpr
  }),
  doubleAt: createToken("@@", {
    startsExpr
  }),
  pipeline: createBinop("|>", 0),
  nullishCoalescing: createBinop("??", 1),
  logicalOR: createBinop("||", 1),
  logicalAND: createBinop("&&", 2),
  bitwiseOR: createBinop("|", 3),
  bitwiseXOR: createBinop("^", 4),
  bitwiseAND: createBinop("&", 5),
  equality: createBinop("==/!=/===/!==", 6),
  lt: createBinop("</>/<=/>=", 7),
  gt: createBinop("</>/<=/>=", 7),
  relational: createBinop("</>/<=/>=", 7),
  bitShift: createBinop("<</>>/>>>", 8),
  bitShiftL: createBinop("<</>>/>>>", 8),
  bitShiftR: createBinop("<</>>/>>>", 8),
  plusMin: createToken("+/-", {
    beforeExpr,
    binop: 9,
    prefix,
    startsExpr
  }),
  modulo: createToken("%", {
    binop: 10,
    startsExpr
  }),
  star: createToken("*", {
    binop: 10
  }),
  slash: createBinop("/", 10),
  exponent: createToken("**", {
    beforeExpr,
    binop: 11,
    rightAssociative: true
  }),
  _in: createKeyword("in", {
    beforeExpr,
    binop: 7
  }),
  _instanceof: createKeyword("instanceof", {
    beforeExpr,
    binop: 7
  }),
  _break: createKeyword("break"),
  _case: createKeyword("case", {
    beforeExpr
  }),
  _catch: createKeyword("catch"),
  _continue: createKeyword("continue"),
  _debugger: createKeyword("debugger"),
  _default: createKeyword("default", {
    beforeExpr
  }),
  _else: createKeyword("else", {
    beforeExpr
  }),
  _finally: createKeyword("finally"),
  _function: createKeyword("function", {
    startsExpr
  }),
  _if: createKeyword("if"),
  _return: createKeyword("return", {
    beforeExpr
  }),
  _switch: createKeyword("switch"),
  _throw: createKeyword("throw", {
    beforeExpr,
    prefix,
    startsExpr
  }),
  _try: createKeyword("try"),
  _var: createKeyword("var"),
  _const: createKeyword("const"),
  _with: createKeyword("with"),
  _new: createKeyword("new", {
    beforeExpr,
    startsExpr
  }),
  _this: createKeyword("this", {
    startsExpr
  }),
  _super: createKeyword("super", {
    startsExpr
  }),
  _class: createKeyword("class", {
    startsExpr
  }),
  _extends: createKeyword("extends", {
    beforeExpr
  }),
  _export: createKeyword("export"),
  _import: createKeyword("import", {
    startsExpr
  }),
  _null: createKeyword("null", {
    startsExpr
  }),
  _true: createKeyword("true", {
    startsExpr
  }),
  _false: createKeyword("false", {
    startsExpr
  }),
  _typeof: createKeyword("typeof", {
    beforeExpr,
    prefix,
    startsExpr
  }),
  _void: createKeyword("void", {
    beforeExpr,
    prefix,
    startsExpr
  }),
  _delete: createKeyword("delete", {
    beforeExpr,
    prefix,
    startsExpr
  }),
  _do: createKeyword("do", {
    isLoop,
    beforeExpr
  }),
  _for: createKeyword("for", {
    isLoop
  }),
  _while: createKeyword("while", {
    isLoop
  }),
  _as: createKeywordLike("as", {
    startsExpr
  }),
  _assert: createKeywordLike("assert", {
    startsExpr
  }),
  _async: createKeywordLike("async", {
    startsExpr
  }),
  _await: createKeywordLike("await", {
    startsExpr
  }),
  _from: createKeywordLike("from", {
    startsExpr
  }),
  _get: createKeywordLike("get", {
    startsExpr
  }),
  _let: createKeywordLike("let", {
    startsExpr
  }),
  _meta: createKeywordLike("meta", {
    startsExpr
  }),
  _of: createKeywordLike("of", {
    startsExpr
  }),
  _sent: createKeywordLike("sent", {
    startsExpr
  }),
  _set: createKeywordLike("set", {
    startsExpr
  }),
  _static: createKeywordLike("static", {
    startsExpr
  }),
  _using: createKeywordLike("using", {
    startsExpr
  }),
  _yield: createKeywordLike("yield", {
    startsExpr
  }),
  _asserts: createKeywordLike("asserts", {
    startsExpr
  }),
  _checks: createKeywordLike("checks", {
    startsExpr
  }),
  _exports: createKeywordLike("exports", {
    startsExpr
  }),
  _global: createKeywordLike("global", {
    startsExpr
  }),
  _implements: createKeywordLike("implements", {
    startsExpr
  }),
  _intrinsic: createKeywordLike("intrinsic", {
    startsExpr
  }),
  _infer: createKeywordLike("infer", {
    startsExpr
  }),
  _is: createKeywordLike("is", {
    startsExpr
  }),
  _mixins: createKeywordLike("mixins", {
    startsExpr
  }),
  _proto: createKeywordLike("proto", {
    startsExpr
  }),
  _require: createKeywordLike("require", {
    startsExpr
  }),
  _satisfies: createKeywordLike("satisfies", {
    startsExpr
  }),
  _keyof: createKeywordLike("keyof", {
    startsExpr
  }),
  _readonly: createKeywordLike("readonly", {
    startsExpr
  }),
  _unique: createKeywordLike("unique", {
    startsExpr
  }),
  _abstract: createKeywordLike("abstract", {
    startsExpr
  }),
  _declare: createKeywordLike("declare", {
    startsExpr
  }),
  _enum: createKeywordLike("enum", {
    startsExpr
  }),
  _module: createKeywordLike("module", {
    startsExpr
  }),
  _namespace: createKeywordLike("namespace", {
    startsExpr
  }),
  _interface: createKeywordLike("interface", {
    startsExpr
  }),
  _type: createKeywordLike("type", {
    startsExpr
  }),
  _opaque: createKeywordLike("opaque", {
    startsExpr
  }),
  name: createToken("name", {
    startsExpr
  }),
  string: createToken("string", {
    startsExpr
  }),
  num: createToken("num", {
    startsExpr
  }),
  bigint: createToken("bigint", {
    startsExpr
  }),
  decimal: createToken("decimal", {
    startsExpr
  }),
  regexp: createToken("regexp", {
    startsExpr
  }),
  privateName: createToken("#name", {
    startsExpr
  }),
  eof: createToken("eof"),
  jsxName: createToken("jsxName"),
  jsxText: createToken("jsxText", {
    beforeExpr: true
  }),
  jsxTagStart: createToken("jsxTagStart", {
    startsExpr: true
  }),
  jsxTagEnd: createToken("jsxTagEnd"),
  placeholder: createToken("%%", {
    startsExpr: true
  })
};
exports.tt = tt;
function tokenIsIdentifier(token) {
  return token >= 93 && token <= 130;
}
function tokenKeywordOrIdentifierIsKeyword(token) {
  return token <= 92;
}
function tokenIsKeywordOrIdentifier(token) {
  return token >= 58 && token <= 130;
}
function tokenIsLiteralPropertyName(token) {
  return token >= 58 && token <= 134;
}
function tokenComesBeforeExpression(token) {
  return tokenBeforeExprs[token];
}
function tokenCanStartExpression(token) {
  return tokenStartsExprs[token];
}
function tokenIsAssignment(token) {
  return token >= 29 && token <= 33;
}
function tokenIsFlowInterfaceOrTypeOrOpaque(token) {
  return token >= 127 && token <= 129;
}
function tokenIsLoop(token) {
  return token >= 90 && token <= 92;
}
function tokenIsKeyword(token) {
  return token >= 58 && token <= 92;
}
function tokenIsOperator(token) {
  return token >= 39 && token <= 59;
}
function tokenIsPostfix(token) {
  return token === 34;
}
function tokenIsPrefix(token) {
  return tokenPrefixes[token];
}
function tokenIsTSTypeOperator(token) {
  return token >= 119 && token <= 121;
}
function tokenIsTSDeclarationStart(token) {
  return token >= 122 && token <= 128;
}
function tokenLabelName(token) {
  return tokenLabels[token];
}
function tokenOperatorPrecedence(token) {
  return tokenBinops[token];
}
function tokenIsBinaryOperator(token) {
  return tokenBinops[token] !== -1;
}
function tokenIsRightAssociative(token) {
  return token === 57;
}
function tokenIsTemplate(token) {
  return token >= 24 && token <= 25;
}
function getExportedToken(token) {
  return tokenTypes[token];
}
function isTokenType(obj) {
  return typeof obj === "number";
}
{
  tokenTypes[8].updateContext = context => {
    context.pop();
  };
  tokenTypes[5].updateContext = tokenTypes[7].updateContext = tokenTypes[23].updateContext = context => {
    context.push(_context.types.brace);
  };
  tokenTypes[22].updateContext = context => {
    if (context[context.length - 1] === _context.types.template) {
      context.pop();
    } else {
      context.push(_context.types.template);
    }
  };
  tokenTypes[140].updateContext = context => {
    context.push(_context.types.j_expr, _context.types.j_oTag);
  };
}

//# sourceMappingURL=types.js.map
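Note: types.js above represents token types as dense numeric ids. Each createToken call appends one entry to the parallel arrays (tokenLabels, tokenBinops, tokenBeforeExprs, ...), so the predicate functions reduce to integer range checks and array reads, while getExportedToken recovers the rich ExportedTokenType object for plugins. A minimal sketch of the resulting API, assuming only the vendored exports above:

// Sketch: token ids are plain numbers indexing parallel lookup tables.
const { tt, tokenLabelName, tokenOperatorPrecedence, tokenIsBinaryOperator, getExportedToken } = require("./types");

console.log(typeof tt.plusMin);                    // "number"
console.log(tokenLabelName(tt.plusMin));           // "+/-"
console.log(tokenOperatorPrecedence(tt.exponent)); // 11
console.log(tokenIsBinaryOperator(tt.semi));       // false: binop defaults to -1
console.log(getExportedToken(tt._if).keyword);     // "if" (rich object for plugins)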
1 node_modules/@babel/parser/lib/tokenizer/types.js.map (generated, vendored, normal file)
File diff suppressed because one or more lines are too long