initial
node_modules/sucrase/dist/parser/traverser/base.js (new normal file, 60 lines, generated, vendored)
@@ -0,0 +1,60 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }var _state = require('../tokenizer/state'); var _state2 = _interopRequireDefault(_state);
var _charcodes = require('../util/charcodes');

 exports.isJSXEnabled;
 exports.isTypeScriptEnabled;
 exports.isFlowEnabled;
 exports.state;
 exports.input;
 exports.nextContextId;

 function getNextContextId() {
  return exports.nextContextId++;
} exports.getNextContextId = getNextContextId;

// eslint-disable-next-line @typescript-eslint/no-explicit-any
 function augmentError(error) {
  if ("pos" in error) {
    const loc = locationForIndex(error.pos);
    error.message += ` (${loc.line}:${loc.column})`;
    error.loc = loc;
  }
  return error;
} exports.augmentError = augmentError;

 class Loc {


  constructor(line, column) {
    this.line = line;
    this.column = column;
  }
} exports.Loc = Loc;

 function locationForIndex(pos) {
  let line = 1;
  let column = 1;
  for (let i = 0; i < pos; i++) {
    if (exports.input.charCodeAt(i) === _charcodes.charCodes.lineFeed) {
      line++;
      column = 1;
    } else {
      column++;
    }
  }
  return new Loc(line, column);
} exports.locationForIndex = locationForIndex;

 function initParser(
  inputCode,
  isJSXEnabledArg,
  isTypeScriptEnabledArg,
  isFlowEnabledArg,
) {
  exports.input = inputCode;
  exports.state = new (0, _state2.default)();
  exports.nextContextId = 1;
  exports.isJSXEnabled = isJSXEnabledArg;
  exports.isTypeScriptEnabled = isTypeScriptEnabledArg;
  exports.isFlowEnabled = isFlowEnabledArg;
} exports.initParser = initParser;
node_modules/sucrase/dist/parser/traverser/expression.js (new normal file, 1022 lines, generated, vendored)
File diff suppressed because it is too large
node_modules/sucrase/dist/parser/traverser/index.js (new normal file, 18 lines, generated, vendored)
@@ -0,0 +1,18 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true});
var _index = require('../tokenizer/index');
var _charcodes = require('../util/charcodes');
var _base = require('./base');
var _statement = require('./statement');

 function parseFile() {
  // If enabled, skip leading hashbang line.
  if (
    _base.state.pos === 0 &&
    _base.input.charCodeAt(0) === _charcodes.charCodes.numberSign &&
    _base.input.charCodeAt(1) === _charcodes.charCodes.exclamationMark
  ) {
    _index.skipLineComment.call(void 0, 2);
  }
  _index.nextToken.call(void 0, );
  return _statement.parseTopLevel.call(void 0, );
} exports.parseFile = parseFile;
node_modules/sucrase/dist/parser/traverser/lval.js (new normal file, 159 lines, generated, vendored)
@@ -0,0 +1,159 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _flow = require('../plugins/flow');
var _typescript = require('../plugins/typescript');







var _index = require('../tokenizer/index');
var _keywords = require('../tokenizer/keywords');
var _types = require('../tokenizer/types');
var _base = require('./base');
var _expression = require('./expression');
var _util = require('./util');

 function parseSpread() {
  _index.next.call(void 0, );
  _expression.parseMaybeAssign.call(void 0, false);
} exports.parseSpread = parseSpread;

 function parseRest(isBlockScope) {
  _index.next.call(void 0, );
  parseBindingAtom(isBlockScope);
} exports.parseRest = parseRest;

 function parseBindingIdentifier(isBlockScope) {
  _expression.parseIdentifier.call(void 0, );
  markPriorBindingIdentifier(isBlockScope);
} exports.parseBindingIdentifier = parseBindingIdentifier;

 function parseImportedIdentifier() {
  _expression.parseIdentifier.call(void 0, );
  _base.state.tokens[_base.state.tokens.length - 1].identifierRole = _index.IdentifierRole.ImportDeclaration;
} exports.parseImportedIdentifier = parseImportedIdentifier;

 function markPriorBindingIdentifier(isBlockScope) {
  let identifierRole;
  if (_base.state.scopeDepth === 0) {
    identifierRole = _index.IdentifierRole.TopLevelDeclaration;
  } else if (isBlockScope) {
    identifierRole = _index.IdentifierRole.BlockScopedDeclaration;
  } else {
    identifierRole = _index.IdentifierRole.FunctionScopedDeclaration;
  }
  _base.state.tokens[_base.state.tokens.length - 1].identifierRole = identifierRole;
} exports.markPriorBindingIdentifier = markPriorBindingIdentifier;

// Parses lvalue (assignable) atom.
 function parseBindingAtom(isBlockScope) {
  switch (_base.state.type) {
    case _types.TokenType._this: {
      // In TypeScript, "this" may be the name of a parameter, so allow it.
      const oldIsType = _index.pushTypeContext.call(void 0, 0);
      _index.next.call(void 0, );
      _index.popTypeContext.call(void 0, oldIsType);
      return;
    }

    case _types.TokenType._yield:
    case _types.TokenType.name: {
      _base.state.type = _types.TokenType.name;
      parseBindingIdentifier(isBlockScope);
      return;
    }

    case _types.TokenType.bracketL: {
      _index.next.call(void 0, );
      parseBindingList(_types.TokenType.bracketR, isBlockScope, true /* allowEmpty */);
      return;
    }

    case _types.TokenType.braceL:
      _expression.parseObj.call(void 0, true, isBlockScope);
      return;

    default:
      _util.unexpected.call(void 0, );
  }
} exports.parseBindingAtom = parseBindingAtom;

 function parseBindingList(
  close,
  isBlockScope,
  allowEmpty = false,
  allowModifiers = false,
  contextId = 0,
) {
  let first = true;

  let hasRemovedComma = false;
  const firstItemTokenIndex = _base.state.tokens.length;

  while (!_index.eat.call(void 0, close) && !_base.state.error) {
    if (first) {
      first = false;
    } else {
      _util.expect.call(void 0, _types.TokenType.comma);
      _base.state.tokens[_base.state.tokens.length - 1].contextId = contextId;
      // After a "this" type in TypeScript, we need to set the following comma (if any) to also be
      // a type token so that it will be removed.
      if (!hasRemovedComma && _base.state.tokens[firstItemTokenIndex].isType) {
        _base.state.tokens[_base.state.tokens.length - 1].isType = true;
        hasRemovedComma = true;
      }
    }
    if (allowEmpty && _index.match.call(void 0, _types.TokenType.comma)) {
      // Empty item; nothing further to parse for this item.
    } else if (_index.eat.call(void 0, close)) {
      break;
    } else if (_index.match.call(void 0, _types.TokenType.ellipsis)) {
      parseRest(isBlockScope);
      parseAssignableListItemTypes();
      // Support rest element trailing commas allowed by TypeScript <2.9.
      _index.eat.call(void 0, _types.TokenType.comma);
      _util.expect.call(void 0, close);
      break;
    } else {
      parseAssignableListItem(allowModifiers, isBlockScope);
    }
  }
} exports.parseBindingList = parseBindingList;

function parseAssignableListItem(allowModifiers, isBlockScope) {
  if (allowModifiers) {
    _typescript.tsParseModifiers.call(void 0, [
      _keywords.ContextualKeyword._public,
      _keywords.ContextualKeyword._protected,
      _keywords.ContextualKeyword._private,
      _keywords.ContextualKeyword._readonly,
      _keywords.ContextualKeyword._override,
    ]);
  }

  parseMaybeDefault(isBlockScope);
  parseAssignableListItemTypes();
  parseMaybeDefault(isBlockScope, true /* leftAlreadyParsed */);
}

function parseAssignableListItemTypes() {
  if (_base.isFlowEnabled) {
    _flow.flowParseAssignableListItemTypes.call(void 0, );
  } else if (_base.isTypeScriptEnabled) {
    _typescript.tsParseAssignableListItemTypes.call(void 0, );
  }
}

// Parses assignment pattern around given atom if possible.
 function parseMaybeDefault(isBlockScope, leftAlreadyParsed = false) {
  if (!leftAlreadyParsed) {
    parseBindingAtom(isBlockScope);
  }
  if (!_index.eat.call(void 0, _types.TokenType.eq)) {
    return;
  }
  const eqIndex = _base.state.tokens.length - 1;
  _expression.parseMaybeAssign.call(void 0, );
  _base.state.tokens[eqIndex].rhsEndIndex = _base.state.tokens.length;
} exports.parseMaybeDefault = parseMaybeDefault;
node_modules/sucrase/dist/parser/traverser/statement.js (new normal file, 1332 lines, generated, vendored)
File diff suppressed because it is too large
node_modules/sucrase/dist/parser/traverser/util.js (new normal file, 104 lines, generated, vendored)
@@ -0,0 +1,104 @@
"use strict";Object.defineProperty(exports, "__esModule", {value: true});var _index = require('../tokenizer/index');

var _types = require('../tokenizer/types');
var _charcodes = require('../util/charcodes');
var _base = require('./base');

// ## Parser utilities

// Tests whether parsed token is a contextual keyword.
 function isContextual(contextualKeyword) {
  return _base.state.contextualKeyword === contextualKeyword;
} exports.isContextual = isContextual;

 function isLookaheadContextual(contextualKeyword) {
  const l = _index.lookaheadTypeAndKeyword.call(void 0, );
  return l.type === _types.TokenType.name && l.contextualKeyword === contextualKeyword;
} exports.isLookaheadContextual = isLookaheadContextual;

// Consumes contextual keyword if possible.
 function eatContextual(contextualKeyword) {
  return _base.state.contextualKeyword === contextualKeyword && _index.eat.call(void 0, _types.TokenType.name);
} exports.eatContextual = eatContextual;

// Asserts that following token is given contextual keyword.
 function expectContextual(contextualKeyword) {
  if (!eatContextual(contextualKeyword)) {
    unexpected();
  }
} exports.expectContextual = expectContextual;

// Test whether a semicolon can be inserted at the current position.
 function canInsertSemicolon() {
  return _index.match.call(void 0, _types.TokenType.eof) || _index.match.call(void 0, _types.TokenType.braceR) || hasPrecedingLineBreak();
} exports.canInsertSemicolon = canInsertSemicolon;

 function hasPrecedingLineBreak() {
  const prevToken = _base.state.tokens[_base.state.tokens.length - 1];
  const lastTokEnd = prevToken ? prevToken.end : 0;
  for (let i = lastTokEnd; i < _base.state.start; i++) {
    const code = _base.input.charCodeAt(i);
    if (
      code === _charcodes.charCodes.lineFeed ||
      code === _charcodes.charCodes.carriageReturn ||
      code === 0x2028 ||
      code === 0x2029
    ) {
      return true;
    }
  }
  return false;
} exports.hasPrecedingLineBreak = hasPrecedingLineBreak;

 function hasFollowingLineBreak() {
  const nextStart = _index.nextTokenStart.call(void 0, );
  for (let i = _base.state.end; i < nextStart; i++) {
    const code = _base.input.charCodeAt(i);
    if (
      code === _charcodes.charCodes.lineFeed ||
      code === _charcodes.charCodes.carriageReturn ||
      code === 0x2028 ||
      code === 0x2029
    ) {
      return true;
    }
  }
  return false;
} exports.hasFollowingLineBreak = hasFollowingLineBreak;

 function isLineTerminator() {
  return _index.eat.call(void 0, _types.TokenType.semi) || canInsertSemicolon();
} exports.isLineTerminator = isLineTerminator;

// Consume a semicolon, or, failing that, see if we are allowed to
// pretend that there is a semicolon at this position.
 function semicolon() {
  if (!isLineTerminator()) {
    unexpected('Unexpected token, expected ";"');
  }
} exports.semicolon = semicolon;

// Expect a token of a given type. If found, consume it, otherwise,
// raise an unexpected token error at given pos.
 function expect(type) {
  const matched = _index.eat.call(void 0, type);
  if (!matched) {
    unexpected(`Unexpected token, expected "${_types.formatTokenType.call(void 0, type)}"`);
  }
} exports.expect = expect;

/**
 * Transition the parser to an error state. All code needs to be written to naturally unwind in this
 * state, which allows us to backtrack without exceptions and without error plumbing everywhere.
 */
 function unexpected(message = "Unexpected token", pos = _base.state.start) {
  if (_base.state.error) {
    return;
  }
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  const err = new SyntaxError(message);
  err.pos = pos;
  _base.state.error = err;
  _base.state.pos = _base.input.length;
  _index.finishToken.call(void 0, _types.TokenType.eof);
} exports.unexpected = unexpected;