diff --git a/examples/grammars/css/css.js b/examples/grammars/css/css.js index 60868f831..5eec11f0b 100644 --- a/examples/grammars/css/css.js +++ b/examples/grammars/css/css.js @@ -68,7 +68,7 @@ const Uri = createToken({ name: "Uri", pattern: Lexer.NA }); const UriString = createToken({ name: "UriString", pattern: MAKE_PATTERN( - "url\\((:?{{spaces}})?({{string1}}|{{string2}})(:?{{spaces}})?\\)" + "url\\((:?{{spaces}})?({{string1}}|{{string2}})(:?{{spaces}})?\\)", ), categories: Uri, }); diff --git a/examples/grammars/ecma5/ecma5_parser.js b/examples/grammars/ecma5/ecma5_parser.js index d13041761..a866b0628 100644 --- a/examples/grammars/ecma5/ecma5_parser.js +++ b/examples/grammars/ecma5/ecma5_parser.js @@ -60,7 +60,7 @@ export class ECMAScript5Parser extends EmbeddedActionsParser { { ALT: () => $.SUBRULE($.ArrayLiteral) }, { ALT: () => $.SUBRULE($.ObjectLiteral) }, { ALT: () => $.SUBRULE($.ParenthesisExpression) }, - ]) + ]), ); }); @@ -246,7 +246,7 @@ export class ECMAScript5Parser extends EmbeddedActionsParser { { ALT: () => $.CONSUME(t.Minus) }, { ALT: () => $.CONSUME(t.Tilde) }, { ALT: () => $.CONSUME(t.Exclamation) }, - ]) + ]), ); $.SUBRULE($.UnaryExpression); }, @@ -276,7 +276,7 @@ export class ECMAScript5Parser extends EmbeddedActionsParser { ALT: () => $.CONSUME(t.AbsMultiplicativeOperator), }, { ALT: () => $.CONSUME(t.AbsAdditiveOperator) }, - ]) + ]), ); $.SUBRULE2($.UnaryExpression); }); @@ -303,7 +303,7 @@ export class ECMAScript5Parser extends EmbeddedActionsParser { ALT: () => $.CONSUME(t.AbsMultiplicativeOperator), }, { ALT: () => $.CONSUME(t.AbsAdditiveOperator) }, - ]) + ]), ); $.SUBRULE2($.UnaryExpression); }); @@ -378,7 +378,7 @@ export class ECMAScript5Parser extends EmbeddedActionsParser { { ALT: () => $.SUBRULE($.ThrowStatement) }, { ALT: () => $.SUBRULE($.TryStatement) }, { ALT: () => $.SUBRULE($.DebuggerStatement) }, - ]) + ]), ); }); @@ -575,7 +575,7 @@ export class ECMAScript5Parser extends EmbeddedActionsParser { }, }, ]); - } + }, ); // See 12.7 @@ -691,9 +691,9 @@ export class ECMAScript5Parser extends EmbeddedActionsParser { // happening in this case because that type of recovery can only happen if CONSUME(...) was invoked. this.SAVE_ERROR( new MismatchedTokenException( - "Line Terminator not allowed before Expression in Throw Statement" + "Line Terminator not allowed before Expression in Throw Statement", // TODO: create line terminator token on the fly? - ) + ), ); } $.SUBRULE($.Expression); diff --git a/examples/grammars/graphql/graphql.js b/examples/grammars/graphql/graphql.js index 99da276ea..78f328552 100644 --- a/examples/grammars/graphql/graphql.js +++ b/examples/grammars/graphql/graphql.js @@ -65,7 +65,7 @@ const createNotToken = function (config) { // All matching keywords now match the category of the NOT token. 
matchingKeywords.forEach((keywordTokType) => - keywordTokType.CATEGORIES.push(newNotTokenCategory) + keywordTokType.CATEGORIES.push(newNotTokenCategory), ); // Name always matches the Not token @@ -251,20 +251,20 @@ const IntValue = createToken({ const FloatValue = createToken({ name: "FloatValue", pattern: MAKE_PATTERN( - "{{IntegerPart}}{{FractionalPart}}({{ExponentPart}})?|{{IntegerPart}}{{ExponentPart}}" + "{{IntegerPart}}{{FractionalPart}}({{ExponentPart}})?|{{IntegerPart}}{{ExponentPart}}", ), }); FRAGMENT("EscapedCharacter", '[\\\\/"bfnrt]'); FRAGMENT("EscapedUnicode", "[0-9a-fA-F]{4}"); FRAGMENT( "StringCharacter", - '(?:[^\\\\"\\n\\r]|\\\\(?:{{EscapedUnicode}}|u{{EscapedCharacter}}))' + '(?:[^\\\\"\\n\\r]|\\\\(?:{{EscapedUnicode}}|u{{EscapedCharacter}}))', ); FRAGMENT("BlockStringCharacter", '\\\\"""|[^"]|"(?!"")'); const StringValue = createToken({ name: "StringValue", pattern: MAKE_PATTERN( - '"""(?:{{BlockStringCharacter}})*"""|"(?:{{StringCharacter}})*"' + '"""(?:{{BlockStringCharacter}})*"""|"(?:{{StringCharacter}})*"', ), }); diff --git a/examples/grammars/xml/xml_lexer.js b/examples/grammars/xml/xml_lexer.js index 1ff682c66..b0bbcc49a 100644 --- a/examples/grammars/xml/xml_lexer.js +++ b/examples/grammars/xml/xml_lexer.js @@ -40,12 +40,12 @@ function createToken(options) { FRAGMENT( "NameStartChar", - "([a-zA-Z]|\\u2070-\\u218F|\\u2C00-\\u2FEF|\\u3001-\\uD7FF|\\uF900-\\uFDCF|\\uFDF0-\\uFFFD)" + "([a-zA-Z]|\\u2070-\\u218F|\\u2C00-\\u2FEF|\\u3001-\\uD7FF|\\uF900-\\uFDCF|\\uFDF0-\\uFFFD)", ); FRAGMENT( "NameChar", - makePattern`${f.NameStartChar}|-|_|\\.|\\d|\\u00B7||[\\u0300-\\u036F]|[\\u203F-\\u2040]` + makePattern`${f.NameStartChar}|-|_|\\.|\\d|\\u00B7||[\\u0300-\\u036F]|[\\u203F-\\u2040]`, ); FRAGMENT("Name", makePattern`${f.NameStartChar}(${f.NameChar})*`); diff --git a/examples/lexer/custom_errors/custom_errors.js b/examples/lexer/custom_errors/custom_errors.js index f9555d8fb..9cb13863a 100644 --- a/examples/lexer/custom_errors/custom_errors.js +++ b/examples/lexer/custom_errors/custom_errors.js @@ -19,11 +19,11 @@ const OyVeyErrorMessageProvider = { // eslint-disable-next-line no-unused-vars -- template line, // eslint-disable-next-line no-unused-vars -- template - column + column, ) { return ( `Oy Vey!!! unexpected character: ->${fullText.charAt( - startOffset + startOffset, )}<- at offset: ${startOffset},` + ` skipped ${length} characters.` ); }, diff --git a/examples/lexer/python_indentation/python_indentation.js b/examples/lexer/python_indentation/python_indentation.js index 32022d92b..671c77b1f 100644 --- a/examples/lexer/python_indentation/python_indentation.js +++ b/examples/lexer/python_indentation/python_indentation.js @@ -70,7 +70,7 @@ function matchIndentBase(text, offset, matchedTokens, groups, type) { else if (currIndentLevel < prevIndentLevel && type === "outdent") { const matchIndentIndex = _.findLastIndex( indentStack, - (stackIndentDepth) => stackIndentDepth === currIndentLevel + (stackIndentDepth) => stackIndentDepth === currIndentLevel, ); // any outdent must match some previous indentation level. 
@@ -89,7 +89,7 @@ function matchIndentBase(text, offset, matchedTokens, groups, type) { for (let i = iStart; i < numberOfDedents; i++) { indentStack.pop(); matchedTokens.push( - createTokenInstance(Outdent, "", NaN, NaN, NaN, NaN, NaN, NaN) + createTokenInstance(Outdent, "", NaN, NaN, NaN, NaN, NaN, NaN), ); } @@ -176,7 +176,7 @@ export function tokenize(text) { //add remaining Outdents while (indentStack.length > 1) { lexResult.tokens.push( - createTokenInstance(Outdent, "", NaN, NaN, NaN, NaN, NaN, NaN) + createTokenInstance(Outdent, "", NaN, NaN, NaN, NaN, NaN, NaN), ); indentStack.pop(); } diff --git a/examples/parser/content_assist/content_assist_complex.js b/examples/parser/content_assist/content_assist_complex.js index 95f41f9ee..9419e5c9d 100644 --- a/examples/parser/content_assist/content_assist_complex.js +++ b/examples/parser/content_assist/content_assist_complex.js @@ -157,7 +157,7 @@ export function getContentAssistSuggestions(text, symbolTable) { const syntacticSuggestions = parserInstance.computeContentAssist( "startRule", - assistanceTokenVector + assistanceTokenVector, ); let finalSuggestions = []; diff --git a/examples/parser/content_assist/content_assist_complex_spec.js b/examples/parser/content_assist/content_assist_complex_spec.js index 6c888e7df..511c7d779 100644 --- a/examples/parser/content_assist/content_assist_complex_spec.js +++ b/examples/parser/content_assist/content_assist_complex_spec.js @@ -74,6 +74,6 @@ describe("The Official Content Assist Feature example Example", () => { .to.have.members(["function"]) .and.to.have.lengthOf(1); }); - } + }, ); }); diff --git a/examples/parser/content_assist/content_assist_simple.js b/examples/parser/content_assist/content_assist_simple.js index f413ee0a8..9c6d7a340 100644 --- a/examples/parser/content_assist/content_assist_simple.js +++ b/examples/parser/content_assist/content_assist_simple.js @@ -55,13 +55,13 @@ export function getContentAssistSuggestions(text) { const syntacticSuggestions = parserInstance.computeContentAssist( "myRule", - partialTokenVector + partialTokenVector, ); // The suggestions also include the context, we are only interested // in the TokenTypes in this example. const tokenTypesSuggestions = syntacticSuggestions.map( - (suggestion) => suggestion.nextTokenType + (suggestion) => suggestion.nextTokenType, ); return tokenTypesSuggestions; diff --git a/examples/parser/custom_errors/custom_errors_spec.js b/examples/parser/custom_errors/custom_errors_spec.js index 0d6c89493..4729ff0e5 100644 --- a/examples/parser/custom_errors/custom_errors_spec.js +++ b/examples/parser/custom_errors/custom_errors_spec.js @@ -12,14 +12,14 @@ describe("The Chevrotain support for custom error provider", () => { const errorsOverride = parseMismatch("A C"); expect(errorsOverride).to.have.lengthOf(1); expect(errorsOverride[0].message).to.equal( - "expecting Bravo at end of mis_match" + "expecting Bravo at end of mis_match", ); // we only modified the error for Bravo mismatches const errorsDefault = parseMismatch("C"); expect(errorsDefault).to.have.lengthOf(1); expect(errorsDefault[0].message).to.equal( - "Expecting token of type --> Alpha <-- but found --> 'C' <--" + "Expecting token of type --> Alpha <-- but found --> 'C' <--", ); }); @@ -33,7 +33,7 @@ describe("The Chevrotain support for custom error provider", () => { const errors = parseRedundant("A B C"); expect(errors).to.have.lengthOf(1); expect(errors[0].message).to.equal( - "very bad dog! you still have some input remaining at offset:4" + "very bad dog! 
you still have some input remaining at offset:4", ); }); @@ -41,7 +41,7 @@ const errors = parseNoViable("C"); expect(errors).to.have.lengthOf(1); expect(errors[0].message).to.equal( - "Expecting: one of these possible Token sequences:\n 1. [Alpha]\n 2. [Bravo]\nbut found: 'C'" + "Expecting: one of these possible Token sequences:\n 1. [Alpha]\n 2. [Bravo]\nbut found: 'C'", ); }); @@ -49,7 +49,7 @@ const errors = parseEarlyExit("A"); expect(errors).to.have.lengthOf(1); expect(errors[0].message).to.equal( - "Esperando por lo menos una iteración de: Bravo" + "Esperando por lo menos una iteración de: Bravo", ); }); }); diff --git a/examples/tutorial/step2_parsing/step2_parsing.js b/examples/tutorial/step2_parsing/step2_parsing.js index f0447f8cd..a1e4418da 100644 --- a/examples/tutorial/step2_parsing/step2_parsing.js +++ b/examples/tutorial/step2_parsing/step2_parsing.js @@ -102,7 +102,7 @@ export function parse(inputText) { if (parserInstance.errors.length > 0) { throw Error( "Sad sad panda, parsing errors detected!\n" + - parserInstance.errors[0].message + parserInstance.errors[0].message, ); } } diff --git a/examples/tutorial/step2_parsing/step2_parsing_spec.js b/examples/tutorial/step2_parsing/step2_parsing_spec.js index 832ec6233..8fab27624 100644 --- a/examples/tutorial/step2_parsing/step2_parsing_spec.js +++ b/examples/tutorial/step2_parsing/step2_parsing_spec.js @@ -12,10 +12,10 @@ describe("Chevrotain Tutorial", () => { // missing table name let inputText = "SELECT FROM table2"; expect(() => parse(inputText)).to.throw( - "expecting at least one iteration which starts with one of these possible Token sequences" + "expecting at least one iteration which starts with one of these possible Token sequences", ); expect(() => parse(inputText)).to.throw( - "<[Identifier]>\nbut found: 'FROM'" + "<[Identifier]>\nbut found: 'FROM'", ); }); }); diff --git a/examples/tutorial/step3_actions/main.js b/examples/tutorial/step3_actions/main.js index a833fe671..574d0934e 100644 --- a/examples/tutorial/step3_actions/main.js +++ b/examples/tutorial/step3_actions/main.js @@ -12,5 +12,5 @@ console.log(JSON.stringify(astFromVisitor, null, "\t")); assert.deepEqual( astFromVisitor, astFromEmbedded, - "Both ASTs should be identical" + "Both ASTs should be identical", ); diff --git a/examples/tutorial/step3_actions/step3a_actions_visitor.js b/examples/tutorial/step3_actions/step3a_actions_visitor.js index 7bdf0eeaf..3c97dfe05 100644 --- a/examples/tutorial/step3_actions/step3a_actions_visitor.js +++ b/examples/tutorial/step3_actions/step3a_actions_visitor.js @@ -116,7 +116,7 @@ export function toAstVisitor(inputText) { if (parserInstance.errors.length > 0) { throw Error( "Sad sad panda, parsing errors detected!\n" + - parserInstance.errors[0].message + parserInstance.errors[0].message, ); } diff --git a/examples/tutorial/step3_actions/step3b_actions_embedded.js b/examples/tutorial/step3_actions/step3b_actions_embedded.js index 6b4381f6c..aa674f054 100644 --- a/examples/tutorial/step3_actions/step3b_actions_embedded.js +++ b/examples/tutorial/step3_actions/step3b_actions_embedded.js @@ -145,7 +145,7 @@ export function toAstEmbedded(inputText) { if (parserInstance.errors.length > 0) { throw Error( "Sad sad panda, parsing errors detected!\n" + - parserInstance.errors[0].message + parserInstance.errors[0].message, ); } diff --git
a/examples/tutorial/step4_error_recovery/step4_error_recovery_spec.js b/examples/tutorial/step4_error_recovery/step4_error_recovery_spec.js index ffe3bb97c..ceef0b879 100644 --- a/examples/tutorial/step4_error_recovery/step4_error_recovery_spec.js +++ b/examples/tutorial/step4_error_recovery/step4_error_recovery_spec.js @@ -34,7 +34,7 @@ describe("Chevrotain Tutorial", () => { let parsingResult = parseJsonToCst(invalidInput); expect(parsingResult.parseErrors).to.have.lengthOf(1); expect(parsingResult.parseErrors[0].message).to.include( - "Expecting token of type --> Colon <-- but found --> '666' <--" + "Expecting token of type --> Colon <-- but found --> '666' <--", ); let minimizedCst = minimizeCst(parsingResult.cst); @@ -116,7 +116,7 @@ describe("Chevrotain Tutorial", () => { let parsingResult = parseJsonToCst(invalidInput); expect(parsingResult.parseErrors).to.have.lengthOf(1); expect(parsingResult.parseErrors[0].message).to.include( - "Expecting token of type --> Colon <-- but found --> '}' <--" + "Expecting token of type --> Colon <-- but found --> '}' <--", ); let minimizedCst = minimizeCst(parsingResult.cst); @@ -194,7 +194,7 @@ describe("Chevrotain Tutorial", () => { let parsingResult = parseJsonToCst(invalidInput); expect(parsingResult.parseErrors).to.have.lengthOf(1); expect(parsingResult.parseErrors[0].message).to.include( - "Expecting token of type --> RCurly <-- but found --> '666'" + "Expecting token of type --> RCurly <-- but found --> '666'", ); let minimizedCst = minimizeCst(parsingResult.cst); @@ -205,15 +205,15 @@ describe("Chevrotain Tutorial", () => { // because the parser will re-sync to the next comma "," as that is the expected next Token after a key/value pair. expect(objectItemCstArr).to.have.lengthOf(3); expect(objectItemCstArr[0].children.StringLiteral[0].image).to.equal( - '"key1"' + '"key1"', ); expect(objectItemCstArr[1].children.StringLiteral[0].image).to.equal( - '"key2"' + '"key2"', ); // key3 will be re-synced // key4 appears in the input AFTER the error, yet due to error recovery it is still appears in the output expect(objectItemCstArr[2].children.StringLiteral[0].image).to.equal( - '"key4"' + '"key4"', ); }); @@ -228,7 +228,7 @@ describe("Chevrotain Tutorial", () => { let parsingResult = parseJsonToCst(invalidInput); expect(parsingResult.parseErrors).to.have.lengthOf(1); expect(parsingResult.parseErrors[0].message).to.include( - "Expecting: one of these possible Token sequences:\n 1. [StringLiteral]\n 2. [NumberLiteral]\n 3. [LCurly]\n 4. [LSquare]\n 5. [True]\n 6. [False]\n 7. [Null]\nbut found: ':'" + "Expecting: one of these possible Token sequences:\n 1. [StringLiteral]\n 2. [NumberLiteral]\n 3. [LCurly]\n 4. [LSquare]\n 5. [True]\n 6. [False]\n 7. [Null]\nbut found: ':'", ); let minimizedCst = minimizeCst(parsingResult.cst); @@ -237,18 +237,18 @@ describe("Chevrotain Tutorial", () => { minimizedCst.children.object[0].children.objectItem; expect(objectItemCstArr).to.have.lengthOf(4); expect(objectItemCstArr[0].children.StringLiteral[0].image).to.equal( - '"firstName"' + '"firstName"', ); // There is an error inside "someData" value, but we still get the key back (and part of the value...) expect(objectItemCstArr[1].children.StringLiteral[0].image).to.equal( - '"someData"' + '"someData"', ); // These keys appear AFTER the error, yet they were still parsed successfully due to error recovery. 
expect(objectItemCstArr[2].children.StringLiteral[0].image).to.equal( - '"isAlive"' + '"isAlive"', ); expect(objectItemCstArr[3].children.StringLiteral[0].image).to.equal( - '"age"' + '"age"', ); }); }); diff --git a/package.json b/package.json index 738051ab0..5608e5f46 100644 --- a/package.json +++ b/package.json @@ -48,7 +48,7 @@ "lint-staged": "13.2.3", "mocha": "10.2.0", "npm-run-all": "4.1.5", - "prettier": "2.8.8", + "prettier": "3.0.0", "shx": "0.3.4", "cz-conventional-changelog": "3.3.0", "@commitlint/cli": "17.6.6", diff --git a/packages/chevrotain/benchmark_web/parsers/api.js b/packages/chevrotain/benchmark_web/parsers/api.js index 9b440080f..dc627a966 100644 --- a/packages/chevrotain/benchmark_web/parsers/api.js +++ b/packages/chevrotain/benchmark_web/parsers/api.js @@ -10,7 +10,7 @@ function parseBench( parser, rootRule, options, - parserConfig + parserConfig, ) { if (lexerInstance === undefined) { if (customLexer !== undefined) { diff --git a/packages/chevrotain/benchmark_web/parsers/css/css_parser.js b/packages/chevrotain/benchmark_web/parsers/css/css_parser.js index d2bafcf14..a4ae8b542 100644 --- a/packages/chevrotain/benchmark_web/parsers/css/css_parser.js +++ b/packages/chevrotain/benchmark_web/parsers/css/css_parser.js @@ -63,7 +63,7 @@ var Uri = createToken({ name: "Uri", pattern: Lexer.NA }); var UriString = createToken({ name: "UriString", pattern: MAKE_PATTERN( - "url\\((:?{{spaces}})?({{string1}}|{{string2}})(:?{{spaces}})?\\)" + "url\\((:?{{spaces}})?({{string1}}|{{string2}})(:?{{spaces}})?\\)", ), categories: Uri, }); diff --git a/packages/chevrotain/benchmark_web/parsers/ecma5/ecma5_parser.js b/packages/chevrotain/benchmark_web/parsers/ecma5/ecma5_parser.js index 390c1fe93..2dc92bbe3 100644 --- a/packages/chevrotain/benchmark_web/parsers/ecma5/ecma5_parser.js +++ b/packages/chevrotain/benchmark_web/parsers/ecma5/ecma5_parser.js @@ -59,7 +59,7 @@ class ECMAScript5Parser extends ChevrotainParser { { ALT: () => $.SUBRULE($.ArrayLiteral) }, { ALT: () => $.SUBRULE($.ObjectLiteral) }, { ALT: () => $.SUBRULE($.ParenthesisExpression) }, - ]) + ]), ); }); @@ -245,7 +245,7 @@ class ECMAScript5Parser extends ChevrotainParser { { ALT: () => $.CONSUME(t.Minus) }, { ALT: () => $.CONSUME(t.Tilde) }, { ALT: () => $.CONSUME(t.Exclamation) }, - ]) + ]), ); $.SUBRULE($.UnaryExpression); }, @@ -275,7 +275,7 @@ class ECMAScript5Parser extends ChevrotainParser { ALT: () => $.CONSUME(t.AbsMultiplicativeOperator), }, { ALT: () => $.CONSUME(t.AbsAdditiveOperator) }, - ]) + ]), ); $.SUBRULE2($.UnaryExpression); }); @@ -302,7 +302,7 @@ class ECMAScript5Parser extends ChevrotainParser { ALT: () => $.CONSUME(t.AbsMultiplicativeOperator), }, { ALT: () => $.CONSUME(t.AbsAdditiveOperator) }, - ]) + ]), ); $.SUBRULE2($.UnaryExpression); }); @@ -371,7 +371,7 @@ class ECMAScript5Parser extends ChevrotainParser { { ALT: () => $.SUBRULE($.ThrowStatement) }, { ALT: () => $.SUBRULE($.TryStatement) }, { ALT: () => $.SUBRULE($.DebuggerStatement) }, - ]) + ]), ); }); @@ -568,7 +568,7 @@ class ECMAScript5Parser extends ChevrotainParser { }, }, ]); - } + }, ); // See 12.7 @@ -684,9 +684,9 @@ class ECMAScript5Parser extends ChevrotainParser { // happening in this case because that type of recovery can only happen if CONSUME(...) was invoked. this.SAVE_ERROR( new chevrotain.exceptions.MismatchedTokenException( - "Line Terminator not allowed before Expression in Throw Statement" + "Line Terminator not allowed before Expression in Throw Statement", // TODO: create line terminator token on the fly? 
- ) + ), ); } $.SUBRULE($.Expression); diff --git a/packages/chevrotain/benchmark_web/parsers/json/json_parser.js b/packages/chevrotain/benchmark_web/parsers/json/json_parser.js index c289d4408..058afb40e 100644 --- a/packages/chevrotain/benchmark_web/parsers/json/json_parser.js +++ b/packages/chevrotain/benchmark_web/parsers/json/json_parser.js @@ -149,7 +149,7 @@ class parser extends ChevrotainParser { $.CONSUME(Null) } } - ]) + ]), ); }); diff --git a/packages/chevrotain/benchmark_web/parsers/worker_impel.js b/packages/chevrotain/benchmark_web/parsers/worker_impel.js index 1bd9a2149..2c17d361a 100644 --- a/packages/chevrotain/benchmark_web/parsers/worker_impel.js +++ b/packages/chevrotain/benchmark_web/parsers/worker_impel.js @@ -30,7 +30,7 @@ onmessage = function (event) { parser, startRule, options, - parserConfig + parserConfig, ); postMessage(0); } catch (e) { diff --git a/packages/chevrotain/diagrams/src/diagrams_behavior.js b/packages/chevrotain/diagrams/src/diagrams_behavior.js index 5eceb896b..81b6246e6 100644 --- a/packages/chevrotain/diagrams/src/diagrams_behavior.js +++ b/packages/chevrotain/diagrams/src/diagrams_behavior.js @@ -23,16 +23,16 @@ } var diagramHeaders = toArr( - document.getElementsByClassName("diagramHeader") + document.getElementsByClassName("diagramHeader"), ); diagramHeaders.forEach(function (header) { header.addEventListener( "mouseover", - toggleNonTerminalUsageAndDef_fromHeader + toggleNonTerminalUsageAndDef_fromHeader, ); header.addEventListener( "mouseout", - toggleNonTerminalUsageAndDef_fromHeader + toggleNonTerminalUsageAndDef_fromHeader, ); }); @@ -41,11 +41,11 @@ noneTerminalsText.forEach(function (nonTerminal) { nonTerminal.addEventListener( "mouseover", - toggleNonTerminalUsageAndDef_fromNoneTerminal + toggleNonTerminalUsageAndDef_fromNoneTerminal, ); nonTerminal.addEventListener( "mouseout", - toggleNonTerminalUsageAndDef_fromNoneTerminal + toggleNonTerminalUsageAndDef_fromNoneTerminal, ); if (scrollingEnabled) { @@ -166,7 +166,7 @@ // min time .1, max time .8 seconds var time = Math.max( 0.1, - Math.min(Math.abs(scrollY - scrollTargetY) / speed, 0.8) + Math.min(Math.abs(scrollY - scrollTargetY) / speed, 0.8), ); // easing equations from https://github.com/danro/easing-js/blob/master/easing.js diff --git a/packages/chevrotain/diagrams/src/diagrams_builder.js b/packages/chevrotain/diagrams/src/diagrams_builder.js index 639ad1ad6..4e195b067 100644 --- a/packages/chevrotain/diagrams/src/diagrams_builder.js +++ b/packages/chevrotain/diagrams/src/diagrams_builder.js @@ -34,7 +34,7 @@ topRules.forEach(function (production) { var currDiagramHtml = convertProductionToDiagram( production, - production.name + production.name, ); diagramsHtml += '
<h2 class="diagramHeader">
' + @@ -78,7 +78,7 @@ prod.occurrenceInParent, topRuleName, dslRuleName, - prod.name + prod.name, ); } @@ -98,7 +98,7 @@ getNonTerminalName(prod), undefined, prod.occurrenceInParent, - topRuleName + topRuleName, ); } else if (prod.type !== "Terminal") { var subDiagrams = definitionsToSubDiagrams(prod.definition, topRuleName); @@ -147,13 +147,13 @@ createTerminalFromSerializedGast( prod.separator, topRuleName, - "many_sep" + "many_sep", ), - ].concat(subDiagrams) - ) + ].concat(subDiagrams), + ), ), - ]) - ) + ]), + ), ); } else { throw Error("Empty Optional production, OOPS!"); @@ -171,12 +171,12 @@ createTerminalFromSerializedGast( prod.separator, topRuleName, - "at_least_one_sep" + "at_least_one_sep", ), - ].concat(subDiagrams) - ) + ].concat(subDiagrams), + ), ), - ]) + ]), ); } else { throw Error("Empty Optional production, OOPS!"); diff --git a/packages/chevrotain/diagrams/src/diagrams_serializer.js b/packages/chevrotain/diagrams/src/diagrams_serializer.js index 9ae41b8bb..264411186 100644 --- a/packages/chevrotain/diagrams/src/diagrams_serializer.js +++ b/packages/chevrotain/diagrams/src/diagrams_serializer.js @@ -11,7 +11,7 @@ function serializeGrammarToFile(targetFilePath, varName, parserInstance) { // generated a JavaScript file which exports the serialized grammar on the global scope (Window) fs.writeFileSync( targetFilePath, - "var " + varName + " = " + serializedGrammarText + "var " + varName + " = " + serializedGrammarText, ); } diff --git a/packages/chevrotain/diagrams/src/main.js b/packages/chevrotain/diagrams/src/main.js index f9cff2ab9..509b66430 100644 --- a/packages/chevrotain/diagrams/src/main.js +++ b/packages/chevrotain/diagrams/src/main.js @@ -8,7 +8,7 @@ // like Node. module.exports = factory( require("./diagrams_builder"), - require("./diagrams_behavior") + require("./diagrams_behavior"), ); } else { // Browser globals (root is window) diff --git a/packages/chevrotain/diagrams/vendor/railroad-diagrams.js b/packages/chevrotain/diagrams/vendor/railroad-diagrams.js index 34be69776..e544311e9 100644 --- a/packages/chevrotain/diagrams/vendor/railroad-diagrams.js +++ b/packages/chevrotain/diagrams/vendor/railroad-diagrams.js @@ -207,7 +207,7 @@ FakeSVG.call(this, "svg", { class: Diagram.DIAGRAM_CLASS }); if (stackAtIllegalPosition(items)) { throw new RangeError( - "Stack() must only occur at the very last position of Diagram()." + "Stack() must only occur at the very last position of Diagram().", ); } this.items = items.map(wrapString); @@ -224,13 +224,13 @@ null, this.items.map(function (x) { return x.up; - }) + }), ); this.down = Math.max.apply( null, this.items.map(function (x) { return x.down; - }) + }), ); this.formatted = false; } @@ -248,7 +248,7 @@ y += this.up; var g = FakeSVG( "g", - Diagram.STROKE_ODD_PIXEL_LENGTH ? { transform: "translate(.5 .5)" } : {} + Diagram.STROKE_ODD_PIXEL_LENGTH ? { transform: "translate(.5 .5)" } : {}, ); for (var i = 0; i < this.items.length; i++) { var item = this.items[i]; @@ -308,7 +308,7 @@ FakeSVG.call(this, "g"); if (stackAtIllegalPosition(items)) { throw new RangeError( - "Stack() must only occur at the very last position of Sequence()." + "Stack() must only occur at the very last position of Sequence().", ); } this.items = items.map(wrapString); @@ -358,7 +358,7 @@ FakeSVG.call(this, "g"); if (stackAtIllegalPosition(items)) { throw new RangeError( - "Stack() must only occur at the very last position of Stack()." 
+ "Stack() must only occur at the very last position of Stack().", ); } if (items.length === 0) { @@ -415,7 +415,7 @@ .format( x, y, - Math.max(item.width + item.offsetX, Diagram.ARC_RADIUS * 2) + Math.max(item.width + item.offsetX, Diagram.ARC_RADIUS * 2), ) .addTo(this); x += Math.max(item.width + item.offsetX, Diagram.ARC_RADIUS * 2); @@ -459,7 +459,7 @@ throw new TypeError("The first argument of Choice() must be an integer."); } else if (normal < 0 || normal >= items.length) { throw new RangeError( - "The first argument of Choice() must be an index for one of the items." + "The first argument of Choice() must be an index for one of the items.", ); } else { this.normal = normal; @@ -478,7 +478,7 @@ if (i < normal) { this.up += Math.max( Diagram.ARC_RADIUS, - item.up + item.height + item.down + Diagram.VERTICAL_SEPARATION + item.up + item.height + item.down + Diagram.VERTICAL_SEPARATION, ); } if (i == normal) { @@ -488,7 +488,7 @@ if (i > normal) { this.down += Math.max( Diagram.ARC_RADIUS, - Diagram.VERTICAL_SEPARATION + item.up + item.down + item.height + Diagram.VERTICAL_SEPARATION + item.up + item.down + item.height, ); } } @@ -515,7 +515,7 @@ this.items[i + 1].up + Diagram.VERTICAL_SEPARATION + item.height + - item.down + item.down, ); } Path(x, y) @@ -528,14 +528,14 @@ .addTo(this); Path( x + Diagram.ARC_RADIUS * 2 + innerWidth, - y - distanceFromY + item.height + y - distanceFromY + item.height, ) .arc("ne") .down( distanceFromY - item.height + this.items[this.normal].height - - Diagram.ARC_RADIUS * 2 + Diagram.ARC_RADIUS * 2, ) .arc("ws") .addTo(this); @@ -543,7 +543,7 @@ Diagram.ARC_RADIUS, item.up + Diagram.VERTICAL_SEPARATION + - (i == 0 ? 0 : this.items[i - 1].down + this.items[i - 1].height) + (i == 0 ? 0 : this.items[i - 1].down + this.items[i - 1].height), ); } @@ -567,7 +567,7 @@ this.items[i - 1].height + this.items[i - 1].down + Diagram.VERTICAL_SEPARATION + - item.up + item.up, ); } Path(x, y) @@ -580,14 +580,14 @@ .addTo(this); Path( x + Diagram.ARC_RADIUS * 2 + innerWidth, - y + distanceFromY + item.height + y + distanceFromY + item.height, ) .arc("se") .up( distanceFromY - Diagram.ARC_RADIUS * 2 + item.height - - this.items[this.normal].height + this.items[this.normal].height, ) .arc("wn") .addTo(this); @@ -596,7 +596,7 @@ item.height + item.down + Diagram.VERTICAL_SEPARATION + - (i == last ? 0 : this.items[i + 1].up) + (i == last ? 
0 : this.items[i + 1].up), ); } @@ -626,7 +626,7 @@ Diagram.VERTICAL_SEPARATION + this.rep.up + this.rep.height + - this.rep.down + this.rep.down, ); } subclassOf(OneOrMore, FakeSVG); @@ -655,7 +655,7 @@ this.item.height + this.item.down + Diagram.VERTICAL_SEPARATION + - this.rep.up + this.rep.up, ); Path(x + Diagram.ARC_RADIUS, y) .arc("nw") @@ -666,19 +666,19 @@ .format( x + Diagram.ARC_RADIUS, y + distanceFromY, - this.width - Diagram.ARC_RADIUS * 2 + this.width - Diagram.ARC_RADIUS * 2, ) .addTo(this); Path( x + this.width - Diagram.ARC_RADIUS, - y + distanceFromY + this.rep.height + y + distanceFromY + this.rep.height, ) .arc("se") .up( distanceFromY - Diagram.ARC_RADIUS * 2 + this.rep.height - - this.item.height + this.item.height, ) .arc("en") .addTo(this); @@ -738,7 +738,7 @@ occurrenceIdx, topRuleName, dslRuleName, - tokenName + tokenName, ) { if (!(this instanceof Terminal)) return new Terminal( @@ -748,7 +748,7 @@ occurrenceIdx, topRuleName, dslRuleName, - tokenName + tokenName, ); FakeSVG.call(this, "g", { class: "terminal" }); this.text = text; @@ -798,7 +798,7 @@ tokenName: this.tokenName, label: this.label, }, - this.text + this.text, ); var title = FakeSVG("title", {}, this.title); if (this.href) @@ -853,7 +853,7 @@ topRuleName: this.topRuleName, ruleName: this.ruleName, }, - this.text + this.text, ); if (this.href) FakeSVG("a", { "xlink:href": this.href }, [text]).addTo(this); @@ -889,7 +889,7 @@ y: y + 5, class: "comment", }, - this.text + this.text, ).addTo(this); return this; }; diff --git a/packages/chevrotain/scripts/version-update.js b/packages/chevrotain/scripts/version-update.js index 903867289..addd2ae4f 100644 --- a/packages/chevrotain/scripts/version-update.js +++ b/packages/chevrotain/scripts/version-update.js @@ -13,14 +13,14 @@ const myRepo = git("../../"); const newVersion = currVersion; const oldVersionRegExpGlobal = new RegExp( oldVersion.replace(/\./g, "\\."), - "g" + "g", ); console.log("bumping version on <" + versionPath + ">"); const bumpedVersionTsFileContents = apiString.replace( oldVersionRegExpGlobal, - newVersion + newVersion, ); fs.writeFileSync(versionPath, bumpedVersionTsFileContents); @@ -28,7 +28,7 @@ console.log("bumping unpkg link in: <" + readmePath + ">"); const readmeContents = fs.readFileSync(readmePath, "utf8").toString(); const bumpedReadmeContents = readmeContents.replace( oldVersionRegExpGlobal, - newVersion + newVersion, ); fs.writeFileSync(readmePath, bumpedReadmeContents); diff --git a/packages/chevrotain/src/api.ts b/packages/chevrotain/src/api.ts index 83c15a763..8d6a24540 100644 --- a/packages/chevrotain/src/api.ts +++ b/packages/chevrotain/src/api.ts @@ -71,7 +71,7 @@ export function clearCache() { console.warn( "The clearCache function was 'soft' removed from the Chevrotain API." + "\n\t It performs no action other than printing this message." 
+ - "\n\t Please avoid using it as it will be completely removed in the future" + "\n\t Please avoid using it as it will be completely removed in the future", ); } @@ -81,7 +81,7 @@ export class Parser { constructor() { throw new Error( "The Parser class has been deprecated, use CstParser or EmbeddedActionsParser instead.\t\n" + - "See: https://chevrotain.io/docs/changes/BREAKING_CHANGES.html#_7-0-0" + "See: https://chevrotain.io/docs/changes/BREAKING_CHANGES.html#_7-0-0", ); } } diff --git a/packages/chevrotain/src/diagrams/render_public.ts b/packages/chevrotain/src/diagrams/render_public.ts index d241ff34b..6d03de011 100644 --- a/packages/chevrotain/src/diagrams/render_public.ts +++ b/packages/chevrotain/src/diagrams/render_public.ts @@ -9,7 +9,7 @@ export function createSyntaxDiagramsCode( }: { resourceBase?: string; css?: string; - } = {} + } = {}, ) { const header = ` diff --git a/packages/chevrotain/src/parse/cst/cst.ts b/packages/chevrotain/src/parse/cst/cst.ts index efd5e716a..d3e0486ad 100644 --- a/packages/chevrotain/src/parse/cst/cst.ts +++ b/packages/chevrotain/src/parse/cst/cst.ts @@ -9,7 +9,7 @@ import { CstNode, CstNodeLocation, IToken } from "@chevrotain/types"; */ export function setNodeLocationOnlyOffset( currNodeLocation: CstNodeLocation, - newLocationInfo: Required> + newLocationInfo: Required>, ): void { // First (valid) update for this cst node if (isNaN(currNodeLocation.startOffset) === true) { @@ -37,7 +37,7 @@ export function setNodeLocationOnlyOffset( */ export function setNodeLocationFull( currNodeLocation: CstNodeLocation, - newLocationInfo: CstNodeLocation + newLocationInfo: CstNodeLocation, ): void { // First (valid) update for this cst node if (isNaN(currNodeLocation.startOffset) === true) { @@ -65,7 +65,7 @@ export function setNodeLocationFull( export function addTerminalToCst( node: CstNode, token: IToken, - tokenTypeName: string + tokenTypeName: string, ): void { if (node.children[tokenTypeName] === undefined) { node.children[tokenTypeName] = [token]; @@ -77,7 +77,7 @@ export function addTerminalToCst( export function addNoneTerminalToCst( node: CstNode, ruleName: string, - ruleResult: any + ruleResult: any, ): void { if (node.children[ruleName] === undefined) { node.children[ruleName] = [ruleResult]; diff --git a/packages/chevrotain/src/parse/cst/cst_visitor.ts b/packages/chevrotain/src/parse/cst/cst_visitor.ts index 50aadd3dd..8d5817dfe 100644 --- a/packages/chevrotain/src/parse/cst/cst_visitor.ts +++ b/packages/chevrotain/src/parse/cst/cst_visitor.ts @@ -32,7 +32,7 @@ export function defaultVisit(ctx: any, param: IN): void { export function createBaseSemanticVisitorConstructor( grammarName: string, - ruleNames: string[] + ruleNames: string[], ): { new (...args: any[]): ICstVisitor; } { @@ -65,11 +65,11 @@ export function createBaseSemanticVisitorConstructor( if (!isEmpty(semanticDefinitionErrors)) { const errorMessages = map( semanticDefinitionErrors, - (currDefError) => currDefError.msg + (currDefError) => currDefError.msg, ); throw Error( `Errors Detected in CST Visitor <${this.constructor.name}>:\n\t` + - `${errorMessages.join("\n\n").replace(/\n/g, "\n\t")}` + `${errorMessages.join("\n\n").replace(/\n/g, "\n\t")}`, ); } }, @@ -86,7 +86,7 @@ export function createBaseSemanticVisitorConstructor( export function createBaseVisitorConstructorWithDefaults( grammarName: string, ruleNames: string[], - baseConstructor: Function + baseConstructor: Function, ): { new (...args: any[]): ICstVisitor; } { @@ -121,7 +121,7 @@ export interface 
IVisitorDefinitionError { export function validateVisitor( visitorInstance: ICstVisitor, - ruleNames: string[] + ruleNames: string[], ): IVisitorDefinitionError[] { const missingErrors = validateMissingCstMethods(visitorInstance, ruleNames); @@ -130,7 +130,7 @@ export function validateVisitor( export function validateMissingCstMethods( visitorInstance: ICstVisitor, - ruleNames: string[] + ruleNames: string[], ): IVisitorDefinitionError[] { const missingRuleNames = filter(ruleNames, (currRuleName) => { return isFunction((visitorInstance as any)[currRuleName]) === false; @@ -146,7 +146,7 @@ export function validateMissingCstMethods( type: CstVisitorDefinitionError.MISSING_METHOD, methodName: currRuleName, }; - } + }, ); return compact(errors); diff --git a/packages/chevrotain/src/parse/errors_public.ts b/packages/chevrotain/src/parse/errors_public.ts index 5067060da..31896a4d4 100644 --- a/packages/chevrotain/src/parse/errors_public.ts +++ b/packages/chevrotain/src/parse/errors_public.ts @@ -51,21 +51,21 @@ export const defaultParserErrorProvider: IParserErrorMessageProvider = { const allLookAheadPaths = reduce( expectedPathsPerAlt, (result, currAltPaths) => result.concat(currAltPaths), - [] as TokenType[][] + [] as TokenType[][], ); const nextValidTokenSequences = map( allLookAheadPaths, (currPath) => `[${map(currPath, (currTokenType) => tokenLabel(currTokenType)).join( - ", " - )}]` + ", ", + )}]`, ); const nextValidSequenceItems = map( nextValidTokenSequences, - (itemMsg, idx) => ` ${idx + 1}. ${itemMsg}` + (itemMsg, idx) => ` ${idx + 1}. ${itemMsg}`, ); const calculatedDescription = `one of these possible Token sequences:\n${nextValidSequenceItems.join( - "\n" + "\n", )}`; return errPrefix + calculatedDescription + errSuffix; @@ -90,8 +90,8 @@ export const defaultParserErrorProvider: IParserErrorMessageProvider = { expectedIterationPaths, (currPath) => `[${map(currPath, (currTokenType) => tokenLabel(currTokenType)).join( - "," - )}]` + ",", + )}]`, ); const calculatedDescription = `expecting at least one iteration which starts with one of these possible Token sequences::\n ` + @@ -108,7 +108,7 @@ export const defaultGrammarResolverErrorProvider: IGrammarResolverErrorMessagePr { buildRuleNotFoundError( topLevelRule: Rule, - undefinedRule: NonTerminal + undefinedRule: NonTerminal, ): string { const msg = "Invalid grammar, reference to a rule which is not defined: ->" + @@ -125,10 +125,10 @@ export const defaultGrammarValidatorErrorProvider: IGrammarValidatorErrorMessage { buildDuplicateFoundError( topLevelRule: Rule, - duplicateProds: IProductionWithOccurrence[] + duplicateProds: IProductionWithOccurrence[], ): string { function getExtraProductionArgument( - prod: IProductionWithOccurrence + prod: IProductionWithOccurrence, ): string { if (prod instanceof Terminal) { return prod.terminalType.name; @@ -180,13 +180,13 @@ export const defaultGrammarValidatorErrorProvider: IGrammarValidatorErrorMessage alternation: Alternation; }): string { const pathMsg = map(options.prefixPath, (currTok) => - tokenLabel(currTok) + tokenLabel(currTok), ).join(", "); const occurrence = options.alternation.idx === 0 ? 
"" : options.alternation.idx; const errMsg = `Ambiguous alternatives: <${options.ambiguityIndices.join( - " ," + " ,", )}> due to common lookahead prefix\n` + `in inside <${options.topLevelRule.name}> Rule,\n` + `<${pathMsg}> may appears as a prefix path in all these alternatives.\n` + @@ -203,13 +203,13 @@ export const defaultGrammarValidatorErrorProvider: IGrammarValidatorErrorMessage alternation: Alternation; }): string { const pathMsg = map(options.prefixPath, (currtok) => - tokenLabel(currtok) + tokenLabel(currtok), ).join(", "); const occurrence = options.alternation.idx === 0 ? "" : options.alternation.idx; let currMessage = `Ambiguous Alternatives Detected: <${options.ambiguityIndices.join( - " ," + " ,", )}> in ` + ` inside <${options.topLevelRule.name}> Rule,\n` + `<${pathMsg}> may appears as a prefix path in all these alternatives.\n`; @@ -282,7 +282,7 @@ export const defaultGrammarValidatorErrorProvider: IGrammarValidatorErrorMessage const ruleName = options.topLevelRule.name; const pathNames = map( options.leftRecursionPath, - (currRule) => currRule.name + (currRule) => currRule.name, ); const leftRecursivePath = `${ruleName} --> ${pathNames .concat([ruleName]) diff --git a/packages/chevrotain/src/parse/exceptions_public.ts b/packages/chevrotain/src/parse/exceptions_public.ts index fdd548943..a63015287 100644 --- a/packages/chevrotain/src/parse/exceptions_public.ts +++ b/packages/chevrotain/src/parse/exceptions_public.ts @@ -32,7 +32,10 @@ abstract class RecognitionException context: IRecognizerContext; resyncedTokens: IToken[] = []; - protected constructor(message: string, public token: IToken) { + protected constructor( + message: string, + public token: IToken, + ) { super(message); // fix prototype chain when typescript target is ES5 @@ -46,14 +49,22 @@ abstract class RecognitionException } export class MismatchedTokenException extends RecognitionException { - constructor(message: string, token: IToken, public previousToken: IToken) { + constructor( + message: string, + token: IToken, + public previousToken: IToken, + ) { super(message, token); this.name = MISMATCHED_TOKEN_EXCEPTION; } } export class NoViableAltException extends RecognitionException { - constructor(message: string, token: IToken, public previousToken: IToken) { + constructor( + message: string, + token: IToken, + public previousToken: IToken, + ) { super(message, token); this.name = NO_VIABLE_ALT_EXCEPTION; } @@ -67,7 +78,11 @@ export class NotAllInputParsedException extends RecognitionException { } export class EarlyExitException extends RecognitionException { - constructor(message: string, token: IToken, public previousToken: IToken) { + constructor( + message: string, + token: IToken, + public previousToken: IToken, + ) { super(message, token); this.name = EARLY_EXIT_EXCEPTION; } diff --git a/packages/chevrotain/src/parse/grammar/checks.ts b/packages/chevrotain/src/parse/grammar/checks.ts index ded38f6c6..8e2947160 100644 --- a/packages/chevrotain/src/parse/grammar/checks.ts +++ b/packages/chevrotain/src/parse/grammar/checks.ts @@ -81,21 +81,22 @@ export function validateGrammar( topLevels: Rule[], tokenTypes: TokenType[], errMsgProvider: IGrammarValidatorErrorMessageProvider, - grammarName: string + grammarName: string, ): IParserDefinitionError[] { const duplicateErrors: IParserDefinitionError[] = flatMap( topLevels, - (currTopLevel) => validateDuplicateProductions(currTopLevel, errMsgProvider) + (currTopLevel) => + validateDuplicateProductions(currTopLevel, errMsgProvider), ); const 
termsNamespaceConflictErrors = checkTerminalAndNoneTerminalsNameSpace( topLevels, tokenTypes, - errMsgProvider + errMsgProvider, ); const tooManyAltsErrors = flatMap(topLevels, (curRule) => - validateTooManyAlts(curRule, errMsgProvider) + validateTooManyAlts(curRule, errMsgProvider), ); const duplicateRulesError = flatMap(topLevels, (curRule) => @@ -103,20 +104,20 @@ export function validateGrammar( curRule, topLevels, grammarName, - errMsgProvider - ) + errMsgProvider, + ), ); return duplicateErrors.concat( termsNamespaceConflictErrors, tooManyAltsErrors, - duplicateRulesError + duplicateRulesError, ); } function validateDuplicateProductions( topLevelRule: Rule, - errMsgProvider: IGrammarValidatorErrorMessageProvider + errMsgProvider: IGrammarValidatorErrorMessageProvider, ): IParserDuplicatesDefinitionError[] { const collectorVisitor = new OccurrenceValidationCollector(); topLevelRule.accept(collectorVisitor); @@ -124,7 +125,7 @@ function validateDuplicateProductions( const productionGroups = groupBy( allRuleProductions, - identifyProductionForDuplicates + identifyProductionForDuplicates, ); const duplicates: any = pickBy(productionGroups, (currGroup) => { @@ -135,7 +136,7 @@ function validateDuplicateProductions( const firstProd: any = first(currDuplicates); const msg = errMsgProvider.buildDuplicateFoundError( topLevelRule, - currDuplicates + currDuplicates, ); const dslName = getProductionDslName(firstProd); const defError: IParserDuplicatesDefinitionError = { @@ -157,7 +158,7 @@ function validateDuplicateProductions( } export function identifyProductionForDuplicates( - prod: IProductionWithOccurrence + prod: IProductionWithOccurrence, ): string { return `${getProductionDslName(prod)}_#_${ prod.idx @@ -194,7 +195,7 @@ export class OccurrenceValidationCollector extends GAstVisitor { } public visitRepetitionMandatoryWithSeparator( - atLeastOneSep: RepetitionMandatoryWithSeparator + atLeastOneSep: RepetitionMandatoryWithSeparator, ): void { this.allProductions.push(atLeastOneSep); } @@ -216,7 +217,7 @@ export function validateRuleDoesNotAlreadyExist( rule: Rule, allRules: Rule[], className: string, - errMsgProvider: IGrammarValidatorErrorMessageProvider + errMsgProvider: IGrammarValidatorErrorMessageProvider, ): IParserDefinitionError[] { const errors = []; const occurrences = reduce( @@ -227,7 +228,7 @@ export function validateRuleDoesNotAlreadyExist( } return result; }, - 0 + 0, ); if (occurrences > 1) { const errMsg = errMsgProvider.buildDuplicateRuleNameError({ @@ -250,7 +251,7 @@ export function validateRuleDoesNotAlreadyExist( export function validateRuleIsOverridden( ruleName: string, definedRulesNames: string[], - className: string + className: string, ): IParserDefinitionError[] { const errors = []; let errMsg; @@ -273,7 +274,7 @@ export function validateNoLeftRecursion( topRule: Rule, currRule: Rule, errMsgProvider: IGrammarValidatorErrorMessageProvider, - path: Rule[] = [] + path: Rule[] = [], ): IParserDefinitionError[] { const errors: IParserDefinitionError[] = []; const nextNonTerminals = getFirstNoneTerminal(currRule.definition); @@ -303,7 +304,7 @@ export function validateNoLeftRecursion( topRule, currRefRule, errMsgProvider, - newPath + newPath, ); }); @@ -330,14 +331,14 @@ export function getFirstNoneTerminal(definition: IProduction[]): Rule[] { firstProd instanceof Repetition ) { result = result.concat( - getFirstNoneTerminal(firstProd.definition) + getFirstNoneTerminal(firstProd.definition), ); } else if (firstProd instanceof Alternation) { // each sub definition in 
alternation is a FLAT result = flatten( map(firstProd.definition, (currSubDef) => - getFirstNoneTerminal((currSubDef).definition) - ) + getFirstNoneTerminal((currSubDef).definition), + ), ); } else if (firstProd instanceof Terminal) { // nothing to see, move along @@ -365,7 +366,7 @@ class OrCollector extends GAstVisitor { export function validateEmptyOrAlternative( topLevelRule: Rule, - errMsgProvider: IGrammarValidatorErrorMessageProvider + errMsgProvider: IGrammarValidatorErrorMessageProvider, ): IParserEmptyAlternativeDefinitionError[] { const orCollector = new OrCollector(); topLevelRule.accept(orCollector); @@ -380,7 +381,7 @@ export function validateEmptyOrAlternative( [currAlternative], [], tokenStructuredMatcher, - 1 + 1, ); if (isEmpty(possibleFirstInAlt)) { return [ @@ -400,7 +401,7 @@ export function validateEmptyOrAlternative( return []; } }); - } + }, ); return errors; @@ -409,7 +410,7 @@ export function validateEmptyOrAlternative( export function validateAmbiguousAlternationAlternatives( topLevelRule: Rule, globalMaxLookahead: number, - errMsgProvider: IGrammarValidatorErrorMessageProvider + errMsgProvider: IGrammarValidatorErrorMessageProvider, ): IParserAmbiguousAlternativesDefinitionError[] { const orCollector = new OrCollector(); topLevelRule.accept(orCollector); @@ -426,19 +427,19 @@ export function validateAmbiguousAlternationAlternatives( currOccurrence, topLevelRule, actualMaxLookahead, - currOr + currOr, ); const altsAmbiguityErrors = checkAlternativesAmbiguities( alternatives, currOr, topLevelRule, - errMsgProvider + errMsgProvider, ); const altsPrefixAmbiguityErrors = checkPrefixAlternativesAmbiguities( alternatives, currOr, topLevelRule, - errMsgProvider + errMsgProvider, ); return altsAmbiguityErrors.concat(altsPrefixAmbiguityErrors); @@ -461,7 +462,7 @@ export class RepetitionCollector extends GAstVisitor { } public visitRepetitionMandatoryWithSeparator( - atLeastOneSep: RepetitionMandatoryWithSeparator + atLeastOneSep: RepetitionMandatoryWithSeparator, ): void { this.allProductions.push(atLeastOneSep); } @@ -473,7 +474,7 @@ export class RepetitionCollector extends GAstVisitor { export function validateTooManyAlts( topLevelRule: Rule, - errMsgProvider: IGrammarValidatorErrorMessageProvider + errMsgProvider: IGrammarValidatorErrorMessageProvider, ): IParserDefinitionError[] { const orCollector = new OrCollector(); topLevelRule.accept(orCollector); @@ -503,7 +504,7 @@ export function validateTooManyAlts( export function validateSomeNonEmptyLookaheadPath( topLevelRules: Rule[], maxLookahead: number, - errMsgProvider: IGrammarValidatorErrorMessageProvider + errMsgProvider: IGrammarValidatorErrorMessageProvider, ): IParserDefinitionError[] { const errors: IParserDefinitionError[] = []; forEach(topLevelRules, (currTopRule) => { @@ -518,7 +519,7 @@ export function validateSomeNonEmptyLookaheadPath( currOccurrence, currTopRule, prodType, - actualMaxLookahead + actualMaxLookahead, ); const pathsInsideProduction = paths[0]; if (isEmpty(flatten(pathsInsideProduction))) { @@ -547,7 +548,7 @@ function checkAlternativesAmbiguities( alternatives: Alternative[], alternation: Alternation, rule: Rule, - errMsgProvider: IGrammarValidatorErrorMessageProvider + errMsgProvider: IGrammarValidatorErrorMessageProvider, ): IParserAmbiguousAlternativesDefinitionError[] { const foundAmbiguousPaths: Alternative = []; const identicalAmbiguities = reduce( @@ -584,13 +585,13 @@ function checkAlternativesAmbiguities( }); return result; }, - [] as { alts: number[]; path: TokenType[] }[] + [] as 
{ alts: number[]; path: TokenType[] }[], ); const currErrors = map(identicalAmbiguities, (currAmbDescriptor) => { const ambgIndices = map( currAmbDescriptor.alts, - (currAltIdx) => currAltIdx + 1 + (currAltIdx) => currAltIdx + 1, ); const currMessage = errMsgProvider.buildAlternationAmbiguityError({ @@ -616,7 +617,7 @@ export function checkPrefixAlternativesAmbiguities( alternatives: Alternative[], alternation: Alternation, rule: Rule, - errMsgProvider: IGrammarValidatorErrorMessageProvider + errMsgProvider: IGrammarValidatorErrorMessageProvider, ): IParserAmbiguousAlternativesDefinitionError[] { // flatten const pathsAndIndices = reduce( @@ -627,7 +628,7 @@ export function checkPrefixAlternativesAmbiguities( }); return result.concat(currPathsAndIdx); }, - [] as { idx: number; path: TokenType[] }[] + [] as { idx: number; path: TokenType[] }[], ); const errors = compact( @@ -653,7 +654,7 @@ export function checkPrefixAlternativesAmbiguities( // will be be detected using a different validation. isStrictPrefixOfPath(searchPathAndIdx.path, targetPath) ); - } + }, ); const currPathPrefixErrors = map( @@ -675,11 +676,11 @@ export function checkPrefixAlternativesAmbiguities( occurrence: occurrence, alternatives: ambgIndices, }; - } + }, ); return currPathPrefixErrors; - }) + }), ); return errors; @@ -688,7 +689,7 @@ export function checkPrefixAlternativesAmbiguities( function checkTerminalAndNoneTerminalsNameSpace( topLevels: Rule[], tokenTypes: TokenType[], - errMsgProvider: IGrammarValidatorErrorMessageProvider + errMsgProvider: IGrammarValidatorErrorMessageProvider, ): IParserDefinitionError[] { const errors: IParserDefinitionError[] = []; diff --git a/packages/chevrotain/src/parse/grammar/first.ts b/packages/chevrotain/src/parse/grammar/first.ts index 5f3a7105c..dd9fc6d3d 100644 --- a/packages/chevrotain/src/parse/grammar/first.ts +++ b/packages/chevrotain/src/parse/grammar/first.ts @@ -60,7 +60,7 @@ export function firstForBranching(prod: { prod.definition, (innerProd) => { return first(innerProd); - } + }, ); return uniq(flatten(allAlternativesFirsts)); } diff --git a/packages/chevrotain/src/parse/grammar/follow.ts b/packages/chevrotain/src/parse/grammar/follow.ts index 660917334..ac1a64e7a 100644 --- a/packages/chevrotain/src/parse/grammar/follow.ts +++ b/packages/chevrotain/src/parse/grammar/follow.ts @@ -22,7 +22,7 @@ export class ResyncFollowsWalker extends RestWalker { walkTerminal( terminal: Terminal, currRest: IProduction[], - prevRest: IProduction[] + prevRest: IProduction[], ): void { // do nothing! 
just like in the public sector after 13:00 } @@ -30,7 +30,7 @@ export class ResyncFollowsWalker extends RestWalker { walkProdRef( refProd: NonTerminal, currRest: IProduction[], - prevRest: IProduction[] + prevRest: IProduction[], ): void { const followName = buildBetweenProdsFollowPrefix(refProd.referencedRule, refProd.idx) + @@ -43,7 +43,7 @@ export class ResyncFollowsWalker extends RestWalker { } export function computeAllProdsFollows( - topProductions: Rule[] + topProductions: Rule[], ): Record { const reSyncFollows = {}; @@ -56,7 +56,7 @@ export function computeAllProdsFollows( export function buildBetweenProdsFollowPrefix( inner: Rule, - occurenceInParent: number + occurenceInParent: number, ): string { return inner.name + occurenceInParent + IN; } diff --git a/packages/chevrotain/src/parse/grammar/gast/gast_resolver_public.ts b/packages/chevrotain/src/parse/grammar/gast/gast_resolver_public.ts index f7ce7c01f..0ef62f9f8 100644 --- a/packages/chevrotain/src/parse/grammar/gast/gast_resolver_public.ts +++ b/packages/chevrotain/src/parse/grammar/gast/gast_resolver_public.ts @@ -18,7 +18,7 @@ type ResolveGrammarOpts = { errMsgProvider?: IGrammarResolverErrorMessageProvider; }; export function resolveGrammar( - options: ResolveGrammarOpts + options: ResolveGrammarOpts, ): IParserDefinitionError[] { const actualOptions: Required = defaults(options, { errMsgProvider: defaultGrammarResolverErrorProvider, @@ -45,6 +45,6 @@ export function validateGrammar(options: { options.rules, options.tokenTypes, options.errMsgProvider, - options.grammarName + options.grammarName, ); } diff --git a/packages/chevrotain/src/parse/grammar/interpreter.ts b/packages/chevrotain/src/parse/grammar/interpreter.ts index 7ad6679d8..37804e90e 100644 --- a/packages/chevrotain/src/parse/grammar/interpreter.ts +++ b/packages/chevrotain/src/parse/grammar/interpreter.ts @@ -41,7 +41,10 @@ export abstract class AbstractNextPossibleTokensWalker extends RestWalker { protected found = false; protected isAtEndOfPath = false; - constructor(protected topProd: Rule, protected path: IGrammarPath) { + constructor( + protected topProd: Rule, + protected path: IGrammarPath, + ) { super(); } @@ -68,7 +71,7 @@ export abstract class AbstractNextPossibleTokensWalker extends RestWalker { walk( prod: { definition: IProduction[] }, - prevRest: IProduction[] = [] + prevRest: IProduction[] = [], ): void { // stop scanning once we found the path if (!this.found) { @@ -79,7 +82,7 @@ export abstract class AbstractNextPossibleTokensWalker extends RestWalker { walkProdRef( refProd: NonTerminal, currRest: IProduction[], - prevRest: IProduction[] + prevRest: IProduction[], ): void { // found the next production, need to keep walking in it if ( @@ -111,7 +114,10 @@ export class NextAfterTokenWalker extends AbstractNextPossibleTokensWalker { private nextTerminalName = ""; private nextTerminalOccurrence = 0; - constructor(topProd: Rule, protected path: ITokenGrammarPath) { + constructor( + topProd: Rule, + protected path: ITokenGrammarPath, + ) { super(topProd, path); this.nextTerminalName = this.path.lastTok.name; this.nextTerminalOccurrence = this.path.lastTokOccurrence; @@ -120,7 +126,7 @@ export class NextAfterTokenWalker extends AbstractNextPossibleTokensWalker { walkTerminal( terminal: Terminal, currRest: IProduction[], - prevRest: IProduction[] + prevRest: IProduction[], ): void { if ( this.isAtEndOfPath && @@ -155,7 +161,10 @@ export class AbstractNextTerminalAfterProductionWalker extends RestWalker { isEndOfRule: undefined, }; - 
constructor(protected topRule: Rule, protected occurrence: number) { + constructor( + protected topRule: Rule, + protected occurrence: number, + ) { super(); } @@ -169,7 +178,7 @@ export class NextTerminalAfterManyWalker extends AbstractNextTerminalAfterProduc walkMany( manyProd: Repetition, currRest: IProduction[], - prevRest: IProduction[] + prevRest: IProduction[], ): void { if (manyProd.idx === this.occurrence) { const firstAfterMany = _first(currRest.concat(prevRest)); @@ -188,7 +197,7 @@ export class NextTerminalAfterManySepWalker extends AbstractNextTerminalAfterPro walkManySep( manySepProd: RepetitionWithSeparator, currRest: IProduction[], - prevRest: IProduction[] + prevRest: IProduction[], ): void { if (manySepProd.idx === this.occurrence) { const firstAfterManySep = _first(currRest.concat(prevRest)); @@ -207,7 +216,7 @@ export class NextTerminalAfterAtLeastOneWalker extends AbstractNextTerminalAfter walkAtLeastOne( atLeastOneProd: RepetitionMandatory, currRest: IProduction[], - prevRest: IProduction[] + prevRest: IProduction[], ): void { if (atLeastOneProd.idx === this.occurrence) { const firstAfterAtLeastOne = _first(currRest.concat(prevRest)); @@ -227,11 +236,11 @@ export class NextTerminalAfterAtLeastOneSepWalker extends AbstractNextTerminalAf walkAtLeastOneSep( atleastOneSepProd: RepetitionMandatoryWithSeparator, currRest: IProduction[], - prevRest: IProduction[] + prevRest: IProduction[], ): void { if (atleastOneSepProd.idx === this.occurrence) { const firstAfterfirstAfterAtLeastOneSep = _first( - currRest.concat(prevRest) + currRest.concat(prevRest), ); this.result.isEndOfRule = firstAfterfirstAfterAtLeastOneSep === undefined; if (firstAfterfirstAfterAtLeastOneSep instanceof Terminal) { @@ -252,7 +261,7 @@ export interface PartialPathAndSuffixes { export function possiblePathsFrom( targetDef: IProduction[], maxLength: number, - currPath: TokenType[] = [] + currPath: TokenType[] = [], ): PartialPathAndSuffixes[] { // avoid side effects currPath = clone(currPath); @@ -269,7 +278,7 @@ export function possiblePathsFrom( const alternatives = possiblePathsFrom( remainingPathWith(definition), maxLength, - currPath + currPath, ); return result.concat(alternatives); } @@ -303,7 +312,7 @@ export function possiblePathsFrom( new Alternative({ definition: prod.definition }), new Repetition({ definition: [new Terminal({ terminalType: prod.separator })].concat( - prod.definition + prod.definition, ), }), ]; @@ -312,7 +321,7 @@ export function possiblePathsFrom( const newDef = prod.definition.concat([ new Repetition({ definition: [new Terminal({ terminalType: prod.separator })].concat( - prod.definition + prod.definition, ), }), ]); @@ -361,7 +370,7 @@ export function nextPossibleTokensAfter( initialDef: IProduction[], tokenVector: IToken[], tokMatcher: TokenMatcher, - maxLookAhead: number + maxLookAhead: number, ): ISyntacticContentAssistPath[] { const EXIT_NON_TERMINAL: any = "EXIT_NONE_TERMINAL"; // to avoid creating a new Array each time. @@ -583,7 +592,7 @@ export function nextPossibleTokensAfter( } else if (prod instanceof Rule) { // last because we should only encounter at most a single one of these per invocation. 
       possiblePaths.push(
-        expandTopLevelRule(prod, currIdx, currRuleStack, currOccurrenceStack)
+        expandTopLevelRule(prod, currIdx, currRuleStack, currOccurrenceStack),
       );
     } else {
       throw Error("non exhaustive match");
     }
@@ -596,7 +605,7 @@ function expandTopLevelRule(
   topRule: Rule,
   currIdx: number,
   currRuleStack: string[],
-  currOccurrenceStack: number[]
+  currOccurrenceStack: number[],
 ): IPathToExamine {
   const newRuleStack = clone(currRuleStack);
   newRuleStack.push(topRule.name);
diff --git a/packages/chevrotain/src/parse/grammar/keys.ts b/packages/chevrotain/src/parse/grammar/keys.ts
index 7320d591e..25b7ca772 100644
--- a/packages/chevrotain/src/parse/grammar/keys.ts
+++ b/packages/chevrotain/src/parse/grammar/keys.ts
@@ -25,7 +25,7 @@ export const AT_LEAST_ONE_SEP_IDX = 6 << BITS_FOR_OCCURRENCE_IDX;
 export function getKeyForAutomaticLookahead(
   ruleIdx: number,
   dslMethodIdx: number,
-  occurrence: number
+  occurrence: number,
 ): number {
   return occurrence | dslMethodIdx | ruleIdx;
 }
diff --git a/packages/chevrotain/src/parse/grammar/llk_lookahead.ts b/packages/chevrotain/src/parse/grammar/llk_lookahead.ts
index c6837561f..bd5a70848 100644
--- a/packages/chevrotain/src/parse/grammar/llk_lookahead.ts
+++ b/packages/chevrotain/src/parse/grammar/llk_lookahead.ts
@@ -43,11 +43,11 @@ export class LLkLookaheadStrategy implements ILookaheadStrategy {
     const emptyAltErrors = this.validateEmptyOrAlternatives(options.rules);
     const ambiguousAltsErrors = this.validateAmbiguousAlternationAlternatives(
       options.rules,
-      this.maxLookahead
+      this.maxLookahead,
     );
     const emptyRepetitionErrors = this.validateSomeNonEmptyLookaheadPath(
       options.rules,
-      this.maxLookahead
+      this.maxLookahead,
     );
     const allErrors = [
       ...leftRecursionErrors,
@@ -65,8 +65,8 @@ export class LLkLookaheadStrategy implements ILookaheadStrategy {
       validateNoLeftRecursion(
         currTopRule,
         currTopRule,
-        defaultGrammarValidatorErrorProvider
-      )
+        defaultGrammarValidatorErrorProvider,
+      ),
     );
   }
 
@@ -74,32 +74,32 @@ export class LLkLookaheadStrategy implements ILookaheadStrategy {
     return flatMap(rules, (currTopRule) =>
       validateEmptyOrAlternative(
         currTopRule,
-        defaultGrammarValidatorErrorProvider
-      )
+        defaultGrammarValidatorErrorProvider,
+      ),
     );
   }
 
   validateAmbiguousAlternationAlternatives(
     rules: Rule[],
-    maxLookahead: number
+    maxLookahead: number,
   ): IParserDefinitionError[] {
     return flatMap(rules, (currTopRule) =>
       validateAmbiguousAlternationAlternatives(
         currTopRule,
         maxLookahead,
-        defaultGrammarValidatorErrorProvider
-      )
+        defaultGrammarValidatorErrorProvider,
+      ),
     );
   }
 
   validateSomeNonEmptyLookaheadPath(
     rules: Rule[],
-    maxLookahead: number
+    maxLookahead: number,
   ): IParserDefinitionError[] {
     return validateSomeNonEmptyLookaheadPath(
       rules,
       maxLookahead,
-      defaultGrammarValidatorErrorProvider
+      defaultGrammarValidatorErrorProvider,
     );
   }
 
@@ -116,7 +116,7 @@ export class LLkLookaheadStrategy implements ILookaheadStrategy {
       options.maxLookahead,
       options.hasPredicates,
       options.dynamicTokensEnabled,
-      buildAlternativesLookAheadFunc
+      buildAlternativesLookAheadFunc,
     );
   }
 
@@ -133,7 +133,7 @@ export class LLkLookaheadStrategy implements ILookaheadStrategy {
       options.maxLookahead,
       options.dynamicTokensEnabled,
       getProdType(options.prodType),
-      buildSingleAlternativeLookaheadFunction
+      buildSingleAlternativeLookaheadFunction,
     );
   }
 }
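// ---------------------------------------------------------------------------
// An illustrative aside on the keys.ts hunk above: getKeyForAutomaticLookahead
// packs (rule, DSL method, occurrence) into one integer cache key, with the
// occurrence index in the low bits, the method index above it, and the short
// rule index above both, so a plain `|` yields a collision-free key. The bit
// widths and constants below are simplified stand-ins for this sketch, not
// chevrotain's actual values.
const BITS_FOR_OCCURRENCE = 8;
const BITS_FOR_METHOD = 4;

// method indices pre-shifted past the occurrence bits, as in keys.ts
const OR_METHOD = 1 << BITS_FOR_OCCURRENCE;
const OPTION_METHOD = 2 << BITS_FOR_OCCURRENCE;

function packLookaheadKey(
  ruleIdx: number, // assumed already shifted above the method/occurrence bits
  dslMethodIdx: number,
  occurrence: number,
): number {
  // the three bit ranges never overlap, so `|` behaves like addition here
  return occurrence | dslMethodIdx | ruleIdx;
}

// e.g. rule #3, an OR alternation, occurrence 2:
const exampleRuleIdx = 3 << (BITS_FOR_OCCURRENCE + BITS_FOR_METHOD);
const exampleKey = packLookaheadKey(exampleRuleIdx, OR_METHOD, 2);
// ---------------------------------------------------------------------------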
diff --git a/packages/chevrotain/src/parse/grammar/lookahead.ts b/packages/chevrotain/src/parse/grammar/lookahead.ts
index 775a4cf73..de4870cd9 100644
--- a/packages/chevrotain/src/parse/grammar/lookahead.ts
+++ b/packages/chevrotain/src/parse/grammar/lookahead.ts
@@ -37,7 +37,7 @@ export enum PROD_TYPE {
 }
 
 export function getProdType(
-  prod: IProduction | LookaheadProductionType
+  prod: IProduction | LookaheadProductionType,
 ): PROD_TYPE {
   /* istanbul ignore else */
   if (prod instanceof Option || prod === "Option") {
@@ -81,7 +81,7 @@ export function getLookaheadPaths(options: {
       occurrence,
       rule,
       type,
-      maxLookahead
+      maxLookahead,
     );
   }
 }
@@ -92,12 +92,12 @@ export function buildLookaheadFuncForOr(
   maxLookahead: number,
   hasPredicates: boolean,
   dynamicTokensEnabled: boolean,
-  laFuncBuilder: Function
+  laFuncBuilder: Function,
 ): (orAlts?: IOrAlt<any>[]) => number | undefined {
   const lookAheadPaths = getLookaheadPathsForOr(
     occurrence,
     ruleGrammar,
-    maxLookahead
+    maxLookahead,
   );
 
   const tokenMatcher = areTokenCategoriesNotUsed(lookAheadPaths)
@@ -108,7 +108,7 @@ export function buildLookaheadFuncForOr(
     lookAheadPaths,
     hasPredicates,
     tokenMatcher,
-    dynamicTokensEnabled
+    dynamicTokensEnabled,
   );
 }
 
@@ -133,14 +133,14 @@ export function buildLookaheadFuncForOptionalProd(
   lookaheadBuilder: (
     lookAheadSequence: LookaheadSequence,
     tokenMatcher: TokenMatcher,
-    dynamicTokensEnabled: boolean
-  ) => () => boolean
+    dynamicTokensEnabled: boolean,
+  ) => () => boolean,
 ): () => boolean {
   const lookAheadPaths = getLookaheadPathsForOptionalProd(
     occurrence,
     ruleGrammar,
     prodType,
-    k
+    k,
   );
 
   const tokenMatcher = areTokenCategoriesNotUsed(lookAheadPaths)
@@ -150,7 +150,7 @@ export function buildLookaheadFuncForOptionalProd(
   return lookaheadBuilder(
     lookAheadPaths[0],
     tokenMatcher,
-    dynamicTokensEnabled
+    dynamicTokensEnabled,
   );
 }
 
@@ -160,7 +160,7 @@ export function buildAlternativesLookAheadFunc(
   alts: LookaheadSequence[],
   hasPredicates: boolean,
   tokenMatcher: TokenMatcher,
-  dynamicTokensEnabled: boolean
+  dynamicTokensEnabled: boolean,
 ): (orAlts: IOrAlt<any>[]) => number | undefined {
   const numOfAlts = alts.length;
   const areAllOneTokenLookahead = every(alts, (currAlt) => {
@@ -176,14 +176,14 @@ export function buildAlternativesLookAheadFunc(
      */
     return function (
       this: BaseParser,
-      orAlts: IOrAlt<any>[]
+      orAlts: IOrAlt<any>[],
     ): number | undefined {
       // unfortunately the predicates must be extracted every single time
       // as they cannot be cached due to references to parameters(vars) which are no longer valid.
      // note that in the common case of no predicates, no cpu time will be wasted on this (see else block)
       const predicates: (Predicate | undefined)[] = map(
         orAlts,
-        (currAlt) => currAlt.GATE
+        (currAlt) => currAlt.GATE,
       );
 
       for (let t = 0; t < numOfAlts; t++) {
@@ -238,7 +238,7 @@ export function buildAlternativesLookAheadFunc(
         });
         return result;
       },
-      {} as Record<number, number>
+      {} as Record<number, number>,
     );
 
     /**
@@ -285,7 +285,7 @@ export function buildAlternativesLookAheadFunc(
 export function buildSingleAlternativeLookaheadFunction(
   alt: LookaheadSequence,
   tokenMatcher: TokenMatcher,
-  dynamicTokensEnabled: boolean
+  dynamicTokensEnabled: boolean,
 ): () => boolean {
   const areAllOneTokenLookahead = every(alt, (currPath) => {
     return currPath.length === 1;
@@ -318,7 +318,7 @@ export function buildSingleAlternativeLookaheadFunction(
         });
         return result;
       },
-      [] as boolean[]
+      [] as boolean[],
     );
 
     return function (this: BaseParser): boolean {
@@ -355,7 +355,7 @@ class RestDefinitionFinderWalker extends RestWalker {
   constructor(
     private topProd: Rule,
     private targetOccurrence: number,
-    private targetProdType: PROD_TYPE
+    private targetProdType: PROD_TYPE,
   ) {
     super();
   }
@@ -369,7 +369,7 @@ class RestDefinitionFinderWalker extends RestWalker {
     node: IProductionWithOccurrence,
     expectedProdType: PROD_TYPE,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): boolean {
     if (
       node.idx === this.targetOccurrence &&
@@ -385,7 +385,7 @@ class RestDefinitionFinderWalker extends RestWalker {
   walkOption(
     optionProd: Option,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {
     if (!this.checkIsTarget(optionProd, PROD_TYPE.OPTION, currRest, prevRest)) {
       super.walkOption(optionProd, currRest, prevRest);
@@ -395,14 +395,14 @@ class RestDefinitionFinderWalker extends RestWalker {
   walkAtLeastOne(
     atLeastOneProd: RepetitionMandatory,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {
     if (
       !this.checkIsTarget(
         atLeastOneProd,
         PROD_TYPE.REPETITION_MANDATORY,
         currRest,
-        prevRest
+        prevRest,
       )
     ) {
       super.walkOption(atLeastOneProd, currRest, prevRest);
@@ -412,14 +412,14 @@ class RestDefinitionFinderWalker extends RestWalker {
   walkAtLeastOneSep(
     atLeastOneSepProd: RepetitionMandatoryWithSeparator,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {
     if (
       !this.checkIsTarget(
         atLeastOneSepProd,
         PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR,
         currRest,
-        prevRest
+        prevRest,
       )
     ) {
       super.walkOption(atLeastOneSepProd, currRest, prevRest);
@@ -429,7 +429,7 @@ class RestDefinitionFinderWalker extends RestWalker {
   walkMany(
     manyProd: Repetition,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {
     if (
       !this.checkIsTarget(manyProd, PROD_TYPE.REPETITION, currRest, prevRest)
@@ -441,14 +441,14 @@ class RestDefinitionFinderWalker extends RestWalker {
   walkManySep(
     manySepProd: RepetitionWithSeparator,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {
     if (
       !this.checkIsTarget(
         manySepProd,
         PROD_TYPE.REPETITION_WITH_SEPARATOR,
         currRest,
-        prevRest
+        prevRest,
       )
     ) {
       super.walkOption(manySepProd, currRest, prevRest);
@@ -465,14 +465,14 @@ class InsideDefinitionFinderVisitor extends GAstVisitor {
   constructor(
     private targetOccurrence: number,
     private targetProdType: PROD_TYPE,
-    private targetRef?: any
+    private targetRef?: any,
   ) {
     super();
   }
 
   private checkIsTarget(
     node: { definition: IProduction[] } & IProductionWithOccurrence,
-    expectedProdName: PROD_TYPE
+    expectedProdName: PROD_TYPE,
   ): void {
     if (
       node.idx === this.targetOccurrence &&
@@ -496,7 +496,7 @@ class InsideDefinitionFinderVisitor extends GAstVisitor {
   }
 
   public visitRepetitionMandatoryWithSeparator(
-    node: RepetitionMandatoryWithSeparator
+    node: RepetitionMandatoryWithSeparator,
   ): void {
     this.checkIsTarget(node, PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR);
   }
@@ -547,7 +547,7 @@ function pathToHashKeys(path: TokenType[]): string[] {
 function isUniquePrefixHash(
   altKnownPathsKeys: Record<string, boolean>[],
   searchPathKeys: string[],
-  idx: number
+  idx: number,
 ): boolean {
   for (
     let currAltIdx = 0;
@@ -572,10 +572,10 @@ function isUniquePrefixHash(
 
 export function lookAheadSequenceFromAlternatives(
   altsDefs: IProduction[],
-  k: number
+  k: number,
 ): LookaheadSequence[] {
   const partialAlts = map(altsDefs, (currAlt) =>
-    possiblePathsFrom([currAlt], 1)
+    possiblePathsFrom([currAlt], 1),
   );
   const finalResult = initializeArrayOfArrays(partialAlts.length);
   const altsHashes = map(partialAlts, (currAltPaths) => {
@@ -626,7 +626,7 @@ export function lookAheadSequenceFromAlternatives(
       const newPartialPathsAndSuffixes = possiblePathsFrom(
         suffixDef,
         pathLength + 1,
-        currPathPrefix
+        currPathPrefix,
       );
       newData[altIdx] = newData[altIdx].concat(newPartialPathsAndSuffixes);
 
@@ -649,12 +649,12 @@ export function getLookaheadPathsForOr(
   occurrence: number,
   ruleGrammar: Rule,
   k: number,
-  orProd?: Alternation
+  orProd?: Alternation,
 ): LookaheadSequence[] {
   const visitor = new InsideDefinitionFinderVisitor(
     occurrence,
     PROD_TYPE.ALTERNATION,
-    orProd
+    orProd,
   );
   ruleGrammar.accept(visitor);
   return lookAheadSequenceFromAlternatives(visitor.result, k);
@@ -664,11 +664,11 @@ export function getLookaheadPathsForOptionalProd(
   occurrence: number,
   ruleGrammar: Rule,
   prodType: PROD_TYPE,
-  k: number
+  k: number,
 ): LookaheadSequence[] {
   const insideDefVisitor = new InsideDefinitionFinderVisitor(
     occurrence,
-    prodType
+    prodType,
   );
   ruleGrammar.accept(insideDefVisitor);
   const insideDef = insideDefVisitor.result;
@@ -676,7 +676,7 @@ export function getLookaheadPathsForOptionalProd(
   const afterDefWalker = new RestDefinitionFinderWalker(
     ruleGrammar,
     occurrence,
-    prodType
+    prodType,
   );
   const afterDef = afterDefWalker.startWalking();
 
@@ -688,7 +688,7 @@ export function getLookaheadPathsForOptionalProd(
 
 export function containsPath(
   alternative: Alternative,
-  searchPath: TokenType[]
+  searchPath: TokenType[],
 ): boolean {
   compareOtherPath: for (let i = 0; i < alternative.length; i++) {
     const otherPath = alternative[i];
@@ -714,7 +714,7 @@ export function containsPath(
 
 export function isStrictPrefixOfPath(
   prefix: TokenType[],
-  other: TokenType[]
+  other: TokenType[],
 ): boolean {
   return (
     prefix.length < other.length &&
@@ -729,11 +729,11 @@ export function isStrictPrefixOfPath(
 }
 
 export function areTokenCategoriesNotUsed(
-  lookAheadPaths: LookaheadSequence[]
+  lookAheadPaths: LookaheadSequence[],
 ): boolean {
   return every(lookAheadPaths, (singleAltPaths) =>
     every(singleAltPaths, (singlePath) =>
-      every(singlePath, (token) => isEmpty(token.categoryMatches!))
-    )
+      every(singlePath, (token) => isEmpty(token.categoryMatches!)),
+    ),
   );
 }
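// ---------------------------------------------------------------------------
// A small sketch of the path predicates in the lookahead.ts hunks above
// (containsPath / isStrictPrefixOfPath), using a simplified token shape with
// identity comparison only; chevrotain's real TokenType comparison goes
// through tokenTypeIdx and category matches, which this sketch omits.
interface FakeTokenType {
  name: string;
}

const TokA: FakeTokenType = { name: "A" };
const TokB: FakeTokenType = { name: "B" };
const TokC: FakeTokenType = { name: "C" };

// a strict prefix must be shorter than the other path and match element-wise
function strictPrefix(
  prefix: FakeTokenType[],
  other: FakeTokenType[],
): boolean {
  return (
    prefix.length < other.length && prefix.every((tok, i) => tok === other[i])
  );
}

console.log(strictPrefix([TokA, TokB], [TokA, TokB, TokC])); // true
console.log(strictPrefix([TokA, TokB, TokC], [TokA, TokB, TokC])); // false - equal length, not strict
// ---------------------------------------------------------------------------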
diff --git a/packages/chevrotain/src/parse/grammar/resolver.ts b/packages/chevrotain/src/parse/grammar/resolver.ts
index 6489b1c26..6038e7b82 100644
--- a/packages/chevrotain/src/parse/grammar/resolver.ts
+++ b/packages/chevrotain/src/parse/grammar/resolver.ts
@@ -11,7 +11,7 @@ import {
 
 export function resolveGrammar(
   topLevels: Record<string, Rule>,
-  errMsgProvider: IGrammarResolverErrorMessageProvider
+  errMsgProvider: IGrammarResolverErrorMessageProvider,
 ): IParserDefinitionError[] {
   const refResolver = new GastRefResolverVisitor(topLevels, errMsgProvider);
   refResolver.resolveRefs();
@@ -24,7 +24,7 @@ export class GastRefResolverVisitor extends GAstVisitor {
 
   constructor(
     private nameToTopRule: Record<string, Rule>,
-    private errMsgProvider: IGrammarResolverErrorMessageProvider
+    private errMsgProvider: IGrammarResolverErrorMessageProvider,
   ) {
     super();
   }
@@ -42,7 +42,7 @@ export class GastRefResolverVisitor extends GAstVisitor {
     if (!ref) {
       const msg = this.errMsgProvider.buildRuleNotFoundError(
         this.currTopLevel,
-        node
+        node,
       );
       this.errors.push({
         message: msg,
diff --git a/packages/chevrotain/src/parse/grammar/rest.ts b/packages/chevrotain/src/parse/grammar/rest.ts
index 31deea501..c77ef5075 100644
--- a/packages/chevrotain/src/parse/grammar/rest.ts
+++ b/packages/chevrotain/src/parse/grammar/rest.ts
@@ -47,19 +47,19 @@ export abstract class RestWalker {
   walkTerminal(
     terminal: Terminal,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {}
 
   walkProdRef(
     refProd: NonTerminal,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {}
 
   walkFlat(
     flatProd: Alternative,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {
     // ABCDEF => after the D the rest is EF
     const fullOrRest = currRest.concat(prevRest);
@@ -69,7 +69,7 @@ export abstract class RestWalker {
   walkOption(
     optionProd: Option,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {
     // ABC(DE)?F => after the (DE)? the rest is F
     const fullOrRest = currRest.concat(prevRest);
@@ -79,7 +79,7 @@ export abstract class RestWalker {
   walkAtLeastOne(
     atLeastOneProd: RepetitionMandatory,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {
     // ABC(DE)+F => after the (DE)+ the rest is (DE)?F
     const fullAtLeastOneRest: IProduction[] = [
@@ -91,13 +91,13 @@ export abstract class RestWalker {
   walkAtLeastOneSep(
     atLeastOneSepProd: RepetitionMandatoryWithSeparator,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {
     // ABC DE(,DE)* F => after the (,DE)+ the rest is (,DE)?F
     const fullAtLeastOneSepRest = restForRepetitionWithSeparator(
       atLeastOneSepProd,
       currRest,
-      prevRest
+      prevRest,
     );
     this.walk(atLeastOneSepProd, fullAtLeastOneSepRest);
   }
@@ -105,7 +105,7 @@ export abstract class RestWalker {
   walkMany(
     manyProd: Repetition,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {
     // ABC(DE)*F => after the (DE)* the rest is (DE)?F
     const fullManyRest: IProduction[] = [
@@ -117,13 +117,13 @@ export abstract class RestWalker {
   walkManySep(
     manySepProd: RepetitionWithSeparator,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {
     // ABC (DE(,DE)*)? F => after the (,DE)* the rest is (,DE)?F
     const fullManySepRest = restForRepetitionWithSeparator(
       manySepProd,
       currRest,
-      prevRest
+      prevRest,
     );
     this.walk(manySepProd, fullManySepRest);
   }
@@ -131,7 +131,7 @@ export abstract class RestWalker {
   walkOr(
     orProd: Alternation,
     currRest: IProduction[],
-    prevRest: IProduction[]
+    prevRest: IProduction[],
   ): void {
     // ABC(D|E|F)G => when finding the (D|E|F) the rest is G
     const fullOrRest = currRest.concat(prevRest);
@@ -149,7 +149,7 @@ export abstract class RestWalker {
 function restForRepetitionWithSeparator(
   repSepProd: RepetitionWithSeparator,
   currRest: IProduction[],
-  prevRest: IProduction[]
+  prevRest: IProduction[],
 ) {
   const repSepRest = [
     new Option({
diff --git a/packages/chevrotain/src/parse/grammar/types.ts b/packages/chevrotain/src/parse/grammar/types.ts
index f00a55355..c1629dc0e 100644
--- a/packages/chevrotain/src/parse/grammar/types.ts
+++ b/packages/chevrotain/src/parse/grammar/types.ts
@@ -32,7 +32,7 @@ export declare enum ParserDefinitionErrorType {
 export interface IGrammarValidatorErrorMessageProvider {
   buildDuplicateFoundError(
     topLevelRule: Rule,
-    duplicateProds: IProductionWithOccurrence[]
+    duplicateProds: IProductionWithOccurrence[],
   ): string;
   buildNamespaceConflictError(topLevelRule: Rule): string;
   buildAlternationPrefixAmbiguityError(options: {
@@ -92,6 +92,6 @@ export interface IGrammarValidatorErrorMessageProvider {
 export interface IGrammarResolverErrorMessageProvider {
   buildRuleNotFoundError(
     topLevelRule: Rule,
-    undefinedRule: NonTerminal
+    undefinedRule: NonTerminal,
   ): string;
 }
diff --git a/packages/chevrotain/src/parse/parser/parser.ts b/packages/chevrotain/src/parse/parser/parser.ts
index 509fc390c..c46c13b45 100644
--- a/packages/chevrotain/src/parse/parser/parser.ts
+++ b/packages/chevrotain/src/parse/parser/parser.ts
@@ -45,7 +45,7 @@ export const END_OF_FILE = createTokenInstance(
   NaN,
   NaN,
   NaN,
-  NaN
+  NaN,
 );
 Object.freeze(END_OF_FILE);
 
@@ -142,7 +142,7 @@ export class Parser {
   static performSelfAnalysis(parserInstance: Parser): void {
     throw Error(
       "The **static** `performSelfAnalysis` method has been deprecated." +
-        "\t\nUse the **instance** method with the same name instead."
+ "\t\nUse the **instance** method with the same name instead.", ); } @@ -173,7 +173,7 @@ export class Parser { this.TRACE_INIT(`${currRuleName} Rule`, () => { recordedRuleGast = this.topLevelRuleRecord( currRuleName, - originalGrammarAction + originalGrammarAction, ); }); this.gastProductionsCache[currRuleName] = recordedRuleGast; @@ -209,7 +209,7 @@ export class Parser { }); this.definitionErrors = this.definitionErrors.concat( validationErrors, - lookaheadValidationErrors + lookaheadValidationErrors, ); } }); @@ -220,7 +220,7 @@ export class Parser { if (this.recoveryEnabled) { this.TRACE_INIT("computeAllProdsFollows", () => { const allFollows = computeAllProdsFollows( - values(this.gastProductionsCache) + values(this.gastProductionsCache), ); this.resyncFollows = allFollows; }); @@ -240,12 +240,12 @@ export class Parser { ) { defErrorsMsgs = map( this.definitionErrors, - (defError) => defError.message + (defError) => defError.message, ); throw new Error( `Parser Definition Errors detected:\n ${defErrorsMsgs.join( - "\n-------------------------------\n" - )}` + "\n-------------------------------\n", + )}`, ); } }); @@ -272,7 +272,7 @@ export class Parser { "The IParserConfig property has been deprecated.\n\t" + "Please use the flag on the relevant DSL method instead.\n\t" + "See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#IGNORING_AMBIGUITIES\n\t" + - "For further details." + "For further details.", ); } @@ -298,7 +298,7 @@ applyMixins(Parser, [ export class CstParser extends Parser { constructor( tokenVocabulary: TokenVocabulary, - config: IParserConfigInternal = DEFAULT_PARSER_CONFIG + config: IParserConfigInternal = DEFAULT_PARSER_CONFIG, ) { const configClone = clone(config); configClone.outputCst = true; @@ -309,7 +309,7 @@ export class CstParser extends Parser { export class EmbeddedActionsParser extends Parser { constructor( tokenVocabulary: TokenVocabulary, - config: IParserConfigInternal = DEFAULT_PARSER_CONFIG + config: IParserConfigInternal = DEFAULT_PARSER_CONFIG, ) { const configClone = clone(config); configClone.outputCst = false; diff --git a/packages/chevrotain/src/parse/parser/traits/context_assist.ts b/packages/chevrotain/src/parse/parser/traits/context_assist.ts index 75a3fe54c..8995bc560 100644 --- a/packages/chevrotain/src/parse/parser/traits/context_assist.ts +++ b/packages/chevrotain/src/parse/parser/traits/context_assist.ts @@ -17,7 +17,7 @@ export class ContentAssist { public computeContentAssist( this: MixedInParser, startRuleName: string, - precedingInput: IToken[] + precedingInput: IToken[], ): ISyntacticContentAssistPath[] { const startRuleGast = this.gastProductionsCache[startRuleName]; @@ -29,7 +29,7 @@ export class ContentAssist { [startRuleGast], precedingInput, this.tokenMatcher, - this.maxLookahead + this.maxLookahead, ); } @@ -37,14 +37,14 @@ export class ContentAssist { // TODO: should this be more explicitly part of the public API? 
   public getNextPossibleTokenTypes(
     this: MixedInParser,
-    grammarPath: ITokenGrammarPath
+    grammarPath: ITokenGrammarPath,
   ): TokenType[] {
     const topRuleName = first(grammarPath.ruleStack)!;
     const gastProductions = this.getGAstProductions();
     const topProduction = gastProductions[topRuleName];
     const nextPossibleTokenTypes = new NextAfterTokenWalker(
       topProduction,
-      grammarPath
+      grammarPath,
     ).startWalking();
     return nextPossibleTokenTypes;
   }
diff --git a/packages/chevrotain/src/parse/parser/traits/error_handler.ts b/packages/chevrotain/src/parse/parser/traits/error_handler.ts
index 293879aac..68dbdb0ae 100644
--- a/packages/chevrotain/src/parse/parser/traits/error_handler.ts
+++ b/packages/chevrotain/src/parse/parser/traits/error_handler.ts
@@ -33,7 +33,7 @@ export class ErrorHandler {
 
   SAVE_ERROR(
     this: MixedInParser,
-    error: IRecognitionException
+    error: IRecognitionException,
   ): IRecognitionException {
     if (isRecognitionException(error)) {
       error.context = {
@@ -44,7 +44,7 @@ export class ErrorHandler {
       return error;
     } else {
       throw Error(
-        "Trying to save an Error which is not a RecognitionException"
+        "Trying to save an Error which is not a RecognitionException",
       );
     }
   }
@@ -62,7 +62,7 @@ export class ErrorHandler {
     this: MixedInParser,
     occurrence: number,
     prodType: PROD_TYPE,
-    userDefinedErrMsg: string | undefined
+    userDefinedErrMsg: string | undefined,
   ): never {
     const ruleName = this.getCurrRuleFullName();
     const ruleGrammar = this.getGAstProductions()[ruleName];
@@ -70,7 +70,7 @@ export class ErrorHandler {
       occurrence,
       ruleGrammar,
       prodType,
-      this.maxLookahead
+      this.maxLookahead,
     );
     const insideProdPaths = lookAheadPathsPerAlternative[0];
     const actualTokens = [];
@@ -92,7 +92,7 @@ export class ErrorHandler {
   raiseNoAltException(
     this: MixedInParser,
     occurrence: number,
-    errMsgTypes: string | undefined
+    errMsgTypes: string | undefined,
   ): never {
     const ruleName = this.getCurrRuleFullName();
     const ruleGrammar = this.getGAstProductions()[ruleName];
@@ -100,7 +100,7 @@
     const lookAheadPathsPerAlternative = getLookaheadPathsForOr(
       occurrence,
       ruleGrammar,
-      this.maxLookahead
+      this.maxLookahead,
     );
 
     const actualTokens = [];
@@ -118,7 +118,7 @@ export class ErrorHandler {
     });
 
     throw this.SAVE_ERROR(
-      new NoViableAltException(errMsg, this.LA(1), previousToken)
+      new NoViableAltException(errMsg, this.LA(1), previousToken),
     );
   }
 }
diff --git a/packages/chevrotain/src/parse/parser/traits/gast_recorder.ts b/packages/chevrotain/src/parse/parser/traits/gast_recorder.ts
index 52e29e8d9..6be7e5070 100644
--- a/packages/chevrotain/src/parse/parser/traits/gast_recorder.ts
+++ b/packages/chevrotain/src/parse/parser/traits/gast_recorder.ts
@@ -71,7 +71,7 @@ const RECORDING_PHASE_TOKEN = createTokenInstance(
   -1,
   -1,
   -1,
-  -1
+  -1,
 );
 Object.freeze(RECORDING_PHASE_TOKEN);
 
@@ -205,7 +205,7 @@ export class GastRecorder {
   // Executing backtracking logic will break our recording logic assumptions
   BACKTRACK_RECORD<T>(
     grammarRule: (...args: any[]) => T,
-    args?: any[]
+    args?: any[],
   ): () => boolean {
     return () => true;
   }
@@ -246,7 +246,7 @@ export class GastRecorder {
   optionInternalRecord<OUT>(
     this: MixedInParser,
     actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
-    occurrence: number
+    occurrence: number,
   ): OUT {
     return recordProd.call(this, Option, actionORMethodDef, occurrence);
   }
@@ -254,7 +254,7 @@ export class GastRecorder {
   atLeastOneInternalRecord(
     this: MixedInParser,
     occurrence: number,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void {
     recordProd.call(this, RepetitionMandatory, actionORMethodDef, occurrence);
   }
 
@@ -262,21 +262,21 @@ export class GastRecorder {
   atLeastOneSepFirstInternalRecord(
     this: MixedInParser,
     occurrence: number,
-    options: AtLeastOneSepMethodOpts<any>
+    options: AtLeastOneSepMethodOpts<any>,
   ): void {
     recordProd.call(
       this,
       RepetitionMandatoryWithSeparator,
       options,
       occurrence,
-      HANDLE_SEPARATOR
+      HANDLE_SEPARATOR,
     );
   }
 
   manyInternalRecord(
     this: MixedInParser,
     occurrence: number,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void {
     recordProd.call(this, Repetition, actionORMethodDef, occurrence);
   }
 
@@ -284,21 +284,21 @@ export class GastRecorder {
   manySepFirstInternalRecord(
     this: MixedInParser,
     occurrence: number,
-    options: ManySepMethodOpts<any>
+    options: ManySepMethodOpts<any>,
   ): void {
     recordProd.call(
       this,
       RepetitionWithSeparator,
       options,
       occurrence,
-      HANDLE_SEPARATOR
+      HANDLE_SEPARATOR,
     );
   }
 
   orInternalRecord<T>(
     this: MixedInParser,
     altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>,
-    occurrence: number
+    occurrence: number,
   ): T {
     return recordOrProd.call(this, altsOrOpts, occurrence);
   }
 
@@ -307,18 +307,18 @@ export class GastRecorder {
     this: MixedInParser,
     ruleToCall: ParserMethodInternal<ARGS, R>,
     occurrence: number,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R | CstNode {
     assertMethodIdxIsValid(occurrence);
     if (!ruleToCall || has(ruleToCall, "ruleName") === false) {
       const error: any = new Error(
         ` argument is invalid` +
           ` expecting a Parser method reference but got: <${JSON.stringify(
-            ruleToCall
+            ruleToCall,
           )}>` +
           `\n inside top level rule: <${
             (<Rule>this.recordingProdStack[0]).name
-          }>`
+          }>`,
       );
       error.KNOWN_RECORDER_ERROR = true;
       throw error;
@@ -344,18 +344,18 @@ export class GastRecorder {
     this: MixedInParser,
     tokType: TokenType,
     occurrence: number,
-    options?: ConsumeMethodOpts
+    options?: ConsumeMethodOpts,
   ): IToken {
     assertMethodIdxIsValid(occurrence);
     if (!hasShortKeyProperty(tokType)) {
       const error: any = new Error(
         ` argument is invalid` +
           ` expecting a TokenType reference but got: <${JSON.stringify(
-            tokType
+            tokType,
           )}>` +
           `\n inside top level rule: <${
             (<Rule>this.recordingProdStack[0]).name
-          }>`
+          }>`,
       );
       error.KNOWN_RECORDER_ERROR = true;
       throw error;
@@ -376,7 +376,7 @@ function recordProd(
   prodConstructor: any,
   mainProdArg: any,
   occurrence: number,
-  handleSep: boolean = false
+  handleSep: boolean = false,
 ): any {
   assertMethodIdxIsValid(occurrence);
   const prevProd: any = peek(this.recordingProdStack);
@@ -448,7 +448,7 @@ function assertMethodIdxIsValid(idx: number): void {
       `Invalid DSL Method idx value: <${idx}>\n\t` +
         `Idx value must be a none negative value smaller than ${
           MAX_METHOD_IDX + 1
-        }`
+        }`,
     );
     error.KNOWN_RECORDER_ERROR = true;
     throw error;
diff --git a/packages/chevrotain/src/parse/parser/traits/lexer_adapter.ts b/packages/chevrotain/src/parse/parser/traits/lexer_adapter.ts
index 9960236bf..5188b29ec 100644
--- a/packages/chevrotain/src/parse/parser/traits/lexer_adapter.ts
+++ b/packages/chevrotain/src/parse/parser/traits/lexer_adapter.ts
@@ -25,7 +25,7 @@ export class LexerAdapter {
     // - https://www.typescriptlang.org/docs/handbook/functions.html#this-parameters
     if (this.selfAnalysisDone !== true) {
       throw Error(
-        `Missing <performSelfAnalysis> invocation at the end of the Parser's constructor.`
+        `Missing <performSelfAnalysis> invocation at the end of the Parser's constructor.`,
       );
     }
     // @ts-ignore - `this parameter` not supported in setters/getters
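// ---------------------------------------------------------------------------
// The lexer_adapter.ts hunk above touches the error thrown when a parser is
// used before self-analysis ran. The public usage pattern (per chevrotain's
// documentation) is to call performSelfAnalysis at the end of the subclass
// constructor; the token and rule names below are made up for this sketch.
import { createToken, CstParser } from "chevrotain";

const Plus = createToken({ name: "Plus", pattern: /\+/ });
const Int = createToken({ name: "Int", pattern: /\d+/ });

class TinyParser extends CstParser {
  constructor() {
    super([Plus, Int]);

    this.RULE("sum", () => {
      this.CONSUME(Int);
      this.CONSUME(Plus);
      this.CONSUME2(Int); // second Int occurrence needs a distinct idx
    });

    // without this line, the first use of the parser throws the
    // "Missing <performSelfAnalysis> invocation..." error seen above
    this.performSelfAnalysis();
  }
}
// ---------------------------------------------------------------------------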
diff --git a/packages/chevrotain/src/parse/parser/traits/looksahead.ts b/packages/chevrotain/src/parse/parser/traits/looksahead.ts
index 778e4f718..280eb60e9 100644
--- a/packages/chevrotain/src/parse/parser/traits/looksahead.ts
+++ b/packages/chevrotain/src/parse/parser/traits/looksahead.ts
@@ -79,7 +79,7 @@ export class LooksAhead {
         const key = getKeyForAutomaticLookahead(
           this.fullRuleNameToShort[currRule.name],
           OR_IDX,
-          currProd.idx
+          currProd.idx,
         );
         this.setLaFuncCache(key, laFunc);
       });
@@ -92,7 +92,7 @@ export class LooksAhead {
           MANY_IDX,
           "Repetition",
           currProd.maxLookahead,
-          getProductionDslName(currProd)
+          getProductionDslName(currProd),
         );
       });
 
@@ -103,7 +103,7 @@ export class LooksAhead {
           OPTION_IDX,
           "Option",
           currProd.maxLookahead,
-          getProductionDslName(currProd)
+          getProductionDslName(currProd),
         );
       });
 
@@ -114,7 +114,7 @@ export class LooksAhead {
           AT_LEAST_ONE_IDX,
           "RepetitionMandatory",
           currProd.maxLookahead,
-          getProductionDslName(currProd)
+          getProductionDslName(currProd),
         );
       });
 
@@ -125,7 +125,7 @@ export class LooksAhead {
          AT_LEAST_ONE_SEP_IDX,
          "RepetitionMandatoryWithSeparator",
          currProd.maxLookahead,
-          getProductionDslName(currProd)
+          getProductionDslName(currProd),
         );
       });
 
@@ -136,7 +136,7 @@ export class LooksAhead {
           MANY_SEP_IDX,
           "RepetitionWithSeparator",
           currProd.maxLookahead,
-          getProductionDslName(currProd)
+          getProductionDslName(currProd),
         );
       });
     });
@@ -150,7 +150,7 @@ export class LooksAhead {
     prodKey: number,
     prodType: OptionalProductionType,
     prodMaxLookahead: number | undefined,
-    dslMethodName: string
+    dslMethodName: string,
   ): void {
     this.TRACE_INIT(
       `${dslMethodName}${prodOccurrence === 0 ? "" : prodOccurrence}`,
@@ -165,10 +165,10 @@ export class LooksAhead {
         const key = getKeyForAutomaticLookahead(
           this.fullRuleNameToShort[rule.name],
           prodKey,
-          prodOccurrence
+          prodOccurrence,
         );
         this.setLaFuncCache(key, laFunc);
-      }
+      },
     );
   }
 
@@ -176,13 +176,13 @@ export class LooksAhead {
   getKeyForAutomaticLookahead(
     this: MixedInParser,
     dslMethodIdx: number,
-    occurrence: number
+    occurrence: number,
   ): number {
     const currRuleShortName: any = this.getLastExplicitRuleShortName();
     return getKeyForAutomaticLookahead(
       currRuleShortName,
       dslMethodIdx,
-      occurrence
+      occurrence,
     );
   }
 
@@ -237,7 +237,7 @@ class DslMethodsCollectorVisitor extends GAstVisitor {
   }
 
   public visitRepetitionMandatoryWithSeparator(
-    atLeastOneSep: RepetitionMandatoryWithSeparator
+    atLeastOneSep: RepetitionMandatoryWithSeparator,
   ): void {
     this.dslMethods.repetitionMandatoryWithSeparator.push(atLeastOneSep);
   }
diff --git a/packages/chevrotain/src/parse/parser/traits/parser_traits.ts b/packages/chevrotain/src/parse/parser/traits/parser_traits.ts
index a4de94e51..787b82e59 100644
--- a/packages/chevrotain/src/parse/parser/traits/parser_traits.ts
+++ b/packages/chevrotain/src/parse/parser/traits/parser_traits.ts
@@ -38,7 +38,7 @@ export type MixedInParser = ParserConstructorImpel &
 interface MixedInCstParserConstructor {
   new (
     tokenVocabulary: defs.TokenVocabulary,
-    config?: defs.IParserConfig
+    config?: defs.IParserConfig,
   ): defs.CstParser;
 }
 
@@ -49,7 +49,7 @@ export const CstParser: MixedInCstParserConstructor = <any>(
 interface MixedInEmbeddedActionsParserConstructor {
   new (
     tokenVocabulary: defs.TokenVocabulary,
-    config?: defs.IParserConfig
+    config?: defs.IParserConfig,
   ): defs.EmbeddedActionsParser;
 }
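// ---------------------------------------------------------------------------
// A sketch of the cast pattern in parser_traits.ts above: the concrete mixed
// class is re-exported through a narrower constructor interface so consumers
// only see the public surface. Names below are simplified stand-ins, not
// chevrotain's actual types.
interface PublicCounter {
  increment(): void;
}

interface PublicCounterConstructor {
  new (start: number): PublicCounter;
}

class CounterImpl {
  constructor(private n: number) {}
  increment(): void {
    this.n++;
  }
  // internal member, not exposed through the public constructor interface
  debugDump(): string {
    return `n=${this.n}`;
  }
}

// the same trick as `export const CstParser: MixedInCstParserConstructor = <any>(...)`
const Counter: PublicCounterConstructor = CounterImpl as any;
new Counter(0).increment();
// ---------------------------------------------------------------------------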
diff --git a/packages/chevrotain/src/parse/parser/traits/recognizer_api.ts b/packages/chevrotain/src/parse/parser/traits/recognizer_api.ts
index 52f113023..b82c53403 100644
--- a/packages/chevrotain/src/parse/parser/traits/recognizer_api.ts
+++ b/packages/chevrotain/src/parse/parser/traits/recognizer_api.ts
@@ -40,7 +40,7 @@ export class RecognizerApi {
     this: MixedInParser,
     idx: number,
     tokType: TokenType,
-    options?: ConsumeMethodOpts
+    options?: ConsumeMethodOpts,
   ): IToken {
     return this.consumeInternal(tokType, idx, options);
   }
 
@@ -49,7 +49,7 @@ export class RecognizerApi {
     this: MixedInParser,
     idx: number,
     ruleToCall: ParserMethodInternal<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R {
     return this.subruleInternal(ruleToCall, idx, options);
   }
 
@@ -57,7 +57,7 @@ export class RecognizerApi {
   option<OUT>(
     this: MixedInParser,
     idx: number,
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined {
     return this.optionInternal(actionORMethodDef, idx);
   }
 
@@ -65,7 +65,7 @@ export class RecognizerApi {
   or(
     this: MixedInParser,
     idx: number,
-    altsOrOpts: IOrAlt<any>[] | OrMethodOpts<any>
+    altsOrOpts: IOrAlt<any>[] | OrMethodOpts<any>,
   ): any {
     return this.orInternal(altsOrOpts, idx);
   }
 
@@ -73,7 +73,7 @@ export class RecognizerApi {
   many(
     this: MixedInParser,
     idx: number,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void {
     return this.manyInternal(idx, actionORMethodDef);
   }
 
@@ -81,7 +81,7 @@ export class RecognizerApi {
   atLeastOne(
     this: MixedInParser,
     idx: number,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void {
     return this.atLeastOneInternal(idx, actionORMethodDef);
   }
 
@@ -89,7 +89,7 @@ export class RecognizerApi {
   CONSUME(
     this: MixedInParser,
     tokType: TokenType,
-    options?: ConsumeMethodOpts
+    options?: ConsumeMethodOpts,
   ): IToken {
     return this.consumeInternal(tokType, 0, options);
   }
 
@@ -97,7 +97,7 @@ export class RecognizerApi {
   CONSUME1(
     this: MixedInParser,
     tokType: TokenType,
-    options?: ConsumeMethodOpts
+    options?: ConsumeMethodOpts,
   ): IToken {
     return this.consumeInternal(tokType, 1, options);
   }
 
@@ -105,7 +105,7 @@ export class RecognizerApi {
   CONSUME2(
     this: MixedInParser,
     tokType: TokenType,
-    options?: ConsumeMethodOpts
+    options?: ConsumeMethodOpts,
   ): IToken {
     return this.consumeInternal(tokType, 2, options);
   }
 
@@ -113,7 +113,7 @@ export class RecognizerApi {
   CONSUME3(
     this: MixedInParser,
     tokType: TokenType,
-    options?: ConsumeMethodOpts
+    options?: ConsumeMethodOpts,
   ): IToken {
     return this.consumeInternal(tokType, 3, options);
   }
 
@@ -121,7 +121,7 @@ export class RecognizerApi {
   CONSUME4(
     this: MixedInParser,
     tokType: TokenType,
-    options?: ConsumeMethodOpts
+    options?: ConsumeMethodOpts,
   ): IToken {
     return this.consumeInternal(tokType, 4, options);
   }
 
@@ -129,7 +129,7 @@ export class RecognizerApi {
   CONSUME5(
     this: MixedInParser,
     tokType: TokenType,
-    options?: ConsumeMethodOpts
+    options?: ConsumeMethodOpts,
   ): IToken {
     return this.consumeInternal(tokType, 5, options);
   }
 
@@ -137,7 +137,7 @@ export class RecognizerApi {
   CONSUME6(
     this: MixedInParser,
     tokType: TokenType,
-    options?: ConsumeMethodOpts
+    options?: ConsumeMethodOpts,
   ): IToken {
     return this.consumeInternal(tokType, 6, options);
   }
 
@@ -145,7 +145,7 @@ export class RecognizerApi {
   CONSUME7(
     this: MixedInParser,
     tokType: TokenType,
-    options?: ConsumeMethodOpts
+    options?: ConsumeMethodOpts,
   ): IToken {
     return this.consumeInternal(tokType, 7, options);
   }
 
@@ -153,7 +153,7 @@ export class RecognizerApi {
   CONSUME8(
     this: MixedInParser,
     tokType: TokenType,
-    options?: ConsumeMethodOpts
+    options?: ConsumeMethodOpts,
   ): IToken {
     return this.consumeInternal(tokType, 8, options);
   }
 
@@ -161,7 +161,7 @@ export class RecognizerApi {
   CONSUME9(
     this: MixedInParser,
     tokType: TokenType,
-    options?: ConsumeMethodOpts
+    options?: ConsumeMethodOpts,
   ): IToken {
     return this.consumeInternal(tokType, 9, options);
   }
 
@@ -169,7 +169,7 @@ export class RecognizerApi {
   SUBRULE<ARGS extends unknown[], R>(
     this: MixedInParser,
     ruleToCall: ParserMethodInternal<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R {
     return this.subruleInternal(ruleToCall, 0, options);
   }
 
@@ -177,7 +177,7 @@ export class RecognizerApi {
   SUBRULE1<ARGS extends unknown[], R>(
     this: MixedInParser,
     ruleToCall: ParserMethodInternal<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R {
     return this.subruleInternal(ruleToCall, 1, options);
   }
 
@@ -185,7 +185,7 @@ export class RecognizerApi {
   SUBRULE2<ARGS extends unknown[], R>(
     this: MixedInParser,
     ruleToCall: ParserMethodInternal<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R {
     return this.subruleInternal(ruleToCall, 2, options);
   }
 
@@ -193,7 +193,7 @@ export class RecognizerApi {
   SUBRULE3<ARGS extends unknown[], R>(
     this: MixedInParser,
     ruleToCall: ParserMethodInternal<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R {
     return this.subruleInternal(ruleToCall, 3, options);
   }
 
@@ -201,7 +201,7 @@ export class RecognizerApi {
   SUBRULE4<ARGS extends unknown[], R>(
     this: MixedInParser,
     ruleToCall: ParserMethodInternal<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R {
     return this.subruleInternal(ruleToCall, 4, options);
   }
 
@@ -209,7 +209,7 @@ export class RecognizerApi {
   SUBRULE5<ARGS extends unknown[], R>(
     this: MixedInParser,
     ruleToCall: ParserMethodInternal<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R {
     return this.subruleInternal(ruleToCall, 5, options);
   }
 
@@ -217,7 +217,7 @@ export class RecognizerApi {
   SUBRULE6<ARGS extends unknown[], R>(
     this: MixedInParser,
     ruleToCall: ParserMethodInternal<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R {
     return this.subruleInternal(ruleToCall, 6, options);
   }
 
@@ -225,7 +225,7 @@ export class RecognizerApi {
   SUBRULE7<ARGS extends unknown[], R>(
     this: MixedInParser,
     ruleToCall: ParserMethodInternal<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R {
     return this.subruleInternal(ruleToCall, 7, options);
   }
 
@@ -233,7 +233,7 @@ export class RecognizerApi {
   SUBRULE8<ARGS extends unknown[], R>(
     this: MixedInParser,
     ruleToCall: ParserMethodInternal<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R {
     return this.subruleInternal(ruleToCall, 8, options);
   }
 
@@ -241,217 +241,217 @@ export class RecognizerApi {
   SUBRULE9<ARGS extends unknown[], R>(
     this: MixedInParser,
     ruleToCall: ParserMethodInternal<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R {
     return this.subruleInternal(ruleToCall, 9, options);
   }
 
   OPTION<OUT>(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined {
     return this.optionInternal(actionORMethodDef, 0);
   }
 
   OPTION1<OUT>(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined {
     return this.optionInternal(actionORMethodDef, 1);
   }
 
   OPTION2<OUT>(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined {
     return this.optionInternal(actionORMethodDef, 2);
   }
 
   OPTION3<OUT>(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined {
     return this.optionInternal(actionORMethodDef, 3);
   }
 
   OPTION4<OUT>(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined {
     return this.optionInternal(actionORMethodDef, 4);
   }
 
   OPTION5<OUT>(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined {
     return this.optionInternal(actionORMethodDef, 5);
   }
 
   OPTION6<OUT>(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined {
     return this.optionInternal(actionORMethodDef, 6);
   }
 
   OPTION7<OUT>(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined {
     return this.optionInternal(actionORMethodDef, 7);
   }
 
   OPTION8<OUT>(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined {
     return this.optionInternal(actionORMethodDef, 8);
   }
 
   OPTION9<OUT>(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined {
     return this.optionInternal(actionORMethodDef, 9);
   }
 
   OR<T>(
     this: MixedInParser,
-    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>
+    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>,
   ): T {
     return this.orInternal(altsOrOpts, 0);
   }
 
   OR1<T>(
     this: MixedInParser,
-    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>
+    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>,
   ): T {
     return this.orInternal(altsOrOpts, 1);
   }
 
   OR2<T>(
     this: MixedInParser,
-    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>
+    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>,
   ): T {
     return this.orInternal(altsOrOpts, 2);
   }
 
   OR3<T>(
     this: MixedInParser,
-    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>
+    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>,
   ): T {
     return this.orInternal(altsOrOpts, 3);
   }
 
   OR4<T>(
     this: MixedInParser,
-    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>
+    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>,
   ): T {
     return this.orInternal(altsOrOpts, 4);
   }
 
   OR5<T>(
     this: MixedInParser,
-    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>
+    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>,
   ): T {
     return this.orInternal(altsOrOpts, 5);
   }
 
   OR6<T>(
     this: MixedInParser,
-    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>
+    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>,
   ): T {
     return this.orInternal(altsOrOpts, 6);
   }
 
   OR7<T>(
     this: MixedInParser,
-    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>
+    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>,
   ): T {
     return this.orInternal(altsOrOpts, 7);
   }
 
   OR8<T>(
     this: MixedInParser,
-    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>
+    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>,
   ): T {
     return this.orInternal(altsOrOpts, 8);
   }
 
   OR9<T>(
     this: MixedInParser,
-    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>
+    altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>,
   ): T {
     return this.orInternal(altsOrOpts, 9);
   }
 
   MANY(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void {
     this.manyInternal(0, actionORMethodDef);
   }
 
   MANY1(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void {
     this.manyInternal(1, actionORMethodDef);
   }
 
   MANY2(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void {
     this.manyInternal(2, actionORMethodDef);
   }
 
   MANY3(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void {
     this.manyInternal(3, actionORMethodDef);
   }
 
   MANY4(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void {
     this.manyInternal(4, actionORMethodDef);
   }
 
   MANY5(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void {
     this.manyInternal(5, actionORMethodDef);
   }
 
   MANY6(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void {
     this.manyInternal(6, actionORMethodDef);
   }
 
   MANY7(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void {
     this.manyInternal(7, actionORMethodDef);
   }
 
   MANY8(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void {
     this.manyInternal(8, actionORMethodDef);
   }
 
   MANY9(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void {
     this.manyInternal(9, actionORMethodDef);
   }
 
@@ -498,140 +498,140 @@ export class RecognizerApi {
   AT_LEAST_ONE(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void {
     this.atLeastOneInternal(0, actionORMethodDef);
   }
 
   AT_LEAST_ONE1(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void {
     return this.atLeastOneInternal(1, actionORMethodDef);
   }
 
   AT_LEAST_ONE2(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void {
     this.atLeastOneInternal(2, actionORMethodDef);
   }
 
   AT_LEAST_ONE3(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void {
     this.atLeastOneInternal(3, actionORMethodDef);
   }
 
   AT_LEAST_ONE4(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void {
     this.atLeastOneInternal(4, actionORMethodDef);
   }
 
   AT_LEAST_ONE5(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void {
     this.atLeastOneInternal(5, actionORMethodDef);
   }
 
   AT_LEAST_ONE6(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void {
     this.atLeastOneInternal(6, actionORMethodDef);
   }
 
   AT_LEAST_ONE7(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void {
     this.atLeastOneInternal(7, actionORMethodDef);
   }
 
   AT_LEAST_ONE8(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void {
     this.atLeastOneInternal(8, actionORMethodDef);
   }
 
   AT_LEAST_ONE9(
     this: MixedInParser,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void {
     this.atLeastOneInternal(9, actionORMethodDef);
   }
 
   AT_LEAST_ONE_SEP(
     this: MixedInParser,
-    options: AtLeastOneSepMethodOpts<any>
+    options: AtLeastOneSepMethodOpts<any>,
   ): void {
     this.atLeastOneSepFirstInternal(0, options);
   }
 
   AT_LEAST_ONE_SEP1(
     this: MixedInParser,
-    options: AtLeastOneSepMethodOpts<any>
+    options: AtLeastOneSepMethodOpts<any>,
   ): void {
     this.atLeastOneSepFirstInternal(1, options);
   }
 
   AT_LEAST_ONE_SEP2(
     this: MixedInParser,
-    options: AtLeastOneSepMethodOpts<any>
+    options: AtLeastOneSepMethodOpts<any>,
   ): void {
     this.atLeastOneSepFirstInternal(2, options);
   }
 
   AT_LEAST_ONE_SEP3(
     this: MixedInParser,
-    options: AtLeastOneSepMethodOpts<any>
+    options: AtLeastOneSepMethodOpts<any>,
   ): void {
     this.atLeastOneSepFirstInternal(3, options);
   }
 
   AT_LEAST_ONE_SEP4(
     this: MixedInParser,
-    options: AtLeastOneSepMethodOpts<any>
+    options: AtLeastOneSepMethodOpts<any>,
   ): void {
     this.atLeastOneSepFirstInternal(4, options);
   }
 
   AT_LEAST_ONE_SEP5(
     this: MixedInParser,
-    options: AtLeastOneSepMethodOpts<any>
+    options: AtLeastOneSepMethodOpts<any>,
   ): void {
     this.atLeastOneSepFirstInternal(5, options);
   }
 
   AT_LEAST_ONE_SEP6(
     this: MixedInParser,
-    options: AtLeastOneSepMethodOpts<any>
+    options: AtLeastOneSepMethodOpts<any>,
   ): void {
     this.atLeastOneSepFirstInternal(6, options);
   }
 
   AT_LEAST_ONE_SEP7(
     this: MixedInParser,
-    options: AtLeastOneSepMethodOpts<any>
+    options: AtLeastOneSepMethodOpts<any>,
   ): void {
     this.atLeastOneSepFirstInternal(7, options);
   }
 
   AT_LEAST_ONE_SEP8(
     this: MixedInParser,
-    options: AtLeastOneSepMethodOpts<any>
+    options: AtLeastOneSepMethodOpts<any>,
   ): void {
     this.atLeastOneSepFirstInternal(8, options);
   }
 
   AT_LEAST_ONE_SEP9(
     this: MixedInParser,
-    options: AtLeastOneSepMethodOpts<any>
+    options: AtLeastOneSepMethodOpts<any>,
   ): void {
     this.atLeastOneSepFirstInternal(9, options);
   }
 
@@ -640,7 +640,7 @@ export class RecognizerApi {
     this: MixedInParser,
     name: string,
     implementation: (...implArgs: any[]) => T,
-    config: IRuleConfig<T> = DEFAULT_RULE_CONFIG
+    config: IRuleConfig<T> = DEFAULT_RULE_CONFIG,
   ): (idxInCallingRule?: number, ...args: any[]) => T | any {
     if (includes(this.definedRulesNames, name)) {
       const errMsg =
@@ -668,12 +668,12 @@ export class RecognizerApi {
     this: MixedInParser,
     name: string,
     impl: (...implArgs: any[]) => T,
-    config: IRuleConfig<T> = DEFAULT_RULE_CONFIG
+    config: IRuleConfig<T> = DEFAULT_RULE_CONFIG,
   ): (idxInCallingRule?: number, ...args: any[]) => T {
     const ruleErrors: IParserDefinitionError[] = validateRuleIsOverridden(
       name,
       this.definedRulesNames,
-      this.className
+      this.className,
     );
 
     this.definitionErrors = this.definitionErrors.concat(ruleErrors);
@@ -685,7 +685,7 @@ export class RecognizerApi {
   BACKTRACK<T>(
     this: MixedInParser,
     grammarRule: (...args: any[]) => T,
-    args?: any[]
+    args?: any[],
   ): () => boolean {
     return function () {
       // save org state
diff --git a/packages/chevrotain/src/parse/parser/traits/recognizer_engine.ts b/packages/chevrotain/src/parse/parser/traits/recognizer_engine.ts
index 12f27c78c..5786bd8da 100644
--- a/packages/chevrotain/src/parse/parser/traits/recognizer_engine.ts
+++ b/packages/chevrotain/src/parse/parser/traits/recognizer_engine.ts
@@ -85,7 +85,7 @@ export class RecognizerEngine {
   initRecognizerEngine(
     tokenVocabulary: TokenVocabulary,
-    config: IParserConfig
+    config: IParserConfig,
   ) {
     this.className = this.constructor.name;
     // TODO: would using an ES6 Map or plain object be faster (CST building scenario)
@@ -106,7 +106,7 @@ export class RecognizerEngine {
       throw Error(
         "The Parser's configuration can no longer contain a <serializedGrammar> property.\n" +
           "\tSee: https://chevrotain.io/docs/changes/BREAKING_CHANGES.html#_6-0-0\n" +
-          "\tFor Further details."
+          "\tFor Further details.",
       );
     }
 
@@ -118,7 +118,7 @@ export class RecognizerEngine {
       throw Error(
         "A Token Vocabulary cannot be empty.\n" +
           "\tNote that the first argument for the parser constructor\n" +
-          "\tis no longer a Token vector (since v4.0)."
+ "\tis no longer a Token vector (since v4.0).", ); } @@ -126,7 +126,7 @@ export class RecognizerEngine { throw Error( "The Parser constructor no longer accepts a token vector as the first argument.\n" + "\tSee: https://chevrotain.io/docs/changes/BREAKING_CHANGES.html#_4-0-0\n" + - "\tFor Further details." + "\tFor Further details.", ); } } @@ -138,7 +138,7 @@ export class RecognizerEngine { acc[tokType.name] = tokType; return acc; }, - {} as { [tokenName: string]: TokenType } + {} as { [tokenName: string]: TokenType }, ); } else if ( has(tokenVocabulary, "modes") && @@ -152,14 +152,14 @@ export class RecognizerEngine { acc[tokType.name] = tokType; return acc; }, - {} as { [tokenName: string]: TokenType } + {} as { [tokenName: string]: TokenType }, ); } else if (isObject(tokenVocabulary)) { this.tokensMap = clone(tokenVocabulary as TokenTypeDictionary); } else { throw new Error( " argument must be An Array of Token constructors," + - " A dictionary of Token constructors or an IMultiModeLexerDefinition" + " A dictionary of Token constructors or an IMultiModeLexerDefinition", ); } @@ -171,7 +171,7 @@ export class RecognizerEngine { ? flatten(values((tokenVocabulary).modes)) : values(tokenVocabulary); const noTokenCategoriesUsed = every(allTokenTypes, (tokenConstructor) => - isEmpty(tokenConstructor.categoryMatches) + isEmpty(tokenConstructor.categoryMatches), ); this.tokenMatcher = noTokenCategoriesUsed @@ -188,12 +188,12 @@ export class RecognizerEngine { this: MixedInParser, ruleName: string, impl: (...args: ARGS) => R, - config: IRuleConfig + config: IRuleConfig, ): ParserMethodInternal { if (this.selfAnalysisDone) { throw Error( `Grammar rule <${ruleName}> may not be defined after the 'performSelfAnalysis' method has been called'\n` + - `Make sure that all grammar rule definitions are done before 'performSelfAnalysis' is called.` + `Make sure that all grammar rule definitions are done before 'performSelfAnalysis' is called.`, ); } const resyncEnabled: boolean = has(config, "resyncEnabled") @@ -251,7 +251,7 @@ export class RecognizerEngine { const wrappedGrammarRule: ParserMethodInternal = Object.assign( invokeRuleWithTry as any, - { ruleName, originalGrammarAction: impl } + { ruleName, originalGrammarAction: impl }, ); return wrappedGrammarRule; @@ -261,7 +261,7 @@ export class RecognizerEngine { this: MixedInParser, e: Error, resyncEnabledConfig: boolean, - recoveryValueFunc: Function + recoveryValueFunc: Function, ): unknown { const isFirstInvokedRule = this.RULE_STACK.length === 1; // note the reSync is always enabled for the first rule invocation, because we must always be able to @@ -315,7 +315,7 @@ export class RecognizerEngine { optionInternal( this: MixedInParser, actionORMethodDef: GrammarAction | DSLMethodOpts, - occurrence: number + occurrence: number, ): OUT | undefined { const key = this.getKeyForAutomaticLookahead(OPTION_IDX, occurrence); return this.optionInternalLogic(actionORMethodDef, occurrence, key); @@ -325,7 +325,7 @@ export class RecognizerEngine { this: MixedInParser, actionORMethodDef: GrammarAction | DSLMethodOpts, occurrence: number, - key: number + key: number, ): OUT | undefined { let lookAheadFunc = this.getLaFuncFromCache(key); let action: GrammarAction; @@ -352,16 +352,16 @@ export class RecognizerEngine { atLeastOneInternal( this: MixedInParser, prodOccurrence: number, - actionORMethodDef: GrammarAction | DSLMethodOptsWithErr + actionORMethodDef: GrammarAction | DSLMethodOptsWithErr, ): void { const laKey = this.getKeyForAutomaticLookahead( 
       AT_LEAST_ONE_IDX,
-      prodOccurrence
+      prodOccurrence,
     );
     return this.atLeastOneInternalLogic(
       prodOccurrence,
       actionORMethodDef,
-      laKey
+      laKey,
     );
   }
 
@@ -369,7 +369,7 @@ export class RecognizerEngine {
     this: MixedInParser,
     prodOccurrence: number,
     actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
-    key: number
+    key: number,
   ): void {
     let lookAheadFunc = this.getLaFuncFromCache(key);
     let action;
@@ -399,7 +399,7 @@ export class RecognizerEngine {
       throw this.raiseEarlyExitException(
         prodOccurrence,
         PROD_TYPE.REPETITION_MANDATORY,
-        (<DSLMethodOptsWithErr<any>>actionORMethodDef).ERR_MSG
+        (<DSLMethodOptsWithErr<any>>actionORMethodDef).ERR_MSG,
       );
     }
 
@@ -414,18 +414,18 @@ export class RecognizerEngine {
       lookAheadFunc,
       AT_LEAST_ONE_IDX,
       prodOccurrence,
-      NextTerminalAfterAtLeastOneWalker
+      NextTerminalAfterAtLeastOneWalker,
     );
   }
 
   atLeastOneSepFirstInternal(
     this: MixedInParser,
     prodOccurrence: number,
-    options: AtLeastOneSepMethodOpts<any>
+    options: AtLeastOneSepMethodOpts<any>,
   ): void {
     const laKey = this.getKeyForAutomaticLookahead(
       AT_LEAST_ONE_SEP_IDX,
-      prodOccurrence
+      prodOccurrence,
     );
     this.atLeastOneSepFirstInternalLogic(prodOccurrence, options, laKey);
   }
@@ -434,7 +434,7 @@ export class RecognizerEngine {
     this: MixedInParser,
     prodOccurrence: number,
     options: AtLeastOneSepMethodOpts<any>,
-    key: number
+    key: number,
   ): void {
     const action = options.DEF;
     const separator = options.SEP;
@@ -473,13 +473,13 @@ export class RecognizerEngine {
         separatorLookAheadFunc,
         AT_LEAST_ONE_SEP_IDX,
         prodOccurrence,
-        NextTerminalAfterAtLeastOneSepWalker
+        NextTerminalAfterAtLeastOneSepWalker,
       );
     } else {
       throw this.raiseEarlyExitException(
         prodOccurrence,
         PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR,
-        options.ERR_MSG
+        options.ERR_MSG,
       );
     }
   }
@@ -487,7 +487,7 @@ export class RecognizerEngine {
   manyInternal(
     this: MixedInParser,
     prodOccurrence: number,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void {
     const laKey = this.getKeyForAutomaticLookahead(MANY_IDX, prodOccurrence);
     return this.manyInternalLogic(prodOccurrence, actionORMethodDef, laKey);
@@ -497,7 +497,7 @@ export class RecognizerEngine {
     this: MixedInParser,
     prodOccurrence: number,
     actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
-    key: number
+    key: number,
   ) {
     let lookaheadFunction = this.getLaFuncFromCache(key);
     let action;
@@ -533,18 +533,18 @@ export class RecognizerEngine {
       // An infinite loop cannot occur as:
       // - Either the lookahead is guaranteed to consume something (Single Token Separator)
       // - AT_LEAST_ONE by definition is guaranteed to consume something (or error out).
-      notStuck
+      notStuck,
     );
   }
 
   manySepFirstInternal(
     this: MixedInParser,
     prodOccurrence: number,
-    options: ManySepMethodOpts<any>
+    options: ManySepMethodOpts<any>,
   ): void {
     const laKey = this.getKeyForAutomaticLookahead(
       MANY_SEP_IDX,
-      prodOccurrence
+      prodOccurrence,
     );
     this.manySepFirstInternalLogic(prodOccurrence, options, laKey);
   }
@@ -553,7 +553,7 @@ export class RecognizerEngine {
     this: MixedInParser,
     prodOccurrence: number,
     options: ManySepMethodOpts<any>,
-    key: number
+    key: number,
   ): void {
     const action = options.DEF;
     const separator = options.SEP;
@@ -588,7 +588,7 @@ export class RecognizerEngine {
         separatorLookAheadFunc,
         MANY_SEP_IDX,
         prodOccurrence,
-        NextTerminalAfterManySepWalker
+        NextTerminalAfterManySepWalker,
       );
     }
   }
@@ -599,7 +599,7 @@ export class RecognizerEngine {
     separator: TokenType,
     separatorLookAheadFunc: () => boolean,
     action: GrammarAction<any>,
-    nextTerminalAfterWalker: typeof AbstractNextTerminalAfterProductionWalker
+    nextTerminalAfterWalker: typeof AbstractNextTerminalAfterProductionWalker,
   ): void {
     while (separatorLookAheadFunc()) {
       // note that this CONSUME will never enter recovery because
@@ -626,7 +626,7 @@ export class RecognizerEngine {
         separatorLookAheadFunc,
         AT_LEAST_ONE_SEP_IDX,
         prodOccurrence,
-        nextTerminalAfterWalker
+        nextTerminalAfterWalker,
       );
     }
 
@@ -643,7 +643,7 @@ export class RecognizerEngine {
   orInternal<T>(
     this: MixedInParser,
     altsOrOpts: IOrAlt<T>[] | OrMethodOpts<T>,
-    occurrence: number
+    occurrence: number,
   ): T {
     const laKey = this.getKeyForAutomaticLookahead(OR_IDX, occurrence);
     const alts = isArray(altsOrOpts) ? altsOrOpts : altsOrOpts.DEF;
@@ -656,7 +656,7 @@ export class RecognizerEngine {
     }
     this.raiseNoAltException(
       occurrence,
-      (altsOrOpts as OrMethodOpts<T>).ERR_MSG
+      (altsOrOpts as OrMethodOpts<T>).ERR_MSG,
     );
   }
 
@@ -674,7 +674,7 @@ export class RecognizerEngine {
         ruleName: this.getCurrRuleFullName(),
       });
       this.SAVE_ERROR(
-        new NotAllInputParsedException(errMsg, firstRedundantTok)
+        new NotAllInputParsedException(errMsg, firstRedundantTok),
       );
     }
   }
@@ -683,7 +683,7 @@ export class RecognizerEngine {
     this: MixedInParser,
     ruleToCall: ParserMethodInternal<ARGS, R>,
     idx: number,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R {
     let ruleResult;
     try {
@@ -694,7 +694,7 @@
         ruleResult,
         options !== undefined && options.LABEL !== undefined
          ? options.LABEL
-          : ruleToCall.ruleName
+          : ruleToCall.ruleName,
       );
       return ruleResult;
     } catch (e) {
@@ -706,14 +706,14 @@ export class RecognizerEngine {
     this: MixedInParser,
     e: any,
     options: SubruleMethodOpts<any> | undefined,
-    ruleName: string
+    ruleName: string,
   ): void {
     if (isRecognitionException(e) && e.partialCstResult !== undefined) {
       this.cstPostNonTerminal(
         e.partialCstResult,
         options !== undefined && options.LABEL !== undefined
           ? options.LABEL
           : ruleName,
       );
 
       delete e.partialCstResult;
@@ -725,7 +725,7 @@ export class RecognizerEngine {
     this: MixedInParser,
     tokType: TokenType,
     idx: number,
-    options: ConsumeMethodOpts | undefined
+    options: ConsumeMethodOpts | undefined,
   ): IToken {
     let consumedToken!: IToken;
     try {
@@ -740,7 +740,7 @@ export class RecognizerEngine {
       consumedToken = this.consumeInternalRecovery(
         tokType,
         idx,
-        eFromConsumption
+        eFromConsumption,
       );
     }
 
@@ -748,7 +748,7 @@ export class RecognizerEngine {
       options !== undefined && options.LABEL !== undefined
         ? options.LABEL
         : tokType.name,
-      consumedToken
+      consumedToken,
     );
     return consumedToken;
   }
@@ -757,7 +757,7 @@ export class RecognizerEngine {
     this: MixedInParser,
     tokType: TokenType,
     nextToken: IToken,
-    options: ConsumeMethodOpts | undefined
+    options: ConsumeMethodOpts | undefined,
   ): void {
     let msg;
     const previousToken = this.LA(0);
@@ -772,7 +772,7 @@ export class RecognizerEngine {
       });
     }
     throw this.SAVE_ERROR(
-      new MismatchedTokenException(msg, nextToken, previousToken)
+      new MismatchedTokenException(msg, nextToken, previousToken),
     );
   }
@@ -780,7 +780,7 @@ export class RecognizerEngine {
     this: MixedInParser,
     tokType: TokenType,
     idx: number,
-    eFromConsumption: Error
+    eFromConsumption: Error,
   ): IToken {
     // no recovery allowed during backtracking, otherwise backtracking may recover invalid syntax and accept it
     // but the original syntax could have been parsed successfully without any backtracking + recovery
@@ -829,7 +829,7 @@ export class RecognizerEngine {
     this: MixedInParser,
     shortName: number,
     fullName: string,
-    idxInCallingRule: number
+    idxInCallingRule: number,
   ): void {
     this.RULE_OCCURRENCE_STACK.push(idxInCallingRule);
     this.RULE_STACK.push(shortName);
diff --git a/packages/chevrotain/src/parse/parser/traits/recoverable.ts b/packages/chevrotain/src/parse/parser/traits/recoverable.ts
index 582cd3a9f..0a387a853 100644
--- a/packages/chevrotain/src/parse/parser/traits/recoverable.ts
+++ b/packages/chevrotain/src/parse/parser/traits/recoverable.ts
@@ -78,7 +78,7 @@ export class Recoverable {
       NaN,
       NaN,
       NaN,
-      NaN
+      NaN,
     );
     tokToInsert.isInsertedInRecovery = true;
     return tokToInsert;
@@ -97,7 +97,7 @@ export class Recoverable {
     grammarRule: Function,
     grammarRuleArgs: any[],
     lookAheadFunc: () => boolean,
-    expectedTokType: TokenType
+    expectedTokType: TokenType,
   ): void {
     // TODO: can the resyncTokenType be cached?
     const reSyncTokType = this.findReSyncTokenType();
@@ -121,7 +121,7 @@ export class Recoverable {
       const error = new MismatchedTokenException(
         msg,
         nextTokenWithoutResync,
-        this.LA(0)
+        this.LA(0),
       );
       // the first token here will be the original cause of the error, this is not part of the resyncedTokens property.
       error.resyncedTokens = dropRight(resyncedTokens);
@@ -157,7 +157,7 @@ export class Recoverable {
     this: MixedInParser,
     expectTokAfterLastMatch: TokenType,
     nextTokIdx: number,
-    notStuck: boolean | undefined
+    notStuck: boolean | undefined,
   ): boolean {
     // Edge case of arriving from a MANY repetition which is stuck
     // Attempting recovery in this case could cause an infinite loop
@@ -182,7 +182,7 @@ export class Recoverable {
     if (
       this.canPerformInRuleRecovery(
         expectTokAfterLastMatch,
-        this.getFollowsForInRuleRecovery(expectTokAfterLastMatch, nextTokIdx)
+        this.getFollowsForInRuleRecovery(expectTokAfterLastMatch, nextTokIdx),
       )
     ) {
       return false;
@@ -195,7 +195,7 @@ export class Recoverable {
   getFollowsForInRuleRecovery(
     this: MixedInParser,
     tokType: TokenType,
-    tokIdxInRule: number
+    tokIdxInRule: number,
   ): TokenType[] {
     const grammarPath = this.getCurrentGrammarPath(tokType, tokIdxInRule);
     const follows = this.getNextPossibleTokenTypes(grammarPath);
@@ -205,7 +205,7 @@ export class Recoverable {
   tryInRuleRecovery(
     this: MixedInParser,
     expectedTokType: TokenType,
-    follows: TokenType[]
+    follows: TokenType[],
   ): IToken {
     if (this.canRecoverWithSingleTokenInsertion(expectedTokType, follows)) {
       const tokToInsert = this.getTokenToInsert(expectedTokType);
@@ -224,7 +224,7 @@ export class Recoverable {
   canPerformInRuleRecovery(
     this: MixedInParser,
     expectedToken: TokenType,
-    follows: TokenType[]
+    follows: TokenType[],
   ): boolean {
     return (
       this.canRecoverWithSingleTokenInsertion(expectedToken, follows) ||
@@ -235,7 +235,7 @@ export class Recoverable {
   canRecoverWithSingleTokenInsertion(
     this: MixedInParser,
     expectedTokType: TokenType,
-    follows: TokenType[]
+    follows: TokenType[],
   ): boolean {
     if (!this.canTokenTypeBeInsertedInRecovery(expectedTokType)) {
       return false;
@@ -257,7 +257,7 @@ export class Recoverable {
   canRecoverWithSingleTokenDeletion(
     this: MixedInParser,
-    expectedTokType: TokenType
+    expectedTokType: TokenType,
   ): boolean {
     if (!this.canTokenTypeBeDeletedInRecovery(expectedTokType)) {
       return false;
@@ -265,14 +265,14 @@ export class Recoverable {
     const isNextTokenWhatIsExpected = this.tokenMatcher(
       this.LA(2),
-      expectedTokType
+      expectedTokType,
     );
     return isNextTokenWhatIsExpected;
   }

   isInCurrentRuleReSyncSet(
     this: MixedInParser,
-    tokenTypeIdx: TokenType
+    tokenTypeIdx: TokenType,
   ): boolean {
     const followKey = this.getCurrFollowKey();
     const currentRuleReSyncSet = this.getFollowSetFromFollowKey(followKey);
@@ -338,7 +338,7 @@ export class Recoverable {
   getFollowSetFromFollowKey(
     this: MixedInParser,
-    followKey: IFollowKey
+    followKey: IFollowKey,
   ): TokenType[] {
     if (followKey === EOF_FOLLOW_KEY) {
       return [EOF];
@@ -355,7 +355,7 @@ export class Recoverable {
   addToResyncTokens(
     this: MixedInParser,
     token: IToken,
-    resyncTokens: IToken[]
+    resyncTokens: IToken[],
   ): IToken[] {
     if (!this.tokenMatcher(token, EOF)) {
       resyncTokens.push(token);
@@ -382,7 +382,7 @@ export class Recoverable {
     dslMethodIdx: number,
     prodOccurrence: number,
     nextToksWalker: typeof AbstractNextTerminalAfterProductionWalker,
-    notStuck?: boolean
+    notStuck?: boolean,
   ): void {
     // by default this is a NO-OP
     // The actual implementation is with the function(not method) below
@@ -391,7 +391,7 @@ export class Recoverable {
   getCurrentGrammarPath(
     this: MixedInParser,
     tokType: TokenType,
-    tokIdxInRule: number
+    tokIdxInRule: number,
   ): ITokenGrammarPath {
     const pathRuleStack: string[] = this.getHumanReadableRuleStack();
     const pathOccurrenceStack: number[] = clone(this.RULE_OCCURRENCE_STACK);
@@ -406,7 +406,7 @@ export class Recoverable {
   }

   getHumanReadableRuleStack(this: MixedInParser): string[] {
     return map(this.RULE_STACK, (currShortName) =>
-      this.shortRuleNameToFullName(currShortName)
+      this.shortRuleNameToFullName(currShortName),
     );
   }
 }
@@ -419,7 +419,7 @@ export function attemptInRepetitionRecovery(
   dslMethodIdx: number,
   prodOccurrence: number,
   nextToksWalker: typeof AbstractNextTerminalAfterProductionWalker,
-  notStuck?: boolean
+  notStuck?: boolean,
 ): void {
   const key = this.getKeyForAutomaticLookahead(dslMethodIdx, prodOccurrence);
   let firstAfterRepInfo = this.firstAfterRepMap[key];
@@ -457,7 +457,7 @@ export function attemptInRepetitionRecovery(
     this.shouldInRepetitionRecoveryBeTried(
       expectTokAfterLastMatch,
       nextTokIdx,
-      notStuck
+      notStuck,
     )
   ) {
     // TODO: performance optimization: instead of passing the original args here, we modify
@@ -467,7 +467,7 @@ export function attemptInRepetitionRecovery(
       prodFunc,
       args,
       lookaheadFunc,
-      expectTokAfterLastMatch
+      expectTokAfterLastMatch,
     );
   }
 }
diff --git a/packages/chevrotain/src/parse/parser/traits/tree_builder.ts b/packages/chevrotain/src/parse/parser/traits/tree_builder.ts
index 3330e6b7c..9dfd025ce 100644
--- a/packages/chevrotain/src/parse/parser/traits/tree_builder.ts
+++ b/packages/chevrotain/src/parse/parser/traits/tree_builder.ts
@@ -32,11 +32,11 @@ export class TreeBuilder {
   // dynamically assigned Methods
   setNodeLocationFromNode: (
     nodeLocation: CstNodeLocation,
-    locationInformation: CstNodeLocation
+    locationInformation: CstNodeLocation,
   ) => void;
   setNodeLocationFromToken: (
     nodeLocation: CstNodeLocation,
-    locationInformation: CstNodeLocation
+    locationInformation: CstNodeLocation,
   ) => void;
   cstPostRule: (this: MixedInParser, ruleCstNode: CstNode) => void;
@@ -93,7 +93,7 @@ export class TreeBuilder {
       this.setInitialNodeLocation = noop;
     } else {
       throw Error(
-        `Invalid config option: "${config.nodeLocationTracking}"`
+        `Invalid config option: "${config.nodeLocationTracking}"`,
       );
     }
   }
@@ -101,7 +101,7 @@ export class TreeBuilder {
   setInitialNodeLocationOnlyOffsetRecovery(
     this: MixedInParser,
-    cstNode: any
+    cstNode: any,
   ): void {
     cstNode.location = {
       startOffset: NaN,
@@ -111,7 +111,7 @@ export class TreeBuilder {
   setInitialNodeLocationOnlyOffsetRegular(
     this: MixedInParser,
-    cstNode: any
+    cstNode: any,
   ): void {
     cstNode.location = {
       // without error recovery the starting Location of a new CstNode is guaranteed
@@ -204,7 +204,7 @@ export class TreeBuilder {
   cstPostTerminal(
     this: MixedInParser,
     key: string,
-    consumedToken: IToken
+    consumedToken: IToken,
   ): void {
     const rootCst = this.CST_STACK[this.CST_STACK.length - 1];
     addTerminalToCst(rootCst, consumedToken, key);
@@ -215,7 +215,7 @@ export class TreeBuilder {
   cstPostNonTerminal(
     this: MixedInParser,
     ruleCstResult: CstNode,
-    ruleName: string
+    ruleName: string,
   ): void {
     const preCstNode = this.CST_STACK[this.CST_STACK.length - 1];
     addNoneTerminalToCst(preCstNode, ruleName, ruleCstResult);
@@ -224,14 +224,14 @@ export class TreeBuilder {
   }

   getBaseCstVisitorConstructor(
-    this: MixedInParser
+    this: MixedInParser,
   ): {
     new (...args: any[]): ICstVisitor;
   } {
     if (isUndefined(this.baseCstVisitorConstructor)) {
       const newBaseCstVisitorConstructor = createBaseSemanticVisitorConstructor(
         this.className,
-        keys(this.gastProductionsCache)
+        keys(this.gastProductionsCache),
       );
       this.baseCstVisitorConstructor = newBaseCstVisitorConstructor;
       return newBaseCstVisitorConstructor;
@@ -241,7 +241,7 @@ export class TreeBuilder {
   }

   getBaseCstVisitorConstructorWithDefaults(
-    this: MixedInParser
+    this: MixedInParser,
   ): {
     new (...args: any[]): ICstVisitor;
   } {
@@ -249,7 +249,7 @@ export class TreeBuilder {
       const newConstructor = createBaseVisitorConstructorWithDefaults(
         this.className,
         keys(this.gastProductionsCache),
-        this.getBaseCstVisitorConstructor()
+        this.getBaseCstVisitorConstructor(),
       );
       this.baseCstVisitorWithDefaultsConstructor = newConstructor;
       return newConstructor;
diff --git a/packages/chevrotain/src/parse/parser/utils/apply_mixins.ts b/packages/chevrotain/src/parse/parser/utils/apply_mixins.ts
index bdedccc6a..8cb088c70 100644
--- a/packages/chevrotain/src/parse/parser/utils/apply_mixins.ts
+++ b/packages/chevrotain/src/parse/parser/utils/apply_mixins.ts
@@ -8,7 +8,7 @@ export function applyMixins(derivedCtor: any, baseCtors: any[]) {
       const basePropDescriptor = Object.getOwnPropertyDescriptor(
         baseProto,
-        propName
+        propName,
       );
       // Handle Accessors
       if (
@@ -18,7 +18,7 @@ export function applyMixins(derivedCtor: any, baseCtors: any[]) {
         Object.defineProperty(
           derivedCtor.prototype,
           propName,
-          basePropDescriptor
+          basePropDescriptor,
         );
       } else {
         derivedCtor.prototype[propName] = baseCtor.prototype[propName];
diff --git a/packages/chevrotain/src/scan/lexer.ts b/packages/chevrotain/src/scan/lexer.ts
index 47162b2a8..35bac099d 100644
--- a/packages/chevrotain/src/scan/lexer.ts
+++ b/packages/chevrotain/src/scan/lexer.ts
@@ -89,7 +89,7 @@ export function analyzeTokenTypes(
     useSticky?: boolean;
     safeMode?: boolean;
     tracer?: (msg: string, action: () => void) => void;
-  }
+  },
 ): IAnalyzeResult {
   options = defaults(options, {
     useSticky: SUPPORT_STICKY,
@@ -157,7 +157,7 @@ export function analyzeTokenTypes(
             "w",
             "W",
           ],
-          regExpSource[1]
+          regExpSource[1],
         )
       ) {
         // escaped meta Characters: /\+/ /\[/
@@ -183,7 +183,7 @@ export function analyzeTokenTypes(
         } else {
           const escapedRegExpString = currPattern.replace(
             /[\\^$.*+?()[\]{}|]/g,
-            "\\$&"
+            "\\$&",
          );
           const wrappedRegExp = new RegExp(escapedRegExpString);
           return options.useSticky
@@ -193,7 +193,7 @@ export function analyzeTokenTypes(
         } else {
           throw Error("non exhaustive match");
         }
-      }
+      },
     );
   });
@@ -205,7 +205,7 @@ export function analyzeTokenTypes(
   tracer("misc mapping", () => {
     patternIdxToType = map(
       onlyRelevantTypes,
-      (currType) => currType.tokenTypeIdx!
+      (currType) => currType.tokenTypeIdx!,
     );

     patternIdxToGroup = map(onlyRelevantTypes, (clazz: any) => {
@@ -235,18 +235,18 @@ export function analyzeTokenTypes(
     patternIdxToPushMode = map(
       onlyRelevantTypes,
-      (clazz: any) => clazz.PUSH_MODE
+      (clazz: any) => clazz.PUSH_MODE,
     );

     patternIdxToPopMode = map(onlyRelevantTypes, (clazz: any) =>
-      has(clazz, "POP_MODE")
+      has(clazz, "POP_MODE"),
     );
   });

   let patternIdxToCanLineTerminator: boolean[];
   tracer("Line Terminator Handling", () => {
     const lineTerminatorCharCodes = getCharCodes(
-      options.lineTerminatorCharacters!
+      options.lineTerminatorCharacters!,
     );
     patternIdxToCanLineTerminator = map(onlyRelevantTypes, (tokType) => false);
     if (options.positionTracking !== "onlyOffset") {
@@ -258,7 +258,7 @@ export function analyzeTokenTypes(
             checkLineBreaksIssues(tokType, lineTerminatorCharCodes) === false &&
             canMatchCharCode(
               lineTerminatorCharCodes,
-              tokType.PATTERN as RegExp | string
+              tokType.PATTERN as RegExp | string,
             )
           );
         }
@@ -283,7 +283,7 @@ export function analyzeTokenTypes(
         }
         return acc;
       },
-      {} as { [groupName: string]: IToken[] }
+      {} as { [groupName: string]: IToken[] },
     );

     patternIdxToConfig = map(
@@ -301,7 +301,7 @@ export function analyzeTokenTypes(
           tokenTypeIdx: patternIdxToType[idx],
           tokenType: onlyRelevantTypes[idx],
         };
-      }
+      },
     );
   });
@@ -335,7 +335,7 @@ export function analyzeTokenTypes(
             addToMapOfArrays(
               result,
               currOptimizedIdx,
-              patternIdxToConfig[idx]
+              patternIdxToConfig[idx],
             );
           }
         });
@@ -348,13 +348,13 @@ export function analyzeTokenTypes(
             `\tUnable to analyze < ${currTokType.PATTERN.toString()} > pattern.\n` +
               "\tThe regexp unicode flag is not currently supported by the regexp-to-ast library.\n" +
               "\tThis will disable the lexer's first char optimizations.\n" +
-              "\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#UNICODE_OPTIMIZE"
+              "\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#UNICODE_OPTIMIZE",
           );
         }
       } else {
         const optimizedCodes = getOptimizedStartCodesIndices(
           currTokType.PATTERN,
-          options.ensureOptimizations
+          options.ensureOptimizations,
         );
         /* istanbul ignore if */
         // start code will only be empty given an empty regExp or failure of regexp-to-ast library
@@ -375,7 +375,7 @@ export function analyzeTokenTypes(
           `${failedOptimizationPrefixMsg}` +
             `\tTokenType: <${currTokType.name}> is using a custom token pattern without providing parameter.\n` +
             "\tThis will disable the lexer's first char optimizations.\n" +
-            "\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#CUSTOM_OPTIMIZE"
+            "\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#CUSTOM_OPTIMIZE",
         );
       }
       canBeOptimized = false;
@@ -383,7 +383,7 @@ export function analyzeTokenTypes(
         return result;
       },
-      [] as { [charCode: number]: IPatternConfig[] }
+      [] as { [charCode: number]: IPatternConfig[] },
     );
   });
 }
@@ -399,7 +399,7 @@ export function analyzeTokenTypes(
 export function validatePatterns(
   tokenTypes: TokenType[],
-  validModesNames: string[]
+  validModesNames: string[],
 ): ILexerDefinitionError[] {
   let errors: ILexerDefinitionError[] = [];
@@ -415,7 +415,7 @@ export function validatePatterns(
   errors = errors.concat(findInvalidGroupType(validTokenTypes));

   errors = errors.concat(
-    findModesThatDoNotExist(validTokenTypes, validModesNames)
+    findModesThatDoNotExist(validTokenTypes, validModesNames),
   );

   errors = errors.concat(findUnreachablePatterns(validTokenTypes));
@@ -424,11 +424,11 @@ export function validatePatterns(
 }

 function validateRegExpPattern(
-  tokenTypes: TokenType[]
+  tokenTypes: TokenType[],
 ): ILexerDefinitionError[] {
   let errors: ILexerDefinitionError[] = [];
   const withRegExpPatterns = filter(tokenTypes, (currTokType) =>
-    isRegExp(currTokType[PATTERN])
+    isRegExp(currTokType[PATTERN]),
   );

   errors = errors.concat(findEndOfInputAnchor(withRegExpPatterns));
@@ -450,7 +450,7 @@ export interface ILexerFilterResult {
 }

 export function findMissingPatterns(
-  tokenTypes: TokenType[]
+  tokenTypes: TokenType[],
 ): ILexerFilterResult {
   const tokenTypesWithMissingPattern = filter(tokenTypes, (currType) => {
     return !has(currType, PATTERN);
@@ -472,7 +472,7 @@ export function findMissingPatterns(
 }

 export function findInvalidPatterns(
-  tokenTypes: TokenType[]
+  tokenTypes: TokenType[],
 ): ILexerFilterResult {
   const tokenTypesWithInvalidPattern = filter(tokenTypes, (currType) => {
     const pattern = currType[PATTERN];
@@ -503,7 +503,7 @@ export function findInvalidPatterns(
 const end_of_input = /[^\\][$]/;

 export function findEndOfInputAnchor(
-  tokenTypes: TokenType[]
+  tokenTypes: TokenType[],
 ): ILexerDefinitionError[] {
   class EndAnchorFinder extends BaseRegExpVisitor {
     found = false;
@@ -547,7 +547,7 @@ export function findEndOfInputAnchor(
 }

 export function findEmptyMatchRegExps(
-  tokenTypes: TokenType[]
+  tokenTypes: TokenType[],
 ): ILexerDefinitionError[] {
   const matchesEmptyString = filter(tokenTypes, (currType) => {
     const pattern = currType.PATTERN as RegExp;
@@ -571,7 +571,7 @@ export function findEmptyMatchRegExps(
 const start_of_input = /[^\\[][\^]|^\^/;

 export function findStartOfInputAnchor(
-  tokenTypes: TokenType[]
+  tokenTypes: TokenType[],
 ): ILexerDefinitionError[] {
   class StartAnchorFinder extends BaseRegExpVisitor {
     found = false;
@@ -614,7 +614,7 @@ export function findStartOfInputAnchor(
 }

 export function findUnsupportedFlags(
-  tokenTypes: TokenType[]
+  tokenTypes: TokenType[],
 ): ILexerDefinitionError[] {
   const invalidFlags = filter(tokenTypes, (currType) => {
     const pattern = currType[PATTERN];
@@ -637,7 +637,7 @@ export function findUnsupportedFlags(
 // This can only test for identical duplicate RegExps, not semantically equivalent ones.
 export function findDuplicatePatterns(
-  tokenTypes: TokenType[]
+  tokenTypes: TokenType[],
 ): ILexerDefinitionError[] {
   const found: TokenType[] = [];
   let identicalPatterns = map(tokenTypes, (outerType: any) => {
@@ -657,7 +657,7 @@ export function findDuplicatePatterns(
         }
         return result;
       },
-      [] as TokenType[]
+      [] as TokenType[],
     );
   });
@@ -677,7 +677,7 @@ export function findDuplicatePatterns(
       message:
         `The same RegExp pattern ->${dupPatternSrc}<-` +
         `has been used in all of the following Token Types: ${tokenTypeNames.join(
-          ", "
+          ", ",
        )} <-`,
       type: LexerDefinitionErrorType.DUPLICATE_PATTERNS_FOUND,
       tokenTypes: setOfIdentical,
@@ -688,7 +688,7 @@ export function findDuplicatePatterns(
 }

 export function findInvalidGroupType(
-  tokenTypes: TokenType[]
+  tokenTypes: TokenType[],
 ): ILexerDefinitionError[] {
   const invalidTypes = filter(tokenTypes, (clazz: any) => {
     if (!has(clazz, "GROUP")) {
@@ -715,7 +715,7 @@ export function findInvalidGroupType(
 export function findModesThatDoNotExist(
   tokenTypes: TokenType[],
-  validModes: string[]
+  validModes: string[],
 ): ILexerDefinitionError[] {
   const invalidModes = filter(tokenTypes, (clazz: any) => {
     return (
@@ -738,7 +738,7 @@ export function findModesThatDoNotExist(
 }

 export function findUnreachablePatterns(
-  tokenTypes: TokenType[]
+  tokenTypes: TokenType[],
 ): ILexerDefinitionError[] {
   const errors: ILexerDefinitionError[] = [];
@@ -760,7 +760,7 @@ export function findUnreachablePatterns(
       }
       return result;
     },
-    [] as { str: string; idx: number; tokenType: TokenType }[]
+    [] as { str: string; idx: number; tokenType: TokenType }[],
   );

   forEach(tokenTypes, (tokType, testIdx) => {
@@ -840,7 +840,7 @@ export function addStickyFlag(pattern: RegExp): RegExp {
 export function performRuntimeChecks(
   lexerDefinition: IMultiModeLexerDefinition,
   trackLines: boolean,
-  lineTerminatorCharacters: (number | string)[]
+  lineTerminatorCharacters: (number | string)[],
 ): ILexerDefinitionError[] {
   const errors: ILexerDefinitionError[] = [];
@@ -913,7 +913,7 @@ export function performRuntimeChecks(
 export function performWarningRuntimeChecks(
   lexerDefinition: IMultiModeLexerDefinition,
   trackLines: boolean,
-  lineTerminatorCharacters: (number | string)[]
+  lineTerminatorCharacters: (number | string)[],
 ): ILexerDefinitionError[] {
   const warnings = [];
   let hasAnyLineBreak = false;
@@ -921,7 +921,7 @@ export function performWarningRuntimeChecks(
   const concreteTokenTypes = reject(
     allTokenTypes,
-    (currType) => currType[PATTERN] === Lexer.NA
+    (currType) => currType[PATTERN] === Lexer.NA,
   );
   const terminatorCharCodes = getCharCodes(lineTerminatorCharacters);
   if (trackLines) {
@@ -1042,7 +1042,7 @@ export const LineTerminatorOptimizedTester: ILineTerminatorsTester = {
 function checkLineBreaksIssues(
   tokType: TokenType,
-  lineTerminatorCharCodes: number[]
+  lineTerminatorCharCodes: number[],
 ):
   | {
       issue:
@@ -1088,7 +1088,7 @@ export function buildLineBreakIssueMessage(
       | LexerDefinitionErrorType.IDENTIFY_TERMINATOR
       | LexerDefinitionErrorType.CUSTOM_LINE_BREAK;
     errMsg?: string;
-  }
+  },
 ): string {
   /* istanbul ignore else */
   if (details.issue === LexerDefinitionErrorType.IDENTIFY_TERMINATOR) {
@@ -1124,7 +1124,7 @@ function getCharCodes(charsOrCodes: (number | string)[]): number[] {
 function addToMapOfArrays(
   map: Record,
   key: number,
-  value: T
+  value: T,
 ): void {
   if (map[key] === undefined) {
     map[key] = [value];
diff --git a/packages/chevrotain/src/scan/lexer_errors_public.ts b/packages/chevrotain/src/scan/lexer_errors_public.ts
index 9afb21633..5faf94ee7 100644
--- a/packages/chevrotain/src/scan/lexer_errors_public.ts
+++ b/packages/chevrotain/src/scan/lexer_errors_public.ts
@@ -10,11 +10,11 @@ export const defaultLexerErrorProvider: ILexerErrorMessageProvider = {
     startOffset: number,
     length: number,
     line?: number,
-    column?: number
+    column?: number,
   ): string {
     return (
       `unexpected character: ->${fullText.charAt(
-        startOffset
+        startOffset,
       )}<- at offset: ${startOffset},` + ` skipped ${length} characters.`
     );
   },
diff --git a/packages/chevrotain/src/scan/lexer_public.ts b/packages/chevrotain/src/scan/lexer_public.ts
index 961ee3b43..d8927f3aa 100644
--- a/packages/chevrotain/src/scan/lexer_public.ts
+++ b/packages/chevrotain/src/scan/lexer_public.ts
@@ -117,12 +117,12 @@ export class Lexer {
   constructor(
     protected lexerDefinition: TokenType[] | IMultiModeLexerDefinition,
-    config: ILexerConfig = DEFAULT_LEXER_CONFIG
+    config: ILexerConfig = DEFAULT_LEXER_CONFIG,
   ) {
     if (typeof config === "boolean") {
       throw Error(
         "The second argument to the Lexer constructor is now an ILexerConfig Object.\n" +
-          "a boolean 2nd argument is no longer supported"
+          "a boolean 2nd argument is no longer supported",
       );
     }
@@ -156,19 +156,19 @@ export class Lexer {
     ) {
       throw Error(
         "Error: Missing property on the Lexer config.\n" +
-          "\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#MISSING_LINE_TERM_CHARS"
+          "\tFor details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#MISSING_LINE_TERM_CHARS",
       );
     }

     if (config.safeMode && config.ensureOptimizations) {
       throw Error(
-        '"safeMode" and "ensureOptimizations" flags are mutually exclusive.'
+ '"safeMode" and "ensureOptimizations" flags are mutually exclusive.', ); } this.trackStartLines = /full|onlyStart/i.test( - this.config.positionTracking + this.config.positionTracking, ); this.trackEndLines = /full/i.test(this.config.positionTracking); @@ -191,8 +191,8 @@ export class Lexer { performRuntimeChecks( actualDefinition, this.trackStartLines, - this.config.lineTerminatorCharacters - ) + this.config.lineTerminatorCharacters, + ), ); }); @@ -201,8 +201,8 @@ export class Lexer { performWarningRuntimeChecks( actualDefinition, this.trackStartLines, - this.config.lineTerminatorCharacters - ) + this.config.lineTerminatorCharacters, + ), ); }); } @@ -217,7 +217,7 @@ export class Lexer { forEach(actualDefinition.modes, (currModeValue, currModeName) => { actualDefinition.modes[currModeName] = reject( currModeValue, - (currTokType) => isUndefined(currTokType) + (currTokType) => isUndefined(currTokType), ); }); @@ -232,7 +232,7 @@ export class Lexer { if (this.config.skipValidations === false) { this.TRACE_INIT(`validatePatterns`, () => { this.lexerDefinitionErrors = this.lexerDefinitionErrors.concat( - validatePatterns(currModDef, allModeNames) + validatePatterns(currModDef, allModeNames), ); }); } @@ -264,7 +264,7 @@ export class Lexer { this.emptyGroups = assign( {}, this.emptyGroups, - currAnalyzeResult.emptyGroups + currAnalyzeResult.emptyGroups, ) as any; this.hasCustom = currAnalyzeResult.hasCustom || this.hasCustom; @@ -273,7 +273,7 @@ export class Lexer { currAnalyzeResult.canBeOptimized; } }); - } + }, ); this.defaultMode = actualDefinition.defaultMode; @@ -286,10 +286,10 @@ export class Lexer { return error.message; }); const allErrMessagesString = allErrMessages.join( - "-----------------------\n" + "-----------------------\n", ); throw new Error( - "Errors detected in definition of Lexer:\n" + allErrMessagesString + "Errors detected in definition of Lexer:\n" + allErrMessagesString, ); } @@ -330,7 +330,7 @@ export class Lexer { this.createTokenInstance = this.createOffsetOnlyToken; } else { throw Error( - `Invalid config option: "${this.config.positionTracking}"` + `Invalid config option: "${this.config.positionTracking}"`, ); } @@ -352,16 +352,16 @@ export class Lexer { } return cannotBeOptimized; }, - [] as string[] + [] as string[], ); if (config.ensureOptimizations && !isEmpty(unOptimizedModes)) { throw Error( `Lexer Modes: < ${unOptimizedModes.join( - ", " + ", ", )} > cannot be optimized.\n` + '\t Disable the "ensureOptimizations" lexer config flag to silently ignore this and run the lexer in an un-optimized mode.\n' + - "\t Or inspect the console log for details on how to resolve these issues." + "\t Or inspect the console log for details on how to resolve these issues.", ); } }); @@ -378,18 +378,18 @@ export class Lexer { public tokenize( text: string, - initialMode: string = this.defaultMode + initialMode: string = this.defaultMode, ): ILexingResult { if (!isEmpty(this.lexerDefinitionErrors)) { const allErrMessages = map(this.lexerDefinitionErrors, (error) => { return error.message; }); const allErrMessagesString = allErrMessages.join( - "-----------------------\n" + "-----------------------\n", ); throw new Error( "Unable to Tokenize because Errors detected in definition of Lexer:\n" + - allErrMessagesString + allErrMessagesString, ); } @@ -475,7 +475,7 @@ export class Lexer { // thus the pop is ignored, an error will be created and the lexer will continue parsing in the previous mode. 
       const msg = this.config.errorMessageProvider.buildUnableToPopLexerModeMessage(
-        popToken
+        popToken,
       );

       errors.push({
@@ -554,7 +554,7 @@ export class Lexer {
             orgText,
             offset,
             matchedTokens,
-            groups
+            groups,
           );
           if (match !== null) {
             matchedImage = match[0];
@@ -589,7 +589,7 @@ export class Lexer {
                 orgText,
                 offset,
                 matchedTokens,
-                groups
+                groups,
               );
               if (match !== null) {
                 matchAltImage = match[0];
@@ -606,7 +606,7 @@ export class Lexer {
               matchAltImage = this.match(
                 longerAltPattern as RegExp,
                 text,
-                offset
+                offset,
               );
             }
@@ -639,7 +639,7 @@ export class Lexer {
             currConfig.tokenType,
             line,
             column,
-            imageLength
+            imageLength,
           );
           this.handlePayload(newToken, payload);
@@ -649,7 +649,7 @@ export class Lexer {
             matchedTokensIndex = this.addToken(
               matchedTokens,
               matchedTokensIndex,
-              newToken
+              newToken,
             );
           } else {
             groups[group].push(newToken);
@@ -684,7 +684,7 @@ export class Lexer {
               numOfLTsInMatch,
               line,
               column,
-              imageLength
+              imageLength,
             );
           }
         }
@@ -718,7 +718,7 @@ export class Lexer {
           orgText,
           offset,
           matchedTokens,
-          groups
+          groups,
         ) !== null;
       } else {
         this.updateLastIndex(currPattern as RegExp, offset);
@@ -738,7 +738,7 @@ export class Lexer {
           errorStartOffset,
           errLength,
           errorLine,
-          errorColumn
+          errorColumn,
         );
         errors.push({
           offset: errorStartOffset,
@@ -772,7 +772,7 @@ export class Lexer {
     config: IPatternConfig,
     pop_mode: (tok: IToken) => void,
     push_mode: (this: Lexer, pushMode: string) => void,
-    newToken: IToken
+    newToken: IToken,
   ) {
     if (config.pop === true) {
       // need to save the PUSH_MODE property as if the mode is popped
@@ -803,7 +803,7 @@ export class Lexer {
     numOfLTsInMatch: number,
     line: number,
     column: number,
-    imageLength: number
+    imageLength: number,
   ): void {
     let lastCharIsLT, fixForEndingInLT;
     if (group !== undefined) {
@@ -833,7 +833,7 @@ export class Lexer {
     image: string,
     startOffset: number,
     tokenTypeIdx: number,
-    tokenType: TokenType
+    tokenType: TokenType,
   ) {
     return {
       image,
@@ -849,7 +849,7 @@ export class Lexer {
     tokenTypeIdx: number,
     tokenType: TokenType,
     startLine: number,
-    startColumn: number
+    startColumn: number,
   ) {
     return {
       image,
@@ -868,7 +868,7 @@ export class Lexer {
     tokenType: TokenType,
     startLine: number,
     startColumn: number,
-    imageLength: number
+    imageLength: number,
   ): IToken {
     return {
       image,
@@ -888,13 +888,13 @@ export class Lexer {
   private addToken!: (
     tokenVector: IToken[],
     index: number,
-    tokenToAdd: IToken
+    tokenToAdd: IToken,
   ) => number;

   private addTokenUsingPush(
     tokenVector: IToken[],
     index: number,
-    tokenToAdd: IToken
+    tokenToAdd: IToken,
   ): number {
     tokenVector.push(tokenToAdd);
     return index;
@@ -903,7 +903,7 @@ export class Lexer {
   private addTokenUsingMemberAccess(
     tokenVector: IToken[],
     index: number,
-    tokenToAdd: IToken
+    tokenToAdd: IToken,
   ): number {
     tokenVector[index] = tokenToAdd;
     index++;
@@ -925,13 +925,13 @@ export class Lexer {
   private match!: (
     pattern: RegExp,
     text: string,
-    offset: number
+    offset: number,
   ) => string | null;

   private matchWithTest(
     pattern: RegExp,
     text: string,
-    offset: number
+    offset: number,
   ): string | null {
     const found = pattern.test(text);
     if (found === true) {
diff --git a/packages/chevrotain/src/scan/reg_exp.ts b/packages/chevrotain/src/scan/reg_exp.ts
index 068fba847..733febf89 100644
--- a/packages/chevrotain/src/scan/reg_exp.ts
+++ b/packages/chevrotain/src/scan/reg_exp.ts
@@ -19,14 +19,14 @@ export const failedOptimizationPrefixMsg =
 export function getOptimizedStartCodesIndices(
   regExp: RegExp,
-  ensureOptimizations = false
+  ensureOptimizations = false,
 ): number[] {
   try {
     const ast = getRegExpAst(regExp);
     const firstChars = firstCharOptimizedIndices(
       ast.value,
       {},
-      ast.flags.ignoreCase
+      ast.flags.ignoreCase,
     );
     return firstChars;
   } catch (e) {
@@ -40,7 +40,7 @@ export function getOptimizedStartCodesIndices(
           `\tUnable to optimize: < ${regExp.toString()} >\n` +
             "\tComplement Sets cannot be automatically optimized.\n" +
             "\tThis will disable the lexer's first char optimizations.\n" +
-            "\tSee: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#COMPLEMENT for details."
+            "\tSee: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#COMPLEMENT for details.",
         );
       }
     } else {
@@ -55,7 +55,7 @@ export function getOptimizedStartCodesIndices(
           `\tFailed parsing: < ${regExp.toString()} >\n` +
           `\tUsing the @chevrotain/regexp-to-ast library\n` +
           "\tPlease open an issue at: https://github.com/chevrotain/chevrotain/issues" +
-          msgSuffix
+          msgSuffix,
       );
     }
   }
@@ -66,7 +66,7 @@ export function getOptimizedStartCodesIndices(
 export function firstCharOptimizedIndices(
   ast: ASTNode,
   result: { [charCode: number]: number },
-  ignoreCase: boolean
+  ignoreCase: boolean,
 ): number[] {
   switch (ast.type) {
     case "Disjunction":
@@ -187,7 +187,7 @@ export function firstCharOptimizedIndices(
 function addOptimizedIdxToResult(
   code: number,
   result: { [charCode: number]: number },
-  ignoreCase: boolean
+  ignoreCase: boolean,
 ) {
   const optimizedCharIdx = charCodeToOptimizedIndex(code);
   result[optimizedCharIdx] = optimizedCharIdx;
@@ -199,7 +199,7 @@ function addOptimizedIdxToResult(
 function handleIgnoreCase(
   code: number,
-  result: { [charCode: number]: number }
+  result: { [charCode: number]: number },
 ) {
   const char = String.fromCharCode(code);
   const upperChar = char.toUpperCase();
@@ -211,7 +211,7 @@ function handleIgnoreCase(
     const lowerChar = char.toLowerCase();
     if (lowerChar !== char) {
       const optimizedCharIdx = charCodeToOptimizedIndex(
-        lowerChar.charCodeAt(0)
+        lowerChar.charCodeAt(0),
       );
       result[optimizedCharIdx] = optimizedCharIdx;
     }
@@ -228,7 +228,7 @@ function findCode(setNode: Set, targetCharCodes: number[]) {
     return (
       find(
         targetCharCodes,
-        (targetCode) => range.from <= targetCode && targetCode <= range.to
+        (targetCode) => range.from <= targetCode && targetCode <= range.to,
       ) !== undefined
     );
   }
@@ -298,7 +298,7 @@ class CharCodeFinder extends BaseRegExpVisitor {
 export function canMatchCharCode(
   charCodes: number[],
-  pattern: RegExp | string
+  pattern: RegExp | string,
 ) {
   if (pattern instanceof RegExp) {
     const ast = getRegExpAst(pattern);
diff --git a/packages/chevrotain/src/scan/tokens.ts b/packages/chevrotain/src/scan/tokens.ts
index c0ce5db5f..03bb3e480 100644
--- a/packages/chevrotain/src/scan/tokens.ts
+++ b/packages/chevrotain/src/scan/tokens.ts
@@ -14,7 +14,7 @@ import { IToken, TokenType } from "@chevrotain/types";
 export function tokenStructuredMatcher(
   tokInstance: IToken,
-  tokConstructor: TokenType
+  tokConstructor: TokenType,
 ) {
   const instanceType = tokInstance.tokenTypeIdx;
   if (instanceType === tokConstructor.tokenTypeIdx) {
@@ -31,7 +31,7 @@ export function tokenStructuredMatcher(
 // Being so tiny it is much more likely to be in-lined and this avoid the function call overhead
 export function tokenStructuredMatcherNoCategories(
   token: IToken,
-  tokType: TokenType
+  tokType: TokenType,
 ) {
   return token.tokenTypeIdx === tokType.tokenTypeIdx;
 }
@@ -62,7 +62,7 @@ export function expandCategories(tokenTypes: TokenType[]): TokenType[] {
   let searching = true;
   while (searching) {
     categories = compact(
-      flatten(map(categories, (currTokType) => currTokType.CATEGORIES))
+      flatten(map(categories, (currTokType) => currTokType.CATEGORIES)),
     );

     const newCategories = difference(categories, result);
@@ -115,7 +115,7 @@ export function assignCategoriesTokensProp(tokenTypes: TokenType[]): void {
     currTokType.categoryMatches = [];
     forEach(currTokType.categoryMatchesMap!, (val, key) => {
       currTokType.categoryMatches!.push(
-        tokenIdxToClass[key as unknown as number].tokenTypeIdx!
+        tokenIdxToClass[key as unknown as number].tokenTypeIdx!,
       );
     });
   });
@@ -129,7 +129,7 @@ export function assignCategoriesMapProp(tokenTypes: TokenType[]): void {
 export function singleAssignCategoriesToksMap(
   path: TokenType[],
-  nextNode: TokenType
+  nextNode: TokenType,
 ): void {
   forEach(path, (pathNode) => {
     nextNode.categoryMatchesMap![pathNode.tokenTypeIdx!] = true;
@@ -157,7 +157,7 @@ export function hasExtendingTokensTypesProperty(tokType: TokenType): boolean {
 }

 export function hasExtendingTokensTypesMapProperty(
-  tokType: TokenType
+  tokType: TokenType,
 ): boolean {
   return has(tokType, "categoryMatchesMap");
 }
diff --git a/packages/chevrotain/src/scan/tokens_public.ts b/packages/chevrotain/src/scan/tokens_public.ts
index 50d3b77d2..98c556098 100644
--- a/packages/chevrotain/src/scan/tokens_public.ts
+++ b/packages/chevrotain/src/scan/tokens_public.ts
@@ -16,7 +16,7 @@ export function tokenName(tokType: TokenType): string {
 }

 export function hasTokenLabel(
-  obj: TokenType
+  obj: TokenType,
 ): obj is TokenType & Pick, "LABEL"> {
   return isString(obj.LABEL) && obj.LABEL !== "";
 }
@@ -101,7 +101,7 @@ export function createTokenInstance(
   startLine: number,
   endLine: number,
   startColumn: number,
-  endColumn: number
+  endColumn: number,
 ): IToken {
   return {
     image,
diff --git a/packages/chevrotain/src/text/range.ts b/packages/chevrotain/src/text/range.ts
index 01a7e5100..cf50d7225 100644
--- a/packages/chevrotain/src/text/range.ts
+++ b/packages/chevrotain/src/text/range.ts
@@ -14,7 +14,10 @@ export interface IRange {
 }

 export class Range implements IRange {
-  constructor(public start: number, public end: number) {
+  constructor(
+    public start: number,
+    public end: number,
+  ) {
     if (!isValidRange(start, end)) {
       throw new Error("INVALID RANGE");
     }
diff --git a/packages/chevrotain/test/deprecation_spec.ts b/packages/chevrotain/test/deprecation_spec.ts
index a01161825..2a71993ec 100644
--- a/packages/chevrotain/test/deprecation_spec.ts
+++ b/packages/chevrotain/test/deprecation_spec.ts
@@ -7,7 +7,7 @@ describe("Chevrotain's runtime deprecation checks", () => {
     expect(() => new Parser()).to.throw("The Parser class has been deprecated");
     expect(() => new Parser()).to.throw("CstParser or EmbeddedActionsParser");
     expect(() => new Parser()).to.throw(
-      "https://chevrotain.io/docs/changes/BREAKING_CHANGES.html#_7-0-0"
+      "https://chevrotain.io/docs/changes/BREAKING_CHANGES.html#_7-0-0",
     );
   });
@@ -21,7 +21,7 @@ describe("Chevrotain's runtime deprecation checks", () => {
     }

     expect(() => new StaticSelfAnalysisParser()).to.throw(
-      "The **static** `performSelfAnalysis` method has been deprecated"
+      "The **static** `performSelfAnalysis` method has been deprecated",
     );
   });
 });
diff --git a/packages/chevrotain/test/diagrams/render_spec.ts b/packages/chevrotain/test/diagrams/render_spec.ts
index 34f996258..88d9bc1f0 100644
--- a/packages/chevrotain/test/diagrams/render_spec.ts
+++ b/packages/chevrotain/test/diagrams/render_spec.ts
@@ -44,7 +44,7 @@ describe("The Chevrotain diagrams rendering APIs", function () {
           done(e);
         }
       },
-      false
+      false,
     );
   });
@@ -74,7 +74,7 @@ describe("The Chevrotain diagrams rendering APIs", function () {
@@ describe("The Chevrotain diagrams rendering APIs", function () { done(e); } }, - false + false, ); }); }); diff --git a/packages/chevrotain/test/full_flow/backtracking/backtracking_parser.ts b/packages/chevrotain/test/full_flow/backtracking/backtracking_parser.ts index 9238141ac..b2b2ef4e0 100644 --- a/packages/chevrotain/test/full_flow/backtracking/backtracking_parser.ts +++ b/packages/chevrotain/test/full_flow/backtracking/backtracking_parser.ts @@ -62,7 +62,7 @@ export class BackTrackingParser extends EmbeddedActionsParser { SemiColonTok, IdentTok, ], - configuration + configuration, ); // DOCS: The call to performSelfAnalysis needs to happen after all the RULEs have been defined // The typescript compiler places the constructor body last after initializations in the class's body @@ -76,14 +76,14 @@ export class BackTrackingParser extends EmbeddedActionsParser { public withEqualsStatement = this.RULE( "withEqualsStatement", this.parseWithEqualsStatement, - { recoveryValueFunc: INVALID(RET_TYPE.INVALID_WITH_EQUALS) } + { recoveryValueFunc: INVALID(RET_TYPE.INVALID_WITH_EQUALS) }, ); public withDefaultStatement = this.RULE( "withDefaultStatement", this.parseWithDefaultStatement, { recoveryValueFunc: INVALID(RET_TYPE.INVALID_WITH_DEFAULT), - } + }, ); public qualifiedName = this.RULE("qualifiedName", this.parseQualifiedName, { recoveryValueFunc: INVALID(RET_TYPE.INVALID_FQN), diff --git a/packages/chevrotain/test/full_flow/ecma_quirks/ecma_quirks.ts b/packages/chevrotain/test/full_flow/ecma_quirks/ecma_quirks.ts index a24a8ccb5..aa64e168a 100644 --- a/packages/chevrotain/test/full_flow/ecma_quirks/ecma_quirks.ts +++ b/packages/chevrotain/test/full_flow/ecma_quirks/ecma_quirks.ts @@ -79,7 +79,7 @@ function deferredInitTokens() { forEach(allTokens, (currTokType) => { currTokType.PATTERN = new RegExp( (currTokType.PATTERN as RegExp).source, - "y" + "y", ); }); } @@ -116,7 +116,7 @@ class EcmaScriptQuirksLookaheadStrategy implements ILookaheadStrategy { if ( !every(alts, (currPath) => - every(currPath, (currAlt) => currAlt.length === 1) + every(currPath, (currAlt) => currAlt.length === 1), ) ) { throw Error("This scannerLess parser only supports LL(1) lookahead."); @@ -283,7 +283,7 @@ class EcmaScriptQuirksParser extends EmbeddedActionsParser { consumeInternal( this: MixedInParser & EcmaScriptQuirksParser, tokClass: TokenType, - idx: number + idx: number, ): IToken { this.skipWhitespace(); const nextToken = this.consumeExpected(tokClass); @@ -304,7 +304,7 @@ class EcmaScriptQuirksParser extends EmbeddedActionsParser { ruleName: this.getCurrRuleFullName(), }); throw this.SAVE_ERROR( - new MismatchedTokenException(msg, errorToken, previousToken) + new MismatchedTokenException(msg, errorToken, previousToken), ); } } diff --git a/packages/chevrotain/test/full_flow/error_recovery/sql_statements/sql_recovery_spec.ts b/packages/chevrotain/test/full_flow/error_recovery/sql_statements/sql_recovery_spec.ts index 2c188cab6..bc880ac97 100644 --- a/packages/chevrotain/test/full_flow/error_recovery/sql_statements/sql_recovery_spec.ts +++ b/packages/chevrotain/test/full_flow/error_recovery/sql_statements/sql_recovery_spec.ts @@ -254,16 +254,16 @@ describe("Error Recovery SQL DDL Example", () => { expect(ptResult.children.length).to.equal(3); expect(ptResult.children[0].payload.tokenType).to.equal(CREATE_STMT); expect(ptResult.children[0].payload.tokenType).to.not.equal( - INVALID_CREATE_STMT + INVALID_CREATE_STMT, ); // but the second one is marked as invalid 
     expect(ptResult.children[1].payload.tokenType).to.equal(
-      INVALID_INSERT_STMT
+      INVALID_INSERT_STMT,
     );
     // yet the third one is still valid!, we recovered and continued parsing.
     expect(ptResult.children[2].payload.tokenType).to.equal(DELETE_STMT);
     expect(ptResult.children[2].payload.tokenType).to.not.equal(
-      INVALID_DELETE_STMT
+      INVALID_DELETE_STMT,
     );
   });
@@ -279,16 +279,16 @@ describe("Error Recovery SQL DDL Example", () => {
     expect(ptResult.children.length).to.equal(3);
     expect(ptResult.children[0].payload.tokenType).to.equal(CREATE_STMT);
     expect(ptResult.children[0].payload.tokenType).to.not.equal(
-      INVALID_CREATE_STMT
+      INVALID_CREATE_STMT,
     );
     // but the second one is marked as invalid, this means we kept trying to re-sync to an "higher" rule
     expect(ptResult.children[1].payload.tokenType).to.equal(
-      INVALID_INSERT_STMT
+      INVALID_INSERT_STMT,
     );
     // yet the third one is still valid!, we recovered and continued parsing.
     expect(ptResult.children[2].payload.tokenType).to.equal(DELETE_STMT);
     expect(ptResult.children[2].payload.tokenType).to.not.equal(
-      INVALID_DELETE_STMT
+      INVALID_DELETE_STMT,
     );
   });
@@ -310,15 +310,15 @@ describe("Error Recovery SQL DDL Example", () => {
     expect(ptResult.children.length).to.equal(3);
     expect(ptResult.children[0].payload.tokenType).to.equal(CREATE_STMT);
     expect(ptResult.children[0].payload.tokenType).to.not.equal(
-      INVALID_CREATE_STMT
+      INVALID_CREATE_STMT,
     );
     expect(ptResult.children[1].payload.tokenType).to.equal(INSERT_STMT);
     expect(ptResult.children[1].payload.tokenType).to.not.equal(
-      INVALID_INSERT_STMT
+      INVALID_INSERT_STMT,
     );
     expect(ptResult.children[2].payload.tokenType).to.equal(DELETE_STMT);
     expect(ptResult.children[2].payload.tokenType).to.not.equal(
-      INVALID_DELETE_STMT
+      INVALID_DELETE_STMT,
     );
   }
@@ -369,7 +369,7 @@ describe("Error Recovery SQL DDL Example", () => {
     expect(ptResult.children.length).to.equal(1);
     expect(ptResult.children[0].payload.tokenType).to.equal(DELETE_STMT);
     expect(ptResult.children[0].payload.tokenType).to.not.equal(
-      INVALID_DELETE_STMT
+      INVALID_DELETE_STMT,
     );
   });
diff --git a/packages/chevrotain/test/full_flow/error_recovery/switch_case/switchcase_recovery_parser.ts b/packages/chevrotain/test/full_flow/error_recovery/switch_case/switchcase_recovery_parser.ts
index eb6b0b5ab..218c6f650 100644
--- a/packages/chevrotain/test/full_flow/error_recovery/switch_case/switchcase_recovery_parser.ts
+++ b/packages/chevrotain/test/full_flow/error_recovery/switch_case/switchcase_recovery_parser.ts
@@ -57,7 +57,7 @@ export class SwitchCaseRecoveryParser extends EmbeddedActionsParser {
       { ...allTokens },
       {
         recoveryEnabled: true,
-      }
+      },
     );
     this.performSelfAnalysis();
   }
@@ -88,7 +88,7 @@ export class SwitchCaseRecoveryParser extends EmbeddedActionsParser {
   public canTokenTypeBeInsertedInRecovery(tokType: TokenType) {
     return !includes(
       this.tokTypesThatCannotBeInsertedInRecovery,
-      tokType as unknown
+      tokType as unknown,
     );
   }
diff --git a/packages/chevrotain/test/full_flow/parse_tree.ts b/packages/chevrotain/test/full_flow/parse_tree.ts
index c55e79fe4..4c7da5cf1 100644
--- a/packages/chevrotain/test/full_flow/parse_tree.ts
+++ b/packages/chevrotain/test/full_flow/parse_tree.ts
@@ -14,7 +14,10 @@ export class ParseTree {
     return this.payload.startColumn;
   }

-  constructor(public payload: IToken, public children: ParseTree[] = []) {}
+  constructor(
+    public payload: IToken,
+    public children: ParseTree[] = [],
+  ) {}
 }

 /**
@@ -27,7 +30,7 @@ export class ParseTree {
  */
 export function PT(
   tokenOrTokenClass: TokenType | IToken,
-  children: ParseTree[] = []
+  children: ParseTree[] = [],
 ): ParseTree | null {
   const childrenCompact = compact(children);
diff --git a/packages/chevrotain/test/parse/cst_spec.ts b/packages/chevrotain/test/parse/cst_spec.ts
index 469f04543..d097b43d1 100644
--- a/packages/chevrotain/test/parse/cst_spec.ts
+++ b/packages/chevrotain/test/parse/cst_spec.ts
@@ -14,7 +14,7 @@ function createTokenVector(tokTypes: TokenType[]): any[] {
 const tokenStructuredMatcher = tokenStructuredMatcherStrict as (
   a: CstElement,
-  b: TokenType
+  b: TokenType,
 ) => boolean;

 function defineTestSuite(recoveryMode: boolean) {
@@ -103,7 +103,7 @@ function defineTestSuite(recoveryMode: boolean) {
       expect(tokenStructuredMatcher(cst.children.B[0], B)).to.be.true;
       expect(cst.children.myOtherLabel[0].name).to.equal("bamba");
       expect(
-        tokenStructuredMatcher(cst.children.myOtherLabel[0].children.C[0], C)
+        tokenStructuredMatcher(cst.children.myOtherLabel[0].children.C[0], C),
       ).to.be.true;
     });
@@ -718,7 +718,7 @@ function defineTestSuite(recoveryMode: boolean) {
       const cst = parser.root();
       expect(parser.errors).to.have.lengthOf(1);
       expect(parser.errors[0].message).to.include(
-        "Expecting token of type --> B <--"
+        "Expecting token of type --> B <--",
       );
       expect(parser.errors[0].resyncedTokens).to.have.lengthOf(1);
       expect(tokenStructuredMatcher(parser.errors[0].resyncedTokens[0], E)).to
@@ -797,7 +797,7 @@ function defineTestSuite(recoveryMode: boolean) {
       const cst = parser.root();
       expect(parser.errors).to.have.lengthOf(1);
       expect(parser.errors[0].message).to.include(
-        "Expecting token of type --> B <--"
+        "Expecting token of type --> B <--",
       );
       expect(parser.errors[0].resyncedTokens).to.have.lengthOf(1);
       expect(tokenStructuredMatcher(parser.errors[0].resyncedTokens[0], E)).to
diff --git a/packages/chevrotain/test/parse/cst_visitor_spec.ts b/packages/chevrotain/test/parse/cst_visitor_spec.ts
index a755d1af4..1449cdcd7 100644
--- a/packages/chevrotain/test/parse/cst_visitor_spec.ts
+++ b/packages/chevrotain/test/parse/cst_visitor_spec.ts
@@ -188,10 +188,10 @@ describe("The CSTVisitor", () => {
     }

     expect(() => new CstVisitorValidator()).to.throw(
-      "Missing visitor method: "
+      "Missing visitor method: ",
     );
     expect(() => new CstVisitorValidator()).to.throw(
-      "Errors Detected in CST Visitor"
+      "Errors Detected in CST Visitor",
     );
   });
 });
diff --git a/packages/chevrotain/test/parse/exceptions_spec.ts b/packages/chevrotain/test/parse/exceptions_spec.ts
index ef9f8a06c..09c7a44f1 100644
--- a/packages/chevrotain/test/parse/exceptions_spec.ts
+++ b/packages/chevrotain/test/parse/exceptions_spec.ts
@@ -23,7 +23,7 @@ describe("Chevrotain's Parsing Exceptions", () => {
     const exceptionInstance = new EarlyExitException(
       "error message",
       currentToken,
-      previousToken
+      previousToken,
     );
     expect(exceptionInstance).to.be.an.instanceOf(EarlyExitException);
     expect(exceptionInstance).to.be.an.instanceOf(Error);
@@ -38,7 +38,7 @@ describe("Chevrotain's Parsing Exceptions", () => {
     const exceptionInstance = new NoViableAltException(
       "error message",
       currentToken,
-      previousToken
+      previousToken,
     );
     expect(exceptionInstance).to.be.an.instanceOf(NoViableAltException);
     expect(exceptionInstance).to.be.an.instanceOf(Error);
@@ -52,7 +52,7 @@ describe("Chevrotain's Parsing Exceptions", () => {
   it("NotAllInputParsedException", () => {
     const exceptionInstance = new NotAllInputParsedException(
       "error message",
-      currentToken
+      currentToken,
     );
     expect(exceptionInstance).to.be.an.instanceOf(NotAllInputParsedException);
     expect(exceptionInstance).to.be.an.instanceOf(Error);
@@ -66,7 +66,7 @@ describe("Chevrotain's Parsing Exceptions", () => {
     const exceptionInstance = new MismatchedTokenException(
       "error message",
       currentToken,
-      previousToken
+      previousToken,
     );
     expect(exceptionInstance).to.be.an.instanceOf(MismatchedTokenException);
     expect(exceptionInstance).to.be.an.instanceOf(Error);
@@ -90,7 +90,7 @@ describe("Chevrotain's Parsing Exceptions", () => {
   it("EarlyExitException", () => {
     const exceptionInstance = throwAndCatchException(
-      () => new EarlyExitException("", dummyToken, dummyToken)
+      () => new EarlyExitException("", dummyToken, dummyToken),
     );
     const stacktrace = ErrorStackParser.parse(exceptionInstance);
     expect(stacktrace[0].functionName).to.equal(""); // lambda function
@@ -99,7 +99,7 @@ describe("Chevrotain's Parsing Exceptions", () => {
   it("NoViableAltException", () => {
     const exceptionInstance = throwAndCatchException(
-      () => new NoViableAltException("", dummyToken, dummyToken)
+      () => new NoViableAltException("", dummyToken, dummyToken),
     );
     const stacktrace = ErrorStackParser.parse(exceptionInstance);
     expect(stacktrace[0].functionName).to.equal(""); // lambda function
@@ -108,7 +108,7 @@ describe("Chevrotain's Parsing Exceptions", () => {
   it("NotAllInputParsedException", () => {
     const exceptionInstance = throwAndCatchException(
-      () => new NotAllInputParsedException("", dummyToken)
+      () => new NotAllInputParsedException("", dummyToken),
     );
     const stacktrace = ErrorStackParser.parse(exceptionInstance);
     expect(stacktrace[0].functionName).to.equal(""); // lambda function
@@ -117,7 +117,7 @@ describe("Chevrotain's Parsing Exceptions", () => {
   it("MismatchedTokenException", () => {
     const exceptionInstance = throwAndCatchException(
-      () => new MismatchedTokenException("", dummyToken, dummyToken)
+      () => new MismatchedTokenException("", dummyToken, dummyToken),
     );
     const stacktrace = ErrorStackParser.parse(exceptionInstance);
     expect(stacktrace[0].functionName).to.equal(""); // lambda function
diff --git a/packages/chevrotain/test/parse/grammar/checks_spec.ts b/packages/chevrotain/test/parse/grammar/checks_spec.ts
index d90e62ff7..563e11125 100644
--- a/packages/chevrotain/test/parse/grammar/checks_spec.ts
+++ b/packages/chevrotain/test/parse/grammar/checks_spec.ts
@@ -133,10 +133,10 @@ describe("the grammar validations", () => {
       [qualifiedNameErr1, qualifiedNameErr2],
       [],
       defaultGrammarValidatorErrorProvider,
-      "bamba"
+      "bamba",
     );
     expect(actualErrors.map((e) => omit(e, "message"))).to.deep.equal(
-      expectedErrorsNoMsg
+      expectedErrorsNoMsg,
     );
   });
@@ -148,7 +148,7 @@ describe("the grammar validations", () => {
         new Rule({ name: "C", definition: [] }),
       ],
       "className",
-      defaultGrammarValidatorErrorProvider
+      defaultGrammarValidatorErrorProvider,
     );
     //noinspection BadExpressionStatementJS
     expect(noErrors).to.be.empty;
@@ -162,14 +162,14 @@ describe("the grammar validations", () => {
         new Rule({ name: "C", definition: [] }),
       ],
       "className",
-      defaultGrammarValidatorErrorProvider
+      defaultGrammarValidatorErrorProvider,
     );
     //noinspection BadExpressionStatementJS
     expect(duplicateErr).to.have.length(1);
     expect(duplicateErr[0]).to.have.property("message");
     expect(duplicateErr[0]).to.have.property(
       "type",
-      ParserDefinitionErrorType.DUPLICATE_RULE_NAME
+      ParserDefinitionErrorType.DUPLICATE_RULE_NAME,
     );
     expect(duplicateErr[0]).to.have.property("ruleName", "A");
   });
@@ -178,19 +178,19 @@ describe("the grammar validations", () => {
     const positive = validateRuleIsOverridden(
       "AAA",
       ["BBB", "CCC"],
"className" + "className", ); expect(positive).to.have.lengthOf(1); expect(positive[0].message).to.contain("Invalid rule override"); expect(positive[0].type).to.equal( - ParserDefinitionErrorType.INVALID_RULE_OVERRIDE + ParserDefinitionErrorType.INVALID_RULE_OVERRIDE, ); expect(positive[0].ruleName).to.equal("AAA"); const negative = validateRuleIsOverridden( "AAA", ["BBB", "CCC", "AAA"], - "className" + "className", ); expect(negative).to.have.lengthOf(0); }); @@ -199,42 +199,42 @@ describe("the grammar validations", () => { describe("identifyProductionForDuplicates function", () => { it("generates DSL code for a ProdRef", () => { const dslCode = identifyProductionForDuplicates( - new NonTerminal({ nonTerminalName: "ActionDeclaration" }) + new NonTerminal({ nonTerminalName: "ActionDeclaration" }), ); expect(dslCode).to.equal("SUBRULE_#_1_#_ActionDeclaration"); }); it("generates DSL code for a OPTION", () => { const dslCode = identifyProductionForDuplicates( - new Option({ definition: [], idx: 3 }) + new Option({ definition: [], idx: 3 }), ); expect(dslCode).to.equal("OPTION_#_3_#_"); }); it("generates DSL code for a AT_LEAST_ONE", () => { const dslCode = identifyProductionForDuplicates( - new RepetitionMandatory({ definition: [] }) + new RepetitionMandatory({ definition: [] }), ); expect(dslCode).to.equal("AT_LEAST_ONE_#_1_#_"); }); it("generates DSL code for a MANY", () => { const dslCode = identifyProductionForDuplicates( - new Repetition({ definition: [], idx: 5 }) + new Repetition({ definition: [], idx: 5 }), ); expect(dslCode).to.equal("MANY_#_5_#_"); }); it("generates DSL code for a OR", () => { const dslCode = identifyProductionForDuplicates( - new Alternation({ definition: [], idx: 1 }) + new Alternation({ definition: [], idx: 1 }), ); expect(dslCode).to.equal("OR_#_1_#_"); }); it("generates DSL code for a Terminal", () => { const dslCode = identifyProductionForDuplicates( - new Terminal({ terminalType: getIdentTok(), idx: 4 }) + new Terminal({ terminalType: getIdentTok(), idx: 4 }), ); expect(dslCode).to.equal("CONSUME_#_4_#_IdentTok"); }); @@ -520,7 +520,7 @@ describe("the getFirstNoneTerminal function", () => { referencedRule: dummyRule, }), ], - }) + }), ); } @@ -531,13 +531,13 @@ describe("the getFirstNoneTerminal function", () => { const actual = validateTooManyAlts( ruleWithTooManyAlts, - defaultGrammarValidatorErrorProvider + defaultGrammarValidatorErrorProvider, ); expect(actual).to.have.lengthOf(1); expect(actual[0].type).to.equal(ParserDefinitionErrorType.TOO_MANY_ALTS); expect(actual[0].ruleName).to.equal("blah"); expect(actual[0].message).to.contain( - "An Alternation cannot have more than 256 alternatives" + "An Alternation cannot have more than 256 alternatives", ); }); }); @@ -582,10 +582,10 @@ describe("The duplicate occurrence validations full flow", () => { } expect(() => new ErroneousOccurrenceNumUsageParser1()).to.throw( - "->SUBRULE1<- with argument: ->anotherRule<-" + "->SUBRULE1<- with argument: ->anotherRule<-", ); expect(() => new ErroneousOccurrenceNumUsageParser1()).to.throw( - "appears more than once (2 times) in the top level rule: ->duplicateRef<-" + "appears more than once (2 times) in the top level rule: ->duplicateRef<-", ); }); @@ -607,7 +607,7 @@ describe("The duplicate occurrence validations full flow", () => { expect(() => new ErroneousOccurrenceNumUsageParser2()).to.throw("3"); expect(() => new ErroneousOccurrenceNumUsageParser2()).to.throw("PlusTok"); expect(() => new ErroneousOccurrenceNumUsageParser2()).to.throw( - "duplicateTerminal" + 
"duplicateTerminal", ); }); @@ -631,10 +631,10 @@ describe("The duplicate occurrence validations full flow", () => { expect(() => new ErroneousOccurrenceNumUsageParser3()).to.throw("->MANY<-"); expect(() => new ErroneousOccurrenceNumUsageParser3()).to.throw( - "appears more than once (2 times) in the top level rule: ->duplicateMany<-" + "appears more than once (2 times) in the top level rule: ->duplicateMany<-", ); expect(() => new ErroneousOccurrenceNumUsageParser3()).to.throw( - "https://chevrotain.io/docs/FAQ.html#NUMERICAL_SUFFIXES" + "https://chevrotain.io/docs/FAQ.html#NUMERICAL_SUFFIXES", ); }); @@ -747,7 +747,7 @@ describe("The Recorder runtime checks full flow", () => { expect(() => new InvalidRefParser()).to.throw("argument is invalid"); expect(() => new InvalidRefParser()).to.throw("but got: "); expect(() => new InvalidRefParser()).to.throw( - "inside top level rule: " + "inside top level rule: ", ); }); @@ -768,7 +768,7 @@ describe("The Recorder runtime checks full flow", () => { expect(() => new InvalidTokTypeParser()).to.throw("argument is invalid"); expect(() => new InvalidTokTypeParser()).to.throw("but got: "); expect(() => new InvalidTokTypeParser()).to.throw( - "inside top level rule: " + "inside top level rule: ", ); }); @@ -789,10 +789,10 @@ describe("The Recorder runtime checks full flow", () => { } expect(() => new InvalidIdxParser()).to.throw( - "Invalid DSL Method idx value: <256>" + "Invalid DSL Method idx value: <256>", ); expect(() => new InvalidIdxParser()).to.throw( - "Idx value must be a none negative value smaller than 256" + "Idx value must be a none negative value smaller than 256", ); }); @@ -815,10 +815,10 @@ describe("The Recorder runtime checks full flow", () => { } expect(() => new InvalidIdxParser()).to.throw( - "Invalid DSL Method idx value: <-1>" + "Invalid DSL Method idx value: <-1>", ); expect(() => new InvalidIdxParser()).to.throw( - "Idx value must be a none negative value smaller than 256" + "Idx value must be a none negative value smaller than 256", ); }); @@ -838,10 +838,10 @@ describe("The Recorder runtime checks full flow", () => { } expect(() => new InvalidIdxParser()).to.throw( - "Invalid DSL Method idx value: <666>" + "Invalid DSL Method idx value: <666>", ); expect(() => new InvalidIdxParser()).to.throw( - "Idx value must be a none negative value smaller than 256" + "Idx value must be a none negative value smaller than 256", ); }); @@ -861,10 +861,10 @@ describe("The Recorder runtime checks full flow", () => { } expect(() => new InvalidIdxParser()).to.throw( - "Invalid DSL Method idx value: <-333>" + "Invalid DSL Method idx value: <-333>", ); expect(() => new InvalidIdxParser()).to.throw( - "Idx value must be a none negative value smaller than 256" + "Idx value must be a none negative value smaller than 256", ); }); @@ -884,10 +884,10 @@ describe("The Recorder runtime checks full flow", () => { } expect(() => new InvalidIdxParser()).to.throw( - "Invalid DSL Method idx value: <1999>" + "Invalid DSL Method idx value: <1999>", ); expect(() => new InvalidIdxParser()).to.throw( - "Idx value must be a none negative value smaller than 256" + "Idx value must be a none negative value smaller than 256", ); }); @@ -911,13 +911,13 @@ describe("The Recorder runtime checks full flow", () => { } expect(() => new InvalidIdxParser()).to.throw( - "Invalid DSL Method idx value: <543>" + "Invalid DSL Method idx value: <543>", ); expect(() => new InvalidIdxParser()).to.throw( - "Idx value must be a none negative value smaller than 256" + "Idx value must be 
a none negative value smaller than 256", ); }); - } + }, ); context("augmenting error messages", () => { @@ -935,7 +935,7 @@ describe("The Recorder runtime checks full flow", () => { } expect(() => new OtherRecordingErrorParser()).to.throw( - 'This error was thrown during the "grammar recording phase"' + 'This error was thrown during the "grammar recording phase"', ); }); @@ -980,15 +980,15 @@ describe("The reference resolver validation full flow", () => { } Object.getPrototypeOf( - EmbeddedActionsParser + EmbeddedActionsParser, ).DEFER_DEFINITION_ERRORS_HANDLING = true; expect(() => new DupConsumeParser()).to.not.throw(); expect(() => new DupConsumeParser()).to.not.throw(); expect(() => new DupConsumeParser()).to.not.throw(); Object.getPrototypeOf( - EmbeddedActionsParser + EmbeddedActionsParser, ).DEFER_DEFINITION_ERRORS_HANDLING = false; - } + }, ); }); @@ -1013,7 +1013,7 @@ describe("The rule names validation full flow", () => { public two = this.RULE("oops_duplicate", () => {}); } expect(() => new DuplicateRulesParser()).to.throw( - "is already defined in the grammar" + "is already defined in the grammar", ); expect(() => new DuplicateRulesParser()).to.throw("DuplicateRulesParser"); expect(() => new DuplicateRulesParser()).to.throw("oops_duplicate"); @@ -1043,16 +1043,16 @@ describe("The rule names validation full flow", () => { public two = this.RULE("oops_duplicate", () => {}); } Object.getPrototypeOf( - EmbeddedActionsParser + EmbeddedActionsParser, ).DEFER_DEFINITION_ERRORS_HANDLING = true; expect(() => new InvalidRuleNameParser()).to.not.throw(); expect(() => new InvalidRuleNameParser()).to.not.throw(); expect(() => new DuplicateRulesParser()).to.not.throw(); expect(() => new DuplicateRulesParser()).to.not.throw(); Object.getPrototypeOf( - EmbeddedActionsParser + EmbeddedActionsParser, ).DEFER_DEFINITION_ERRORS_HANDLING = false; - } + }, ); }); @@ -1110,24 +1110,24 @@ class ComplexInDirectlyLeftRecursive extends EmbeddedActionsParser { describe("The left recursion detection full flow", () => { it("will throw an error when trying to init a parser with direct left recursion", () => { expect(() => new DirectlyLeftRecursive()).to.throw( - "Left Recursion found in grammar" + "Left Recursion found in grammar", ); expect(() => new DirectlyLeftRecursive()).to.throw("A --> A"); }); it("will throw an error when trying to init a parser with indirect left recursion", () => { expect(() => new InDirectlyLeftRecursive()).to.throw( - "Left Recursion found in grammar" + "Left Recursion found in grammar", ); expect(() => new InDirectlyLeftRecursive()).to.throw("A --> B --> A"); }); it("will throw an error when trying to init a parser with indirect left recursion - complex", () => { expect(() => new ComplexInDirectlyLeftRecursive()).to.throw( - "Left Recursion found in grammar" + "Left Recursion found in grammar", ); expect(() => new ComplexInDirectlyLeftRecursive()).to.throw( - "A --> B --> A" + "A --> B --> A", ); }); }); @@ -1162,7 +1162,7 @@ describe("The empty alternative detection full flow", () => { }); } expect(() => new EmptyAltAmbiguityParser()).to.throw( - "Ambiguous empty alternative" + "Ambiguous empty alternative", ); expect(() => new EmptyAltAmbiguityParser()).to.throw("3"); expect(() => new EmptyAltAmbiguityParser()).to.throw("2"); @@ -1201,7 +1201,7 @@ describe("The empty alternative detection full flow", () => { public emptyRule = this.RULE("emptyRule", () => {}); } expect(() => new EmptyAltIndirectAmbiguityParser()).to.throw( - "Ambiguous empty alternative" + "Ambiguous empty 
alternative", ); expect(() => new EmptyAltIndirectAmbiguityParser()).to.throw("3"); expect(() => new EmptyAltIndirectAmbiguityParser()).to.throw("2"); @@ -1233,12 +1233,12 @@ describe("The empty alternative detection full flow", () => { }); } expect(() => new AltAmbiguityParserImplicitOccurence()).to.throw( - "Ambiguous Alternatives Detected" + "Ambiguous Alternatives Detected", ); expect(() => new AltAmbiguityParserImplicitOccurence()).to.throw("1"); expect(() => new AltAmbiguityParserImplicitOccurence()).to.throw("2"); expect(() => new AltAmbiguityParserImplicitOccurence()).to.throw( - " may appears as a prefix path" + " may appears as a prefix path", ); }); @@ -1270,16 +1270,16 @@ describe("The empty alternative detection full flow", () => { }, ], }); - } + }, ); } expect(() => new AltAmbiguityParserImplicitOccurrence()).to.throw( - "Ambiguous Alternatives Detected" + "Ambiguous Alternatives Detected", ); expect(() => new AltAmbiguityParserImplicitOccurrence()).to.throw("1"); expect(() => new AltAmbiguityParserImplicitOccurrence()).to.throw("2"); expect(() => new AltAmbiguityParserImplicitOccurrence()).to.throw( - " may appears as a prefix path" + " may appears as a prefix path", ); }); @@ -1379,11 +1379,11 @@ describe("The empty alternative detection full flow", () => { }); } expect(() => new EmptyAltAmbiguityParser2()).to.throw( - "Ambiguous empty alternative" + "Ambiguous empty alternative", ); expect(() => new EmptyAltAmbiguityParser2()).to.throw("1"); expect(() => new EmptyAltAmbiguityParser2()).to.throw( - "Only the last alternative may be an empty alternative." + "Only the last alternative may be an empty alternative.", ); expect(() => new EmptyAltAmbiguityParser2()).to.not.throw("undefined"); }); @@ -1423,11 +1423,11 @@ describe("The prefix ambiguity detection full flow", () => { expect(() => new PrefixAltAmbiguity()).to.throw("OR3"); expect(() => new PrefixAltAmbiguity()).to.throw("Ambiguous alternatives"); expect(() => new PrefixAltAmbiguity()).to.throw( - "due to common lookahead prefix" + "due to common lookahead prefix", ); expect(() => new PrefixAltAmbiguity()).to.throw(""); expect(() => new PrefixAltAmbiguity()).to.throw( - "https://chevrotain.io/docs/guide/resolving_grammar_errors.html#COMMON_PREFIX" + "https://chevrotain.io/docs/guide/resolving_grammar_errors.html#COMMON_PREFIX", ); }); @@ -1468,16 +1468,16 @@ describe("The prefix ambiguity detection full flow", () => { }); } expect(() => new AlternativesAmbiguityParser()).to.throw( - "Ambiguous Alternatives Detected: <1 ,2>" + "Ambiguous Alternatives Detected: <1 ,2>", ); expect(() => new AlternativesAmbiguityParser()).to.throw( - "in inside
Rule" + "in inside
Rule", ); expect(() => new AlternativesAmbiguityParser()).to.throw( - "Comma, Comma, Comma, Comma" + "Comma, Comma, Comma, Comma", ); expect(() => new AlternativesAmbiguityParser()).to.throw( - "https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES" + "https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES", ); }); @@ -1519,14 +1519,14 @@ describe("The prefix ambiguity detection full flow", () => { }); } expect(() => new AlternativesAmbiguityParser()).to.throw( - "Ambiguous Alternatives Detected: <1 ,2>" + "Ambiguous Alternatives Detected: <1 ,2>", ); expect(() => new AlternativesAmbiguityParser()).to.throw( - "in inside
Rule" + "in inside
Rule", ); expect(() => new AlternativesAmbiguityParser()).to.throw("D, D, D, D"); expect(() => new AlternativesAmbiguityParser()).to.throw( - "https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES" + "https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES", ); }); @@ -1560,11 +1560,11 @@ describe("The prefix ambiguity detection full flow", () => { expect(() => new PrefixAltAmbiguity2()).to.throw("OR"); expect(() => new PrefixAltAmbiguity2()).to.throw("Ambiguous alternatives"); expect(() => new PrefixAltAmbiguity2()).to.throw( - "due to common lookahead prefix" + "due to common lookahead prefix", ); expect(() => new PrefixAltAmbiguity2()).to.throw(""); expect(() => new PrefixAltAmbiguity2()).to.throw( - "https://chevrotain.io/docs/guide/resolving_grammar_errors.html#COMMON_PREFIX" + "https://chevrotain.io/docs/guide/resolving_grammar_errors.html#COMMON_PREFIX", ); }); }); @@ -1592,7 +1592,7 @@ describe("The namespace conflict detection full flow", () => { } expect(() => new NameSpaceConflict([])).to.throw( - "The grammar has both a Terminal(Token) and a Non-Terminal(Rule) named: " + "The grammar has both a Terminal(Token) and a Non-Terminal(Rule) named: ", ); }); }); @@ -1607,14 +1607,14 @@ describe("The no non-empty lookahead validation", () => { } public someRule = this.RULE("someRule", () => - this.AT_LEAST_ONE(() => {}) + this.AT_LEAST_ONE(() => {}), ); } expect(() => new EmptyLookaheadParserAtLeastOne()).to.throw( - "The repetition " + "The repetition ", ); expect(() => new EmptyLookaheadParserAtLeastOne()).to.throw( - " can never consume any tokens" + " can never consume any tokens", ); }); @@ -1630,14 +1630,14 @@ describe("The no non-empty lookahead validation", () => { this.AT_LEAST_ONE_SEP5({ SEP: PlusTok, DEF: () => {}, - }) + }), ); } expect(() => new EmptyLookaheadParserAtLeastOneSep()).to.throw( - "The repetition " + "The repetition ", ); expect(() => new EmptyLookaheadParserAtLeastOneSep()).to.throw( - "within Rule " + "within Rule ", ); }); @@ -1652,10 +1652,10 @@ describe("The no non-empty lookahead validation", () => { public someRule = this.RULE("someRule", () => this.MANY2(() => {})); } expect(() => new EmptyLookaheadParserMany()).to.throw( - "The repetition " + "The repetition ", ); expect(() => new EmptyLookaheadParserMany()).to.throw( - " can never consume any tokens" + " can never consume any tokens", ); }); @@ -1671,14 +1671,14 @@ describe("The no non-empty lookahead validation", () => { this.MANY_SEP3({ SEP: PlusTok, DEF: () => {}, - }) + }), ); } expect(() => new EmptyLookaheadParserManySep()).to.throw( - "The repetition " + "The repetition ", ); expect(() => new EmptyLookaheadParserManySep()).to.throw( - "within Rule " + "within Rule ", ); }); }); diff --git a/packages/chevrotain/test/parse/grammar/interperter_spec.ts b/packages/chevrotain/test/parse/grammar/interperter_spec.ts index 65d9144d9..c91083582 100644 --- a/packages/chevrotain/test/parse/grammar/interperter_spec.ts +++ b/packages/chevrotain/test/parse/grammar/interperter_spec.ts @@ -164,7 +164,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(1); expect(possibleNextTokTypes[0]).to.equal(getIdentTok()); @@ -180,7 +180,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); 
expect(possibleNextTokTypes.length).to.equal(1); expect(possibleNextTokTypes[0]).to.equal(LParenTok); @@ -196,7 +196,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(2); setEquality(possibleNextTokTypes, [getIdentTok(), RParenTok]); @@ -212,7 +212,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(1); expect(possibleNextTokTypes[0]).to.equal(getIdentTok()); @@ -228,7 +228,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(2); setEquality(possibleNextTokTypes, [SemicolonTok, getColonTok()]); @@ -244,7 +244,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(1); expect(possibleNextTokTypes[0]).to.equal(getIdentTok()); @@ -260,7 +260,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(0); }); @@ -275,7 +275,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(1); expect(possibleNextTokTypes[0]).to.equal(getColonTok()); @@ -291,7 +291,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(1); expect(possibleNextTokTypes[0]).to.equal(getIdentTok()); @@ -307,7 +307,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(1); expect(possibleNextTokTypes[0]).to.equal(getRSquareTok()); @@ -323,7 +323,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(2); setEquality(possibleNextTokTypes, [CommaTok, RParenTok]); @@ -339,7 +339,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(1); expect(possibleNextTokTypes[0]).to.equal(getColonTok()); @@ -355,7 +355,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(1); expect(possibleNextTokTypes[0]).to.equal(getIdentTok()); @@ -371,7 +371,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(1); expect(possibleNextTokTypes[0]).to.equal(getRSquareTok()); @@ -387,7 +387,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = 
new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(2); setEquality(possibleNextTokTypes, [CommaTok, RParenTok]); @@ -406,7 +406,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(4); setEquality(possibleNextTokTypes, [ @@ -415,7 +415,7 @@ describe("The Grammar Interpeter namespace", () => { CommaTok, RParenTok, ]); - } + }, ); it( @@ -431,11 +431,11 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(1); expect(possibleNextTokTypes[0]).to.equal(getIdentTok()); - } + }, ); it( @@ -451,7 +451,7 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( actionDec, - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(4); setEquality(possibleNextTokTypes, [ @@ -460,7 +460,7 @@ describe("The Grammar Interpeter namespace", () => { CommaTok, RParenTok, ]); - } + }, ); it( @@ -476,11 +476,11 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( getParamSpec(), - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(2); setEquality(possibleNextTokTypes, [getDotTok(), getLSquareTok()]); - } + }, ); it( @@ -496,11 +496,11 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( getParamSpec(), - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(1); expect(possibleNextTokTypes[0]).to.equal(getIdentTok()); - } + }, ); it( @@ -516,11 +516,11 @@ describe("The Grammar Interpeter namespace", () => { const possibleNextTokTypes = new NextAfterTokenWalker( getParamSpec(), - caPath + caPath, ).startWalking(); expect(possibleNextTokTypes.length).to.equal(2); setEquality(possibleNextTokTypes, [getDotTok(), getLSquareTok()]); - } + }, ); it("will fail if we try to compute the next token starting from a rule that does not match the path", () => { @@ -533,7 +533,7 @@ describe("The Grammar Interpeter namespace", () => { const walker = new NextAfterTokenWalker(getParamSpec(), caPath); expect(() => walker.startWalking()).to.throw( - "The path does not start with the walker's top Rule!" 
+ "The path does not start with the walker's top Rule!", ); }); }); @@ -578,7 +578,7 @@ describe("The Grammar Interpeter namespace", () => { it("can compute the next possible token types after the MANY in paramSpec inside ActionDec", () => { const result = new NextTerminalAfterManyWalker( actionDec, - 1 + 1, ).startWalking(); expect(result.occurrence).to.equal(1); expect(result.token).to.equal(RParenTok); @@ -603,7 +603,7 @@ describe("The Grammar Interpeter namespace", () => { }); const result = new NextTerminalAfterManySepWalker( callArguments, - 1 + 1, ).startWalking(); //noinspection BadExpressionStatementJS expect(result.occurrence).to.be.undefined; @@ -647,7 +647,7 @@ describe("The Grammar Interpeter namespace", () => { }); const result = new NextTerminalAfterManySepWalker( actionDecSep, - 1 + 1, ).startWalking(); expect(result.occurrence).to.equal(1); expect(result.token).to.equal(RParenTok); @@ -681,21 +681,21 @@ describe("The Grammar Interpeter namespace", () => { const result = new NextTerminalAfterAtLeastOneWalker( atLeastOneRule, - 1 + 1, ).startWalking(); expect(result.occurrence).to.equal(2); expect(result.token).to.equal(getDotTok()); const result2 = new NextTerminalAfterAtLeastOneWalker( atLeastOneRule, - 2 + 2, ).startWalking(); expect(result2.occurrence).to.equal(1); expect(result2.token).to.equal(getDotTok()); const result3 = new NextTerminalAfterAtLeastOneWalker( atLeastOneRule, - 3 + 3, ).startWalking(); expect(result3.occurrence).to.equal(1); expect(result3.token).to.equal(CommaTok); @@ -718,7 +718,7 @@ describe("The Grammar Interpeter namespace", () => { const result = new NextTerminalAfterAtLeastOneWalker( atLeastOneRule, - 1 + 1, ).startWalking(); expect(result.occurrence).to.be.undefined; expect(result.token).to.be.undefined; @@ -755,21 +755,21 @@ describe("The Grammar Interpeter namespace", () => { const result = new NextTerminalAfterAtLeastOneSepWalker( atLeastOneSepRule, - 1 + 1, ).startWalking(); expect(result.occurrence).to.equal(2); expect(result.token).to.equal(getDotTok()); const result2 = new NextTerminalAfterAtLeastOneSepWalker( atLeastOneSepRule, - 2 + 2, ).startWalking(); expect(result2.occurrence).to.equal(1); expect(result2.token).to.equal(getDotTok()); const result3 = new NextTerminalAfterAtLeastOneSepWalker( atLeastOneSepRule, - 3 + 3, ).startWalking(); expect(result3.occurrence).to.equal(1); expect(result3.token).to.equal(CommaTok); @@ -787,7 +787,7 @@ describe("The Grammar Interpeter namespace", () => { }); const result = new NextTerminalAfterAtLeastOneSepWalker( qualifiedNameSep, - 1 + 1, ).startWalking(); //noinspection BadExpressionStatementJS expect(result.occurrence).to.be.undefined; @@ -958,19 +958,19 @@ describe("The Grammar Interpeter namespace", () => { ]; expect( - extractPartialPaths(possiblePathsFrom(repMand, 1)) + extractPartialPaths(possiblePathsFrom(repMand, 1)), ).to.deep.equal([[Alpha]]); expect( - extractPartialPaths(possiblePathsFrom(repMand, 2)) + extractPartialPaths(possiblePathsFrom(repMand, 2)), ).to.deep.equal([[Alpha, Alpha]]); expect( - extractPartialPaths(possiblePathsFrom(repMand, 3)) + extractPartialPaths(possiblePathsFrom(repMand, 3)), ).to.deep.equal([ [Alpha, Alpha, Alpha], [Alpha, Alpha, Gamma], ]); expect( - extractPartialPaths(possiblePathsFrom(repMand, 4)) + extractPartialPaths(possiblePathsFrom(repMand, 4)), ).to.deep.equal([ [Alpha, Alpha, Alpha, Alpha], [Alpha, Alpha, Gamma], @@ -1025,19 +1025,19 @@ describe("The Grammar Interpeter namespace", () => { ]; expect( - 
extractPartialPaths(possiblePathsFrom(repMandSep, 1)) + extractPartialPaths(possiblePathsFrom(repMandSep, 1)), ).to.deep.equal([[Alpha]]); expect( - extractPartialPaths(possiblePathsFrom(repMandSep, 2)) + extractPartialPaths(possiblePathsFrom(repMandSep, 2)), ).to.deep.equal([[Alpha, Alpha]]); expect( - extractPartialPaths(possiblePathsFrom(repMandSep, 3)) + extractPartialPaths(possiblePathsFrom(repMandSep, 3)), ).to.deep.equal([ [Alpha, Alpha, Comma], [Alpha, Alpha, Gamma], ]); expect( - extractPartialPaths(possiblePathsFrom(repMandSep, 4)) + extractPartialPaths(possiblePathsFrom(repMandSep, 4)), ).to.deep.equal([ [Alpha, Alpha, Comma, Alpha], [Alpha, Alpha, Gamma], @@ -1096,9 +1096,9 @@ describe("The Grammar Interpeter namespace", () => { setEquality( pluckTokenTypes( - nextPossibleTokensAfter(seq, INPUT([]), tokenStructuredMatcher, 5) + nextPossibleTokensAfter(seq, INPUT([]), tokenStructuredMatcher, 5), ), - [Alpha] + [Alpha], ); setEquality( pluckTokenTypes( @@ -1106,10 +1106,10 @@ describe("The Grammar Interpeter namespace", () => { seq, INPUT([Alpha]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Beta] + [Beta], ); setEquality( pluckTokenTypes( @@ -1117,10 +1117,10 @@ describe("The Grammar Interpeter namespace", () => { seq, INPUT([Alpha, Beta]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Gamma] + [Gamma], ); }); @@ -1141,19 +1141,24 @@ describe("The Grammar Interpeter namespace", () => { seq, INPUT([Alpha, Beta, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( seq, INPUT([Alpha, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( - nextPossibleTokensAfter(seq, INPUT([Beta]), tokenStructuredMatcher, 5) + nextPossibleTokensAfter( + seq, + INPUT([Beta]), + tokenStructuredMatcher, + 5, + ), ).to.be.empty; }); @@ -1173,10 +1178,10 @@ describe("The Grammar Interpeter namespace", () => { seq, INPUT([Alpha]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Beta, Gamma] + [Beta, Gamma], ); // setEquality(pluckTokenTypes(nextPossibleTokensAfter(seq, INPUT([Alpha, Beta]), tokenStructuredMatcher, 5)), [Gamma]) }); @@ -1191,23 +1196,28 @@ describe("The Grammar Interpeter namespace", () => { ]; expect( - nextPossibleTokensAfter(seq, INPUT([Beta]), tokenStructuredMatcher, 5) + nextPossibleTokensAfter( + seq, + INPUT([Beta]), + tokenStructuredMatcher, + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( seq, INPUT([Alpha, Alpha]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( seq, INPUT([Alpha, Beta, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; }); @@ -1240,9 +1250,9 @@ describe("The Grammar Interpeter namespace", () => { setEquality( pluckTokenTypes( - nextPossibleTokensAfter(alts, INPUT([]), tokenStructuredMatcher, 5) + nextPossibleTokensAfter(alts, INPUT([]), tokenStructuredMatcher, 5), ), - [Alpha, Beta, Beta, Gamma] + [Alpha, Beta, Beta, Gamma], ); setEquality( pluckTokenTypes( @@ -1250,10 +1260,10 @@ describe("The Grammar Interpeter namespace", () => { alts, INPUT([Beta]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Beta, Alpha] + [Beta, Alpha], ); setEquality( pluckTokenTypes( @@ -1261,10 +1271,10 @@ describe("The Grammar Interpeter namespace", () => { alts, INPUT([Beta, Alpha]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Gamma] + [Gamma], ); }); @@ -1297,32 +1307,32 @@ describe("The Grammar Interpeter namespace", () => { alts, INPUT([Alpha]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( 
alts, INPUT([Gamma, Alpha]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( alts, INPUT([Beta, Beta]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( alts, INPUT([Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; }); @@ -1339,9 +1349,9 @@ describe("The Grammar Interpeter namespace", () => { setEquality( pluckTokenTypes( - nextPossibleTokensAfter(rep, INPUT([]), tokenStructuredMatcher, 5) + nextPossibleTokensAfter(rep, INPUT([]), tokenStructuredMatcher, 5), ), - [Alpha, Gamma] + [Alpha, Gamma], ); setEquality( pluckTokenTypes( @@ -1349,10 +1359,10 @@ describe("The Grammar Interpeter namespace", () => { rep, INPUT([Alpha]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Beta] + [Beta], ); setEquality( pluckTokenTypes( @@ -1360,10 +1370,10 @@ describe("The Grammar Interpeter namespace", () => { rep, INPUT([Alpha, Beta]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Alpha, Gamma] + [Alpha, Gamma], ); setEquality( pluckTokenTypes( @@ -1371,10 +1381,10 @@ describe("The Grammar Interpeter namespace", () => { rep, INPUT([Alpha, Beta, Alpha]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Beta] + [Beta], ); setEquality( pluckTokenTypes( @@ -1382,10 +1392,10 @@ describe("The Grammar Interpeter namespace", () => { rep, INPUT([Alpha, Beta, Alpha, Beta]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Alpha, Gamma] + [Alpha, Gamma], ); }); @@ -1401,31 +1411,36 @@ describe("The Grammar Interpeter namespace", () => { ]; expect( - nextPossibleTokensAfter(rep, INPUT([Beta]), tokenStructuredMatcher, 5) + nextPossibleTokensAfter( + rep, + INPUT([Beta]), + tokenStructuredMatcher, + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( rep, INPUT([Alpha, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( rep, INPUT([Alpha, Beta, Alpha, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( rep, INPUT([Alpha, Beta, Alpha, Beta, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; }); @@ -1446,10 +1461,10 @@ describe("The Grammar Interpeter namespace", () => { repMand, INPUT([]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Alpha] + [Alpha], ); setEquality( pluckTokenTypes( @@ -1457,10 +1472,10 @@ describe("The Grammar Interpeter namespace", () => { repMand, INPUT([Alpha]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Beta] + [Beta], ); setEquality( pluckTokenTypes( @@ -1468,10 +1483,10 @@ describe("The Grammar Interpeter namespace", () => { repMand, INPUT([Alpha, Beta]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Alpha, Gamma] + [Alpha, Gamma], ); setEquality( pluckTokenTypes( @@ -1479,10 +1494,10 @@ describe("The Grammar Interpeter namespace", () => { repMand, INPUT([Alpha, Beta, Alpha]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Beta] + [Beta], ); setEquality( pluckTokenTypes( @@ -1490,10 +1505,10 @@ describe("The Grammar Interpeter namespace", () => { repMand, INPUT([Alpha, Beta, Alpha, Beta]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Alpha, Gamma] + [Alpha, Gamma], ); }); @@ -1513,32 +1528,32 @@ describe("The Grammar Interpeter namespace", () => { repMand, INPUT([Beta]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( repMand, INPUT([Alpha, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( repMand, INPUT([Alpha, Beta, Alpha, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), 
).to.be.empty; expect( nextPossibleTokensAfter( repMand, INPUT([Alpha, Beta, Alpha, Beta, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; }); @@ -1560,10 +1575,10 @@ describe("The Grammar Interpeter namespace", () => { repSep, INPUT([]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Alpha, Gamma] + [Alpha, Gamma], ); setEquality( pluckTokenTypes( @@ -1571,10 +1586,10 @@ describe("The Grammar Interpeter namespace", () => { repSep, INPUT([Alpha]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Beta] + [Beta], ); setEquality( pluckTokenTypes( @@ -1582,10 +1597,10 @@ describe("The Grammar Interpeter namespace", () => { repSep, INPUT([Alpha, Beta]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Comma, Gamma] + [Comma, Gamma], ); setEquality( pluckTokenTypes( @@ -1593,10 +1608,10 @@ describe("The Grammar Interpeter namespace", () => { repSep, INPUT([Alpha, Beta, Comma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Alpha] + [Alpha], ); setEquality( pluckTokenTypes( @@ -1604,10 +1619,10 @@ describe("The Grammar Interpeter namespace", () => { repSep, INPUT([Alpha, Beta, Comma, Alpha, Beta]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Comma, Gamma] + [Comma, Gamma], ); }); @@ -1628,32 +1643,32 @@ describe("The Grammar Interpeter namespace", () => { repMand, INPUT([Comma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( repMand, INPUT([Alpha, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( repMand, INPUT([Alpha, Beta, Comma, Alpha, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( repMand, INPUT([Alpha, Beta, Comma, Alpha, Beta, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; }); @@ -1675,10 +1690,10 @@ describe("The Grammar Interpeter namespace", () => { repSep, INPUT([]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Alpha] + [Alpha], ); setEquality( pluckTokenTypes( @@ -1686,10 +1701,10 @@ describe("The Grammar Interpeter namespace", () => { repSep, INPUT([Alpha]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Beta] + [Beta], ); setEquality( pluckTokenTypes( @@ -1697,10 +1712,10 @@ describe("The Grammar Interpeter namespace", () => { repSep, INPUT([Alpha, Beta]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Comma, Gamma] + [Comma, Gamma], ); setEquality( pluckTokenTypes( @@ -1708,10 +1723,10 @@ describe("The Grammar Interpeter namespace", () => { repSep, INPUT([Alpha, Beta, Comma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Alpha] + [Alpha], ); setEquality( pluckTokenTypes( @@ -1719,10 +1734,10 @@ describe("The Grammar Interpeter namespace", () => { repSep, INPUT([Alpha, Beta, Comma, Alpha, Beta]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Comma, Gamma] + [Comma, Gamma], ); }); @@ -1743,40 +1758,40 @@ describe("The Grammar Interpeter namespace", () => { repMand, INPUT([Comma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( repMand, INPUT([Alpha, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( repMand, INPUT([Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( repMand, INPUT([Alpha, Beta, Comma, Alpha, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( repMand, INPUT([Alpha, Beta, Comma, Alpha, Beta, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; }); @@ -1797,9 +1812,9 @@ describe("The 
Grammar Interpeter namespace", () => { setEquality( pluckTokenTypes( - nextPossibleTokensAfter(seq, INPUT([]), tokenStructuredMatcher, 5) + nextPossibleTokensAfter(seq, INPUT([]), tokenStructuredMatcher, 5), ), - [Alpha] + [Alpha], ); setEquality( pluckTokenTypes( @@ -1807,10 +1822,10 @@ describe("The Grammar Interpeter namespace", () => { seq, INPUT([Alpha]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Beta] + [Beta], ); setEquality( pluckTokenTypes( @@ -1818,10 +1833,10 @@ describe("The Grammar Interpeter namespace", () => { seq, INPUT([Alpha, Beta]), tokenStructuredMatcher, - 5 - ) + 5, + ), ), - [Gamma] + [Gamma], ); }); @@ -1841,23 +1856,28 @@ describe("The Grammar Interpeter namespace", () => { ]; expect( - nextPossibleTokensAfter(seq, INPUT([Beta]), tokenStructuredMatcher, 5) + nextPossibleTokensAfter( + seq, + INPUT([Beta]), + tokenStructuredMatcher, + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( seq, INPUT([Alpha, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; expect( nextPossibleTokensAfter( seq, INPUT([Alpha, Beta, Gamma]), tokenStructuredMatcher, - 5 - ) + 5, + ), ).to.be.empty; }); }); @@ -1937,7 +1957,7 @@ describe("The Grammar Interpeter namespace", () => { } expect(() => new Issue391Parser([])).to.not.throw( - "Ambiguous alternatives: <1 ,2>" + "Ambiguous alternatives: <1 ,2>", ); const myParser = new Issue391Parser([]); diff --git a/packages/chevrotain/test/parse/grammar/lookahead_spec.ts b/packages/chevrotain/test/parse/grammar/lookahead_spec.ts index 854bd938b..922021d5c 100644 --- a/packages/chevrotain/test/parse/grammar/lookahead_spec.ts +++ b/packages/chevrotain/test/parse/grammar/lookahead_spec.ts @@ -34,17 +34,17 @@ import { expect } from "chai"; describe("getProdType", () => { it("handles `Option`", () => { expect(getProdType(new Option({ definition: [] }))).to.equal( - PROD_TYPE.OPTION + PROD_TYPE.OPTION, ); }); it("handles `Repetition`", () => { expect(getProdType(new Repetition({ definition: [] }))).to.equal( - PROD_TYPE.REPETITION + PROD_TYPE.REPETITION, ); }); it("handles `RepetitionMandatory`", () => { expect(getProdType(new RepetitionMandatory({ definition: [] }))).to.equal( - PROD_TYPE.REPETITION_MANDATORY + PROD_TYPE.REPETITION_MANDATORY, ); }); it("handles `RepetitionWithSeparator`", () => { @@ -53,8 +53,8 @@ describe("getProdType", () => { new RepetitionWithSeparator({ definition: [], separator: createToken({ name: "Comma" }), - }) - ) + }), + ), ).to.equal(PROD_TYPE.REPETITION_WITH_SEPARATOR); }); it("handles `RepetitionMandatoryWithSeparator`", () => { @@ -63,13 +63,13 @@ describe("getProdType", () => { new RepetitionMandatoryWithSeparator({ definition: [], separator: createToken({ name: "Comma" }), - }) - ) + }), + ), ).to.equal(PROD_TYPE.REPETITION_MANDATORY_WITH_SEPARATOR); }); it("handles `Alternation`", () => { expect(getProdType(new Alternation({ definition: [] }))).to.equal( - PROD_TYPE.ALTERNATION + PROD_TYPE.ALTERNATION, ); }); }); @@ -316,7 +316,7 @@ context("lookahead specs", () => { 1, false, PROD_TYPE.OPTION, - buildSingleAlternativeLookaheadFunction + buildSingleAlternativeLookaheadFunction, ); expect(laFunc.call(colonMock)).to.equal(false); @@ -333,7 +333,7 @@ context("lookahead specs", () => { 1, false, PROD_TYPE.OPTION, - buildSingleAlternativeLookaheadFunction + buildSingleAlternativeLookaheadFunction, ); expect(laFunc.call(colonParserMock)).to.equal(true); @@ -364,7 +364,7 @@ context("lookahead specs", () => { 1, false, PROD_TYPE.OPTION, - buildSingleAlternativeLookaheadFunction + 
buildSingleAlternativeLookaheadFunction, ); const laMock = { @@ -387,7 +387,7 @@ context("lookahead specs", () => { 1, false, PROD_TYPE.REPETITION, - buildSingleAlternativeLookaheadFunction + buildSingleAlternativeLookaheadFunction, ); expect(laFunc.call(commaParserMock)).to.equal(true); @@ -406,7 +406,7 @@ context("lookahead specs", () => { 1, false, false, - buildAlternativesLookAheadFunc + buildAlternativesLookAheadFunc, ); expect(laFunc.call(commaParserMock)).to.equal(0); @@ -452,7 +452,7 @@ context("lookahead specs", () => { 1, false, false, - buildAlternativesLookAheadFunc + buildAlternativesLookAheadFunc, ); const laMock = { @@ -476,7 +476,7 @@ context("lookahead specs", () => { 1, false, false, - buildAlternativesLookAheadFunc + buildAlternativesLookAheadFunc, ); expect(laFunc.call(keyParserMock)).to.equal(0); @@ -742,7 +742,7 @@ context("lookahead specs", () => { constructor(public inputConstructors: TokenType[]) { this.input = map(inputConstructors, (currConst) => - createRegularToken(currConst) + createRegularToken(currConst), ); } @@ -765,7 +765,7 @@ context("lookahead specs", () => { alternatives, false, tokenStructuredMatcher, - false + false, ); expect(laFunc.call(new MockParser([Alpha]))).to.equal(2); @@ -783,7 +783,7 @@ context("lookahead specs", () => { alternatives, false, tokenStructuredMatcher, - false + false, ); expect(laFunc.call(new MockParser([Alpha]))).to.equal(0); @@ -802,7 +802,7 @@ context("lookahead specs", () => { alternatives, false, tokenStructuredMatcher, - false + false, ); expect(laFunc.call(new MockParser([]))).to.be.undefined; @@ -822,12 +822,12 @@ context("lookahead specs", () => { alternatives, false, tokenStructuredMatcher, - false + false, ); expect(laFunc.call(new MockParser([Alpha, Beta, Gamma]))).to.equal(0); expect( - laFunc.call(new MockParser([Alpha, Beta, Gamma, Delta])) + laFunc.call(new MockParser([Alpha, Beta, Gamma, Delta])), ).to.equal(0); expect(laFunc.call(new MockParser([Alpha, Beta, Delta]))).to.equal(0); expect(laFunc.call(new MockParser([Alpha, Beta, Beta]))).to.equal(1); @@ -847,7 +847,7 @@ context("lookahead specs", () => { alternatives, false, tokenStructuredMatcher, - false + false, ); expect(laFunc.call(new MockParser([]))).to.be.undefined; @@ -868,12 +868,12 @@ context("lookahead specs", () => { alternatives, false, tokenStructuredMatcher, - false + false, ); expect(laFunc.call(new MockParser([Alpha, Beta]))).to.equal(1); expect(laFunc.call(new MockParser([ExtendsAlphaAlpha, Beta]))).to.equal( - 0 + 0, ); // expect( // laFunc.call(new MockParser([ExtendsAlpha, Beta])) @@ -890,7 +890,7 @@ context("lookahead specs", () => { alternatives, false, tokenStructuredMatcher, - false + false, ); expect(laFunc.call(new MockParser([Alpha, Beta]))).to.be.undefined; @@ -907,7 +907,7 @@ context("lookahead specs", () => { alternatives, false, tokenStructuredMatcher, - false + false, ); expect(laFunc.call(new MockParser([Alpha]))).to.equal(0); @@ -920,7 +920,7 @@ context("lookahead specs", () => { const laFunc = buildSingleAlternativeLookaheadFunction( alternative, tokenStructuredMatcher, - false + false, ); expect(laFunc.call(new MockParser([Alpha]))).to.be.true; @@ -933,7 +933,7 @@ context("lookahead specs", () => { const laFunc = buildSingleAlternativeLookaheadFunction( alternative, tokenStructuredMatcher, - false + false, ); expect(laFunc.call(new MockParser([Delta]))).to.be.false; @@ -945,7 +945,7 @@ context("lookahead specs", () => { const laFunc = buildSingleAlternativeLookaheadFunction( alternative, tokenStructuredMatcher, - false + 
false, ); expect(laFunc.call(new MockParser([Alpha, Beta, Gamma]))).to.be.true; @@ -958,7 +958,7 @@ context("lookahead specs", () => { const laFunc = buildSingleAlternativeLookaheadFunction( alternative, tokenStructuredMatcher, - false + false, ); expect(laFunc.call(new MockParser([Alpha, Charlie, Gamma]))).to.be @@ -972,21 +972,25 @@ context("lookahead specs", () => { const laFunc = buildSingleAlternativeLookaheadFunction( alternative, tokenStructuredMatcher, - false + false, ); expect( - laFunc.call(new MockParser([Alpha, ExtendsAlpha, ExtendsAlphaAlpha])) + laFunc.call(new MockParser([Alpha, ExtendsAlpha, ExtendsAlphaAlpha])), ).to.be.true; expect( laFunc.call( - new MockParser([ExtendsAlpha, ExtendsAlpha, ExtendsAlphaAlpha]) - ) + new MockParser([ExtendsAlpha, ExtendsAlpha, ExtendsAlphaAlpha]), + ), ).to.be.true; expect( laFunc.call( - new MockParser([ExtendsAlphaAlpha, ExtendsAlpha, ExtendsAlphaAlpha]) - ) + new MockParser([ + ExtendsAlphaAlpha, + ExtendsAlpha, + ExtendsAlphaAlpha, + ]), + ), ).to.be.true; expect( laFunc.call( @@ -994,8 +998,8 @@ context("lookahead specs", () => { ExtendsAlphaAlpha, ExtendsAlphaAlpha, ExtendsAlphaAlpha, - ]) - ) + ]), + ), ).to.be.true; }); @@ -1004,19 +1008,19 @@ context("lookahead specs", () => { const laFunc = buildSingleAlternativeLookaheadFunction( alternative, tokenStructuredMatcher, - false + false, ); expect( - laFunc.call(new MockParser([Gamma, ExtendsAlpha, ExtendsAlphaAlpha])) + laFunc.call(new MockParser([Gamma, ExtendsAlpha, ExtendsAlphaAlpha])), ).to.be.false; expect( - laFunc.call(new MockParser([ExtendsAlpha, Alpha, ExtendsAlphaAlpha])) + laFunc.call(new MockParser([ExtendsAlpha, Alpha, ExtendsAlphaAlpha])), ).to.be.false; expect( laFunc.call( - new MockParser([ExtendsAlphaAlpha, ExtendsAlpha, ExtendsAlpha]) - ) + new MockParser([ExtendsAlphaAlpha, ExtendsAlpha, ExtendsAlpha]), + ), ).to.be.false; }); }); diff --git a/packages/chevrotain/test/parse/grammar/resolver_spec.ts b/packages/chevrotain/test/parse/grammar/resolver_spec.ts index 84c9b07dd..e603129c7 100644 --- a/packages/chevrotain/test/parse/grammar/resolver_spec.ts +++ b/packages/chevrotain/test/parse/grammar/resolver_spec.ts @@ -12,18 +12,18 @@ describe("The RefResolverVisitor", () => { topLevelRules["TOP"] = topLevel; const resolver = new GastRefResolverVisitor( topLevelRules, - defaultGrammarResolverErrorProvider + defaultGrammarResolverErrorProvider, ); resolver.resolveRefs(); expect(resolver.errors).to.have.lengthOf(1); expect(resolver.errors[0].message).to.contain( - "Invalid grammar, reference to a rule which is not defined: ->missingRule<-" + "Invalid grammar, reference to a rule which is not defined: ->missingRule<-", ); expect(resolver.errors[0].message).to.contain( - "inside top level rule: ->TOP<-" + "inside top level rule: ->TOP<-", ); expect(resolver.errors[0].type).to.equal( - ParserDefinitionErrorType.UNRESOLVED_SUBRULE_REF + ParserDefinitionErrorType.UNRESOLVED_SUBRULE_REF, ); expect(resolver.errors[0].ruleName).to.equal("TOP"); }); diff --git a/packages/chevrotain/test/parse/predicate_spec.ts b/packages/chevrotain/test/parse/predicate_spec.ts index 2517cba33..c00b56137 100644 --- a/packages/chevrotain/test/parse/predicate_spec.ts +++ b/packages/chevrotain/test/parse/predicate_spec.ts @@ -33,7 +33,10 @@ describe("The chevrotain support for custom gates/predicates on DSL production:" } class PredicateOptionParser extends EmbeddedActionsParser { - constructor(input: IToken[] = [], private gate: boolean) { + constructor( + input: IToken[] = [], + private gate: 
boolean, + ) { super(ALL_TOKENS, {}); this.performSelfAnalysis(); this.input = input; @@ -54,25 +57,25 @@ describe("The chevrotain support for custom gates/predicates on DSL production:" const gateOpenInputGood = new PredicateOptionParser( [createRegularToken(A)], - true + true, ).optionRule(); expect(gateOpenInputGood).to.equal("entered!"); const gateOpenInputBad = new PredicateOptionParser( [createRegularToken(B)], - true + true, ).optionRule(); expect(gateOpenInputBad).to.equal("not entered!"); const gateClosedInputGood = new PredicateOptionParser( [createRegularToken(A)], - false + false, ).optionRule(); expect(gateClosedInputGood).to.equal("not entered!"); const gateClosedInputBad = new PredicateOptionParser( [createRegularToken(B)], - false + false, ).optionRule(); expect(gateClosedInputBad).to.equal("not entered!"); }); @@ -83,7 +86,10 @@ describe("The chevrotain support for custom gates/predicates on DSL production:" } class PredicateManyParser extends EmbeddedActionsParser { - constructor(input: IToken[] = [], private gate: boolean) { + constructor( + input: IToken[] = [], + private gate: boolean, + ) { super(ALL_TOKENS, {}); this.performSelfAnalysis(); this.input = input; @@ -105,25 +111,25 @@ describe("The chevrotain support for custom gates/predicates on DSL production:" const gateOpenInputGood = new PredicateManyParser( [createRegularToken(A), createRegularToken(A)], - true + true, ).manyRule(); expect(gateOpenInputGood).to.equal("entered!"); const gateOpenInputBad = new PredicateManyParser( [createRegularToken(B)], - true + true, ).manyRule(); expect(gateOpenInputBad).to.equal("not entered!"); const gateClosedInputGood = new PredicateManyParser( [createRegularToken(A), createRegularToken(A)], - false + false, ).manyRule(); expect(gateClosedInputGood).to.equal("not entered!"); const gateClosedInputBad = new PredicateManyParser( [createRegularToken(B)], - false + false, ).manyRule(); expect(gateClosedInputBad).to.equal("not entered!"); }); @@ -134,7 +140,10 @@ describe("The chevrotain support for custom gates/predicates on DSL production:" } class PredicateAtLeastOneParser extends EmbeddedActionsParser { - constructor(input: IToken[] = [], private gate: boolean) { + constructor( + input: IToken[] = [], + private gate: boolean, + ) { super(ALL_TOKENS, {}); this.performSelfAnalysis(); this.input = input; @@ -156,38 +165,38 @@ describe("The chevrotain support for custom gates/predicates on DSL production:" const gateOpenInputGood = new PredicateAtLeastOneParser( [createRegularToken(A), createRegularToken(A)], - true + true, ).atLeastOneRule(); expect(gateOpenInputGood).to.equal("entered!"); const gateOpenInputBadParser = new PredicateAtLeastOneParser( [createRegularToken(B)], - true + true, ); gateOpenInputBadParser.atLeastOneRule(); expect(gateOpenInputBadParser.errors).to.have.lengthOf(1); expect(gateOpenInputBadParser.errors[0]).to.be.an.instanceOf( - EarlyExitException + EarlyExitException, ); const gateClosedInputGood = new PredicateAtLeastOneParser( [createRegularToken(A), createRegularToken(A)], - false + false, ); gateClosedInputGood.atLeastOneRule(); expect(gateClosedInputGood.errors).to.have.lengthOf(1); expect(gateClosedInputGood.errors[0]).to.be.an.instanceOf( - EarlyExitException + EarlyExitException, ); const gateClosedInputBad = new PredicateAtLeastOneParser( [createRegularToken(B)], - false + false, ); gateClosedInputBad.atLeastOneRule(); expect(gateClosedInputBad.errors).to.have.lengthOf(1); expect(gateClosedInputBad.errors[0]).to.be.an.instanceOf( - 
EarlyExitException + EarlyExitException, ); }); @@ -197,7 +206,10 @@ describe("The chevrotain support for custom gates/predicates on DSL production:" } class PredicateOrParser extends EmbeddedActionsParser { - constructor(input: IToken[] = [], private gate: boolean) { + constructor( + input: IToken[] = [], + private gate: boolean, + ) { super(ALL_TOKENS, {}); this.performSelfAnalysis(); this.input = input; @@ -232,41 +244,41 @@ describe("The chevrotain support for custom gates/predicates on DSL production:" const gateOpenInputA = new PredicateOrParser( [createRegularToken(A)], - true + true, ).orRule(); expect(gateOpenInputA).to.equal("A"); const gateOpenInputB = new PredicateOrParser( [createRegularToken(B)], - true + true, ).orRule(); expect(gateOpenInputB).to.equal("B"); const gateOpenInputC = new PredicateOrParser( [createRegularToken(C)], - true + true, ).orRule(); expect(gateOpenInputC).to.equal("C"); const gateClosedInputA = new PredicateOrParser( [createRegularToken(A)], - false + false, ).orRule(); expect(gateClosedInputA).to.equal("A"); const gateClosedInputBad = new PredicateOrParser( [createRegularToken(B)], - false + false, ); gateClosedInputBad.orRule(); expect(gateClosedInputBad.errors).to.have.lengthOf(1); expect(gateClosedInputBad.errors[0]).to.be.an.instanceOf( - NoViableAltException + NoViableAltException, ); const gateClosedInputC = new PredicateOrParser( [createRegularToken(C)], - false + false, ).orRule(); expect(gateClosedInputC).to.equal("C"); }); diff --git a/packages/chevrotain/test/parse/recognizer/infinite_loop_spec.ts b/packages/chevrotain/test/parse/recognizer/infinite_loop_spec.ts index cb3bdf5e1..15fbc2ed4 100644 --- a/packages/chevrotain/test/parse/recognizer/infinite_loop_spec.ts +++ b/packages/chevrotain/test/parse/recognizer/infinite_loop_spec.ts @@ -47,7 +47,7 @@ describe("The Recognizer's capabilities for detecting / handling infinite loops" parser.input = [createRegularToken(PlusTok)]; const parseResult = parser.loop(); expect(parser.errors[0].message).to.match( - /Redundant input, expecting EOF but found/ + /Redundant input, expecting EOF but found/, ); }); @@ -97,7 +97,7 @@ describe("The Recognizer's capabilities for detecting / handling infinite loops" parser.input = [createRegularToken(A)]; const parseResult = parser.block(); expect(parser.errors[0].message).to.match( - /Expecting: one of these possible Token sequences:/ + /Expecting: one of these possible Token sequences:/, ); expect(parser.errors[0].message).to.match(/[A, B]/); expect(parser.errors[0].message).to.match(/[A, C]/); diff --git a/packages/chevrotain/test/parse/recognizer/recognizer_config_spec.ts b/packages/chevrotain/test/parse/recognizer/recognizer_config_spec.ts index 062c937d1..f66853c06 100644 --- a/packages/chevrotain/test/parse/recognizer/recognizer_config_spec.ts +++ b/packages/chevrotain/test/parse/recognizer/recognizer_config_spec.ts @@ -16,7 +16,7 @@ describe("The Recognizer's Configuration", () => { } expect(() => new InvalidNodeLocationTrackingOption()).to.throw( - 'Invalid config option: "oops"' + 'Invalid config option: "oops"', ); }); @@ -60,7 +60,7 @@ describe("The Recognizer's Configuration", () => { } } expect(() => new IgnoredIssuesParser()).to.throw( - "The <ignoredIssues> IParserConfig property has been deprecated" + "The <ignoredIssues> IParserConfig property has been deprecated", ); }); }); diff --git a/packages/chevrotain/test/parse/recognizer/rules_override_spec.ts b/packages/chevrotain/test/parse/recognizer/rules_override_spec.ts index a514c492d..339236bb3 100644 --- 
a/packages/chevrotain/test/parse/recognizer/rules_override_spec.ts +++ b/packages/chevrotain/test/parse/recognizer/rules_override_spec.ts @@ -88,15 +88,15 @@ describe("The Recognizer's capabilities for overriding grammar productions", () this.CONSUME(PlusTok); return "poof"; }, - { recoveryValueFunc: () => "boom" } + { recoveryValueFunc: () => "boom" }, ); } expect(() => new InvalidOverrideParser([])).to.throw( - "Parser Definition Errors detected" + "Parser Definition Errors detected", ); expect(() => new InvalidOverrideParser([])).to.throw( - "Invalid rule override" + "Invalid rule override", ); expect(() => new InvalidOverrideParser([])).to.throw("->oops<-"); }); diff --git a/packages/chevrotain/test/parse/recognizer_lookahead_spec.ts b/packages/chevrotain/test/parse/recognizer_lookahead_spec.ts index 63c184fb4..0ce4cc502 100644 --- a/packages/chevrotain/test/parse/recognizer_lookahead_spec.ts +++ b/packages/chevrotain/test/parse/recognizer_lookahead_spec.ts @@ -57,7 +57,7 @@ describe("lookahead Regular Tokens Mode", () => { public manyOptionsRule = this.RULE( "manyOptionsRule", - this.parseManyOptionsRule + this.parseManyOptionsRule, ); private parseManyOptionsRule(): string { @@ -458,7 +458,7 @@ describe("lookahead Regular Tokens Mode", () => { recoveryValueFunc: () => { return "-666"; }, - } + }, ); private parseAtLeastOneRule(): string { @@ -570,7 +570,7 @@ describe("lookahead Regular Tokens Mode", () => { separators: [], }; }, - } + }, ); private parseAtLeastOneRule(): any { @@ -943,7 +943,7 @@ describe("lookahead Regular Tokens Mode", () => { public ambiguityRule = this.RULE( "ambiguityRule", - this.parseAmbiguityRule + this.parseAmbiguityRule, ); private parseAmbiguityRule(): void { @@ -974,7 +974,7 @@ describe("lookahead Regular Tokens Mode", () => { } expect(() => new OrAmbiguityLookAheadParser()).to.throw( - "Ambiguous Alternatives Detected:" + "Ambiguous Alternatives Detected:", ); expect(() => new OrAmbiguityLookAheadParser()).to.throw("OneTok"); }); @@ -990,7 +990,7 @@ describe("lookahead Regular Tokens Mode", () => { public ambiguityRule = this.RULE( "ambiguityRule", - this.parseAmbiguityRule + this.parseAmbiguityRule, ); private parseAmbiguityRule(): void { @@ -1018,10 +1018,10 @@ describe("lookahead Regular Tokens Mode", () => { } } expect(() => new OrAmbiguityMultiTokenLookAheadParser()).to.throw( - "Ambiguous Alternatives Detected:" + "Ambiguous Alternatives Detected:", ); expect(() => new OrAmbiguityMultiTokenLookAheadParser()).to.throw( - "TwoTok, ThreeTok, FourTok" + "TwoTok, ThreeTok, FourTok", ); }); }); @@ -1562,7 +1562,7 @@ describe("lookahead Regular Tokens Mode", () => { createRegularToken(ThreeTok), createRegularToken(OneTok), createRegularToken(TwoTok), - ] + ], ); expect(twoIterationsParser.rule()).to.equal(2); @@ -1622,7 +1622,7 @@ describe("lookahead Regular Tokens Mode", () => { expect(parser.errors.length).to.eql(1); // wrong path chosen due to low explicit lookahead expect(parser.errors[0].message).to.include( - "Expecting token of type --> ThreeTok <--" + "Expecting token of type --> ThreeTok <--", ); }); @@ -1928,7 +1928,7 @@ describe("lookahead Regular Tokens Mode", () => { createRegularToken(ThreeTok), createRegularToken(OneTok), createRegularToken(TwoTok), - ] + ], ); expect(twoIterationsParser.rule()).to.equal(2); diff --git a/packages/chevrotain/test/parse/recognizer_spec.ts b/packages/chevrotain/test/parse/recognizer_spec.ts index 717ef53d1..e28058112 100644 --- a/packages/chevrotain/test/parse/recognizer_spec.ts +++ 
b/packages/chevrotain/test/parse/recognizer_spec.ts @@ -26,7 +26,7 @@ function defineRecognizerSpecs( contextName: string, createToken: (c: ITokenConfig) => TokenType, createTokenInstance: typeof createRegularToken, - tokenMatcher: TokenMatcher + tokenMatcher: TokenMatcher, ) { context("Recognizer " + contextName, () => { let PlusTok: TokenType; @@ -125,7 +125,7 @@ function defineRecognizerSpecs( this.numbers += numFromCaller; this.letters += charFromCaller; }); - } + }, ); public subRule2 = this.RULE( @@ -137,7 +137,7 @@ function defineRecognizerSpecs( this.numbers += numFromCaller; this.letters += charFromCaller; }); - } + }, ); } @@ -263,7 +263,7 @@ function defineRecognizerSpecs( this.parseQualifiedName, { recoveryValueFunc: () => ["666"], - } + }, ); private parseQualifiedName(): string[] { @@ -298,7 +298,7 @@ function defineRecognizerSpecs( this.parseQualifiedName, { recoveryValueFunc: () => ["333"], - } + }, ); private parseQualifiedName(): string[] { @@ -332,7 +332,7 @@ function defineRecognizerSpecs( public qualifiedName = this.RULE( "qualifiedName", - this.parseQualifiedName + this.parseQualifiedName, ); public identifier = this.RULE("identifier", this.parseIdentifier); public idents: string[] = []; @@ -377,7 +377,7 @@ function defineRecognizerSpecs( this.parseQualifiedName, { recoveryValueFunc: () => ["777"], - } + }, ); private parseQualifiedName(): string[] { @@ -413,7 +413,7 @@ function defineRecognizerSpecs( this.parseQualifiedName, { recoveryValueFunc: () => ["999"], - } + }, ); private parseQualifiedName(): string[] { @@ -459,7 +459,7 @@ function defineRecognizerSpecs( const parser: any = new EmbeddedActionsParser([PlusTok], {}); parser.isBackTrackingStack.push(1); expect(parser.shouldInRepetitionRecoveryBeTried(MinusTok, 1)).to.equal( - false + false, ); }); @@ -705,22 +705,23 @@ function defineRecognizerSpecs( describe("The BaseRecognizer", () => { it("Cannot be initialized with a token vector (pre v4.0 API) ", () => { expect( - () => new EmbeddedActionsParser([createTokenInstance(PlusTok)] as any) + () => + new EmbeddedActionsParser([createTokenInstance(PlusTok)] as any), ).to.throw( - "The Parser constructor no longer accepts a token vector as the first argument" + "The Parser constructor no longer accepts a token vector as the first argument", ); }); it("Cannot be initialized with a serializedGrammar property (pre v6.0 API)", () => { const config: any = { serializedGrammar: {} }; expect(() => new EmbeddedActionsParser([], config)).to.throw( - "The Parser's configuration can no longer contain a <serializedGrammar> property."
+ "The Parser's configuration can no longer contain a <serializedGrammar> property.", ); }); it("Cannot be initialized with an empty Token vocabulary", () => { expect(() => new EmbeddedActionsParser([])).to.throw( - "A Token Vocabulary cannot be empty" + "A Token Vocabulary cannot be empty", ); }); @@ -742,16 +743,16 @@ function defineRecognizerSpecs( expect(() => new SkipValidationsParser(true)).to.not.throw(); expect(() => new SkipValidationsParser(false)).to.throw( - "Parser Definition Errors detected:" + "Parser Definition Errors detected:", ); }); it("can only SAVE_ERROR for recognition exceptions", () => { const parser: any = new EmbeddedActionsParser([IntTok]); expect(() => - parser.SAVE_ERROR(new Error("I am some random Error")) + parser.SAVE_ERROR(new Error("I am some random Error")), ).to.throw( - "Trying to save an Error which is not a RecognitionException" + "Trying to save an Error which is not a RecognitionException", ); expect(parser.input).to.be.an.instanceof(Array); }); @@ -822,16 +823,16 @@ function defineRecognizerSpecs( }, () => { return true; - } + }, ); expect(() => backTrackingThrows.call(parser)).to.throw( - "division by zero, boom" + "division by zero, boom", ); const throwsRecogError = () => { throw new NotAllInputParsedException( "sad sad panda", - createTokenInstance(PlusTok) + createTokenInstance(PlusTok), ); }; const backTrackingFalse = parser.BACKTRACK(throwsRecogError, () => { @@ -859,7 +860,7 @@ function defineRecognizerSpecs( expect(() => { new WrongOrderOfSelfAnalysisParser().input = []; }).to.throw( - `Missing <performSelfAnalysis> invocation at the end of the Parser's constructor.` + `Missing <performSelfAnalysis> invocation at the end of the Parser's constructor.`, ); }); @@ -880,7 +881,7 @@ function defineRecognizerSpecs( } expect(() => new WrongOrderOfSelfAnalysisParser()).to.throw( - "Grammar rule may not be defined after the 'performSelfAnalysis' method has been called" + "Grammar rule may not be defined after the 'performSelfAnalysis' method has been called", ); }); @@ -901,7 +902,7 @@ function defineRecognizerSpecs( } expect(() => new WrongOrderOfSelfAnalysisParser()).to.throw( - "Grammar rule may not be defined after the 'performSelfAnalysis' method has been called" + "Grammar rule may not be defined after the 'performSelfAnalysis' method has been called", ); }); @@ -1047,7 +1048,7 @@ function defineRecognizerSpecs( }, { resyncEnabled: false, - } + }, ); } const parser: any = new RethrowOtherErrors([ @@ -1123,7 +1124,7 @@ function defineRecognizerSpecs( parser.myStatement(); expect(parser.errors[0]).to.be.an.instanceof(MismatchedTokenException); expect(parser.errors[0].message).to.equal( - "expecting semiColon at end of myStatement" + "expecting semiColon at end of myStatement", ); expect(parser.errors[0].context.ruleStack).to.deep.equal([ "myStatement", @@ -1156,7 +1157,7 @@ function defineRecognizerSpecs( parser.myStatement(); expect(parser.errors[0]).to.be.an.instanceof(NoViableAltException); expect(parser.errors[0].message).to.include( - "None of the alternatives matched" + "None of the alternatives matched", ); expect(parser.errors[0].context.ruleStack).to.deep.equal([ "myStatement", @@ -1350,7 +1351,7 @@ function defineRecognizerSpecs( parser.rule(); expect(parser.errors[0]).to.be.an.instanceof(EarlyExitException); expect(parser.errors[0].message).to.contain( - "expecting at least one iteration" + "expecting at least one iteration", ); expect(parser.errors[0].message).to.contain("MinusTok"); expect(parser.errors[0].message).to.contain("+"); @@ -1445,7 +1446,7 @@ function defineRecognizerSpecs( 
parser.rule(); expect(parser.errors[0]).to.be.an.instanceof(EarlyExitException); expect(parser.errors[0].message).to.contain( - "expecting at least one iteration" + "expecting at least one iteration", ); expect(parser.errors[0].message).to.contain("MinusTok"); expect(parser.errors[0].message).to.contain("+"); @@ -1560,11 +1561,11 @@ function defineRecognizerSpecs( ruleStack: ["topRule", "rule2"], occurrenceStack: [1, 4], }, - ] + ], ); expect(() => - parser.computeContentAssist("invalid_rule_name", []) + parser.computeContentAssist("invalid_rule_name", []), ).to.throw("does not exist in this grammar"); }); }); @@ -1575,5 +1576,5 @@ defineRecognizerSpecs( "Regular Tokens Mode", createToken, createRegularToken, - tokenStructuredMatcher + tokenStructuredMatcher, ); diff --git a/packages/chevrotain/test/scan/first_char_spec.ts b/packages/chevrotain/test/scan/first_char_spec.ts index 0c8141e76..bd5b5bbf0 100644 --- a/packages/chevrotain/test/scan/first_char_spec.ts +++ b/packages/chevrotain/test/scan/first_char_spec.ts @@ -8,7 +8,7 @@ describe("The Chevrotain Lexer First Char Optimization", () => { const firstChars = firstCharOptimizedIndices( ast.value, {}, - ast.flags.ignoreCase + ast.flags.ignoreCase, ); expect(firstChars).to.deep.equal([65, 97]); }); @@ -18,7 +18,7 @@ describe("The Chevrotain Lexer First Char Optimization", () => { const firstChars = firstCharOptimizedIndices( ast.value, {}, - ast.flags.ignoreCase + ast.flags.ignoreCase, ); expect(firstChars).to.deep.equal([65, 66, 97, 98]); }); @@ -28,7 +28,7 @@ describe("The Chevrotain Lexer First Char Optimization", () => { const firstChars = firstCharOptimizedIndices( ast.value, {}, - ast.flags.ignoreCase + ast.flags.ignoreCase, ); expect(firstChars).to.deep.equal([256, 257, 258, 259]); }); @@ -38,7 +38,7 @@ describe("The Chevrotain Lexer First Char Optimization", () => { const firstChars = firstCharOptimizedIndices( ast.value, {}, - ast.flags.ignoreCase + ast.flags.ignoreCase, ); expect(firstChars).to.deep.equal([255, 256, 257, 258, 259]); }); diff --git a/packages/chevrotain/test/scan/lexer_errors_public_spec.ts b/packages/chevrotain/test/scan/lexer_errors_public_spec.ts index 790ba1514..84b409357 100644 --- a/packages/chevrotain/test/scan/lexer_errors_public_spec.ts +++ b/packages/chevrotain/test/scan/lexer_errors_public_spec.ts @@ -10,11 +10,11 @@ describe("The Chevrotain default lexer error message provider", () => { 23, 1, 0, - 23 + 23, ); expect(msg).to.equal( - "unexpected character: ->+<- at offset: 23, skipped 1 characters." 
+ "unexpected character: ->+<- at offset: 23, skipped 1 characters.", ); }); @@ -28,7 +28,7 @@ describe("The Chevrotain default lexer error message provider", () => { defaultLexerErrorProvider.buildUnableToPopLexerModeMessage(popToken); expect(msg).to.equal( - "Unable to pop Lexer Mode after encountering Token ->EXIT_NUMBERS<- The Mode Stack is empty" + "Unable to pop Lexer Mode after encountering Token ->EXIT_NUMBERS<- The Mode Stack is empty", ); }); }); diff --git a/packages/chevrotain/test/scan/lexer_spec.ts b/packages/chevrotain/test/scan/lexer_spec.ts index b32e79d4f..94b02b8c0 100644 --- a/packages/chevrotain/test/scan/lexer_spec.ts +++ b/packages/chevrotain/test/scan/lexer_spec.ts @@ -42,7 +42,7 @@ function defineLexerSpecs( createToken: (c: ITokenConfig) => TokenType, tokenMatcher: TokenMatcher, skipValidationChecks = false, - lexerConfig: ILexerConfig + lexerConfig: ILexerConfig, ) { const testFull = lexerConfig.positionTracking === "full"; const testStart = lexerConfig.positionTracking === "onlyStart" || testFull; @@ -80,7 +80,7 @@ function defineLexerSpecs( ], { positionTracking: "onlyOffset", - } + }, ); }); @@ -264,7 +264,7 @@ function defineLexerSpecs( let input = "a"; let result = testLexer.tokenize(input); expect( - tokenMatcher(result.tokens[0], SingleCharacterWithIgnoreCaseFlagTok) + tokenMatcher(result.tokens[0], SingleCharacterWithIgnoreCaseFlagTok), ).to.be.true; expect(result.tokens[0].image).to.equal("a"); expect(result.tokens[0].startOffset).to.equal(0); @@ -272,7 +272,7 @@ function defineLexerSpecs( input = "A"; result = testLexer.tokenize(input); expect( - tokenMatcher(result.tokens[0], SingleCharacterWithIgnoreCaseFlagTok) + tokenMatcher(result.tokens[0], SingleCharacterWithIgnoreCaseFlagTok), ).to.be.true; expect(result.tokens[0].image).to.equal("A"); expect(result.tokens[0].startOffset).to.equal(0); @@ -359,7 +359,7 @@ function defineLexerSpecs( expect(result.errors.length).to.equal(1); expect(result.errors[0].tokenTypes).to.deep.equal([MissingPattern]); expect(result.errors[0].type).to.equal( - LexerDefinitionErrorType.MISSING_PATTERN + LexerDefinitionErrorType.MISSING_PATTERN, ); expect(result.errors[0].message).to.contain("MissingPattern"); expect(result.valid).to.deep.equal([ValidNaPattern]); @@ -387,7 +387,7 @@ function defineLexerSpecs( expect(result.errors.length).to.equal(1); expect(result.errors[0].tokenTypes).to.deep.equal([InvalidPattern]); expect(result.errors[0].type).to.equal( - LexerDefinitionErrorType.INVALID_PATTERN + LexerDefinitionErrorType.INVALID_PATTERN, ); expect(result.errors[0].message).to.contain("InvalidPattern"); expect(result.valid).to.deep.equal([ValidNaPattern]); @@ -409,7 +409,7 @@ function defineLexerSpecs( expect(errors.length).to.equal(1); expect(errors[0].tokenTypes).to.deep.equal([MultiLinePattern]); expect(errors[0].type).to.equal( - LexerDefinitionErrorType.UNSUPPORTED_FLAGS_FOUND + LexerDefinitionErrorType.UNSUPPORTED_FLAGS_FOUND, ); expect(errors[0].message).to.contain("MultiLinePattern"); }); @@ -420,7 +420,7 @@ function defineLexerSpecs( expect(errors.length).to.equal(1); expect(errors[0].tokenTypes).to.deep.equal([GlobalPattern]); expect(errors[0].type).to.equal( - LexerDefinitionErrorType.UNSUPPORTED_FLAGS_FOUND + LexerDefinitionErrorType.UNSUPPORTED_FLAGS_FOUND, ); expect(errors[0].message).to.contain("GlobalPattern"); }); @@ -451,7 +451,7 @@ function defineLexerSpecs( expect(errors.length).to.equal(1); expect(errors[0].tokenTypes).to.deep.equal([InvalidToken]); expect(errors[0].type).to.equal( - 
LexerDefinitionErrorType.EOI_ANCHOR_FOUND + LexerDefinitionErrorType.EOI_ANCHOR_FOUND, ); expect(errors[0].message).to.contain("InvalidToken"); }); @@ -471,7 +471,7 @@ function defineLexerSpecs( expect(errors.length).to.equal(1); expect(errors[0].tokenTypes).to.deep.equal([InvalidToken]); expect(errors[0].type).to.equal( - LexerDefinitionErrorType.SOI_ANCHOR_FOUND + LexerDefinitionErrorType.SOI_ANCHOR_FOUND, ); expect(errors[0].message).to.contain("InvalidToken"); }); @@ -495,7 +495,7 @@ function defineLexerSpecs( ClassKeyword, ]); expect(errors[0].type).to.equal( - LexerDefinitionErrorType.UNREACHABLE_PATTERN + LexerDefinitionErrorType.UNREACHABLE_PATTERN, ); expect(errors[0].message).to.contain("can never be matched"); }); @@ -523,7 +523,7 @@ function defineLexerSpecs( IntegerValid, ]); expect(errors[0].type).to.equal( - LexerDefinitionErrorType.DUPLICATE_PATTERNS_FOUND + LexerDefinitionErrorType.DUPLICATE_PATTERNS_FOUND, ); expect(errors[0].message).to.contain("IntegerValid"); expect(errors[0].message).to.contain("DecimalInvalid"); @@ -541,11 +541,11 @@ function defineLexerSpecs( expect(errors.length).to.equal(1); expect(errors[0].tokenTypes).to.deep.equal([emptyMatch]); expect(errors[0].type).to.equal( - LexerDefinitionErrorType.EMPTY_MATCH_PATTERN + LexerDefinitionErrorType.EMPTY_MATCH_PATTERN, ); expect(errors[0].message).to.contain("emptyMatch"); expect(errors[0].message).to.contain( - "must not match an empty string" + "must not match an empty string", ); }); @@ -561,7 +561,7 @@ function defineLexerSpecs( expect(errors.length).to.equal(1); expect(errors[0].tokenTypes).to.deep.equal([InvalidGroupNumber]); expect(errors[0].type).to.equal( - LexerDefinitionErrorType.INVALID_GROUP_TYPE_FOUND + LexerDefinitionErrorType.INVALID_GROUP_TYPE_FOUND, ); expect(errors[0].message).to.contain("InvalidGroupNumber"); }); @@ -671,7 +671,7 @@ function defineLexerSpecs( const allPatterns = map( analyzeResult.patternIdxToConfig, - (currConfig) => currConfig.pattern + (currConfig) => currConfig.pattern, ); expect(allPatterns.length).to.equal(8); @@ -691,7 +691,7 @@ function defineLexerSpecs( const patternIdxToClass = map( analyzeResult.patternIdxToConfig, - (currConfig) => currConfig.tokenType + (currConfig) => currConfig.tokenType, ); expect(keys(patternIdxToClass).length).to.equal(8); expect(patternIdxToClass[0]).to.equal(If); @@ -725,7 +725,7 @@ function defineLexerSpecs( }); const allPatterns = map( analyzeResult.patternIdxToConfig, - (currConfig) => currConfig.pattern + (currConfig) => currConfig.pattern, ); expect(allPatterns.length).to.equal(8); const allPatternsString = map(allPatterns, (pattern) => { @@ -749,7 +749,7 @@ function defineLexerSpecs( }); const patternIdxToClass = map( analyzeResult.patternIdxToConfig, - (currConfig) => currConfig.tokenType + (currConfig) => currConfig.tokenType, ); expect(keys(patternIdxToClass).length).to.equal(8); expect(patternIdxToClass[0]).to.equal(If); @@ -992,7 +992,7 @@ function defineLexerSpecs( Whitespace, NewLine, ], - lexerConfig + lexerConfig, ); //noinspection BadExpressionStatementJS expect(ifElseLexer.lexerDefinitionErrors).to.be.empty; @@ -1136,13 +1136,13 @@ function defineLexerSpecs( () => new Lexer([], { positionTracking: "oops", - }) + }), ).to.throw("Invalid config option:" + ' "oops"'); }); it("Will throw an error during the creation of a Lexer if the lexer config argument is a boolean", () => { expect(() => new Lexer([], false)).to.throw( - "The second argument to the Lexer constructor is now an ILexerConfig" + "The second argument to the 
Lexer constructor is now an ILexerConfig",
       );
     });
 
@@ -1151,19 +1151,19 @@
       "line terminators without specifying the lineTerminatorCharacters",
       () => {
         expect(
-          () => new Lexer([], { lineTerminatorsPattern: /\n/g })
+          () => new Lexer([], { lineTerminatorsPattern: /\n/g }),
         ).to.throw(
-          "Error: Missing <lineTerminatorCharacters> property on the Lexer config."
+          "Error: Missing <lineTerminatorCharacters> property on the Lexer config.",
         );
-      }
+      },
     );
 
     it("Will throw an error during the creation of a Lexer if the Lexer's definition is invalid", () => {
       expect(
-        () => new Lexer([EndOfInputAnchor, If, Else], lexerConfig)
+        () => new Lexer([EndOfInputAnchor, If, Else], lexerConfig),
       ).to.throw(/Errors detected in definition of Lexer/);
       expect(
-        () => new Lexer([EndOfInputAnchor, If, Else], lexerConfig)
+        () => new Lexer([EndOfInputAnchor, If, Else], lexerConfig),
       ).to.throw(/EndOfInputAnchor/);
     });
 
@@ -1173,14 +1173,14 @@
           new Lexer([EndOfInputAnchor, If, Else], {
             positionTracking: "onlyOffset",
             deferDefinitionErrorsHandling: true,
-          })
+          }),
       ).to.not.throw(/Errors detected in definition of Lexer/);
       expect(
         () =>
           new Lexer([EndOfInputAnchor, If, Else], {
             positionTracking: "onlyOffset",
             deferDefinitionErrorsHandling: true,
-          })
+          }),
       ).to.not.throw(/EndOfInputAnchor/);
 
       const lexerWithErrs = new Lexer([EndOfInputAnchor, If, Else], {
@@ -1191,10 +1191,10 @@
       expect(lexerWithErrs.lexerDefinitionErrors).to.not.be.empty;
       // even when the Error handling is deferred, actual usage of an invalid lexer is not permitted!
       expect(() => lexerWithErrs.tokenize("else")).to.throw(
-        /Unable to Tokenize because Errors detected in definition of Lexer/
+        /Unable to Tokenize because Errors detected in definition of Lexer/,
       );
       expect(() => lexerWithErrs.tokenize("else")).to.throw(
-        /EndOfInputAnchor/
+        /EndOfInputAnchor/,
       );
     });
   }
@@ -1213,7 +1213,7 @@
           Whitespace,
           NewLine,
         ],
-        lexerConfig
+        lexerConfig,
       );
       const input = "if (666) return 1@#$@#$\n" + "\telse return 2";
@@ -1317,7 +1317,7 @@
           Whitespace,
           NewLine,
         ],
-        lexerConfig
+        lexerConfig,
       );
       const input = "if&&&&&&&&&&&&&&&&&&&&&&&&&&&&";
@@ -1359,7 +1359,7 @@
         {
           ...lexerConfig,
           recoveryEnabled: false,
-        }
+        },
       );
       const input = "if (666) return 1@#$@#$\n" + "\telse return 2";
@@ -1430,7 +1430,7 @@
     it("can deal with line terminators inside multi-line Tokens", () => {
       const ifElseLexer = new Lexer(
         [If, Else, WhitespaceNotSkipped],
-        lexerConfig
+        lexerConfig,
       );
 
       const input = "if\r\r\telse\rif\n";
@@ -1582,7 +1582,7 @@
     it("supports Token groups", () => {
       const ifElseLexer = new Lexer(
         [If, Else, Comment, NewLine],
-        lexerConfig
+        lexerConfig,
       );
       const input = "if//else";
       const lexResult = ifElseLexer.tokenize(input);
@@ -1618,7 +1618,7 @@
     it("won't have leftover state when using token groups", () => {
       const ifElseLexer = new Lexer(
         [If, Else, Comment, NewLine],
-        lexerConfig
+        lexerConfig,
       );
       const input = "if//else";
       let lexResult = ifElseLexer.tokenize(input);
@@ -1747,7 +1747,7 @@
       const lexResult = ModeLexer.tokenize(input);
       expect(lexResult.errors).to.have.lengthOf(1);
       expect(lexResult.errors[0].message).to.equal(
-        "unexpected character: ->+<- at offset: 23, skipped 1 characters."
+        "unexpected character: ->+<- at offset: 23, skipped 1 characters.",
       );
     });
 
@@ -1841,16 +1841,16 @@
         EnterNumbers,
       ]);
       expect(badLexer.lexerDefinitionErrors[0].type).to.equal(
-        LexerDefinitionErrorType.PUSH_MODE_DOES_NOT_EXIST
+        LexerDefinitionErrorType.PUSH_MODE_DOES_NOT_EXIST,
       );
       expect(badLexer.lexerDefinitionErrors[0].message).to.include(
-        "PUSH_MODE"
+        "PUSH_MODE",
       );
       expect(badLexer.lexerDefinitionErrors[0].message).to.include(
-        "EnterNumbers"
+        "EnterNumbers",
       );
       expect(badLexer.lexerDefinitionErrors[0].message).to.include(
-        "which does not exist"
+        "which does not exist",
       );
     });
 
@@ -1869,13 +1869,13 @@
       });
       expect(badLexer.lexerDefinitionErrors).to.have.lengthOf(1);
       expect(badLexer.lexerDefinitionErrors[0].type).to.equal(
-        LexerDefinitionErrorType.MULTI_MODE_LEXER_WITHOUT_MODES_PROPERTY
+        LexerDefinitionErrorType.MULTI_MODE_LEXER_WITHOUT_MODES_PROPERTY,
      );
       expect(badLexer.lexerDefinitionErrors[0].message).to.include(
-        "MultiMode Lexer cannot be initialized"
+        "MultiMode Lexer cannot be initialized",
       );
       expect(badLexer.lexerDefinitionErrors[0].message).to.include(
-        "without a <modes> property"
+        "without a <modes> property",
       );
     });
 
@@ -1892,13 +1892,13 @@
       });
       expect(badLexer.lexerDefinitionErrors).to.have.lengthOf(1);
       expect(badLexer.lexerDefinitionErrors[0].type).to.equal(
-        LexerDefinitionErrorType.MULTI_MODE_LEXER_WITHOUT_DEFAULT_MODE
+        LexerDefinitionErrorType.MULTI_MODE_LEXER_WITHOUT_DEFAULT_MODE,
       );
       expect(badLexer.lexerDefinitionErrors[0].message).to.include(
-        "MultiMode Lexer cannot be initialized"
+        "MultiMode Lexer cannot be initialized",
       );
       expect(badLexer.lexerDefinitionErrors[0].message).to.include(
-        "without a <defaultMode> property"
+        "without a <defaultMode> property",
       );
     });
 
@@ -1919,18 +1919,18 @@
       });
       expect(badLexer.lexerDefinitionErrors).to.have.lengthOf(1);
       expect(badLexer.lexerDefinitionErrors[0].type).to.equal(
-        LexerDefinitionErrorType.MULTI_MODE_LEXER_DEFAULT_MODE_VALUE_DOES_NOT_EXIST
+        LexerDefinitionErrorType.MULTI_MODE_LEXER_DEFAULT_MODE_VALUE_DOES_NOT_EXIST,
       );
       expect(badLexer.lexerDefinitionErrors[0].message).to.include(
-        "MultiMode Lexer cannot be initialized"
+        "MultiMode Lexer cannot be initialized",
       );
       expect(badLexer.lexerDefinitionErrors[0].message).to.include(
-        "which does not exist"
+        "which does not exist",
       );
       expect(badLexer.lexerDefinitionErrors[0].message).to.include(
-        "bisli"
+        "bisli",
       );
-      }
+      },
     );
 
     it("Will detect a Lexer definition which has undefined Token Typees", () => {
@@ -1941,10 +1941,10 @@
       });
       expect(badLexer.lexerDefinitionErrors).to.have.lengthOf(1);
       expect(badLexer.lexerDefinitionErrors[0].type).to.equal(
-        LexerDefinitionErrorType.LEXER_DEFINITION_CANNOT_CONTAIN_UNDEFINED
+        LexerDefinitionErrorType.LEXER_DEFINITION_CANNOT_CONTAIN_UNDEFINED,
      );
       expect(badLexer.lexerDefinitionErrors[0].message).to.include(
-        "A Lexer cannot be initialized using an undefined Token Type"
+        "A Lexer cannot be initialized using an undefined Token Type",
       );
       expect(badLexer.lexerDefinitionErrors[0].message).to.include("2");
     });
@@ -1970,10 +1970,10 @@
       expect(badLexer.lexerDefinitionErrors).to.have.lengthOf(1);
       const error = badLexer.lexerDefinitionErrors[0];
       expect(error.type).to.equal(
-        LexerDefinitionErrorType.MULTI_MODE_LEXER_LONGER_ALT_NOT_IN_CURRENT_MODE
+        LexerDefinitionErrorType.MULTI_MODE_LEXER_LONGER_ALT_NOT_IN_CURRENT_MODE,
      );
       expect(error.message).to.include(
-        "A MultiMode Lexer cannot be initialized with a longer_alt"
+        "A MultiMode Lexer cannot be initialized with a longer_alt",
      );
       expect(error.message).to.include("");
       expect(error.message).to.include("on token");
@@ -1993,10 +1993,10 @@
           startOffset: number,
           length: number,
           line?: number,
-          column?: number
+          column?: number,
         ): string {
           return `[${line}, ${column}] Unknown character ${fullText.charAt(
-            startOffset
+            startOffset,
           )} at position ${startOffset} skipped ${length}`;
         },
       };
@@ -2010,7 +2010,7 @@
       const lexResult = ModeLexerWithCustomErrors.tokenize(input);
       expect(lexResult.errors).to.have.lengthOf(1);
       expect(lexResult.errors[0].message).to.equal(
-        "[1, 24] Unknown character + at position 23 skipped 1"
+        "[1, 24] Unknown character + at position 23 skipped 1",
       );
     });
 
@@ -2019,7 +2019,7 @@
       const lexResult = ModeLexerWithCustomErrors.tokenize(input);
       expect(lexResult.errors).to.have.lengthOf(1);
       expect(lexResult.errors[0].message).to.equal(
-        "No pop for you EXIT_NUMBERS"
+        "No pop for you EXIT_NUMBERS",
       );
     });
   });
@@ -2031,7 +2031,7 @@
         text: string,
         offset: number,
         tokens: IToken[],
-        groups: { [group: string]: IToken[] }
+        groups: { [group: string]: IToken[] },
       ) {
         const result = /^B/.exec(text.substring(offset));
         if (result !== null) {
@@ -2138,11 +2138,11 @@ describe("debugging and messages and optimizations", () => {
         new Lexer([One], {
           ensureOptimizations: true,
           positionTracking: "onlyOffset",
-        })
+        }),
     ).to.throw("Lexer Modes: < defaultMode > cannot be optimized.");
     expect(console.error).to.have.been.called;
     expect(consoleErrorSpy.args[0][0]).to.include(
-      "The regexp unicode flag is not currently supported by the regexp-to-ast library"
+      "The regexp unicode flag is not currently supported by the regexp-to-ast library",
     );
   });
 
@@ -2160,7 +2160,7 @@ describe("debugging and messages and optimizations", () => {
     expect(() => new Lexer([Five, NewLine])).to.not.throw();
     expect(console.warn).to.have.been.called;
     expect(consoleWarnSpy.args[0][0]).to.include(
-      "Warning: A Custom Token Pattern should specify the <line_breaks> option"
+      "Warning: A Custom Token Pattern should specify the <line_breaks> option",
     );
   });
 
@@ -2176,11 +2176,11 @@ describe("debugging and messages and optimizations", () => {
         new Lexer([One], {
           ensureOptimizations: true,
           positionTracking: "onlyOffset",
-        })
+        }),
     ).to.throw("Lexer Modes: < defaultMode > cannot be optimized.");
     expect(console.error).to.have.been.called;
     expect(consoleErrorSpy.args[0][0]).to.include(
-      "TokenType: <One> is using a custom token pattern without providing <start_chars_hint>"
+      "TokenType: <One> is using a custom token pattern without providing <start_chars_hint>",
     );
   });
 
@@ -2193,9 +2193,9 @@ describe("debugging and messages and optimizations", () => {
         safeMode: true,
         ensureOptimizations: true,
         positionTracking: "onlyOffset",
-      })
+      }),
     ).to.throw(
-      '"safeMode" and "ensureOptimizations" flags are mutually exclusive.'
+ '"safeMode" and "ensureOptimizations" flags are mutually exclusive.', ); }); @@ -2217,7 +2217,7 @@ describe("debugging and messages and optimizations", () => { }); expect( (alphaLexerNoSafeMode).charCodeToPatternIdxToConfig - .defaultMode[97][0].tokenType + .defaultMode[97][0].tokenType, ).to.equal(Alpha); }); }); @@ -2250,14 +2250,14 @@ defineLexerSpecs( createToken, tokenStructuredMatcher, false, - { positionTracking: "full" } + { positionTracking: "full" }, ); defineLexerSpecs( "Regular Tokens Mode (custom mode)", wrapWithCustom(createToken), tokenStructuredMatcher, true, - { positionTracking: "full" } + { positionTracking: "full" }, ); defineLexerSpecs( @@ -2265,14 +2265,14 @@ defineLexerSpecs( createToken, tokenStructuredMatcher, false, - { positionTracking: "onlyStart" } + { positionTracking: "onlyStart" }, ); defineLexerSpecs( "Regular Tokens Mode (custom mode) - only start", wrapWithCustom(createToken), tokenStructuredMatcher, true, - { positionTracking: "onlyStart" } + { positionTracking: "onlyStart" }, ); defineLexerSpecs( @@ -2280,12 +2280,12 @@ defineLexerSpecs( createToken, tokenStructuredMatcher, false, - { positionTracking: "onlyOffset" } + { positionTracking: "onlyOffset" }, ); defineLexerSpecs( "Regular Tokens Mode (custom mode)", wrapWithCustom(createToken), tokenStructuredMatcher, true, - { positionTracking: "onlyOffset" } + { positionTracking: "onlyOffset" }, ); diff --git a/packages/chevrotain/test/scan/perf_tracer_spec.ts b/packages/chevrotain/test/scan/perf_tracer_spec.ts index 80915964a..1a2a631b7 100644 --- a/packages/chevrotain/test/scan/perf_tracer_spec.ts +++ b/packages/chevrotain/test/scan/perf_tracer_spec.ts @@ -26,7 +26,7 @@ describe("Chevrotain's Lexer Init Performance Tracing", () => { expect(consoleLogSpy).to.have.been.called; expect(consoleLogSpy.args[0][0]).to.include("--> "); expect(consoleLogSpy.args[1][0]).to.include( - "\t--> " + "\t--> ", ); }); diff --git a/packages/chevrotain/test/scan/regexp_spec.ts b/packages/chevrotain/test/scan/regexp_spec.ts index ef774f475..2fea4a7c7 100644 --- a/packages/chevrotain/test/scan/regexp_spec.ts +++ b/packages/chevrotain/test/scan/regexp_spec.ts @@ -40,8 +40,8 @@ describe("the regExp analysis", () => { it("can compute for string literal", () => { expect( getOptimizedStartCodesIndices( - /"(?:[^\\"]|\\(?:[bfnrtv"\\/]|u[0-9a-fA-F]{4}))*"/ - ) + /"(?:[^\\"]|\\(?:[bfnrtv"\\/]|u[0-9a-fA-F]{4}))*"/, + ), ).to.deep.equal([34]); }); diff --git a/packages/chevrotain/test/scan/skip_validations_spec.ts b/packages/chevrotain/test/scan/skip_validations_spec.ts index 0a7a72dff..fc3313203 100644 --- a/packages/chevrotain/test/scan/skip_validations_spec.ts +++ b/packages/chevrotain/test/scan/skip_validations_spec.ts @@ -22,15 +22,15 @@ describe("Chevrotain's Lexer Init Performance Tracing", () => { const consoleArgs = flatten(consoleLogSpy.args); const runtimeChecksArg = find(consoleArgs, (item: string) => - /performRuntimeChecks/.test(item) + /performRuntimeChecks/.test(item), ); expect(runtimeChecksArg).to.not.be.undefined; const warningRuntimeChecksAra = find(consoleArgs, (item: string) => - /performWarningRuntimeChecks/.test(item) + /performWarningRuntimeChecks/.test(item), ); expect(warningRuntimeChecksAra).to.not.be.undefined; const validateArg = find(consoleArgs, (item: string) => - /validatePatterns/.test(item) + /validatePatterns/.test(item), ); expect(validateArg).to.not.be.undefined; }); @@ -42,15 +42,15 @@ describe("Chevrotain's Lexer Init Performance Tracing", () => { const consoleArgs = 
     const runtimeChecksArg = find(consoleArgs, (item: string) =>
-      /performRuntimeChecks/.test(item)
+      /performRuntimeChecks/.test(item),
     );
     expect(runtimeChecksArg).to.be.undefined;
     const warningRuntimeChecksAra = find(consoleArgs, (item: string) =>
-      /performWarningRuntimeChecks/.test(item)
+      /performWarningRuntimeChecks/.test(item),
     );
     expect(warningRuntimeChecksAra).to.be.undefined;
     const validateArg = find(consoleArgs, (item: string) =>
-      /validatePatterns/.test(item)
+      /validatePatterns/.test(item),
     );
     expect(validateArg).to.be.undefined;
   });
diff --git a/packages/chevrotain/test/scan/token_spec.ts b/packages/chevrotain/test/scan/token_spec.ts
index 1cc432caa..0888cc0ea 100644
--- a/packages/chevrotain/test/scan/token_spec.ts
+++ b/packages/chevrotain/test/scan/token_spec.ts
@@ -89,7 +89,7 @@ describe("The Chevrotain Tokens namespace", () => {
       -1,
       -1,
       -1,
-      -1
+      -1,
     );
     const BInstanceRegular = createTokenInstance(
       BTokRegular,
@@ -99,7 +99,7 @@ describe("The Chevrotain Tokens namespace", () => {
       -1,
       -1,
       -1,
-      -1
+      -1,
     );
 
     expect(tokenMatcher(AInstanceRegular, ATokRegular)).to.be.true;
@@ -176,7 +176,7 @@ describe("The Chevrotain Tokens namespace", () => {
       createToken({
         name: "A",
         parent: "oops",
-      })
+      }),
     ).to.throw("The parent property is no longer supported");
   });
diff --git a/packages/chevrotain/test/utils/builders.ts b/packages/chevrotain/test/utils/builders.ts
index 9bcf3b1e4..34aae9a03 100644
--- a/packages/chevrotain/test/utils/builders.ts
+++ b/packages/chevrotain/test/utils/builders.ts
@@ -2,7 +2,7 @@ import { ITokenConfig, TokenType } from "@chevrotain/types";
 import { createToken } from "../../src/scan/tokens_public.js";
 
 export function createDeferredTokenBuilder(
-  config: ITokenConfig
+  config: ITokenConfig,
 ): () => TokenType {
   let tokenCache: TokenType;
   return function createTokenOnDemand(): TokenType {
diff --git a/packages/chevrotain/test/utils/matchers.ts b/packages/chevrotain/test/utils/matchers.ts
index aea666fac..31d286f06 100644
--- a/packages/chevrotain/test/utils/matchers.ts
+++ b/packages/chevrotain/test/utils/matchers.ts
@@ -15,7 +15,7 @@ export function createRegularToken(
   startColumn?: number,
   endOffset?: number,
   endLine?: number,
-  endColumn?: number
+  endColumn?: number,
 ): IToken {
   return {
     image: image,
diff --git a/packages/cst-dts-gen/src/api.ts b/packages/cst-dts-gen/src/api.ts
index 2ab4414d3..c5023ccd7 100644
--- a/packages/cst-dts-gen/src/api.ts
+++ b/packages/cst-dts-gen/src/api.ts
@@ -9,7 +9,7 @@ const defaultOptions: Required<GenerateDtsOptions> = {
 
 export function generateCstDts(
   productions: Record<string, Rule>,
-  options?: GenerateDtsOptions
+  options?: GenerateDtsOptions,
 ): string {
   const effectiveOptions = {
     ...defaultOptions,
diff --git a/packages/cst-dts-gen/src/generate.ts b/packages/cst-dts-gen/src/generate.ts
index 65a3d9e4b..f7dc20681 100644
--- a/packages/cst-dts-gen/src/generate.ts
+++ b/packages/cst-dts-gen/src/generate.ts
@@ -10,21 +10,21 @@ import {
 
 export function genDts(
   model: CstNodeTypeDefinition[],
-  options: Required<GenerateDtsOptions>
+  options: Required<GenerateDtsOptions>,
 ): string {
   let contentParts: string[] = [];
 
   contentParts = contentParts.concat(
-    `import type { CstNode, ICstVisitor, IToken } from "chevrotain";`
+    `import type { CstNode, ICstVisitor, IToken } from "chevrotain";`,
   );
 
   contentParts = contentParts.concat(
-    flatten(map(model, (node) => genCstNodeTypes(node)))
+    flatten(map(model, (node) => genCstNodeTypes(node))),
   );
 
   if (options.includeVisitorInterface) {
     contentParts = contentParts.concat(
-      genVisitor(options.visitorInterfaceName, model)
+      genVisitor(options.visitorInterfaceName, model),
     );
   }
diff --git a/packages/cst-dts-gen/src/model.ts b/packages/cst-dts-gen/src/model.ts
index 6dbbabfd7..47f48e966 100644
--- a/packages/cst-dts-gen/src/model.ts
+++ b/packages/cst-dts-gen/src/model.ts
@@ -15,7 +15,7 @@ import { GAstVisitor, NonTerminal } from "@chevrotain/gast";
 import { assign, flatten, groupBy, map, some, values } from "lodash-es";
 
 export function buildModel(
-  productions: Record<string, Rule>
+  productions: Record<string, Rule>,
 ): CstNodeTypeDefinition[] {
   const generator = new CstNodeDefinitionGenerator();
   const allRules = values(productions);
@@ -89,7 +89,7 @@ class CstNodeDefinitionGenerator extends GAstVisitor {
   }
 
   visitRepetitionMandatoryWithSeparator(
-    node: RepetitionMandatoryWithSeparator
+    node: RepetitionMandatoryWithSeparator,
   ) {
     return this.visitEach(node.definition).concat({
       propertyName: node.separator.name,
@@ -134,11 +134,11 @@ class CstNodeDefinitionGenerator extends GAstVisitor {
 
   private visitEachAndOverrideWith(
     definition: IProduction[],
-    override: Partial<PropertyTupleElement>
+    override: Partial<PropertyTupleElement>,
   ) {
     return map(
       this.visitEach(definition),
-      (definition) => assign({}, definition, override) as PropertyTupleElement
+      (definition) => assign({}, definition, override) as PropertyTupleElement,
     );
   }
 
@@ -146,8 +146,8 @@ class CstNodeDefinitionGenerator extends GAstVisitor {
     return flatten(
       map(
         definition,
-        (definition) => this.visit(definition) as PropertyTupleElement[]
-      )
+        (definition) => this.visit(definition) as PropertyTupleElement[],
+      ),
     );
   }
 }
@@ -159,7 +159,7 @@ type PropertyTupleElement = {
 };
 
 function getType(
-  production: Terminal | NonTerminal | TokenType
+  production: Terminal | NonTerminal | TokenType,
 ): TokenArrayType | RuleArrayType {
   if (production instanceof NonTerminal) {
     return {
diff --git a/packages/gast/src/helpers.ts b/packages/gast/src/helpers.ts
index 17f5e224c..f395aee19 100644
--- a/packages/gast/src/helpers.ts
+++ b/packages/gast/src/helpers.ts
@@ -15,7 +15,7 @@ import {
 import type { IProduction, IProductionWithOccurrence } from "@chevrotain/types";
 
 export function isSequenceProd(
-  prod: IProduction
+  prod: IProduction,
 ): prod is { definition: IProduction[] } & IProduction {
   return (
     prod instanceof Alternative ||
@@ -31,7 +31,7 @@ export function isSequenceProd(
 
 export function isOptionalProd(
   prod: IProduction,
-  alreadyVisited: NonTerminal[] = []
+  alreadyVisited: NonTerminal[] = [],
 ): boolean {
   const isDirectlyOptional =
     prod instanceof Option ||
@@ -60,7 +60,7 @@ export function isOptionalProd(
       (prod).definition,
       (subProd: IProduction) => {
         return isOptionalProd(subProd, alreadyVisited);
-      }
+      },
     );
   } else {
     return false;
@@ -68,7 +68,7 @@ export function isOptionalProd(
 }
 
 export function isBranchingProd(
-  prod: IProduction
+  prod: IProduction,
 ): prod is { definition: IProduction[] } & IProduction {
   return prod instanceof Alternation;
 }
diff --git a/packages/gast/src/model.ts b/packages/gast/src/model.ts
index 64c3835be..fc0cb0f4e 100644
--- a/packages/gast/src/model.ts
+++ b/packages/gast/src/model.ts
@@ -18,7 +18,7 @@ function tokenLabel(tokType: TokenType): string {
 
 // TODO: duplicated code to avoid extracting another sub-package -- how to avoid?
 function hasTokenLabel(
-  obj: TokenType
+  obj: TokenType,
 ): obj is TokenType & Pick<ITokenConfig, "LABEL"> {
   return isString(obj.LABEL) && obj.LABEL !== "";
 }
@@ -61,7 +61,7 @@ export class NonTerminal
     super([]);
     assign(
       this,
-      pickBy(options, (v) => v !== undefined)
+      pickBy(options, (v) => v !== undefined),
     );
   }
 
@@ -94,7 +94,7 @@ export class Rule extends AbstractProduction {
     super(options.definition);
     assign(
      this,
-      pickBy(options, (v) => v !== undefined)
+      pickBy(options, (v) => v !== undefined),
     );
   }
 }
@@ -109,7 +109,7 @@ export class Alternative extends AbstractProduction {
     super(options.definition);
     assign(
       this,
-      pickBy(options, (v) => v !== undefined)
+      pickBy(options, (v) => v !== undefined),
     );
   }
 }
@@ -129,7 +129,7 @@ export class Option
     super(options.definition);
     assign(
       this,
-      pickBy(options, (v) => v !== undefined)
+      pickBy(options, (v) => v !== undefined),
     );
   }
 }
@@ -149,7 +149,7 @@ export class RepetitionMandatory
     super(options.definition);
     assign(
       this,
-      pickBy(options, (v) => v !== undefined)
+      pickBy(options, (v) => v !== undefined),
     );
   }
 }
@@ -170,7 +170,7 @@ export class RepetitionMandatoryWithSeparator
     super(options.definition);
     assign(
       this,
-      pickBy(options, (v) => v !== undefined)
+      pickBy(options, (v) => v !== undefined),
     );
   }
 }
@@ -191,7 +191,7 @@ export class Repetition
     super(options.definition);
     assign(
       this,
-      pickBy(options, (v) => v !== undefined)
+      pickBy(options, (v) => v !== undefined),
     );
   }
 }
@@ -212,7 +212,7 @@ export class RepetitionWithSeparator
     super(options.definition);
     assign(
       this,
-      pickBy(options, (v) => v !== undefined)
+      pickBy(options, (v) => v !== undefined),
     );
   }
 }
@@ -243,7 +243,7 @@ export class Alternation
     super(options.definition);
     assign(
       this,
-      pickBy(options, (v) => v !== undefined)
+      pickBy(options, (v) => v !== undefined),
     );
   }
 }
@@ -260,7 +260,7 @@ export class Terminal implements IProductionWithOccurrence {
   }) {
     assign(
       this,
-      pickBy(options, (v) => v !== undefined)
+      pickBy(options, (v) => v !== undefined),
     );
   }
diff --git a/packages/gast/src/visitor.ts b/packages/gast/src/visitor.ts
index 5ecf16512..bf54d22ad 100644
--- a/packages/gast/src/visitor.ts
+++ b/packages/gast/src/visitor.ts
@@ -59,7 +59,7 @@ export abstract class GAstVisitor {
 
   /* c8 ignore next 3 */
   public visitRepetitionMandatoryWithSeparator(
-    node: RepetitionMandatoryWithSeparator
+    node: RepetitionMandatoryWithSeparator,
   ): any {}
 
   /* c8 ignore next */
diff --git a/packages/gast/test/visitor_spec.ts b/packages/gast/test/visitor_spec.ts
index 0e9b080aa..9801bda5d 100644
--- a/packages/gast/test/visitor_spec.ts
+++ b/packages/gast/test/visitor_spec.ts
@@ -186,7 +186,7 @@ describe("the gast visitor", () => {
 
     class TestVisitor extends GAstVisitor {
       visitRepetitionMandatoryWithSeparator(
-        node: RepetitionMandatoryWithSeparator
+        node: RepetitionMandatoryWithSeparator,
       ): void {
         expect(node).to.equal(rootNode);
         expect(node.separator).to.equal;
diff --git a/packages/regexp-to-ast/src/regexp-parser.ts b/packages/regexp-to-ast/src/regexp-parser.ts
index 943f246e1..3250a968a 100644
--- a/packages/regexp-to-ast/src/regexp-parser.ts
+++ b/packages/regexp-to-ast/src/regexp-parser.ts
@@ -198,7 +198,7 @@ export class RegExpParser {
   }
 
   protected quantifier(
-    isBacktracking: boolean = false
+    isBacktracking: boolean = false,
   ): Quantifier | undefined {
     let range: Partial | undefined = undefined;
     const begin = this.idx;
@@ -824,7 +824,7 @@ export class RegExpParser {
       "' but found: '" +
       this.input[this.idx] +
       "' at offset: " +
-      this.idx
+      this.idx,
     );
   }
 
diff --git a/packages/regexp-to-ast/src/utils.ts b/packages/regexp-to-ast/src/utils.ts
index 9f65a6eae..d59d22bc0 100644
--- a/packages/regexp-to-ast/src/utils.ts
+++ b/packages/regexp-to-ast/src/utils.ts
@@ -16,7 +16,7 @@ export function insertToSet<T>(item: T | T[], set: T[]) {
 
 export function addFlag(
   flagObj: RegExpFlags,
-  flagKey: keyof Omit<RegExpFlags, "type" | "loc">
+  flagKey: keyof Omit<RegExpFlags, "type" | "loc">,
 ) {
   if (flagObj[flagKey] === true) {
     throw "duplicate flag " + flagKey;
diff --git a/packages/regexp-to-ast/test/parser.spec.ts b/packages/regexp-to-ast/test/parser.spec.ts
index f5d865226..f3fdf7744 100644
--- a/packages/regexp-to-ast/test/parser.spec.ts
+++ b/packages/regexp-to-ast/test/parser.spec.ts
@@ -1140,7 +1140,7 @@ describe("The RegExp to Ast parser", () => {
 
     it("invalid hex", () => {
       expect(() => parser.pattern("/\\x2v/")).to.throw(
-        "Expecting a HexDecimal digits"
+        "Expecting a HexDecimal digits",
      );
     });
 
@@ -1439,7 +1439,7 @@ describe("The RegExp to Ast parser", () => {
 
     it("invalid range", () => {
       expect(() => parser.pattern("/[B-A]/")).to.throw(
-        "Range out of order in character class"
+        "Range out of order in character class",
      );
     });
 
diff --git a/packages/types/api.d.ts b/packages/types/api.d.ts
index 5c3e05c5f..20e11dced 100644
--- a/packages/types/api.d.ts
+++ b/packages/types/api.d.ts
@@ -61,7 +61,7 @@ declare abstract class BaseParser {
    */
   computeContentAssist(
     startRuleName: string,
-    precedingInput: IToken[]
+    precedingInput: IToken[],
   ): ISyntacticContentAssistPath[];
 
   /**
@@ -72,7 +72,7 @@ declare abstract class BaseParser {
    */
   protected BACKTRACK<T>(
     grammarRule: (...args: any[]) => T,
-    args?: any[]
+    args?: any[],
   ): () => boolean;
 
   /**
@@ -97,7 +97,7 @@ declare abstract class BaseParser {
   protected consume(
     idx: number,
     tokType: TokenType,
-    options?: ConsumeMethodOpts
+    options?: ConsumeMethodOpts,
   ): IToken;
 
   /**
@@ -110,7 +110,7 @@ declare abstract class BaseParser {
    */
   protected option<OUT>(
     idx: number,
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined;
 
   /**
@@ -134,7 +134,7 @@ declare abstract class BaseParser {
    */
   protected many(
     idx: number,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void;
 
   /**
@@ -147,7 +147,7 @@ declare abstract class BaseParser {
    */
   protected atLeastOne(
     idx: number,
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void;
 
   /**
@@ -271,7 +271,7 @@ declare abstract class BaseParser {
    * or `undefined` if not.
    */
   protected OPTION<OUT>(
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined;
 
   /**
@@ -279,7 +279,7 @@ declare abstract class BaseParser {
    * @hidden
    */
   protected OPTION1<OUT>(
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined;
 
   /**
@@ -287,7 +287,7 @@ declare abstract class BaseParser {
    * @hidden
    */
   protected OPTION2<OUT>(
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined;
 
   /**
@@ -295,7 +295,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected OPTION3<OUT>(
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined;
 
   /**
@@ -303,7 +303,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected OPTION4<OUT>(
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined;
 
   /**
@@ -311,7 +311,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected OPTION5<OUT>(
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined;
 
   /**
@@ -319,7 +319,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected OPTION6<OUT>(
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined;
 
   /**
@@ -327,7 +327,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected OPTION7<OUT>(
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined;
 
   /**
@@ -335,7 +335,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected OPTION8<OUT>(
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined;
 
   /**
@@ -343,7 +343,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected OPTION9<OUT>(
-    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>
+    actionORMethodDef: GrammarAction<OUT> | DSLMethodOpts<OUT>,
   ): OUT | undefined;
 
   /**
@@ -507,7 +507,7 @@ declare abstract class BaseParser {
    *
   */
   protected MANY(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void;
 
   /**
@@ -515,7 +515,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected MANY1(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void;
 
   /**
@@ -523,7 +523,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected MANY2(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void;
 
   /**
@@ -531,7 +531,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected MANY3(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void;
 
   /**
@@ -539,7 +539,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected MANY4(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void;
 
   /**
@@ -547,7 +547,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected MANY5(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void;
 
   /**
@@ -555,7 +555,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected MANY6(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void;
 
   /**
@@ -563,7 +563,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected MANY7(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void;
 
   /**
@@ -571,7 +571,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected MANY8(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void;
 
   /**
@@ -579,7 +579,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected MANY9(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOpts<any>,
   ): void;
 
   /**
@@ -679,7 +679,7 @@ declare abstract class BaseParser {
    *
   */
   protected AT_LEAST_ONE(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void;
 
   /**
@@ -687,7 +687,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected AT_LEAST_ONE1(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void;
 
   /**
@@ -695,7 +695,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected AT_LEAST_ONE2(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void;
 
   /**
@@ -703,7 +703,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected AT_LEAST_ONE3(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void;
 
   /**
@@ -711,7 +711,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected AT_LEAST_ONE4(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void;
 
   /**
@@ -719,7 +719,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected AT_LEAST_ONE5(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void;
 
   /**
@@ -727,7 +727,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected AT_LEAST_ONE6(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void;
 
   /**
@@ -735,7 +735,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected AT_LEAST_ONE7(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void;
 
   /**
@@ -743,7 +743,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected AT_LEAST_ONE8(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void;
 
   /**
@@ -751,7 +751,7 @@ declare abstract class BaseParser {
    * @hidden
   */
   protected AT_LEAST_ONE9(
-    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>
+    actionORMethodDef: GrammarAction<any> | DSLMethodOptsWithErr<any>,
   ): void;
 
   /**
@@ -849,7 +849,7 @@ declare abstract class BaseParser {
    * @deprecated - will be removed in the future
   */
   protected getNextPossibleTokenTypes(
-    grammarPath: ITokenGrammarPath
+    grammarPath: ITokenGrammarPath,
   ): TokenType[];
 
   input: IToken[];
@@ -893,7 +893,7 @@ export declare class CstParser extends BaseParser {
   protected RULE<F extends () => void>(
     name: string,
     implementation: F,
-    config?: IRuleConfig<CstNode>
+    config?: IRuleConfig<CstNode>,
   ): ParserMethod<Parameters<F>, CstNode>;
 
   /**
@@ -903,7 +903,7 @@ export declare class CstParser extends BaseParser {
   protected OVERRIDE_RULE<F extends () => void>(
     name: string,
     implementation: F,
-    config?: IRuleConfig<CstNode>
+    config?: IRuleConfig<CstNode>,
   ): ParserMethod<Parameters<F>, CstNode>;
 
   /**
@@ -917,7 +917,7 @@ export declare class CstParser extends BaseParser {
   protected subrule<ARGS extends unknown[]>(
     idx: number,
     ruleToCall: ParserMethod<ARGS, CstNode>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): CstNode;
 
   /**
@@ -939,7 +939,7 @@ export declare class CstParser extends BaseParser {
   */
   protected SUBRULE<ARGS extends unknown[]>(
     ruleToCall: ParserMethod<ARGS, CstNode>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): CstNode;
 
   /**
@@ -948,7 +948,7 @@ export declare class CstParser extends BaseParser {
   */
   protected SUBRULE1<ARGS extends unknown[]>(
     ruleToCall: ParserMethod<ARGS, CstNode>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): CstNode;
 
   /**
@@ -957,7 +957,7 @@ export declare class CstParser extends BaseParser {
   */
   protected SUBRULE2<ARGS extends unknown[]>(
     ruleToCall: ParserMethod<ARGS, CstNode>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): CstNode;
 
   /**
@@ -966,7 +966,7 @@ export declare class CstParser extends BaseParser {
   */
   protected SUBRULE3<ARGS extends unknown[]>(
     ruleToCall: ParserMethod<ARGS, CstNode>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): CstNode;
 
   /**
@@ -975,7 +975,7 @@ export declare class CstParser extends BaseParser {
   */
   protected SUBRULE4<ARGS extends unknown[]>(
     ruleToCall: ParserMethod<ARGS, CstNode>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): CstNode;
 
   /**
@@ -984,7 +984,7 @@ export declare class CstParser extends BaseParser {
   */
   protected SUBRULE5<ARGS extends unknown[]>(
     ruleToCall: ParserMethod<ARGS, CstNode>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): CstNode;
 
   /**
@@ -993,7 +993,7 @@ export declare class CstParser extends BaseParser {
   */
   protected SUBRULE6<ARGS extends unknown[]>(
     ruleToCall: ParserMethod<ARGS, CstNode>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): CstNode;
 
   /**
@@ -1002,7 +1002,7 @@ export declare class CstParser extends BaseParser {
   */
   protected SUBRULE7<ARGS extends unknown[]>(
     ruleToCall: ParserMethod<ARGS, CstNode>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): CstNode;
 
   /**
@@ -1011,7 +1011,7 @@ export declare class CstParser extends BaseParser {
   */
   protected SUBRULE8<ARGS extends unknown[]>(
     ruleToCall: ParserMethod<ARGS, CstNode>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): CstNode;
 
   /**
@@ -1020,7 +1020,7 @@ export declare class CstParser extends BaseParser {
   */
   protected SUBRULE9<ARGS extends unknown[]>(
     ruleToCall: ParserMethod<ARGS, CstNode>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): CstNode;
 }
 
@@ -1040,7 +1040,7 @@ export declare class EmbeddedActionsParser extends BaseParser {
   protected RULE<F extends (...args: any[]) => any>(
     name: string,
     implementation: F,
-    config?: IRuleConfig<ReturnType<F>>
+    config?: IRuleConfig<ReturnType<F>>,
   ): ParserMethod<Parameters<F>, ReturnType<F>>;
 
   /**
@@ -1050,7 +1050,7 @@ export declare class EmbeddedActionsParser extends BaseParser {
   protected OVERRIDE_RULE<F extends (...args: any[]) => any>(
     name: string,
     implementation: F,
-    config?: IRuleConfig<ReturnType<F>>
+    config?: IRuleConfig<ReturnType<F>>,
   ): ParserMethod<Parameters<F>, ReturnType<F>>;
 
   /**
@@ -1064,7 +1064,7 @@ export declare class EmbeddedActionsParser extends BaseParser {
   protected subrule<ARGS extends unknown[], R>(
     idx: number,
     ruleToCall: ParserMethod<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R;
 
   /**
@@ -1086,7 +1086,7 @@ export declare class EmbeddedActionsParser extends BaseParser {
   */
   protected SUBRULE<ARGS extends unknown[], R>(
     ruleToCall: ParserMethod<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R;
 
   /**
@@ -1095,7 +1095,7 @@ export declare class EmbeddedActionsParser extends BaseParser {
   */
   protected SUBRULE1<ARGS extends unknown[], R>(
     ruleToCall: ParserMethod<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R;
 
   /**
@@ -1104,7 +1104,7 @@ export declare class EmbeddedActionsParser extends BaseParser {
   */
   protected SUBRULE2<ARGS extends unknown[], R>(
     ruleToCall: ParserMethod<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R;
 
   /**
@@ -1113,7 +1113,7 @@ export declare class EmbeddedActionsParser extends BaseParser {
   */
   protected SUBRULE3<ARGS extends unknown[], R>(
     ruleToCall: ParserMethod<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R;
 
   /**
@@ -1122,7 +1122,7 @@ export declare class EmbeddedActionsParser extends BaseParser {
   */
   protected SUBRULE4<ARGS extends unknown[], R>(
     ruleToCall: ParserMethod<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R;
 
   /**
@@ -1131,7 +1131,7 @@ export declare class EmbeddedActionsParser extends BaseParser {
   */
   protected SUBRULE5<ARGS extends unknown[], R>(
     ruleToCall: ParserMethod<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R;
 
   /**
@@ -1140,7 +1140,7 @@ export declare class EmbeddedActionsParser extends BaseParser {
   */
   protected SUBRULE6<ARGS extends unknown[], R>(
     ruleToCall: ParserMethod<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R;
 
   /**
@@ -1149,7 +1149,7 @@ export declare class EmbeddedActionsParser extends BaseParser {
   */
   protected SUBRULE7<ARGS extends unknown[], R>(
     ruleToCall: ParserMethod<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R;
 
   /**
@@ -1158,7 +1158,7 @@ export declare class EmbeddedActionsParser extends BaseParser {
   */
   protected SUBRULE8<ARGS extends unknown[], R>(
     ruleToCall: ParserMethod<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R;
 
   /**
@@ -1167,7 +1167,7 @@ export declare class EmbeddedActionsParser extends BaseParser {
   */
   protected SUBRULE9<ARGS extends unknown[], R>(
     ruleToCall: ParserMethod<ARGS, R>,
-    options?: SubruleMethodOpts<ARGS>
+    options?: SubruleMethodOpts<ARGS>,
   ): R;
 }
 
@@ -1226,7 +1226,7 @@ export declare class Lexer {
   */
   constructor(
     lexerDefinition: TokenType[] | IMultiModeLexerDefinition,
-    config?: ILexerConfig
+    config?: ILexerConfig,
   );
 
   /**
@@ -1400,7 +1400,7 @@ export interface ILexerErrorMessageProvider {
     startOffset: number,
     length: number,
     line?: number,
-    column?: number
+    column?: number,
   ): string;
 
   /**
@@ -1571,7 +1571,7 @@ export declare function createTokenInstance(
   startLine: number,
   endLine: number,
   startColumn: number,
-  endColumn: number
+  endColumn: number,
 ): IToken;
 
 /**
@@ -1595,7 +1595,7 @@ export declare type CustomPatternMatcherFunc = (
   */
   groups: {
     [groupName: string]: IToken[];
-  }
+  },
 ) => CustomPatternMatcherReturn | RegExpExecArray | null; // RegExpExecArray included for legacy reasons
 
 export type CustomPatternMatcherReturn = [string] & { payload?: any };
@@ -2140,12 +2140,12 @@ export interface ILLkLookaheadValidator {
   validateAmbiguousAlternationAlternatives(
     rules: Rule[],
-    maxLookahead: number
+    maxLookahead: number,
   ): ILookaheadValidationError[];
 
   validateSomeNonEmptyLookaheadPath(
     rules: Rule[],
-    maxLookahead: number
+    maxLookahead: number,
   ): ILookaheadValidationError[];
 }
 
@@ -2153,8 +2153,9 @@ export interface ILLkLookaheadValidator {
  * @experimental
 */
 export interface ILLkLookaheadStrategyConstructor {
-  new (options?: { maxLookahead?: number }): ILookaheadStrategy &
-    ILLkLookaheadValidator;
+  new (options?: {
+    maxLookahead?: number;
+  }): ILookaheadStrategy & ILLkLookaheadValidator;
 }
 
 /**
@@ -2528,7 +2529,7 @@ export abstract class GAstVisitor {
   abstract visitRepetitionMandatory(node: RepetitionMandatory): any;
 
   abstract visitRepetitionMandatoryWithSeparator(
-    node: RepetitionMandatoryWithSeparator
+    node: RepetitionMandatoryWithSeparator,
   ): any;
 
   abstract visitRepetitionWithSeparator(node: RepetitionWithSeparator): any;
@@ -2752,7 +2753,7 @@ export interface ICreateSyntaxDiagramsConfig {
 */
 export declare function createSyntaxDiagramsCode(
   grammar: ISerializedGast[],
-  config?: ICreateSyntaxDiagramsConfig
+  config?: ICreateSyntaxDiagramsConfig,
 ): string;
 
 /**
@@ -2770,7 +2771,7 @@ export declare function createSyntaxDiagramsCode(
 */
 export declare function generateCstDts(
   productions: Record<string, Rule>,
-  options?: GenerateDtsOptions
+  options?: GenerateDtsOptions,
 ): string;
export declare type GenerateDtsOptions = { diff --git a/packages/types/scripts/update-api-docs.js b/packages/types/scripts/update-api-docs.js index df759eb60..8994a2f93 100644 --- a/packages/types/scripts/update-api-docs.js +++ b/packages/types/scripts/update-api-docs.js @@ -13,7 +13,7 @@ const version = pkg.version; const noDotsVersion = version.replace(/\./g, "_"); const newVersionApiDocsDir = join( __dirname, - "../gh-pages/documentation/" + noDotsVersion + "../gh-pages/documentation/" + noDotsVersion, ); try { @@ -30,14 +30,14 @@ try { // Update redirect to latest docs const docsIndexHtmlPath = join( __dirname, - "../gh-pages/documentation/index.html" + "../gh-pages/documentation/index.html", ); const docsIndexHtmlString = fs .readFileSync(docsIndexHtmlPath, "utf8") .toString(); const bumpedDocsIndexHtmlString = docsIndexHtmlString.replace( /\d+_\d+_\d+/, - noDotsVersion + noDotsVersion, ); fs.writeFileSync(docsIndexHtmlPath, bumpedDocsIndexHtmlString); diff --git a/packages/website/docs/changes/BREAKING_CHANGES.md b/packages/website/docs/changes/BREAKING_CHANGES.md index b8b9fcae1..a36a13285 100644 --- a/packages/website/docs/changes/BREAKING_CHANGES.md +++ b/packages/website/docs/changes/BREAKING_CHANGES.md @@ -192,7 +192,7 @@ [ /* token vector */ ], - {} + {}, ); // New API diff --git a/packages/website/docs/changes/CHANGELOG.md b/packages/website/docs/changes/CHANGELOG.md index ea1214093..7a55e1546 100644 --- a/packages/website/docs/changes/CHANGELOG.md +++ b/packages/website/docs/changes/CHANGELOG.md @@ -1217,7 +1217,7 @@ so not many changes will be needed (if at all) for most users. function () { /* ... */ }, - { resyncEnabled: false } + { resyncEnabled: false }, ); ``` diff --git a/packages/website/docs/features/regexp.md b/packages/website/docs/features/regexp.md index 7d4d414c9..ce7aeef34 100644 --- a/packages/website/docs/features/regexp.md +++ b/packages/website/docs/features/regexp.md @@ -35,7 +35,7 @@ $.RULE("statement", () => { name: "FloatValue", pattern: MAKE_PATTERN( // This regExp would be very hard to read without "named fragments" - "{{IntegerPart}}{{FractionalPart}}({{ExponentPart}})?|{{IntegerPart}}{{ExponentPart}}" + "{{IntegerPart}}{{FractionalPart}}({{ExponentPart}})?|{{IntegerPart}}{{ExponentPart}}", ), }); }); diff --git a/packages/website/docs/guide/concrete_syntax_tree.md b/packages/website/docs/guide/concrete_syntax_tree.md index ffdcd7895..d58d2f0a8 100644 --- a/packages/website/docs/guide/concrete_syntax_tree.md +++ b/packages/website/docs/guide/concrete_syntax_tree.md @@ -361,7 +361,7 @@ export function toAst(cst) { } default: { throw new Error( - `CST case handler not implemented for CST node <${cst.name}>` + `CST case handler not implemented for CST node <${cst.name}>`, ); } } diff --git a/packages/website/docs/guide/performance.md b/packages/website/docs/guide/performance.md index dd4f5d0d5..dd5fa2dfc 100644 --- a/packages/website/docs/guide/performance.md +++ b/packages/website/docs/guide/performance.md @@ -58,7 +58,7 @@ const myLexer = new Lexer( [ /* tokens */ ], - { ensureOptimizations: true } + { ensureOptimizations: true }, ); ``` @@ -100,7 +100,7 @@ $.RULE("value", function () { { ALT: () => $.CONSUME(True) }, { ALT: () => $.CONSUME(False) }, { ALT: () => $.CONSUME(Null) }, - ]) + ]), ); }); ``` @@ -131,7 +131,7 @@ It is important to note that: result = $.CONSUME(StringLiteral); }, }, - ]) + ]), ); }); @@ -147,7 +147,7 @@ It is important to note that: return $.CONSUME(StringLiteral); }, }, - ]) + ]), ); }); ``` diff --git 
a/packages/website/docs/tutorial/step3a_adding_actions_visitor.md b/packages/website/docs/tutorial/step3a_adding_actions_visitor.md index 6ea3ce507..5e0e36940 100644 --- a/packages/website/docs/tutorial/step3a_adding_actions_visitor.md +++ b/packages/website/docs/tutorial/step3a_adding_actions_visitor.md @@ -262,7 +262,7 @@ function toAst(inputText) { if (parserInstance.errors.length > 0) { throw Error( "Sad sad panda, parsing errors detected!\n" + - parserInstance.errors[0].message + parserInstance.errors[0].message, ); } diff --git a/packages/website/scripts/version-config.js b/packages/website/scripts/version-config.js index 247ccffa0..36f42b7c1 100644 --- a/packages/website/scripts/version-config.js +++ b/packages/website/scripts/version-config.js @@ -9,7 +9,7 @@ const __dirname = dirname(fileURLToPath(import.meta.url)); const packagePath = path.join(__dirname, "../package.json"); export const changeLogPath = path.join( __dirname, - "../docs/changes/CHANGELOG.md" + "../docs/changes/CHANGELOG.md", ); const docsDirPath = path.join(__dirname, "../docs"); @@ -30,11 +30,11 @@ export const markdownDocsFiles = _.reduce( if (fs.lstatSync(currPath).isDirectory()) { const nestedFiles = fs.readdirSync(currPath); const nestedPaths = _.map(nestedFiles, (currFile) => - path.join(currPath, currFile) + path.join(currPath, currFile), ); const newMarkdowns = _.filter( nestedPaths, - (currPath) => _.endsWith(currPath, ".md") && notChangesDocs(currPath) + (currPath) => _.endsWith(currPath, ".md") && notChangesDocs(currPath), ); result = result.concat(newMarkdowns); @@ -48,7 +48,7 @@ export const markdownDocsFiles = _.reduce( return result; }, - [] + [], ); const pkgJson = jf.readFileSync(packagePath); diff --git a/packages/website/scripts/version-update.js b/packages/website/scripts/version-update.js index 508e8c0f6..2df52b514 100644 --- a/packages/website/scripts/version-update.js +++ b/packages/website/scripts/version-update.js @@ -22,7 +22,7 @@ const nowDate = new Date(); const nowDateString = nowDate.toLocaleDateString("en-US").replace(/\//g, "-"); const changeLogDate = changeLogString.replace( dateTemplateRegExp, - "## " + newVersion + " " + "(" + nowDateString + ")" + "## " + newVersion + " " + "(" + nowDateString + ")", ); fs.writeFileSync(changeLogPath, changeLogDate); @@ -35,7 +35,7 @@ _.forEach(markdownDocsFiles, function (currDocPath) { const currItemContents = fs.readFileSync(currDocPath, "utf8").toString(); const bumpedItemContents = currItemContents.replace( /\d+_\d+_\d+/g, - newVersion.replace(/\./g, "_") + newVersion.replace(/\./g, "_"), ); fs.writeFileSync(currDocPath, bumpedItemContents); }); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 040c0df10..9eabc7930 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -51,8 +51,8 @@ importers: specifier: 4.1.5 version: 4.1.5 prettier: - specifier: 2.8.8 - version: 2.8.8 + specifier: 3.0.0 + version: 3.0.0 shx: specifier: 0.3.4 version: 0.3.4 @@ -6423,9 +6423,9 @@ packages: resolution: {integrity: sha512-w0mCL5vICUAZrh1DuHEdOWBjxdO62lvcO++jbzr8UhhYcTbFkpegLH9XX+7MadjTl/y0feoqwQ/zAnzkc/EGog==} dev: true - /prettier@2.8.8: - resolution: {integrity: sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==} - engines: {node: '>=10.13.0'} + /prettier@3.0.0: + resolution: {integrity: sha512-zBf5eHpwHOGPC47h0zrPyNn+eAEIdEzfywMoYn2XPi0P44Zp0tSq64rq0xAREh4auw2cJZHo9QUob+NqCQky4g==} + engines: {node: '>=14'} hasBin: true dev: true