From 37c63bd4ed1a841901c746eff266d67b2188c5fe Mon Sep 17 00:00:00 2001 From: Adrian Leonhard Date: Sat, 23 Oct 2021 12:22:19 +0200 Subject: [PATCH 1/3] refactor(utils): set tsconfig strict=true for utils package and fix errors This also created some errors in other packages due to variables now being typed correctly. These errors were also fixed. This includes a non-type fix in errors_public.ts. Also removed packArray function entirely as it was broken. BREAKING CHANGE: packArray function was removed from utils --- .../chevrotain/src/parse/errors_public.ts | 2 +- .../chevrotain/src/parse/grammar/checks.ts | 10 +- .../chevrotain/src/parse/grammar/gast/gast.ts | 2 +- .../src/parse/grammar/gast/gast_public.ts | 2 + .../src/parse/parser/traits/gast_recorder.ts | 3 +- .../src/parse/parser/traits/looksahead.ts | 2 +- packages/chevrotain/src/scan/lexer.ts | 34 ++-- packages/chevrotain/src/scan/lexer_public.ts | 27 ++- packages/chevrotain/src/scan/reg_exp.ts | 2 +- .../test/full_flow/ecma_quirks/ecma_quirks.ts | 5 +- .../switch_case/switchcase_recovery_parser.ts | 7 +- .../test/parse/grammar/interperter_spec.ts | 3 +- packages/chevrotain/test/scan/lexer_spec.ts | 6 +- packages/types/api.d.ts | 23 +-- packages/utils/src/api.ts | 171 ++++++++---------- packages/utils/test/api_spec.ts | 21 +-- packages/utils/tsconfig.json | 3 +- 17 files changed, 146 insertions(+), 177 deletions(-) diff --git a/packages/chevrotain/src/parse/errors_public.ts b/packages/chevrotain/src/parse/errors_public.ts index 3c32ce670..74acf25a8 100644 --- a/packages/chevrotain/src/parse/errors_public.ts +++ b/packages/chevrotain/src/parse/errors_public.ts @@ -52,7 +52,7 @@ export const defaultParserErrorProvider: IParserErrorMessageProvider = { const allLookAheadPaths = reduce( expectedPathsPerAlt, (result, currAltPaths) => result.concat(currAltPaths), - [] + [] as TokenType[][] ) const nextValidTokenSequences = map( allLookAheadPaths, diff --git a/packages/chevrotain/src/parse/grammar/checks.ts b/packages/chevrotain/src/parse/grammar/checks.ts index 2440b4a86..58b8b8ccf 100644 --- a/packages/chevrotain/src/parse/grammar/checks.ts +++ b/packages/chevrotain/src/parse/grammar/checks.ts @@ -465,8 +465,8 @@ export function validateAmbiguousAlternationAlternatives( return errors } -export class RepetionCollector extends GAstVisitor { - public allProductions: IProduction[] = [] +export class RepetitionCollector extends GAstVisitor { + public allProductions: IProductionWithOccurrence[] = [] public visitRepetitionWithSeparator(manySep: RepetitionWithSeparator): void { this.allProductions.push(manySep) @@ -524,7 +524,7 @@ export function validateSomeNonEmptyLookaheadPath( ): IParserDefinitionError[] { const errors = [] forEach(topLevelRules, (currTopRule) => { - const collectorVisitor = new RepetionCollector() + const collectorVisitor = new RepetitionCollector() currTopRule.accept(collectorVisitor) const allRuleProductions = collectorVisitor.allProductions forEach(allRuleProductions, (currProd) => { @@ -601,7 +601,7 @@ function checkAlternativesAmbiguities( }) return result }, - [] + [] as { alts: number[]; path: TokenType[] }[] ) const currErrors = utils.map(identicalAmbiguities, (currAmbDescriptor) => { @@ -622,7 +622,7 @@ function checkAlternativesAmbiguities( type: ParserDefinitionErrorType.AMBIGUOUS_ALTS, ruleName: rule.name, occurrence: alternation.idx, - alternatives: [currAmbDescriptor.alts] + alternatives: currAmbDescriptor.alts } }) diff --git a/packages/chevrotain/src/parse/grammar/gast/gast.ts 
b/packages/chevrotain/src/parse/grammar/gast/gast.ts index b8f5abe4c..25058f701 100644 --- a/packages/chevrotain/src/parse/grammar/gast/gast.ts +++ b/packages/chevrotain/src/parse/grammar/gast/gast.ts @@ -166,7 +166,7 @@ export function collectMethods(rule: Rule): { repetition: Repetition[] repetitionWithSeparator: RepetitionWithSeparator[] repetitionMandatory: RepetitionMandatory[] - repetitionMandatoryWithSeparator: RepetitionMandatoryWithSeparator + repetitionMandatoryWithSeparator: RepetitionMandatoryWithSeparator[] } { collectorVisitor.reset() rule.accept(collectorVisitor) diff --git a/packages/chevrotain/src/parse/grammar/gast/gast_public.ts b/packages/chevrotain/src/parse/grammar/gast/gast_public.ts index 5b068ac2b..7f25c6fe4 100644 --- a/packages/chevrotain/src/parse/grammar/gast/gast_public.ts +++ b/packages/chevrotain/src/parse/grammar/gast/gast_public.ts @@ -152,6 +152,7 @@ export class RepetitionMandatoryWithSeparator { public separator: TokenType public idx: number = 1 + public maxLookahead?: number constructor(options: { definition: IProduction[] @@ -193,6 +194,7 @@ export class RepetitionWithSeparator { public separator: TokenType public idx: number = 1 + public maxLookahead?: number constructor(options: { definition: IProduction[] diff --git a/packages/chevrotain/src/parse/parser/traits/gast_recorder.ts b/packages/chevrotain/src/parse/parser/traits/gast_recorder.ts index f9fef1f29..95f80993c 100644 --- a/packages/chevrotain/src/parse/parser/traits/gast_recorder.ts +++ b/packages/chevrotain/src/parse/parser/traits/gast_recorder.ts @@ -393,7 +393,8 @@ function recordOrProd(mainProdArg: any, occurrence: number): any { const prevProd: any = peek(this.recordingProdStack) // Only an array of alternatives const hasOptions = isArray(mainProdArg) === false - const alts = hasOptions === false ? mainProdArg : mainProdArg.DEF + const alts: IOrAlt[] = + hasOptions === false ? 
mainProdArg : mainProdArg.DEF const newOrProd = new Alternation({ definition: [], diff --git a/packages/chevrotain/src/parse/parser/traits/looksahead.ts b/packages/chevrotain/src/parse/parser/traits/looksahead.ts index fb17331bb..be88de027 100644 --- a/packages/chevrotain/src/parse/parser/traits/looksahead.ts +++ b/packages/chevrotain/src/parse/parser/traits/looksahead.ts @@ -154,7 +154,7 @@ export class LooksAhead { prodOccurrence: number, prodKey: number, prodType: PROD_TYPE, - prodMaxLookahead: number, + prodMaxLookahead: number | undefined, dslMethodName: string ): void { this.TRACE_INIT( diff --git a/packages/chevrotain/src/scan/lexer.ts b/packages/chevrotain/src/scan/lexer.ts index 1c0828bee..279550c47 100644 --- a/packages/chevrotain/src/scan/lexer.ts +++ b/packages/chevrotain/src/scan/lexer.ts @@ -21,7 +21,6 @@ import { keys, map, mapValues, - packArray, PRINT_ERROR, reduce, reject @@ -53,6 +52,7 @@ export interface IPatternConfig { group: any push: string pop: boolean + tokenType: TokenType tokenTypeIdx: number } @@ -102,7 +102,7 @@ export function analyzeTokenTypes( initCharCodeToOptimizedIndexMap() }) - let onlyRelevantTypes + let onlyRelevantTypes: TokenType[] tracer("Reject Lexer.NA", () => { onlyRelevantTypes = reject(tokenTypes, (currType) => { return currType[PATTERN] === Lexer.NA @@ -250,7 +250,10 @@ export function analyzeTokenTypes( if ( checkLineBreaksIssues(tokType, lineTerminatorCharCodes) === false ) { - return canMatchCharCode(lineTerminatorCharCodes, tokType.PATTERN) + return canMatchCharCode( + lineTerminatorCharCodes, + tokType.PATTERN as RegExp | string + ) } } }) @@ -374,9 +377,6 @@ export function analyzeTokenTypes( ) }) } - tracer("ArrayPacking", () => { - charCodeToPatternIdxToConfig = packArray(charCodeToPatternIdxToConfig) - }) return { emptyGroups: emptyGroups, @@ -490,7 +490,7 @@ export function findInvalidPatterns( return { errors, valid } } -const end_of_input = /[^\\][\$]/ +const end_of_input = /[^\\][$]/ export function findEndOfInputAnchor( tokenTypes: TokenType[] @@ -504,10 +504,10 @@ export function findEndOfInputAnchor( } const invalidRegex = filter(tokenTypes, (currType) => { - const pattern = currType[PATTERN] + const pattern = currType.PATTERN try { - const regexpAst = getRegExpAst(pattern) + const regexpAst = getRegExpAst(pattern as RegExp) const endAnchorVisitor = new EndAnchorFinder() endAnchorVisitor.visit(regexpAst) @@ -515,7 +515,7 @@ export function findEndOfInputAnchor( } catch (e) { // old behavior in case of runtime exceptions with regexp-to-ast. 
/* istanbul ignore next - cannot ensure an error in regexp-to-ast*/ - return end_of_input.test(pattern.source) + return end_of_input.test((pattern as RegExp).source) } }) @@ -540,7 +540,7 @@ export function findEmptyMatchRegExps( tokenTypes: TokenType[] ): ILexerDefinitionError[] { const matchesEmptyString = filter(tokenTypes, (currType) => { - const pattern = currType[PATTERN] + const pattern = currType.PATTERN as RegExp return pattern.test("") }) @@ -572,7 +572,7 @@ export function findStartOfInputAnchor( } const invalidRegex = filter(tokenTypes, (currType) => { - const pattern = currType[PATTERN] + const pattern = currType.PATTERN as RegExp try { const regexpAst = getRegExpAst(pattern) const startAnchorVisitor = new StartAnchorFinder() @@ -919,7 +919,9 @@ export function performWarningRuntimeChecks( hasAnyLineBreak = true } } else { - if (canMatchCharCode(terminatorCharCodes, tokType.PATTERN)) { + if ( + canMatchCharCode(terminatorCharCodes, tokType.PATTERN as RegExp) + ) { hasAnyLineBreak = true } } @@ -1085,15 +1087,13 @@ export function buildLineBreakIssueMessage( } function getCharCodes(charsOrCodes: (number | string)[]): number[] { - const charCodes = map(charsOrCodes, (numOrString) => { - if (isString(numOrString) && numOrString.length > 0) { + return map(charsOrCodes, (numOrString) => { + if (isString(numOrString)) { return numOrString.charCodeAt(0) } else { return numOrString } }) - - return charCodes } function addToMapOfArrays(map, key, value): void { diff --git a/packages/chevrotain/src/scan/lexer_public.ts b/packages/chevrotain/src/scan/lexer_public.ts index 768750f1c..056ce5f89 100644 --- a/packages/chevrotain/src/scan/lexer_public.ts +++ b/packages/chevrotain/src/scan/lexer_public.ts @@ -21,7 +21,6 @@ import { keys, last, map, - merge, NOOP, PRINT_WARNING, reduce, @@ -103,19 +102,19 @@ export class Lexer { protected defaultMode: string protected emptyGroups: { [groupName: string]: IToken } = {} - private config: ILexerConfig = undefined + private config: ILexerConfig private trackStartLines: boolean = true private trackEndLines: boolean = true private hasCustom: boolean = false private canModeBeOptimized: any = {} - private traceInitPerf: boolean | number - private traceInitMaxIdent: number + private traceInitPerf!: boolean | number + private traceInitMaxIdent!: number private traceInitIndent: number constructor( protected lexerDefinition: TokenType[] | IMultiModeLexerDefinition, - config: ILexerConfig = DEFAULT_LEXER_CONFIG + config: Partial = DEFAULT_LEXER_CONFIG ) { if (typeof config === "boolean") { throw Error( @@ -125,7 +124,7 @@ export class Lexer { } // todo: defaults func? - this.config = merge(DEFAULT_LEXER_CONFIG, config) + this.config = Object.assign({}, DEFAULT_LEXER_CONFIG, config) const traceInitVal = this.config.traceInitPerf if (traceInitVal === true) { @@ -138,7 +137,7 @@ export class Lexer { this.traceInitIndent = -1 this.TRACE_INIT("Lexer Constructor", () => { - let actualDefinition: IMultiModeLexerDefinition + let actualDefinition!: IMultiModeLexerDefinition let hasOnlySingleMode = true this.TRACE_INIT("Lexer Config handling", () => { if ( @@ -172,11 +171,10 @@ export class Lexer { // Convert SingleModeLexerDefinition into a IMultiModeLexerDefinition. 
if (isArray(lexerDefinition)) { - actualDefinition = { modes: {} } - actualDefinition.modes[DEFAULT_MODE] = cloneArr( - lexerDefinition - ) - actualDefinition[DEFAULT_MODE] = DEFAULT_MODE + actualDefinition = { + modes: { defaultMode: cloneArr(lexerDefinition) }, + defaultMode: DEFAULT_MODE + } } else { // no conversion needed, input should already be a IMultiModeLexerDefinition hasOnlySingleMode = false @@ -186,7 +184,7 @@ export class Lexer { } }) - if (this.config.skipValidations === false) { + if (!this.config.skipValidations) { this.TRACE_INIT("performRuntimeChecks", () => { this.lexerDefinitionErrors = this.lexerDefinitionErrors.concat( performRuntimeChecks( @@ -262,7 +260,8 @@ export class Lexer { this.charCodeToPatternIdxToConfig[currModName] = currAnalyzeResult.charCodeToPatternIdxToConfig - this.emptyGroups = merge( + this.emptyGroups = Object.assign( + {}, this.emptyGroups, currAnalyzeResult.emptyGroups ) diff --git a/packages/chevrotain/src/scan/reg_exp.ts b/packages/chevrotain/src/scan/reg_exp.ts index 382f1aaf2..2261a5485 100644 --- a/packages/chevrotain/src/scan/reg_exp.ts +++ b/packages/chevrotain/src/scan/reg_exp.ts @@ -107,7 +107,7 @@ export function firstCharOptimizedIndices(ast, result, ignoreCase): number[] { addOptimizedIdxToResult(code, result, ignoreCase) } else { // range - const range = code + const range = code as any // cannot optimize when ignoreCase is if (ignoreCase === true) { for ( diff --git a/packages/chevrotain/test/full_flow/ecma_quirks/ecma_quirks.ts b/packages/chevrotain/test/full_flow/ecma_quirks/ecma_quirks.ts index 7032fb8a9..c1417eaff 100644 --- a/packages/chevrotain/test/full_flow/ecma_quirks/ecma_quirks.ts +++ b/packages/chevrotain/test/full_flow/ecma_quirks/ecma_quirks.ts @@ -58,7 +58,10 @@ const allTokens = [ // file even if the tests will avoid running it. if (typeof (new RegExp("(?:)")).sticky === "boolean") { forEach(allTokens, (currTokType) => { - currTokType.PATTERN = new RegExp(currTokType.PATTERN.source, "y") + currTokType.PATTERN = new RegExp( + (currTokType.PATTERN as RegExp).source, + "y" + ) }) } diff --git a/packages/chevrotain/test/full_flow/error_recovery/switch_case/switchcase_recovery_parser.ts b/packages/chevrotain/test/full_flow/error_recovery/switch_case/switchcase_recovery_parser.ts index c788cfde2..7d8c6f0f0 100644 --- a/packages/chevrotain/test/full_flow/error_recovery/switch_case/switchcase_recovery_parser.ts +++ b/packages/chevrotain/test/full_flow/error_recovery/switch_case/switchcase_recovery_parser.ts @@ -86,10 +86,13 @@ export class SwitchCaseRecoveryParser extends EmbeddedActionsParser { // what about a string with some random value? this could still lead to duplicate keys in the returned parse result private tokTypesThatCannotBeInsertedInRecovery = [IdentTok, StringTok, IntTok] - // DOCS: overriding this method allows us to customize the logic for which tokens may not be automaticaly inserted + // DOCS: overriding this method allows us to customize the logic for which tokens may not be automatically inserted // during error recovery. 
public canTokenTypeBeInsertedInRecovery(tokType: TokenType) { - return !contains(this.tokTypesThatCannotBeInsertedInRecovery, tokType) + return !contains( + this.tokTypesThatCannotBeInsertedInRecovery, + tokType as unknown + ) } public parseSwitchStmt(): RetType { diff --git a/packages/chevrotain/test/parse/grammar/interperter_spec.ts b/packages/chevrotain/test/parse/grammar/interperter_spec.ts index 9d60c1620..246790dc6 100644 --- a/packages/chevrotain/test/parse/grammar/interperter_spec.ts +++ b/packages/chevrotain/test/parse/grammar/interperter_spec.ts @@ -24,6 +24,7 @@ import { NextTerminalAfterAtLeastOneWalker, NextTerminalAfterManySepWalker, NextTerminalAfterManyWalker, + PartialPathAndSuffixes, possiblePathsFrom } from "../../../src/parse/grammar/interpreter" import { createRegularToken, setEquality } from "../../utils/matchers" @@ -588,7 +589,7 @@ describe("The NextTerminalAfterAtLeastOneSepWalker", () => { }) describe("The chevrotain grammar interpreter capabilities", () => { - function extractPartialPaths(newResultFormat) { + function extractPartialPaths(newResultFormat: PartialPathAndSuffixes[]) { return map(newResultFormat, (currItem) => currItem.partialPath) } diff --git a/packages/chevrotain/test/scan/lexer_spec.ts b/packages/chevrotain/test/scan/lexer_spec.ts index 1a3b730be..835e8f7ce 100644 --- a/packages/chevrotain/test/scan/lexer_spec.ts +++ b/packages/chevrotain/test/scan/lexer_spec.ts @@ -660,7 +660,7 @@ function defineLexerSpecs( expect(allPatterns.length).to.equal(8) const allPatternsString = map(allPatterns, (pattern) => { - return isString(pattern) ? pattern : pattern.source + return isString(pattern) ? pattern : (pattern as RegExp).source }) setEquality(allPatternsString, [ "^(?:(\\t| ))", @@ -690,7 +690,7 @@ function defineLexerSpecs( } if (!skipValidationChecks && ORG_SUPPORT_STICKY) { - it("can transform/analyze an array of Token Typees into matched/ignored/patternToClass - sticky", () => { + it("can transform/analyze an array of Token Types into matched/ignored/patternToClass - sticky", () => { const tokenClasses = [ Keyword, If, @@ -713,7 +713,7 @@ function defineLexerSpecs( ) expect(allPatterns.length).to.equal(8) const allPatternsString = map(allPatterns, (pattern) => { - return isString(pattern) ? pattern : pattern.source + return isString(pattern) ? pattern : (pattern as RegExp).source }) setEquality(allPatternsString, [ "(\\t| )", diff --git a/packages/types/api.d.ts b/packages/types/api.d.ts index 7cdcba62e..589420f6d 100644 --- a/packages/types/api.d.ts +++ b/packages/types/api.d.ts @@ -1246,7 +1246,7 @@ export interface ILexerConfig { * This can be useful when wishing to indicate lexer errors in another manner * than simply throwing an error (for example in an online playground). */ - deferDefinitionErrorsHandling?: boolean + deferDefinitionErrorsHandling: boolean /** * "full" location information means all six combinations of /(end|start)(Line|Column|Offset)/ properties. @@ -1258,7 +1258,7 @@ export interface ILexerConfig { * in edge cases where every last ounce of performance is needed. */ // TODO: consider renaming this to LocationTracking to align with NodeLocationTracking option on the ParserConfig. - positionTracking?: "full" | "onlyStart" | "onlyOffset" + positionTracking: "full" | "onlyStart" | "onlyOffset" /** * A regExp defining custom line terminators. 
@@ -1266,7 +1266,7 @@ export interface ILexerConfig { * * Note that the regExp should use the global flag, for example: /\n/g * - * The default is: /\n|\r\n?/g + * The default is: /\n|\r\n/g * * But some grammars have a different definition, for example in ECMAScript: * https://www.ecma-international.org/ecma-262/8.0/index.html#sec-line-terminators @@ -1278,17 +1278,17 @@ export interface ILexerConfig { * as only a subset of the RegExp APIs is needed, {@link ILineTerminatorsTester} * for details. * - * keep in mind that for the default pattern: /\n|\r\n?/g an optimized implementation is already built-in. + * keep in mind that for the default pattern: /\n|\r\n/g an optimized implementation is already built-in. * This means the optimization is only relevant for lexers overriding the default pattern. */ - lineTerminatorsPattern?: RegExp | ILineTerminatorsTester + lineTerminatorsPattern: RegExp | ILineTerminatorsTester /** * Characters or CharCodes that represent line terminators for this lexer. * This always needs to be provided when using a custom {@link ILexerConfig.lineTerminatorsPattern}. * In the future this duplication may be removed or reduced. */ - lineTerminatorCharacters?: (number | string)[] + lineTerminatorCharacters: (number | string)[] /** * When true this flag will cause the Lexer to throw an Error @@ -1301,14 +1301,14 @@ export interface ILexerConfig { * The optimizations can boost the lexer's performance anywhere from 30% * to 100%+ depending on the number of TokenTypes used. */ - ensureOptimizations?: boolean + ensureOptimizations: boolean /** * Can be used to disable lexer optimizations * If there is a suspicion they are causing incorrect behavior. * Note that this would have negative performance implications. */ - safeMode?: boolean + safeMode: boolean /** * A custom error message provider. @@ -1317,7 +1317,7 @@ export interface ILexerConfig { * - Translating the error messages to a different languages. * - Changing the formatting. */ - errorMessageProvider?: ILexerErrorMessageProvider + errorMessageProvider: ILexerErrorMessageProvider /** * Enabling this flag will print performance tracing logs during lexer @@ -1333,7 +1333,7 @@ export interface ILexerConfig { * * Note that passing the boolean `true` is identical to passing the numerical value `infinity` */ - traceInitPerf?: boolean | number + traceInitPerf: boolean | number /** * This flag will avoid running the Lexer validations during Lexer initialization. @@ -1346,7 +1346,7 @@ export interface ILexerConfig { * So they should not be skipped during development flows. * - For example: via a conditional that checks an env variable. 
*/ - skipValidations?: boolean + skipValidations: boolean } export interface ILexerErrorMessageProvider { @@ -2300,6 +2300,7 @@ export interface IProduction { export interface IProductionWithOccurrence extends IProduction { idx: number + maxLookahead?: number } /** diff --git a/packages/utils/src/api.ts b/packages/utils/src/api.ts index 1ee412beb..c14955df8 100644 --- a/packages/utils/src/api.ts +++ b/packages/utils/src/api.ts @@ -36,21 +36,26 @@ export function mapValues( const objKeys = keys(obj) for (let idx = 0; idx < objKeys.length; idx++) { const currKey = objKeys[idx] - result.push(callback.call(null, obj[currKey], currKey)) + result.push(callback(obj[currKey], currKey)) } return result } -export function map(arr: I[], callback: (I, idx?: number) => O): O[] { +export function map( + arr: I[], + callback: (value: I, idx?: number) => O +): O[] { const result: O[] = [] for (let idx = 0; idx < arr.length; idx++) { - result.push(callback.call(null, arr[idx], idx)) + result.push(callback(arr[idx], idx)) } return result } -export function flatten(arr: any[]): T[] { - let result = [] +export function flatten(arr: T[][]): T[] +export function flatten(arr: any[]): any[] +export function flatten(arr: any[]): any[] { + let result: any[] = [] for (let idx = 0; idx < arr.length; idx++) { const currItem = arr[idx] @@ -63,34 +68,42 @@ export function flatten(arr: any[]): T[] { return result } -export function first(arr: T[]): T { +export function first(arr: T[]): T | undefined { return isEmpty(arr) ? undefined : arr[0] } -export function last(arr: T[]): T { +export function last(arr: T[]): T | undefined { const len = arr && arr.length return len ? arr[len - 1] : undefined } +export function forEach( + collection: T[], + iteratorCallback: (item: T, index: number) => void +): void +export function forEach( + collection: Record, + iteratorCallback: (value: T, key: string) => void +): void export function forEach(collection: any, iteratorCallback: Function): void { /* istanbul ignore else */ if (Array.isArray(collection)) { for (let i = 0; i < collection.length; i++) { - iteratorCallback.call(null, collection[i], i) + iteratorCallback(collection[i], i) } } else if (isObject(collection)) { const colKeys = keys(collection) for (let i = 0; i < colKeys.length; i++) { const key = colKeys[i] const value = collection[key] - iteratorCallback.call(null, value, key) + iteratorCallback(value, key) } } else { throw Error("non exhaustive match") } } -export function isString(item: any): boolean { +export function isString(item: any): item is string { return typeof item === "string" } @@ -98,7 +111,7 @@ export function isUndefined(item: any): boolean { return item === undefined } -export function isFunction(item: any): boolean { +export function isFunction(item: any): item is (...args: any[]) => any { return item instanceof Function } @@ -110,30 +123,24 @@ export function dropRight(arr: T[], howMuch: number = 1): T[] { return arr.slice(0, arr.length - howMuch) } -export function filter(arr: T[], predicate: (T) => boolean): T[] { - const result = [] - if (Array.isArray(arr)) { - for (let i = 0; i < arr.length; i++) { - const item = arr[i] - if (predicate.call(null, item)) { - result.push(item) - } - } - } - return result +export function filter( + arr: T[], + predicate: (value: T, index: number, array: T[]) => unknown +): T[] { + return arr.filter(predicate) } -export function reject(arr: T[], predicate: (T) => boolean): T[] { +export function reject(arr: T[], predicate: (item: T) => boolean): T[] { return 
filter(arr, (item) => !predicate(item)) } -export function pick(obj: Object, predicate: (item) => boolean) { +export function pick(obj: Object, predicate: (item: any) => boolean) { const keys = Object.keys(obj) - const result = {} + const result: any = {} for (let i = 0; i < keys.length; i++) { const currKey = keys[i] - const currItem = obj[currKey] + const currItem = (obj as any)[currKey] if (predicate(currItem)) { result[currKey] = currItem } @@ -142,15 +149,15 @@ export function pick(obj: Object, predicate: (item) => boolean) { return result } -export function has(obj: any, prop: string): boolean { +export function has(obj: any, prop: string | number): boolean { if (isObject(obj)) { return obj.hasOwnProperty(prop) } return false } -export function contains(arr: T[], item): boolean { - return find(arr, (currItem) => currItem === item) !== undefined ? true : false +export function contains(arr: T[], item: T): boolean { + return arr.includes(item) } /** @@ -168,17 +175,13 @@ export function cloneArr(arr: T[]): T[] { * shallow clone */ export function cloneObj(obj: Object): any { - const clonedObj = {} - for (const key in obj) { - /* istanbul ignore else */ - if (Object.prototype.hasOwnProperty.call(obj, key)) { - clonedObj[key] = obj[key] - } - } - return clonedObj + return Object.assign({}, obj) } -export function find(arr: T[], predicate: (item: T) => boolean): T { +export function find( + arr: T[], + predicate: (item: T) => boolean +): T | undefined { for (let i = 0; i < arr.length; i++) { const item = arr[i] if (predicate.call(null, item)) { @@ -199,9 +202,19 @@ export function findAll(arr: T[], predicate: (item: T) => boolean): T[] { return found } +export function reduce( + arrOrObj: Array, + iterator: (result: A, item: T, idx: number) => A, + initial: A +): A +export function reduce( + arrOrObj: Record, + iterator: (result: A, item: T, idx: string) => A, + initial: A +): A export function reduce( arrOrObj: Array | Object, - iterator: (result: A, item, idx?) => A, + iterator: (result: A, item: T, idx: any) => A, initial: A ): A { const isArr = Array.isArray(arrOrObj) @@ -211,25 +224,20 @@ export function reduce( let accumulator = initial for (let i = 0; i < vals.length; i++) { - accumulator = iterator.call( - null, - accumulator, - vals[i], - isArr ? i : objKeys[i] - ) + accumulator = iterator(accumulator, vals[i], isArr ? i : objKeys[i]) } return accumulator } -export function compact(arr: T[]): T[] { - return reject(arr, (item) => item === null || item === undefined) +export function compact(arr: (T | null | undefined)[]): T[] { + return reject(arr, (item) => item === null || item === undefined) as T[] } export function uniq( arr: T[], identity: (item: T) => any = (item) => item ): T[] { - const identities = [] + const identities: T[] = [] return reduce( arr, (result, currItem) => { @@ -241,14 +249,12 @@ export function uniq( return result.concat(currItem) } }, - [] + [] as T[] ) } export function partial(func: Function, ...restArgs: any[]): Function { - const firstArg = [null] - const allArgs = firstArg.concat(restArgs) - return Function.bind.apply(func, allArgs) + return func.bind(null, ...restArgs) } export function isArray(obj: any): obj is any[] { @@ -265,7 +271,7 @@ export function isObject(obj: any): obj is Object { export function every( arr: T[], - predicate: (item: T, idx?) 
=> boolean + predicate: (item: T, idx: number) => boolean ): boolean { for (let i = 0; i < arr.length; i++) { if (!predicate(arr[i], i)) { @@ -308,7 +314,7 @@ export function zipObject(keys: any[], values: any[]): Object { throw Error("can't zipObject with different number of keys and values!") } - const result = {} + const result: Record = {} for (let i = 0; i < keys.length; i++) { result[keys[i]] = values[i] } @@ -318,24 +324,14 @@ export function zipObject(keys: any[], values: any[]): Object { /** * mutates! (and returns) target */ -export function assign(target: Object, ...sources: Object[]): Object { - for (let i = 0; i < sources.length; i++) { - const curSource = sources[i] - const currSourceKeys = keys(curSource) - for (let j = 0; j < currSourceKeys.length; j++) { - const currKey = currSourceKeys[j] - target[currKey] = curSource[currKey] - } - } - return target -} +export const assign = Object.assign /** * mutates! (and returns) target */ export function assignNoOverwrite( - target: Object, - ...sources: Object[] + target: Record, + ...sources: Record[] ): Object { for (let i = 0; i < sources.length; i++) { const curSource = sources[i] @@ -374,48 +370,20 @@ export function groupBy( return result } -/** - * Merge obj2 into obj1. - * Will overwrite existing properties with the same name - */ -export function merge(obj1: Object, obj2: Object): any { - const result = cloneObj(obj1) - const keys2 = keys(obj2) - for (let i = 0; i < keys2.length; i++) { - const key = keys2[i] - const value = obj2[key] - result[key] = value - } - - return result -} - export function NOOP() {} -export function IDENTITY(item) { +export function IDENTITY(item: T): T { return item } -/** - * Will return a new packed array with same values. - */ -export function packArray(holeyArr: T[]): T[] { - const result = [] - for (let i = 0; i < holeyArr.length; i++) { - const orgValue = holeyArr[i] - result.push(orgValue !== undefined ? orgValue : undefined) - } - return result -} - -export function PRINT_ERROR(msg) { +export function PRINT_ERROR(msg: string) { /* istanbul ignore else - can't override global.console in node.js */ if (console && console.error) { console.error(`Error: ${msg}`) } } -export function PRINT_WARNING(msg) { +export function PRINT_WARNING(msg: string) { /* istanbul ignore else - can't override global.console in node.js*/ if (console && console.warn) { // TODO: modify docs accordingly @@ -441,14 +409,17 @@ export function timer(func: () => T): { time: number; value: T } { } // based on: https://github.com/petkaantonov/bluebird/blob/b97c0d2d487e8c5076e8bd897e0dcd4622d31846/src/util.js#L201-L216 -export function toFastProperties(toBecomeFast) { +export function toFastProperties(toBecomeFast: any) { function FakeConstructor() {} + // If our object is used as a constructor it would receive FakeConstructor.prototype = toBecomeFast - const fakeInstance = new FakeConstructor() + const fakeInstance = new (FakeConstructor as any)() + function fakeAccess() { return typeof fakeInstance.bar } + // help V8 understand this is a "real" prototype by actually using // the fake instance. 
fakeAccess() diff --git a/packages/utils/test/api_spec.ts b/packages/utils/test/api_spec.ts index 325b127fb..b36e1df87 100644 --- a/packages/utils/test/api_spec.ts +++ b/packages/utils/test/api_spec.ts @@ -33,7 +33,6 @@ describe("The Utils functions namespace", () => { it("exports a last utility", () => { expect(last([1, 2, 3])).to.equal(3) expect(last([])).to.equal(undefined) - expect(last(null)).to.equal(undefined) }) it("exports a forEach utility", () => { @@ -41,8 +40,6 @@ describe("The Utils functions namespace", () => { expect(item).to.equal(idx + 1) }) - expect(() => forEach(null, (item) => {})).to.throw("non exhaustive match") - forEach([], (item) => { throw Error("call back should not be invoked for empty array") }) @@ -95,11 +92,6 @@ describe("The Utils functions namespace", () => { return item % 2 === 1 }) ).to.deep.equal([1, 3]) - expect( - filter(null, (item) => { - return item % 2 === 1 - }) - ).to.deep.equal([]) }) it("exports a reject utility", () => { @@ -128,11 +120,6 @@ describe("The Utils functions namespace", () => { return item % 2 === 1 }) ).to.deep.equal([2]) - expect( - reject(null, (item) => { - return item % 2 === 1 - }) - ).to.deep.equal([]) }) it("exports a has utility", () => { @@ -152,7 +139,7 @@ describe("The Utils functions namespace", () => { it("exports a cloneArr utility", () => { expect(cloneArr([1, 2, 3])).to.deep.equal([1, 2, 3]) expect(cloneArr([])).to.deep.equal([]) - const arr = [] + const arr: never[] = [] expect(cloneArr(arr)).to.not.equal(arr) }) @@ -182,7 +169,7 @@ describe("The Utils functions namespace", () => { (result, item) => { return result.concat([item * 2]) }, - [] + [] as number[] ) ).to.deep.equal([2, 4, 6]) @@ -192,7 +179,7 @@ describe("The Utils functions namespace", () => { (result, item) => { return result.concat([item * 2]) }, - [] + [] as number[] ) ).to.deep.equal([2, 4, 6]) }) @@ -222,7 +209,7 @@ describe("The Utils functions namespace", () => { }) it("exports a partial utility", () => { - const add = function (x, y) { + const add = function (x: number, y: number) { return x + y } expect(partial(add)(2, 3)).to.equal(5) diff --git a/packages/utils/tsconfig.json b/packages/utils/tsconfig.json index 1539875cc..85dd4f6f2 100644 --- a/packages/utils/tsconfig.json +++ b/packages/utils/tsconfig.json @@ -3,7 +3,8 @@ "compilerOptions": { "rootDir": ".", "outDir": "lib", - "baseUrl": "." 
+ "baseUrl": ".", + "strict": true }, "include": ["./src/**/*", "./test/**/*", "api.d.ts"] } From 9b4c992d0296f0b67ea527c8b49e09bf0c16404c Mon Sep 17 00:00:00 2001 From: Adrian Leonhard Date: Sun, 24 Oct 2021 20:31:07 +0200 Subject: [PATCH 2/3] review fixes --- packages/chevrotain/src/scan/lexer.ts | 4 +++- packages/types/api.d.ts | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/chevrotain/src/scan/lexer.ts b/packages/chevrotain/src/scan/lexer.ts index 279550c47..8f32389c7 100644 --- a/packages/chevrotain/src/scan/lexer.ts +++ b/packages/chevrotain/src/scan/lexer.ts @@ -1087,13 +1087,15 @@ export function buildLineBreakIssueMessage( } function getCharCodes(charsOrCodes: (number | string)[]): number[] { - return map(charsOrCodes, (numOrString) => { + const charCodes = map(charsOrCodes, (numOrString) => { if (isString(numOrString)) { return numOrString.charCodeAt(0) } else { return numOrString } }) + + return charCodes } function addToMapOfArrays(map, key, value): void { diff --git a/packages/types/api.d.ts b/packages/types/api.d.ts index 589420f6d..e93020c23 100644 --- a/packages/types/api.d.ts +++ b/packages/types/api.d.ts @@ -1278,7 +1278,7 @@ export interface ILexerConfig { * as only a subset of the RegExp APIs is needed, {@link ILineTerminatorsTester} * for details. * - * keep in mind that for the default pattern: /\n|\r\n/g an optimized implementation is already built-in. + * keep in mind that for the default pattern: /\n|\r\n?/g an optimized implementation is already built-in. * This means the optimization is only relevant for lexers overriding the default pattern. */ lineTerminatorsPattern: RegExp | ILineTerminatorsTester From be73dc98b9adc764141e1d618f4075ce84a3e7bd Mon Sep 17 00:00:00 2001 From: Adrian Leonhard Date: Mon, 25 Oct 2021 09:15:06 +0200 Subject: [PATCH 3/3] review fixes --- .../chevrotain/src/parse/grammar/checks.ts | 4 +++- packages/chevrotain/src/scan/lexer_public.ts | 8 +++---- packages/types/api.d.ts | 21 +++++++++---------- packages/utils/src/api.ts | 16 ++++++++++++++ 4 files changed, 33 insertions(+), 16 deletions(-) diff --git a/packages/chevrotain/src/parse/grammar/checks.ts b/packages/chevrotain/src/parse/grammar/checks.ts index 58b8b8ccf..863ca33fa 100644 --- a/packages/chevrotain/src/parse/grammar/checks.ts +++ b/packages/chevrotain/src/parse/grammar/checks.ts @@ -466,7 +466,9 @@ export function validateAmbiguousAlternationAlternatives( } export class RepetitionCollector extends GAstVisitor { - public allProductions: IProductionWithOccurrence[] = [] + public allProductions: (IProductionWithOccurrence & { + maxLookahead?: number + })[] = [] public visitRepetitionWithSeparator(manySep: RepetitionWithSeparator): void { this.allProductions.push(manySep) diff --git a/packages/chevrotain/src/scan/lexer_public.ts b/packages/chevrotain/src/scan/lexer_public.ts index 056ce5f89..948e7c530 100644 --- a/packages/chevrotain/src/scan/lexer_public.ts +++ b/packages/chevrotain/src/scan/lexer_public.ts @@ -71,7 +71,7 @@ export interface IRegExpExec { exec: CustomPatternMatcherFunc } -const DEFAULT_LEXER_CONFIG: ILexerConfig = { +const DEFAULT_LEXER_CONFIG: Required = { deferDefinitionErrorsHandling: false, positionTracking: "full", lineTerminatorsPattern: /\n|\r\n?/g, @@ -102,7 +102,7 @@ export class Lexer { protected defaultMode: string protected emptyGroups: { [groupName: string]: IToken } = {} - private config: ILexerConfig + private config: Required private trackStartLines: boolean = true private trackEndLines: boolean = true private 
hasCustom: boolean = false @@ -114,7 +114,7 @@ export class Lexer { constructor( protected lexerDefinition: TokenType[] | IMultiModeLexerDefinition, - config: Partial = DEFAULT_LEXER_CONFIG + config: ILexerConfig = DEFAULT_LEXER_CONFIG ) { if (typeof config === "boolean") { throw Error( @@ -184,7 +184,7 @@ export class Lexer { } }) - if (!this.config.skipValidations) { + if (this.config.skipValidations === false) { this.TRACE_INIT("performRuntimeChecks", () => { this.lexerDefinitionErrors = this.lexerDefinitionErrors.concat( performRuntimeChecks( diff --git a/packages/types/api.d.ts b/packages/types/api.d.ts index e93020c23..7cdcba62e 100644 --- a/packages/types/api.d.ts +++ b/packages/types/api.d.ts @@ -1246,7 +1246,7 @@ export interface ILexerConfig { * This can be useful when wishing to indicate lexer errors in another manner * than simply throwing an error (for example in an online playground). */ - deferDefinitionErrorsHandling: boolean + deferDefinitionErrorsHandling?: boolean /** * "full" location information means all six combinations of /(end|start)(Line|Column|Offset)/ properties. @@ -1258,7 +1258,7 @@ export interface ILexerConfig { * in edge cases where every last ounce of performance is needed. */ // TODO: consider renaming this to LocationTracking to align with NodeLocationTracking option on the ParserConfig. - positionTracking: "full" | "onlyStart" | "onlyOffset" + positionTracking?: "full" | "onlyStart" | "onlyOffset" /** * A regExp defining custom line terminators. @@ -1266,7 +1266,7 @@ export interface ILexerConfig { * * Note that the regExp should use the global flag, for example: /\n/g * - * The default is: /\n|\r\n/g + * The default is: /\n|\r\n?/g * * But some grammars have a different definition, for example in ECMAScript: * https://www.ecma-international.org/ecma-262/8.0/index.html#sec-line-terminators @@ -1281,14 +1281,14 @@ export interface ILexerConfig { * keep in mind that for the default pattern: /\n|\r\n?/g an optimized implementation is already built-in. * This means the optimization is only relevant for lexers overriding the default pattern. */ - lineTerminatorsPattern: RegExp | ILineTerminatorsTester + lineTerminatorsPattern?: RegExp | ILineTerminatorsTester /** * Characters or CharCodes that represent line terminators for this lexer. * This always needs to be provided when using a custom {@link ILexerConfig.lineTerminatorsPattern}. * In the future this duplication may be removed or reduced. */ - lineTerminatorCharacters: (number | string)[] + lineTerminatorCharacters?: (number | string)[] /** * When true this flag will cause the Lexer to throw an Error @@ -1301,14 +1301,14 @@ export interface ILexerConfig { * The optimizations can boost the lexer's performance anywhere from 30% * to 100%+ depending on the number of TokenTypes used. */ - ensureOptimizations: boolean + ensureOptimizations?: boolean /** * Can be used to disable lexer optimizations * If there is a suspicion they are causing incorrect behavior. * Note that this would have negative performance implications. */ - safeMode: boolean + safeMode?: boolean /** * A custom error message provider. @@ -1317,7 +1317,7 @@ export interface ILexerConfig { * - Translating the error messages to a different languages. * - Changing the formatting. 
*/ - errorMessageProvider: ILexerErrorMessageProvider + errorMessageProvider?: ILexerErrorMessageProvider /** * Enabling this flag will print performance tracing logs during lexer @@ -1333,7 +1333,7 @@ export interface ILexerConfig { * * Note that passing the boolean `true` is identical to passing the numerical value `infinity` */ - traceInitPerf: boolean | number + traceInitPerf?: boolean | number /** * This flag will avoid running the Lexer validations during Lexer initialization. @@ -1346,7 +1346,7 @@ export interface ILexerConfig { * So they should not be skipped during development flows. * - For example: via a conditional that checks an env variable. */ - skipValidations: boolean + skipValidations?: boolean } export interface ILexerErrorMessageProvider { @@ -2300,7 +2300,6 @@ export interface IProduction { export interface IProductionWithOccurrence extends IProduction { idx: number - maxLookahead?: number } /** diff --git a/packages/utils/src/api.ts b/packages/utils/src/api.ts index c14955df8..195639330 100644 --- a/packages/utils/src/api.ts +++ b/packages/utils/src/api.ts @@ -370,6 +370,22 @@ export function groupBy( return result } +/** + * Merge obj2 into obj1. + * Will overwrite existing properties with the same name + */ +export function merge(obj1: any, obj2: any): any { + const result = cloneObj(obj1) + const keys2 = keys(obj2) + for (let i = 0; i < keys2.length; i++) { + const key = keys2[i] + const value = obj2[key] + result[key] = value + } + + return result +} + export function NOOP() {} export function IDENTITY(item: T): T {
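
Migration note for the BREAKING CHANGE in patch 1: packArray is removed from the
utils package with no replacement (the commit message describes it as broken).
If any downstream code still calls it and only needs a hole-free copy of the
same length, a minimal stand-in could look like the sketch below. The function
name is hypothetical and this is not part of the chevrotain API; callers that
relied on packArray specifically for V8 fast-elements behavior should verify
that a plain copy is sufficient for their use case.

    // Hypothetical stand-in for the removed utils packArray helper.
    // Array.from materializes holes as undefined, so the result is a
    // packed array with the same length and element order as the input.
    function packArrayReplacement<T>(holeyArr: T[]): (T | undefined)[] {
      return Array.from(holeyArr)
    }

    // usage: const packed = packArrayReplacement(sparseTokens)

One recurring strict-mode fix applied throughout these patches (for example the
`[] as TokenType[][]` and `[] as number[]` seeds above) is annotating empty
array literals passed as reduce accumulators, which strict mode would otherwise
infer as never[]. A minimal illustration of the pattern, with made-up values:

    // Without the "as number[]" annotation on the seed, strict mode infers
    // [] as never[] and rejects the concat call inside the reducer.
    const doubled = [1, 2, 3].reduce(
      (result, item) => result.concat([item * 2]),
      [] as number[]
    )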