Skip to content

Commit

Permalink
fix(message-compiler): linked modifier breaks with parenthesis (#1506)
Browse files Browse the repository at this point in the history
* fix(message-compiler): modifier breaks with parenthesis

* update

* update lock file

* drop rollup cjs hack

* update deps
  • Loading branch information
kazupon authored Aug 28, 2023
1 parent daf9603 commit b0c99b4
Show file tree
Hide file tree
Showing 6 changed files with 943 additions and 2,032 deletions.
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@
"@types/node": "^18.16.18",
"@typescript-eslint/eslint-plugin": "^6.0.0",
"@typescript-eslint/parser": "^6.0.0",
"@vitest/coverage-c8": "^0.33.0",
"@vitest/coverage-v8": "^0.34.3",
"api-docs-gen": "^0.4.0",
"benchmark": "^2.1.4",
"brotli": "^1.3.2",
Expand Down
5 changes: 4 additions & 1 deletion packages/message-compiler/src/tokenizer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ const enum TokenChars {
}

const EOF = undefined
const DOT = '.'
const LITERAL_DELIMITER = "'"
export const ERROR_DOMAIN = 'tokenizer'

Expand Down Expand Up @@ -659,10 +660,12 @@ export function createTokenizer(
return buf
} else if (ch === SPACE) {
return buf
} else if (ch === NEW_LINE) {
} else if (ch === NEW_LINE || ch === DOT) {
buf += ch
scnr.next()
return fn(detect, buf)
} else if (!isIdentifierStart(ch)) {
return buf
} else {
buf += ch
scnr.next()
Expand Down
150 changes: 150 additions & 0 deletions packages/message-compiler/test/tokenizer/linked.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -553,6 +553,156 @@ test('multiple', () => {
})
})

test('wrap with paren', () => {
  // A linked message (`@.upper:foo`) wrapped in parentheses: the closing
  // paren must terminate the linked key and be emitted again as plain text.
  const tokenizer = createTokenizer(`Welcome (@.upper:foo)`)
  // All tokens sit on line 1; build a loc from column/offset pairs.
  const loc = (
    startColumn: number,
    startOffset: number,
    endColumn: number,
    endOffset: number
  ) => ({
    start: { line: 1, column: startColumn, offset: startOffset },
    end: { line: 1, column: endColumn, offset: endOffset }
  })
  // Expected token stream, in emission order (EOF carries no value).
  const expectedTokens = [
    { type: TokenTypes.Text, value: 'Welcome (', loc: loc(1, 0, 10, 9) },
    { type: TokenTypes.LinkedAlias, value: '@', loc: loc(10, 9, 11, 10) },
    { type: TokenTypes.LinkedDot, value: '.', loc: loc(11, 10, 12, 11) },
    { type: TokenTypes.LinkedModifier, value: 'upper', loc: loc(12, 11, 17, 16) },
    { type: TokenTypes.LinkedDelimiter, value: ':', loc: loc(17, 16, 18, 17) },
    { type: TokenTypes.LinkedKey, value: 'foo', loc: loc(18, 17, 21, 20) },
    { type: TokenTypes.Text, value: ')', loc: loc(21, 20, 22, 21) },
    { type: TokenTypes.EOF, loc: loc(22, 21, 22, 21) }
  ]
  for (const token of expectedTokens) {
    expect(tokenizer.nextToken()).toEqual(token)
  }
})

test('wrap with paren, inside brace', () => {
  // A parenthesized linked message whose key is a named interpolation
  // (`{param}`): the brace pair must tokenize normally and the trailing
  // ` )` must come back as plain text.
  const tokenizer = createTokenizer(`Welcome (@.upper:{param} )`)
  // All tokens sit on line 1; build a loc from column/offset pairs.
  const loc = (
    startColumn: number,
    startOffset: number,
    endColumn: number,
    endOffset: number
  ) => ({
    start: { line: 1, column: startColumn, offset: startOffset },
    end: { line: 1, column: endColumn, offset: endOffset }
  })
  // Expected token stream, in emission order (EOF carries no value).
  const expectedTokens = [
    { type: TokenTypes.Text, value: 'Welcome (', loc: loc(1, 0, 10, 9) },
    { type: TokenTypes.LinkedAlias, value: '@', loc: loc(10, 9, 11, 10) },
    { type: TokenTypes.LinkedDot, value: '.', loc: loc(11, 10, 12, 11) },
    { type: TokenTypes.LinkedModifier, value: 'upper', loc: loc(12, 11, 17, 16) },
    { type: TokenTypes.LinkedDelimiter, value: ':', loc: loc(17, 16, 18, 17) },
    { type: TokenTypes.BraceLeft, value: '{', loc: loc(18, 17, 19, 18) },
    { type: TokenTypes.Named, value: 'param', loc: loc(19, 18, 24, 23) },
    { type: TokenTypes.BraceRight, value: '}', loc: loc(24, 23, 25, 24) },
    { type: TokenTypes.Text, value: ' )', loc: loc(25, 24, 27, 26) },
    { type: TokenTypes.EOF, loc: loc(27, 26, 27, 26) }
  ]
  for (const token of expectedTokens) {
    expect(tokenizer.nextToken()).toEqual(token)
  }
})

describe('errors', () => {
let errors: CompileError[], options: TokenizeOptions
beforeEach(() => {
Expand Down
Loading

0 comments on commit b0c99b4

Please sign in to comment.