Fixed regression in f-string tokenization that led to a false positive error when an escape character (backslash) is used with a carriage return or line feed within the non-expression part of an f-string. This addresses https://github.com/microsoft/pyright/issues/5167.

This commit is contained in:
Eric Traut 2023-05-24 08:49:21 -07:00
parent 7e95fe8d9e
commit 91a2ec2c2c
3 changed files with 26 additions and 10 deletions

View File

@ -1564,17 +1564,17 @@ export class Tokenizer {
) {
isInNamedUnicodeEscape = true;
} else {
// If this is an f-string, the only escape that is allowed is for
// a single or double quote symbol.
if (
!isFString ||
// If this is an f-string, the only escapes that are allowed are for
// a single or double quote symbol or a newline/carriage return.
const isEscapedQuote =
this._cs.getCurrentChar() === Char.SingleQuote ||
this._cs.getCurrentChar() === Char.DoubleQuote
) {
if (
this._cs.getCurrentChar() === Char.CarriageReturn ||
this._cs.getCurrentChar() === Char.LineFeed
) {
this._cs.getCurrentChar() === Char.DoubleQuote;
const isEscapedNewLine =
this._cs.getCurrentChar() === Char.CarriageReturn ||
this._cs.getCurrentChar() === Char.LineFeed;
if (!isFString || isEscapedQuote || isEscapedNewLine) {
if (isEscapedNewLine) {
if (
this._cs.getCurrentChar() === Char.CarriageReturn &&
this._cs.nextChar === Char.LineFeed

View File

@ -140,3 +140,9 @@ u1 = f"'{{\"{0}\": {0}}}'"
def func1(x):
f"x:{yield (lambda i: x * i)}"
v1 = f"x \
y"
v2 = f'x \
y'

View File

@ -652,6 +652,16 @@ test('Strings: f-string with single right brace', () => {
assert.equal(fStringEndToken.length, 1);
});
// Regression test for pyright issue #5167: a backslash-escaped newline in the
// non-expression part of an f-string must tokenize cleanly (start/middle/end)
// rather than producing a false-positive error.
test('Strings: f-string with new line escape', () => {
    const tokenizer = new Tokenizer();
    const results = tokenizer.tokenize(`f'x \\\ny'`);

    assert.equal(results.tokens.count, 3 + _implicitTokenCount);

    // The escaped newline stays inside a single FStringMiddle token.
    const expectedTypes = [TokenType.FStringStart, TokenType.FStringMiddle, TokenType.FStringEnd];
    expectedTypes.forEach((expectedType, index) => {
        assert.equal(results.tokens.getItemAt(index).type, expectedType);
    });
});
test('Strings: f-string with escape in expression', () => {
const t = new Tokenizer();
const results = t.tokenize(`f'hello { "\\t" }'`);