Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit e9cf5a3

Browse files
[3.12] gh-105820: Fix tok_mode expression buffer in file & readline tokenizer (GH-105828) (#105832)
(cherry picked from commit d382ad4) Co-authored-by: Lysandros Nikolaou <lisandrosnik@gmail.com>
1 parent abb4eaa commit e9cf5a3
Copy full SHA for e9cf5a3

File tree

4 files changed

+38
-4
lines changed
Filter options

4 files changed

+38
-4
lines changed

‎Lib/test/test_fstring.py

Copy file name to clipboard · Expand all lines: Lib/test/test_fstring.py
+14-1Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
import unittest
1616
from test import support
1717
from test.support.os_helper import temp_cwd
18-
from test.support.script_helper import assert_python_failure
18+
from test.support.script_helper import assert_python_failure, assert_python_ok
1919

2020
a_global = 'global variable'
2121

@@ -1635,5 +1635,18 @@ def test_syntax_error_after_debug(self):
16351635
"f'{1=}{1;}'",
16361636
])
16371637

1638+
def test_debug_in_file(self):
1639+
with temp_cwd():
1640+
script = 'script.py'
1641+
with open('script.py', 'w') as f:
1642+
f.write(f"""\
1643+
print(f'''{{
1644+
3
1645+
=}}''')""")
1646+
1647+
_, stdout, _ = assert_python_ok(script)
1648+
self.assertEqual(stdout.decode('utf-8').strip().replace('\r\n', '\n').replace('\r', '\n'),
1649+
"3\n=3")
1650+
16381651
if __name__ == '__main__':
16391652
unittest.main()

‎Lib/test/test_tokenize.py

Copy file name to clipboard · Expand all lines: Lib/test/test_tokenize.py
+13Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -558,6 +558,19 @@ def test_string(self):
558558
OP '}' (1, 39) (1, 40)
559559
FSTRING_MIDDLE ' final words' (1, 40) (1, 52)
560560
FSTRING_END "'" (1, 52) (1, 53)
561+
""")
562+
self.check_tokenize("""\
563+
f'''{
564+
3
565+
=}'''""", """\
566+
FSTRING_START "f'''" (1, 0) (1, 4)
567+
OP '{' (1, 4) (1, 5)
568+
NL '\\n' (1, 5) (1, 6)
569+
NUMBER '3' (2, 0) (2, 1)
570+
NL '\\n' (2, 1) (2, 2)
571+
OP '=' (3, 0) (3, 1)
572+
OP '}' (3, 1) (3, 2)
573+
FSTRING_END "'''" (3, 2) (3, 5)
561574
""")
562575

563576
def test_function(self):
+3Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
Fix an f-string bug, where using a debug expression (the ``=`` sign) that
2+
appears in the last line of a file results in the debug buffer that holds the
3+
expression text being one character too small.

‎Parser/tokenizer.c

Copy file name to clipboardExpand all lines: Parser/tokenizer.c
+8-3Lines changed: 8 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1039,9 +1039,6 @@ tok_readline_raw(struct tok_state *tok)
10391039
if (line == NULL) {
10401040
return 1;
10411041
}
1042-
if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) {
1043-
return 0;
1044-
}
10451042
if (tok->fp_interactive &&
10461043
tok_concatenate_interactive_new_line(tok, line) == -1) {
10471044
return 0;
@@ -1270,6 +1267,10 @@ tok_underflow_file(struct tok_state *tok) {
12701267
tok->implicit_newline = 1;
12711268
}
12721269

1270+
if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) {
1271+
return 0;
1272+
}
1273+
12731274
ADVANCE_LINENO();
12741275
if (tok->decoding_state != STATE_NORMAL) {
12751276
if (tok->lineno > 2) {
@@ -1314,6 +1315,10 @@ tok_underflow_readline(struct tok_state* tok) {
13141315
tok->implicit_newline = 1;
13151316
}
13161317

1318+
if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) {
1319+
return 0;
1320+
}
1321+
13171322
ADVANCE_LINENO();
13181323
/* The default encoding is UTF-8, so make sure we don't have any
13191324
non-UTF-8 sequences in it. */

0 commit comments

Comments
0 (0)
Morty Proxy This is a proxified and sanitized view of the page, visit original site.