Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

Commit 23e83a8

Browse files
authored
gh-94808: Coverage: Test that maximum indentation level is handled (#95926)
* gh-94808: Coverage: Test that maximum indentation level is handled * Use "compile" rather than "exec"
1 parent e2e6b95 commit 23e83a8
Copy full SHA for 23e83a8

File tree

Expand file tree / Collapse file tree

1 file changed

+21
-1
lines changed
Filter options
Expand file tree / Collapse file tree

1 file changed

+21
-1
lines changed

‎Lib/test/test_tokenize.py

Copy file name to clipboard · Expand all lines: Lib/test/test_tokenize.py
+21 −1 · Lines changed: 21 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
44
STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
55
open as tokenize_open, Untokenizer, generate_tokens,
6-
NEWLINE, _generate_tokens_from_c_tokenizer)
6+
NEWLINE, _generate_tokens_from_c_tokenizer, DEDENT)
77
from io import BytesIO, StringIO
88
import unittest
99
from textwrap import dedent
@@ -2512,6 +2512,26 @@ def get_tokens(string):
25122512
self.assertRaises(SyntaxError, get_tokens, "("*1000+"a"+")"*1000)
25132513
self.assertRaises(SyntaxError, get_tokens, "]")
25142514

2515+
def test_max_indent(self):
    """Coverage for the C tokenizer's maximum-indentation handling.

    Checks both sides of the limit: a source nested one level below
    MAXINDENT tokenizes fully and compiles, while a source nested at
    MAXINDENT stops tokenizing early and fails to compile with
    IndentationError.

    NOTE(review): MAXINDENT here presumably mirrors the C tokenizer's
    internal indentation limit (Parser/tokenizer.h) -- confirm they are
    kept in sync if either changes.
    """
    MAXINDENT = 100

    def generate_source(indents):
        # Build `indents` nested "if True:" lines, each one space deeper
        # than the previous, terminated by a matching-depth "pass".
        source = ''.join((' ' * x) + 'if True:\n' for x in range(indents))
        source += ' ' * indents + 'pass\n'
        return source

    # Just below the limit: tokenization runs to completion -- the final
    # token emitted is a DEDENT -- and the source compiles cleanly.
    valid = generate_source(MAXINDENT - 1)
    tokens = list(_generate_tokens_from_c_tokenizer(valid))
    self.assertEqual(tokens[-1].type, DEDENT)
    compile(valid, "<string>", "exec")

    # At the limit: the tokenizer stops before closing the blocks (the
    # last token observed is a NEWLINE, not a DEDENT), and compiling the
    # same source raises IndentationError.
    invalid = generate_source(MAXINDENT)
    tokens = list(_generate_tokens_from_c_tokenizer(invalid))
    self.assertEqual(tokens[-1].type, NEWLINE)
    self.assertRaises(
        IndentationError, compile, invalid, "<string>", "exec"
    )
2534+
25152535
def test_continuation_lines_indentation(self):
25162536
def get_tokens(string):
25172537
return [(kind, string) for (kind, string, *_) in _generate_tokens_from_c_tokenizer(string)]

0 commit comments

Comments
0 (0)
Morty Proxy This is a proxified and sanitized view of the page, visit original site.