From a665706d8edd9bd40720e4a6d70e1fb9e7b238ab Mon Sep 17 00:00:00 2001
From: Michael Droettboom
Date: Fri, 12 Aug 2022 11:07:34 -0400
Subject: [PATCH 1/2] gh-94808: Coverage: Test that maximum indentation level
 is handled

---
 Lib/test/test_tokenize.py | 20 +++++++++++++++++++-
 1 file changed, 19 insertions(+), 1 deletion(-)

diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 1272e1e9be002e..948ad15c381a1d 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -3,7 +3,7 @@
 from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
                      STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
                      open as tokenize_open, Untokenizer, generate_tokens,
-                     NEWLINE, _generate_tokens_from_c_tokenizer)
+                     NEWLINE, _generate_tokens_from_c_tokenizer, DEDENT)
 from io import BytesIO, StringIO
 import unittest
 from textwrap import dedent
@@ -2512,6 +2512,24 @@ def get_tokens(string):
         self.assertRaises(SyntaxError, get_tokens, "("*1000+"a"+")"*1000)
         self.assertRaises(SyntaxError, get_tokens, "]")
 
+    def test_max_indent(self):
+        MAXINDENT = 100
+
+        def generate_source(indents):
+            source = ''.join((' ' * x) + 'if True:\n' for x in range(indents))
+            source += ' ' * indents + 'pass\n'
+            return source
+
+        valid = generate_source(MAXINDENT - 1)
+        tokens = list(_generate_tokens_from_c_tokenizer(valid))
+        self.assertEqual(tokens[-1].type, DEDENT)
+        exec(valid)
+
+        invalid = generate_source(MAXINDENT)
+        tokens = list(_generate_tokens_from_c_tokenizer(invalid))
+        self.assertEqual(tokens[-1].type, NEWLINE)
+        self.assertRaises(IndentationError, exec, invalid)
+
     def test_continuation_lines_indentation(self):
         def get_tokens(string):
             return [(kind, string) for (kind, string, *_) in _generate_tokens_from_c_tokenizer(string)]

From 0f7661b3f6b1877810b578f6a00b64011b68b394 Mon Sep 17 00:00:00 2001
From: Michael Droettboom
Date: Thu, 22 Sep 2022 10:01:37 -0400
Subject: [PATCH 2/2] Use "compile" rather than "exec"

---
 Lib/test/test_tokenize.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 948ad15c381a1d..47f2c06685bcaa 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -2523,12 +2523,14 @@ def generate_source(indents):
         valid = generate_source(MAXINDENT - 1)
         tokens = list(_generate_tokens_from_c_tokenizer(valid))
         self.assertEqual(tokens[-1].type, DEDENT)
-        exec(valid)
+        compile(valid, "<string>", "exec")
 
         invalid = generate_source(MAXINDENT)
         tokens = list(_generate_tokens_from_c_tokenizer(invalid))
         self.assertEqual(tokens[-1].type, NEWLINE)
-        self.assertRaises(IndentationError, exec, invalid)
+        self.assertRaises(
+            IndentationError, compile, invalid, "<string>", "exec"
+        )
 
     def test_continuation_lines_indentation(self):
         def get_tokens(string):
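
As context for the patches above, the limit they exercise can be reproduced standalone. The following is a minimal sketch, not part of the patch: it assumes CPython's C tokenizer caps indentation depth at 100 levels (the MAXINDENT constant in Parser/tokenizer.h, which the test's MAXINDENT = 100 mirrors); the generate_nested helper and the "<demo>" filename are illustrative names, and the exact error message may vary across versions.

# Minimal sketch, under the MAXINDENT == 100 assumption noted above.
def generate_nested(indents):
    # Build `indents` nested `if True:` blocks, one space deeper each,
    # terminated by a `pass` at the innermost level.
    source = ''.join((' ' * x) + 'if True:\n' for x in range(indents))
    source += ' ' * indents + 'pass\n'
    return source

# 99 indentation levels still tokenize and compile cleanly.
compile(generate_nested(99), "<demo>", "exec")

# 100 levels overflow the tokenizer's indent stack and are rejected.
try:
    compile(generate_nested(100), "<demo>", "exec")
except IndentationError as exc:
    print("rejected:", exc)  # e.g. "too many levels of indentation"

The switch in PATCH 2/2 from exec() to compile() is worth noting as a design choice: compile() still drives the tokenizer and parser across every indentation level, which is all the test needs, while avoiding the execution of the generated nested blocks inside the test process.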