Skip to content

Commit

Permalink
Use two separate limits and update tests
Browse files Browse the repository at this point in the history
  • Loading branch information
pablogsal committed May 1, 2023
1 parent 2c48eae commit de9a495
Show file tree
Hide file tree
Showing 3 changed files with 26 additions and 11 deletions.
21 changes: 16 additions & 5 deletions Lib/test/test_fstring.py
Original file line number Diff line number Diff line change
Expand Up @@ -565,7 +565,23 @@ def test_fstring_nested_too_deeply(self):
self.assertAllRaise(SyntaxError,
"f-string: expressions nested too deeply",
['f"{1+2:{1+2:{1+1:{1}}}}"'])

def create_nested_fstring(n):
if n == 0:
return "1+1"
prev = create_nested_fstring(n-1)
return f'f"{{{prev}}}"'

self.assertAllRaise(SyntaxError,
"too many nested f-strings",
[create_nested_fstring(160)])

def test_syntax_error_in_nested_fstring(self):
    # Regression test for gh-104016: a syntax error occurring inside a
    # deeply nested chain of unterminated f-strings used to crash the
    # tokenizer instead of being reported as a SyntaxError.
    source = 'f"{1 1:' + '{f"1:' * 199
    self.assertAllRaise(SyntaxError, "invalid syntax", [source])

def test_double_braces(self):
self.assertEqual(f'{{', '{')
self.assertEqual(f'a{{', 'a{')
Expand Down Expand Up @@ -1355,7 +1371,6 @@ def test_filename_in_syntaxerror(self):
# see issue 38964
with temp_cwd() as cwd:
file_path = os.path.join(cwd, 't.py')
with open(file_path, 'w', encoding="utf-8") as f:
f.write('f"{a b}"') # This generates a SyntaxError
_, _, stderr = assert_python_failure(file_path,
PYTHONIOENCODING='ascii')
Expand Down Expand Up @@ -1549,9 +1564,5 @@ def test_syntax_error_after_debug(self):
"f'{1=}{1;}'",
])

def test_nested_fstring_max_stack_level(self):
with self.assertRaises(SyntaxError):
compile('f"{1 1:' + ('{f"1:' * 199), "?", "exec")

if __name__ == '__main__':
unittest.main()
9 changes: 6 additions & 3 deletions Parser/tokenizer.c
Original file line number Diff line number Diff line change
Expand Up @@ -43,12 +43,12 @@
#ifdef Py_DEBUG
/* Return a pointer to the tokenizer mode currently on top of the
 * f-string mode stack. Debug-build helper: asserts the stack index
 * is within [0, MAXFSTRINGLEVEL). */
static inline tokenizer_mode* TOK_GET_MODE(struct tok_state* tok) {
    int idx = tok->tok_mode_stack_index;
    assert(idx >= 0 && idx < MAXFSTRINGLEVEL);
    return tok->tok_mode_stack + idx;
}
/* Advance to (push) the next f-string tokenizer mode and return a
 * pointer to the freshly occupied slot. Debug-build helper: asserts
 * the push does not overflow the mode stack. */
static inline tokenizer_mode* TOK_NEXT_MODE(struct tok_state* tok) {
    int next = tok->tok_mode_stack_index + 1;
    assert(next >= 1 && next < MAXFSTRINGLEVEL);
    tok->tok_mode_stack_index = next;
    return &tok->tok_mode_stack[next];
}
#else
Expand Down Expand Up @@ -2235,6 +2235,9 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t

p_start = tok->start;
p_end = tok->cur;
if (tok->tok_mode_stack_index + 1 >= MAXFSTRINGLEVEL) {
return MAKE_TOKEN(syntaxerror(tok, "too many nested f-strings"));
}
tokenizer_mode *the_current_tok = TOK_NEXT_MODE(tok);
the_current_tok->kind = TOK_FSTRING_MODE;
the_current_tok->f_string_quote = quote;
Expand Down Expand Up @@ -2413,7 +2416,7 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t
case '(':
case '[':
case '{':
if (tok->level >= MAXLEVEL || tok->tok_mode_stack_index + 1 >= MAXLEVEL) {
if (tok->level >= MAXLEVEL) {
return MAKE_TOKEN(syntaxerror(tok, "too many nested parentheses"));
}
tok->parenstack[tok->level] = c;
Expand Down
7 changes: 4 additions & 3 deletions Parser/tokenizer.h
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,9 @@ extern "C" {

#include "pycore_token.h" /* For token types */

#define MAXINDENT 100 /* Max indentation level */
#define MAXLEVEL 200 /* Max parentheses level */
#define MAXINDENT 100 /* Max indentation level */
#define MAXLEVEL 200 /* Max parentheses level */
#define MAXFSTRINGLEVEL 150 /* Max f-string nesting level */

enum decoding_state {
STATE_INIT,
Expand Down Expand Up @@ -123,7 +124,7 @@ struct tok_state {
enum interactive_underflow_t interactive_underflow;
int report_warnings;
// TODO: Factor this into its own thing
tokenizer_mode tok_mode_stack[MAXLEVEL];
tokenizer_mode tok_mode_stack[MAXFSTRINGLEVEL];
int tok_mode_stack_index;
int tok_report_warnings;
#ifdef Py_DEBUG
Expand Down

0 comments on commit de9a495

Please sign in to comment.