in Utilities/gyb.py [0:0]
def tokenize_python_to_unmatched_close_curly(source_text, start, line_starts):
    """Scan source_text from index start with Python's tokenizer, tracking
    curly-brace nesting depth.

    Returns the index of the first unmatched close curly brace, or
    len(source_text) when every '}' is matched by an earlier '{'.  If the
    tokenizer reports an error, returns the index of the error position
    instead.
    """
    reader = StringIO(source_text)
    reader.seek(start)

    depth = 0
    try:
        for token in tokenize.generate_tokens(reader.readline):
            text, begin = token[1], token[2]
            if text == '{':
                depth += 1
            elif text == '}':
                depth -= 1
                if depth < 0:
                    # First '}' with no matching '{': report its index.
                    return token_pos_to_index(begin, start, line_starts)
    except tokenize.TokenError as err:
        # err.args is (message, (row, column)); map the (row, column)
        # position back to an index into source_text.
        return token_pos_to_index(err.args[1], start, line_starts)

    return len(source_text)