Merge pull request #865 from asottile/py312-fstrings
fix some fstring edge cases for python 3.12
asottile committed Jul 30, 2023
2 parents db692b2 + 6bef40c commit 80a6553
Showing 28 changed files with 242 additions and 119 deletions.
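
Summary of the change: src-only token checks (token.src == '(', tokens[i].src in OPENING, find_open_paren, find_token) are replaced with name-aware equivalents (token.matches(name='OP', src='('), is_open/is_close, find_op, find_name). The likely motivation is Python 3.12's PEP 701 tokenization: the literal text of an f-string is now emitted as separate FSTRING_MIDDLE tokens, so a token whose src happens to be '(' or 'class' is no longer necessarily an operator or keyword. A minimal stdlib illustration of the new token stream (requires Python 3.12+; not part of this commit):

    import io
    import tokenize

    # On 3.12 the literal "(" and ")" inside the f-string come out as
    # FSTRING_MIDDLE tokens, not OP tokens, so matching on src alone could
    # mistake them for real parentheses.
    src = 'x = f"({y})"\n'
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))

On 3.11 and earlier the entire f-string is a single STRING token, which is why the old src-based checks used to be safe.
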
10 changes: 5 additions & 5 deletions pyupgrade/_main.py
@@ -25,8 +25,8 @@
from pyupgrade._string_helpers import is_codec
from pyupgrade._string_helpers import parse_format
from pyupgrade._string_helpers import unparse_parsed_string
from pyupgrade._token_helpers import CLOSING
from pyupgrade._token_helpers import OPENING
from pyupgrade._token_helpers import is_close
from pyupgrade._token_helpers import is_open
from pyupgrade._token_helpers import remove_brace


@@ -161,9 +161,9 @@ def _fix_extraneous_parens(tokens: list[Token], i: int) -> None:
# found comma or yield at depth 1: this is a tuple / coroutine
if depth == 1 and tokens[i].src in {',', 'yield'}:
return
elif tokens[i].src in OPENING:
elif is_open(tokens[i]):
depth += 1
elif tokens[i].src in CLOSING:
elif is_close(tokens[i]):
depth -= 1
end = i

@@ -284,7 +284,7 @@ def _fix_tokens(contents_text: str) -> str:
for i, token in reversed_enumerate(tokens):
if token.name == 'STRING':
tokens[i] = _fix_escape_sequences(_remove_u_prefix(tokens[i]))
elif token.src == '(':
elif token.matches(name='OP', src='('):
_fix_extraneous_parens(tokens, i)
elif token.src == 'format' and i > 0 and tokens[i - 1].src == '.':
_fix_format_literal(tokens, i - 2)
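
The is_open/is_close predicates imported above live in pyupgrade/_token_helpers.py, which is not part of this excerpt; a plausible sketch of what they check (an assumption based on how they are used here, not the actual implementation):

    OPENING = frozenset(('(', '[', '{'))
    CLOSING = frozenset((')', ']', '}'))

    def is_open(token):
        # a real bracket is an OP token; an FSTRING_MIDDLE fragment whose text
        # happens to be a bracket character is not counted
        return token.name == 'OP' and token.src in OPENING

    def is_close(token):
        return token.name == 'OP' and token.src in CLOSING
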
4 changes: 2 additions & 2 deletions pyupgrade/_plugins/default_encoding.py
@@ -13,11 +13,11 @@
from pyupgrade._data import TokenFunc
from pyupgrade._string_helpers import is_codec
from pyupgrade._token_helpers import find_closing_bracket
from pyupgrade._token_helpers import find_open_paren
from pyupgrade._token_helpers import find_op


def _fix_default_encoding(i: int, tokens: list[Token]) -> None:
i = find_open_paren(tokens, i + 1)
i = find_op(tokens, i + 1, '(')
j = find_closing_bracket(tokens, i)
del tokens[i + 1:j]
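
find_op replaces the old find_open_paren/find_token helpers for operator searches. Its definition also lives in _token_helpers.py and is not shown in this diff; a minimal sketch under the same assumption:

    def find_op(tokens, i, src):
        # scan forward for a genuine OP token with this src, skipping any
        # f-string literal fragment whose text merely looks like the operator
        while not tokens[i].matches(name='OP', src=src):
            i += 1
        return i
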

7 changes: 3 additions & 4 deletions pyupgrade/_plugins/format_locals.py
@@ -12,13 +12,12 @@
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import find_closing_bracket
from pyupgrade._token_helpers import find_open_paren
from pyupgrade._token_helpers import find_token
from pyupgrade._token_helpers import find_op


def _fix(i: int, tokens: list[Token]) -> None:
dot_pos = find_token(tokens, i, '.')
open_pos = find_open_paren(tokens, dot_pos)
dot_pos = find_op(tokens, i, '.')
open_pos = find_op(tokens, dot_pos, '(')
close_pos = find_closing_bracket(tokens, open_pos)
for string_idx in rfind_string_parts(tokens, dot_pos - 1):
tok = tokens[string_idx]
4 changes: 2 additions & 2 deletions pyupgrade/_plugins/imports.py
@@ -16,7 +16,7 @@
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import find_end
from pyupgrade._token_helpers import find_token
from pyupgrade._token_helpers import find_name
from pyupgrade._token_helpers import has_space_before
from pyupgrade._token_helpers import indented_amount

@@ -292,7 +292,7 @@ def parse(cls, i: int, tokens: list[Token]) -> FromImport:
j += 1
mod_start = j

import_token = find_token(tokens, j, 'import')
import_token = find_name(tokens, j, 'import')
j = import_token - 1
while tokens[j].name != 'NAME':
j -= 1
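
find_name plays the same role for keyword and identifier searches ('import' here; 'in' and 'mock' in later files); again a hypothetical sketch, since the real helper is outside this diff:

    def find_name(tokens, i, src):
        # keywords and identifiers are NAME tokens; f-string text that happens
        # to read 'import' or 'in' is FSTRING_MIDDLE and is skipped
        while not tokens[i].matches(name='NAME', src=src):
            i += 1
        return i
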
4 changes: 2 additions & 2 deletions pyupgrade/_plugins/io_open.py
@@ -10,11 +10,11 @@
from pyupgrade._data import register
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import find_open_paren
from pyupgrade._token_helpers import find_op


def _replace_io_open(i: int, tokens: list[Token]) -> None:
j = find_open_paren(tokens, i)
j = find_op(tokens, i, '(')
tokens[i:j] = [tokens[i]._replace(name='NAME', src='open')]


4 changes: 2 additions & 2 deletions pyupgrade/_plugins/legacy.py
@@ -19,13 +19,13 @@
from pyupgrade._token_helpers import Block
from pyupgrade._token_helpers import find_and_replace_call
from pyupgrade._token_helpers import find_block_start
from pyupgrade._token_helpers import find_token
from pyupgrade._token_helpers import find_name

FUNC_TYPES = (ast.Lambda, ast.FunctionDef, ast.AsyncFunctionDef)


def _fix_yield(i: int, tokens: list[Token]) -> None:
in_token = find_token(tokens, i, 'in')
in_token = find_name(tokens, i, 'in')
colon = find_block_start(tokens, i)
block = Block.find(tokens, i, trim_end=True)
container = tokens_to_src(tokens[in_token + 1:colon]).strip()
7 changes: 3 additions & 4 deletions pyupgrade/_plugins/lru_cache.py
@@ -13,13 +13,12 @@
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import find_and_replace_call
from pyupgrade._token_helpers import find_open_paren
from pyupgrade._token_helpers import find_token
from pyupgrade._token_helpers import find_op


def _remove_call(i: int, tokens: list[Token]) -> None:
i = find_open_paren(tokens, i)
j = find_token(tokens, i, ')')
i = find_op(tokens, i, '(')
j = find_op(tokens, i, ')')
del tokens[i:j + 1]


4 changes: 2 additions & 2 deletions pyupgrade/_plugins/mock.py
@@ -10,11 +10,11 @@
from pyupgrade._data import register
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import find_token
from pyupgrade._token_helpers import find_name


def _fix_mock_mock(i: int, tokens: list[Token]) -> None:
j = find_token(tokens, i + 1, 'mock')
j = find_name(tokens, i + 1, 'mock')
del tokens[i + 1:j + 1]


4 changes: 2 additions & 2 deletions pyupgrade/_plugins/native_literals.py
@@ -13,15 +13,15 @@
from pyupgrade._data import register
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import find_open_paren
from pyupgrade._token_helpers import find_op
from pyupgrade._token_helpers import parse_call_args
from pyupgrade._token_helpers import replace_call

SIX_NATIVE_STR = frozenset(('ensure_str', 'ensure_text', 'text_type'))


def _fix_literal(i: int, tokens: list[Token], *, empty: str) -> None:
j = find_open_paren(tokens, i)
j = find_op(tokens, i, '(')
func_args, end = parse_call_args(tokens, j)
if any(tok.name == 'NL' for tok in tokens[i:end]):
return
4 changes: 2 additions & 2 deletions pyupgrade/_plugins/open_mode.py
@@ -16,7 +16,7 @@
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import delete_argument
from pyupgrade._token_helpers import find_open_paren
from pyupgrade._token_helpers import find_op
from pyupgrade._token_helpers import parse_call_args


@@ -41,7 +41,7 @@ class FunctionArg(NamedTuple):


def _fix_open_mode(i: int, tokens: list[Token], *, arg_idx: int) -> None:
j = find_open_paren(tokens, i)
j = find_op(tokens, i, '(')
func_args, end = parse_call_args(tokens, j)
mode = tokens_to_src(tokens[slice(*func_args[arg_idx])])
mode_stripped = mode.split('=')[-1]
4 changes: 2 additions & 2 deletions pyupgrade/_plugins/oserror_aliases.py
@@ -12,7 +12,7 @@
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import arg_str
from pyupgrade._token_helpers import find_open_paren
from pyupgrade._token_helpers import find_op
from pyupgrade._token_helpers import parse_call_args
from pyupgrade._token_helpers import replace_name

@@ -30,7 +30,7 @@ def _fix_oserror_except(
except_index = i
while tokens[except_index].src != 'except':
except_index -= 1
start = find_open_paren(tokens, except_index)
start = find_op(tokens, except_index, '(')
func_args, end = parse_call_args(tokens, start)

# save the exceptions and remove the block
16 changes: 8 additions & 8 deletions pyupgrade/_plugins/set_literals.py
@@ -11,8 +11,9 @@
from pyupgrade._data import register
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import BRACES
from pyupgrade._token_helpers import immediately_paren
from pyupgrade._token_helpers import is_close
from pyupgrade._token_helpers import is_open
from pyupgrade._token_helpers import remove_brace
from pyupgrade._token_helpers import victims

@@ -25,13 +26,12 @@ def _fix_set_empty_literal(i: int, tokens: list[Token]) -> None:
return

j = i + 2
brace_stack = ['(']
while brace_stack:
token = tokens[j].src
if token == BRACES[brace_stack[-1]]:
brace_stack.pop()
elif token in BRACES:
brace_stack.append(token)
depth = 1
while depth:
if is_open(tokens[j]):
depth += 1
elif is_close(tokens[j]):
depth -= 1
j += 1

# Remove the inner tokens
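
In _fix_set_empty_literal the stack that matched specific brace types (via the old BRACES mapping of opening to closing brackets) is replaced by a plain nesting counter: for source that already parses, brackets are guaranteed to nest correctly, so counting opens and closes with is_open/is_close is equivalent and additionally ignores f-string fragments. A standalone illustration of the counting idea (hypothetical example, not pyupgrade internals):

    def match_close(chars: str) -> int:
        """Given the characters following an opening bracket, return the index
        just past the bracket that closes it, using nesting depth only."""
        depth = 1
        i = 0
        while depth:
            if chars[i] in '([{':
                depth += 1
            elif chars[i] in ')]}':
                depth -= 1
            i += 1
        return i

    assert match_close('())') == 3  # e.g. the tokens after `set(` in `set(())`
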
8 changes: 4 additions & 4 deletions pyupgrade/_plugins/shlex_join.py
@@ -12,15 +12,15 @@
from pyupgrade._data import register
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import find_open_paren
from pyupgrade._token_helpers import find_token
from pyupgrade._token_helpers import find_name
from pyupgrade._token_helpers import find_op
from pyupgrade._token_helpers import victims


def _fix_shlex_join(i: int, tokens: list[Token], *, arg: ast.expr) -> None:
j = find_open_paren(tokens, i)
j = find_op(tokens, i, '(')
comp_victims = victims(tokens, j, arg, gen=True)
k = find_token(tokens, comp_victims.arg_index, 'in') + 1
k = find_name(tokens, comp_victims.arg_index, 'in') + 1
while tokens[k].name in NON_CODING_TOKENS:
k += 1
tokens[comp_victims.ends[0]:comp_victims.ends[-1] + 1] = [Token('OP', ')')]
4 changes: 2 additions & 2 deletions pyupgrade/_plugins/six_calls.py
@@ -14,7 +14,7 @@
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import find_and_replace_call
from pyupgrade._token_helpers import find_open_paren
from pyupgrade._token_helpers import find_op
from pyupgrade._token_helpers import parse_call_args
from pyupgrade._token_helpers import replace_call

@@ -53,7 +53,7 @@


def _fix_six_b(i: int, tokens: list[Token]) -> None:
j = find_open_paren(tokens, i)
j = find_op(tokens, i, '(')
if (
tokens[j + 1].name == 'STRING' and
tokens[j + 1].src.isascii() and
8 changes: 4 additions & 4 deletions pyupgrade/_plugins/six_metaclasses.py
@@ -15,20 +15,20 @@
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import arg_str
from pyupgrade._token_helpers import find_block_start
from pyupgrade._token_helpers import find_open_paren
from pyupgrade._token_helpers import find_op
from pyupgrade._token_helpers import parse_call_args
from pyupgrade._token_helpers import remove_decorator
from pyupgrade._token_helpers import replace_call


def _fix_add_metaclass(i: int, tokens: list[Token]) -> None:
j = find_open_paren(tokens, i)
j = find_op(tokens, i, '(')
func_args, end = parse_call_args(tokens, j)
metaclass = f'metaclass={arg_str(tokens, *func_args[0])}'
# insert `metaclass={args[0]}` into `class:`
# search forward for the `class` token
j = i + 1
while tokens[j].src != 'class':
while not tokens[j].matches(name='NAME', src='class'):
j += 1
class_token = j
# then search forward for a `:` token, not inside a brace
@@ -55,7 +55,7 @@ def _fix_add_metaclass(i: int, tokens: list[Token]) -> None:


def _fix_with_metaclass(i: int, tokens: list[Token]) -> None:
j = find_open_paren(tokens, i)
j = find_op(tokens, i, '(')
func_args, end = parse_call_args(tokens, j)
if len(func_args) == 1:
tmpl = 'metaclass={args[0]}'
6 changes: 3 additions & 3 deletions pyupgrade/_plugins/subprocess_run.py
@@ -13,7 +13,7 @@
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import delete_argument
from pyupgrade._token_helpers import find_open_paren
from pyupgrade._token_helpers import find_op
from pyupgrade._token_helpers import parse_call_args
from pyupgrade._token_helpers import replace_argument

@@ -25,7 +25,7 @@ def _use_capture_output(
stdout_arg_idx: int,
stderr_arg_idx: int,
) -> None:
j = find_open_paren(tokens, i)
j = find_op(tokens, i, '(')
func_args, _ = parse_call_args(tokens, j)
if stdout_arg_idx < stderr_arg_idx:
delete_argument(stderr_arg_idx, tokens, func_args)
@@ -51,7 +51,7 @@ def _replace_universal_newlines_with_text(
*,
arg_idx: int,
) -> None:
j = find_open_paren(tokens, i)
j = find_op(tokens, i, '(')
func_args, _ = parse_call_args(tokens, j)
for i in range(*func_args[arg_idx]):
if tokens[i].src == 'universal_newlines':
4 changes: 2 additions & 2 deletions pyupgrade/_plugins/type_of_primitive.py
@@ -12,7 +12,7 @@
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import find_closing_bracket
from pyupgrade._token_helpers import find_open_paren
from pyupgrade._token_helpers import find_op

_TYPES = {
bool: 'bool',
@@ -30,7 +30,7 @@ def _rewrite_type_of_primitive(
*,
src: str,
) -> None:
open_paren = find_open_paren(tokens, i + 1)
open_paren = find_op(tokens, i + 1, '(')
j = find_closing_bracket(tokens, open_paren)
tokens[i] = tokens[i]._replace(src=src)
del tokens[i + 1:j + 1]
14 changes: 7 additions & 7 deletions pyupgrade/_plugins/typing_pep604.py
@@ -14,14 +14,14 @@
from pyupgrade._data import register
from pyupgrade._data import State
from pyupgrade._data import TokenFunc
from pyupgrade._token_helpers import CLOSING
from pyupgrade._token_helpers import find_closing_bracket
from pyupgrade._token_helpers import find_token
from pyupgrade._token_helpers import OPENING
from pyupgrade._token_helpers import find_op
from pyupgrade._token_helpers import is_close
from pyupgrade._token_helpers import is_open


def _fix_optional(i: int, tokens: list[Token]) -> None:
j = find_token(tokens, i, '[')
j = find_op(tokens, i, '[')
k = find_closing_bracket(tokens, j)
if tokens[j].line == tokens[k].line:
tokens[k] = Token('CODE', ' | None')
@@ -44,7 +44,7 @@ def _fix_union(
commas = []
coding_depth = None

j = find_token(tokens, i, '[')
j = find_op(tokens, i, '[')
k = j + 1
while depth:
# it's possible our first coding token is a close paren
@@ -59,12 +59,12 @@
else:
coding_depth = depth

if tokens[k].src in OPENING:
if is_open(tokens[k]):
if tokens[k].src == '(':
open_parens.append((depth, k))

depth += 1
elif tokens[k].src in CLOSING:
elif is_close(tokens[k]):
if tokens[k].src == ')':
paren_depth, open_paren = open_parens.pop()
parens_done.append((paren_depth, (open_paren, k)))
