
Commit

Merge pull request #857 from asottile/3-8-1-plus
require 3.8.1+
asottile committed Jul 3, 2023
2 parents 706789e + 5742eef commit a62c966
Showing 3 changed files with 6 additions and 21 deletions.
14 changes: 3 additions & 11 deletions pyupgrade/_plugins/versioned_branches.py
@@ -25,19 +25,13 @@ def _find_if_else_block(tokens: list[Token], i: int) -> tuple[Block, Block]:
     return if_block, else_block


-def _find_elif(tokens: list[Token], i: int) -> int:
-    while tokens[i].src != 'elif':  # pragma: no cover (only for <3.8.1)
-        i -= 1
-    return i
-
-
 def _fix_py3_block(i: int, tokens: list[Token]) -> None:
     if tokens[i].src == 'if':
         if_block = Block.find(tokens, i)
         if_block.dedent(tokens)
         del tokens[if_block.start:if_block.block]
     else:
-        if_block = Block.find(tokens, _find_elif(tokens, i))
+        if_block = Block.find(tokens, i)
         if_block.replace_condition(tokens, [Token('NAME', 'else')])


@@ -47,8 +41,7 @@ def _fix_py2_block(i: int, tokens: list[Token]) -> None:
         else_block.dedent(tokens)
         del tokens[if_block.start:else_block.block]
     else:
-        j = _find_elif(tokens, i)
-        if_block, else_block = _find_if_else_block(tokens, j)
+        if_block, else_block = _find_if_else_block(tokens, i)
         del tokens[if_block.start:else_block.start]


@@ -59,8 +52,7 @@ def _fix_py3_block_else(i: int, tokens: list[Token]) -> None:
         del tokens[if_block.end:else_block.end]
         del tokens[if_block.start:if_block.block]
     else:
-        j = _find_elif(tokens, i)
-        if_block, else_block = _find_if_else_block(tokens, j)
+        if_block, else_block = _find_if_else_block(tokens, i)
         del tokens[if_block.end:else_block.end]
         if_block.replace_condition(tokens, [Token('NAME', 'else')])

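For context, the helpers above rewrite version-gated branches, and the deleted _find_elif existed only to walk backwards to the `elif` keyword; as the diff shows, on 3.8.1+ the incoming token index already lands on it, so Block.find(tokens, i) can be called directly. A minimal sketch of the kind of input involved (illustrative only, not taken from this repository's tests):

import sys

# Shape of input handled by _fix_py2_block / _fix_py3_block (sketch):
if sys.version_info >= (3,):
    from urllib.request import urlopen    # py3 branch: kept
else:
    from urllib2 import urlopen           # dead py2 branch: dropped

# The `else:` paths in those helpers cover the same check spelled as an
# `elif`, which is where the removed _find_elif back-scan used to apply.
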
11 changes: 2 additions & 9 deletions pyupgrade/_token_helpers.py
@@ -49,17 +49,10 @@ def find_open_paren(tokens: list[Token], i: int) -> int:


 def find_end(tokens: list[Token], i: int) -> int:
-    while tokens[i].name not in {'NEWLINE', 'ENDMARKER'}:
-        i += 1
-
-    # depending on the version of python, some will not emit
-    # NEWLINE('') at the end of a file which does not end with a
-    # newline (for example 3.7.0)
-    if tokens[i].name == 'ENDMARKER':  # pragma: no cover
-        i -= 1
-    else:
+    while tokens[i].name != 'NEWLINE':
         i += 1

+    i += 1
     return i


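The branch deleted from find_end guarded against interpreters that did not emit a NEWLINE token for a file lacking a trailing newline (the removed comment cites 3.7.0). A quick way to observe the behavior the simplified loop now relies on, using the stdlib tokenize module (assumed here to mirror what tokenize-rt yields):

import io
import tokenize

src = 'x = 1'  # note: no trailing newline
names = [
    tokenize.tok_name[tok.type]
    for tok in tokenize.generate_tokens(io.StringIO(src).readline)
]
print(names)
# Expected on the interpreters this project now supports (3.8.1+):
# ['NAME', 'OP', 'NUMBER', 'NEWLINE', 'ENDMARKER'] -- a NEWLINE is still
# emitted, so scanning for NEWLINE alone is sufficient.
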
2 changes: 1 addition & 1 deletion setup.cfg
@@ -20,7 +20,7 @@ classifiers =
 packages = find:
 install_requires =
     tokenize-rt>=3.2.0
-python_requires = >=3.8
+python_requires = >=3.8.1

 [options.packages.find]
 exclude =
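The practical effect of the metadata bump is that installers honoring Requires-Python will skip this release on 3.8.0. A quick sanity check of the new specifier (uses the third-party packaging library, which is not a dependency of this project):

from packaging.specifiers import SpecifierSet

spec = SpecifierSet('>=3.8.1')
print('3.8.0' in spec)  # False: excluded by the new python_requires
print('3.8.1' in spec)  # True
print('3.12' in spec)   # True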
