Adding pre-commit #145

Merged: 3 commits, Apr 11, 2024
Changes from all commits
4 changes: 4 additions & 0 deletions .bandit.yml
@@ -1,2 +1,6 @@
skips:
- B101
- B311
- B320
- B410
exclude_dirs: ['tests']
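
The Bandit hook added later in .pre-commit-config.yaml passes this file with -r -c .bandit.yml. A roughly equivalent manual run, assuming the cssselect/ package directory as the target (the target path is not specified in this diff), would be:

    pip install bandit
    # recursive scan with the project config: skips B101/B311/B320/B410 and excludes tests/
    bandit -r -c .bandit.yml cssselect/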
1 change: 1 addition & 0 deletions .flake8
@@ -4,6 +4,7 @@ ignore =
W503
# too many leading '#' for block comment
E266
E704
exclude =
.git
.tox
2 changes: 2 additions & 0 deletions .git-blame-ignore-revs
@@ -0,0 +1,2 @@
# applying pre-commit hooks to the project
e91101b37f82558db84a6b8ee9a6dba1fd2ae0bb
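
git blame does not read this file automatically; a one-time local setting (plain git, nothing project-specific assumed) tells it to skip the listed formatting commit:

    # per clone: always consult the ignore file
    git config blame.ignoreRevsFile .git-blame-ignore-revs
    # or per invocation
    git blame --ignore-revs-file .git-blame-ignore-revs cssselect/parser.py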
15 changes: 6 additions & 9 deletions .github/workflows/checks.yml
@@ -7,18 +7,9 @@ jobs:
strategy:
matrix:
include:
- python-version: 3.12
env:
TOXENV: black
- python-version: 3.12
env:
TOXENV: flake8
- python-version: 3.12
env:
TOXENV: pylint
- python-version: 3.12
env:
TOXENV: security
- python-version: 3.12
env:
TOXENV: docs
@@ -40,3 +31,9 @@ jobs:
pip install -U pip
pip install -U tox
tox

pre-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: pre-commit/action@v3.0.0
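
The new CI job runs the same hooks a contributor can run locally. A typical local setup, using pre-commit's standard CLI (these commands are not part of this diff), looks like:

    pip install pre-commit
    # register the git pre-commit hook for this clone
    pre-commit install
    # run every configured hook against the whole repository, as the CI action does
    pre-commit run --all-files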
2 changes: 2 additions & 0 deletions .isort.cfg
@@ -0,0 +1,2 @@
[settings]
profile = black
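
The black profile makes isort's import style (line length, trailing commas, multi-line wrapping) agree with Black, and is what produced the reordered imports in cssselect/__init__.py below. isort picks this file up automatically, so a check could be as simple as (path assumed):

    # show what isort would change without rewriting files
    isort --check-only --diff cssselect/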
18 changes: 18 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,18 @@
repos:
- repo: https://github.com/PyCQA/bandit
rev: 1.7.8
hooks:
- id: bandit
args: [-r, -c, .bandit.yml]
- repo: https://github.com/PyCQA/flake8
rev: 7.0.0
hooks:
- id: flake8
- repo: https://github.com/psf/black.git
rev: 24.3.0
hooks:
- id: black
- repo: https://github.com/pycqa/isort
rev: 5.13.2
hooks:
- id: isort
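
Each hook is pinned to a fixed rev (bandit 1.7.8, flake8 7.0.0, black 24.3.0, isort 5.13.2). When newer releases appear, pre-commit can bump all of the pins in one step:

    # rewrite the rev: fields in .pre-commit-config.yaml to the latest tags
    pre-commit autoupdate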
6 changes: 3 additions & 3 deletions cssselect/__init__.py
@@ -14,13 +14,13 @@
"""

from cssselect.parser import (
parse,
Selector,
FunctionalPseudoElement,
Selector,
SelectorError,
SelectorSyntaxError,
parse,
)
from cssselect.xpath import GenericTranslator, HTMLTranslator, ExpressionError
from cssselect.xpath import ExpressionError, GenericTranslator, HTMLTranslator

__all__ = (
"ExpressionError",
93 changes: 66 additions & 27 deletions cssselect/parser.py
@@ -12,9 +12,9 @@

"""

import sys
import re
import operator
import re
import sys
import typing
from typing import Iterable, Iterator, List, Optional, Sequence, Tuple, Union

@@ -67,9 +67,13 @@ class Selector:

"""

def __init__(self, tree: Tree, pseudo_element: Optional[PseudoElement] = None) -> None:
def __init__(
self, tree: Tree, pseudo_element: Optional[PseudoElement] = None
) -> None:
self.parsed_tree = tree
if pseudo_element is not None and not isinstance(pseudo_element, FunctionalPseudoElement):
if pseudo_element is not None and not isinstance(
pseudo_element, FunctionalPseudoElement
):
pseudo_element = ascii_lower(pseudo_element)
#: A :class:`FunctionalPseudoElement`,
#: or the identifier for the pseudo-element as a string,
@@ -247,7 +251,11 @@ def __init__(self, selector: Tree, subselector: Tree) -> None:
self.subselector = subselector

def __repr__(self) -> str:
return "%s[%r:not(%r)]" % (self.__class__.__name__, self.selector, self.subselector)
return "%s[%r:not(%r)]" % (
self.__class__.__name__,
self.selector,
self.subselector,
)

def canonical(self) -> str:
subsel = self.subselector.canonical()
@@ -317,7 +325,10 @@ def canonical(self) -> str:
for s in self.selector_list:
selarg = s.canonical()
selector_arguments.append(selarg.lstrip("*"))
return "%s:is(%s)" % (self.selector.canonical(), ", ".join(map(str, selector_arguments)))
return "%s:is(%s)" % (
self.selector.canonical(),
", ".join(map(str, selector_arguments)),
)

def specificity(self) -> Tuple[int, int, int]:
return max(x.specificity() for x in self.selector_list)
@@ -367,14 +378,17 @@ def __init__(
attrib: str,
operator: 'typing.Literal["exists"]',
value: None,
) -> None:
...
) -> None: ...

@typing.overload
def __init__(
self, selector: Tree, namespace: Optional[str], attrib: str, operator: str, value: "Token"
) -> None:
...
self,
selector: Tree,
namespace: Optional[str],
attrib: str,
operator: str,
value: "Token",
) -> None: ...

def __init__(
self,
@@ -415,7 +429,11 @@ def canonical(self) -> str:
if self.operator == "exists":
op = attrib
else:
op = "%s%s%s" % (attrib, self.operator, typing.cast("Token", self.value).css())
op = "%s%s%s" % (
attrib,
self.operator,
typing.cast("Token", self.value).css(),
)

return "%s[%s]" % (self.selector.canonical(), op)

@@ -433,7 +451,9 @@ class Element:

"""

def __init__(self, namespace: Optional[str] = None, element: Optional[str] = None) -> None:
def __init__(
self, namespace: Optional[str] = None, element: Optional[str] = None
) -> None:
self.namespace = namespace
self.element = element

@@ -486,7 +506,12 @@ def __repr__(self) -> str:
comb = "<followed>"
else:
comb = self.combinator
return "%s[%r %s %r]" % (self.__class__.__name__, self.selector, comb, self.subselector)
return "%s[%r %s %r]" % (
self.__class__.__name__,
self.selector,
comb,
self.subselector,
)

def canonical(self) -> str:
subsel = self.subselector.canonical()
@@ -509,7 +534,9 @@ def specificity(self) -> Tuple[int, int, int]:
_id_re = re.compile(r"^[ \t\r\n\f]*([a-zA-Z]*)#([a-zA-Z0-9_-]+)[ \t\r\n\f]*$")

# foo.bar or .bar
_class_re = re.compile(r"^[ \t\r\n\f]*([a-zA-Z]*)\.([a-zA-Z][a-zA-Z0-9_-]*)[ \t\r\n\f]*$")
_class_re = re.compile(
r"^[ \t\r\n\f]*([a-zA-Z]*)\.([a-zA-Z][a-zA-Z0-9_-]*)[ \t\r\n\f]*$"
)


def parse(css: str) -> List[Selector]:
@@ -536,7 +563,9 @@ def parse(css: str) -> List[Selector]:
return [Selector(Hash(Element(element=match.group(1) or None), match.group(2)))]
match = _class_re.match(css)
if match is not None:
return [Selector(Class(Element(element=match.group(1) or None), match.group(2)))]
return [
Selector(Class(Element(element=match.group(1) or None), match.group(2)))
]

stream = TokenStream(tokenize(css))
stream.source = css
@@ -708,7 +737,10 @@ def parse_arguments(stream: "TokenStream") -> List["Token"]:
while 1:
stream.skip_whitespace()
next = stream.next()
if next.type in ("IDENT", "STRING", "NUMBER") or next in [("DELIM", "+"), ("DELIM", "-")]:
if next.type in ("IDENT", "STRING", "NUMBER") or next in [
("DELIM", "+"),
("DELIM", "-"),
]:
arguments.append(next)
elif next == ("DELIM", ")"):
return arguments
@@ -729,7 +761,10 @@ def parse_relative_selector(stream: "TokenStream") -> Tuple["Token", Selector]:
combinator = Token("DELIM", " ", pos=0)

while 1:
if next.type in ("IDENT", "STRING", "NUMBER") or next in [("DELIM", "."), ("DELIM", "*")]:
if next.type in ("IDENT", "STRING", "NUMBER") or next in [
("DELIM", "."),
("DELIM", "*"),
]:
subselector += typing.cast(str, next.value)
elif next == ("DELIM", ")"):
result = parse(subselector)
@@ -787,7 +822,9 @@ def parse_attrib(selector: Tree, stream: "TokenStream") -> Attrib:
return Attrib(selector, namespace, typing.cast(str, attrib), "exists", None)
elif next == ("DELIM", "="):
op = "="
elif next.is_delim("^", "$", "*", "~", "|", "!") and (stream.peek() == ("DELIM", "=")):
elif next.is_delim("^", "$", "*", "~", "|", "!") and (
stream.peek() == ("DELIM", "=")
):
op = typing.cast(str, next.value) + "="
stream.next()
else:
@@ -850,12 +887,12 @@ def __new__(
type_: 'typing.Literal["IDENT", "HASH", "STRING", "S", "DELIM", "NUMBER"]',
value: str,
pos: int,
) -> "Token":
...
) -> "Token": ...

@typing.overload
def __new__(cls, type_: 'typing.Literal["EOF"]', value: None, pos: int) -> "Token":
...
def __new__(
cls, type_: 'typing.Literal["EOF"]', value: None, pos: int
) -> "Token": ...

def __new__(cls, type_: str, value: Optional[str], pos: int) -> "Token":
obj = tuple.__new__(cls, (type_, value))
@@ -910,8 +947,7 @@ class TokenMacros:
class MatchFunc(typing.Protocol):
def __call__(
self, string: str, pos: int = ..., endpos: int = ...
) -> Optional["re.Match[str]"]:
...
) -> Optional["re.Match[str]"]: ...


def _compile(pattern: str) -> "MatchFunc":
@@ -970,7 +1006,8 @@ def tokenize(s: str) -> Iterator[Token]:
match = _match_hash(s, pos=pos)
if match:
value = _sub_simple_escape(
_replace_simple, _sub_unicode_escape(_replace_unicode, match.group()[1:])
_replace_simple,
_sub_unicode_escape(_replace_unicode, match.group()[1:]),
)
yield Token("HASH", value, pos)
pos = match.end()
@@ -987,7 +1024,9 @@ def tokenize(s: str) -> Iterator[Token]:
raise SelectorSyntaxError("Invalid string at %s" % pos)
value = _sub_simple_escape(
_replace_simple,
_sub_unicode_escape(_replace_unicode, _sub_newline_escape("", match.group())),
_sub_unicode_escape(
_replace_unicode, _sub_newline_escape("", match.group())
),
)
yield Token("STRING", value, pos)
pos = end_pos + 1