Skip to content

Commit

Permalink
👌 IMPROVE: Add more typing (#116)
Browse files Browse the repository at this point in the history
  • Loading branch information
hukkinj1 committed Dec 31, 2020
1 parent e303bd4 commit 4f1346c
Show file tree
Hide file tree
Showing 7 changed files with 87 additions and 73 deletions.
13 changes: 7 additions & 6 deletions markdown_it/cli/parse.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
"""
import argparse
import sys
from typing import Iterable, Optional, Sequence

from markdown_it import __version__
from markdown_it.main import MarkdownIt
Expand All @@ -14,7 +15,7 @@
version_str = "markdown-it-py [version {}]".format(__version__)


def main(args=None):
def main(args: Optional[Sequence[str]] = None) -> bool:
namespace = parse_args(args)
if namespace.filenames:
convert(namespace.filenames)
Expand All @@ -23,12 +24,12 @@ def main(args=None):
return True


def convert(filenames):
def convert(filenames: Iterable[str]) -> None:
for filename in filenames:
convert_file(filename)


def convert_file(filename):
def convert_file(filename: str) -> None:
"""
Parse a Markdown file and dump the output to stdout.
"""
Expand All @@ -40,7 +41,7 @@ def convert_file(filename):
sys.exit('Cannot open file "{}".'.format(filename))


def interactive():
def interactive() -> None:
"""
Parse user input, dump to stdout, rinse and repeat.
Python REPL style.
Expand All @@ -61,7 +62,7 @@ def interactive():
break


def parse_args(args):
def parse_args(args: Optional[Sequence[str]]) -> argparse.Namespace:
"""Parse input CLI arguments."""
parser = argparse.ArgumentParser(
description="Parse one or more markdown files, "
Expand Down Expand Up @@ -96,7 +97,7 @@ def parse_args(args):
return parser.parse_args(args)


def print_heading():
def print_heading() -> None:
print("{} (interactive)".format(version_str))
print("Type Ctrl-D to complete input, or Ctrl-C to exit.")

Expand Down
22 changes: 16 additions & 6 deletions markdown_it/main.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,15 @@
from contextlib import contextmanager
from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional, Union
from typing import (
Any,
Callable,
Dict,
Generator,
Iterable,
List,
Mapping,
Optional,
Union,
)

from . import helpers, presets # noqa F401
from .common import utils # noqa F401
Expand Down Expand Up @@ -48,10 +58,10 @@ def __init__(

self.linkify = linkify_it.LinkifyIt() if linkify_it else None

def __repr__(self):
def __repr__(self) -> str:
return f"{self.__class__.__module__}.{self.__class__.__name__}()"

def __getitem__(self, name):
def __getitem__(self, name: str) -> Any:
return {
"inline": self.inline,
"block": self.block,
Expand All @@ -69,7 +79,7 @@ def set(self, options: AttrDict) -> None:
"""
self.options = options

def configure(self, presets: Union[str, Mapping]):
def configure(self, presets: Union[str, Mapping]) -> "MarkdownIt":
"""Batch load of all options and component settings.
This is an internal method, and you probably will not need it.
But if you will - see available presets and data structure
Expand Down Expand Up @@ -177,7 +187,7 @@ def disable(
return self

@contextmanager
def reset_rules(self):
def reset_rules(self) -> Generator[None, None, None]:
"""A context manager, that will reset the current enabled rules on exit."""
chain_rules = self.get_active_rules()
yield
Expand All @@ -186,7 +196,7 @@ def reset_rules(self):
self[chain].ruler.enableOnly(rules)
self.inline.ruler2.enableOnly(chain_rules["inline2"])

def add_render_rule(self, name: str, function: Callable, fmt="html"):
def add_render_rule(self, name: str, function: Callable, fmt: str = "html") -> None:
"""Add a rule for rendering a particular Token type.
Only applied when ``renderer.__output__ == fmt``
Expand Down
32 changes: 16 additions & 16 deletions markdown_it/parser_block.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,24 +10,24 @@
LOGGER = logging.getLogger(__name__)


_rules = [
_rules: List[Tuple] = [
# First 2 params - rule name & source. Secondary array - list of rules,
# which can be terminated by this one.
["table", rules_block.table, ["paragraph", "reference"]],
["code", rules_block.code],
["fence", rules_block.fence, ["paragraph", "reference", "blockquote", "list"]],
[
("table", rules_block.table, ["paragraph", "reference"]),
("code", rules_block.code),
("fence", rules_block.fence, ["paragraph", "reference", "blockquote", "list"]),
(
"blockquote",
rules_block.blockquote,
["paragraph", "reference", "blockquote", "list"],
],
["hr", rules_block.hr, ["paragraph", "reference", "blockquote", "list"]],
["list", rules_block.list_block, ["paragraph", "reference", "blockquote"]],
["reference", rules_block.reference],
["heading", rules_block.heading, ["paragraph", "reference", "blockquote"]],
["lheading", rules_block.lheading],
["html_block", rules_block.html_block, ["paragraph", "reference", "blockquote"]],
["paragraph", rules_block.paragraph],
),
("hr", rules_block.hr, ["paragraph", "reference", "blockquote", "list"]),
("list", rules_block.list_block, ["paragraph", "reference", "blockquote"]),
("reference", rules_block.reference),
("heading", rules_block.heading, ["paragraph", "reference", "blockquote"]),
("lheading", rules_block.lheading),
("html_block", rules_block.html_block, ["paragraph", "reference", "blockquote"]),
("paragraph", rules_block.paragraph),
]


Expand All @@ -47,7 +47,7 @@ def __init__(self):

def tokenize(
self, state: StateBlock, startLine: int, endLine: int, silent: bool = False
):
) -> None:
"""Generate tokens for input range."""
rules = self.ruler.getRules("")
line = startLine
Expand Down Expand Up @@ -99,10 +99,10 @@ def parse(
env,
outTokens: List[Token],
ords: Optional[Tuple[int, ...]] = None,
):
) -> Optional[List[Token]]:
"""Process input string and push block tokens into `outTokens`."""
if not src:
return
return None
state = StateBlock(src, md, env, outTokens, ords)
self.tokenize(state, state.line, state.lineMax)
return state.tokens
20 changes: 10 additions & 10 deletions markdown_it/parser_core.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,20 +4,20 @@
* Top-level rules executor. Glues block/inline parsers and does intermediate
* transformations.
"""
from typing import List, Tuple


from .ruler import Ruler
from .ruler import Ruler, RuleFunc
from .rules_core.state_core import StateCore
from .rules_core import normalize, block, inline, replace, smartquotes, linkify


_rules = [
["normalize", normalize],
["block", block],
["inline", inline],
["linkify", linkify],
["replacements", replace],
["smartquotes", smartquotes],
_rules: List[Tuple[str, RuleFunc]] = [
("normalize", normalize),
("block", block),
("inline", inline),
("linkify", linkify),
("replacements", replace),
("smartquotes", smartquotes),
]


Expand All @@ -27,7 +27,7 @@ def __init__(self):
for name, rule in _rules:
self.ruler.push(name, rule)

def process(self, state: StateCore):
def process(self, state: StateCore) -> None:
"""Executes core chain rules."""
for rule in self.ruler.getRules(""):
rule(state)
44 changes: 22 additions & 22 deletions markdown_it/parser_inline.py
Original file line number Diff line number Diff line change
@@ -1,32 +1,32 @@
"""Tokenizes paragraph content.
"""
from typing import List
from typing import List, Tuple

from .ruler import Ruler
from .ruler import Ruler, RuleFunc
from .token import Token
from .rules_inline.state_inline import StateInline
from . import rules_inline

# Parser rules
_rules = [
["text", rules_inline.text],
["newline", rules_inline.newline],
["escape", rules_inline.escape],
["backticks", rules_inline.backtick],
["strikethrough", rules_inline.strikethrough.tokenize],
["emphasis", rules_inline.emphasis.tokenize],
["link", rules_inline.link],
["image", rules_inline.image],
["autolink", rules_inline.autolink],
["html_inline", rules_inline.html_inline],
["entity", rules_inline.entity],
_rules: List[Tuple[str, RuleFunc]] = [
("text", rules_inline.text),
("newline", rules_inline.newline),
("escape", rules_inline.escape),
("backticks", rules_inline.backtick),
("strikethrough", rules_inline.strikethrough.tokenize),
("emphasis", rules_inline.emphasis.tokenize),
("link", rules_inline.link),
("image", rules_inline.image),
("autolink", rules_inline.autolink),
("html_inline", rules_inline.html_inline),
("entity", rules_inline.entity),
]

_rules2 = [
["balance_pairs", rules_inline.link_pairs],
["strikethrough", rules_inline.strikethrough.postProcess],
["emphasis", rules_inline.emphasis.postProcess],
["text_collapse", rules_inline.text_collapse],
_rules2: List[Tuple[str, RuleFunc]] = [
("balance_pairs", rules_inline.link_pairs),
("strikethrough", rules_inline.strikethrough.postProcess),
("emphasis", rules_inline.emphasis.postProcess),
("text_collapse", rules_inline.text_collapse),
]


Expand All @@ -40,7 +40,7 @@ def __init__(self):
for name, rule2 in _rules2:
self.ruler2.push(name, rule2)

def skipToken(self, state: StateInline):
def skipToken(self, state: StateInline) -> None:
"""Skip single token by running all rules in validation mode;
returns `True` if any rule reported success
"""
Expand Down Expand Up @@ -82,7 +82,7 @@ def skipToken(self, state: StateInline):
state.pos += 1
cache[pos] = state.pos

def tokenize(self, state: StateInline):
def tokenize(self, state: StateInline) -> None:
"""Generate tokens for input range."""
ok = False
rules = self.ruler.getRules("")
Expand Down Expand Up @@ -114,7 +114,7 @@ def tokenize(self, state: StateInline):
if state.pending:
state.pushPending()

def parse(self, src: str, md, env, tokens: List[Token]):
def parse(self, src: str, md, env, tokens: List[Token]) -> List[Token]:
"""Process input string and push inline tokens into `tokens`"""
state = StateInline(src, md, env, tokens)
self.tokenize(state)
Expand Down
27 changes: 15 additions & 12 deletions markdown_it/renderer.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ class Renderer
rules if you create plugin and adds new token types.
"""
import inspect
from typing import Sequence
from typing import Optional, Sequence

from .common.utils import unescapeAll, escapeHtml
from .token import Token
Expand Down Expand Up @@ -151,7 +151,7 @@ def renderToken(
return result

@staticmethod
def renderAttrs(token):
def renderAttrs(token: Token) -> str:
"""Render token attributes to string."""
if not token.attrs:
return ""
Expand All @@ -169,7 +169,9 @@ def renderAttrs(token):

return result

def renderInlineAsText(self, tokens: Sequence[Token], options, env) -> str:
def renderInlineAsText(
self, tokens: Optional[Sequence[Token]], options, env
) -> str:
"""Special kludge for image `alt` attributes to conform CommonMark spec.
Don't try to use it! Spec requires to show `alt` content with stripped markup,
Expand All @@ -192,7 +194,7 @@ def renderInlineAsText(self, tokens: Sequence[Token], options, env) -> str:

###################################################

def code_inline(self, tokens: Sequence[Token], idx, options, env):
def code_inline(self, tokens: Sequence[Token], idx: int, options, env) -> str:
token = tokens[idx]
return (
"<code"
Expand All @@ -202,7 +204,7 @@ def code_inline(self, tokens: Sequence[Token], idx, options, env):
+ "</code>"
)

def code_block(self, tokens: Sequence[Token], idx, options, env):
def code_block(self, tokens: Sequence[Token], idx: int, options, env) -> str:
token = tokens[idx]

return (
Expand All @@ -213,7 +215,7 @@ def code_block(self, tokens: Sequence[Token], idx, options, env):
+ "</code></pre>\n"
)

def fence(self, tokens: Sequence[Token], idx, options, env):
def fence(self, tokens: Sequence[Token], idx: int, options, env) -> str:
token = tokens[idx]
info = unescapeAll(token.info).strip() if token.info else ""
langName = ""
Expand Down Expand Up @@ -262,8 +264,9 @@ def fence(self, tokens: Sequence[Token], idx, options, env):
+ "</code></pre>\n"
)

def image(self, tokens: Sequence[Token], idx, options, env):
def image(self, tokens: Sequence[Token], idx: int, options, env) -> str:
token = tokens[idx]
assert token.attrs is not None, '"image" token\'s attrs must not be `None`'

# "alt" attr MUST be set, even if empty. Because it's mandatory and
# should be placed on proper position for tests.
Expand All @@ -276,19 +279,19 @@ def image(self, tokens: Sequence[Token], idx, options, env):

return self.renderToken(tokens, idx, options, env)

def hardbreak(self, tokens: Sequence[Token], idx, options, *args):
def hardbreak(self, tokens: Sequence[Token], idx: int, options, *args) -> str:
return "<br />\n" if options.xhtmlOut else "<br>\n"

def softbreak(self, tokens: Sequence[Token], idx, options, *args):
def softbreak(self, tokens: Sequence[Token], idx: int, options, *args) -> str:
return (
("<br />\n" if options.xhtmlOut else "<br>\n") if options.breaks else "\n"
)

def text(self, tokens: Sequence[Token], idx, *args):
def text(self, tokens: Sequence[Token], idx: int, *args) -> str:
return escapeHtml(tokens[idx].content)

def html_block(self, tokens: Sequence[Token], idx, *args):
def html_block(self, tokens: Sequence[Token], idx: int, *args) -> str:
return tokens[idx].content

def html_inline(self, tokens: Sequence[Token], idx, *args):
def html_inline(self, tokens: Sequence[Token], idx: int, *args) -> str:
return tokens[idx].content
2 changes: 1 addition & 1 deletion markdown_it/ruler.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ def __find__(self, name: str) -> int:
return i
return -1

def __compile__(self):
def __compile__(self) -> None:
"""Build rules lookup cache"""
chains = {""}
# collect unique names
Expand Down

0 comments on commit 4f1346c

Please sign in to comment.