
Commit

Merge pull request #10454 from tk0miya/9096_fix_progress_bar_on_parallel_build

Fix #9096: sphinx-build: the value of progress bar for parallel build is wrong
tk0miya committed May 22, 2022
2 parents ca25b4f + 7586f9c commit 21df068
Showing 3 changed files with 35 additions and 15 deletions.
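
The diff below is easier to follow with the failure mode in mind: before this commit, both _read_parallel and _write_parallel wrapped the chunk-scheduling loop in status_iterator, so the progress bar advanced as soon as a chunk was handed to a worker, not when the worker finished. A minimal, illustrative sketch of that behaviour — toy_status_iterator is a stand-in written for this example, not Sphinx's real status_iterator:

# Toy stand-in for sphinx.util.status_iterator: prints a percentage as it
# yields each chunk. Wrapping the *scheduling* loop in it (as the old code
# did) drives the bar to 100% while every chunk is still waiting to be read.
from typing import Iterable, List


def toy_status_iterator(chunks: Iterable[List[str]], summary: str):
    chunks = list(chunks)
    for done, chunk in enumerate(chunks, start=1):
        print(f"{summary}[{100 * done // len(chunks):3d}%] {chunk}")
        yield chunk


queued = []
for chunk in toy_status_iterator([['index'], ['usage'], ['api']],
                                 'reading sources... '):
    queued.append(chunk)  # old code: tasks.add_task(read_process, chunk, merge)

# The bar already reported 100%, yet no chunk has actually been processed.
print('queued but not read:', queued)
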
1 change: 1 addition & 0 deletions CHANGES
@@ -23,6 +23,7 @@ Bugs fixed
* #10443: epub: EPUB builder can't detect the mimetype of .webp file
* #10456: py domain: ``:meta:`` fields are displayed if docstring contains two
or more meta-field
* #9096: sphinx-build: the value of progress bar for parallel build is wrong

Testing
--------
32 changes: 23 additions & 9 deletions sphinx/builders/__init__.py
@@ -25,6 +25,7 @@
from sphinx.util.osutil import SEP, ensuredir, relative_uri, relpath
from sphinx.util.parallel import ParallelTasks, SerialTasks, make_chunks, parallel_available
from sphinx.util.tags import Tags
from sphinx.util.typing import NoneType

# side effect: registers roles and directives
from sphinx import directives # NOQA isort:skip
@@ -429,6 +430,13 @@ def _read_serial(self, docnames: List[str]) -> None:
self.read_doc(docname)

def _read_parallel(self, docnames: List[str], nproc: int) -> None:
chunks = make_chunks(docnames, nproc)

# create a status_iterator to step progressbar after reading a document
# (see: ``merge()`` function)
progress = status_iterator(chunks, __('reading sources... '), "purple",
len(chunks), self.app.verbosity)

# clear all outdated docs at once
for docname in docnames:
self.events.emit('env-purge-doc', self.env, docname)
@@ -445,16 +453,15 @@ def merge(docs: List[str], otherenv: bytes) -> None:
env = pickle.loads(otherenv)
self.env.merge_info_from(docs, env, self.app)

tasks = ParallelTasks(nproc)
chunks = make_chunks(docnames, nproc)
next(progress)

for chunk in status_iterator(chunks, __('reading sources... '), "purple",
len(chunks), self.app.verbosity):
tasks = ParallelTasks(nproc)
for chunk in chunks:
tasks.add_task(read_process, chunk, merge)

# make sure all threads have finished
logger.info(bold(__('waiting for workers...')))
tasks.join()
logger.info('')

def read_doc(self, docname: str) -> None:
"""Parse a file and add/update inventory entries for the doctree."""
@@ -563,19 +570,26 @@ def write_process(docs: List[Tuple[str, nodes.document]]) -> None:
tasks = ParallelTasks(nproc)
chunks = make_chunks(docnames, nproc)

# create a status_iterator to step progressbar after writing a document
# (see: ``on_chunk_done()`` function)
progress = status_iterator(chunks, __('writing output... '), "darkgreen",
len(chunks), self.app.verbosity)

def on_chunk_done(args: List[Tuple[str, NoneType]], result: NoneType) -> None:
next(progress)

self.app.phase = BuildPhase.RESOLVING
for chunk in status_iterator(chunks, __('writing output... '), "darkgreen",
len(chunks), self.app.verbosity):
for chunk in chunks:
arg = []
for docname in chunk:
doctree = self.env.get_and_resolve_doctree(docname, self)
self.write_doc_serialized(docname, doctree)
arg.append((docname, doctree))
tasks.add_task(write_process, arg)
tasks.add_task(write_process, arg, on_chunk_done)

# make sure all threads have finished
logger.info(bold(__('waiting for workers...')))
tasks.join()
logger.info('')

def prepare_writing(self, docnames: Set[str]) -> None:
"""A place where you can add logic before :meth:`write_doc` is run"""
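
Stepping back from the builder diff: the fix creates the status_iterator generator up front and advances it with next() from the completion callbacks (merge() on the read side, on_chunk_done() on the write side), so each step of the bar now corresponds to a finished chunk. A rough, self-contained sketch of that pattern, using concurrent.futures in place of Sphinx's ParallelTasks and a toy progress generator in place of status_iterator (all names here are illustrative, not Sphinx APIs):

# Sketch of completion-driven progress: the generator is created once, and
# next() is called only when a chunk's result has been collected, mirroring
# merge()/on_chunk_done() above.
from concurrent.futures import ProcessPoolExecutor
from typing import Generator, List


def progress(chunks: List[List[str]], summary: str) -> Generator[List[str], None, None]:
    for done, chunk in enumerate(chunks, start=1):
        print(f"{summary}[{100 * done // len(chunks):3d}%] {chunk}")
        yield chunk


def read_chunk(chunk: List[str]) -> List[str]:
    return [docname.upper() for docname in chunk]  # pretend "read" step


if __name__ == '__main__':
    chunks = [['index', 'usage'], ['api', 'changes']]
    bar = progress(chunks, 'reading sources... ')
    with ProcessPoolExecutor(max_workers=2) as pool:
        futures = [pool.submit(read_chunk, chunk) for chunk in chunks]
        for future in futures:
            future.result()   # the real code merges the worker's result here
            next(bar)         # step the bar only after the chunk is done
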
17 changes: 11 additions & 6 deletions sphinx/util/__init__.py
@@ -13,8 +13,8 @@
from importlib import import_module
from os import path
from time import mktime, strptime
from typing import (IO, TYPE_CHECKING, Any, Callable, Dict, Iterable, Iterator, List, Optional,
Pattern, Set, Tuple, Type)
from typing import (IO, TYPE_CHECKING, Any, Callable, Dict, Generator, Iterable, List,
Optional, Pattern, Set, Tuple, Type, TypeVar)
from urllib.parse import parse_qsl, quote_plus, urlencode, urlsplit, urlunsplit

from sphinx.errors import ExtensionError, FiletypeNotFoundError, SphinxParallelError
@@ -445,8 +445,12 @@ def display_chunk(chunk: Any) -> str:
return str(chunk)


def old_status_iterator(iterable: Iterable, summary: str, color: str = "darkgreen",
stringify_func: Callable[[Any], str] = display_chunk) -> Iterator:
T = TypeVar('T')


def old_status_iterator(iterable: Iterable[T], summary: str, color: str = "darkgreen",
stringify_func: Callable[[Any], str] = display_chunk
) -> Generator[T, None, None]:
l = 0
for item in iterable:
if l == 0:
@@ -460,9 +464,10 @@ def old_status_iterator(iterable: Iterable[T], summary: str, color: str = "darkgree


# new version with progress info
def status_iterator(iterable: Iterable, summary: str, color: str = "darkgreen",
def status_iterator(iterable: Iterable[T], summary: str, color: str = "darkgreen",
length: int = 0, verbosity: int = 0,
stringify_func: Callable[[Any], str] = display_chunk) -> Iterable:
stringify_func: Callable[[Any], str] = display_chunk
) -> Generator[T, None, None]:
if length == 0:
yield from old_status_iterator(iterable, summary, color, stringify_func)
return
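
The typing change in sphinx/util/__init__.py supports the same pattern: since the builder now holds on to the iterator and drives it with next(), the two helpers are annotated as Generator[T, None, None] over a TypeVar, so the yielded items keep the element type of the input iterable. A small sketch of the idea — the counted helper below is hypothetical, not part of Sphinx:

# Minimal example of the Generator[T, None, None] + TypeVar pattern: the
# yielded values keep the caller's element type even when the generator is
# advanced manually with next().
from typing import Callable, Generator, Iterable, TypeVar

T = TypeVar('T')


def counted(items: Iterable[T],
            report: Callable[[int, T], None]) -> Generator[T, None, None]:
    for index, item in enumerate(items, start=1):
        report(index, item)
        yield item


bar = counted(['index', 'usage'], lambda i, item: print(f'{i}: {item}'))
first = next(bar)   # inferred as str, because T is bound to str here
print(first)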
