# MkDocs Plugins
## SearchPlugin
Add plugin templates and scripts to config.
Source: `mkdocs.contrib.search.SearchPlugin.on_config`

```python
def on_config(self, config: MkDocsConfig, **kwargs) -> MkDocsConfig:
    """Add plugin templates and scripts to config."""
    if config.theme.get('include_search_page'):
        config.theme.static_templates.add('search.html')
    if not config.theme.get('search_index_only'):
        path = os.path.join(base_path, 'templates')
        config.theme.dirs.append(path)
        if 'search/main.js' not in config.extra_javascript:
            config.extra_javascript.append('search/main.js')  # type: ignore
    if self.config.lang is None:
        # lang setting undefined. Set default based on theme locale
        validate = _PluginConfig.lang.run_validation
        self.config.lang = validate(config.theme.locale.language)
    # The `python` method of `prebuild_index` is pending deprecation as of version 1.2.
    # TODO: Raise a deprecation warning in a future release (1.3?).
    if self.config.prebuild_index == 'python':
        log.info(
            "The 'python' method of the search plugin's 'prebuild_index' config option "
            "is pending deprecation and will not be supported in a future release."
        )
    return config
```

Hook info

::: mkdocs.plugins.BasePlugin.on_config
    options:
      show_source: False
      show_root_toc_entry: False
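This hook mutates the config in place. A minimal sketch of the same pattern inside a custom plugin; the class and names are illustrative, not part of MkDocs:

```python
from mkdocs.config.defaults import MkDocsConfig
from mkdocs.plugins import BasePlugin


class MySearchLikePlugin(BasePlugin):
    def on_config(self, config: MkDocsConfig) -> MkDocsConfig:
        # Ask the theme to additionally render a standalone template...
        config.theme.static_templates.add("search.html")
        # ...and inject a script, guarding against double registration
        # (on_config can run repeatedly under `mkdocs serve`).
        if "search/main.js" not in config.extra_javascript:
            config.extra_javascript.append("search/main.js")  # type: ignore[arg-type]
        return config
```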
Create search index instance for later use.
Source: `mkdocs.contrib.search.SearchPlugin.on_pre_build`

```python
def on_pre_build(self, config: MkDocsConfig, **kwargs) -> None:
    """Create search index instance for later use."""
    self.search_index = SearchIndex(**self.config)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_pre_build
    options:
      show_source: False
      show_root_toc_entry: False
Build search index.
Source: `mkdocs.contrib.search.SearchPlugin.on_post_build`

```python
def on_post_build(self, config: MkDocsConfig, **kwargs) -> None:
    """Build search index."""
    output_base_path = os.path.join(config.site_dir, 'search')
    search_index = self.search_index.generate_search_index()
    json_output_path = os.path.join(output_base_path, 'search_index.json')
    utils.write_file(search_index.encode('utf-8'), json_output_path)
    assert self.config.lang is not None
    if not config.theme.get('search_index_only'):
        # Include language support files in output. Copy them directly
        # so that only the needed files are included.
        files = []
        if len(self.config.lang) > 1 or 'en' not in self.config.lang:
            files.append('lunr.stemmer.support.js')
        if len(self.config.lang) > 1:
            files.append('lunr.multi.js')
        if 'ja' in self.config.lang or 'jp' in self.config.lang:
            files.append('tinyseg.js')
        for lang in self.config.lang:
            if lang != 'en':
                files.append(f'lunr.{lang}.js')
        for filename in files:
            from_path = os.path.join(base_path, 'lunr-language', filename)
            to_path = os.path.join(output_base_path, filename)
            utils.copy_file(from_path, to_path)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_post_build
    options:
      show_source: False
      show_root_toc_entry: False
Add page to search index.
Source: `mkdocs.contrib.search.SearchPlugin.on_page_context`

```python
def on_page_context(self, context: TemplateContext, page: Page, **kwargs) -> None:
    """Add page to search index."""
    self.search_index.add_entry_from_context(page)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_page_context
    options:
      show_source: False
      show_root_toc_entry: False
## AutorefsPlugin
Instantiate our Markdown extension.

Hook for the [`on_config` event](https://www.mkdocs.org/user-guide/plugins/#on_config).
In this hook, we instantiate our [`AutorefsExtension`][mkdocs_autorefs.references.AutorefsExtension]
and add it to the list of Markdown extensions used by `mkdocs`.

Arguments:

- `config`: The MkDocs config object.

Returns:

- The modified config.
Source: `mkdocs_autorefs.plugin.AutorefsPlugin.on_config`

```python
def on_config(self, config: MkDocsConfig) -> MkDocsConfig | None:
    """Instantiate our Markdown extension.

    Hook for the [`on_config` event](https://www.mkdocs.org/user-guide/plugins/#on_config).
    In this hook, we instantiate our [`AutorefsExtension`][mkdocs_autorefs.references.AutorefsExtension]
    and add it to the list of Markdown extensions used by `mkdocs`.

    Arguments:
        config: The MkDocs config object.

    Returns:
        The modified config.
    """
    log.debug("Adding AutorefsExtension to the list")
    config["markdown_extensions"].append(AutorefsExtension(self))
    return config
```

Hook info

::: mkdocs.plugins.BasePlugin.on_config
    options:
      show_source: False
      show_root_toc_entry: False
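The pattern generalizes to any plugin that ships its own Markdown extension. A minimal sketch, with a hypothetical `MyExtension` standing in for `AutorefsExtension`:

```python
from markdown.extensions import Extension
from mkdocs.config.defaults import MkDocsConfig
from mkdocs.plugins import BasePlugin


class MyExtension(Extension):
    """Hypothetical no-op Markdown extension."""


class MyPlugin(BasePlugin):
    def on_config(self, config: MkDocsConfig) -> MkDocsConfig:
        # Appending an extension *instance* works because MkDocs hands
        # this list straight to markdown.Markdown(extensions=...).
        config["markdown_extensions"].append(MyExtension())
        return config
```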
Remember which page is the current one.

Arguments:

- `markdown`: Input Markdown.
- `page`: The related MkDocs page instance.
- `kwargs`: Additional arguments passed by MkDocs.

Returns:

- The same Markdown. We only use this hook to keep a reference to the current page URL,
  used during Markdown conversion by the anchor scanner tree processor.
Source: `mkdocs_autorefs.plugin.AutorefsPlugin.on_page_markdown`

```python
def on_page_markdown(self, markdown: str, page: Page, **kwargs: Any) -> str:  # noqa: ARG002
    """Remember which page is the current one.

    Arguments:
        markdown: Input Markdown.
        page: The related MkDocs page instance.
        kwargs: Additional arguments passed by MkDocs.

    Returns:
        The same Markdown. We only use this hook to keep a reference to the current page URL,
        used during Markdown conversion by the anchor scanner tree processor.
    """
    self.current_page = page.url
    return markdown
```

Hook info

::: mkdocs.plugins.BasePlugin.on_page_markdown
    options:
      show_source: False
      show_root_toc_entry: False
Map anchors to URLs.

Hook for the [`on_page_content` event](https://www.mkdocs.org/user-guide/plugins/#on_page_content).
In this hook, we map the IDs of every anchor found in the table of contents to the anchors' absolute URLs.
This mapping will be used later to fix unresolved references of the form `[title][identifier]` or
`[identifier][]`.

Arguments:

- `html`: HTML converted from Markdown.
- `page`: The related MkDocs page instance.
- `kwargs`: Additional arguments passed by MkDocs.

Returns:

- The same HTML. We only use this hook to map anchors to URLs.
Source: `mkdocs_autorefs.plugin.AutorefsPlugin.on_page_content`

```python
def on_page_content(self, html: str, page: Page, **kwargs: Any) -> str:  # noqa: ARG002
    """Map anchors to URLs.

    Hook for the [`on_page_content` event](https://www.mkdocs.org/user-guide/plugins/#on_page_content).
    In this hook, we map the IDs of every anchor found in the table of contents to the anchors absolute URLs.
    This mapping will be used later to fix unresolved reference of the form `[title][identifier]` or
    `[identifier][]`.

    Arguments:
        html: HTML converted from Markdown.
        page: The related MkDocs page instance.
        kwargs: Additional arguments passed by MkDocs.

    Returns:
        The same HTML. We only use this hook to map anchors to URLs.
    """
    if self.scan_toc:
        log.debug("Mapping identifiers to URLs for page %s", page.file.src_path)
        for item in page.toc.items:
            self.map_urls(page.url, item)
    return html
```

Hook info

::: mkdocs.plugins.BasePlugin.on_page_content
    options:
      show_source: False
      show_root_toc_entry: False
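The `map_urls` call above recurses through the table of contents. A sketch of what such a walk looks like, using MkDocs' `AnchorLink` items and an illustrative `urls` dict in place of the plugin's real state:

```python
from mkdocs.structure.toc import AnchorLink


def map_urls(urls: dict[str, str], page_url: str, anchor: AnchorLink) -> None:
    # Record the absolute URL for this anchor's identifier...
    urls[anchor.id] = f"{page_url}#{anchor.id}"
    # ...then recurse into nested headings.
    for child in anchor.children:
        map_urls(urls, page_url, child)


root = AnchorLink("Usage", "usage", 1)
root.children.append(AnchorLink("Install", "install", 2))
urls: dict[str, str] = {}
map_urls(urls, "guide/", root)
print(urls)  # {'usage': 'guide/#usage', 'install': 'guide/#install'}
```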
Fix cross-references.

Hook for the [`on_post_page` event](https://www.mkdocs.org/user-guide/plugins/#on_post_page).
In this hook, we try to fix unresolved references of the form `[title][identifier]` or `[identifier][]`.
Doing that allows the user of `autorefs` to cross-reference objects in their documentation strings.
It uses the native Markdown syntax, so it's easy to remember and use.

We log a warning for each reference that we couldn't map to a URL, but we try to be smart and ignore identifiers
that do not look legitimate (sometimes documentation can contain strings matching
our [`AUTO_REF_RE`][mkdocs_autorefs.references.AUTO_REF_RE] regular expression that were not intended to reference anything).
We currently ignore references whose identifier contains a space or a slash.

Arguments:

- `output`: HTML converted from Markdown.
- `page`: The related MkDocs page instance.
- `kwargs`: Additional arguments passed by MkDocs.

Returns:

- Modified HTML.
Source: `mkdocs_autorefs.plugin.AutorefsPlugin.on_post_page`

```python
def on_post_page(self, output: str, page: Page, **kwargs: Any) -> str:  # noqa: ARG002
    """Fix cross-references.

    Hook for the [`on_post_page` event](https://www.mkdocs.org/user-guide/plugins/#on_post_page).
    In this hook, we try to fix unresolved references of the form `[title][identifier]` or `[identifier][]`.
    Doing that allows the user of `autorefs` to cross-reference objects in their documentation strings.
    It uses the native Markdown syntax so it's easy to remember and use.

    We log a warning for each reference that we couldn't map to an URL, but try to be smart and ignore identifiers
    that do not look legitimate (sometimes documentation can contain strings matching
    our [`AUTO_REF_RE`][mkdocs_autorefs.references.AUTO_REF_RE] regular expression that did not intend to reference anything).
    We currently ignore references when their identifier contains a space or a slash.

    Arguments:
        output: HTML converted from Markdown.
        page: The related MkDocs page instance.
        kwargs: Additional arguments passed by MkDocs.

    Returns:
        Modified HTML.
    """
    log.debug("Fixing references in page %s", page.file.src_path)
    url_mapper = functools.partial(self.get_item_url, from_url=page.url, fallback=self.get_fallback_anchor)
    fixed_output, unmapped = fix_refs(output, url_mapper, _legacy_refs=self.legacy_refs)
    if unmapped and log.isEnabledFor(logging.WARNING):
        for ref, context in unmapped:
            message = f"from {context.filepath}:{context.lineno}: ({context.origin}) " if context else ""
            log.warning(f"{page.file.src_path}: {message}Could not find cross-reference target '{ref}'")
    return fixed_output
```

Hook info

::: mkdocs.plugins.BasePlugin.on_post_page
    options:
      show_source: False
      show_root_toc_entry: False
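To make the mechanics concrete, here is an illustrative miniature of reference fixing, deliberately simpler than the real `AUTO_REF_RE` and `fix_refs`, including the space/slash heuristic mentioned above:

```python
import re

AUTO_REF = re.compile(r"\[(?P<title>[^\]]+)\]\[(?P<identifier>[^\]]*)\]")


def fix_refs(html: str, url_map: dict[str, str]) -> str:
    def repl(match: re.Match) -> str:
        identifier = match["identifier"] or match["title"]
        # Leave illegitimate-looking or unmapped references untouched.
        if " " in identifier or "/" in identifier or identifier not in url_map:
            return match.group(0)
        return f'<a href="{url_map[identifier]}">{match["title"]}</a>'

    return AUTO_REF.sub(repl, html)


print(fix_refs("See [Page.url][mkdocs.structure.pages.Page.url].",
               {"mkdocs.structure.pages.Page.url": "pages.html#url"}))
```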
## MkNodesPlugin
Activates new-style MkDocs plugin lifecycle.
Source: `mkdocs_mknodes.plugin.MkNodesPlugin.on_startup`

```python
def on_startup(self, *, command: CommandStr, dirty: bool):
    """Activates new-style MkDocs plugin lifecycle."""
```

Hook info

::: mkdocs.plugins.BasePlugin.on_startup
    options:
      show_source: False
      show_root_toc_entry: False
Create the project based on MkDocs config.
Source: `mkdocs_mknodes.plugin.MkNodesPlugin.on_config`

```python
def on_config(self, config: mknodesconfig.MkNodesConfig):  # type: ignore
    """Create the project based on MkDocs config."""
    if config.build_folder:
        self.build_folder = pathlib.Path(config.build_folder)
    else:
        self._dir = tempfile.TemporaryDirectory(
            prefix="mknodes_",
            ignore_cleanup_errors=True,
        )
        self.build_folder = pathlib.Path(self._dir.name)
        logger.debug("Creating temporary dir %s", self._dir.name)
    if not config.build_fn:
        return
    self.linkprovider = linkprovider.LinkProvider(
        base_url=config.site_url or "",
        use_directory_urls=config.use_directory_urls,
        include_stdlib=True,
    )
    self.theme = mk.Theme.get_theme(
        theme_name=config.theme.name or "material",
        data=dict(config.theme),
    )
    git_repo = reporegistry.get_repo(
        str(config.repo_path or "."),
        clone_depth=config.clone_depth,
    )
    self.folderinfo = folderinfo.FolderInfo(git_repo.working_dir)
    self.context = contexts.ProjectContext(
        metadata=self.folderinfo.context,
        git=self.folderinfo.git.context,
        # github=self.folderinfo.github.context,
        theme=self.theme.context,
        links=self.linkprovider,
        env_config=config.get_jinja_config(),
    )
```

Hook info

::: mkdocs.plugins.BasePlugin.on_config
    options:
      show_source: False
      show_root_toc_entry: False
Create the node tree and write files to build folder.
In this step we aggregate all files and info we need to build the website.
This includes:
- Markdown pages (MkPages)
- Templates
- CSS files
Source: `mkdocs_mknodes.plugin.MkNodesPlugin.on_files`

```python
def on_files(self, files: Files, *, config: mknodesconfig.MkNodesConfig) -> Files:  # type: ignore
    """Create the node tree and write files to build folder.

    In this step we aggregate all files and info we need to build the website.
    This includes:

    - Markdown pages (MkPages)
    - Templates
    - CSS files
    """
    if not config.build_fn:
        return files
    logger.info("Generating pages...")
    build_fn = config.get_builder()
    self.root = mk.MkNav(context=self.context)
    build_fn(theme=self.theme, root=self.root)
    logger.debug("Finished building page.")
    paths = [
        pathlib.Path(node.resolved_file_path).stem
        for _level, node in self.root.iter_nodes()
        if hasattr(node, "resolved_file_path")
    ]
    self.linkprovider.set_excludes(paths)
    # now we add our stuff to the MkDocs build environment
    cfg = mkdocsconfig.Config(config)
    logger.info("Updating MkDocs config metadata...")
    cfg.update_from_context(self.root.ctx)
    self.theme.adapt_extras(cfg.extra)
    logger.info("Setting up build backends...")
    mkdocs_backend = mkdocsbackend.MkDocsBackend(
        files=files,
        config=config,
        directory=self.build_folder,
    )
    markdown_backend = markdownbackend.MarkdownBackend(
        directory=pathlib.Path(config.site_dir) / "src",
        extension=".original",
    )
    collector = buildcollector.BuildCollector(
        backends=[mkdocs_backend, markdown_backend],
        show_page_info=config.show_page_info,
        global_resources=config.global_resources,
        render_by_default=config.render_by_default,
    )
    self.build_info = collector.collect(self.root, self.theme)
    if nav_dict := self.root.nav.to_nav_dict():
        match config.nav:
            case list():
                for k, v in nav_dict.items():
                    config.nav.append({k: v})
            case dict():
                config.nav |= nav_dict
            case None:
                config.nav = nav_dict
    return mkdocs_backend.files
```

Hook info

::: mkdocs.plugins.BasePlugin.on_files
    options:
      show_source: False
      show_root_toc_entry: False
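The nav merging at the end of `on_files` handles the three shapes `config.nav` can take. The same `match` logic, isolated as a runnable sketch with illustrative values:

```python
nav_dict = {"Reference": "reference.md"}

for nav in ([{"Home": "index.md"}], {"Home": "index.md"}, None):
    match nav:
        case list():
            # List-style nav: append generated sections one by one.
            for k, v in nav_dict.items():
                nav.append({k: v})
        case dict():
            # Dict-style nav: merge generated entries in.
            nav |= nav_dict
        case None:
            # No nav configured: use the generated one as-is.
            nav = nav_dict
    print(nav)
```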
Populate LinkReplacer and build path->MkPage mapping for following steps.
Source: `mkdocs_mknodes.plugin.MkNodesPlugin.on_nav`

```python
def on_nav(
    self,
    nav: Navigation,
    /,
    *,
    config: MkDocsConfig,
    files: Files,
) -> Navigation | None:
    """Populate LinkReplacer and build path->MkPage mapping for following steps."""
    for file_ in files:
        assert file_.abs_src_path
        filename = pathlib.Path(file_.abs_src_path).name
        url = urllib.parse.unquote(file_.src_uri)
        self.link_replacer.mapping[filename].append(url)
    return nav
```

Hook info

::: mkdocs.plugins.BasePlugin.on_nav
    options:
      show_source: False
      show_root_toc_entry: False
Add our own info to the MkDocs environment.
Source: `mkdocs_mknodes.plugin.MkNodesPlugin.on_env`

```python
def on_env(
    self,
    env: jinja2.Environment,
    /,
    *,
    config: mknodesconfig.MkNodesConfig,  # type: ignore
    files: Files,
) -> jinja2.Environment | None:
    """Add our own info to the MkDocs environment."""
    rope_env = jinjarope.Environment()
    env.globals["mknodes"] = rope_env.globals
    env.filters |= rope_env.filters
    logger.debug("Added macros / filters to MkDocs jinja2 environment.")
    if config.rewrite_theme_templates:
        assert env.loader
        env.loader = jinjarope.RewriteLoader(env.loader, rewriteloader.rewrite)
        logger.debug("Injected Jinja2 Rewrite loader.")
    return env
```

Hook info

::: mkdocs.plugins.BasePlugin.on_env
    options:
      show_source: False
      show_root_toc_entry: False
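Extending the Jinja environment this way is plain `jinja2` API. A generic, self-contained sketch; the payloads are illustrative, not what mknodes actually registers:

```python
import jinja2

env = jinja2.Environment(autoescape=True)
env.globals["mknodes"] = {"answer": 42}       # illustrative global payload
env.filters["shout"] = lambda s: s.upper()    # illustrative filter

print(env.from_string("{{ mknodes.answer }} {{ 'hi' | shout }}").render())
```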
Delete the temporary template files.
Source: `mkdocs_mknodes.plugin.MkNodesPlugin.on_post_build`

```python
def on_post_build(self, *, config: mknodesconfig.MkNodesConfig) -> None:  # type: ignore
    """Delete the temporary template files."""
    if not config.theme.custom_dir or not config.build_fn:
        return
    if config.auto_delete_generated_templates:
        logger.debug("Deleting page templates...")
        for template in self.build_info.templates:
            assert template.filename
            path = pathlib.Path(config.theme.custom_dir) / template.filename
            path.unlink(missing_ok=True)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_post_build
    options:
      show_source: False
      show_root_toc_entry: False
During this phase we set the edit paths.
Source: `mkdocs_mknodes.plugin.MkNodesPlugin.on_pre_page`

```python
def on_pre_page(
    self,
    page: Page,
    /,
    *,
    config: MkDocsConfig,
    files: Files,
) -> Page | None:
    """During this phase we set the edit paths."""
    node = self.build_info.page_mapping.get(page.file.src_uri)
    edit_path = node._edit_path if isinstance(node, mk.MkPage) else None
    cfg = mkdocsconfig.Config(config)
    if path := cfg.get_edit_url(edit_path):
        page.edit_url = path
    return page
```

Hook info

::: mkdocs.plugins.BasePlugin.on_pre_page
    options:
      show_source: False
      show_root_toc_entry: False
During this phase links get replaced and `jinja2` content gets rendered.
Source: `mkdocs_mknodes.plugin.MkNodesPlugin.on_page_markdown`

```python
def on_page_markdown(
    self,
    markdown: str,
    /,
    *,
    page: Page,
    config: MkDocsConfig,
    files: Files,
) -> str | None:
    """During this phase links get replaced and `jinja2` stuff get rendered."""
    return self.link_replacer.replace(markdown, page.file.src_uri)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_page_markdown
    options:
      show_source: False
      show_root_toc_entry: False
## MkdocstringsPlugin
Instantiate our Markdown extension.

Hook for the [`on_config` event](https://www.mkdocs.org/user-guide/plugins/#on_config).
In this hook, we instantiate our [`MkdocstringsExtension`][mkdocstrings.extension.MkdocstringsExtension]
and add it to the list of Markdown extensions used by `mkdocs`.

We pass this plugin's configuration dictionary to the extension when instantiating it (it will need it
later when processing Markdown to get handlers and their global configurations).

Arguments:

- `config`: The MkDocs config object.

Returns:

- The modified config.
Source: `mkdocstrings.plugin.MkdocstringsPlugin.on_config`

```python
def on_config(self, config: MkDocsConfig) -> MkDocsConfig | None:
    """Instantiate our Markdown extension.

    Hook for the [`on_config` event](https://www.mkdocs.org/user-guide/plugins/#on_config).
    In this hook, we instantiate our [`MkdocstringsExtension`][mkdocstrings.extension.MkdocstringsExtension]
    and add it to the list of Markdown extensions used by `mkdocs`.

    We pass this plugin's configuration dictionary to the extension when instantiating it (it will need it
    later when processing markdown to get handlers and their global configurations).

    Arguments:
        config: The MkDocs config object.

    Returns:
        The modified config.
    """
    if not self.plugin_enabled:
        log.debug("Plugin is not enabled. Skipping.")
        return config
    log.debug("Adding extension to the list")
    theme_name = config.theme.name or os.path.dirname(config.theme.dirs[0])
    to_import: InventoryImportType = []
    for handler_name, conf in self.config.handlers.items():
        for import_item in conf.pop("import", ()):
            if isinstance(import_item, str):
                import_item = {"url": import_item}  # noqa: PLW2901
            to_import.append((handler_name, import_item))
    extension_config = {
        "theme_name": theme_name,
        "mdx": config.markdown_extensions,
        "mdx_configs": config.mdx_configs,
        "mkdocstrings": self.config,
        "mkdocs": config,
    }
    self._handlers = Handlers(extension_config)
    autorefs: AutorefsPlugin
    try:
        # If autorefs plugin is explicitly enabled, just use it.
        autorefs = config.plugins["autorefs"]  # type: ignore[assignment]
        log.debug("Picked up existing autorefs instance %r", autorefs)
    except KeyError:
        # Otherwise, add a limited instance of it that acts only on what's added through `register_anchor`.
        autorefs = AutorefsPlugin()
        autorefs.config = AutorefsConfig()
        autorefs.scan_toc = False
        config.plugins["autorefs"] = autorefs
        log.debug("Added a subdued autorefs instance %r", autorefs)
    # Add collector-based fallback in either case.
    autorefs.get_fallback_anchor = self.handlers.get_anchors
    mkdocstrings_extension = MkdocstringsExtension(extension_config, self.handlers, autorefs)
    config.markdown_extensions.append(mkdocstrings_extension)  # type: ignore[arg-type]
    config.extra_css.insert(0, self.css_filename)  # So that it has lower priority than user files.
    self._inv_futures = {}
    if to_import:
        inv_loader = futures.ThreadPoolExecutor(4)
        for handler_name, import_item in to_import:
            loader = self.get_handler(handler_name).load_inventory
            future = inv_loader.submit(
                self._load_inventory,  # type: ignore[misc]
                loader,
                **import_item,
            )
            self._inv_futures[future] = (loader, import_item)
        inv_loader.shutdown(wait=False)
    return config
```

Hook info

::: mkdocs.plugins.BasePlugin.on_config
    options:
      show_source: False
      show_root_toc_entry: False
Extra actions that need to happen after all Markdown rendering and before HTML rendering.

Hook for the [`on_env` event](https://www.mkdocs.org/user-guide/plugins/#on_env).

- Write mkdocstrings' extra files into the site dir.
- Gather results from background inventory download tasks.
Source: `mkdocstrings.plugin.MkdocstringsPlugin.on_env`

```python
def on_env(self, env: Environment, config: MkDocsConfig, *args: Any, **kwargs: Any) -> None:  # noqa: ARG002
    """Extra actions that need to happen after all Markdown rendering and before HTML rendering.

    Hook for the [`on_env` event](https://www.mkdocs.org/user-guide/plugins/#on_env).

    - Write mkdocstrings' extra files into the site dir.
    - Gather results from background inventory download tasks.
    """
    if not self.plugin_enabled:
        return
    if self._handlers:
        css_content = "\n".join(handler.extra_css for handler in self.handlers.seen_handlers)
        write_file(css_content.encode("utf-8"), os.path.join(config.site_dir, self.css_filename))
        if self.inventory_enabled:
            log.debug("Creating inventory file objects.inv")
            inv_contents = self.handlers.inventory.format_sphinx()
            write_file(inv_contents, os.path.join(config.site_dir, "objects.inv"))
    if self._inv_futures:
        log.debug("Waiting for %s inventory download(s)", len(self._inv_futures))
        futures.wait(self._inv_futures, timeout=30)
        results = {}
        # Reversed order so that pages from first futures take precedence:
        for fut in reversed(list(self._inv_futures)):
            try:
                results.update(fut.result())
            except Exception as error:  # noqa: BLE001
                loader, import_item = self._inv_futures[fut]
                loader_name = loader.__func__.__qualname__
                log.error("Couldn't load inventory %s through %s: %s", import_item, loader_name, error)  # noqa: TRY400
        for page, identifier in results.items():
            config.plugins["autorefs"].register_url(page, identifier)  # type: ignore[attr-defined]
        self._inv_futures = {}
```

Hook info

::: mkdocs.plugins.BasePlugin.on_env
    options:
      show_source: False
      show_root_toc_entry: False
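The submit-early/gather-late pattern shared by `on_config` and `on_env` is standard `concurrent.futures`. A self-contained sketch, with a hypothetical `fetch_inventory` and URL in place of the real handler loaders:

```python
from concurrent import futures


def fetch_inventory(url: str) -> dict[str, str]:
    """Hypothetical loader returning an identifier -> URL mapping."""
    return {f"identifier_from_{url}": url}


pool = futures.ThreadPoolExecutor(4)
pending = {
    pool.submit(fetch_inventory, url): url
    for url in ["https://example.org/objects.inv"]
}
pool.shutdown(wait=False)  # don't block: the build keeps going

# ...later, once the mapping is actually needed:
futures.wait(pending, timeout=30)
results: dict[str, str] = {}
for fut in reversed(list(pending)):  # earlier submissions take precedence
    try:
        results.update(fut.result())
    except Exception as error:
        print(f"Couldn't load inventory {pending[fut]}: {error}")
print(results)
```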
Teardown the handlers.

Hook for the [`on_post_build` event](https://www.mkdocs.org/user-guide/plugins/#on_post_build).
This hook is used to tear down all the handlers that were instantiated and cached during documentation buildup.

For example, a handler could open a subprocess in the background and keep it open
to feed it "autodoc" instructions and get back JSON data. If so, it should then close the subprocess at some point:
the proper place to do this is in the handler's `teardown` method, which is indirectly called by this hook.

Arguments:

- `config`: The MkDocs config object.
- `**kwargs`: Additional arguments passed by MkDocs.
Source: `mkdocstrings.plugin.MkdocstringsPlugin.on_post_build`

```python
def on_post_build(
    self,
    config: MkDocsConfig,  # noqa: ARG002
    **kwargs: Any,  # noqa: ARG002
) -> None:
    """Teardown the handlers.

    Hook for the [`on_post_build` event](https://www.mkdocs.org/user-guide/plugins/#on_post_build).
    This hook is used to teardown all the handlers that were instantiated and cached during documentation buildup.

    For example, a handler could open a subprocess in the background and keep it open
    to feed it "autodoc" instructions and get back JSON data. If so, it should then close the subprocess at some point:
    the proper place to do this is in the handler's `teardown` method, which is indirectly called by this hook.

    Arguments:
        config: The MkDocs config object.
        **kwargs: Additional arguments passed by MkDocs.
    """
    if not self.plugin_enabled:
        return
    for future in self._inv_futures:
        future.cancel()
    if self._handlers:
        log.debug("Tearing handlers down")
        self.handlers.teardown()
```

Hook info

::: mkdocs.plugins.BasePlugin.on_post_build
    options:
      show_source: False
      show_root_toc_entry: False
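A hedged sketch of the subprocess scenario the docstring describes; `SubprocessHandler` is hypothetical, not an actual mkdocstrings class:

```python
import subprocess
import sys


class SubprocessHandler:
    """Hypothetical handler keeping a helper process alive for the build."""

    def __init__(self) -> None:
        # e.g. a long-lived "autodoc" helper, fed instructions over stdin
        self.proc = subprocess.Popen(
            [sys.executable, "-c", "import sys; sys.stdin.read()"],
            stdin=subprocess.PIPE,
        )

    def teardown(self) -> None:
        # The natural place to call this is (indirectly) from `on_post_build`.
        self.proc.stdin.close()
        self.proc.wait(timeout=5)


handler = SubprocessHandler()
handler.teardown()
```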
## BlogPlugin
Source: `material.plugins.blog.plugin.BlogPlugin.on_startup`

```python
def on_startup(self, *, command, dirty):
    self.is_serve = command == "serve"
    self.is_dirty = dirty
```

Hook info

::: mkdocs.plugins.BasePlugin.on_startup
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.blog.plugin.BlogPlugin.on_shutdown`

```python
def on_shutdown(self):
    rmtree(self.temp_dir)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_shutdown
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.blog.plugin.BlogPlugin.on_config`

```python
def on_config(self, config):
    if not self.config.enabled:
        return
    # Initialize entrypoint
    self.blog: View
    # Initialize and resolve authors, if enabled
    if self.config.authors:
        self.authors = self._resolve_authors(config)
    # Initialize table of contents settings
    if not isinstance(self.config.archive_toc, bool):
        self.config.archive_toc = self.config.blog_toc
    if not isinstance(self.config.categories_toc, bool):
        self.config.categories_toc = self.config.blog_toc
    # By default, drafts are rendered when the documentation is served,
    # but not when it is built, for a better user experience
    if self.is_serve and self.config.draft_on_serve:
        self.config.draft = True
```

Hook info

::: mkdocs.plugins.BasePlugin.on_config
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.blog.plugin.BlogPlugin.on_files`

```python
@event_priority(-50)
def on_files(self, files, *, config):
    if not self.config.enabled:
        return
    # Resolve path to entrypoint and site directory
    root = posixpath.normpath(self.config.blog_dir)
    site = config.site_dir
    # Compute and normalize path to posts directory
    path = self.config.post_dir.format(blog = root)
    path = posixpath.normpath(path)
    # Adjust destination paths for media files
    for file in files.media_files():
        if not file.src_uri.startswith(path):
            continue
        # We need to adjust destination paths for assets to remove the
        # purely functional posts directory prefix when building
        file.dest_uri = file.dest_uri.replace(path, root)
        file.abs_dest_path = os.path.join(site, file.dest_path)
        file.url = file.url.replace(path, root)
    # Resolve entrypoint and posts sorted by descending date - if the posts
    # directory or entrypoint do not exist, they are automatically created
    self.blog = self._resolve(files, config)
    self.blog.posts = sorted(
        self._resolve_posts(files, config),
        key = lambda post: post.config.date.created,
        reverse = True
    )
    # Generate views for archive
    if self.config.archive:
        self.blog.views.extend(
            self._generate_archive(config, files)
        )
    # Generate views for categories
    if self.config.categories:
        self.blog.views.extend(sorted(
            self._generate_categories(config, files),
            key = lambda view: view.name,
            reverse = False
        ))
    # Generate pages for views
    if self.config.pagination:
        for view in self._resolve_views(self.blog):
            for page in self._generate_pages(view, config, files):
                view.pages.append(page)
    # Ensure that entrypoint is always included in navigation
    self.blog.file.inclusion = InclusionLevel.INCLUDED
```

Hook info

::: mkdocs.plugins.BasePlugin.on_files
    options:
      show_source: False
      show_root_toc_entry: False
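Several Material hooks above and below carry an `@event_priority` decorator, which orders hooks across plugins: higher values run earlier, the default is 0. A minimal usage sketch:

```python
from mkdocs.plugins import BasePlugin, event_priority


class LatePlugin(BasePlugin):
    @event_priority(-50)  # negative priority: run after default-priority hooks
    def on_files(self, files, *, config):
        return files
```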
Source: `material.plugins.blog.plugin.BlogPlugin.on_nav`

```python
@event_priority(-50)
def on_nav(self, nav, *, config, files):
    if not self.config.enabled:
        return
    # If we're not building a standalone blog, the entrypoint will always
    # have a parent when it is included in the navigation. The parent is
    # essential to correctly resolve the location where the archive and
    # category views are attached. If the entrypoint doesn't have a parent,
    # we know that the author did not include it in the navigation, so we
    # explicitly mark it as not included.
    if not self.blog.parent and self.config.blog_dir != ".":
        self.blog.file.inclusion = InclusionLevel.NOT_IN_NAV
    # Attach posts to entrypoint without adding them to the navigation, so
    # that the entrypoint is considered to be the active page for each post
    self._attach(self.blog, [None, *reversed(self.blog.posts), None])
    for post in self.blog.posts:
        post.file.inclusion = InclusionLevel.NOT_IN_NAV
    # Revert temporary exclusion of views from navigation
    for view in self._resolve_views(self.blog):
        view.file.inclusion = self.blog.file.inclusion
        for page in view.pages:
            page.file.inclusion = self.blog.file.inclusion
    # Attach views for archive
    if self.config.archive:
        title = self._translate(self.config.archive_name, config)
        views = [_ for _ in self.blog.views if isinstance(_, Archive)]
        # Attach and link views for archive
        if self.blog.file.inclusion.is_in_nav():
            self._attach_to(self.blog, Section(title, views), nav)
    # Attach views for categories
    if self.config.categories:
        title = self._translate(self.config.categories_name, config)
        views = [_ for _ in self.blog.views if isinstance(_, Category)]
        # Attach and link views for categories, if any
        if self.blog.file.inclusion.is_in_nav() and views:
            self._attach_to(self.blog, Section(title, views), nav)
    # Attach pages for views
    if self.config.pagination:
        for view in self._resolve_views(self.blog):
            for at in range(1, len(view.pages)):
                self._attach_at(view.parent, view, view.pages[at])
```

Hook info

::: mkdocs.plugins.BasePlugin.on_nav
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.blog.plugin.BlogPlugin.on_env`

```python
def on_env(self, env, *, config, files):
    if not self.config.enabled:
        return

    # Filter for formatting dates related to posts
    def date_filter(date: datetime):
        return self._format_date_for_post(date, config)

    # Patch URL template filter to add support for paginated views, i.e.,
    # that paginated views never link to themselves but to the main view
    @pass_context
    def url_filter_with_pagination(context: Context, url: str | None):
        page = context["page"]

        # If the current page is a view, check if the URL links to the page
        # itself, and replace it with the URL of the main view
        if isinstance(page, View):
            view = self._resolve_original(page)
            if page.url == url:
                url = view.url

        # Forward to original template filter
        return url_filter(context, url)

    # Register custom template filters
    env.filters["date"] = date_filter
    env.filters["url"] = url_filter_with_pagination
```

Hook info

::: mkdocs.plugins.BasePlugin.on_env
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.blog.plugin.BlogPlugin.on_page_markdown`

```python
@event_priority(-50)
def on_page_markdown(self, markdown, *, page, config, files):
    if not self.config.enabled:
        return
    # Skip if page is not a post managed by this instance - this plugin has
    # support for multiple instances, which is why this check is necessary
    if page not in self.blog.posts:
        if not self.config.pagination:
            return
        # We set the contents of the view to its title if pagination should
        # not keep the content of the original view on paginated views
        if not self.config.pagination_keep_content:
            view = self._resolve_original(page)
            if view in self._resolve_views(self.blog):
                # If the current view is paginated, use the rendered title
                # of the original view in case the author set the title in
                # the page's contents, or it would be overridden with the
                # one set in mkdocs.yml, leading to inconsistent headings
                assert isinstance(view, View)
                if view != page:
                    name = view._title_from_render or view.title
                    return f"# {name}"
        # Nothing more to be done for views
        return
    # Extract and assign authors to post, if enabled
    if self.config.authors:
        for name in page.config.authors:
            if name not in self.authors:
                raise PluginError(f"Couldn't find author '{name}'")
            # Append to list of authors
            page.authors.append(self.authors[name])
    # Extract settings for excerpts
    separator = self.config.post_excerpt_separator
    max_authors = self.config.post_excerpt_max_authors
    max_categories = self.config.post_excerpt_max_categories
    # Ensure presence of separator and throw, if its absent and required -
    # we append the separator to the end of the contents of the post, if it
    # is not already present, so we can remove footnotes or other content
    # from the excerpt without affecting the content of the excerpt
    if separator not in page.markdown:
        if self.config.post_excerpt == "required":
            docs = os.path.relpath(config.docs_dir)
            path = os.path.relpath(page.file.abs_src_path, docs)
            raise PluginError(
                f"Couldn't find '{separator}' in post '{path}' in '{docs}'"
            )
    # Create excerpt for post and inherit authors and categories - excerpts
    # can contain a subset of the authors and categories of the post
    page.excerpt = Excerpt(page, config, files)
    page.excerpt.authors = page.authors[:max_authors]
    page.excerpt.categories = page.categories[:max_categories]
```

Hook info

::: mkdocs.plugins.BasePlugin.on_page_markdown
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.blog.plugin.BlogPlugin.on_page_content`

```python
def on_page_content(self, html, *, page, config, files):
    if not self.config.enabled:
        return
    # Skip if page is not a post managed by this instance - this plugin has
    # support for multiple instances, which is why this check is necessary
    if page not in self.blog.posts:
        return
    # Compute readtime of post, if enabled and not explicitly set
    if self.config.post_readtime:
        words_per_minute = self.config.post_readtime_words_per_minute
        if not page.config.readtime:
            page.config.readtime = readtime(html, words_per_minute)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_page_content
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.blog.plugin.BlogPlugin.on_page_context`

```python
@event_priority(-100)
def on_page_context(self, context, *, page, config, nav):
    if not self.config.enabled:
        return
    # Skip if page is not a view managed by this instance - this plugin has
    # support for multiple instances, which is why this check is necessary
    view = self._resolve_original(page)
    if view not in self._resolve_views(self.blog):
        return
    # Render excerpts and prepare pagination
    posts, pagination = self._render(page)

    # Render pagination links
    def pager(args: object):
        return pagination.pager(
            format = self.config.pagination_format,
            show_if_single_page = self.config.pagination_if_single_page,
            **args
        )

    # Assign posts and pagination to context
    context["posts"] = posts
    context["pagination"] = pager if pagination else None
```

Hook info

::: mkdocs.plugins.BasePlugin.on_page_context
    options:
      show_source: False
      show_root_toc_entry: False
## GroupPlugin
Source: `material.plugins.group.plugin.GroupPlugin.on_startup`

```python
def on_startup(self, *, command, dirty):
    self.is_serve = command == "serve"
    self.is_dirty = dirty
```

Hook info

::: mkdocs.plugins.BasePlugin.on_startup
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.group.plugin.GroupPlugin.on_config`

```python
@event_priority(150)
def on_config(self, config):
    if not self.config.enabled:
        return
    # Retrieve plugin collection from configuration
    option: Plugins = dict(config._schema)["plugins"]
    assert isinstance(option, Plugins)
    # Load all plugins in group
    self.plugins: dict[str, BasePlugin] = {}
    try:
        for name, plugin in self._load(option):
            self.plugins[name] = plugin

    # The plugin could not be loaded, likely because it's not installed or
    # misconfigured, so we raise a plugin error for a nicer error message
    except Exception as e:
        raise PluginError(str(e))
    # Patch order of plugin methods
    for events in option.plugins.events.values():
        self._patch(events, config)
    # Invoke `on_startup` event for plugins in group
    command = "serve" if self.is_serve else "build"
    for method in option.plugins.events["startup"]:
        plugin = self._get_plugin(method)
        # Ensure that we have a method bound to a plugin (and not a hook)
        if plugin and plugin in self.plugins.values():
            method(command = command, dirty = self.is_dirty)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_config
    options:
      show_source: False
      show_root_toc_entry: False
## InfoPlugin
Source: `material.plugins.info.plugin.InfoPlugin.on_startup`

```python
def on_startup(self, *, command, dirty):
    self.is_serve = command == "serve"
```

Hook info

::: mkdocs.plugins.BasePlugin.on_startup
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.info.plugin.InfoPlugin.on_config`

```python
@event_priority(100)
def on_config(self, config):
    if not self.config.enabled:
        return
    # By default, the plugin is disabled when the documentation is served,
    # but not when it is built. This should nicely align with the expected
    # user experience when creating reproductions.
    if not self.config.enabled_on_serve and self.is_serve:
        return
    # Resolve latest version
    url = "https://github.com/squidfunk/mkdocs-material/releases/latest"
    res = requests.get(url, allow_redirects = False)
    # Check if we're running the latest version
    _, current = res.headers.get("location").rsplit("/", 1)
    present = version("mkdocs-material")
    if not present.startswith(current):
        log.error("Please upgrade to the latest version.")
        self._help_on_versions_and_exit(present, current)
    # Exit if archive creation is disabled
    if not self.config.archive:
        sys.exit(1)
    # Print message that we're creating a bug report
    log.info("Started archive creation for bug report")
    # Check that there are no overrides in place - we need to use a little
    # hack to detect whether the custom_dir setting was used without parsing
    # mkdocs.yml again - we check at which position the directory provided
    # by the theme resides, and if it's not the first one, abort.
    if config.theme.custom_dir:
        log.error("Please remove 'custom_dir' setting.")
        self._help_on_customizations_and_exit()
    # Check that there are no hooks in place - hooks can alter the behavior
    # of MkDocs in unpredictable ways, which is why they must be considered
    # being customizations. Thus, we can't offer support for debugging and
    # must abort here.
    if config.hooks:
        log.error("Please remove 'hooks' setting.")
        self._help_on_customizations_and_exit()
    # Assure all paths that will be validated are absolute. Convert possible
    # relative config_file_path to absolute. Its absolute directory path is
    # being later used to resolve other paths.
    config.config_file_path = _convert_to_abs(config.config_file_path)
    config_file_parent = os.path.dirname(config.config_file_path)
    # Convert relative custom_dir path to absolute. The Theme.custom_dir
    # property cannot be set, therefore a helper variable is used.
    if config.theme.custom_dir:
        abs_custom_dir = _convert_to_abs(
            config.theme.custom_dir,
            abs_prefix = config_file_parent
        )
    else:
        abs_custom_dir = ""
    # Extract the absolute path to projects plugin's directory to explicitly
    # support path validation and dynamic exclusion for the plugin
    projects_plugin = config.plugins.get("material/projects")
    if projects_plugin:
        abs_projects_dir = _convert_to_abs(
            projects_plugin.config.projects_dir,
            abs_prefix = config_file_parent
        )
    else:
        abs_projects_dir = ""
    # MkDocs removes the INHERIT configuration key during load, and doesn't
    # expose the information in any way, as the parent configuration is
    # merged into one. To validate that the INHERIT config file will be
    # included in the ZIP file the current config file must be loaded again
    # without parsing. Each file can have their own INHERIT key, so a list
    # of configurations is supported. The INHERIT path is converted during
    # load to absolute.
    loaded_configs = _load_yaml(config.config_file_path)
    if not isinstance(loaded_configs, list):
        loaded_configs = [loaded_configs]
    # We need to make sure the user put every file in the current working
    # directory. To assure the reproduction inside the ZIP file can be run,
    # validate that the MkDocs paths are children of the current root.
    paths_to_validate = [
        config.config_file_path,
        config.docs_dir,
        abs_custom_dir,
        abs_projects_dir,
        *[cfg.get("INHERIT", "") for cfg in loaded_configs]
    ]
    # Convert relative hook paths to absolute path
    for hook in config.hooks:
        path = _convert_to_abs(hook, abs_prefix = config_file_parent)
        paths_to_validate.append(path)
    # Remove valid paths from the list
    for path in list(paths_to_validate):
        if not path or path.startswith(os.getcwd()):
            paths_to_validate.remove(path)
    # Report the invalid paths to the user
    if paths_to_validate:
        log.error(f"One or more paths aren't children of root")
        self._help_on_not_in_cwd(paths_to_validate)
    # Create in-memory archive and prompt author for a short descriptive
    # name for the archive, which is also used as the directory name. Note
    # that the name is slugified for better readability and stripped of any
    # file extension that the author might have entered.
    archive = BytesIO()
    example = input("\nPlease name your bug report (2-4 words): ")
    example, _ = os.path.splitext(example)
    example = "-".join([present, slugify(example, "-")])
    # Get local copy of the exclusion patterns
    self.exclusion_patterns = get_exclusion_patterns()
    self.excluded_entries = []
    # Exclude the site_dir at project root
    if config.site_dir.startswith(os.getcwd()):
        self.exclusion_patterns.append(_resolve_pattern(config.site_dir))
    # Exclude the Virtual Environment directory. site.getsitepackages() has
    # inconsistent results across operating systems, and relies on the
    # PREFIXES that will contain the absolute path to the activated venv.
    for path in site.PREFIXES:
        if path.startswith(os.getcwd()):
            self.exclusion_patterns.append(_resolve_pattern(path))
    # Exclude site_dir for projects
    if projects_plugin:
        for path in glob.iglob(
            pathname = projects_plugin.config.projects_config_files,
            root_dir = abs_projects_dir,
            recursive = True
        ):
            current_config_file = os.path.join(abs_projects_dir, path)
            project_config = _get_project_config(current_config_file)
            pattern = _resolve_pattern(project_config.site_dir)
            self.exclusion_patterns.append(pattern)
    # Track dotpath inclusion to inform about it later
    contains_dotpath: bool = False
    # Create self-contained example from project
    files: list[str] = []
    with ZipFile(archive, "a", ZIP_DEFLATED, False) as f:
        for abs_root, dirnames, filenames in os.walk(os.getcwd()):
            # Set and print progress indicator
            indicator = f"Processing: {abs_root}"
            print(indicator, end="\r", flush=True)
            # Prune the folders in-place to prevent their processing
            for name in list(dirnames):
                # Resolve the absolute directory path
                path = os.path.join(abs_root, name)
                # Exclude the directory and all subdirectories
                if self._is_excluded(path):
                    dirnames.remove(name)
                    continue
                # Warn about .dotdirectories
                if _is_dotpath(path, log_warning = True):
                    contains_dotpath = True
            # Write files to the in-memory archive
            for name in filenames:
                # Resolve the absolute file path
                path = os.path.join(abs_root, name)
                # Exclude the file
                if self._is_excluded(path):
                    continue
                # Warn about .dotfiles
                if _is_dotpath(path, log_warning = True):
                    contains_dotpath = True
                # Resolve the relative path to create a matching structure
                path = os.path.relpath(path, os.path.curdir)
                f.write(path, os.path.join(example, path))
            # Clear the line for the next indicator
            print(" " * len(indicator), end="\r", flush=True)
        # Add information on installed packages
        f.writestr(
            os.path.join(example, "requirements.lock.txt"),
            "\n".join(sorted([
                "==".join([package.name, package.version])
                for package in distributions()
            ]))
        )
        # Add information on platform
        f.writestr(
            os.path.join(example, "platform.json"),
            json.dumps(
                {
                    "system": platform.platform(),
                    "architecture": platform.architecture(),
                    "python": platform.python_version(),
                    "cwd": os.getcwd(),
                    "command": " ".join([
                        sys.argv[0].rsplit(os.sep, 1)[-1],
                        *sys.argv[1:]
                    ]),
                    "env:$PYTHONPATH": os.getenv("PYTHONPATH", ""),
                    "sys.path": sys.path,
                    "excluded_entries": self.excluded_entries
                },
                default = str,
                indent = 2
            )
        )
        # Retrieve list of processed files
        for a in f.filelist:
            # Highlight .dotpaths in a more explicit manner
            color = (Fore.LIGHTYELLOW_EX if "/." in a.filename
                     else Fore.LIGHTBLACK_EX)
            files.append("".join([
                color, a.filename, " ",
                _size(a.compress_size)
            ]))
    # Finally, write archive to disk
    buffer = archive.getbuffer()
    with open(f"{example}.zip", "wb") as f:
        f.write(archive.getvalue())
    # Print summary
    log.info("Archive successfully created:")
    print(Style.NORMAL)
    # Print archive file names
    files.sort()
    for file in files:
        print(f"  {file}")
    # Print archive name
    print(Style.RESET_ALL)
    print("".join([
        "  ", f.name, " ",
        _size(buffer.nbytes, 10)
    ]))
    # Print warning when file size is excessively large
    print(Style.RESET_ALL)
    if buffer.nbytes > 1000000:
        log.warning("Archive exceeds recommended maximum size of 1 MB")
    # Print warning when file contains hidden .dotpaths
    if contains_dotpath:
        log.warning(
            "Archive contains dotpaths, which could contain sensitive "
            "information.\nPlease review them at the bottom of the list "
            "and share only necessary data to reproduce the issue."
        )
    # Aaaaaand done
    sys.exit(1)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_config
    options:
      show_source: False
      show_root_toc_entry: False
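The version check at the top of this hook leans on GitHub answering `releases/latest` with a redirect whose `Location` header ends in the version tag. That part in isolation, using standard `requests` behavior:

```python
import requests

url = "https://github.com/squidfunk/mkdocs-material/releases/latest"
res = requests.get(url, allow_redirects=False)
# Assumes the redirect is present, as the plugin does; the header lookup
# is case-insensitive in requests.
_, latest = res.headers.get("location").rsplit("/", 1)
print(f"Latest mkdocs-material release: {latest}")
```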
## OfflinePlugin
Source: `material.plugins.offline.plugin.OfflinePlugin.on_config`

```python
def on_config(self, config):
    if not self.config.enabled:
        return
    # Ensure correct resolution of links when viewing the site from the
    # file system by disabling directory URLs
    config.use_directory_urls = False
    # Append iframe-worker to polyfills/shims
    config.extra["polyfills"] = config.extra.get("polyfills", [])
    if not any("iframe-worker" in url for url in config.extra["polyfills"]):
        script = "https://unpkg.com/iframe-worker/shim"
        config.extra["polyfills"].append(script)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_config
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.offline.plugin.OfflinePlugin.on_post_build`

```python
@event_priority(-100)
def on_post_build(self, *, config):
    if not self.config.enabled:
        return
    # Ensure presence of search index
    path = os.path.join(config.site_dir, "search")
    file = os.path.join(path, "search_index.json")
    if not os.path.isfile(file):
        return
    # Obtain search index contents
    with open(file, encoding = "utf-8") as f:
        data = f.read()
    # Inline search index contents into script
    file = os.path.join(path, "search_index.js")
    with open(file, "w", encoding = "utf-8") as f:
        f.write(f"var __index = {data}")
```

Hook info

::: mkdocs.plugins.BasePlugin.on_post_build
    options:
      show_source: False
      show_root_toc_entry: False
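What the inlining step produces: a script defining a global the search worker can read without XHR when the site is opened from `file://`. A stand-in sketch with dummy index contents:

```python
import json

data = json.dumps({"docs": [], "config": {}})  # stand-in index contents
with open("search_index.js", "w", encoding="utf-8") as f:
    # Same shape as the plugin's output: a single global assignment.
    f.write(f"var __index = {data}")
```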
## PrivacyPlugin
Source: `material.plugins.privacy.plugin.PrivacyPlugin.on_config`

```python
def on_config(self, config):
    self.site = urlparse(config.site_url or "")
    if not self.config.enabled:
        return
    # Initialize thread pool
    self.pool = ThreadPoolExecutor(self.config.concurrency)
    self.pool_jobs: list[Future] = []
    # Initialize collections of external assets
    self.assets = Files([])
    self.assets_expr_map = {
        ".css": r"url\(\s*([\"']?)(?P<url>http?[^)'\"]+)\1\s*\)",
        ".js": r"[\"'](?P<url>http[^\"']+\.(?:css|js(?:on)?))[\"']",
        **self.config.assets_expr_map
    }
```

Hook info

::: mkdocs.plugins.BasePlugin.on_config
    options:
      show_source: False
      show_root_toc_entry: False
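The two default patterns in `assets_expr_map` can be exercised directly; the patterns below are copied from the source above, while the inputs are illustrative:

```python
import re

# `.css` pattern: captures the URL inside url(...), honoring the quote style.
css_expr = r"url\(\s*([\"']?)(?P<url>http?[^)'\"]+)\1\s*\)"
m = re.search(css_expr, "body { background: url('https://example.org/bg.png') }")
print(m["url"])  # https://example.org/bg.png

# `.js` pattern: captures quoted http(s) URLs ending in .css/.js/.json.
js_expr = r"[\"'](?P<url>http[^\"']+\.(?:css|js(?:on)?))[\"']"
m = re.search(js_expr, 'import("https://example.org/bundle.js")')
print(m["url"])  # https://example.org/bundle.js
```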
Source: `material.plugins.privacy.plugin.PrivacyPlugin.on_files`

```python
@event_priority(-100)
def on_files(self, files, *, config):
    if not self.config.enabled:
        return
    # Skip if external assets must not be processed
    if not self.config.assets:
        return
    # Find all external style sheet and script files that are provided as
    # part of the build (= already known to MkDocs on startup)
    for initiator in files.media_files():
        file = None
        # Check if the file has dependent external assets that must be
        # downloaded. Create and enqueue a job for each external asset.
        for url in self._parse_media(initiator):
            if not self._is_excluded(url, initiator):
                file = self._queue(url, config, concurrent = True)
            # If site URL is not given, ensure that Mermaid.js is always
            # present. This is a special case, as Material for MkDocs
            # automatically loads Mermaid.js when a Mermaid diagram is
            # found in the page - https://bit.ly/36tZXsA.
            if "mermaid.min.js" in url.path and not config.site_url:
                script = ExtraScriptValue(url.geturl())
                if script not in config.extra_javascript:
                    config.extra_javascript.append(script)
        # The local asset references at least one external asset, which
        # means we must download and replace them later
        if file:
            self.assets.append(initiator)
            files.remove(initiator)
    # Process external style sheet files
    for path in config.extra_css:
        url = urlparse(path)
        if not self._is_excluded(url):
            self._queue(url, config, concurrent = True)
    # Process external script files
    for script in config.extra_javascript:
        if isinstance(script, str):
            script = ExtraScriptValue(script)
        # Enqueue a job if the script needs to downloaded
        url = urlparse(script.path)
        if not self._is_excluded(url):
            self._queue(url, config, concurrent = True)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_files
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.privacy.plugin.PrivacyPlugin.on_env`

```python
def on_env(self, env, *, config, files):
    if not self.config.enabled:
        return
    # Wait until all jobs until now are finished
    wait(self.pool_jobs)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_env
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.privacy.plugin.PrivacyPlugin.on_post_build`

```python
@event_priority(50)
def on_post_build(self, *, config):
    if not self.config.enabled:
        return
    # Reconcile concurrent jobs and clear thread pool, as we will reuse the
    # same thread pool for patching all links to external assets
    wait(self.pool_jobs)
    self.pool_jobs.clear()
    # Spawn concurrent job to patch all links to dependent external asset
    # in all style sheet and script files
    for file in self.assets:
        _, extension = posixpath.splitext(file.dest_uri)
        if extension in [".css", ".js"]:
            self.pool_jobs.append(self.pool.submit(
                self._patch, file
            ))
        # Otherwise just copy external asset to output directory
        else:
            file.copy_file()
    # Reconcile concurrent jobs for the last time, so the plugins following
    # in the build process always have a consistent state to work with
    wait(self.pool_jobs)
    self.pool.shutdown()
```

Hook info

::: mkdocs.plugins.BasePlugin.on_post_build
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.privacy.plugin.PrivacyPlugin.on_post_template`

```python
@event_priority(-50)
def on_post_template(self, output_content, *, template_name, config):
    if not self.config.enabled:
        return
    # Skip sitemap.xml and other non-HTML files
    if not template_name.endswith(".html"):
        return
    # Parse and replace links to external assets in template
    initiator = File(template_name, config.docs_dir, config.site_dir, False)
    return self._parse_html(output_content, initiator, config)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_post_template
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.privacy.plugin.PrivacyPlugin.on_page_content`

```python
@event_priority(-100)
def on_page_content(self, html, *, page, config, files):
    if not self.config.enabled:
        return
    # Skip if external assets must not be processed
    if not self.config.assets:
        return
    # Find all external images and download them if not excluded
    for match in re.findall(
        r"<img[^>]+src=['\"]?http[^>]+>",
        html, flags = re.I | re.M
    ):
        el = self._parse_fragment(match)
        # Create and enqueue job to fetch external image
        url = urlparse(el.get("src"))
        if not self._is_excluded(url, page.file):
            self._queue(url, config, concurrent = True)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_page_content
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.privacy.plugin.PrivacyPlugin.on_post_page`

```python
@event_priority(-50)
def on_post_page(self, output, *, page, config):
    if not self.config.enabled:
        return
    # Parse and replace links to external assets
    return self._parse_html(output, page.file, config)
```

Hook info

::: mkdocs.plugins.BasePlugin.on_post_page
    options:
      show_source: False
      show_root_toc_entry: False
## SearchPlugin
Source: `material.plugins.search.plugin.SearchPlugin.on_startup`

```python
def on_startup(self, *, command, dirty):
    self.is_dirty = dirty
```

Hook info

::: mkdocs.plugins.BasePlugin.on_startup
    options:
      show_source: False
      show_root_toc_entry: False
Source: `material.plugins.search.plugin.SearchPlugin.on_serve`

```python
def on_serve(self, server, *, config, builder):
    self.is_dirtyreload = self.is_dirty
```

Hook info

::: mkdocs.plugins.BasePlugin.on_serve
    options:
      show_source: False
      show_root_toc_entry: False
Source
material.plugins.search.plugin.SearchPlugin.on_config |
---|
| def on_config(self, config):
if not self.config.enabled:
return
# Retrieve default value for language
if not self.config.lang:
self.config.lang = [self._translate(
config, "search.config.lang"
)]
# Retrieve default value for separator
if not self.config.separator:
self.config.separator = self._translate(
config, "search.config.separator"
)
# Retrieve default value for pipeline
if self.config.pipeline is None:
self.config.pipeline = list(filter(len, re.split(
r"\s*,\s*", self._translate(config, "search.config.pipeline")
)))
# Initialize search index
self.search_index = SearchIndex(**self.config)
# Set jieba dictionary, if given
if self.config.jieba_dict:
path = os.path.normpath(self.config.jieba_dict)
if os.path.isfile(path):
jieba.set_dictionary(path)
log.debug(f"Loading jieba dictionary: {path}")
else:
log.warning(
f"Configuration error for 'search.jieba_dict': "
f"'{self.config.jieba_dict}' does not exist."
)
# Set jieba user dictionary, if given
if self.config.jieba_dict_user:
path = os.path.normpath(self.config.jieba_dict_user)
if os.path.isfile(path):
jieba.load_userdict(path)
log.debug(f"Loading jieba user dictionary: {path}")
else:
log.warning(
f"Configuration error for 'search.jieba_dict_user': "
f"'{self.config.jieba_dict_user}' does not exist."
)
|
Hook info
::: mkdocs.plugins.BasePlugin.on_config
options:
show_source: False
show_root_toc_entry: False
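Two details of the config hook are easy to miss: the pipeline default is split from a comma-separated translation string, and `filter(len, ...)` drops empty items so a blank translation yields an empty pipeline rather than `['']`. A standalone check with a sample string:

```python
import re

value = "stemmer, stopWordFilter, trimmer"  # sample translation string
print(list(filter(len, re.split(r"\s*,\s*", value))))
# ['stemmer', 'stopWordFilter', 'trimmer']

print(list(filter(len, re.split(r"\s*,\s*", ""))))
# [] - a blank string produces no pipeline entries
```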
Source
material.plugins.search.plugin.SearchPlugin.on_post_build |
---|
| def on_post_build(self, *, config):
if not self.config.enabled:
return
# Write search index
base = os.path.join(config.site_dir, "search")
path = os.path.join(base, "search_index.json")
# Generate and write search index to file
data = self.search_index.generate_search_index(self.search_index_prev)
utils.write_file(data.encode("utf-8"), path)
# Persist search index for repeated invocation
if self.is_dirty:
self.search_index_prev = self.search_index
|
Hook info
::: mkdocs.plugins.BasePlugin.on_post_build
options:
show_source: False
show_root_toc_entry: False
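`mkdocs.utils.write_file` takes bytes and an output path and creates missing parent directories on the way, which is why the hook can join paths without preparing `site/search/` first. A hedged sketch with dummy index data:

```python
import json
import os
from mkdocs import utils

site_dir = "site"  # assumed; the hook reads this from config.site_dir
data = json.dumps({"config": {}, "docs": []})

path = os.path.join(site_dir, "search", "search_index.json")
utils.write_file(data.encode("utf-8"), path)  # creates site/search/ if needed
```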
Source
material.plugins.search.plugin.SearchPlugin.on_page_context |
---|
| def on_page_context(self, context, *, page, config, nav):
if not self.config.enabled:
return
# Index page
self.search_index.add_entry_from_context(page)
page.content = re.sub(
r"\s?data-search-\w+=\"[^\"]+\"",
"",
page.content
)
|
Hook info
::: mkdocs.plugins.BasePlugin.on_page_context
options:
show_source: False
show_root_toc_entry: False
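The `re.sub` above strips internal `data-search-*` control attributes once the page has been indexed, so they never reach the rendered output. On a made-up snippet:

```python
import re

content = '<h2 data-search-boost="2" id="usage">Usage</h2>'
print(re.sub(r"\s?data-search-\w+=\"[^\"]+\"", "", content))
# <h2 id="usage">Usage</h2>
```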
SocialPlugin
Source
material.plugins.social.plugin.SocialPlugin.on_config |
---|
| def on_config(self, config):
self.color = colors.get("indigo")
self.config.cards = self.config.enabled
if not self.config.cards:
return
# Check dependencies
if import_errors:
raise PluginError(
"Required dependencies of \"social\" plugin not found:\n"
+ str("\n".join(map(lambda x: "- " + x, import_errors)))
+ "\n\n--> Install with: pip install \"mkdocs-material[imaging]\""
)
if cairosvg_error:
raise PluginError(
"\"cairosvg\" Python module is installed, but it crashed with:\n"
+ cairosvg_error
+ "\n\n--> Check out the troubleshooting guide: https://t.ly/MfX6u"
)
# Move color options
if self.config.cards_color:
# Move background color to new option
value = self.config.cards_color.get("fill")
if value:
self.config.cards_layout_options["background_color"] = value
# Move color to new option
value = self.config.cards_color.get("text")
if value:
self.config.cards_layout_options["color"] = value
# Move font family to new option
if self.config.cards_font:
value = self.config.cards_font
self.config.cards_layout_options["font_family"] = value
# Check if site URL is defined
if not config.site_url:
log.warning(
"The \"site_url\" option is not set. The cards are generated, "
"but not linked, so they won't be visible on social media."
)
# Ensure presence of cache directory
self.cache = self.config.cache_dir
if not os.path.isdir(self.cache):
os.makedirs(self.cache)
# Retrieve palette from theme configuration
theme = config.theme
if "palette" in theme:
palette = theme["palette"]
# Find first palette that includes primary color definition
if isinstance(palette, list):
for p in palette:
if "primary" in p and p["primary"]:
palette = p
break
# Set colors according to palette
if "primary" in palette and palette["primary"]:
primary = palette["primary"].replace(" ", "-")
self.color = colors.get(primary, self.color)
# Retrieve color overrides
options = self.config.cards_layout_options
self.color = {
"fill": options.get("background_color", self.color["fill"]),
"text": options.get("color", self.color["text"])
}
# Retrieve logo and font
self._resized_logo_promise = self._executor.submit(self._load_resized_logo, config)
self.font = self._load_font(config)
self._image_promises = []
|
Hook info
::: mkdocs.plugins.BasePlugin.on_config
options:
show_source: False
show_root_toc_entry: False
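The palette handling above picks the first configured palette that defines a primary color, normalizes the name (spaces become hyphens), and falls back to the default when the name is unknown. A compact sketch with a made-up palette list and an abridged color table:

```python
colors = {  # abridged stand-in for the plugin's color table
    "indigo": {"fill": "#3f51b5", "text": "#ffffff"},
    "deep-purple": {"fill": "#673ab7", "text": "#ffffff"},
}

palette = [
    {"scheme": "default"},                        # no primary: skipped
    {"scheme": "slate", "primary": "deep purple"},
]

color = colors["indigo"]  # default, as in the plugin
if isinstance(palette, list):
    for p in palette:
        if p.get("primary"):
            palette = p
            break
if isinstance(palette, dict) and palette.get("primary"):
    color = colors.get(palette["primary"].replace(" ", "-"), color)
print(color)  # {'fill': '#673ab7', 'text': '#ffffff'}
```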
Source
material.plugins.social.plugin.SocialPlugin.on_post_build |
---|
| def on_post_build(self, config):
if not self.config.cards:
return
# Check for exceptions
for promise in self._image_promises:
promise.result()
|
Hook info
::: mkdocs.plugins.BasePlugin.on_post_build
options:
show_source: False
show_root_toc_entry: False
Source
material.plugins.social.plugin.SocialPlugin.on_page_markdown |
---|
| def on_page_markdown(self, markdown, page, config, files):
if not self.config.cards:
return
# Resolve image directory
directory = self.config.cards_dir
file, _ = os.path.splitext(page.file.src_path)
# Resolve path of image
path = "{}.png".format(os.path.join(
config.site_dir,
directory,
file
))
# Resolve path of image directory
directory = os.path.dirname(path)
if not os.path.isdir(directory):
os.makedirs(directory)
# Compute site name
site_name = config.site_name
# Compute page title and description
title = page.meta.get("title", page.title)
description = config.site_description or ""
if "description" in page.meta:
description = page.meta["description"]
# Check type of meta title - see https://t.ly/m1Us
if not isinstance(title, str):
log.error(
f"Page meta title of page '{page.file.src_uri}' must be a "
f"string, but is of type \"{type(title)}\"."
)
sys.exit(1)
# Check type of meta description - see https://t.ly/m1Us
if not isinstance(description, str):
log.error(
f"Page meta description of '{page.file.src_uri}' must be a "
f"string, but is of type \"{type(description)}\"."
)
sys.exit(1)
# Generate social card if not in cache
hash = md5("".join([
site_name,
str(title),
description
]).encode("utf-8"))
file = os.path.join(self.cache, f"{hash.hexdigest()}.png")
self._image_promises.append(self._executor.submit(
self._cache_image,
cache_path = file, dest_path = path,
render_function = lambda: self._render_card(site_name, title, description)
))
# Inject meta tags into page
meta = page.meta.get("meta", [])
page.meta["meta"] = meta + self._generate_meta(page, config)
|
Hook info
::: mkdocs.plugins.BasePlugin.on_page_markdown
options:
show_source: False
show_root_toc_entry: False
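Card rendering above is keyed by an MD5 digest over site name, title, and description, so a page whose metadata has not changed reuses the cached PNG across builds. Computing the same cache key standalone (sample strings are made up):

```python
from hashlib import md5

site_name = "My Project"
title = "Getting started"
description = "How to install and configure the project."

digest = md5("".join([site_name, title, description]).encode("utf-8"))
print(f"{digest.hexdigest()}.png")  # file name looked up in cache_dir
```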
TagsPlugin
Source
material.plugins.tags.plugin.TagsPlugin.on_config |
---|
| def on_config(self, config):
if not self.config.enabled:
return
# Skip if tags should not be built
if not self.config.tags:
return
# Initialize tags
self.tags = defaultdict(list)
self.tags_file = None
# Retrieve tags mapping from configuration
self.tags_map = config.extra.get("tags")
# Use override of slugify function
toc = { "slugify": slugify, "separator": "-" }
if "toc" in config.mdx_configs:
toc = { **toc, **config.mdx_configs["toc"] }
# Partially apply slugify function
self.slugify = lambda value: (
toc["slugify"](str(value), toc["separator"])
)
|
Hook info
::: mkdocs.plugins.BasePlugin.on_config
options:
show_source: False
show_root_toc_entry: False
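The lambda above partially applies the slugify function with the separator taken from the `toc` extension settings, so tags can be slugified with a single call. A sketch with a trivial stand-in slugify (the real one comes from the configured Markdown extension):

```python
def slugify(value, separator):  # stand-in for the toc extension's slugify
    return value.lower().replace(" ", separator)

toc = {"slugify": slugify, "separator": "-"}

slug = lambda value: toc["slugify"](str(value), toc["separator"])
print(slug("Getting Started"))  # getting-started
```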
Source
material.plugins.tags.plugin.TagsPlugin.on_nav |
---|
| def on_nav(self, nav, config, files):
if not self.config.enabled:
return
# Skip if tags should not be built
if not self.config.tags:
return
# Resolve tags index page
file = self.config.tags_file
if file:
self.tags_file = self._get_tags_file(files, file)
|
Hook info
::: mkdocs.plugins.BasePlugin.on_nav
options:
show_source: False
show_root_toc_entry: False
Source
material.plugins.tags.plugin.TagsPlugin.on_page_markdown |
---|
| def on_page_markdown(self, markdown, page, config, files):
if not self.config.enabled:
return
# Skip if tags should not be built
if not self.config.tags:
return
# Skip, if page is excluded
if page.file.inclusion.is_excluded():
return
# Render tags index page
if page.file == self.tags_file:
return self._render_tag_index(markdown)
# Add page to tags index
tags = page.meta.get("tags", [])
if tags:
for tag in tags:
self.tags[str(tag)].append(page)
|
Hook info
::: mkdocs.plugins.BasePlugin.on_page_markdown
options:
show_source: False
show_root_toc_entry: False
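Pages are grouped per tag in a `defaultdict(list)`, so the first page carrying a tag implicitly creates that tag's bucket. The same idiom with plain strings standing in for `Page` objects:

```python
from collections import defaultdict

tags = defaultdict(list)
pages = {"setup.md": ["install", "cli"], "usage.md": ["cli"]}

for page, page_tags in pages.items():
    for tag in page_tags:
        tags[str(tag)].append(page)

print(dict(tags))  # {'install': ['setup.md'], 'cli': ['setup.md', 'usage.md']}
```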
Source
material.plugins.tags.plugin.TagsPlugin.on_page_context |
---|
| def on_page_context(self, context, page, config, nav):
if not self.config.enabled:
return
# Skip if tags should not be built
if not self.config.tags:
return
# Provide tags for page
context["tags"] =[]
if "tags" in page.meta and page.meta["tags"]:
context["tags"] = [
self._render_tag(tag)
for tag in page.meta["tags"]
]
|
Hook info
::: mkdocs.plugins.BasePlugin.on_page_context
options:
show_source: False
show_root_toc_entry: False