
utils

Class info

Classes

Name: CountHandler (mkdocs_mknodes.commands.utils)
Inherits: NullHandler
Description: Counts all logged messages >= level.


    CountHandler

    Bases: NullHandler

    Counts all logged messages >= level.

    Source code in mkdocs_mknodes/commands/utils.py
    class CountHandler(logging.NullHandler):
        """Counts all logged messages >= level."""
    
        def __init__(self, **kwargs: Any) -> None:
            self.counts: dict[int, int] = collections.defaultdict(int)
            super().__init__(**kwargs)
    
        def handle(self, record):
            rv = self.filter(record)
            if rv:
                # Use levelno for keys so they can be sorted later
                self.counts[record.levelno] += 1
            return rv
    
        def get_counts(self) -> list[tuple[str, int]]:
            return [
                (logging.getLevelName(k), v)
                for k, v in sorted(self.counts.items(), reverse=True)
            ]
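
A minimal usage sketch (not part of the source): attach the handler to a logger, emit a few records, and read the per-level counts back. The handler's level decides which records are counted.

    import logging

    from mkdocs_mknodes.commands.utils import CountHandler

    logger = logging.getLogger("example")
    logger.propagate = False  # keep the root lastResort handler from printing to stderr
    counter = CountHandler(level=logging.WARNING)  # count WARNING and above
    logger.addHandler(counter)

    logger.warning("something looks off")
    logger.error("something failed")
    logger.info("below WARNING, not counted")

    # Sorted by severity, highest first, e.g. [('ERROR', 1), ('WARNING', 1)]
    print(counter.get_counts())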
    

    get_build_datetime

    get_build_datetime() -> datetime
    

    Get the build datetime, respecting SOURCE_DATE_EPOCH if set.

Supports the SOURCE_DATE_EPOCH environment variable for reproducible builds. See https://reproducible-builds.org/specs/source-date-epoch/

Returns:

    datetime: Aware datetime object

    Source code in mkdocs_mknodes/commands/utils.py
    def get_build_datetime() -> datetime.datetime:
        """Get the build datetime, respecting SOURCE_DATE_EPOCH if set.
    
        Support SOURCE_DATE_EPOCH environment variable for reproducible builds.
        See https://reproducible-builds.org/specs/source-date-epoch/
    
        Returns:
            Aware datetime object
        """
        source_date_epoch = os.environ.get("SOURCE_DATE_EPOCH")
        if source_date_epoch is None:
            return datetime.datetime.now(datetime.UTC)
    
        return datetime.datetime.fromtimestamp(int(source_date_epoch), datetime.UTC)
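
An illustrative check of the two paths (assumes Python 3.11+ for datetime.UTC, which the source already uses): with SOURCE_DATE_EPOCH set, the result is pinned to that epoch value; without it, the current UTC time is returned.

    import datetime
    import os

    from mkdocs_mknodes.commands.utils import get_build_datetime

    # Reproducible build: pin the build time via SOURCE_DATE_EPOCH.
    os.environ["SOURCE_DATE_EPOCH"] = "1700000000"
    assert get_build_datetime() == datetime.datetime.fromtimestamp(1_700_000_000, datetime.UTC)

    # Without the variable, the current UTC time is used.
    del os.environ["SOURCE_DATE_EPOCH"]
    assert get_build_datetime().tzinfo is datetime.UTC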
    

    get_build_timestamp

    get_build_timestamp(*, pages: Collection[Page] | None = None) -> int
    

    Returns the number of seconds since the epoch for the latest updated page.

    In reality this is just today's date because that's how pages' update time is populated.

Parameters:

    pages (Collection[Page] | None): Optional collection of pages to determine timestamp from. Default: None

Returns:

    int: Unix timestamp as integer

    Source code in mkdocs_mknodes/commands/utils.py
    def get_build_timestamp(*, pages: Collection[Page] | None = None) -> int:
        """Returns the number of seconds since the epoch for the latest updated page.
    
        In reality this is just today's date because that's how pages' update time
        is populated.
    
        Args:
            pages: Optional collection of pages to determine timestamp from
    
        Returns:
            Unix timestamp as integer
        """
        if pages:
            # Lexicographic comparison is OK for ISO date.
            date_string = max(p.update_date for p in pages)
            dt = datetime.datetime.fromisoformat(date_string).replace(tzinfo=datetime.UTC)
        else:
            dt = get_build_datetime()
        return int(dt.timestamp())
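
A short sketch of both paths (the stand-in objects below are illustrative, not real MkDocs Page instances; only the update_date attribute is read):

    import datetime
    from types import SimpleNamespace

    from mkdocs_mknodes.commands.utils import get_build_timestamp

    # Without pages, the result is simply the build datetime as a Unix timestamp.
    ts = get_build_timestamp(pages=None)
    print(datetime.datetime.fromtimestamp(ts, datetime.UTC))

    # With pages, the latest ISO-formatted update_date wins (duck-typed stand-ins here).
    fake_pages = [SimpleNamespace(update_date="2024-01-02"), SimpleNamespace(update_date="2024-03-15")]
    print(get_build_timestamp(pages=fake_pages))  # timestamp of 2024-03-15 00:00 UTC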
    

    get_files

    get_files(config: MkNodesConfig) -> Files
    

    Walk the docs_dir and return a Files collection.

    Source code in mkdocs_mknodes/commands/utils.py
    def get_files(config: MkNodesConfig) -> Files:
        """Walk the `docs_dir` and return a Files collection."""
        files: list[File] = []
        conflicting_files: list[tuple[File, File]] = []
        for source_dir, dirnames, filenames in os.walk(config["docs_dir"], followlinks=True):
            relative_dir = os.path.relpath(source_dir, config["docs_dir"])
            dirnames.sort()
            filenames.sort(key=_file_sort_key)
    
            files_by_dest: dict[str, File] = {}
            for filename in filenames:
                path = pathlib.Path(relative_dir) / filename
                file = File(
                    str(path),
                    config["docs_dir"],
                    config["site_dir"],
                    config["use_directory_urls"],
                )
                # Skip README.md if an index file also exists in dir (part 1)
                prev_file = files_by_dest.setdefault(file.dest_uri, file)
                if prev_file is not file:
                    conflicting_files.append((prev_file, file))
                files.append(file)
                prev_file = file
    
        set_exclusions(files, config)
        # Skip README.md if an index file also exists in dir (part 2)
        for a, b in conflicting_files:
            if b.inclusion.is_included():
                if a.inclusion.is_included():
                    logger.warning(
                        "Excluding '%s' from site because it conflicts with '%s'.",
                        a.src_uri,
                        b.src_uri,
                    )
                # avoid errors if attempting to remove the same file twice.
                with contextlib.suppress(ValueError):
                    files.remove(a)
            else:
                with contextlib.suppress(ValueError):
                    files.remove(b)
        return Files(files)
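
A hedged usage sketch: a standard MkDocsConfig loaded via mkdocs.config.load_config stands in for the MkNodesConfig the signature declares, on the assumption that only standard keys (docs_dir, site_dir, use_directory_urls, exclude_docs, draft_docs, not_in_nav) are read here; the source does not state this interchangeability.

    from mkdocs.config import load_config

    from mkdocs_mknodes.commands.utils import get_files

    # Assumption: a plain MkDocsConfig works as a stand-in for MkNodesConfig here.
    config = load_config("mkdocs.yml")
    files = get_files(config)
    for file in files.documentation_pages():  # Markdown pages that will be rendered
        print(file.src_uri, file.inclusion)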
    

    is_error_template

    is_error_template(path: str) -> bool
    

    Check if a template path is an error code template (like "404.html").

Parameters:

    path (str): Template path to check. Required.

Returns:

    bool: True if path matches error template pattern

    Source code in mkdocs_mknodes/commands/utils.py
    def is_error_template(path: str) -> bool:
        """Check if a template path is an error code template (like "404.html").
    
        Args:
            path: Template path to check
    
        Returns:
            True if path matches error template pattern
        """
        return bool(_ERROR_TEMPLATE_RE.match(path))
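
For example (expected results based on the docstring and the error-template pattern):

    from mkdocs_mknodes.commands.utils import is_error_template

    print(is_error_template("404.html"))    # True: an HTTP error code template
    print(is_error_template("index.html"))  # False: a regular template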
    

    set_exclusions

    set_exclusions(files: Iterable[File], config: MkNodesConfig) -> None
    

    Re-calculate which files are excluded, based on the patterns in the config.

    Source code in mkdocs_mknodes/commands/utils.py
    def set_exclusions(files: Iterable[File], config: MkNodesConfig) -> None:
        """Re-calculate which files are excluded, based on the patterns in the config."""
        exclude: pathspec.gitignore.GitIgnoreSpec | None = config.get("exclude_docs")
        exclude = _default_exclude + exclude if exclude else _default_exclude
        drafts: pathspec.gitignore.GitIgnoreSpec | None = config.get("draft_docs")
        nav_exclude: pathspec.gitignore.GitIgnoreSpec | None = config.get("not_in_nav")
    
        for file in files:
            if file.inclusion == InclusionLevel.UNDEFINED:
                if exclude.match_file(file.src_uri):
                    file.inclusion = InclusionLevel.EXCLUDED
                elif drafts and drafts.match_file(file.src_uri):
                    file.inclusion = InclusionLevel.DRAFT
                elif nav_exclude and nav_exclude.match_file(file.src_uri):
                    file.inclusion = InclusionLevel.NOT_IN_NAV
                else:
                    file.inclusion = InclusionLevel.INCLUDED
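
A minimal sketch of the effect (illustrative only: a plain dict stands in for MkNodesConfig, since only config.get(...) is called for the three pattern keys; file names are made up):

    import pathspec.gitignore
    from mkdocs.structure.files import File, InclusionLevel

    from mkdocs_mknodes.commands.utils import set_exclusions

    config = {
        "exclude_docs": pathspec.gitignore.GitIgnoreSpec.from_lines(["internal/"]),
        "draft_docs": None,
        "not_in_nav": None,
    }
    files = [
        File("index.md", "docs", "site", use_directory_urls=True),
        File("internal/notes.md", "docs", "site", use_directory_urls=True),
    ]
    set_exclusions(files, config)

    assert files[0].inclusion == InclusionLevel.INCLUDED
    assert files[1].inclusion == InclusionLevel.EXCLUDED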
    
    

    write_gzip

    write_gzip(output_path: str | PathLike[str], output: str, timestamp: int)
    

    Build a gzipped version of the sitemap.

Parameters:

    output_path (str | PathLike[str]): Path to the sitemap. Required.
    output (str): File content. Required.
    timestamp (int): Numeric timestamp written to the last modification time field in the stream when compressing; if omitted or None, the current time is used. Required.
    Source code in mkdocs_mknodes/commands/utils.py
    def write_gzip(output_path: str | os.PathLike[str], output: str, timestamp: int):
        """Build a gzipped version of the sitemap.
    
        Args:
            output_path: Path to the sitemap
            output: File content
            timestamp: Optional numeric timestamp to be written to the last modification time
                       field in the stream when compressing.
                       If omitted or None, the current time is used.
        """
        logger.debug("Gzipping %r", output_path)
        gz_filename = upath.UPath(output_path)
        with (
            gz_filename.open("wb") as f,
            gzip.GzipFile(gz_filename, fileobj=f, mode="wb", mtime=timestamp) as gz_buf,
        ):
            gz_buf.write(output.encode())
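
A usage sketch (the file name and content here are made up for illustration):

    import gzip
    import time

    from mkdocs_mknodes.commands.utils import write_gzip

    sitemap = '<?xml version="1.0" encoding="UTF-8"?><urlset></urlset>'
    write_gzip("sitemap.xml.gz", sitemap, timestamp=int(time.time()))

    # The gzipped file decompresses back to the original content.
    with gzip.open("sitemap.xml.gz", "rt", encoding="utf-8") as f:
        assert f.read() == sitemap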