# Package initialization
# Copyright (C) 2022-2023  Nguyễn Gia Phong
#
# This file is part of rub.
#
# Rub is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rub is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with rub.  If not, see <https://www.gnu.org/licenses/>.

from functools import cached_property
from os import walk
from pathlib import Path
from shutil import copytree, rmtree
from typing import Iterator

from doit import run as do

from rub.xml import Processor, gen_metadata, gen_omnifeed

__all__ = ['rub']
OMNIFEED = Path('feed.xml')


def glob_files(root: Path, suffix: str = '') -> list[Path]:
    """Return all files in the given directory, recursively,
    keeping only those whose name ends with the given suffix.

    Paths are returned relative to root.
    """
    return [Path(path).relative_to(root)/file
            for path, dirs, files in walk(root)
            for file in files if file.endswith(suffix)]


def replace(source: Path, destination: Path) -> None:
    """Replace destination with a copy of the source directory."""
    rmtree(destination, ignore_errors=True)
    copytree(source, destination, dirs_exist_ok=True)


def processing_task(proc: Processor, path: Path,
                    src_dir: Path, dest_dir: Path, doc: str) -> dict:
    """Return a doit task that processes src_dir/path into dest_dir."""
    source, dest = src_dir/path, proc.change_name(dest_dir/path)
    return {'name': f'/{proc.change_name(path)}', 'doc': doc,
            'file_dep': [proc.xslt, source],
            'actions': [(proc.process, [source, dest])],
            'targets': [dest], 'clean': True}


class Rubber:
    """Static site generator."""

    def __init__(self, page_proc: Processor, feed_proc: Processor,
                 base: Path, src: Path, cache: Path, out: Path) -> None:
        self.page_proc, self.feed_proc = page_proc, feed_proc
        self.base, self.src = base, src
        self.cache, self.out = cache, out

    def task_base(self) -> dict:
        paths = glob_files(self.base)
        return {'doc': 'copy base directory',
                'file_dep': [self.base/path for path in paths],
                'actions': [(replace, [self.base, self.out])],
                'targets': [self.out/path for path in paths],
                'clean': True}

    @cached_property
    def sources(self) -> list[Path]:
        return glob_files(self.src, '.xml')

    @cached_property
    def metadata(self) -> Path:
        return self.cache / 'metadata.xml'

    @cached_property
    def page_tasks(self) -> list[dict]:
        return [processing_task(self.page_proc, path, self.src, self.out,
                                f'process {path} into a web page')
                for path in self.sources]

    @cached_property
    def pages(self) -> list[Path]:
        # Each page task produces exactly one output page.
        for task in self.page_tasks:
            assert len(task['targets']) == 1
        return [task['targets'][0] for task in self.page_tasks]

    def task_metadata(self) -> dict:
        sources = [self.src/path for path in self.sources]
        return {'doc': 'extract metadata from source pages',
                'file_dep': sources,
                'actions': [(gen_metadata, [sources, self.pages,
                                            self.out, self.metadata])],
                'targets': [self.metadata], 'clean': True}

    def task_pages(self) -> Iterator[dict]:
        yield {'name': None, 'doc': 'process sources into web pages'}
        yield from self.page_tasks

    def task_global_feed(self) -> dict:
        src = self.cache / OMNIFEED
        task = processing_task(self.feed_proc, OMNIFEED,
                               self.cache, self.out, 'generate global feed')
        # The omnifeed source is generated by the first action below,
        # so it must not also appear as a file dependency.
        file_dep = (file for file in task['file_dep'] if file != src)
        return {'doc': task['doc'],
                'file_dep': [self.metadata, *self.pages, *file_dep],
                'actions': [(gen_omnifeed, [self.metadata, self.out, src]),
                            *task['actions']],
                'targets': [src, *task['targets']], 'clean': True}


def rub(page_proc: Processor, feed_proc: Processor,
        base: Path, src: Path, cache: Path, out: Path) -> None:
    """Generate static website."""
    rubber = Rubber(page_proc, feed_proc, base, src, cache, out)
    # Hand all of the rubber's members to doit's loader,
    # which picks out the ones whose name starts with 'task_'.
    do({k: getattr(rubber, k) for k in dir(rubber)})
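

# A minimal usage sketch, in the spirit of a doit ``dodo.py``.  The
# ``Processor`` constructor arguments are assumptions: rub.xml is not
# shown here, so passing an XSLT stylesheet path is only a guess based
# on the ``proc.xslt`` file dependency above.
#
#     from pathlib import Path
#     from rub import rub
#     from rub.xml import Processor
#
#     rub(Processor(Path('page.xslt')),   # hypothetical signature
#         Processor(Path('feed.xslt')),   # hypothetical signature
#         base=Path('base'), src=Path('src'),
#         cache=Path('.cache'), out=Path('public'))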