about summary refs log tree commit diff
path: root/src/rub/__init__.py
blob: b80b00cc9350478321c8cfb3098c8f80d39d1bba (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
# Package initialization
# Copyright (C) 2022-2023  Nguyễn Gia Phong
#
# This file is part of rub.
#
# Rub is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rub is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with rub.  If not, see <https://www.gnu.org/licenses/>.

from functools import cached_property
from json import dump as write_json, load as read_json
from os import walk
from pathlib import Path
from shutil import copytree, rmtree
from typing import Iterator

from doit import create_after, run as do

from rub.xml import Processor, gen_omnifeed, index_categories

__all__ = ['rub']

OMNIFEED = Path('feed.xml')


def glob_files(root: Path, suffix='') -> list[Path]:
    """Return the list of all files in given directory, recursively.

    Paths are relative to root; only names ending in suffix are kept
    (the default empty suffix matches every file).
    """
    found = []
    for directory, _, filenames in walk(root):
        # Convert the walked directory back to a root-relative prefix.
        prefix = Path(directory).relative_to(root)
        found.extend(prefix/name for name in filenames
                     if name.endswith(suffix))
    return found


def replace(source: Path, destination: Path) -> None:
    """Replace destination with source directory.

    Any previous content of destination is removed first, so stale
    files from earlier runs do not linger in the copy.
    """
    # ignore_errors covers the first-run case where destination
    # does not exist yet.
    rmtree(destination, ignore_errors=True)
    copytree(source, destination, dirs_exist_ok=True)


def processing_task(proc: Processor, path: Path,
                    src_dir: Path, dest_dir: Path, doc: str) -> dict:
    """Build a doit task dict applying proc to src_dir/path.

    The task re-runs whenever the stylesheet or the source file
    changes, and writes the transformed result under dest_dir.
    """
    source = src_dir / path
    dest = proc.change_name(dest_dir / path)
    # Leading slash keeps task names distinct from plain file names.
    task_name = f'/{proc.change_name(path)}'
    return {'name': task_name,
            'doc': doc,
            'file_dep': [proc.xslt, source],
            'actions': [(proc.process, [source, dest])],
            'targets': [dest],
            'clean': True}


class Rubber:
    """Static generator: a bag of doit task definitions.

    Method names follow doit's task_* discovery convention, so they
    must not be renamed.
    """

    def __init__(self, page_proc: Processor, feed_proc: Processor,
                 base: Path, src: Path, cache: Path, out: Path) -> None:
        self.page_proc = page_proc
        self.feed_proc = feed_proc
        self.base = base
        self.src = src
        self.cache = cache
        self.out = out

    def task_base(self) -> dict:
        """Define the task mirroring the base directory into out."""
        relative = glob_files(self.base)
        return {'doc': 'copy base directory',
                'file_dep': [self.base/p for p in relative],
                'actions': [(replace, [self.base, self.out])],
                'targets': [self.out/p for p in relative],
                'clean': True}

    @cached_property
    def sources(self) -> list[Path]:
        # XML sources, relative to the source directory; cached since
        # several tasks consult the same listing.
        return glob_files(self.src, '.xml')

    @cached_property
    def page_tasks(self) -> list[dict]:
        # One processing task per source document, cached so that
        # task_pages and task_global_feed share identical dicts.
        return [processing_task(self.page_proc, source_path,
                                self.src, self.out,
                                f'process {source_path} into a web page')
                for source_path in self.sources]

    def task_pages(self) -> Iterator[dict]:
        """Yield the page-processing subtasks under one group doc."""
        yield {'name': None, 'doc': 'process sources into web pages'}
        yield from self.page_tasks

    def task_global_feed(self) -> dict:
        """Define the task generating and transforming the omnifeed."""
        sources = [self.src/p for p in self.sources]
        # Each page task is expected to produce exactly one output.
        assert all(len(t['targets']) == 1 for t in self.page_tasks)
        pages = [t['targets'][0] for t in self.page_tasks]
        omnifeed = self.cache / OMNIFEED
        base_task = processing_task(self.feed_proc, OMNIFEED,
                                    self.cache, self.out,
                                    'generate global feed')
        # The cached omnifeed is produced by this very task, so it is
        # a target rather than a dependency.
        extra_deps = [d for d in base_task['file_dep'] if d != omnifeed]
        return {'doc': base_task['doc'],
                'file_dep': [*sources, *pages, *extra_deps],
                'actions': [(gen_omnifeed,
                             [sources, pages, self.out, omnifeed]),
                            *base_task['actions']],
                'targets': [omnifeed, *base_task['targets']],
                'clean': True}

    @create_after(executed='global_feed')
    def task_categories(self) -> Iterator[dict]:
        """Yield category tasks; delayed until the omnifeed exists."""
        yield {'name': None,
               'doc': 'generate web page and feed for each category'}
        omnifeed = self.cache / OMNIFEED
        index = self.cache / 'categories.json'

        def write_index():
            # Dump the category index extracted from the omnifeed.
            with open(index, 'w') as f:
                write_json(index_categories(omnifeed), f)

        yield {'name': 'index', 'doc': 'index categories',
               'file_dep': [omnifeed], 'actions': [write_index],
               'targets': [index], 'clean': True}


def rub(page_proc: Processor, feed_proc: Processor,
        base: Path, src: Path, cache: Path, out: Path) -> None:
    """Generate static website.

    Wraps the given processors and directories in a Rubber instance
    and hands every attribute of it to doit, which picks out the
    task_* methods by name.
    """
    rubber = Rubber(page_proc, feed_proc, base, src, cache, out)
    namespace = {name: getattr(rubber, name) for name in dir(rubber)}
    do(namespace)