about summary refs log tree commit diff
path: root/src/rub/__init__.py
diff options
context:
space:
mode:
Diffstat (limited to 'src/rub/__init__.py')
-rw-r--r--  src/rub/__init__.py  78
1 file changed, 52 insertions, 26 deletions
diff --git a/src/rub/__init__.py b/src/rub/__init__.py
index 904f47a..3be303b 100644
--- a/src/rub/__init__.py
+++ b/src/rub/__init__.py
@@ -20,44 +20,49 @@ from functools import cached_property
 from os import walk
 from pathlib import Path
 from shutil import copytree, rmtree
+from typing import Iterator
 
-from rub import xml
+from doit import run as do
 
-__all__ = ['Rubber', 'xml']
+from rub.xml import Processor, gen_omnifeed
 
+__all__ = ['rub']
 
-def glob_files(root: Path, suffix=''):
+OMNIFEED = Path('feed.xml')
+
+
+def glob_files(root: Path, suffix='') -> list[Path]:
     """Return the list of all files in given directory, recursively."""
     return [Path(path).relative_to(root)/file
             for path, dirs, files in walk(root)
             for file in files if file.endswith(suffix)]
 
 
-def replace(source: Path, destination: Path):
+def replace(source: Path, destination: Path) -> None:
     """Replace destination with source directory."""
     rmtree(destination, ignore_errors=True)
     copytree(source, destination, dirs_exist_ok=True)
 
 
+def processing_task(proc: Processor, path: Path,
+                    src_dir: Path, dest_dir: Path, doc: str) -> dict:
+    source, dest = src_dir / path, proc.change_name(dest_dir/path)
+    return {'name': f'/{proc.change_name(path)}', 'doc': doc,
+            'file_dep': [proc.xslt, source],
+            'actions': [(proc.process, [source, dest])],
+            'targets': [dest], 'clean': True}
+
+
 class Rubber:
     """Static generator."""
 
-    def __init__(self, generate_article, base, src, cache, out):
-        self.generate_article = generate_article
+    def __init__(self, page_proc: Processor, feed_proc: Processor,
+                 base: Path, src: Path, cache: Path, out: Path) -> None:
+        self.page_proc, self.feed_proc = page_proc, feed_proc
         self.base, self.src = base, src
         self.cache, self.out = cache, out
 
-    @cached_property
-    def tasks(self):
-        def assox():
-            for k in dir(self):
-                if not k.startswith('task_'): continue
-                v = getattr(self, k)
-                if callable(v): yield k, v
-
-        return dict(assox())
-
-    def task_base(self):
+    def task_base(self) -> dict:
         paths = glob_files(self.base)
         return {'doc': 'copy base directory',
                 'file_dep': [self.base/path for path in paths],
@@ -65,12 +70,33 @@ class Rubber:
                 'targets': [self.out/path for path in paths],
                 'clean': True}
 
-    def task_articles(self):
-        """process articles into XHTML"""
-        for path in glob_files(self.src, '.xml'):
-            source = self.src / path
-            destination = self.out / path
-            yield {'name': path, 'doc': f'process {path} into XHTML',
-                   'file_dep': [source],
-                   'actions': [(self.generate_article, [source, destination])],
-                   'targets': [destination], 'clean': True}
+    @cached_property
+    def sources(self) -> list[Path]:
+        return glob_files(self.src, '.xml')
+
+    def task_pages(self) -> Iterator[dict]:
+        yield {'name': None, 'doc': 'process sources into web pages'}
+        for path in self.sources:
+            yield processing_task(self.page_proc, path, self.src, self.out,
+                                  f'process {path} into a web page')
+
+    def task_feeds(self) -> Iterator[dict]:
+        yield {'name': None, 'doc': 'generate web feeds'}
+        feed_src = self.cache / OMNIFEED
+        sources = [self.src/path for path in self.sources]
+        pages = [self.page_proc.change_name(self.out/path)
+                 for path in self.sources]
+        yield {'name': 'source', 'doc': 'generate generic global feed',
+               'file_dep': sources+pages,
+               'actions': [(gen_omnifeed,
+                            [sources, pages, self.out, feed_src])],
+               'targets': [feed_src], 'clean': True}
+        yield processing_task(self.feed_proc, OMNIFEED, self.cache, self.out,
+                              'generate global feed')
+
+
+def rub(page_proc: Processor, feed_proc: Processor,
+        base: Path, src: Path, cache: Path, out: Path) -> None:
+    """Generate static website."""
+    rubber = Rubber(page_proc, feed_proc, base, src, cache, out)
+    do({k: getattr(rubber, k) for k in dir(rubber)})