Diffstat (limited to 'src/rub/__init__.py')
 src/rub/__init__.py | 53 ++++++++++++++++++++++++++++++++++++-----------------
 1 file changed, 36 insertions(+), 17 deletions(-)
diff --git a/src/rub/__init__.py b/src/rub/__init__.py
index 3be303b..b80b00c 100644
--- a/src/rub/__init__.py
+++ b/src/rub/__init__.py
@@ -17,14 +17,15 @@
 # along with rub.  If not, see <https://www.gnu.org/licenses/>.
 
 from functools import cached_property
+from json import dump as write_json, load as read_json
 from os import walk
 from pathlib import Path
 from shutil import copytree, rmtree
 from typing import Iterator
 
-from doit import run as do
+from doit import create_after, run as do
 
-from rub.xml import Processor, gen_omnifeed
+from rub.xml import Processor, gen_omnifeed, index_categories
 
 __all__ = ['rub']
 
@@ -74,25 +75,43 @@ class Rubber:
     def sources(self) -> list[Path]:
         return glob_files(self.src, '.xml')
 
+    @cached_property
+    def page_tasks(self) -> list[dict]:
+        return [processing_task(self.page_proc, path, self.src, self.out,
+                                f'process {path} into a web page')
+                for path in self.sources]
+
     def task_pages(self) -> Iterator[dict]:
         yield {'name': None, 'doc': 'process sources into web pages'}
-        for path in self.sources:
-            yield processing_task(self.page_proc, path, self.src, self.out,
-                                  f'process {path} into a web page')
+        yield from self.page_tasks
 
-    def task_feeds(self) -> Iterator[dict]:
-        yield {'name': None, 'doc': 'generate web feeds'}
-        feed_src = self.cache / OMNIFEED
+    def task_global_feed(self) -> dict:
         sources = [self.src/path for path in self.sources]
-        pages = [self.page_proc.change_name(self.out/path)
-                 for path in self.sources]
-        yield {'name': 'source', 'doc': 'generate generic global feed',
-               'file_dep': sources+pages,
-               'actions': [(gen_omnifeed,
-                            [sources, pages, self.out, feed_src])],
-               'targets': [feed_src], 'clean': True}
-        yield processing_task(self.feed_proc, OMNIFEED, self.cache, self.out,
-                              'generate global feed')
+        assert all(len(task['targets']) == 1 for task in self.page_tasks)
+        pages = [task['targets'][0] for task in self.page_tasks]
+        src = self.cache / OMNIFEED
+        task = processing_task(self.feed_proc, OMNIFEED, self.cache, self.out,
+                               'generate global feed')
+        file_dep = (file for file in task['file_dep'] if file != src)
+        return {'doc': task['doc'],
+                'file_dep': [*sources, *pages, *file_dep],
+                'actions': [(gen_omnifeed, [sources, pages, self.out, src]),
+                            *task['actions']],
+                'targets': [src, *task['targets']], 'clean': True}
+
+    @create_after(executed='global_feed')
+    def task_categories(self) -> Iterator[dict]:
+        yield {'name': None,
+               'doc': 'generate web page and feed for each category'}
+        omnifeed, index = self.cache / OMNIFEED, self.cache / 'categories.json'
+
+        def write_index():
+            with open(index, 'w') as f:
+                write_json(index_categories(omnifeed), f)
+
+        yield {'name': 'index', 'doc': 'index categories',
+               'file_dep': [omnifeed], 'actions': [write_index],
+               'targets': [index], 'clean': True}
 
 
 def rub(page_proc: Processor, feed_proc: Processor,
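
The subtlest part of the new task_global_feed is how it re-wires the task
dict returned by processing_task: the cached omnifeed used to be a file_dep
of the feed-rendering step, but the combined task now generates that file
itself, so it is filtered out of file_dep and listed as a target instead;
otherwise doit would look for some other task to produce it. A minimal
sketch of that re-wiring, with made-up paths and a stand-in generator (the
exact shape of processing_task's return value is an assumption here):

    from pathlib import Path

    def gen_feed(src: Path) -> None:
        # Stand-in for gen_omnifeed; only the call shape matters here.
        src.write_text('<feed/>')

    src = Path('cache/feed.xml')
    rendered = {'doc': 'render feed',                  # assumed shape of a
                'file_dep': [src, Path('feed.xsl')],   # processing_task dict
                'actions': [(print, ['rendering feed'])],
                'targets': [Path('out/feed.xml')]}

    combined = {'doc': rendered['doc'],
                # src is produced by this very task, so it leaves file_dep
                'file_dep': [f for f in rendered['file_dep'] if f != src],
                'actions': [(gen_feed, [src]), *rendered['actions']],
                # and joins the targets instead, so `clean` removes it too
                'targets': [src, *rendered['targets']],
                'clean': True}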
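task_categories uses doit's delayed task creation: with
@create_after(executed='global_feed') the function body is not evaluated
until the global_feed task has run, which guarantees the cached omnifeed
exists before index_categories reads it. A self-contained sketch of the
pattern, with hypothetical task names:

    from pathlib import Path

    from doit import create_after

    def task_build():
        # Ordinary task: produces the file the delayed tasks derive from.
        return {'actions': ['echo alpha beta > words.txt'],
                'targets': ['words.txt']}

    @create_after(executed='build')
    def task_per_word():
        # Not evaluated until `build` has executed, so words.txt exists.
        for word in Path('words.txt').read_text().split():
            yield {'name': word, 'actions': [f'echo {word}']}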
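The json aliases keep each call site self-describing: write_json serializes
the mapping from index_categories in the 'index' sub-task, and read_json is
presumably consumed by the per-category tasks later in the file (outside
this hunk). The round trip itself is plain json, sketched with a made-up
index:

    from json import dump as write_json, load as read_json

    index = {'tech': [0, 2], 'misc': [1]}  # made-up category -> entry indices
    with open('categories.json', 'w') as f:
        write_json(index, f)
    with open('categories.json') as f:
        assert read_json(f) == index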