commit 5821ab9590909fee385d2f35e5de99f638e4621c
parent 8235f2152df9eef7debda13422017439cf54f3e6
Author: gracefu <81774659+gracefuu@users.noreply.github.com>
Date: Sat, 19 Apr 2025 23:36:12 +0800
Build sites in parallel
Diffstat:
 M make.py | 233 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++-----------------------
1 file changed, 167 insertions(+), 66 deletions(-)
diff --git a/make.py b/make.py
@@ -51,11 +51,9 @@ import collections
import functools
import hashlib
import inspect
-import os
import pickle
import subprocess
import sys
-import threading
import traceback
from typing import (
@@ -72,7 +70,7 @@ from typing import (
class Fetch(Protocol):
"""Protocol defining the fetch operation used by tasks."""
- async def __call__(self, task: "Task") -> Any: ...
+ async def __call__(self, task_or_rule: "Task | Rule") -> Any: ...
RuleKey = bytes
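With the widened Fetch protocol, a zero-argument rule can now be fetched directly; ensure_task() below converts it to its task on the way in. A minimal usage sketch, assuming the decorators come from this module and `rebuild` stands in for any zero-argument rule:

    @rule_no_cache
    async def all(fetch: Fetch):
        # Passing the Rule object itself is equivalent to fetch(rebuild()) for a rule with no arguments.
        await fetch(rebuild)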
@@ -158,7 +156,14 @@ class Rule:
return Task(
(
self.rule_key,
- *(arg.task_key if isinstance(arg, Task) else arg for arg in args),
+ *(
+ (
+ arg.task_key
+ if isinstance(arg, Task)
+ else arg().task_key if isinstance(arg, Rule) else arg
+ )
+ for arg in args
+ ),
),
self.rule_fn,
*args,
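Rule arguments are normalized the same way when the task key is built: a Task contributes its task_key, a Rule contributes the key of its zero-argument task, and anything else is used as-is. A small sketch, assuming `rule` is used as a bare decorator like `rule_no_cache` elsewhere in this file and `clean`/`deploy` are illustrative names:

    @rule
    async def clean(fetch: Fetch):
        ...

    @rule
    async def deploy(fetch: Fetch, dep):
        await fetch(dep)

    # Both spellings name the same task, so they resolve to one cache entry.
    assert deploy(clean).task_key == deploy(clean()).task_key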
@@ -173,9 +178,24 @@ class Rule:
return self.hash
+def ensure_task(task_or_rule: Task | Rule) -> Task:
+ if isinstance(task_or_rule, Rule):
+ return task_or_rule()
+ return task_or_rule
+
+
+def singleton(cls):
+ cls.main = cls()
+ return cls
+
+
+@singleton
class Rules:
"""The registry of all rules created."""
+ # Main registry
+ main: "Rules"
+
__slots__ = "rules"
rules: dict[RuleKey, Rule]
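The singleton decorator instantiates the class once, at class-definition time, and exposes the instance as the `main` class attribute; that is where `Rules.main` comes from, and why the module-level `rule`, `rule_no_cache`, `register`, and `hash_cache` aliases below can be rebound from it. A self-contained illustration:

    def singleton(cls):
        cls.main = cls()
        return cls

    @singleton
    class Registry:
        pass

    # One shared instance, created as soon as the class body is decorated.
    assert isinstance(Registry.main, Registry)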
@@ -242,7 +262,8 @@ class Rules:
return rule
@staticmethod
- async def track_fetch(fetch: Fetch, new_inputs: list, task: Task):
+ async def track_fetch(fetch: Fetch, new_inputs: list, task_or_rule: Task | Rule):
+ task = ensure_task(task_or_rule)
result = await fetch(task)
new_inputs.append((task.task_key, make_hash(result)))
return result
@@ -288,11 +309,11 @@ class Rules:
return new_value
-_rules = Rules()
-rule = _rules.rule
-rule_no_cache = _rules.rule_no_cache
-register = _rules.register
-hash_cache = _rules.hash_cache
+# Rules.main is created by the @singleton decorator on the class above.
+rule = Rules.main.rule
+rule_no_cache = Rules.main.rule_no_cache
+register = Rules.main.register
+hash_cache = Rules.main.hash_cache
class Store:
@@ -334,10 +355,19 @@ class Detach:
def __init__(self):
self._background_tasks = set()
- def __call__(self, *args, **kwargs):
- task = asyncio.create_task(*args, **kwargs)
- self._background_tasks.add(task)
- task.add_done_callback(self._background_tasks.discard)
+ def __call__(self, awaitable):
+        if asyncio.iscoroutine(awaitable):
+ task = asyncio.create_task(awaitable)
+ self._background_tasks.add(task)
+ task.add_done_callback(self._background_tasks.discard)
+ return task
+ return awaitable
+
+ async def wait(self):
+ while self._background_tasks:
+ t = self._background_tasks.pop()
+ if not t.done():
+ await t
detach = Detach()
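detach() now hands back the scheduled task (non-coroutine values pass through untouched), and detach.wait() drains whatever background work is still outstanding; Build.__call__ below relies on it in place of the scheduler's old wait(). A usage sketch, assuming this file is importable as `make`:

    import asyncio
    import make

    async def demo():
        t = make.detach(asyncio.sleep(0.1))   # coroutine: scheduled as a background task
        assert make.detach(42) == 42          # not a coroutine: returned unchanged
        await make.detach.wait()              # block until every detached task has finished
        assert t.done()

    asyncio.run(demo())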
@@ -354,14 +384,11 @@ class SuspendingScheduler:
self.done = set()
self.waits = dict()
- async def wait(self):
- while detach._background_tasks:
- await asyncio.gather(*detach._background_tasks)
-
- def build(self, task: Task):
- return self.fetch_once(task)
+ def build(self, *tasks: Task):
+ return asyncio.gather(*(self.fetch_once(task) for task in tasks))
- async def fetch_once(self, task: Task):
+ async def fetch_once(self, task_or_rule: Task | Rule):
+ task = ensure_task(task_or_rule)
task_key = task.task_key
wait = None
event = None
@@ -394,20 +421,17 @@ class SuspendingScheduler:
class Build:
__slots__ = "_store", "_scheduler"
- def __init__(self, filename, rules=_rules):
+ def __init__(self, filename, rules=Rules.main):
self._store = Store(filename, rules)
self._scheduler = SuspendingScheduler(self._store)
- async def __call__(self, task: Task):
- result = await self.build(task)
- await self.wait()
+ async def __call__(self, *tasks: Task):
+ result = await self.build(*tasks)
+ await detach.wait()
return result
- def wait(self):
- return self._scheduler.wait()
-
- def build(self, task: Task):
- return self._scheduler.build(task)
+ def build(self, *tasks: Task):
+ return self._scheduler.build(*tasks)
def __enter__(self):
self._store.__enter__()
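A Build object now accepts several root tasks at once and gathers them before waiting on detached background work. A hedged sketch, where `site` and `docs` are hypothetical registered rules:

    async def rebuild_everything():
        with Build(".makedb") as b:
            await b(site(), docs())   # both targets build concurrently, then detached tasks are awaited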
@@ -417,6 +441,11 @@ class Build:
self._store.__exit__(exc_type, exc_val, exc_tb)
+def build(*tasks, filename=".makedb", rules=Rules.main):
+ with Build(filename, rules) as build:
+ asyncio.run(build(*tasks))
+
+
class ShellResult(collections.namedtuple("ShellResult", "stdout stderr returncode")):
__slots__ = ()
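The module-level build() helper is the synchronous counterpart: it opens the store and runs everything under asyncio.run. Same hypothetical rules as above:

    if __name__ == "__main__":
        build(site(), docs(), filename=".makedb")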
@@ -435,10 +464,53 @@ EchoStderr = 2
EchoAll = 3
+async def _exec_reader(istream, ostream, echo: Any = False):
+ contents = b""
+ async for chunk in istream:
+ contents += chunk
+ if echo:
+ ostream.write(chunk)
+ ostream.flush()
+ return contents
+
+
+async def exec(
+ program,
+ *args,
+ input: bytes | bytearray | memoryview | None = None,
+ echo: int = EchoNothing,
+) -> ShellResult:
+
+ proc = await asyncio.create_subprocess_exec(
+ program,
+ *args,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+
+ if input is not None:
+ proc.stdin.write(input) # type: ignore
+ _, stdout, stderr, returncode = await asyncio.gather(
+ proc.stdin.drain(), # type: ignore
+ _exec_reader(proc.stdout, sys.stdout.buffer, echo=echo & EchoStdout),
+ _exec_reader(proc.stderr, sys.stderr.buffer, echo=echo & EchoStderr),
+ proc.wait(),
+ )
+ else:
+ stdout, stderr, returncode = await asyncio.gather(
+ _exec_reader(proc.stdout, sys.stdout.buffer, echo=echo & EchoStdout),
+ _exec_reader(proc.stderr, sys.stderr.buffer, echo=echo & EchoStderr),
+ proc.wait(),
+ )
+
+ return ShellResult(stdout, stderr, returncode)
+
+
async def shell(
cmd,
- input=None,
- echo=EchoNothing,
+ input: bytes | bytearray | memoryview | None = None,
+ echo: int = EchoNothing,
) -> ShellResult:
proc = await asyncio.create_subprocess_shell(
cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
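exec() is the argv-style sibling of shell(): it launches the program without a shell, accumulates stdout and stderr while optionally echoing them as they arrive, and returns the same ShellResult tuple. A usage sketch; the git invocation is only illustrative:

    async def dirty_files():
        res = await exec("git", "status", "--porcelain", echo=EchoStderr)
        if res.returncode != 0:
            raise RuntimeError(res.stderr.decode())
        return res.stdout.decode().splitlines()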
@@ -457,6 +529,26 @@ def run_in_executor(f, *args, executor=None):
return asyncio.get_running_loop().run_in_executor(executor, f, *args)
+async def async_main(globals, filename=".makedb", default_target="all"):
+ targets = sys.argv[1:]
+ if not targets:
+ targets.append(default_target)
+
+ with Build(filename) as build:
+        await build(*(eval(target, globals) for target in targets))
+ return 0
+
+
+def main(globals, filename=".makedb", default_target="all"):
+ targets = sys.argv[1:]
+ if not targets:
+ targets.append(default_target)
+
+ with Build(filename) as build:
+        asyncio.run(build(*(eval(target, globals) for target in targets)))
+ return 0
+
+
# class AsyncWrapperSpec:
# __slots__ = "async_methods", "async_subobjects"
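main() and async_main() turn a build script into a small makefile: every command-line argument is evaluated against the caller's namespace to obtain a task or rule, and `all` is built when no target is given. A hedged sketch of a user script, assuming this file is importable as `make`:

    # build.py (hypothetical)
    from make import EchoAll, Fetch, main, rule_no_cache, shell

    @rule_no_cache
    async def all(fetch: Fetch):
        await shell("echo hello", echo=EchoAll)

    if __name__ == "__main__":
        exit(main(globals()))   # `python build.py` builds `all`; `python build.py 'all()'` is equivalent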
@@ -609,9 +701,10 @@ def run_in_executor(f, *args, executor=None):
import json
+import os
-async def main():
+async def our_main():
@rule_no_cache
def build_git_repo(
fetch: Fetch,
@@ -648,12 +741,14 @@ async def main():
cp -a "$GIT_PATH" -T "$PUBLIC_PATH"
+ echo + stagit {output_url}
( cd "$PUBLIC_PATH" &&
"$STAGIT_PATH" \
-u https://blog.grace.moe/{output_url} \
"$GIT_PATH" &&
echo '<meta http-equiv="refresh" content="0; url=log.html" />' > index.html
)
+ echo - stagit {output_url}
""",
echo=EchoAll,
)
@@ -665,33 +760,35 @@ async def main():
rm -rf public
"""
)
- await shell("zine release", echo=EchoAll)
- await fetch(
- build_git_repo(
- "stagit/stagit",
- ".",
- "git/blog.grace.moe",
- "git/blog.grace.moe",
- "Source for the blog blog.grace.moe",
- )
- )
- await fetch(
- build_git_repo(
- "stagit/stagit",
- "~/Documents/src/pymake",
- "git/pymake",
- "git/pymake",
- "A build system based on Build Systems à la Carte",
- )
- )
- await fetch(
- build_git_repo(
- "stagit/stagit",
- ".git/modules/stagit",
- "git/stagit",
- "git/stagit",
- "My personal fork of stagit https://codemadness.org/stagit.html",
- )
+ await asyncio.gather(
+ shell("zine release", echo=EchoAll),
+ fetch(
+ build_git_repo(
+ "stagit/stagit",
+ ".",
+ "git/blog.grace.moe",
+ "git/blog.grace.moe",
+ "Source for the blog blog.grace.moe",
+ )
+ ),
+ fetch(
+ build_git_repo(
+ "stagit/stagit",
+ "~/Documents/src/pymake",
+ "git/pymake",
+ "git/pymake",
+ "A build system based on Build Systems à la Carte",
+ )
+ ),
+ fetch(
+ build_git_repo(
+ "stagit/stagit",
+ ".git/modules/stagit",
+ "git/stagit",
+ "git/stagit",
+ "My personal fork of stagit https://codemadness.org/stagit.html",
+ )
+ ),
)
await shell(
"cd public/git && ../../stagit/stagit-index blog.grace.moe pymake stagit > index.html",
@@ -797,16 +894,20 @@ async def main():
await shell(f"{APICMD} -XPOST '{PURGEURL}'")
print("Purged.")
- with Build(".makedb") as build:
- await build(rebuild())
+ @rule_no_cache
+ async def all(fetch: Fetch):
+ await fetch(rebuild())
UPLOAD = (await shell(f"cd '{LOCALPATH}' && find . -type f")).utf8stdout
CLEAN = (await shell(f"cd '{LOCALPATH}' && find . -type d")).utf8stdout
await asyncio.gather(
- *(build(upload(path)) for path in UPLOAD.strip().split("\n")),
- *(build(clean(path)) for path in CLEAN.strip().split("\n")),
+ *(fetch(upload(path)) for path in UPLOAD.strip().split("\n")),
+ *(fetch(clean(path)) for path in CLEAN.strip().split("\n")),
)
- await build(purge())
+ await fetch(purge())
+
+ _ = all
+ return await async_main(locals())
if __name__ == "__main__":
- asyncio.run(main())
+ exit(asyncio.run(our_main()))