commit f70a70ec717164d5b23ccdf82c785e5d7ad1ba58
parent e49e1de445fac020e41338d8d018e960413bf60b
Author: gracefu <81774659+gracefuu@users.noreply.github.com>
Date: Tue, 22 Apr 2025 22:22:42 +0800
Move git part of blog site to new site
Diffstat:
7 files changed, 187 insertions(+), 824 deletions(-)
diff --git a/.gitmodules b/.gitmodules
@@ -1,3 +0,0 @@
-[submodule "stagit"]
- path = stagit
- url = git://git.codemadness.org/stagit
diff --git a/assets/404.html b/assets/404.html
@@ -0,0 +1,2 @@
+<!DOCTYPE html>
+404 not found
diff --git a/layouts/templates/base.shtml b/layouts/templates/base.shtml
@@ -269,7 +269,7 @@ ul, ol, dl, menu {
</div>
<footer id="base-footer" class="limit-children">
<hr id="base-footer-rule">
- <p id="base-no-cookie">This <a href="/git/blog.grace.moe/log.html">site</a> uses no cookies. ❌🍪</p>
+ <p id="base-no-cookie">This <a href="//git.grace.moe/blog.grace.moe/log.html">site</a> uses no cookies. ❌🍪</p>
</footer>
</div>
diff --git a/make.py b/make.py
@@ -1,800 +1,10 @@
-"""
-make.py
--------
-
-Design inspired by the paper "Build Systems à la Carte"
-
-- https://github.com/snowleopard/build
-- https://www.microsoft.com/en-us/research/wp-content/uploads/2018/03/build-systems.pdf
-
-Key concepts:
-
-- The goal is to maintain an up-to-date *store* mapping *tasks* to *values*.
-- Tasks are described using rules, functions from parameters to tasks.
-- Each rule can choose its own caching policy; the default is a persistent cache keyed by hashes.
-- The current scheduler is a top-down (suspending) scheduler.
-
-make.py improves upon the paper's design in a few ways:
-
-- Task keys (for book-keeping purposes) are automatically derived from the rule functions.
-- Tasks are executed concurrently.
-- We split the paper's two concepts, Rebuilder and Scheduler, into three:
-
- - (Per-task) Caching policies.
- - (Global) Updating strategy.
- - (Global) Metadata updaters.
-
-# Why we re-interpret the concepts Rebuilder and Scheduler
-
-The paper merges the concept of "metadata updaters" into the Rebuilder and the Scheduler.
-This makes some sense, since different rebuilders and schedulers require different metadata.
-
-However, it means that a rebuilder may need to override the `fetch` function in a call
-in order to ensure that the metadata it requires is created, and it encourages building
-metadata in a purely local way. Furthermore, a rebuilder may sometimes need the same
-metadata as a scheduler's fetch function: for instance, tracking dependency relationships
-is required both by the topological-sort scheduler and by trace-based rebuilders
-(e.g. the constructive-trace rebuilder).
-
-So, we instead factor the metadata-updating portion out of both rebuilders and schedulers
-into a global metadata updater, which can be viewed as yet another wrapper around rules.
-However, because it must apply to every task to support the overall scheduling strategy,
-metadata updaters are defined globally, unlike the per-task caching policies.
-
-# TODO
-
-- Make files on the filesystem a core concept as opposed to merely something you can do.
-"""
-
-import asyncio
-import collections
-import functools
+from make_utils import *
import hashlib
-import inspect
-import pickle
-import subprocess
-import sys
-import traceback
-
-from typing import (
- Any,
- Awaitable,
- Callable,
- Concatenate,
- Optional,
- ParamSpec,
- Protocol,
-)
-
-
-class Fetch(Protocol):
- """Protocol defining the fetch operation used by tasks."""
-
- async def __call__(self, task_or_rule: "Task | Rule") -> Any: ...
-
-
-RuleKey = bytes
-TaskKey = tuple
-ValueHash = bytes
-
-P = ParamSpec("P")
-RuleFn = Callable[Concatenate[Fetch, TaskKey, "Store", P], Awaitable[Any]]
-NiceRuleFn = Callable[Concatenate[Fetch, P], Awaitable[Any]]
-
-
-def make_hash(o: Any) -> bytes:
- if isinstance(o, bytes):
- h = hashlib.sha256(b"s")
- h.update(o)
- else:
- h = hashlib.sha256(b"r")
- h.update(repr(o).encode("utf-8"))
- return h.digest()
-
-
-def rule_fn_to_key(fn: Callable) -> RuleKey:
- name = fn.__name__
- source = inspect.getsource(fn)
- h = hashlib.sha256(source.encode("utf-8")).hexdigest()[:16]
- key = f"{name}-{len(source)}-{h}".encode("utf-8")
- return key
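
Since the key embeds the rule's name, source length, and a truncated hash of its source, editing a rule's body changes its key and strands any cache entries recorded under the old one. For a hypothetical rule:

    async def greet(fetch):
        return "hello"

    print(rule_fn_to_key(greet))
    # -> b'greet-<source length>-<16 hex chars>'; the digest tracks the
    #    source text, so any edit to the body yields a fresh key.
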
-
-
-class Task:
- """A computation of a value."""
-
- __slots__ = "task_key", "rule_fn", "args", "hash"
-
- task_key: TaskKey
- rule_fn: RuleFn
- args: tuple
- hash: int
-
- def __init__(self, task_key: TaskKey, rule_fn: RuleFn, *args):
- self.task_key = task_key
- self.rule_fn = rule_fn
- self.args = args
- self.hash = hash(self.task_key)
-
- def __call__(self, fetch: Fetch, store: "Store"):
- return self.rule_fn(fetch, self.task_key, store, *self.args)
-
- def __repr__(self) -> str:
- return repr(self.task_key)
-
- def __eq__(self, other: object) -> bool:
- if not isinstance(other, Task):
- return NotImplemented
- return self.task_key == other.task_key
-
- def __hash__(self) -> int:
- return self.hash
-
-
-class Rule:
- """A function that returns tasks."""
-
- __slots__ = "rule_key", "rule_fn", "hash"
-
- rule_key: RuleKey
- rule_fn: RuleFn
- hash: int
-
- @staticmethod
- def new(rule_fn: RuleFn):
- return Rule(
- rule_fn_to_key(rule_fn),
- rule_fn,
- )
-
- def __init__(self, rule_key: RuleKey, rule_fn: RuleFn):
- self.rule_key = rule_key
- self.rule_fn = rule_fn
- self.hash = hash(self.rule_key)
-
- def __call__(self, *args):
- return Task(
- (
- self.rule_key,
- *(
- (
- arg.task_key
- if isinstance(arg, Task)
- else arg().task_key if isinstance(arg, Rule) else arg
- )
- for arg in args
- ),
- ),
- self.rule_fn,
- *args,
- )
-
- def __eq__(self, other):
- if not isinstance(other, Rule):
- return NotImplemented
- return self.rule_key == other.rule_key
-
- def __hash__(self):
- return self.hash
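
Calling a rule therefore builds a structural task key: Task or Rule arguments are replaced by their own keys, so equal computations compare (and hash) equal. Reusing the hypothetical read rule from the earlier sketch:

    t1 = read("a.txt")
    t2 = read("a.txt")
    assert t1 == t2 and hash(t1) == hash(t2)  # key: (b'read-...', 'a.txt')
    # Nested Task/Rule arguments flatten into the key the same way, e.g.
    # some_rule(read("a.txt")) keys as (b'some_rule-...', (b'read-...', 'a.txt')).
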
-
-
-def ensure_task(task_or_rule: Task | Rule) -> Task:
- if isinstance(task_or_rule, Rule):
- return task_or_rule()
- return task_or_rule
-
-
-def singleton(cls):
- cls.main = cls()
- return cls
-
-
-@singleton
-class Rules:
- """The registry of all rules created."""
-
- # Main registry
- main: "Rules"
-
- __slots__ = "rules"
-
- rules: dict[RuleKey, Rule]
-
- def __init__(self):
- self.rules = dict()
-
- def eval_task_key(self, task_key: TaskKey) -> Optional[Task]:
- rule_key, *arg_keys = task_key
- if rule_key not in self.rules:
- return None
- rule = self.rules[rule_key]
- args = []
- for arg in arg_keys:
- if isinstance(arg, tuple) and arg[0] not in self.rules:
- return None
- args.append(self.eval_task_key(arg) if isinstance(arg, tuple) else arg)
- return rule(*args)
-
- @staticmethod
- def nice_rule_fn_to_rule_fn(nice_rule_fn, fetch, task_key, store, *args):
- return nice_rule_fn(fetch, *args)
-
- def rule(self, rule_fn: NiceRuleFn) -> Rule:
- return self.register(
- self.hash_cache(
- Rule.new(
- functools.update_wrapper(
- functools.partial(Rules.nice_rule_fn_to_rule_fn, rule_fn),
- rule_fn,
- )
- )
- )
- )
-
- def rule_no_cache(self, rule_fn: NiceRuleFn) -> Rule:
- return self.register(
- Rule.new(
- functools.update_wrapper(
- functools.partial(Rules.nice_rule_fn_to_rule_fn, rule_fn),
- rule_fn,
- )
- )
- )
-
- def register(self, rule: Rule) -> Rule:
- self.rules[rule.rule_key] = rule
- return rule
-
- def hash_cache(self, rule: Rule) -> Rule:
- """Adds hash based caching to a rule
-
- Attempts to replay the rule by checking whether the hash of each input,
- as it would be fetched now, matches the hash recorded on a previous run.
-
- Currently, there is no cache eviction policy (all previous runs are stored forever).
-
- TODO: Implement some cache eviction.
- """
- rule.rule_fn = functools.update_wrapper(
- functools.partial(Rules.hash_cache_fn, self, rule.rule_fn),
- rule.rule_fn,
- )
- return rule
-
- @staticmethod
- async def track_fetch(fetch: Fetch, new_inputs: list, task_or_rule: Task | Rule):
- task = ensure_task(task_or_rule)
- result = await fetch(task)
- new_inputs.append((task.task_key, make_hash(result)))
- return result
-
- async def hash_cache_fn(
- self,
- inner_rule_fn: RuleFn,
- fetch: Fetch,
- task_key: TaskKey,
- store: "Store",
- *args,
- ):
- """Actual implementation of hash_cache"""
- if task_key in store.key_info:
- past_runs = store.key_info[task_key]
- output_value = store.key_value[task_key]
- possible_values = []
- for past_inputs, past_value in past_runs:
- for past_input_key, past_input_hash in past_inputs:
- input_task = self.eval_task_key(past_input_key)
- if not input_task:
- break
- current_input_value = await fetch(input_task)
- if make_hash(current_input_value) != past_input_hash:
- break
- else:
- if output_value == past_value:
- return past_value
- possible_values.append(past_value)
-
- if possible_values:
- return possible_values[0]
-
- new_inputs = []
-
- new_value = await inner_rule_fn(
- functools.partial(Rules.track_fetch, fetch, new_inputs),
- task_key,
- store,
- *args,
- )
- store.key_info[task_key].append((new_inputs, new_value))
- return new_value
-
-
-# Rules.main = Rules()
-rule = Rules.main.rule
-rule_no_cache = Rules.main.rule_no_cache
-register = Rules.main.register
-hash_cache = Rules.main.hash_cache
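
In practice @rule gave hash-cached rules while @rule_no_cache opted out; the cache is a constructive-trace flavour keyed on input hashes. A sketch reusing the hypothetical read rule, with markdown_to_html a hypothetical helper:

    @rule
    async def render(fetch, path: str) -> str:
        # Re-runs only when the hash of the fetched input differs from
        # every (inputs, value) trace stored for this task in the store.
        src = await fetch(read(path))
        return markdown_to_html(src)
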
-
-
-class Store:
- """Stores a mapping from tasks to their values."""
-
- __slots__ = "filename", "rules", "key_value", "key_info"
-
- @staticmethod
- def _fNone():
- return None
-
- def __init__(self, filename, rules):
- self.filename = filename
- self.rules = rules
-
- self.key_value = collections.defaultdict(Store._fNone)
- self.key_info = collections.defaultdict(list)
-
- try:
- with open(filename, "rb") as f:
- self.key_value, self.key_info = pickle.load(f)
- except Exception:
- # Store file missing or unreadable; start with an empty store.
- pass
-
- def save(self):
- with open(self.filename, "wb") as f:
- pickle.dump((self.key_value, self.key_info), f)
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self.save()
-
-
-class Detach:
- __slots__ = "_background_tasks"
-
- def __init__(self):
- self._background_tasks = set()
-
- def __call__(self, awaitable):
- if asyncio.coroutines.iscoroutine(awaitable):
- task = asyncio.create_task(awaitable)
- self._background_tasks.add(task)
- task.add_done_callback(self._background_tasks.discard)
- return task
- return awaitable
-
- async def wait(self):
- while self._background_tasks:
- t = self._background_tasks.pop()
- if not t.done():
- await t
-
-
-detach = Detach()
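
detach gives fire-and-forget background work with a final barrier; Build.__call__ below awaits detach.wait() before returning. A sketch (hypothetical rule):

    @rule_no_cache
    async def publish(fetch):
        # Start the sync without blocking this rule; the build still
        # waits for it at the detach.wait() barrier before exiting.
        detach(shell("rsync -a public/ remote:site/"))
        return "scheduled"
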
-
-
-class SuspendingScheduler:
- __slots__ = "store", "done", "waits"
- store: Store
- done: set[TaskKey]
- waits: dict[TaskKey, asyncio.Event]
-
- def __init__(self, store: Store):
- self.store = store
- self.done = set()
- self.waits = dict()
-
- def build(self, *tasks: Task):
- return asyncio.gather(*(self.fetch_once(task) for task in tasks))
-
- async def fetch_once(self, task_or_rule: Task | Rule):
- task = ensure_task(task_or_rule)
- task_key = task.task_key
- wait = None
- event = None
- if task_key in self.done:
- return self.store.key_value[task_key]
- if task_key in self.waits:
- wait = self.waits[task_key]
-
- if wait:
- await wait.wait()
- return self.store.key_value[task_key]
-
- event = self.waits[task_key] = asyncio.Event()
- try:
- self.store.key_value[task_key] = result = await task(
- self.fetch_once, self.store
- )
- except Exception:
- print(traceback.format_exc())
- event.set()
- self.store.key_value[task_key] = None
- return None
-
- self.done.add(task_key)
- event.set()
-
- return result
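
This suspension logic is what deduplicates diamond dependencies: the first fetch of a key parks an asyncio.Event, and concurrent fetches of the same key await the event instead of recomputing. Reusing the hypothetical read rule:

    @rule_no_cache
    async def diamond(fetch):
        # Both branches fetch the same task key; the rule body for
        # read("shared.txt") runs once, the second fetch suspends.
        left, right = await asyncio.gather(
            fetch(read("shared.txt")),
            fetch(read("shared.txt")),
        )
        return left + right
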
-
-
-class Build:
- __slots__ = "_store", "_scheduler"
-
- def __init__(self, filename, rules=Rules.main):
- self._store = Store(filename, rules)
- self._scheduler = SuspendingScheduler(self._store)
-
- async def __call__(self, *tasks: Task):
- result = await self.build(*tasks)
- await detach.wait()
- return result
-
- def build(self, *tasks: Task):
- return self._scheduler.build(*tasks)
-
- def __enter__(self):
- self._store.__enter__()
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self._store.__exit__(exc_type, exc_val, exc_tb)
-
-
-def build(*tasks, filename=".makedb", rules=Rules.main):
- with Build(filename, rules) as build:
- asyncio.run(build(*tasks))
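
The intended top-level usage, then (hypothetical all rule, reusing the render sketch above):

    @rule_no_cache
    async def all(fetch):
        await fetch(render("index.md"))

    build(all())  # opens .makedb, runs the graph, saves the store on exit
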
-
-
-class ShellResult(collections.namedtuple("ShellResult", "stdout stderr returncode")):
- __slots__ = ()
-
- @property
- def utf8stdout(self):
- return self.stdout.decode("utf-8")
-
- @property
- def utf8stderr(self):
- return self.stderr.decode("utf-8")
-
-
-EchoNothing = 0
-EchoStdout = 1
-EchoStderr = 2
-EchoAll = 3
-
-
-async def _exec_reader(istream, ostream, echo: int = EchoNothing):
- contents = b""
- async for chunk in istream:
- contents += chunk
- if echo:
- ostream.write(chunk)
- ostream.flush()
- return contents
-
-
-async def exec(
- program,
- *args,
- input: bytes | bytearray | memoryview | None = None,
- echo: int = EchoNothing,
-) -> ShellResult:
-
- proc = await asyncio.create_subprocess_exec(
- program,
- *args,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- )
-
- if input is not None:
- proc.stdin.write(input) # type: ignore
- _, stdout, stderr, returncode = await asyncio.gather(
- proc.stdin.drain(), # type: ignore
- _exec_reader(proc.stdout, sys.stdout.buffer, echo=echo & EchoStdout),
- _exec_reader(proc.stderr, sys.stderr.buffer, echo=echo & EchoStderr),
- proc.wait(),
- )
- else:
- stdout, stderr, returncode = await asyncio.gather(
- _exec_reader(proc.stdout, sys.stdout.buffer, echo=echo & EchoStdout),
- _exec_reader(proc.stderr, sys.stderr.buffer, echo=echo & EchoStderr),
- proc.wait(),
- )
-
- return ShellResult(stdout, stderr, returncode)
-
-
-async def shell(
- cmd,
- input: bytes | bytearray | memoryview | None = None,
- echo: int = EchoNothing,
-) -> ShellResult:
- proc = await asyncio.create_subprocess_shell(
- cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
- )
- stdout, stderr = await proc.communicate(input)
- if echo & EchoStdout:
- sys.stdout.buffer.write(stdout)
- sys.stdout.buffer.flush()
- if echo & EchoStderr:
- sys.stderr.buffer.write(stderr)
- sys.stderr.buffer.flush()
- return ShellResult(stdout, stderr, proc.returncode)
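
The Echo* constants form a bitmask, so either stream can be mirrored independently while both are still captured; inside a coroutine:

    res = await shell("ls -la", echo=EchoStderr)     # mirror stderr only
    git = await exec("git", "status", echo=EchoAll)  # mirror both streams
    print(res.utf8stdout)                            # captured regardless
    ok = git.returncode == 0
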
-
-
-def run_in_executor(f, *args, executor=None):
- return asyncio.get_running_loop().run_in_executor(executor, f, *args)
-
-
-async def async_main(globals, filename=".makedb", default_target="all"):
- targets = sys.argv[1:]
- if not targets:
- targets.append(default_target)
-
- with Build(filename) as build:
- await build(*(eval(target, globals=globals) for target in targets))
- return 0
-
-
-def main(globals, filename=".makedb", default_target="all"):
- targets = sys.argv[1:]
- if not targets:
- targets.append(default_target)
-
- with Build(filename) as build:
- asyncio.run(build(*(eval(target, globals=globals) for target in targets)))
- return 0
-
-
-# class AsyncWrapperSpec:
-# __slots__ = "async_methods", "async_subobjects"
-
-# def __init__(
-# self,
-# async_methods=set(),
-# async_subobjects=dict(),
-# ):
-# self.async_methods = set(async_methods)
-# self.async_subobjects = dict(async_subobjects)
-
-
-# class AsyncWrapper:
-# __slots__ = "_obj", "_spec", "_executor"
-
-# def __init__(self, obj, spec=AsyncWrapperSpec(), executor=None):
-# self._obj = obj
-# self._spec = spec
-# self._executor = executor
-
-# @staticmethod
-# def wrapper(f, executor, *args):
-# return run_in_executor(f, *args, executor=executor)
-
-# def __getattr__(self, attr):
-# if attr in self._spec.async_methods:
-# return functools.partial(
-# self.wrapper, getattr(self._obj, attr), self._executor
-# )
-# if attr in self._spec.async_subobjects:
-# return AsyncWrapper(
-# getattr(self._obj, attr),
-# spec=self._spec.async_subobjects[attr],
-# executor=self._executor,
-# )
-# return getattr(self._obj, attr)
-
-# async def __aenter__(self):
-# return AsyncWrapper(
-# await run_in_executor(self._obj.__enter__, executor=self._executor),
-# spec=self._spec,
-# )
-
-# async def __aexit__(self, exc_type, exc_val, exc_tb):
-# return await run_in_executor(
-# self._obj.__exit__, exc_type, exc_val, exc_tb, executor=self._executor
-# )
-
-# def __aiter__(self):
-# return AsyncWrapper(self._obj.__iter__(), spec=self._spec)
-
-# @staticmethod
-# def wrapped_next(obj):
-# try:
-# return True, next(obj)
-# except StopIteration:
-# return False, None
-
-# async def __anext__(self):
-# ok, res = await run_in_executor(
-# functools.partial(self.wrapped_next, self._obj), executor=self._executor
-# )
-# if not ok:
-# raise StopAsyncIteration
-# return res
-
-# @staticmethod
-# def wrapped_foreach(f, obj):
-# for chunk in obj:
-# f(chunk)
-
-# async def foreach(self, f):
-# await run_in_executor(
-# functools.partial(self.wrapped_foreach, f, self._obj),
-# executor=self._executor,
-# )
-
-
-# class AsyncIO(Protocol):
-# async def __aenter__(self) -> "AsyncIO": ...
-# async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: ...
-
-# async def close(self) -> None: ...
-# def fileno(self) -> int: ...
-# async def flush(self) -> None: ...
-# def isatty(self) -> bool: ...
-# def readable(self) -> bool: ...
-# async def readlines(self, hint: int = -1, /) -> list[bytes]: ...
-# async def seek(self, offset: int, whence: int = 0, /) -> int: ...
-# def seekable(self) -> bool: ...
-# async def tell(self) -> int: ...
-# async def truncate(self, size: int | None = None, /) -> int: ...
-# def writable(self) -> bool: ...
-# async def writelines(self, lines, /) -> None: ...
-# async def readline(self, size: int | None = -1, /) -> bytes: ...
-# @property
-# def closed(self) -> bool: ...
-# async def readall(self) -> bytes: ...
-# async def readinto(self, buffer, /) -> Any: ...
-# async def write(self, b, /) -> Any: ...
-# async def read(self, size: int = -1, /) -> Any: ...
-# def detach(self) -> "AsyncIO": ...
-# async def readinto1(self, buffer, /) -> int: ...
-# async def read1(self, size: int = -1, /) -> bytes: ...
-
-# mode: str
-# name: Any
-
-# @property
-# def closefd(self) -> bool: ...
-
-# raw: "AsyncIO"
-
-# async def peek(self, size: int = 0, /) -> bytes: ...
-
-# encoding: str
-# errors: str | None
-# newlines: str | tuple[str, ...] | None
-
-# def __aiter__(self) -> AsyncIterator[Any]: ...
-# async def __anext__(self) -> Any: ...
-
-# async def foreach(self, f) -> Any: ...
-
-
-# async def open_async(*args, executor=None) -> AsyncIO:
-# # List of methods: https://docs.python.org/3/library/io.html
-# async_methods = (
-# "close",
-# "detach",
-# "flush",
-# "peek",
-# "read",
-# "read1",
-# "readall",
-# "readinto",
-# "readinto1",
-# "readline",
-# "readlines",
-# "seek",
-# "tell",
-# "truncate",
-# "write",
-# "writelines",
-# )
-# return AsyncWrapper(
-# await run_in_executor(open, *args, executor=executor),
-# AsyncWrapperSpec(async_methods, {"buffer": AsyncWrapperSpec(async_methods)}),
-# ) # type: ignore
-
-
import json
import os
-async def our_main():
- @rule_no_cache
- def build_git_repo(
- fetch: Fetch,
- stagit_path: str,
- source_git_path: str,
- output_git_path: str,
- output_url: str,
- description: str,
- ):
- return shell(
- f"""
- SRC_PATH="$(realpath {source_git_path})"
-
- [ -d {output_git_path} ] || git init --bare {output_git_path}
- GIT_PATH="$(realpath {output_git_path})"
-
- mkdir -p public/{output_url}
- PUBLIC_PATH="$(realpath public/{output_url})"
-
- STAGIT_PATH="$(realpath {stagit_path})"
-
- rm -rf "$GIT_PATH"/hooks
-
- git -C "$SRC_PATH" push --no-recurse-submodules --mirror --force "$GIT_PATH"
- # HEAD is not updated by --mirror, because HEAD is not a ref.
- # Update it by hand
- cp "$(git -C "$SRC_PATH" rev-parse --path-format=absolute --git-path HEAD)" "$GIT_PATH"/HEAD
-
- git -C "$GIT_PATH" gc --no-detach --aggressive
- git -C "$GIT_PATH" update-server-info
-
- echo '{description}' > "$GIT_PATH"/description
- echo 'https://blog.grace.moe/{output_url}' > "$GIT_PATH"/url
-
- cp -a "$GIT_PATH" -T "$PUBLIC_PATH"
-
- echo + stagit {output_url}
- ( cd "$PUBLIC_PATH" &&
- "$STAGIT_PATH" \
- -u https://blog.grace.moe/{output_url} \
- "$GIT_PATH" &&
- echo '<meta http-equiv="refresh" content="0; url=log.html" />' > index.html
- )
- echo - stagit {output_url}
- """,
- echo=EchoAll,
- )
-
- @rule_no_cache
- async def rebuild(fetch: Fetch):
- await shell(
- """
- rm -rf public
- """
- )
- await asyncio.gather(
- shell("zine release", echo=EchoAll),
- fetch(
- build_git_repo(
- "stagit/stagit",
- ".",
- "git/blog.grace.moe",
- "git/blog.grace.moe",
- "Source for the blog blog.grace.moe",
- )
- ),
- fetch(
- build_git_repo(
- "stagit/stagit",
- "~/Documents/src/pymake",
- "git/pymake",
- "git/pymake",
- "A build system based on Build Systems à la Carte",
- )
- ),
- fetch(
- build_git_repo(
- "stagit/stagit",
- ".git/modules/stagit",
- "git/stagit",
- "git/stagit",
- "My personal fork of stagit https://codemadness.org/stagit.html",
- )
- ),
- )
- await shell(
- "cd public/git && ../../stagit/stagit-index blog.grace.moe pymake stagit > index.html",
- echo=EchoAll,
- )
-
+async def main():
STORAGENAME = "blog-grace-moe"
STORAGEPASSWORD = (
await shell("secret-tool lookup owner blog-grace-moe.b-cdn.net")
@@ -809,6 +19,18 @@ async def our_main():
PURGEURL = f"https://api.bunny.net/pullzone/{PULLZONEID}/purgeCache"
APICMD = f"curl -H 'AccessKey: {APIPASSWORD}' -s"
+ @once()
+ async def rebuild():
+ await shell(
+ """
+ rm -rf public
+ zine release
+ """,
+ echo=EchoAll,
+ )
+
+ @once()
+ @in_executor()
def hash_file(path):
with open(path, "rb") as f:
# print("+ hashing", path)
@@ -820,39 +42,39 @@ async def our_main():
bunny_sem = asyncio.Semaphore(80)
- @rule_no_cache
- async def contents(fetch: Fetch, path: str):
+ @once()
+ async def contents(path: str):
async with bunny_sem:
print("+++ download", path)
path_json = await shell(f"{STORAGECMD} '{STORAGEURL}/{path}/'")
print("--- download", path)
return json.loads(path_json.utf8stdout)
- @rule_no_cache
- async def cleanfile(fetch: Fetch, path: str):
+ @once()
+ async def cleanfile(path: str):
if not os.path.isfile(f"{LOCALPATH}/{path}"):
async with bunny_sem:
print("+++ cleanfile", path)
await shell(f"{STORAGECMD} -XDELETE '{STORAGEURL}/{path}'")
print("--- cleanfile", path)
- @rule_no_cache
- async def cleandir(fetch: Fetch, path: str):
+ @once()
+ async def cleandir(path: str):
if not os.path.isdir(f"{LOCALPATH}/{path}"):
async with bunny_sem:
print("+++ cleandir", path)
await shell(f"{STORAGECMD} -XDELETE '{STORAGEURL}/{path}/'")
print("--- cleandir", path)
- @rule_no_cache
- async def clean(fetch: Fetch, path: str):
- path_contents = await fetch(contents(path))
+ @once()
+ async def clean(path: str):
+ path_contents = await contents(path)
await asyncio.gather(
*(
(
- fetch(cleandir(path + "/" + ent["ObjectName"]))
+ (cleandir(path + "/" + ent["ObjectName"]))
if ent["IsDirectory"]
- else fetch(cleanfile(path + "/" + ent["ObjectName"]))
+ else (cleanfile(path + "/" + ent["ObjectName"]))
)
for ent in path_contents
if isinstance(ent, dict)
@@ -860,9 +82,9 @@ async def our_main():
)
# print("- clean", path)
- @rule_no_cache
- async def upload(fetch: Fetch, path: str):
- path_contents = await fetch(contents(path[: path.rfind("/")]))
+ @once()
+ async def upload(path: str):
+ path_contents = await contents(path[: path.rfind("/")])
bunny_checksum = None
if isinstance(path_contents, list):
@@ -877,7 +99,7 @@ async def our_main():
except StopIteration:
pass
- our_checksum = await run_in_executor(hash_file, f"{LOCALPATH}/{path}")
+ our_checksum = await hash_file(f"{LOCALPATH}/{path}")
if bunny_checksum != our_checksum:
async with bunny_sem:
@@ -888,27 +110,27 @@ async def our_main():
print("--- uploading", path)
# print("- upload", path)
- @rule_no_cache
- async def purge(fetch: Fetch):
+ @once()
+ async def purge():
async with bunny_sem:
print("+++ purge")
await shell(f"{APICMD} -XPOST '{PURGEURL}'")
print("--- purge")
- @rule_no_cache
- async def all(fetch: Fetch):
- await fetch(rebuild())
+ @once()
+ async def all():
+ await rebuild()
UPLOAD = (await shell(f"cd '{LOCALPATH}' && find . -type f")).utf8stdout
CLEAN = (await shell(f"cd '{LOCALPATH}' && find . -type d")).utf8stdout
await asyncio.gather(
- *(fetch(upload(path)) for path in UPLOAD.strip().split("\n")),
- *(fetch(clean(path)) for path in CLEAN.strip().split("\n")),
+ *((upload(path)) for path in UPLOAD.strip().split("\n")),
+ *((clean(path)) for path in CLEAN.strip().split("\n")),
)
- await fetch(purge())
+ await purge()
_ = all
- return await async_main(locals())
+ return await make_main(locals())
if __name__ == "__main__":
- exit(asyncio.run(our_main()))
+ exit(asyncio.run(main()))
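
Net effect of the changes to this file: the bespoke rule/fetch machinery is gone, and tasks are now plain async functions memoized with @once() from make_utils. A sketch of the new pattern (hypothetical task):

    @once()
    async def build_fonts():  # hypothetical task
        await shell("echo building fonts", echo=EchoAll)

    # Call sites simply await the function; @once() guarantees one run
    # per distinct argument tuple, replacing fetch(rule(...)).
    await asyncio.gather(build_fonts(), build_fonts())
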
diff --git a/make_utils.py b/make_utils.py
@@ -0,0 +1,141 @@
+from concurrent.futures import Executor
+from typing import Any, Awaitable, Callable, ParamSpec, TypeVar
+import asyncio
+import collections
+import functools
+import inspect
+import subprocess
+import sys
+import traceback
+
+
+def once():
+ def decorator(f):
+ futs: dict[tuple[Any, ...], asyncio.Future] = {}
+ sig = inspect.signature(f)
+
+ @functools.wraps(f)
+ async def wrapped(*args, **kwargs):
+ # Normalize the call so positional and keyword spellings of the
+ # same arguments produce one memoization key.
+ bound_args = sig.bind(*args, **kwargs)
+ bound_args.apply_defaults()
+ key = tuple(bound_args.arguments.values())
+ if key in futs:
+ return await futs[key]
+ # Park a Future before awaiting f so concurrent callers with the
+ # same key await the in-flight run instead of starting a duplicate.
+ futs[key] = asyncio.Future()
+ try:
+ res = await f(*args, **kwargs)
+ futs[key].set_result(res)
+ return res
+ except BaseException as e:
+ # Failures are memoized too: later awaits on this key re-raise
+ # the same exception.
+ traceback.print_exc()
+ futs[key].set_exception(e)
+ raise
+
+ return wrapped
+
+ return decorator
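
Usage sketch (hypothetical coroutine): concurrent calls with equal bound arguments collapse into a single execution, with later callers awaiting the parked Future.

    @once()
    async def fetch_page(url: str) -> bytes:  # hypothetical
        return (await shell(f"curl -s '{url}'")).stdout

    # One curl runs; the second caller awaits the first's Future.
    a, b = await asyncio.gather(
        fetch_page("https://example.invalid"),
        fetch_page(url="https://example.invalid"),  # same key after sig.bind
    )
    assert a is b
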
+
+
+def in_executor(executor: Executor | None = None):
+ Args = ParamSpec("Args")
+ T = TypeVar("T")
+
+ def decorator(f: Callable[Args, T]) -> Callable[Args, Awaitable[T]]:
+ @functools.wraps(f)
+ def wrapped(*args, **kwargs):
+ # run_in_executor forwards positional args only, so keyword
+ # args are bound with functools.partial first.
+ if kwargs:
+ return asyncio.get_running_loop().run_in_executor(
+ executor, functools.partial(f, **kwargs), *args
+ )
+ else:
+ return asyncio.get_running_loop().run_in_executor(executor, f, *args)
+
+ return wrapped
+
+ return decorator
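
Stacked under @once() (as with hash_file in make.py), this pushes a blocking function onto a thread pool and lets callers await it. A self-contained sketch (hypothetical function):

    import hashlib

    @in_executor()
    def sha256_file(path: str) -> str:  # hypothetical
        with open(path, "rb") as f:
            return hashlib.sha256(f.read()).hexdigest()

    digest = await sha256_file("public/index.html")  # runs off the event loop
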
+
+
+class ShellResult(collections.namedtuple("ShellResult", "stdout stderr returncode")):
+ __slots__ = ()
+
+ @property
+ def utf8stdout(self):
+ return self.stdout.decode("utf-8")
+
+ @property
+ def utf8stderr(self):
+ return self.stderr.decode("utf-8")
+
+
+EchoNothing = 0
+EchoStdout = 1
+EchoStderr = 2
+EchoAll = 3
+
+
+async def _exec_reader(istream, ostream, echo: int = EchoNothing):
+ contents = b""
+ async for chunk in istream:
+ contents += chunk
+ if echo:
+ ostream.write(chunk)
+ ostream.flush()
+ return contents
+
+
+async def exec(
+ program,
+ *args,
+ input: bytes | bytearray | memoryview | None = None,
+ echo: int = EchoNothing,
+) -> ShellResult:
+
+ proc = await asyncio.create_subprocess_exec(
+ program,
+ *args,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ )
+
+ if input is not None:
+ proc.stdin.write(input) # type: ignore
+ _, stdout, stderr, returncode = await asyncio.gather(
+ proc.stdin.drain(), # type: ignore
+ _exec_reader(proc.stdout, sys.stdout.buffer, echo=echo & EchoStdout),
+ _exec_reader(proc.stderr, sys.stderr.buffer, echo=echo & EchoStderr),
+ proc.wait(),
+ )
+ else:
+ stdout, stderr, returncode = await asyncio.gather(
+ _exec_reader(proc.stdout, sys.stdout.buffer, echo=echo & EchoStdout),
+ _exec_reader(proc.stderr, sys.stderr.buffer, echo=echo & EchoStderr),
+ proc.wait(),
+ )
+
+ return ShellResult(stdout, stderr, returncode)
+
+
+async def shell(
+ cmd,
+ input: bytes | bytearray | memoryview | None = None,
+ echo: int = EchoNothing,
+) -> ShellResult:
+ proc = await asyncio.create_subprocess_shell(
+ cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+ )
+ stdout, stderr = await proc.communicate(input)
+ if echo & EchoStdout:
+ sys.stdout.buffer.write(stdout)
+ sys.stdout.buffer.flush()
+ if echo & EchoStderr:
+ sys.stderr.buffer.write(stderr)
+ sys.stderr.buffer.flush()
+ return ShellResult(stdout, stderr, proc.returncode)
+
+
+async def make_main(globals, default_target="all()"):
+ # Each command-line argument is evaluated as a Python expression in
+ # the caller's namespace (keyword globals= on eval needs Python 3.13+).
+ targets = sys.argv[1:]
+ if not targets:
+ targets.append(default_target)
+ await asyncio.gather(*(eval(target, globals=globals) for target in targets))
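
So each command-line argument is an arbitrary Python expression evaluated against the namespace main() passes in, defaulting to all():

    python make.py                                      # awaits all()
    python make.py "upload('./index.html')" "purge()"   # explicit targets
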
diff --git a/stagit b/stagit
@@ -1 +0,0 @@
-Subproject commit 96cbcdf6e6f2e5e572c8b01a6990151462cb4b00
diff --git a/zine.ziggy b/zine.ziggy
@@ -6,6 +6,10 @@ Site {
.assets_dir_path = "assets",
.static_assets = [
"docs/POPL-2025-SRC-Grace-Tan-Extended-Abstract.pdf",
+
+ "404.html",
+ "css/AtkinsonHyperlegibleNext.css",
+ "fonts/AtkinsonHyperlegibleNextVF-Variable.woff2",
"icons/android-chrome-192x192.png",
"icons/android-chrome-512x512.png",
"icons/apple-touch-icon.png",
@@ -16,8 +20,6 @@ Site {
"icons/mstile-150x150.png",
"icons/safari-pinned-tab.svg",
"icons/site.webmanifest",
- "fonts/AtkinsonHyperlegibleNextVF-Variable.woff2",
"js/fontfaceobserver.standalone.js",
- "css/AtkinsonHyperlegibleNext.css",
],
}