git.grace.moe

Source for the git site git.grace.moe
git clone https://git.grace.moe/git.grace.moe

commit 2b5ee7e03c4c07f39de11b31bcdfc2c345510a47
parent ec64b27836024d0583dca27782b0094ce104beb5
Author: gracefu <81774659+gracefuu@users.noreply.github.com>
Date:   Fri,  2 May 2025 22:09:00 +0800

Use the copy of make3 in another repo; sorry if you're cloning from elsewhere

Diffstat:
M make.py | 2 +-
D make3/__init__.py | 6 ------
D make3/exec.py | 93 -------------------------------------------------------------------------------
D make3/helpers.py | 45 ---------------------------------------------
D make3/io.py | 34 ----------------------------------
D make3/main.py | 11 -----------
D make3/once.py | 30 ------------------------------
D make3/rebuild.py | 246 -------------------------------------------------------------------------------
A pyproject.toml | 10 ++++++++++
A uv.lock | 19 +++++++++++++++++++
10 files changed, 30 insertions(+), 466 deletions(-)

diff --git a/make.py b/make.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env -S uv run
 from make3 import EchoAll, file_hash, make_main, once, shell
 import asyncio
 import http.client
diff --git a/make3/__init__.py b/make3/__init__.py
@@ -1,6 +0,0 @@
-from .exec import *
-from .helpers import *
-from .io import *
-from .main import *
-from .once import *
-from .rebuild import *
diff --git a/make3/exec.py b/make3/exec.py
@@ -1,93 +0,0 @@
-from asyncio import create_subprocess_exec, create_subprocess_shell, gather
-from asyncio.streams import StreamReader, StreamWriter
-from asyncio.subprocess import Process, PIPE
-from collections import namedtuple
-import sys
-
-
-class ShellResult(namedtuple("ShellResult", "stdout stderr returncode")):
-    __slots__ = ()
-
-    @property
-    def utf8stdout(self):
-        return self.stdout.decode("utf-8")
-
-    @property
-    def utf8stderr(self):
-        return self.stderr.decode("utf-8")
-
-
-EchoNothing = 0
-EchoStdout = 1
-EchoStderr = 2
-EchoAll = 3
-
-
-async def _exec_writer(
-    proc: Process,
-    ostream: StreamWriter,
-    input: bytes | bytearray | memoryview | None = None,
-):
-    if input is not None:
-        ostream.write(input)
-        await ostream.drain()
-    return await proc.wait()
-
-
-async def _exec_reader(istream: StreamReader, ostream=None):
-    contents = b""
-    while not istream.at_eof():
-        chunk = await istream.read(4096 * 16)
-        contents += chunk
-        if ostream:
-            ostream.write(chunk)
-            ostream.flush()
-    return contents
-
-
-async def communicate_echo_wait(
-    proc: Process,
-    input: bytes | bytearray | memoryview | None = None,
-    echo: int = EchoNothing,
-) -> ShellResult:
-    stdout, stderr, returncode = await gather(
-        _exec_reader(
-            proc.stdout,  # type: ignore
-            sys.stdout.buffer if echo & EchoStdout else None,
-        ),
-        _exec_reader(
-            proc.stderr,  # type: ignore
-            sys.stderr.buffer if echo & EchoStderr else None,
-        ),
-        _exec_writer(
-            proc,
-            proc.stdin,  # type: ignore
-            input,
-        ),
-    )
-    return ShellResult(stdout, stderr, returncode)
-
-
-async def exec(
-    prog,
-    *args,
-    input: bytes | bytearray | memoryview | None = None,
-    echo: int = EchoNothing,
-) -> ShellResult:
-    return await communicate_echo_wait(
-        await create_subprocess_exec(prog, *args, stdin=PIPE, stdout=PIPE, stderr=PIPE),
-        input,
-        echo,
-    )
-
-
-async def shell(
-    cmd,
-    input: bytes | bytearray | memoryview | None = None,
-    echo: int = EchoNothing,
-) -> ShellResult:
-    return await communicate_echo_wait(
-        await create_subprocess_shell(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE),
-        input,
-        echo,
-    )
diff --git a/make3/helpers.py b/make3/helpers.py
@@ -1,45 +0,0 @@
-from .io import open
-from .once import once
-from .rebuild import cache_conditionally, rerun_if_changed, rerun_always
-import hashlib
-import os
-
-
-async def file_modtime(f: int | str | bytes | os.PathLike[str] | os.PathLike[bytes]):
-    return os.stat(f).st_mtime_ns
-
-
-@once()
-@cache_conditionally(lambda f, *args: (f.name if isinstance(f, open) else f, *args))
-async def file_hash(f: open | bytes | str, skip_if_modtime_matches=True):
-    if isinstance(f, bytes) or isinstance(f, str):
-        with open(f, "rb") as _f:
-            if skip_if_modtime_matches:
-                rerun_if_changed()(await file_modtime(_f.fileno()))
-            else:
-                rerun_always()
-            h = hashlib.sha256()
-            while True:
-                chunk = _f.read1()
-                if chunk:
-                    h.update(chunk)
-                else:
-                    break
-            d = h.hexdigest()
-            # print("hash", f.name, d)
-            return d
-    else:
-        if skip_if_modtime_matches:
-            rerun_if_changed()(await file_modtime(f.fileno()))
-        else:
-            rerun_always()
-        h = hashlib.sha256()
-        while True:
-            chunk = f.read1()
-            if chunk:
-                h.update(chunk)
-            else:
-                break
-        d = h.hexdigest()
-        # print("hash", f.name, d)
-        return d
diff --git a/make3/io.py b/make3/io.py
@@ -1,34 +0,0 @@
-_open = open
-
-
-class open:
-    def __init__(self, *args, **kwargs):
-        self.args = args
-        self.kwargs = kwargs
-        self.entered = False
-        self.file = _open(*args, **kwargs)
-
-    @staticmethod
-    def __open_seek(pos, entered, args, kwargs):
-        f = open(*args, **kwargs)
-        if entered:
-            f.__enter__()
-        f.seek(pos)
-        return f
-
-    def __getattr__(self, attr):
-        return getattr(self.file, attr)
-
-    def __iter__(self):
-        return iter(self.file)
-
-    def __enter__(self):
-        self.file.__enter__()
-        self.entered = True
-        return self
-
-    def __exit__(self, ty, exc, tb):
-        self.file.__exit__(ty, exc, tb)
-
-    def __reduce__(self):
-        return (self.__open_seek, (self.tell(), self.entered, self.args, self.kwargs))
diff --git a/make3/main.py b/make3/main.py
@@ -1,11 +0,0 @@
-from .rebuild import Rerunner
-from asyncio import gather
-import sys
-
-
-async def make_main(globals, default_target="all()"):
-    targets = sys.argv[1:]
-    if not targets:
-        targets.append(default_target)
-    with Rerunner():
-        await gather(*(eval(target, globals=globals) for target in targets))
diff --git a/make3/once.py b/make3/once.py
@@ -1,30 +0,0 @@
-from typing import Any
-from asyncio import Future
-from functools import wraps
-from inspect import signature
-
-
-def once():
-    def decorator(f):
-        futs: dict[tuple[Any, ...], Future] = {}
-        sig = signature(f)
-
-        @wraps(f)
-        async def wrapped(*args, **kwargs):
-            bound_args = sig.bind(*args, **kwargs)
-            bound_args.apply_defaults()
-            key = tuple(bound_args.arguments.values())
-            if key in futs:
-                return await futs[key]
-            futs[key] = Future()
-            try:
-                res = await f(*args, **kwargs)
-                futs[key].set_result(res)
-                return res
-            except BaseException as e:
-                futs[key].set_exception(e)
-                raise
-
-        return wrapped
-
-    return decorator
diff --git a/make3/rebuild.py b/make3/rebuild.py
@@ -1,246 +0,0 @@
-from asyncio import create_task
-from contextvars import ContextVar, copy_context
-from copyreg import dispatch_table
-from functools import update_wrapper, wraps
-from importlib import import_module
-from inspect import getmodule
-from io import BytesIO
-from pickle import Pickler
-from types import CellType, CodeType, FunctionType
-from typing import Any
-import ast
-import inspect
-import pickle
-
-
-def pickle_code_type(code: CodeType):
-    return (
-        unpickle_code_type,
-        (
-            code.co_argcount,
-            code.co_posonlyargcount,
-            code.co_kwonlyargcount,
-            code.co_nlocals,
-            code.co_stacksize,
-            code.co_flags,
-            code.co_code,
-            code.co_consts,
-            code.co_names,
-            code.co_varnames,
-            code.co_filename,
-            code.co_name,
-            code.co_qualname,
-            code.co_firstlineno,
-            code.co_linetable,
-            code.co_exceptiontable,
-            code.co_freevars,
-            code.co_cellvars,
-        ),
-    )
-
-
-def unpickle_code_type(*args):
-    return CodeType(*args)
-
-
-def pickle_cell_type(cell: CellType):
-    return (unpickle_cell_type, (cell.cell_contents,))
-
-
-def unpickle_cell_type(*args):
-    return CellType(*args)
-
-
-def pickle_function_type(f: FunctionType):
-    mod = getmodule(f)
-    return (
-        unpickle_function_type,
-        (
-            f.__code__,
-            mod.__name__ if mod is not None else None,
-            (
-                tuple(CellType(cell.cell_contents) for cell in f.__closure__)
-                if f.__closure__
-                else None
-            ),
-        ),
-    )
-
-
-def unpickle_function_type(code, mod_name, closure):
-    return FunctionType(code, globals=import_module(mod_name).__dict__, closure=closure)
-
-
-class FunctionPickler(Pickler):
-    dispatch_table = dispatch_table.copy()
-    dispatch_table[CodeType] = pickle_code_type
-    dispatch_table[CellType] = pickle_cell_type
-
-    def reducer_override(self, obj):  # type: ignore
-        if type(obj) is not FunctionType:
-            return NotImplemented
-        obj_mod = getmodule(obj)
-        if obj_mod is None:
-            return NotImplemented
-        if obj.__name__ in dir(obj_mod):
-            return NotImplemented
-        return pickle_function_type(obj)
-
-
-def pickle_with(pickler_cls: type, obj: Any) -> bytes:
-    i = BytesIO()
-    pickler_cls(i).dump(obj)
-    i.seek(0)
-    return i.read()
-
-
-rerun_db_var: ContextVar[dict] = ContextVar("rerun_db")
-rerun_changes_var: ContextVar[list[tuple[Any, bytes]]] = ContextVar("rerun_changes")
-
-
-async def with_rerun_context(rerun_changes, f, /, *args, **kwargs):
-    rerun_changes_var.set(rerun_changes)
-    return await f(*args, **kwargs)
-
-
-def rewrite_rerun_if_changed(frame=None):
-    def decorator(fn):
-        s = inspect.getsource(fn).splitlines()
-        i = 0
-        while not s[i].startswith("async def"):
-            i += 1
-        s = s[i:]
-        s = "\n".join(s)
-        a = ast.parse(s).body[0]
-
-        class RewriteCalls(ast.NodeTransformer):
-            def visit_Expr(self, node: ast.Expr):
-                if (
-                    isinstance(node.value, ast.Call)
-                    and isinstance(node.value.func, ast.Call)
-                    and isinstance(node.value.func.func, ast.Name)
-                    and node.value.func.func.id == "rerun_if_changed"
-                ):
-                    if len(node.value.func.args) == 0:
-                        node.value.func.args.append(node.value.args[0])
-                    out = ast.AsyncFunctionDef(
-                        "_",
-                        ast.arguments(),
-                        [ast.Return(node.value.args[0])],
-                        [node.value.func],
-                    )
-                    return out
-                return node
-
-        a = ast.fix_missing_locations(RewriteCalls().visit(a))
-        # print(ast.unparse(a))
-        frame_ = frame if frame else inspect.currentframe().f_back  # type: ignore
-        exec(ast.unparse(a), frame_.f_globals, frame_.f_locals, closure=fn.__closure__)  # type: ignore
-        fn_ = list(frame_.f_locals.values())[-1]  # type: ignore
-
-        fn_ = update_wrapper(fn_, fn)
-        return fn_
-
-    return decorator
-
-
-class _RunLaterNow: ...
-
-
-def rerun_if_changed(now: Any = _RunLaterNow, *, pickler_cls=FunctionPickler):
-    def decorator(later):
-        later_pkl = pickle_with(pickler_cls, later)
-        if now is _RunLaterNow:
-            raise RuntimeError(
-                "Should have been preprocessed away by the cache_conditionally macro"
-            )
-        else:
-            rerun_changes_var.get().append((now, later_pkl))
-
-    return decorator
-
-
-def rerun_if(f):
-    @rerun_if_changed(False)
-    async def _():
-        return bool(await f())
-
-
-def rerun_always():
-    @rerun_if_changed(False)
-    async def _():
-        return True
-
-
-def cache_conditionally(
-    keys_fn=lambda *args, **kwargs: (args, tuple(sorted(kwargs.items()))),
-    store_fn=lambda result, /, *_, **__: result,
-    load_fn=lambda cached_result, /, *_, **__: cached_result,
-    rewrite=True,
-):
-    def decorator(fn):
-        if rewrite:
-            fn = rewrite_rerun_if_changed(inspect.currentframe().f_back)(fn)  # type: ignore
-
-        @wraps(fn)
-        async def wrapped(*args, **kwargs):
-            db = rerun_db_var.get()
-            keys = keys_fn(*args, **kwargs)
-            db_key = ("track", fn.__qualname__, keys)
-            if db_key + ("result",) in db:
-                if db_key + ("rerun_changes",) not in db:
-                    old_rerun_changes = []
-                    db[db_key + ("rerun_changes",)] = old_rerun_changes
-                else:
-                    old_rerun_changes = db[db_key + ("rerun_changes",)]
-                for old_val, f_pkl in old_rerun_changes:
-                    try:
-                        f_unpkled = pickle.loads(f_pkl)
-                        val = await f_unpkled()
-                        if old_val != val:
-                            break
-                    except BaseException:
-                        break
-                else:
-                    return load_fn(db[db_key + ("result",)], *args, **kwargs)
-
-            rerun_changes = []
-            result = await create_task(
-                with_rerun_context(rerun_changes, fn, *args, **kwargs),
-                context=copy_context(),
-            )
-            db[db_key + ("rerun_changes",)] = rerun_changes
-            db[db_key + ("result",)] = store_fn(result, *args, **kwargs)
-            return result

-        return wrapped
-
-    return decorator
-
-
-class Rerunner:
-    def __init__(self, db_filename=b".makedb", db_file=None):
-        if db_file:
-            self.db_file = db_file
-        else:
-            self.db_file = open(db_filename, "a+b")
-        self.db_file.seek(0)
-
-    def __enter__(self):
-        self.db_file.__enter__()
-        try:
-            self.db = pickle.load(self.db_file)
-        except pickle.PickleError:
-            self.db = dict()
-        except EOFError:
-            self.db = dict()
-        self.var_tok = rerun_db_var.set(self.db)
-        return self
-
-    def __exit__(self, ty, exc, tb):
-        rerun_db_var.reset(self.var_tok)
-        if exc is None:
-            self.db_file.seek(0)
-            self.db_file.truncate(0)
-            pickle.dump(self.db, self.db_file)
-        self.db_file.__exit__(ty, exc, tb)
diff --git a/pyproject.toml b/pyproject.toml
@@ -0,0 +1,10 @@
+[project]
+name = "git-grace-moe"
+version = "0.1.0"
+requires-python = ">=3.13"
+dependencies = [
+    "make",
+]
+
+[tool.uv.sources]
+make = { path = "../../src/pymake" }
diff --git a/uv.lock b/uv.lock
@@ -0,0 +1,19 @@
+version = 1
+revision = 2
+requires-python = ">=3.13"
+
+[[package]]
+name = "git-grace-moe"
+version = "0.1.0"
+source = { virtual = "." }
+dependencies = [
+    { name = "make" },
+]
+
+[package.metadata]
+requires-dist = [{ name = "make", directory = "../../src/pymake" }]
+
+[[package]]
+name = "make"
+version = "0.0.1"
+source = { directory = "../../src/pymake" }