Peder Bergebakken Sundt 2023-10-03 19:01:10 +02:00
parent 5132464026
commit 552dbef0b4
2 changed files with 34 additions and 24 deletions

flake.nix

@@ -11,17 +11,20 @@
     nixpkgs,
     ... } @ inputs:
   let
-    systems = [
-      "x86_64-linux"
-      "aarch64-linux"
-    ];
-    forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f {
+    forSystems = systems: f: nixpkgs.lib.genAttrs systems (system: f {
       inherit system;
       pkgs = nixpkgs.legacyPackages.${system};
       lib = nixpkgs.legacyPackages.${system}.lib;
     });
+    forAllSystems = forSystems [
+      "x86_64-linux"
+      "aarch64-linux"
+    ];
   in {
     inherit inputs;
+    packages = forAllSystems ({pkgs, ...}: {
+    });
     devShells = forAllSystems ({pkgs, ...}: {
       default = pkgs.mkShell {
         packages = with pkgs; [
@@ -35,7 +38,8 @@
           httpx
           rich
           typer
-          dataset
+          #dataset
+          diskcache
           python-lsp-server
         ]))
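
Note on the refactor above: nixpkgs.lib.genAttrs turns a list of system
strings into an attrset keyed by system, and the commit curries the helper so
the systems list becomes a parameter. A rough Python analogue of what the new
forSystems/forAllSystems pair computes (illustrative sketch only; the names
mirror the Nix code):

    # for_systems(systems) returns a function that maps f over the systems,
    # building {system: f(args)} -- the shape genAttrs produces in Nix.
    def for_systems(systems):
        def apply(f):
            return {system: f({"system": system}) for system in systems}
        return apply

    for_all_systems = for_systems(["x86_64-linux", "aarch64-linux"])

    dev_shells = for_all_systems(lambda args: f"shell for {args['system']}")
    print(dev_shells)  # {'x86_64-linux': 'shell for x86_64-linux', ...}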

main.py

@@ -3,8 +3,9 @@ from concurrent.futures import ThreadPoolExecutor, as_completed
 from enum import Enum
 from pathlib import Path
 from typing import Literal, TypedDict
-from functool import lru_cache
-import dataset
+from functools import lru_cache
+import diskcache
+#import dataset
 import httpx
 import json
 import os
@@ -18,9 +19,7 @@ import typer

 HERE = Path(__file__).parent.resolve()

-#CACHE = dataset.connect(f"sqlite:///{HERE}")
-CACHE = dataset.connect("sqlite:///ppm-cache.db")
-CACHE.create_table("packages", primary_id="name")
+persistent_cache = diskcache.FanoutCache(Path(__file__).parent / ".cache")

 def run(cmd: list[str] | str, **kw) -> subprocess.CompletedProcess:
     if isinstance(cmd, str):
@@ -67,8 +66,12 @@ class Package(TypedDict):

 class PackageDetailed(Package):
     versions : dict[str, PackageMeta]

+@persistent_cache.memoize()
 def api_get(path, kw={}, check=True, **params) -> httpx.Response:
-    resp = httpx.get(f"{API}/{path}", params=params, timeout=30, **kw)
+    url = f"{API}/{path}"
+    print(f"GET {url!r}...", file=sys.stderr)
+    resp = httpx.get(url, params=params, timeout=30, **kw)
+    print(f"GET {url!r}, {resp.is_success = }", file=sys.stderr)
     if check: resp.raise_for_status()
     return resp
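
Note on the caching change: the dataset/SQLite table is replaced by diskcache,
whose FanoutCache persists memoized results on disk, so repeated api_get calls
with the same arguments are served from the .cache directory across runs. A
minimal sketch of the pattern, using a hypothetical function in place of
api_get:

    import diskcache

    cache = diskcache.FanoutCache(".cache")  # persistent on-disk cache

    @cache.memoize()
    def slow_lookup(key: str) -> str:
        print(f"computing {key!r}...")  # printed only on a cache miss
        return key.upper()

    print(slow_lookup("httpx"))  # first call computes and stores the result
    print(slow_lookup("httpx"))  # later calls, even in new runs, hit the cache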
@@ -122,7 +125,7 @@ def mk_derivation(
     package : Package,
     version : None | str = None,
     format : bool = True,
-    dir : Path = HERE / "pkgs",
+    workdir : Path = HERE / "pkgs",
     call_package : bool = True,
 ) -> str:
     #url = get_tarball(package.name, package["metadata"]["version"], url_only=True)
@@ -163,8 +166,8 @@
         is_yarn = True
     else:
-        (dir / "lockfiles").mkdir(parents=True, exist_ok=True)
-        lock_path = dir / "lockfiles" / f"{name}.json"
+        (workdir / "lockfiles").mkdir(parents=True, exist_ok=True)
+        lock_path = workdir / "lockfiles" / f"{name}-{version}.json"

         # TODO: somehow sandbox this
         if not lock_path.is_file():
@@ -174,7 +177,7 @@
             run(["npm", "install", "--package-lock-only"], cwd=tmp, stdout=None)
             shutil.move(f"{tmp}/package-lock.json", lock_path)
-        extra.append(f'postPatch = "ln -s ${{./lockfiles/{name}.json}} ./package-lock.json";')
+        extra.append(f'postPatch = "ln -s ${{./lockfiles/{name}-{version}.json}} ./package-lock.json";')

         #assert not is_yarn
         if is_yarn:
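
Note: the lockfile is now keyed by both name and version, so bumping a package
version regenerates package-lock.json instead of reusing a stale one. The path
computation, shown with hypothetical values:

    from pathlib import Path

    workdir = Path("pkgs")
    name, version = "example-package", "1.2.3"  # hypothetical
    lock_path = workdir / "lockfiles" / f"{name}-{version}.json"
    print(lock_path)  # pkgs/lockfiles/example-package-1.2.3.json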
@@ -196,7 +199,7 @@
     {' '.join(extra)}
     nativeBuildInputs = [ python3 ]; # node-gyp
     npmFlags = [ "--legacy-peer-deps" ];
-    ELECTRON_SKIP_BINARY_DOWNLOAD = "1";
+    # ELECTRON_SKIP_BINARY_DOWNLOAD = "1";
     NODE_OPTIONS = "--no-experimental-fetch"; # https://github.com/parcel-bundler/parcel/issues/8005
     meta = {{
       homepage = "https://web.pulsar-edit.dev/packages/{name}";
@@ -264,21 +267,24 @@
 @app.command()
 def crawl(pages: int = 10, j: int = 1):
+    # TODO: have it populate a cache
+    # TODO: make the getters use the cache
+    raise NotImplementedError
     with ThreadPoolExecutor(max_workers=j or None) as e:
-        futures = [e.submit(search_packages, page=page) for page in range(pages)]
-        #futures += [e.submit(search_themes, page=page) for page in range(pages)]
+        futures = []
+        for page in range(pages):
+            @futures.append
+            @e.submit
+            def future(page=page):
+                return search_packages(page=page)
+                #return search_themes(page=page)
         for future in as_completed(futures):
             for package in future.result():
-                print(package.name)
+                print(package["name"], file=sys.stderr)
                 print(json.dumps(package))

 @app.command()
 def drv(name: str, version: str | None = None, dir: Path = HERE / "pkgs"):
     package = get_package(name)
-    expr = mk_derivation(package, dir=dir)
+    expr = mk_derivation(package, workdir=dir)
     print(expr)
     try:
         out_path = run([
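
Note on the crawl() rewrite above: the stacked decorators are plain function
application, so @e.submit schedules the function on the executor and
@futures.append collects the returned Future (append returns None, which
harmlessly rebinds the local name). A self-contained sketch of the trick, with
fetch standing in for search_packages:

    from concurrent.futures import ThreadPoolExecutor, as_completed

    def fetch(page: int) -> str:  # stand-in for search_packages
        return f"results for page {page}"

    futures = []
    with ThreadPoolExecutor(max_workers=4) as e:
        for page in range(3):
            @futures.append   # futures.append(Future) -> None
            @e.submit         # e.submit(fn) -> Future
            def job(page=page):  # default arg freezes the loop variable
                return fetch(page)
        for f in as_completed(futures):
            print(f.result())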