Initial commit
commit 5132464026

.envrc
@@ -0,0 +1,8 @@
#!/usr/bin/env bash
use flake
if command -v gh >/dev/null && gh auth token >/dev/null; then
    export NIX_CONFIG="access-tokens = github.com=$(gh auth token)"
else
    >&2 echo "WARNING: You have no github token configured."
    >&2 echo "WARNING: consider running 'gh auth login' then 'direnv reload'"
fi
.gitignore
@@ -0,0 +1,3 @@
.direnv
result
result-*
flake.lock
@@ -0,0 +1,27 @@
{
  "nodes": {
    "nixpkgs": {
      "locked": {
        "lastModified": 1673754118,
        "narHash": "sha256-eJDQJ/adcyEtnocOCbFQPYa53z/axl84SycVheNTkU8=",
        "owner": "winterqt",
        "repo": "nixpkgs",
        "rev": "b889893300525688c08ff43446f823bc032e798c",
        "type": "github"
      },
      "original": {
        "owner": "winterqt",
        "ref": "build-yarn-package",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "nixpkgs": "nixpkgs"
      }
    }
  },
  "root": "root",
  "version": 7
}
flake.nix
@@ -0,0 +1,49 @@
{
  description = "Pulsar Editor packages";

  # https://github.com/NixOS/nixpkgs/pull/210814
  inputs.nixpkgs.url = "github:winterqt/nixpkgs/build-yarn-package";

  # TODO: consider https://github.com/serokell/nix-npm-buildpackage

  outputs = {
    self,
    nixpkgs,
    ... } @ inputs:
  let
    systems = [
      "x86_64-linux"
      "aarch64-linux"
    ];
    forAllSystems = f: nixpkgs.lib.genAttrs systems (system: f {
      inherit system;
      pkgs = nixpkgs.legacyPackages.${system};
      lib = nixpkgs.legacyPackages.${system}.lib;
    });
  in {
    inherit inputs;
    devShells = forAllSystems ({pkgs, ...}: {
      default = pkgs.mkShell {
        packages = with pkgs; [
          nurl
          prefetch-npm-deps
          prefetch-yarn-deps
          nodejs
          #nix-prefetch

          (python3.withPackages (ps: with ps; [
            httpx
            rich
            typer
            dataset
            python-lsp-server
          ]))

          #alejandra
          nixfmt
        ];
        NIX_PATH = "nixpkgs=${nixpkgs.outPath}";
      };
    });
  };
}
@@ -0,0 +1,314 @@
#!/usr/bin/env python3
from concurrent.futures import ThreadPoolExecutor, as_completed
from enum import Enum
from pathlib import Path
from typing import Literal, TypedDict
from functools import lru_cache
import dataset
import httpx
import json
import os
import rich
import shlex
import shutil
import subprocess
import sys
import tempfile
import typer

HERE = Path(__file__).parent.resolve()

#CACHE = dataset.connect(f"sqlite:///{HERE}")
CACHE = dataset.connect("sqlite:///ppm-cache.db")
CACHE.create_table("packages", primary_id="name")

def run(cmd: list[str] | str, **kw) -> subprocess.CompletedProcess:
    if isinstance(cmd, str):
        cmd = shlex.split(cmd)
    if __debug__:
        print("+", *(shlex.quote(str(i)) for i in cmd), file=sys.stderr)
    kw = dict(check=True, stdout=subprocess.PIPE, text=True) | kw
    return subprocess.run(list(map(str, cmd)), **kw)

def run_nix(nix: str, *a):
    return run(["nix", "eval", *a, "--expr", nix, "--raw"]).stdout

def run_nixpkgs(nix: str):
    return run_nix(f"with (import <nixpkgs> {{}}); ({nix})", "--impure")

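# Rough examples of what these helpers yield (assumes a `nix` with the nix-command feature
# enabled and a resolvable <nixpkgs>, e.g. via the NIX_PATH set in the dev shell):
#   run_nix('"hello"')                      -> "hello"
#   run_nixpkgs("lib.licenses.mit.spdxId")  -> "MIT"
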
# === API ===

# https://api.pulsar-edit.dev/swagger-ui/
API = "https://api.pulsar-edit.dev/api"

class PackageMeta(TypedDict):
    name        : str
    main        : str
    description : str | None
    repository  : str
    keywords    : list[str]
    license     : None | str
    version     : str
    #engines    : dict[str, str]
    #theme      : None | Literal["syntax", "ui"]
    #...

class PackageRepository(TypedDict):
    type : Literal["git"]
    url  : str

class Package(TypedDict):
    name             : str
    readme           : str
    metadata         : PackageMeta
    repository       : PackageRepository
    downloads        : None | int  # in reality, often str. Thanks node!
    stargazers_count : None | int  # in reality, often str. Thanks node!
    releases         : None | dict[str, str]  # commonly {"latest": "1.2.3"}

class PackageDetailed(Package):
    versions : dict[str, PackageMeta]

def api_get(path, kw={}, check=True, **params) -> httpx.Response:
    resp = httpx.get(f"{API}/{path}", params=params, timeout=30, **kw)
    if check: resp.raise_for_status()
    return resp

Sorting = Literal["downloads", "created_at", "updated_at", "stars"]
Direction = Literal["desc", "asc"]

def get_featured_packages() -> list[Package]:
    return api_get("packages/featured").json()

def get_featured_themes() -> list[Package]:
    return api_get("themes/featured").json()

def search_packages(query: str = "", page: int = 1, sort: Sorting = "updated_at", direction: Direction = "desc") -> list[Package]:
    return api_get("packages/search", q=query, page=page, sort=sort, direction=direction).json()

def search_themes(query: str = "", page: int = 1, sort: Sorting = "updated_at", direction: Direction = "desc") -> list[Package]:
    return api_get("themes/search", q=query, page=page, sort=sort, direction=direction).json()

def get_package(name: str) -> PackageDetailed:
    return api_get(f"packages/{name}").json()

def get_tarball(name: str, version: str | None = None, url_only=False) -> httpx.Response | httpx.URL:
    if version is None:
        version = get_package(name)["metadata"]["version"]
    if url_only:
        return api_get(f"packages/{name}/versions/{version}/tarball", check=False).next_request.url
    else:
        return api_get(f"packages/{name}/versions/{version}/tarball", kw=dict(follow_redirects=True))
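
# Rough usage sketch (the package name below is an assumption for illustration):
#   pkg = get_package("ide-python")
#   latest = pkg["releases"]["latest"]                  # e.g. "1.2.3"
#   get_tarball("ide-python", latest, url_only=True)    # redirect target, typically a GitHub tarball URL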

# === NIX ===

@lru_cache
def spdx2nixpkgs_license(spdx: str) -> str:
    # TODO: instead of shelling out each time, perhaps dump the map to json once?
    license = run_nixpkgs(f"""
        (lib.mapAttrs'
            (key: val: {{
                name = val.spdxId or "unknown";
                value = key;
            }})
            lib.licenses
        )."{spdx}"
    """)
    if license is None:
        return f'license.spdxId = "{spdx}";'
    else:
        return f"license = lib.licenses.{license};"
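
# For illustration (assuming nixpkgs keeps spdxId = "MIT" on lib.licenses.mit):
#   spdx2nixpkgs_license("MIT")  -> 'license = lib.licenses.mit;'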

def mk_derivation(
    package      : Package,
    version      : None | str = None,
    format       : bool = True,
    dir          : Path = HERE / "pkgs",
    call_package : bool = True,
) -> str:
    #url = get_tarball(package.name, package["metadata"]["version"], url_only=True)
    #m = re.search(r'^https://api.github.com/repos/(.*)/tarball/refs/tags/(.*)$', str(url))
    #url, version = f"https://github.com/{m.group(1)}", m.group(2)
    assert package["repository"]["type"] == "git", package["repository"]
    name = package["name"]
    desc = package["metadata"].get("description", "").split(". ", 1)[0].strip().removesuffix(".")
    url = package["repository"]["url"]
    license = package["metadata"].get("license", "")  # https://spdx.org/licenses/
    if version is None:
        version = package["releases"]["latest"]
    else:
        if version not in package["versions"]:
            raise ValueError("Version not found!")

    # TODO: cache this
    src = run([
        "nurl",
        url, f"v{version}",  # we assume apm/ppm enforces a "v" version prefix
    ]).stdout
    src_path = run(["nix-build", "--expr", "with import <nixpkgs> {}; " + src, "--no-out-link"]).stdout.strip()
    print(src_path)
    src_path = Path(src_path)
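    # For context: nurl usually prints a fetcher call along these lines (values illustrative):
    #   fetchFromGitHub {
    #     owner = "...";
    #     repo = "...";
    #     rev = "v1.2.3";
    #     hash = "sha256-...";
    #   }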

    extra = []

    with (src_path / "package.json").open() as f:
        if "build" not in json.load(f).get("scripts", {}):
            extra.append("dontNpmBuild = true;")

    is_yarn = False
    if (src_path / "package-lock.json").is_file():
        lock_path = src_path / "package-lock.json"

    elif (src_path / "yarn.lock").is_file():
        lock_path = src_path / "yarn.lock"
        is_yarn = True

    else:
        (dir / "lockfiles").mkdir(parents=True, exist_ok=True)
        lock_path = dir / "lockfiles" / f"{name}.json"

        # TODO: somehow sandbox this
        if not lock_path.is_file():
            with tempfile.TemporaryDirectory() as tmp:
                #run(["npm", "install", "--package-lock-only", src_path], cwd=tmp, stdout=None) # doesn't work
                (Path(tmp) / "package.json").symlink_to(src_path / "package.json")
                run(["npm", "install", "--package-lock-only"], cwd=tmp, stdout=None)
                shutil.move(f"{tmp}/package-lock.json", lock_path)

        extra.append(f'postPatch = "ln -s ${{./lockfiles/{name}.json}} ./package-lock.json";')

    #assert not is_yarn
    if is_yarn:
        builder = 'buildYarnPackage'
        deps_hash = run(["prefetch-yarn-deps", lock_path]).stdout.strip()
        deps_hash = run(["nix", "hash", "to-sri", "--type", "sha256", deps_hash]).stdout.strip()
        extra.append(f'yarnDepsHash = "{deps_hash}";')
    else:
        builder = 'buildNpmPackage'
        deps_hash = run(["prefetch-npm-deps", lock_path]).stdout.strip()
        extra.append(f'npmDepsHash = "{deps_hash}";')
    print(deps_hash)

    expr = f"""
        {builder} rec {{
          pname = "pulsar-{name}";
          version = "{version}";
          src = {src.replace(version, "${version}")};
          {' '.join(extra)}
          nativeBuildInputs = [ python3 ]; # node-gyp
          npmFlags = [ "--legacy-peer-deps" ];
          ELECTRON_SKIP_BINARY_DOWNLOAD = "1"; #
          NODE_OPTIONS = "--no-experimental-fetch"; # https://github.com/parcel-bundler/parcel/issues/8005
          meta = {{
            homepage = "https://web.pulsar-edit.dev/packages/{name}";
            description = "{desc}";
            {spdx2nixpkgs_license(license)}
            maintainers = with lib.maintainers; [ pbsds ];
          }};
        }}
    """

    if call_package:
        expr = f"{{ lib, {builder}, fetchFromGitHub, python3 }}: {expr}"

    if format:
        #return run(["alejandra", "-"], input=expr).stdout
        return run(["nixfmt"], input=expr).stdout
    else:
        return expr

# === CLI ===

app = typer.Typer(no_args_is_help=True)

@app.command()
def show(name: str):
    rich.print_json(json.dumps(
        get_package(name)
    ))

@app.command()
def featured(
    packages: bool = False,
    themes: bool = False,
):
    if not (themes or packages):
        rich.print("ERROR: neither --themes nor --packages was chosen.")
        raise typer.Exit(1)

    if packages:
        rich.print_json(json.dumps( get_featured_packages() ))
    if themes:
        rich.print_json(json.dumps( get_featured_themes() ))

@app.command()
def search(
    query: str,
    page: int = 1,
    # ad-hoc Enums mirroring the Sorting/Direction Literals so typer can offer them as choices
    sort: Enum("Sorting", dict(zip(*(Sorting.__args__,)*2))) = "downloads",
    dir: Enum("Direction", dict(zip(*(Direction.__args__,)*2))) = "desc",
    packages: bool = False,
    themes: bool = False,
):
    if not (themes or packages):
        rich.print("ERROR: neither --themes nor --packages was chosen.")
        raise typer.Exit(1)

    if packages:
        rich.print_json(json.dumps(
            search_packages(query, page=page, sort=sort, direction=dir)
        ))
    if themes:
        rich.print_json(json.dumps(
            search_themes(query, page=page, sort=sort, direction=dir)
        ))

@app.command()
def crawl(pages: int = 10, j: int = 1):
    # TODO: have it populate a cache
    # TODO: make the getters use the cache
    raise NotImplementedError
    with ThreadPoolExecutor(max_workers=j or None) as e:
        futures = [e.submit(search_packages, page=page) for page in range(pages)]
        #futures += [e.submit(search_themes, page=page) for page in range(pages)]
        for future in as_completed(futures):
            for package in future.result():
                print(package["name"])
                print(json.dumps(package))

@app.command()
def drv(name: str, version: str | None = None, dir: Path = HERE / "pkgs"):
    package = get_package(name)
    expr = mk_derivation(package, version=version, dir=dir)
    print(expr)
    try:
        out_path = run([
            "nix-build", "--expr",
            f"(import <nixpkgs> {{}}).callPackage (\n{expr.strip()}\n) {{}}",
            "--no-out-link",
        ], cwd=dir).stdout.strip()
        print(out_path)
    except Exception:
        raise typer.Exit(1)

    dir.mkdir(exist_ok=True, parents=True)
    with (dir / f"{name}.nix").open("w") as f:
        f.write(expr)

if __name__ == "__main__":
    app()
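
# Rough CLI sketch (the script filename and the package name are assumptions for illustration):
#   ./pulsar2nix.py search linter --packages
#   ./pulsar2nix.py show ide-python
#   ./pulsar2nix.py drv ide-python   # writes pkgs/ide-python.nix after a test nix-build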

# TODOs:
# * GitHub rate limit
# * fix lots of common build errors
#   * [x] electron download
#     * [ ] only if required
#   * [x] node-gyp requires python3
#     * [ ] only if required
#   * [x] generate missing package-lock.json
#     * [x] yarn.lock
#   * [x] meta.license
#   * [ ] determine if tag has v prefix
#   * [ ] OSError: file not found
# * use npm vendored in ppm
# * test in pulsar with nixos vm?