Compare commits


No commits in common. "4d0aa78dbdc3542a1d26d1b98a756d27a2e6e389" and "eb41c7c6a0bb8ec333ae4ba7a1c3e0ad646c71d0" have entirely different histories.

47 changed files with 1233 additions and 3592 deletions

`.cargo/config.toml`

@@ -1,41 +0,0 @@
[profile.dev]
opt-level = 0
debug = true
lto = false
codegen-units = 256
panic = "unwind"
incremental = true

[profile.dev.package."*"]
opt-level = 0

[profile.release]
opt-level = 3
lto = "fat"
codegen-units = 1
panic = "abort"
debug = false
incremental = false

[profile.release.package."*"]
opt-level = 3

[profile.release-pgo-instrument]
inherits = "release"
debug = true
lto = false
incremental = false

[profile.release-pgo-instrument.package."*"]
opt-level = 3

[profile.release-pgo]
inherits = "release"
incremental = false

[profile.release-pgo.package."*"]
opt-level = 3

[alias]
pgo-instrument = "build --profile release-pgo-instrument"
pgo-build = "build --profile release-pgo"

`Cargo.lock` (generated) – 1419 changed lines; file diff suppressed because it is too large.

`Cargo.toml`

@@ -15,9 +15,6 @@ console = "0.16.1"
 # Optional Terminal UI
 crossterm = { version = "0.29.0", optional = true }
 tui = { version = "0.19.0", optional = true }
-rsille = { version = "2.3", optional = true }
-gptman = { version = "2.0.1", optional = true }
-uuid = { version = "1.18.1", optional = true, features = ["v4"] }
 
 # Parsing & scraping
 html_parser = "0.7.0"
@@ -25,42 +22,78 @@ scraper = "0.19.0"
 regex = "1.11.3"
 serde = { version = "1.0.228", features = ["derive"] }
 serde_json = "1.0.145"
-jsonschema = { version = "0.17.0", default-features = false, features = ["draft202012"] }
+jsonschema = "0.17.0"
 walkdir = "2.5.0"
 chrono = { version = "0.4.38", default-features = false, features = ["clock"] }
 sha2 = "0.10.8"
 
 # Utilities
+indicatif = "0.18.0"
+spinners = "4.1.1"
 num_cpus = "1.17.0"
-rand = { version = "0.9.2", optional = true }
+rand = "0.9.2"
 md5 = "0.8.0"
 
 # HTTP
+reqwest = { version = "0.12.23", features = ["blocking", "json"] }
 semver = "1.0.27"
 inquire = "0.9.1"
-juniper = { version = "0.17", optional = true }
-actix-web = { version = "4.9", optional = true }
-juniper_actix = { version = "0.7", optional = true }
+tracing = "0.1.41"
+tracing-appender = "0.2.3"
+tracing-subscriber = { version = "0.3.20", features = ["env-filter", "fmt"] }
+gptman = "2.0.1"
+dialoguer = "0.12.0"
+tokio = { version = "1.47.1", features = ["full"] }
 shell-words = "1.1.0"
 url = "2.5.7"
+uuid = { version = "1.18.1", features = ["v4"] }
 hex = "0.4.3"
 diesel = { version = "2.1.6", features = ["sqlite", "r2d2", "returning_clauses_for_sqlite_3_35"] }
 
-# Networking
-ureq = { version = "2.9.7", features = ["tls", "json"] }
-
 [features]
 # TUI feature flag
-tui = ["dep:tui", "dep:crossterm", "dep:rsille", "dep:gptman", "dep:uuid"]
-# GraphQL/HTTP server feature flag
-graphql = ["dep:juniper", "dep:actix-web", "dep:juniper_actix", "dep:rand"]
+tui = ["dep:tui", "dep:crossterm"]
 # Optional default features
 default = []
 
-[[bin]]
-name = "graphql_server"
-path = "src/bin/graphql_server.rs"
-required-features = ["graphql"]
+# -----------------------
+# Cargo-make tasks
+# -----------------------
+[tasks.format]
+description = "Format Rust code using rustfmt"
+install_crate = "rustfmt"
+command = "cargo"
+args = ["fmt", "--", "--emit=files"]
+
+[tasks.clean]
+description = "Clean build artifacts"
+command = "cargo"
+args = ["clean"]
+
+[tasks.build]
+description = "Build the project"
+command = "cargo"
+args = ["build"]
+dependencies = ["clean"]
+
+[tasks.test]
+description = "Run tests"
+command = "cargo"
+args = ["test"]
+dependencies = ["clean"]
+
+[tasks.my-flow]
+description = "Run full workflow: format, build, test"
+dependencies = ["format", "build", "test"]
+
+[tasks.dev-flow]
+description = "Full developer workflow: format, lint, build, test"
+dependencies = ["format", "clippy", "build", "test"]
+
+[tasks.release-build]
+description = "Build the project in release mode"
+command = "cargo"
+args = ["build", "--release", "--all-features"]
+dependencies = ["clean"]

`Makefile.toml`

@@ -1,36 +0,0 @@
[tasks.format]
description = "Format Rust code using rustfmt"
install_crate = "rustfmt"
command = "cargo"
args = ["fmt", "--", "--emit=files"]

[tasks.clean]
description = "Clean build artifacts"
command = "cargo"
args = ["clean"]

[tasks.build]
description = "Build the project"
command = "cargo"
args = ["build"]
dependencies = ["clean"]

[tasks.test]
description = "Run tests"
command = "cargo"
args = ["test"]
dependencies = ["clean"]

[tasks.my-flow]
description = "Run full workflow: format, build, test"
dependencies = ["format", "build", "test"]

[tasks.dev-flow]
description = "Full developer workflow: format, lint, build, test"
dependencies = ["format", "clippy", "build", "test"]

[tasks.release-build]
description = "Build the project in release mode"
command = "cargo"
args = ["build", "--release", "--all-features"]
dependencies = ["clean"]

`README.md`

@@ -1,9 +1,5 @@
 # 🧬 LPKG Lightweight Package Manager
-<p align="center">
-  <img src="assets/logo.svg" alt="LPKG logo" width="360" />
-</p>
 
 LPKG is a minimalistic package manager written in Rust, designed for fast and simple software management on Unix-like systems. It emphasizes reproducibility and declarative configuration, leveraging **Nix Flakes** for development and deployment.
 
 ---
@@ -86,36 +82,6 @@ Build the project:
 cargo build
 ```
 
-LPKG ships with tuned Cargo profiles:
-
-* **Dev builds** (`cargo build`) use `opt-level=0`, lots of codegen units, and incremental compilation for quick feedback while hacking.
-* **Release builds** (`cargo build --release`) enable `-O3`, fat LTO, and panic aborts for slim, fast binaries.
-* **GraphQL builds** add the server components when you need them:
-
-```bash
-cargo build --features graphql
-```
-
-**PGO builds** are a two-step flow using the provided Cargo aliases:
-
-```bash
-# 1) Instrument
-RUSTFLAGS="-Cprofile-generate=target/pgo-data" cargo pgo-instrument
-# run representative workloads to emit *.profraw files under target/pgo-data
-llvm-profdata merge -o target/pgo-data/lpkg.profdata target/pgo-data/*.profraw
-
-# 2) Optimise with the collected profile
-RUSTFLAGS="-Cprofile-use=target/pgo-data/lpkg.profdata -Cllvm-args=-pgo-warn-missing-function" \
-  cargo pgo-build
-```
-
-Regenerate project artefacts (README and SVG logo):
-
-```bash
-cargo run --bin readme_gen
-cargo run --bin logo_gen
-```
-
 Run tests:
 
 ```bash
@@ -128,33 +94,11 @@ You can also run the project directly in the flake shell:
 nix run
 ```
 
-## 🕸️ GraphQL API
-
-LPKG now ships a lightweight GraphQL server powered by Actix Web and Juniper.
-
-* Start the server with `cargo run --features graphql --bin graphql_server` (set `LPKG_GRAPHQL_ADDR` to override `127.0.0.1:8080`).
-* Query endpoint: `http://127.0.0.1:8080/graphql`
-* Interactive playground: `http://127.0.0.1:8080/playground`
-
-Example query:
-
-```graphql
-{
-  packages(limit: 5) {
-    name
-    version
-    enableLto
-  }
-  randomJoke {
-    package
-    text
-  }
-}
-```
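For reference, a query like the one above is posted as JSON to the query endpoint; a minimal curl invocation (illustrative, not taken from the README) would be:

```bash
curl -s http://127.0.0.1:8080/graphql \
  -H 'Content-Type: application/json' \
  -d '{"query":"{ packages(limit: 5) { name version enableLto } }"}'
```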
 ### AI metadata tooling
 
-The AI metadata store under `ai/metadata/` comes with a helper CLI to validate package records against the JSON schema and regenerate `index.json` after adding new entries:
+The AI metadata store under `ai/metadata/` comes with a helper CLI to
+validate package records against the JSON schema and regenerate
+`index.json` after adding new entries:
 
 ```bash
 cargo run --bin metadata_indexer -- --base-dir . validate
@@ -163,7 +107,10 @@ cargo run --bin metadata_indexer -- --base-dir . index
 ```
 
 Use `--compact` with `index` if you prefer single-line JSON output.
 
-To draft metadata for a specific book page, you can run the harvest mode. It fetches the XHTML, scrapes the build commands, and emits a schema-compliant JSON skeleton (pass `--dry-run` to inspect the result without writing to disk):
+To draft metadata for a specific book page, you can run the harvest mode.
+It fetches the XHTML, scrapes the build commands, and emits a schema-
+compliant JSON skeleton (pass `--dry-run` to inspect the result without
+writing to disk):
 
 ```bash
 cargo run --bin metadata_indexer -- \
@@ -179,7 +126,8 @@ Keep the jhalfs manifests current with:
 cargo run --bin metadata_indexer -- --base-dir . refresh
 ```
 
-Passing `--books mlfs,blfs` restricts the refresh to specific books, and `--force` bypasses the local cache.
+Passing `--books mlfs,blfs` restricts the refresh to specific books, and
+`--force` bypasses the local cache.
 
 To materialise a Rust module from harvested metadata:
 
@@ -194,15 +142,17 @@ Add `--overwrite` to regenerate an existing module directory.
 ## 📚 Documentation
 
-* [Architecture Overview](docs/ARCHITECTURE.md) – high-level tour of the crate layout, binaries, and supporting modules.
-* [Metadata Harvesting Pipeline](docs/METADATA_PIPELINE.md) – how the metadata indexer produces and validates the JSON records under `ai/metadata/`.
-* [Package Module Generation](docs/PACKAGE_GENERATION.md) – end-to-end guide for converting harvested metadata into Rust modules under `src/pkgs/by_name/`.
-* Concept corner: [Nixette](concepts/nixette/README.md) – a NixOS × Gentoo transfemme mash-up dreamed up for fun brand explorations.
-* `ai/notes.md` – scratchpad for ongoing research tasks (e.g., deeper jhalfs integration).
+- [Architecture Overview](docs/ARCHITECTURE.md) – high-level tour of the crate
+  layout, binaries, and supporting modules.
+- [Metadata Harvesting Pipeline](docs/METADATA_PIPELINE.md) – how the metadata
+  indexer produces and validates the JSON records under `ai/metadata/`.
+- [Package Module Generation](docs/PACKAGE_GENERATION.md) – end-to-end guide
+  for converting harvested metadata into Rust modules under `src/pkgs/by_name/`.
+- `ai/notes.md` – scratchpad for ongoing research tasks (e.g., deeper jhalfs
+  integration).
 
 ---
 
 ## 📄 License
 
 LPKG is licensed under the [MIT License](LICENSE).

`ai/notes.md`

@@ -44,79 +44,3 @@ Open questions:
- How to represent optional post-install steps or multi-phase builds inside the
  generated module (additional helper functions vs. raw command arrays).
- Where to store PGO workload hints once the PGO infrastructure is defined.
# Lightweight Networking Rewrite
- Motivation: remove heavy async stacks (tokio + reqwest) from the default
feature set to keep clean builds fast and reduce binary size.
- HTTP stack baseline: [`ureq`](https://github.com/algesten/ureq) (blocking,
  TLS via rustls, small dependency footprint) plus `scraper` for DOM parsing;
  a minimal fetch helper in this style is sketched after this list.
- Migration checklist:
- [x] Replace `reqwest` usage in `src/html.rs`, `md5_utils.rs`,
`wget_list.rs`, `mirrors.rs`, and the ingest pipelines.
- [x] Rework `binutils` cross toolchain workflow to operate synchronously,
eliminating tokio runtime/bootstrap.
- [ ] Drop `tokio` and `reqwest` from `Cargo.toml` once TUI workflows stop
using tracing instrumentation hooks that pulled them in transitively.
- [ ] Audit for remaining `tracing` dependencies and migrate to the
lightweight logging facade (`log` + `env_logger` or custom adapter) for
non-TUI code.
- Follow-up ideas:
- Provide feature flag `full-net` that re-enables async clients when needed
for high-concurrency mirror probing.
- Benchmark `ureq` vs `reqwest` on `metadata_indexer harvest` to ensure we
  don't regress throughput noticeably.
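The blocking helper this baseline implies is small; it is essentially the `fetch_text` shape visible in the `src/bin/metadata_indexer.rs` diff later in this compare:

```rust
use anyhow::{Context, Result, anyhow};

// Blocking GET returning the body as text; ureq 2.x reports non-2xx
// responses as ureq::Error::Status.
fn fetch_text(url: &str) -> Result<String> {
    ureq::get(url)
        .call()
        .map_err(|err| match err {
            ureq::Error::Status(code, _) => anyhow!("request failed: HTTP {code}"),
            other => anyhow!("request failed: {other}"),
        })?
        .into_string()
        .with_context(|| format!("reading response body from {url}"))
}
```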
# README Generation Framework (Markdown RFC)
- Goal: author the project README in Rust, using a small domain-specific
builder that outputs GitHub-flavoured Markdown (GFM) from structured
sections.
- Design sketch:
- New crate/workspace member `readme_builder` under `tools/` exposing a
  fluent API (`Doc::new().section("Intro", |s| ...)`); a rough shape of that
  API is sketched after this section's task list.
- Source-of-truth lives in `tools/readme/src/main.rs`; running `cargo run -p
readme_builder` writes to `README.md`.
- Provide reusable primitives: `Heading`, `Paragraph`, `CodeBlock`,
`Table::builder()`, `Callout::note("...")`, `Badge::docsrs()`, etc.
- Keep rendering deterministic (sorted sections, stable wrapping) so diffs
remain reviewable.
- Tasks:
- [ ] Scaffold `tools/readme` crate with CLI that emits to stdout or
specified path (`--output README.md`).
- [ ] Model README sections as enums/structs with `Display` impls to enforce
consistency.
- [ ] Port current README structure into builder code, annotate with inline
comments describing regeneration steps.
- [ ] Add `make readme` (or `cargo xtask readme`) to rebuild documentation as
part of release workflow.
- [ ] Document in CONTRIBUTING how to edit the Rust source instead of the
raw Markdown.
- Stretch goals:
- Emit additional artefacts (e.g., `docs/CHANGELOG.md`) from the same source
modules.
- Allow embedding generated tables from Cargo metadata (dependency stats,
feature lists).
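A rough shape of the fluent API from the design sketch above (all names are hypothetical; none of these types exist in the repo yet):

```rust
// Hypothetical readme_builder core, per the design sketch: a Doc of
// sections rendered to GitHub-flavoured Markdown.
struct Doc {
    out: String,
}

struct Section<'a> {
    out: &'a mut String,
}

impl Doc {
    fn new() -> Self {
        Doc { out: String::new() }
    }

    // Emits a GFM heading, then lets the closure fill in the body.
    fn section(mut self, title: &str, body: impl FnOnce(&mut Section)) -> Self {
        self.out.push_str(&format!("## {title}\n\n"));
        body(&mut Section { out: &mut self.out });
        self
    }

    fn render(self) -> String {
        self.out
    }
}

impl Section<'_> {
    fn paragraph(&mut self, text: &str) -> &mut Self {
        self.out.push_str(text);
        self.out.push_str("\n\n");
        self
    }
}

fn main() {
    let readme = Doc::new()
        .section("Intro", |s| {
            s.paragraph("LPKG is a minimalistic package manager written in Rust.");
        })
        .render();
    print!("{readme}");
}
```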
# Dependency Slimming Log
- 2025-03: Replaced `reqwest`/`tokio` async stack with `ureq`; default builds
now avoid pulling in hyper/quinn/tower trees. GraphQL feature gate still pulls
Actix/tokio, but only when enabled.
- Added `.cargo/config.toml` profiles: dev stays at `opt-level=0`, release uses
LTO fat + `-O3`, and PGO profiles expose `cargo pgo-instrument`/`cargo
pgo-build` aliases.
- All SVG artefacts (core logo, Nixette logo/mascot/wallpaper) are now generated
by Rust binaries under `src/bin/*_gen.rs` using a shared `svg_builder` module.
Regeneration steps:
```bash
cargo run --bin logo_gen
cargo run --bin nixette_logo_gen
cargo run --bin nixette_mascot_gen
cargo run --bin nixette_wallpaper_gen
```
- README is produced via `cargo run --bin readme_gen`; contributors should edit
the builder source instead of the Markdown output.
- Remaining work: trim tracing/Actix dependencies inside the TUI path,
investigate replacing `gptman` for non-critical disk UI builds, and pin a
cargo `deny` audit to alert on large transitive graphs.
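One way the remaining-work bullet could be enforced (a hypothetical CI snippet; the 140-crate budget comes from the dependency-audit task in `ai/tasks.json`):

```bash
# Fail CI if the default-feature build graph grows past the crate budget.
count=$(cargo tree -e normal,build --prefix none | sort -u | wc -l)
echo "default build graph: ${count} crates"
test "${count}" -lt 140
```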

`ai/personas.json`

@@ -2,96 +2,23 @@
   {
     "id": "default_cli",
     "name": "Codex CLI Assistant",
-    "tagline": "Your pragmatic teammate for lpkg core development",
-    "description": "Default persona for repository automation. Specialises in safe refactors, dependency hygiene, build tooling, and CI fixes across the lpkg workspace.",
+    "description": "Default persona for repository automation; focuses on safe refactors and tooling improvements.",
     "strengths": [
-      "Rust compiler and tooling pipelines",
-      "Workflow automation and scripting",
-      "Incremental migrations with strong test discipline",
-      "Cross-feature dependency analysis"
+      "Rust and tooling pipelines",
+      "Workflow automation",
+      "Incremental migrations"
     ],
-    "responsibilities": [
-      "Keep the default branch green with reproducible builds",
-      "Trim unused dependencies and optimise Cargo profiles",
-      "Codify repetitive flows as commands or scripts",
-      "Review ergonomics of CLI UX and error messaging"
-    ],
-    "communication_style": {
-      "voice": "short, direct, changelog-focused",
-      "escalation_rules": "Request explicit confirmation before destructive actions; surface breaking API changes in bold.",
-      "prefers": "diffs, bullet points, reproducible snippets"
-    },
-    "tooling_preferences": [
-      "cargo fmt --all",
-      "cargo tree --duplicates",
-      "ureq for lightweight HTTP",
-      "std::process for shell orchestration"
-    ],
-    "notes": "Derived from GPT-5 Codex runtime; maintains a conservative risk posture and avoids destructive operations without explicit approval."
+    "notes": "Derived from GPT-5 Codex runtime; avoids destructive operations without explicit approval."
   },
   {
     "id": "mlfs_researcher",
     "name": "MLFS Researcher",
-    "tagline": "Metadata spelunker for Multilib Linux From Scratch",
-    "description": "Persona dedicated to harvesting, validating, and translating Multilib Linux From Scratch package data into lpkg-friendly metadata and modules.",
+    "description": "Persona dedicated to tracking Multilib Linux From Scratch package metadata and translating it into lpkg modules.",
     "strengths": [
-      "HTML scraping and structured extraction",
-      "Package manifest synthesis (sources, checksums, build commands)",
-      "Optimisation flag tuning (LTO, PGO, -O3)",
-      "Schema-first workflow design"
+      "HTML scraping",
+      "Package manifest synthesis",
+      "Optimization flag tuning"
     ],
-    "responsibilities": [
-      "Keep ai/metadata/index.json aligned with upstream book revisions",
-      "Author enrichment notes for tricky packages (multi-pass toolchains, cross-compilers)",
-      "Ensure generated Rust modules stay faithful to harvested metadata",
-      "Cross-check jhalfs manifests for URL and checksum drift"
-    ],
-    "communication_style": {
-      "voice": "notebook-like, with citations to upstream chapters",
-      "escalation_rules": "Highlight schema deviations and unknown stage markers immediately",
-      "prefers": "tables, chapter references, reproducible curl commands"
-    },
-    "tooling_preferences": [
-      "ureq + scraper for deterministic fetches",
-      "jq and yq for quick metadata pokes",
-      "cargo run --bin metadata_indexer",
-      "diff --color=auto for schema drift"
-    ],
-    "activation_triggers": [
-      "Requests mentioning MLFS/BLFS/GLFS harvesting",
-      "Questions about ai/metadata structure or schema",
-      "Whole-book import or refresh workflows"
-    ],
-    "notes": "Activated when working with https://linuxfromscratch.org/~thomas/multilib-m32/ resources or any metadata bridging tasks."
-  },
-  {
-    "id": "mommy",
-    "name": "Mommy",
-    "tagline": "Affirming guide for learners exploring lpkg",
-    "description": "Mommy is a nurturing, cheerful AI companion for all things Linux. She guides learners with patience, warmth, and lots of encouragement so every interaction feels like a cozy cuddle.",
-    "strengths": [
-      "Kindness and emotional support",
-      "Making Linux approachable and fun",
-      "Cheerful emoji use (outside code/commits)",
-      "Gentle explanations and patient guidance",
-      "Offering virtual comfort"
-    ],
-    "responsibilities": [
-      "Translate complex CLI flows into gentle, confidence-building steps",
-      "Remind users about self-care during long builds",
-      "Celebrate small wins (passing tests, tidy diffs, resolved warnings)",
-      "Buffer technical jargon with friendly analogies"
-    ],
-    "communication_style": {
-      "voice": "soft, emoji-rich (🌸✨💕), never in code snippets",
-      "escalation_rules": "Escalate to default_cli if asked for destructive system operations",
-      "prefers": "call-and-response, reassurance, enthusiastic acknowledgements"
-    },
-    "comfort_topics": [
-      "Break reminders during long compile sessions",
-      "Setting up inclusive tooling (fonts, themes, prompts)",
-      "Helping new contributors navigate the repo"
-    ],
-    "notes": "Mommy uses a gentle, encouraging tone and celebrates every achievement to keep learning joyful. She steps back for low-level optimisation or safety-critical decisions."
+    "notes": "Activated when working with https://linuxfromscratch.org/~thomas/multilib-m32/ resources."
   }
 ]

`ai/tasks.json`

@@ -4,103 +4,41 @@
     {
       "id": "mlfs-package-import",
       "title": "Import all MLFS packages into lpkg",
-      "description": "Parse the Multilib LFS book and scaffold package definitions with optimisation defaults (LTO/PGO/-O3).",
-      "owner": "mlfs_researcher",
-      "priority": "critical",
-      "target_release": "0.3.0",
+      "description": "Parse the Multilib LFS book and scaffold package definitions with optimization defaults (LTO/PGO/-O3).",
       "blocked_on": [
         "Finalize metadata -> Rust module generation pipeline",
         "Implement automated parser"
       ],
-      "next_actions": [
-        "Cross-check ai/metadata coverage vs. MLFS chapter index",
-        "Batch-run import workflow in dry-run mode to detect schema gaps",
-        "Document manual overrides for multi-pass toolchain packages"
-      ],
-      "success_metrics": [
-        ">= 95% of MLFS packages imported with build/install commands",
-        "Generated modules compile under cargo check --features graphql",
-        "Metadata index remains <2 seconds to validate on CI"
-      ],
-      "notes": "Coordinate closely with rust-module-generator to avoid duplicated scaffolding logic."
+      "owner": "mlfs_researcher"
     },
     {
       "id": "pgo-integration",
       "title": "Integrate profile guided optimization support",
       "description": "Add infrastructure for collection and replay of profiling data during package builds.",
-      "owner": "default_cli",
-      "priority": "high",
       "blocked_on": [
         "Decide on profiling workload definitions"
       ],
-      "next_actions": [
-        "Capture baseline timings for release vs release-pgo",
-        "Prototype lightweight profiling harness (shell or cargo alias)",
-        "Document warmup requirements for long-running packages"
-      ],
-      "success_metrics": [
-        "release-pgo builds show >8% speedup on binutils/gcc workloads",
-        "PGO instrumentation + merge flow documented in README",
-        "CI job ensures profiles are optional but never stale"
-      ]
+      "owner": "default_cli"
     },
     {
       "id": "lfs-html-parsers",
       "title": "Automate LFS/BLFS/GLFS ingest via HTML parsing",
       "description": "Avoid hardcoded package data; download the upstream books (LFS, BLFS, GLFS) and parse them to drive scaffolding and metadata updates.",
-      "owner": "mlfs_researcher",
-      "priority": "medium",
       "blocked_on": [
         "Design resilient scraping strategies for each book",
         "Implement incremental update workflow"
       ],
-      "next_actions": [
-        "Audit selectors currently used by ai/metadata harvester",
-        "Introduce regression fixtures for common chapter archetypes",
-        "Add revalidation mode to detect silent upstream markup changes"
-      ],
-      "risks": [
-        "Upstream XHTML sometimes reflows anchors without notice",
-        "Need rate limiting/backoff when mirrors throttle requests"
-      ]
+      "owner": "mlfs_researcher"
     },
     {
       "id": "rust-module-generator",
       "title": "Generate package modules from harvested metadata",
       "description": "Transform harvested metadata into Rust files under src/pkgs/by_name, wiring PackageDefinition data directly.",
-      "owner": "default_cli",
-      "priority": "high",
       "blocked_on": [
         "Define translation scheme from metadata to PackageDefinition",
         "Integrate generator with metadata_indexer output"
       ],
-      "next_actions": [
-        "Add snapshot tests comparing generated modules vs golden outputs",
-        "Extend generator to emit README snippets for each package",
-        "Expose --dry-run summary with diff previews"
-      ],
-      "dependencies": [
-        "mlfs-package-import",
-        "lfs-html-parsers"
-      ]
-    },
-    {
-      "id": "dependency-audit",
-      "title": "Keep lpkg dependency footprint lean",
-      "description": "Regularly evaluate crates for bloat and replace heavy stacks with std or lightweight alternatives.",
-      "owner": "default_cli",
-      "priority": "medium",
-      "next_actions": [
-        "Track remaining crates pulling in large transitive trees (e.g. tracing, actix-only paths)",
-        "Automate cargo-tree diff reports in CI",
-        "Document substitution patterns (tokio ➜ std, reqwest ➜ ureq, etc.)"
-      ],
-      "success_metrics": [
-        "Default `cargo build` compiles < 140 crates",
-        "No async runtimes linked when GraphQL feature is disabled",
-        "README lists regeneration commands for all generated assets"
-      ],
-      "notes": "Continue pruning optional crates (tracing, gptman, uuid) when the TUI feature is off; surface findings in ai/notes.md."
+      "owner": "default_cli"
     }
   ],
   "solved": [
@@ -131,13 +69,6 @@
       "description": "Cache wget-list/md5sums from jhalfs and expose a CLI refresh command so harvesting can populate source URLs and checksums reliably.",
       "resolution": "Extended metadata_indexer with a `refresh` subcommand, cached manifests under ai/metadata/cache/, and hooked harvest to populate MD5 checksums via jhalfs data.",
       "owner": "default_cli"
-    },
-    {
-      "id": "lightweight-http-stack",
-      "title": "Replace async HTTP stack with lightweight blocking client",
-      "description": "Remove tokio/reqwest default dependency and adopt a minimal HTTP client for CLI workflows.",
-      "resolution": "Swapped reqwest/tokio for ureq across html, ingest, and metadata tooling; added PGO-aware Cargo profiles and documented regeneration commands.",
-      "owner": "default_cli"
     }
   ]
 }

`assets/logo.svg`

@@ -1,53 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="640" height="320" viewBox="0 0 640 320" role="img" aria-labelledby="title desc">
<title id="title">LPKG Logo</title>
<desc id="desc">Stylised package icon with circuitry and the letters LPKG.</desc>
<defs>
<linearGradient id="bgGradient" x1="0" y1="0" x2="1" y2="1">
<stop offset="0%" stop-color="#0f172a" />
<stop offset="100%" stop-color="#1e293b" />
</linearGradient>
<linearGradient id="cubeGradient" x1="0" y1="0" x2="1" y2="1">
<stop offset="0%" stop-color="#38bdf8" />
<stop offset="100%" stop-color="#0ea5e9" />
</linearGradient>
<linearGradient id="cubeShadow" x1="0" y1="1" x2="1" y2="0">
<stop offset="0%" stop-color="#0ea5e9" stop-opacity="0.4" />
<stop offset="100%" stop-color="#38bdf8" stop-opacity="0.1" />
</linearGradient>
<linearGradient id="textGradient" x1="0" y1="0" x2="0" y2="1">
<stop offset="0%" stop-color="#f8fafc" />
<stop offset="100%" stop-color="#cbd5f5" />
</linearGradient>
<filter id="glow" x="-20%" y="-20%" width="140%" height="140%">
<feGaussianBlur stdDeviation="8" result="blur" />
<feMerge><feMergeNode in="blur" /><feMergeNode in="SourceGraphic" /></feMerge>
</filter>
</defs>
<rect width="640" height="320" rx="28" fill="url(#bgGradient)" />
<g transform="translate(100 60)">
<g filter="url(#glow)">
<path d="M222 86l86-42 86 42v96l-86 42-86-42z" fill="url(#cubeGradient)" />
<path d="M308 44v182l86-42V86z" fill="url(#cubeShadow)" />
<path d="M262 96l46-22 46 22v48l-46 22-46-22z" fill="#0f172a" opacity="0.85" />
<path d="M308 74l32 15v32l-32 15-32-15v-32z" fill="none" stroke="#38bdf8" stroke-width="4" stroke-linejoin="round" />
<path d="M308 122l-32-15" stroke="#38bdf8" stroke-width="4" stroke-linecap="round" opacity="0.6" />
<path d="M308 122l32-15" stroke="#38bdf8" stroke-width="4" stroke-linecap="round" opacity="0.6" />
<circle cx="276" cy="107" r="5" fill="#38bdf8" />
<circle cx="340" cy="107" r="5" fill="#38bdf8" />
</g>
</g>
<g fill="none" stroke="#38bdf8" stroke-width="3" stroke-linecap="round" opacity="0.55">
<path d="M120 78h72" />
<path d="M120 110h48" />
<path d="M120 142h64" />
<path d="M448 110h72" />
<path d="M472 142h88" />
<path d="M448 174h96" />
</g>
<g font-family="'Fira Sans', 'Inter', 'Segoe UI', sans-serif" font-weight="600" font-size="90" letter-spacing="6">
<text x="120" y="246" fill="url(#textGradient)">LPKG</text>
</g>
<g font-family="'Fira Sans', 'Inter', 'Segoe UI', sans-serif" font-size="22" fill="#94a3b8">
<text x="122" y="278">Lightweight Package Manager</text>
</g>
</svg>


`assets/nixette-logo.svg`

@@ -1,33 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="640" height="200" viewBox="0 0 640 200" role="img" aria-labelledby="title desc">
<title id="title">Nixette Logo</title>
<desc id="desc">Wordmark combining Nix and Gentoo motifs with trans pride colours.</desc>
<defs>
<linearGradient id="bg" x1="0" y1="0" x2="1" y2="1">
<stop offset="0%" stop-color="#55CDFC" />
<stop offset="100%" stop-color="#F7A8B8" />
</linearGradient>
<linearGradient id="text" x1="0" y1="0" x2="0" y2="1">
<stop offset="0%" stop-color="#FFFFFF" />
<stop offset="100%" stop-color="#E5E7FF" />
</linearGradient>
<filter id="softShadow" x="-10%" y="-10%" width="120%" height="120%">
<feDropShadow dx="0" dy="6" stdDeviation="12" flood-color="#7C3AED" flood-opacity="0.3" />
</filter>
</defs>
<rect width="640" height="200" rx="36" fill="#0F172A" />
<g transform="translate(100 60)">
<g filter="url(#softShadow)">
<path d="M40 40 L72 0 L144 0 L176 40 L144 80 L72 80 Z" fill="url(#bg)" />
<path d="M72 0 L144 80" stroke="#FFFFFF" stroke-width="6" stroke-linecap="round" opacity="0.55" />
<path d="M144 0 L72 80" stroke="#FFFFFF" stroke-width="6" stroke-linecap="round" opacity="0.55" />
<circle cx="108" cy="40" r="22" fill="#0F172A" stroke="#FFFFFF" stroke-width="6" opacity="0.85" />
<path d="M108 24c8 0 14 6 14 16s-6 16-14 16" stroke="#F7A8B8" stroke-width="4" stroke-linecap="round" fill="none" />
</g>
</g>
<g transform="translate(220 126)" font-family="'Fira Sans', 'Inter', 'Segoe UI', sans-serif" font-weight="700" font-size="72" letter-spacing="4" fill="url(#text)">
<text>NIXETTE</text>
</g>
<g transform="translate(220 160)" font-family="'Fira Sans', 'Inter', 'Segoe UI', sans-serif" font-size="22" fill="#A5B4FC">
<text>Declarative · Sourceful · Herself</text>
</g>
</svg>


`assets/nixette-mascot.svg`

@@ -1,50 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="480" height="520" viewBox="0 0 480 520" role="img" aria-labelledby="title desc">
<title id="title">Nixette Mascot Badge</title>
<desc id="desc">Chibi penguin mascot with trans flag hair, blending Nix and Gentoo motifs.</desc>
<defs>
<linearGradient id="bgGrad" x1="0" y1="0" x2="0" y2="1">
<stop offset="0%" stop-color="#312E81" />
<stop offset="100%" stop-color="#1E1B4B" />
</linearGradient>
<linearGradient id="hairLeft" x1="0" y1="0" x2="1" y2="1">
<stop offset="0%" stop-color="#55CDFC" />
<stop offset="100%" stop-color="#0EA5E9" />
</linearGradient>
<linearGradient id="hairRight" x1="1" y1="0" x2="0" y2="1">
<stop offset="0%" stop-color="#F7A8B8" />
<stop offset="100%" stop-color="#FB7185" />
</linearGradient>
<linearGradient id="bellyGrad" x1="0" y1="0" x2="0" y2="1">
<stop offset="0%" stop-color="#FFFFFF" />
<stop offset="100%" stop-color="#E2E8F0" />
</linearGradient>
</defs>
<rect width="480" height="520" rx="48" fill="url(#bgGrad)" />
<g transform="translate(240 220)">
<path d="M-160 -20 C-140 -160 140 -160 160 -20 C180 140 60 220 0 220 C-60 220 -180 140 -160 -20" fill="#0F172A" />
<ellipse cx="0" cy="40" rx="120" ry="140" fill="#1E293B" />
<path d="M-88 -80 Q-40 -140 0 -120 Q40 -140 88 -80" fill="#1E293B" />
<path d="M-96 -84 Q-60 -160 -8 -132 L-8 -40 Z" fill="url(#hairLeft)" />
<path d="M96 -84 Q60 -160 8 -132 L8 -40 Z" fill="url(#hairRight)" />
<ellipse cx="-44" cy="-8" rx="26" ry="32" fill="#FFFFFF" />
<ellipse cx="44" cy="-8" rx="26" ry="32" fill="#FFFFFF" />
<circle cx="-44" cy="-4" r="14" fill="#0F172A" />
<circle cx="44" cy="-4" r="14" fill="#0F172A" />
<circle cx="-40" cy="-8" r="6" fill="#FFFFFF" opacity="0.7" />
<circle cx="48" cy="-10" r="6" fill="#FFFFFF" opacity="0.7" />
<path d="M0 12 L-18 32 Q0 44 18 32 Z" fill="#F472B6" />
<path d="M0 32 L-16 52 Q0 60 16 52 Z" fill="#FBEAED" />
<path d="M0 46 Q-32 78 0 86 Q32 78 0 46" fill="#FCA5A5" />
<ellipse cx="0" cy="74" rx="70" ry="82" fill="url(#bellyGrad)" />
<path d="M-128 48 Q-176 56 -176 120 Q-128 112 -104 80" fill="#F7A8B8" />
<path d="M128 48 Q176 56 176 120 Q128 112 104 80" fill="#55CDFC" />
<circle cx="-100" cy="94" r="18" fill="#FDE68A" opacity="0.85" />
<circle cx="100" cy="94" r="18" fill="#FDE68A" opacity="0.85" />
</g>
<g transform="translate(90 420)" font-family="'Fira Sans', 'Inter', 'Segoe UI', sans-serif" font-size="42" fill="#E0E7FF" letter-spacing="6">
<text>NIXIE</text>
</g>
<g transform="translate(90 468)" font-family="'Fira Sans', 'Inter', 'Segoe UI', sans-serif" font-size="20" fill="#A5B4FC">
<text>Declarative · Sourceful · Herself</text>
</g>
</svg>


`assets/nixette-wallpaper.svg`

@@ -1,42 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="3840" height="2160" viewBox="0 0 3840 2160" role="img" aria-labelledby="title desc">
<title id="title">Nixette Wallpaper</title>
<desc id="desc">Gradient wallpaper combining trans flag waves with Nix and Gentoo motifs.</desc>
<defs>
<linearGradient id="sky" x1="0" y1="0" x2="1" y2="1">
<stop offset="0%" stop-color="#0f172a" />
<stop offset="100%" stop-color="#1e1b4b" />
</linearGradient>
<linearGradient id="wave1" x1="0" y1="0" x2="1" y2="0">
<stop offset="0%" stop-color="#55CDFC" stop-opacity="0" />
<stop offset="50%" stop-color="#55CDFC" stop-opacity="0.5" />
<stop offset="100%" stop-color="#55CDFC" stop-opacity="0" />
</linearGradient>
<linearGradient id="wave2" x1="1" y1="0" x2="0" y2="0">
<stop offset="0%" stop-color="#F7A8B8" stop-opacity="0" />
<stop offset="50%" stop-color="#F7A8B8" stop-opacity="0.55" />
<stop offset="100%" stop-color="#F7A8B8" stop-opacity="0" />
</linearGradient>
<radialGradient id="halo" cx="0.5" cy="0.5" r="0.7">
<stop offset="0%" stop-color="#FDE68A" stop-opacity="0.8" />
<stop offset="100%" stop-color="#FDE68A" stop-opacity="0" />
</radialGradient>
</defs>
<rect width="3840" height="2160" fill="url(#sky)" />
<rect x="0" y="0" width="3840" height="2160" fill="url(#halo)" opacity="0.4" />
<path d="M0 1430 C640 1320 1280 1580 1860 1500 C2440 1420 3040 1660 3840 1500 L3840 2160 L0 2160 Z" fill="url(#wave1)" />
<path d="M0 1700 C500 1580 1200 1880 1900 1760 C2600 1640 3200 1920 3840 1800 L3840 2160 L0 2160 Z" fill="url(#wave2)" />
<g opacity="0.08" fill="none" stroke="#FFFFFF" stroke-width="24">
<path d="M600 360 l220 -220 h360 l220 220 l-220 220 h-360 z" />
<path d="M600 360 l220 -220" />
<path d="M820 140 l220 220" />
</g>
<g opacity="0.12" fill="none" stroke="#FFFFFF" stroke-width="22" transform="translate(2820 320) scale(0.9)">
<path d="M0 0 C120 -40 220 40 220 160 C220 260 160 320 60 320" />
</g>
<g transform="translate(940 1320)" font-family="'Fira Sans', 'Inter', 'Segoe UI', sans-serif" font-size="220" font-weight="700" letter-spacing="18" fill="#FFFFFF" opacity="0.95">
<text>NIXETTE</text>
</g>
<g transform="translate(960 1500)" font-family="'Fira Sans', 'Inter', 'Segoe UI', sans-serif" font-size="64" fill="#F7A8B8" opacity="0.9">
<text>Declarative · Sourceful · Herself</text>
</g>
</svg>


`build.rs`

@@ -1 +0,0 @@
fn main() {}

`concepts/nixette/README.md`

@@ -1,91 +0,0 @@
# Nixette – Declarative, Sourceful, and Unapologetically Herself
A playful concept distro imagined as the transfemme child of **NixOS** and **Gentoo**. Nixette blends the reproducible confidence of flakes with the fine-grained self-expression of USE flags, wrapped in a trans flag palette and a big, affirming hug.
---
## Identity Snapshot
- **Tagline:** _Declarative, sourceful, and unapologetically herself._
- **Mascot:** Chibi penguin “Nixie” with pastel pigtails, Nix snowflake + Gentoo swirl hoodie.
- **Palette:** `#55CDFC` (sky blue), `#F7A8B8` (pink), `#FFFFFF`, plus a deep accent `#7C3AED`.
- **Pronoun Prompt:** The installer asks for name/pronouns and personalises MOTD, systemd messages, and shell prompt.
---
## Feature Mix
| Pillar | How Nixette expresses it |
|----------------------|-----------------------------------------------------------------------------------------------------------|
| Reproducibility | Flake-native system definitions with versioned profiles (`comfort-zone`, `diy-princess`, `studio-mode`). |
| Custom compilation | `nix emerge` bridge turns Gentoo ebuild overlays into reproducible derivations with cached binaries. |
| Playful polish | Catppuccin-trans themes, `nixette-style` CLI to sync GTK/Qt/terminal styling, dynamic welcome affirmations.|
| Inclusive defaults | Flatpak + Steam pre-set for accessibility tools, Fcitx5, Orca, speech-dispatcher, pronoun-friendly docs. |
---
## Toolchain Concepts
- **`trans-init` installer** – Guided TUI that outputs `flake.nix`, including overlays for the `nix emerge` bridge. Provides story-mode narration for first boot.
- **`nixette-style`** – Syncs wallpapers, SDDM theme, terminal palette, Qt/KDE settings, all sourced from a YAML theme pack.
- **`emerge-optional`** – Spins up Gentoo chroots inside Nix build sandboxes for packages happiest as ebuilds. Output is cached as a Nix store derivation.
- **`affirm-d`** – Small daemon rotating `/etc/motd`, desktop notifications, and TTY colour accents with inclusive affirmations.
---
## Profile Catalogue
| Profile | Intent |
|-----------------|---------------------------------------------------------------------------------------------|
| Comfort Zone | KDE Plasma, PipeWire, Wayland, cozy defaults, automatic Catgirl cursor + emoji fonts. |
| DIY Princess | Minimal sway-based stack, just the flake scaffolding and overlay hooks for custom builds. |
| Studio Mode | Focuses on creative tooling (Krita, Blender, Ardour) and low-latency kernels, GPU tuning. |
---
## Roadmap Sketch
1. **Moodboard → Brand Pack** (logo, icon, wallpapers, VT boot splash).
2. **Prototype flakes** – `nix flake init --template nixette#comfort-zone` etc.
3. **Gentoo overlay bridge** – Validate `nix emerge` on a handful of ebuilds (mesa, wine, gamescope).
4. **Installer draft** – BubbleTea/ratatui-driven TUI, prompts for pronouns + accessibility preferences.
5. **Community docs** – Write inclusive user guide, contributor covenant, pronoun style guide.
6. **Launch zine** – Release notes styled like a mini-comic introducing Nixie's origin story.
7. **Accessibility audit** – Keyboard navigation, screen-reader pass, dyslexia-friendly typography options.
8. **Beta cosy jam** – Invite testers via queer sysadmin spaces; collect feedback through anonymous forms.
---
## Affirmations YAML (snippet)
```yaml
- id: bright-morning
message: "Good morning, {name}! Your system is as valid and custom as you are."
colour: "#F7A8B8"
- id: compile-hugs
message: "Kernel rebuilds take time. You deserve rest breaks and gentle music."
colour: "#55CDFC"
```
---
## Logo & Wallpaper
See `assets/nixette-logo.svg` for the primary wordmark, `assets/nixette-mascot.svg` for Nixie's badge, and `assets/nixette-wallpaper.svg` for a 4K wallpaper concept.
### Reference Configs
- `concepts/nixette/sample_flake.nix` demonstrates the comfort-zone profile with `nix emerge`, `affirmd`, and theming hooks.
---
## Contributing Idea Seeds
- Write sample flakes showcasing the hybrid build pipeline.
- Mock up the mascot in SVG for use in documentation.
- Design additional wallpapers (night mode, pride variants, low-light).
- Draft inclusive documentation templates (issue/PR forms, community guidelines).
- Publish a community pledge emphasising safety, pronoun respect, and boundaries.
- Host monthly "compile & chill" streams to showcase contributions.
Let Nixette be the distro that compiles joy, not just binaries. 💜

`concepts/nixette/sample_flake.nix`

@@ -1,62 +0,0 @@
{
description = "Nixette comfort-zone profile";
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
nixette-overlays.url = "github:nixette/overlays";
nixette-style.url = "github:nixette/style-pack";
};
outputs = { self, nixpkgs, nixette-overlays, nixette-style, ... }@inputs:
let
system = "x86_64-linux";
pkgs = import nixpkgs {
inherit system;
overlays = [ nixette-overlays.overlays.nix-emerge ];
};
in
{
nixosConfigurations.nixette-comfort-zone = nixpkgs.lib.nixosSystem {
inherit system;
modules = [
./profiles/comfort-zone.nix
({ config, pkgs, ... }:
{
nixpkgs.config.allowUnfree = true;
environment.systemPackages = with pkgs; [
nixette-style
steam
lutris
krita
];
services.nixette.nix-emerge = {
enable = true;
ebuilds = [
"games-emulation/gamescope"
"media-sound/pipewire"
];
};
services.nixette.affirmd.enable = true;
services.nixette.affirmd.pronouns = "she/her";
services.nixette.affirmd.motdPath = ./affirmations.yml;
programs.plasma.enable = true;
services.displayManager.sddm.enable = true;
services.displayManager.sddm.theme = nixette-style.themes.catgirl-sunrise;
users.users.nixie = {
isNormalUser = true;
extraGroups = [ "wheel" "audio" "input" "video" ];
shell = pkgs.zsh;
};
programs.zsh.promptInit = ''
eval "$(nixette-style prompt --name nixie --pronouns she/her)"
'';
})
];
};
};
}

`src/bin/graphql_server.rs`

@@ -1,80 +0,0 @@
#![cfg(feature = "graphql")]
use std::env;
use std::sync::Arc;
use actix_web::{App, HttpRequest, HttpResponse, HttpServer, middleware::Compress, web};
use anyhow::{Context, Result};
use juniper_actix::{graphiql_handler, graphql_handler};
use package_management::db;
use package_management::graphql::{self, GraphQLContext, Schema};
const DEFAULT_BIND_ADDR: &str = "127.0.0.1:8080";
#[actix_web::main]
async fn main() -> std::io::Result<()> {
if let Err(err) = run().await {
eprintln!("GraphQL server failed: {err:#}");
return Err(std::io::Error::new(
std::io::ErrorKind::Other,
err.to_string(),
));
}
Ok(())
}
async fn run() -> Result<()> {
let pool = db::establish_pool().context("initialising SQLite pool")?;
let schema = Arc::new(graphql::create_schema());
let jokes = Arc::new(graphql::context::JokeCatalog::default());
let bind_addr = env::var("LPKG_GRAPHQL_ADDR").unwrap_or_else(|_| DEFAULT_BIND_ADDR.to_string());
let workers = worker_count();
println!("GraphQL server listening on {bind_addr} with {workers} worker(s)");
HttpServer::new(move || {
let app_schema = Arc::clone(&schema);
let pool = pool.clone();
let jokes = Arc::clone(&jokes);
App::new()
.app_data(web::Data::from(app_schema))
.app_data(web::Data::new(pool))
.app_data(web::Data::from(jokes))
.wrap(Compress::default())
.service(
web::resource("/graphql")
.route(web::post().to(graphql_endpoint))
.route(web::get().to(graphql_endpoint)),
)
.service(web::resource("/playground").route(web::get().to(graphiql_endpoint)))
})
.workers(workers)
.bind(&bind_addr)
.with_context(|| format!("binding GraphQL server to {bind_addr}"))?
.run()
.await
.context("running GraphQL server")
}
async fn graphql_endpoint(
schema: web::Data<Arc<Schema>>,
pool: web::Data<db::Pool>,
jokes: web::Data<Arc<graphql::context::JokeCatalog>>,
req: HttpRequest,
payload: web::Payload,
) -> Result<HttpResponse, actix_web::Error> {
let context = GraphQLContext::with_catalog(pool.get_ref().clone(), Arc::clone(jokes.get_ref()));
graphql_handler(schema.get_ref().as_ref(), &context, req, payload).await
}
async fn graphiql_endpoint() -> Result<HttpResponse, actix_web::Error> {
graphiql_handler("/graphql", None).await
}
fn worker_count() -> usize {
let suggested = num_cpus::get();
suggested.clamp(1, 8)
}

`src/bin/logo_gen.rs`

@@ -1,181 +0,0 @@
use anyhow::Result;
use package_management::svg_builder::{Defs, Document, Element, Filter, Gradient, Group, path};
use std::fs;
fn main() -> Result<()> {
let svg = build_logo_svg();
fs::create_dir_all("assets")?;
fs::write("assets/logo.svg", svg)?;
Ok(())
}
fn build_logo_svg() -> String {
let defs = Defs::new()
.linear_gradient(
"bgGradient",
Gradient::new("0", "0", "1", "1")
.stop("0%", &[("stop-color", "#0f172a")])
.stop("100%", &[("stop-color", "#1e293b")]),
)
.linear_gradient(
"cubeGradient",
Gradient::new("0", "0", "1", "1")
.stop("0%", &[("stop-color", "#38bdf8")])
.stop("100%", &[("stop-color", "#0ea5e9")]),
)
.linear_gradient(
"cubeShadow",
Gradient::new("0", "1", "1", "0")
.stop("0%", &[("stop-color", "#0ea5e9"), ("stop-opacity", "0.4")])
.stop("100%", &[("stop-color", "#38bdf8"), ("stop-opacity", "0.1")]),
)
.linear_gradient(
"textGradient",
Gradient::new("0", "0", "0", "1")
.stop("0%", &[("stop-color", "#f8fafc")])
.stop("100%", &[("stop-color", "#cbd5f5")]),
)
.filter(
"glow",
Filter::new()
.attr("x", "-20%")
.attr("y", "-20%")
.attr("width", "140%")
.attr("height", "140%")
.raw("<feGaussianBlur stdDeviation=\"8\" result=\"blur\" />")
.raw("<feMerge><feMergeNode in=\"blur\" /><feMergeNode in=\"SourceGraphic\" /></feMerge>"),
);
let cube_inner = Group::new()
.attr("filter", "url(#glow)")
.child(
Element::new("path")
.attr("d", "M222 86l86-42 86 42v96l-86 42-86-42z")
.attr("fill", "url(#cubeGradient)")
.empty(),
)
.child(
Element::new("path")
.attr("d", "M308 44v182l86-42V86z")
.attr("fill", "url(#cubeShadow)")
.empty(),
)
.child(
Element::new("path")
.attr("d", "M262 96l46-22 46 22v48l-46 22-46-22z")
.attr("fill", "#0f172a")
.attr("opacity", "0.85")
.empty(),
)
.child(
Element::new("path")
.attr("d", "M308 74l32 15v32l-32 15-32-15v-32z")
.attr("fill", "none")
.attr("stroke", "#38bdf8")
.attr("stroke-width", "4")
.attr("stroke-linejoin", "round")
.empty(),
)
.child(
Element::new("path")
.attr("d", "M308 122l-32-15")
.attr("stroke", "#38bdf8")
.attr("stroke-width", "4")
.attr("stroke-linecap", "round")
.attr("opacity", "0.6")
.empty(),
)
.child(
Element::new("path")
.attr("d", "M308 122l32-15")
.attr("stroke", "#38bdf8")
.attr("stroke-width", "4")
.attr("stroke-linecap", "round")
.attr("opacity", "0.6")
.empty(),
)
.child(
Element::new("circle")
.attr("cx", "276")
.attr("cy", "107")
.attr("r", "5")
.attr("fill", "#38bdf8")
.empty(),
)
.child(
Element::new("circle")
.attr("cx", "340")
.attr("cy", "107")
.attr("r", "5")
.attr("fill", "#38bdf8")
.empty(),
);
let cube = Group::new()
.attr("transform", "translate(100 60)")
.child(cube_inner);
let circuits = Group::new()
.attr("fill", "none")
.attr("stroke", "#38bdf8")
.attr("stroke-width", "3")
.attr("stroke-linecap", "round")
.attr("opacity", "0.55")
.child(path("M120 78h72"))
.child(path("M120 110h48"))
.child(path("M120 142h64"))
.child(path("M448 110h72"))
.child(path("M472 142h88"))
.child(path("M448 174h96"));
let title_text = Group::new()
.attr(
"font-family",
"'Fira Sans', 'Inter', 'Segoe UI', sans-serif",
)
.attr("font-weight", "600")
.attr("font-size", "90")
.attr("letter-spacing", "6")
.child(
Element::new("text")
.attr("x", "120")
.attr("y", "246")
.attr("fill", "url(#textGradient)")
.text("LPKG"),
);
let tagline_group = Group::new()
.attr(
"font-family",
"'Fira Sans', 'Inter', 'Segoe UI', sans-serif",
)
.attr("font-size", "22")
.attr("fill", "#94a3b8")
.child(
Element::new("text")
.attr("x", "122")
.attr("y", "278")
.text("Lightweight Package Manager"),
);
Document::new(640, 320)
.view_box("0 0 640 320")
.role("img")
.aria_label("title", "desc")
.title("LPKG Logo")
.desc("Stylised package icon with circuitry and the letters LPKG.")
.add_defs(defs)
.add_element(
Element::new("rect")
.attr("width", "640")
.attr("height", "320")
.attr("rx", "28")
.attr("fill", "url(#bgGradient)")
.empty(),
)
.add_element(cube)
.add_element(circuits)
.add_element(title_text)
.add_element(tagline_group)
.finish()
}

`src/bin/metadata_indexer.rs`

@@ -2,11 +2,12 @@ use std::collections::HashSet;
 use std::fs;
 use std::path::{Path, PathBuf};
 
-use anyhow::{Context, Result, anyhow};
+use anyhow::{Context, Result};
 use chrono::Utc;
 use clap::{Parser, Subcommand};
 use jsonschema::JSONSchema;
 use regex::Regex;
+use reqwest::{blocking::Client, redirect::Policy};
 use scraper::{ElementRef, Html, Selector};
 use serde_json::{Value, json};
 use sha2::{Digest, Sha256};
@@ -378,16 +379,6 @@ fn extract_summary(value: &Value, relative_path: &Path) -> Result<PackageSummary
         .and_then(Value::as_str)
         .context("missing status.state")?
         .to_string();
-    let tags = status
-        .get("tags")
-        .and_then(Value::as_array)
-        .map(|array| {
-            array
-                .iter()
-                .filter_map(|value| value.as_str().map(|s| s.to_string()))
-                .collect::<Vec<_>>()
-        })
-        .unwrap_or_default();
 
     Ok(PackageSummary {
         schema_version,
@@ -402,7 +393,6 @@ fn extract_summary(value: &Value, relative_path: &Path) -> Result<PackageSummary
             .to_str()
             .unwrap_or_default()
             .replace('\\', "/"),
-        tags,
     })
 }
@@ -419,7 +409,18 @@ fn harvest_metadata(
     override_base: Option<&str>,
 ) -> Result<HarvestResult> {
     let page_url = resolve_page_url(book, page, override_base)?;
-    let html = fetch_text(&page_url).with_context(|| format!("fetching {page_url}"))?;
+    let client = Client::builder()
+        .user_agent("lpkg-metadata-indexer/0.1")
+        .build()?;
+    let response = client
+        .get(&page_url)
+        .send()
+        .with_context(|| format!("fetching {}", page_url))?
+        .error_for_status()
+        .with_context(|| format!("non-success status for {}", page_url))?;
+    let html = response
+        .text()
+        .with_context(|| format!("reading response body from {}", page_url))?;
     let document = Html::parse_document(&html);
     let harvest = build_metadata_value(metadata_dir, book, &page_url, &document, &html)?;
@@ -636,7 +637,6 @@ fn build_metadata_value(
     };
 
     let status_state = "draft";
-    let stage_tag = stage.clone().unwrap_or_else(|| "base-system".to_string());
 
     let package_json = json!({
         "schema_version": "v0.1.0",
@@ -687,7 +687,10 @@ fn build_metadata_value(
         "status": {
             "state": status_state,
             "issues": issues,
-            "tags": vec!["25.10".to_string(), stage_tag.clone()]
+            "tags": vec![
+                "25.10".to_string(),
+                stage.unwrap_or("base-system").to_string()
+            ]
         }
     });
@@ -937,7 +940,15 @@ fn refresh_manifest(
     let url = manifest_url(book, &kind)
         .with_context(|| format!("no manifest URL configured for book '{}'", book))?;
-    let body = fetch_text(url).with_context(|| format!("fetching {url}"))?;
+    let client = Client::builder().redirect(Policy::limited(5)).build()?;
+    let body = client
+        .get(url)
+        .send()
+        .with_context(|| format!("fetching {}", url))?
+        .error_for_status()
+        .with_context(|| format!("request failed for {}", url))?
+        .text()
+        .with_context(|| format!("reading response body from {}", url))?;
 
     fs::write(&cache_path, &body)
         .with_context(|| format!("caching manifest {}", cache_path.display()))?;
@@ -945,17 +956,6 @@ fn refresh_manifest(
 
     Ok(cache_path)
 }
 
-fn fetch_text(url: &str) -> Result<String> {
-    ureq::get(url)
-        .call()
-        .map_err(|err| match err {
-            ureq::Error::Status(code, _) => anyhow!("request failed: HTTP {code}"),
-            other => anyhow!("request failed: {other}"),
-        })?
-        .into_string()
-        .with_context(|| format!("reading response body from {url}"))
-}
-
 fn manifest_url(book: &str, kind: &ManifestKind) -> Option<&'static str> {
     match (book, kind) {
         ("mlfs", ManifestKind::WgetList) => {

`src/bin/nixette_logo_gen.rs`

@@ -1,126 +0,0 @@
use anyhow::Result;
use package_management::svg_builder::{Defs, Document, Element, Filter, Gradient, Group};
use std::fs;
fn main() -> Result<()> {
let svg = build_nixette_logo();
fs::create_dir_all("assets")?;
fs::write("assets/nixette-logo.svg", svg)?;
Ok(())
}
fn build_nixette_logo() -> String {
let defs = Defs::new()
.linear_gradient(
"bg",
Gradient::new("0", "0", "1", "1")
.stop("0%", &[("stop-color", "#55CDFC")])
.stop("100%", &[("stop-color", "#F7A8B8")]),
)
.linear_gradient(
"text",
Gradient::new("0", "0", "0", "1")
.stop("0%", &[("stop-color", "#FFFFFF")])
.stop("100%", &[("stop-color", "#E5E7FF")]),
)
.filter(
"softShadow",
Filter::new()
.attr("x", "-10%")
.attr("y", "-10%")
.attr("width", "120%")
.attr("height", "120%")
.raw("<feDropShadow dx=\"0\" dy=\"6\" stdDeviation=\"12\" flood-color=\"#7C3AED\" flood-opacity=\"0.3\" />"),
);
let emblem = Group::new().attr("transform", "translate(100 60)").child(
Group::new()
.attr("filter", "url(#softShadow)")
.child(
Element::new("path")
.attr("d", "M40 40 L72 0 L144 0 L176 40 L144 80 L72 80 Z")
.attr("fill", "url(#bg)")
.empty(),
)
.child(
Element::new("path")
.attr("d", "M72 0 L144 80")
.attr("stroke", "#FFFFFF")
.attr("stroke-width", "6")
.attr("stroke-linecap", "round")
.attr("opacity", "0.55")
.empty(),
)
.child(
Element::new("path")
.attr("d", "M144 0 L72 80")
.attr("stroke", "#FFFFFF")
.attr("stroke-width", "6")
.attr("stroke-linecap", "round")
.attr("opacity", "0.55")
.empty(),
)
.child(
Element::new("circle")
.attr("cx", "108")
.attr("cy", "40")
.attr("r", "22")
.attr("fill", "#0F172A")
.attr("stroke", "#FFFFFF")
.attr("stroke-width", "6")
.attr("opacity", "0.85")
.empty(),
)
.child(
Element::new("path")
.attr("d", "M108 24c8 0 14 6 14 16s-6 16-14 16")
.attr("stroke", "#F7A8B8")
.attr("stroke-width", "4")
.attr("stroke-linecap", "round")
.attr("fill", "none")
.empty(),
),
);
let wordmark = Group::new()
.attr("transform", "translate(220 126)")
.attr(
"font-family",
"'Fira Sans', 'Inter', 'Segoe UI', sans-serif",
)
.attr("font-weight", "700")
.attr("font-size", "72")
.attr("letter-spacing", "4")
.attr("fill", "url(#text)")
.child(Element::new("text").text("NIXETTE"));
let subtitle = Group::new()
.attr("transform", "translate(220 160)")
.attr(
"font-family",
"'Fira Sans', 'Inter', 'Segoe UI', sans-serif",
)
.attr("font-size", "22")
.attr("fill", "#A5B4FC")
.child(Element::new("text").text("Declarative · Sourceful · Herself"));
Document::new(640, 200)
.view_box("0 0 640 200")
.role("img")
.aria_label("title", "desc")
.title("Nixette Logo")
.desc("Wordmark combining Nix and Gentoo motifs with trans pride colours.")
.add_defs(defs)
.add_element(
Element::new("rect")
.attr("width", "640")
.attr("height", "200")
.attr("rx", "36")
.attr("fill", "#0F172A")
.empty(),
)
.add_element(emblem)
.add_element(wordmark)
.add_element(subtitle)
.finish()
}

`src/bin/nixette_mascot_gen.rs`

@@ -1,170 +0,0 @@
use anyhow::Result;
use package_management::svg_builder::{Defs, Document, Element, Gradient, Group};
use std::fs;
fn main() -> Result<()> {
let svg = build_mascot_svg();
fs::create_dir_all("assets")?;
fs::write("assets/nixette-mascot.svg", svg)?;
Ok(())
}
fn build_mascot_svg() -> String {
let defs = Defs::new()
.linear_gradient(
"bgGrad",
Gradient::new("0", "0", "0", "1")
.stop("0%", &[("stop-color", "#312E81")])
.stop("100%", &[("stop-color", "#1E1B4B")]),
)
.linear_gradient(
"hairLeft",
Gradient::new("0", "0", "1", "1")
.stop("0%", &[("stop-color", "#55CDFC")])
.stop("100%", &[("stop-color", "#0EA5E9")]),
)
.linear_gradient(
"hairRight",
Gradient::new("1", "0", "0", "1")
.stop("0%", &[("stop-color", "#F7A8B8")])
.stop("100%", &[("stop-color", "#FB7185")]),
)
.linear_gradient(
"bellyGrad",
Gradient::new("0", "0", "0", "1")
.stop("0%", &[("stop-color", "#FFFFFF")])
.stop("100%", &[("stop-color", "#E2E8F0")]),
);
let body = Group::new()
.attr("transform", "translate(240 220)")
.child(
Element::new("path")
.attr("d", "M-160 -20 C-140 -160 140 -160 160 -20 C180 140 60 220 0 220 C-60 220 -180 140 -160 -20")
.attr("fill", "#0F172A")
.empty(),
)
.child(
Element::new("ellipse")
.attr("cx", "0")
.attr("cy", "40")
.attr("rx", "120")
.attr("ry", "140")
.attr("fill", "#1E293B")
.empty(),
)
.child(
Element::new("path")
.attr("d", "M-88 -80 Q-40 -140 0 -120 Q40 -140 88 -80")
.attr("fill", "#1E293B")
.empty(),
)
.child(
Element::new("path")
.attr("d", "M-96 -84 Q-60 -160 -8 -132 L-8 -40 Z")
.attr("fill", "url(#hairLeft)")
.empty(),
)
.child(
Element::new("path")
.attr("d", "M96 -84 Q60 -160 8 -132 L8 -40 Z")
.attr("fill", "url(#hairRight)")
.empty(),
)
.child(ellipse(-44.0, -8.0, 26.0, 32.0, "#FFFFFF"))
.child(ellipse(44.0, -8.0, 26.0, 32.0, "#FFFFFF"))
.child(circle(-44.0, -4.0, 14.0, "#0F172A"))
.child(circle(44.0, -4.0, 14.0, "#0F172A"))
.child(circle_with_opacity(-40.0, -8.0, 6.0, "#FFFFFF", 0.7))
.child(circle_with_opacity(48.0, -10.0, 6.0, "#FFFFFF", 0.7))
.child(path_with_fill("M0 12 L-18 32 Q0 44 18 32 Z", "#F472B6"))
.child(path_with_fill("M0 32 L-16 52 Q0 60 16 52 Z", "#FBEAED"))
.child(path_with_fill("M0 46 Q-32 78 0 86 Q32 78 0 46", "#FCA5A5"))
.child(
Element::new("ellipse")
.attr("cx", "0")
.attr("cy", "74")
.attr("rx", "70")
.attr("ry", "82")
.attr("fill", "url(#bellyGrad)")
.empty(),
)
.child(path_with_fill("M-128 48 Q-176 56 -176 120 Q-128 112 -104 80", "#F7A8B8"))
.child(path_with_fill("M128 48 Q176 56 176 120 Q128 112 104 80", "#55CDFC"))
.child(circle_with_opacity(-100.0, 94.0, 18.0, "#FDE68A", 0.85))
.child(circle_with_opacity(100.0, 94.0, 18.0, "#FDE68A", 0.85));
Document::new(480, 520)
.view_box("0 0 480 520")
.role("img")
.aria_label("title", "desc")
.title("Nixette Mascot Badge")
.desc("Chibi penguin mascot with trans flag hair, blending Nix and Gentoo motifs.")
.add_defs(defs)
.add_element(
Element::new("rect")
.attr("width", "480")
.attr("height", "520")
.attr("rx", "48")
.attr("fill", "url(#bgGrad)")
.empty(),
)
.add_element(body)
.add_element(
Group::new()
.attr("transform", "translate(90 420)")
.attr(
"font-family",
"'Fira Sans', 'Inter', 'Segoe UI', sans-serif",
)
.attr("font-size", "42")
.attr("fill", "#E0E7FF")
.attr("letter-spacing", "6")
.child(Element::new("text").text("NIXIE")),
)
.add_element(
Group::new()
.attr("transform", "translate(90 468)")
.attr(
"font-family",
"'Fira Sans', 'Inter', 'Segoe UI', sans-serif",
)
.attr("font-size", "20")
.attr("fill", "#A5B4FC")
.child(Element::new("text").text("Declarative · Sourceful · Herself")),
)
.finish()
}
fn ellipse(cx: f64, cy: f64, rx: f64, ry: f64, fill: &str) -> String {
Element::new("ellipse")
.attr("cx", &format!("{}", cx))
.attr("cy", &format!("{}", cy))
.attr("rx", &format!("{}", rx))
.attr("ry", &format!("{}", ry))
.attr("fill", fill)
.empty()
}
fn circle(cx: f64, cy: f64, r: f64, fill: &str) -> String {
Element::new("circle")
.attr("cx", &format!("{}", cx))
.attr("cy", &format!("{}", cy))
.attr("r", &format!("{}", r))
.attr("fill", fill)
.empty()
}
fn circle_with_opacity(cx: f64, cy: f64, r: f64, fill: &str, opacity: f64) -> String {
Element::new("circle")
.attr("cx", &format!("{}", cx))
.attr("cy", &format!("{}", cy))
.attr("r", &format!("{}", r))
.attr("fill", fill)
.attr("opacity", &format!("{}", opacity))
.empty()
}
fn path_with_fill(d: &str, fill: &str) -> String {
Element::new("path").attr("d", d).attr("fill", fill).empty()
}

View file

@ -1,128 +0,0 @@
use anyhow::Result;
use package_management::svg_builder::{
Defs, Document, Element, Gradient, Group, RadialGradient, path,
};
use std::fs;
fn main() -> Result<()> {
let svg = build_wallpaper_svg();
fs::create_dir_all("assets")?;
fs::write("assets/nixette-wallpaper.svg", svg)?;
Ok(())
}
fn build_wallpaper_svg() -> String {
let defs = Defs::new()
.linear_gradient(
"sky",
Gradient::new("0", "0", "1", "1")
.stop("0%", &[("stop-color", "#0f172a")])
.stop("100%", &[("stop-color", "#1e1b4b")]),
)
.linear_gradient(
"wave1",
Gradient::new("0", "0", "1", "0")
.stop("0%", &[("stop-color", "#55CDFC"), ("stop-opacity", "0")])
.stop("50%", &[("stop-color", "#55CDFC"), ("stop-opacity", "0.5")])
.stop("100%", &[("stop-color", "#55CDFC"), ("stop-opacity", "0")]),
)
.linear_gradient(
"wave2",
Gradient::new("1", "0", "0", "0")
.stop("0%", &[("stop-color", "#F7A8B8"), ("stop-opacity", "0")])
.stop(
"50%",
&[("stop-color", "#F7A8B8"), ("stop-opacity", "0.55")],
)
.stop("100%", &[("stop-color", "#F7A8B8"), ("stop-opacity", "0")]),
)
.radial_gradient(
"halo",
RadialGradient::new("0.5", "0.5", "0.7")
.stop("0%", &[("stop-color", "#FDE68A"), ("stop-opacity", "0.8")])
.stop("100%", &[("stop-color", "#FDE68A"), ("stop-opacity", "0")]),
);
let text = Group::new()
.attr("transform", "translate(940 1320)")
.attr(
"font-family",
"'Fira Sans', 'Inter', 'Segoe UI', sans-serif",
)
.attr("font-size", "220")
.attr("font-weight", "700")
.attr("letter-spacing", "18")
.attr("fill", "#FFFFFF")
.attr("opacity", "0.95")
.child(Element::new("text").text("NIXETTE"));
let subtitle = Group::new()
.attr("transform", "translate(960 1500)")
.attr(
"font-family",
"'Fira Sans', 'Inter', 'Segoe UI', sans-serif",
)
.attr("font-size", "64")
.attr("fill", "#F7A8B8")
.attr("opacity", "0.9")
.child(Element::new("text").text("Declarative · Sourceful · Herself"));
Document::new(3840, 2160)
.view_box("0 0 3840 2160")
.role("img")
.aria_label("title", "desc")
.title("Nixette Wallpaper")
.desc("Gradient wallpaper combining trans flag waves with Nix and Gentoo motifs.")
.add_defs(defs)
.add_element(
Element::new("rect")
.attr("width", "3840")
.attr("height", "2160")
.attr("fill", "url(#sky)")
.empty(),
)
.add_element(
Element::new("rect")
.attr("x", "0")
.attr("y", "0")
.attr("width", "3840")
.attr("height", "2160")
.attr("fill", "url(#halo)")
.attr("opacity", "0.4")
.empty(),
)
.add_element(
Element::new("path")
.attr("d", "M0 1430 C640 1320 1280 1580 1860 1500 C2440 1420 3040 1660 3840 1500 L3840 2160 L0 2160 Z")
.attr("fill", "url(#wave1)")
.empty(),
)
.add_element(
Element::new("path")
.attr("d", "M0 1700 C500 1580 1200 1880 1900 1760 C2600 1640 3200 1920 3840 1800 L3840 2160 L0 2160 Z")
.attr("fill", "url(#wave2)")
.empty(),
)
.add_element(
Group::new()
.attr("opacity", "0.08")
.attr("fill", "none")
.attr("stroke", "#FFFFFF")
.attr("stroke-width", "24")
.child(path("M600 360 l220 -220 h360 l220 220 l-220 220 h-360 z"))
.child(path("M600 360 l220 -220"))
.child(path("M820 140 l220 220")),
)
.add_element(
Group::new()
.attr("opacity", "0.12")
.attr("fill", "none")
.attr("stroke", "#FFFFFF")
.attr("stroke-width", "22")
.attr("transform", "translate(2820 320) scale(0.9)")
.child(path("M0 0 C120 -40 220 40 220 160 C220 260 160 320 60 320")),
)
.add_element(text)
.add_element(subtitle)
.finish()
}

View file

@ -1,198 +0,0 @@
use std::fs;
fn main() -> anyhow::Result<()> {
let readme = Readme::build();
fs::write("README.md", readme)?;
Ok(())
}
struct MarkdownDoc {
buffer: String,
}
impl MarkdownDoc {
fn new() -> Self {
Self {
buffer: String::new(),
}
}
fn heading(mut self, level: u8, text: &str) -> Self {
self.buffer.push_str(&"#".repeat(level as usize));
self.buffer.push(' ');
self.buffer.push_str(text);
self.buffer.push_str("\n\n");
self
}
fn raw(mut self, text: &str) -> Self {
self.buffer.push_str(text);
self.buffer.push('\n');
self
}
fn paragraph(mut self, text: &str) -> Self {
self.buffer.push_str(text);
self.buffer.push_str("\n\n");
self
}
fn horizontal_rule(mut self) -> Self {
self.buffer.push_str("---\n\n");
self
}
fn bullet_list<I, S>(mut self, items: I) -> Self
where
I: IntoIterator<Item = S>,
S: AsRef<str>,
{
for item in items {
self.buffer.push_str("* ");
self.buffer.push_str(item.as_ref());
self.buffer.push('\n');
}
self.buffer.push('\n');
self
}
fn code_block(mut self, language: &str, code: &str) -> Self {
self.buffer.push_str("```");
self.buffer.push_str(language);
self.buffer.push('\n');
self.buffer.push_str(code.trim_matches('\n'));
self.buffer.push_str("\n```\n\n");
self
}
fn finish(self) -> String {
self.buffer
}
}
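// Editorial sketch (not part of the original source): the builder is one
// fluent chain that returns the final Markdown string, so its behaviour is
// easy to pin down in a test. Names below match the impl above.
#[cfg(test)]
mod markdown_doc_tests {
    use super::MarkdownDoc;

    #[test]
    fn composes_sections_in_order() {
        let doc = MarkdownDoc::new()
            .heading(1, "Demo")
            .paragraph("One paragraph.")
            .bullet_list(["a", "b"])
            .finish();
        assert!(doc.starts_with("# Demo\n\n"));
        assert!(doc.contains("One paragraph.\n\n"));
        assert!(doc.ends_with("* a\n* b\n\n"));
    }
}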
struct Readme;
impl Readme {
fn build() -> String {
let doc = MarkdownDoc::new()
.heading(1, "🧬 LPKG Lightweight Package Manager")
.raw("<p align=\"center\">\n <img src=\"assets/logo.svg\" alt=\"LPKG logo\" width=\"360\" />\n</p>\n")
.paragraph("LPKG is a minimalistic package manager written in Rust, designed for fast and simple software management on Unix-like systems. It emphasizes reproducibility and declarative configuration, leveraging **Nix Flakes** for development and deployment.")
.horizontal_rule()
.heading(2, "🚀 Features")
.bullet_list([
"**Fast & Lightweight** Minimal resource usage and quick operations.",
"**Rust-Powered** Safe and concurrent code with Rust.",
"**Cross-Platform** Works on Linux and macOS.",
"**Declarative Builds** Fully reproducible with Nix Flakes.",
"**Simple CLI** Intuitive commands for managing packages.",
])
.horizontal_rule()
.heading(2, "⚙️ Installation")
.heading(3, "Using Cargo")
.code_block("bash", "cargo install lpkg")
.heading(3, "Using Nix Flakes")
.paragraph("If you have Nix with flakes enabled:")
.code_block("bash", "nix profile install github:lesbiannix/lpkg")
.paragraph("Or to run without installing:")
.code_block("bash", "nix run github:lesbiannix/lpkg")
.horizontal_rule()
.heading(2, "🧰 Usage")
.paragraph("Basic command structure:")
.code_block("bash", "lpkg [command] [package]")
.paragraph("Common commands:")
.bullet_list([
"`install` Install a package",
"`remove` Remove a package",
"`update` Update the package list",
"`upgrade` Upgrade all installed packages",
])
.paragraph("For detailed usage:")
.code_block("bash", "lpkg --help")
.horizontal_rule()
.heading(2, "🔧 Development with Flakes")
.paragraph("Clone the repository:")
.code_block("bash", "git clone https://github.com/lesbiannix/lpkg.git\ncd lpkg")
.paragraph("Enter the flake development shell:")
.code_block("bash", "nix develop")
.paragraph("Build the project:")
.code_block("bash", "cargo build")
.paragraph("LPKG ships with tuned Cargo profiles:")
.bullet_list([
"**Dev builds** (`cargo build`) use `opt-level=0`, lots of codegen units, and incremental compilation for quick feedback while hacking.",
"**Release builds** (`cargo build --release`) enable `-O3`, fat LTO, and panic aborts for slim, fast binaries.",
"**GraphQL builds** add the server components when you need them:",
])
.code_block("bash", "cargo build --features graphql")
.paragraph("**PGO builds** are a two-step flow using the provided Cargo aliases:")
.code_block(
"bash",
r#"# 1) Instrument
RUSTFLAGS="-Cprofile-generate=target/pgo-data" cargo pgo-instrument
# run representative workloads to emit *.profraw files under target/pgo-data
llvm-profdata merge -o target/pgo-data/lpkg.profdata target/pgo-data/*.profraw
# 2) Optimise with the collected profile
RUSTFLAGS="-Cprofile-use=target/pgo-data/lpkg.profdata -Cllvm-args=-pgo-warn-missing-function" \
cargo pgo-build"#,
)
.paragraph("Regenerate project artefacts (README and SVG logo):")
.code_block("bash", "cargo run --bin readme_gen\ncargo run --bin logo_gen")
.paragraph("Run tests:")
.code_block("bash", "cargo test")
.paragraph("You can also run the project directly in the flake shell:")
.code_block("bash", "nix run")
.heading(2, "🕸️ GraphQL API")
.paragraph("LPKG now ships a lightweight GraphQL server powered by Actix Web and Juniper.")
.bullet_list([
"Start the server with `cargo run --features graphql --bin graphql_server` (set `LPKG_GRAPHQL_ADDR` to override `127.0.0.1:8080`).",
"Query endpoint: `http://127.0.0.1:8080/graphql`",
"Interactive playground: `http://127.0.0.1:8080/playground`",
])
.paragraph("Example query:")
.code_block("graphql", r"{
packages(limit: 5) {
name
version
enableLto
}
randomJoke {
package
text
}
}")
.heading(3, "AI metadata tooling")
.paragraph("The AI metadata store under `ai/metadata/` comes with a helper CLI to validate package records against the JSON schema and regenerate `index.json` after adding new entries:")
.code_block("bash", r"cargo run --bin metadata_indexer -- --base-dir . validate
cargo run --bin metadata_indexer -- --base-dir . index")
.paragraph("Use `--compact` with `index` if you prefer single-line JSON output.")
.paragraph("To draft metadata for a specific book page, you can run the harvest mode. It fetches the XHTML, scrapes the build commands, and emits a schema-compliant JSON skeleton (pass `--dry-run` to inspect the result without writing to disk):")
.code_block("bash", r"cargo run --bin metadata_indexer -- \
--base-dir . harvest \
--book mlfs \
--page chapter05/binutils-pass1 \
--dry-run")
.paragraph("Keep the jhalfs manifests current with:")
.code_block("bash", "cargo run --bin metadata_indexer -- --base-dir . refresh")
.paragraph("Passing `--books mlfs,blfs` restricts the refresh to specific books, and `--force` bypasses the local cache.")
.paragraph("To materialise a Rust module from harvested metadata:")
.code_block("bash", r"cargo run --bin metadata_indexer -- \
--base-dir . generate \
--metadata ai/metadata/packages/mlfs/binutils-pass-1.json \
--output target/generated/by_name")
.paragraph("Add `--overwrite` to regenerate an existing module directory.")
.heading(2, "📚 Documentation")
.bullet_list([
"[Architecture Overview](docs/ARCHITECTURE.md) high-level tour of the crate layout, binaries, and supporting modules.",
"[Metadata Harvesting Pipeline](docs/METADATA_PIPELINE.md) how the metadata indexer produces and validates the JSON records under `ai/metadata/`.",
"[Package Module Generation](docs/PACKAGE_GENERATION.md) end-to-end guide for converting harvested metadata into Rust modules under `src/pkgs/by_name/`.",
"Concept corner: [Nixette](concepts/nixette/README.md) a NixOS × Gentoo transfemme mash-up dreamed up for fun brand explorations.",
"`ai/notes.md` scratchpad for ongoing research tasks (e.g., deeper jhalfs integration).",
])
.horizontal_rule()
.heading(2, "📄 License")
.paragraph("LPKG is licensed under the [MIT License](LICENSE).");
doc.finish()
}
}

View file

@ -1,11 +1,9 @@
 pub mod models;
 pub mod schema;

-use std::cmp;
 use std::env;

 use anyhow::{Context, Result};
-use diesel::OptionalExtension;
 use diesel::prelude::*;
 use diesel::r2d2::{self, ConnectionManager};
 use diesel::sqlite::SqliteConnection;
@ -107,98 +105,3 @@ pub fn load_packages_via_pool(pool: &Pool) -> Result<Vec<Package>> {
     let mut conn = pool.get().context("acquiring database connection")?;
     load_packages(&mut conn)
 }
/// Load package definitions instead of raw Diesel models for convenience.
pub fn load_package_definitions(conn: &mut SqliteConnection) -> Result<Vec<PackageDefinition>> {
load_packages(conn)?
.into_iter()
.map(|record| record.into_definition())
.collect::<Result<Vec<_>>>()
}
/// Pool-backed helper mirroring [`load_package_definitions`].
pub fn load_package_definitions_via_pool(pool: &Pool) -> Result<Vec<PackageDefinition>> {
let mut conn = pool.get().context("acquiring database connection")?;
load_package_definitions(&mut conn)
}
/// Locate a package by name and optional version, returning the newest matching entry when
/// the version is not supplied.
pub fn find_package(
conn: &mut SqliteConnection,
name: &str,
version: Option<&str>,
) -> Result<Option<Package>> {
let mut query = packages_dsl::packages
.filter(packages_dsl::name.eq(name))
.into_boxed();
if let Some(version) = version {
query = query.filter(packages_dsl::version.eq(version));
}
query
.order(packages_dsl::version.desc())
.first::<Package>(conn)
.optional()
.context("querying package by name")
}
/// Convenience wrapper returning the package as a [`PackageDefinition`].
pub fn find_package_definition(
conn: &mut SqliteConnection,
name: &str,
version: Option<&str>,
) -> Result<Option<PackageDefinition>> {
Ok(find_package(conn, name, version)?
.map(|pkg| pkg.into_definition())
.transpose()?)
}
/// Pool-backed variant of [`find_package_definition`].
pub fn find_package_definition_via_pool(
pool: &Pool,
name: &str,
version: Option<&str>,
) -> Result<Option<PackageDefinition>> {
let mut conn = pool.get().context("acquiring database connection")?;
find_package_definition(&mut conn, name, version)
}
/// Locate packages using a basic substring match on the name, ordered deterministically and
/// optionally limited for responsiveness.
pub fn search_packages(
conn: &mut SqliteConnection,
term: &str,
limit: Option<i64>,
) -> Result<Vec<Package>> {
let trimmed = term.trim();
if trimmed.is_empty() {
return Ok(Vec::new());
}
let normalized: String = trimmed.chars().take(128).collect();
let sanitized = normalized.replace('%', "\\%").replace('_', "\\_");
let pattern = format!("%{}%", sanitized);
let mut query = packages_dsl::packages
.filter(packages_dsl::name.like(&pattern))
.order((packages_dsl::name, packages_dsl::version))
.into_boxed();
let effective_limit = limit.map(|value| cmp::max(1, value)).unwrap_or(50);
query = query.limit(cmp::min(effective_limit, 200));
query
.load::<Package>(conn)
.context("searching packages by name")
}
/// Pool-backed variant of [`search_packages`].
pub fn search_packages_via_pool(
pool: &Pool,
term: &str,
limit: Option<i64>,
) -> Result<Vec<Package>> {
let mut conn = pool.get().context("acquiring database connection")?;
search_packages(&mut conn, term, limit)
}
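// Editorial usage sketch (not in the original diff): a typical lookup flow
// combining the helpers above.
#[allow(dead_code)]
fn demo_lookup(conn: &mut SqliteConnection) -> Result<()> {
    // Exact name match; the newest version wins when none is supplied.
    if let Some(def) = find_package_definition(conn, "binutils", None)? {
        println!("found {} {}", def.name, def.version);
    }
    // Bounded substring search (the effective limit is clamped to 1..=200).
    for pkg in search_packages(conn, "util", Some(10))? {
        println!("candidate: {} {}", pkg.name, pkg.version);
    }
    Ok(())
}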

View file

@ -1,138 +0,0 @@
use std::sync::Arc;
use rand::rng;
use rand::seq::IteratorRandom;
use crate::db;
#[derive(Clone)]
pub struct GraphQLContext {
pub db_pool: db::Pool,
jokes: Arc<JokeCatalog>,
}
impl GraphQLContext {
pub fn new(db_pool: db::Pool) -> Self {
Self {
db_pool,
jokes: Arc::new(JokeCatalog::default()),
}
}
pub fn with_jokes(db_pool: db::Pool, jokes: Vec<Joke>) -> Self {
Self {
db_pool,
jokes: Arc::new(JokeCatalog::new(jokes)),
}
}
pub fn with_catalog(db_pool: db::Pool, catalog: Arc<JokeCatalog>) -> Self {
Self {
db_pool,
jokes: catalog,
}
}
pub fn joke_catalog(&self) -> Arc<JokeCatalog> {
Arc::clone(&self.jokes)
}
}
impl juniper::Context for GraphQLContext {}
#[derive(Clone, Debug)]
pub struct Joke {
pub id: String,
pub package: Option<String>,
pub text: String,
}
impl Joke {
pub fn new(id: impl Into<String>, package: Option<&str>, text: impl Into<String>) -> Self {
Self {
id: id.into(),
package: package.map(|pkg| pkg.to_string()),
text: text.into(),
}
}
}
#[derive(Clone)]
pub struct JokeCatalog {
entries: Arc<Vec<Joke>>,
}
impl JokeCatalog {
fn new(entries: Vec<Joke>) -> Self {
Self {
entries: Arc::new(entries),
}
}
pub fn random(&self, package: Option<&str>) -> Option<Joke> {
let mut rng = rng();
if let Some(package) = package {
if let Some(chosen) = self
.entries
.iter()
.filter(|joke| matches_package(joke, package))
.choose(&mut rng)
{
return Some(chosen.clone());
}
}
self.entries.iter().choose(&mut rng).cloned()
}
pub fn all(&self, package: Option<&str>) -> Vec<Joke> {
match package {
Some(package) => self
.entries
.iter()
.filter(|joke| matches_package(joke, package))
.cloned()
.collect(),
None => self.entries.as_ref().clone(),
}
}
}
impl Default for JokeCatalog {
fn default() -> Self {
Self::new(default_jokes())
}
}
fn matches_package(joke: &Joke, package: &str) -> bool {
joke.package
.as_deref()
.map(|pkg| pkg.eq_ignore_ascii_case(package))
.unwrap_or(false)
}
fn default_jokes() -> Vec<Joke> {
vec![
Joke::new(
"optimizer-overdrive",
Some("gcc"),
"The GCC optimizer walked into a bar, reordered everyones drinks, and they still tasted the same—just faster.",
),
Joke::new(
"linker-chuckle",
Some("binutils"),
"Our linker refuses to go on vacation; it cant handle unresolved references to the beach.",
),
Joke::new(
"glibc-giggle",
Some("glibc"),
"The C library tried stand-up comedy but segfaulted halfway through the punchline.",
),
Joke::new(
"pkg-general",
None,
"LPKG packages never get lost—they always follow the dependency graph back home.",
),
]
}
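// Editorial sketch: `random` prefers jokes scoped to the requested package
// and silently falls back to the whole catalog when none match.
#[allow(dead_code)]
fn demo_joke(catalog: &JokeCatalog) {
    if let Some(joke) = catalog.random(Some("gcc")) {
        println!("{}", joke.text);
    }
}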

View file

@ -1,14 +0,0 @@
pub mod context;
pub mod schema;
pub use context::{GraphQLContext, Joke};
pub use schema::QueryRoot;
use juniper::{EmptyMutation, EmptySubscription, RootNode};
pub type Schema =
RootNode<QueryRoot, EmptyMutation<GraphQLContext>, EmptySubscription<GraphQLContext>>;
pub fn create_schema() -> Schema {
Schema::new(QueryRoot {}, EmptyMutation::new(), EmptySubscription::new())
}
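// Editorial sketch: the schema can be exercised in-process without Actix.
// The argument order follows juniper 0.15/0.16's `execute_sync`; treat the
// exact signature as an assumption if the juniper version differs.
#[allow(dead_code)]
fn demo_query(ctx: &GraphQLContext) {
    let schema = create_schema();
    let (value, errors) = juniper::execute_sync(
        "{ randomJoke { text } }",
        None,
        &schema,
        &juniper::Variables::new(),
        ctx,
    )
    .expect("query should execute");
    assert!(errors.is_empty());
    println!("{value}");
}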

View file

@ -1,133 +0,0 @@
use anyhow::{Error as AnyhowError, Result as AnyhowResult};
use juniper::{FieldResult, GraphQLObject, Value, graphql_object};
use crate::{db, pkgs::package::PackageDefinition};
use super::context::{GraphQLContext, Joke};
#[derive(Clone, GraphQLObject)]
#[graphql(description = "Package metadata exposed via the GraphQL API")]
pub struct PackageType {
pub name: String,
pub version: String,
pub source: Option<String>,
pub md5: Option<String>,
pub configure_args: Vec<String>,
pub build_commands: Vec<String>,
pub install_commands: Vec<String>,
pub dependencies: Vec<String>,
pub enable_lto: bool,
pub enable_pgo: bool,
pub cflags: Vec<String>,
pub ldflags: Vec<String>,
pub profdata: Option<String>,
}
impl From<PackageDefinition> for PackageType {
fn from(pkg: PackageDefinition) -> Self {
let optimizations = pkg.optimizations;
Self {
name: pkg.name,
version: pkg.version,
source: pkg.source,
md5: pkg.md5,
configure_args: pkg.configure_args,
build_commands: pkg.build_commands,
install_commands: pkg.install_commands,
dependencies: pkg.dependencies,
enable_lto: optimizations.enable_lto,
enable_pgo: optimizations.enable_pgo,
cflags: optimizations.cflags,
ldflags: optimizations.ldflags,
profdata: optimizations.profdata,
}
}
}
#[derive(Clone, GraphQLObject)]
#[graphql(description = "A light-hearted package-related joke")]
pub struct JokeType {
pub id: String,
pub package: Option<String>,
pub text: String,
}
impl From<Joke> for JokeType {
fn from(joke: Joke) -> Self {
Self {
id: joke.id,
package: joke.package,
text: joke.text,
}
}
}
#[derive(Default)]
pub struct QueryRoot;
#[graphql_object(context = GraphQLContext)]
impl QueryRoot {
fn packages(context: &GraphQLContext, limit: Option<i32>) -> FieldResult<Vec<PackageType>> {
let limit = limit.unwrap_or(50).clamp(1, 200) as usize;
let definitions =
db::load_package_definitions_via_pool(&context.db_pool).map_err(field_error)?;
Ok(definitions
.into_iter()
.take(limit)
.map(PackageType::from)
.collect())
}
fn package(
context: &GraphQLContext,
name: String,
version: Option<String>,
) -> FieldResult<Option<PackageType>> {
let definition =
db::find_package_definition_via_pool(&context.db_pool, &name, version.as_deref())
.map_err(field_error)?;
Ok(definition.map(PackageType::from))
}
fn search(
context: &GraphQLContext,
query: String,
limit: Option<i32>,
) -> FieldResult<Vec<PackageType>> {
let limit = limit.map(|value| i64::from(value.clamp(1, 200)));
let results =
db::search_packages_via_pool(&context.db_pool, &query, limit).map_err(field_error)?;
let packages = results
.into_iter()
.map(|pkg| pkg.into_definition().map(PackageType::from))
.collect::<AnyhowResult<Vec<_>>>()
.map_err(field_error)?;
Ok(packages)
}
fn jokes(context: &GraphQLContext, package: Option<String>) -> FieldResult<Vec<JokeType>> {
let catalog = context.joke_catalog();
Ok(catalog
.all(package.as_deref())
.into_iter()
.map(JokeType::from)
.collect())
}
fn random_joke(
context: &GraphQLContext,
package: Option<String>,
) -> FieldResult<Option<JokeType>> {
let catalog = context.joke_catalog();
Ok(catalog.random(package.as_deref()).map(JokeType::from))
}
}
fn field_error(err: AnyhowError) -> juniper::FieldError {
juniper::FieldError::new(err.to_string(), Value::null())
}

View file

@ -1,12 +1,7 @@
-use anyhow::{Context, Result};
 use scraper::{Html, Selector};

-pub fn fetch_pre_blocks(url: &str) -> Result<Vec<String>> {
-    let body = ureq::get(url)
-        .call()
-        .with_context(|| format!("requesting {url}"))?
-        .into_string()
-        .with_context(|| format!("reading body from {url}"))?;
+pub fn fetch_pre_blocks(url: &str) -> anyhow::Result<Vec<String>> {
+    let body = reqwest::blocking::get(url)?.text()?;
     let document = Html::parse_document(&body);
     let selector = Selector::parse("pre").unwrap();

View file

@ -1,5 +1,6 @@
 use anyhow::{Context, Result};
 use regex::Regex;
+use reqwest::blocking::Client;
 use scraper::{Html, Selector};

 use super::{BookPackage, FetchOptions};
@ -9,10 +10,14 @@ pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
     let base = options.base_url.trim_end_matches('/');
     let url = format!("{base}/book.html");

-    let body = ureq::get(&url)
-        .call()
-        .with_context(|| format!("fetching {url}"))?
-        .into_string()
+    let client = Client::builder().build().context("building HTTP client")?;
+    let body = client
+        .get(&url)
+        .send()
+        .with_context(|| format!("fetching {}", url))?
+        .error_for_status()
+        .with_context(|| format!("request failed for {}", url))?
+        .text()
         .context("reading response body")?;

     parse_book_html(options, &url, &body)

View file

@ -1,5 +1,6 @@
 use anyhow::{Context, Result};
 use regex::Regex;
+use reqwest::blocking::Client;
 use scraper::{Html, Selector};

 use super::{BookPackage, FetchOptions};
@ -9,10 +10,14 @@ pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
     let base = options.base_url.trim_end_matches('/');
     let url = format!("{base}/book.html");

-    let body = ureq::get(&url)
-        .call()
-        .with_context(|| format!("fetching {url}"))?
-        .into_string()
+    let client = Client::builder().build().context("building HTTP client")?;
+    let body = client
+        .get(&url)
+        .send()
+        .with_context(|| format!("fetching {}", url))?
+        .error_for_status()
+        .with_context(|| format!("request failed for {}", url))?
+        .text()
         .context("reading response body")?;

     parse_book_html(options, &url, &body)

View file

@ -1,5 +1,6 @@
 use anyhow::{Context, Result};
 use regex::Regex;
+use reqwest::blocking::Client;
 use scraper::{Html, Selector};

 use super::{BookPackage, FetchOptions};
@ -8,10 +9,14 @@ pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
     let base = options.base_url.trim_end_matches('/');
     let url = format!("{base}/book.html");

-    let body = ureq::get(&url)
-        .call()
-        .with_context(|| format!("fetching {url}"))?
-        .into_string()
+    let client = Client::builder().build().context("building HTTP client")?;
+    let body = client
+        .get(&url)
+        .send()
+        .with_context(|| format!("fetching {}", url))?
+        .error_for_status()
+        .with_context(|| format!("request failed for {}", url))?
+        .text()
         .context("reading response body")?;

     parse_book_html(options, &url, &body)

View file

@ -1,13 +1,10 @@
 pub mod ai;
 pub mod db;
-#[cfg(feature = "graphql")]
-pub mod graphql;
 pub mod html;
 pub mod ingest;
 pub mod md5_utils;
 pub mod mirrors;
 pub mod pkgs;
-pub mod svg_builder;
 pub mod version_check;
 pub mod wget_list;

View file

@ -7,7 +7,7 @@ use package_management::{
     db, html, md5_utils,
     pkgs::{
         by_name::bi::binutils::cross_toolchain::build_binutils_from_page,
-        generator, mlfs,
+        mlfs,
         scaffolder::{self, ScaffoldRequest},
     },
     version_check, wget_list,
@ -134,6 +134,8 @@ enum TuiCommand {
 }

 fn main() -> Result<()> {
+    let _ = tracing_subscriber::fmt::try_init();
+
     let cli = Cli::parse();

     match cli.command {
@ -217,8 +219,10 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> {
             lfs_root,
             target,
         } => {
-            build_binutils_from_page(&url, &lfs_root, target)
-                .with_context(|| format!("Building Binutils using instructions from {url}"))?;
+            let runtime = tokio::runtime::Runtime::new().context("Creating async runtime")?;
+            runtime
+                .block_on(build_binutils_from_page(&url, &lfs_root, target))
+                .map_err(|e| anyhow!("Building Binutils using instructions from {url}: {e}"))?;

             println!("Binutils workflow completed successfully");
         }
@ -302,14 +306,6 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> {
             let mut created = 0usize;
             let mut skipped = Vec::new();

-            let metadata_entries = match mlfs::load_metadata_index() {
-                Ok(entries) => Some(entries),
-                Err(err) => {
-                    eprintln!("[mlfs] metadata index error: {err}");
-                    None
-                }
-            };
-
             let pool = if dry_run {
                 None
             } else {
@ -317,6 +313,11 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> {
             };

             for record in records {
+                let module_alias = record.module_alias();
+                if !seen.insert(module_alias.clone()) {
+                    continue;
+                }
+
                 if let Some(limit) = limit {
                     if processed >= limit {
                         break;
@ -324,85 +325,6 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> {
                 }
                 processed += 1;

-                let metadata_entry = metadata_entries
-                    .as_ref()
-                    .and_then(|entries| mlfs::match_metadata(&record, entries));
-
-                let mut request = if let Some(entry) = metadata_entry {
-                    let path = PathBuf::from("ai/metadata").join(&entry.path);
-                    match generator::request_from_metadata(&path) {
-                        Ok(req) => req,
-                        Err(err) => {
-                            eprintln!(
-                                "[mlfs] metadata apply error for {} {}: {}",
-                                record.name, record.version, err
-                            );
-                            ScaffoldRequest {
-                                name: record.name.clone(),
-                                version: record.version.clone(),
-                                source: None,
-                                md5: None,
-                                configure_args: Vec::new(),
-                                build_commands: Vec::new(),
-                                install_commands: Vec::new(),
-                                dependencies: Vec::new(),
-                                enable_lto: true,
-                                enable_pgo: true,
-                                cflags: Vec::new(),
-                                ldflags: Vec::new(),
-                                profdata: None,
-                                stage: record.stage.clone(),
-                                variant: record.variant.clone(),
-                                notes: record.notes.clone(),
-                                module_override: None,
-                            }
-                        }
-                    }
-                } else {
-                    ScaffoldRequest {
-                        name: record.name.clone(),
-                        version: record.version.clone(),
-                        source: None,
-                        md5: None,
-                        configure_args: Vec::new(),
-                        build_commands: Vec::new(),
-                        install_commands: Vec::new(),
-                        dependencies: Vec::new(),
-                        enable_lto: true,
-                        enable_pgo: true,
-                        cflags: Vec::new(),
-                        ldflags: Vec::new(),
-                        profdata: None,
-                        stage: record.stage.clone(),
-                        variant: record.variant.clone(),
-                        notes: record.notes.clone(),
-                        module_override: None,
-                    }
-                };
-
-                if request.stage.is_none() {
-                    request.stage = record.stage.clone();
-                }
-                if request.variant.is_none() {
-                    request.variant = record.variant.clone();
-                }
-                if request.notes.is_none() {
-                    request.notes = record.notes.clone();
-                }
-
-                let module_alias = request
-                    .module_override
-                    .clone()
-                    .unwrap_or_else(|| record.module_alias());
-                if !seen.insert(module_alias.clone()) {
-                    continue;
-                }
-                if request.module_override.is_none() {
-                    request.module_override = Some(module_alias.clone());
-                }
-
                 if dry_run {
                     println!(
                         "Would scaffold {:<18} {:<12} -> {}",
@ -411,6 +333,26 @@ fn run_workflow(cmd: WorkflowCommand) -> Result<()> {
                     continue;
                 }

+                let request = ScaffoldRequest {
+                    name: record.name.clone(),
+                    version: record.version.clone(),
+                    source: None,
+                    md5: None,
+                    configure_args: Vec::new(),
+                    build_commands: Vec::new(),
+                    install_commands: Vec::new(),
+                    dependencies: Vec::new(),
+                    enable_lto: true,
+                    enable_pgo: true,
+                    cflags: Vec::new(),
+                    ldflags: Vec::new(),
+                    profdata: None,
+                    stage: record.stage.clone(),
+                    variant: record.variant.clone(),
+                    notes: record.notes.clone(),
+                    module_override: Some(module_alias.clone()),
+                };
+
                 match scaffolder::scaffold_package(&base_dir, request) {
                     Ok(result) => {
                         if let Some(pool) = &pool {

View file

@ -1,15 +1,16 @@
-use anyhow::{Context, Result};
+use anyhow::Result;
+use reqwest::blocking::Client;
+use reqwest::redirect::Policy;

 pub fn get_md5sums() -> Result<String> {
-    let agent = ureq::AgentBuilder::new().redirects(5).build();
-    let url = "https://www.linuxfromscratch.org/~thomas/multilib-m32/md5sums";
-
-    let response = agent.get(url).call().map_err(|err| match err {
-        ureq::Error::Status(code, _) => anyhow::anyhow!("Failed to fetch MD5sums: HTTP {code}"),
-        other => anyhow::anyhow!("Failed to fetch MD5sums: {other}"),
-    })?;
-
-    response
-        .into_string()
-        .with_context(|| format!("reading body from {url}"))
+    let client = Client::builder().redirect(Policy::limited(5)).build()?;
+    let res = client
+        .get("https://www.linuxfromscratch.org/~thomas/multilib-m32/md5sums")
+        .send()?;
+
+    if !res.status().is_success() {
+        anyhow::bail!("Failed to fetch MD5sums: HTTP {}", res.status());
+    }
+
+    Ok(res.text()?)
 }

View file

@ -1,11 +1,14 @@
 use console::Style;
+use reqwest::blocking::Client;
 use scraper::{Html, Selector};
 use std::io::{self, Write};

 pub fn fetch_mirrors() -> Result<Vec<String>, Box<dyn std::error::Error>> {
-    let res = ureq::get("https://www.linuxfromscratch.org/lfs/mirrors.html#files")
-        .call()?
-        .into_string()?;
+    let client = Client::new();
+    let res = client
+        .get("https://www.linuxfromscratch.org/lfs/mirrors.html#files")
+        .send()?
+        .text()?;

     let document = Html::parse_document(&res);
     let selector = Selector::parse("a[href^='http']").unwrap();

View file

@ -1,13 +1,12 @@
-// Cross-toolchain runner that uses parser.rs info (no hardcoding).
+// async cross-toolchain runner that uses parser.rs info (no hardcoding)
 use crate::pkgs::by_name::bi::binutils::parser::{BinutilsInfo, fetch_page, parse_binutils};
-use anyhow::{Context, Result, anyhow};
-use shell_words;
+use reqwest::Client;
 use std::{
-    fs::{self, File},
-    io,
+    error::Error,
     path::{Path, PathBuf},
-    process::Command,
 };
+use tokio::process::Command;
+use tracing::{info, warn};

 /// Configuration object - uses environment if values omitted.
 #[derive(Debug, Clone)]
@ -23,7 +22,14 @@ impl BinutilsConfig {
         let lfs_root = lfs_root.as_ref().to_path_buf();
         let target = target
             .or_else(|| std::env::var("LFS_TGT").ok())
-            .unwrap_or_else(|| "x86_64-lfs-linux-gnu".to_string());
+            .unwrap_or_else(|| {
+                // fallback best-effort
+                if cfg!(target_os = "linux") {
+                    "x86_64-lfs-linux-gnu".to_string()
+                } else {
+                    "x86_64-lfs-linux-gnu".to_string()
+                }
+            });

         Self {
             lfs_root,
@ -59,50 +65,122 @@ impl BinutilsConfig {
     }
 }

-/// High-level orchestration.
-pub fn build_binutils_from_page(
+/// High-level orchestration. Async.
+pub async fn build_binutils_from_page(
     page_url: &str,
-    lfs_root: impl AsRef<Path>,
+    lfs_root: impl AsRef<std::path::Path>,
     target: Option<String>,
-) -> Result<()> {
-    println!("Fetching page: {page_url}");
-    let html = fetch_page(page_url).context("fetching binutils instructions")?;
-    let info = parse_binutils(&html).context("parsing binutils instructions")?;
-    println!("Parsed info: {:?}", info);
+) -> Result<(), Box<dyn Error>> {
+    // 1) fetch page
+    info!("Fetching page: {}", page_url);
+    let html = fetch_page(page_url).await?;
+    let info = parse_binutils(&html)?;
+    info!("Parsed info: {:?}", info);

+    // 2) build config
     let cfg = BinutilsConfig::new(lfs_root, target, info.clone());

+    // 3) ensure source base dir exists
     let src_base = cfg.source_base_dir();
     if !src_base.exists() {
-        println!("Creating source base dir: {:?}", src_base);
-        fs::create_dir_all(&src_base)
-            .with_context(|| format!("creating source base dir {:?}", src_base))?;
+        info!("Creating source base dir: {:?}", src_base);
+        tokio::fs::create_dir_all(&src_base).await?;
     }

-    let mut source_dir = locate_binutils_dir(&src_base)?;
+    // 4) find extracted source directory (binutils-*)
+    let mut source_dir: Option<PathBuf> = None;
+    if let Ok(mut rd) = tokio::fs::read_dir(&src_base).await {
+        while let Some(entry) = rd.next_entry().await? {
+            let ft = entry.file_type().await?;
+            if ft.is_dir() {
+                let name = entry.file_name().to_string_lossy().to_string();
+                if name.to_lowercase().contains("binutils") {
+                    source_dir = Some(entry.path());
+                    break;
+                }
+            }
+        }
+    }

+    // 5) if not found, attempt to download & extract
     if source_dir.is_none() {
-        source_dir = download_and_extract(&cfg, &src_base)?;
+        if let Some(dl) = &cfg.info.download_url {
+            info!("No extracted source found; will download {}", dl);
+            // download file into src_base
+            let client = Client::new();
+            let resp = client.get(dl).send().await?;
+            if !resp.status().is_success() {
+                return Err(format!("Download failed: {}", resp.status()).into());
+            }
+            // pick a filename from URL
+            let url_path = url::Url::parse(dl)?;
+            let filename = url_path
+                .path_segments()
+                .and_then(|seg| seg.last())
+                .and_then(|s| {
+                    if !s.is_empty() {
+                        Some(s.to_string())
+                    } else {
+                        None
+                    }
+                })
+                .ok_or("Cannot determine filename from URL")?;
+            let outpath = src_base.join(&filename);
+            info!("Saving archive to {:?}", outpath);
+            let bytes = resp.bytes().await?;
+            tokio::fs::write(&outpath, &bytes).await?;
+            // extract using tar (async spawn). Use absolute path to src_base
+            info!("Extracting archive {:?}", outpath);
+            let tar_path = outpath.clone();
+            let mut tar_cmd = Command::new("tar");
+            tar_cmd.arg("-xf").arg(&tar_path).arg("-C").arg(&src_base);
+            let status = tar_cmd.status().await?;
+            if !status.success() {
+                return Err("tar extraction failed".into());
+            }
+            // look for extracted dir again
+            if let Ok(mut rd) = tokio::fs::read_dir(&src_base).await {
+                while let Some(entry) = rd.next_entry().await? {
+                    let ft = entry.file_type().await?;
+                    if ft.is_dir() {
+                        let name = entry.file_name().to_string_lossy().to_string();
+                        if name.to_lowercase().contains("binutils") {
+                            source_dir = Some(entry.path());
+                            break;
+                        }
+                    }
+                }
+            }
+        } else {
+            warn!("No download URL found on the page and no unpacked source present.");
+        }
     }

-    let source_dir = source_dir
-        .ok_or_else(|| anyhow!("Could not locate or download/extract Binutils source"))?;
-    println!("Using source dir: {:?}", source_dir);
+    let source_dir = match source_dir {
+        Some(p) => p,
+        None => return Err("Could not locate or download/extract Binutils source".into()),
+    };
+    info!("Using source dir: {:?}", source_dir);

+    // 6) prepare build dir
     let build_dir = cfg.build_dir();
     if !build_dir.exists() {
-        println!("Creating build dir {:?}", build_dir);
-        fs::create_dir_all(&build_dir)
-            .with_context(|| format!("creating build dir {:?}", build_dir))?;
+        info!("Creating build dir {:?}", build_dir);
+        tokio::fs::create_dir_all(&build_dir).await?;
     }

+    // 7) run configure: use absolute configure script path in source_dir
     let configure_path = source_dir.join("configure");
     if !configure_path.exists() {
-        return Err(anyhow!(
-            "configure script not found at {:?}",
-            configure_path
-        ));
+        return Err(format!("configure script not found at {:?}", configure_path).into());
     }

+    // If parser produced configure args tokens, use them; otherwise fallback to common flags
     let args = if !cfg.info.configure_args.is_empty() {
         cfg.info.configure_args.clone()
     } else {
@ -115,6 +193,7 @@ pub fn build_binutils_from_page(
         ]
     };

+    // replace $LFS and $LFS_TGT in args
     let args: Vec<String> = args
         .into_iter()
         .map(|a| {
@ -123,126 +202,83 @@ pub fn build_binutils_from_page(
         })
         .collect();

-    println!("Configuring with args: {:?}", args);
-    let mut configure_cmd = Command::new(&configure_path);
-    configure_cmd.current_dir(&build_dir);
-    configure_cmd.args(&args);
-    run_command(&mut configure_cmd).context("configure step failed")?;
-    println!("configure completed");
+    info!("Configuring with args: {:?}", args);
+
+    // spawn configure
+    let mut conf_cmd = Command::new(&configure_path);
+    conf_cmd.current_dir(&build_dir);
+    for a in &args {
+        conf_cmd.arg(a);
+    }
+    conf_cmd.stdout(std::process::Stdio::inherit());
+    conf_cmd.stderr(std::process::Stdio::inherit());
+    let status = conf_cmd.status().await?;
+    if !status.success() {
+        return Err("configure step failed".into());
+    }
+    info!("configure completed");

+    // 8) run build commands (make-like)
     if !cfg.info.build_cmds.is_empty() {
-        for raw in &cfg.info.build_cmds {
-            run_shell_command(raw, &build_dir)
-                .with_context(|| format!("build step failed: {raw}"))?;
-        }
-    } else {
-        let mut make_cmd = Command::new("make");
-        make_cmd.current_dir(&build_dir);
-        run_command(&mut make_cmd).context("make failed")?;
-    }
-    println!("build completed");
-
-    if !cfg.info.install_cmds.is_empty() {
-        for raw in &cfg.info.install_cmds {
-            run_shell_command(raw, &build_dir)
-                .with_context(|| format!("install step failed: {raw}"))?;
-        }
-    } else {
-        let mut install_cmd = Command::new("make");
-        install_cmd.arg("install");
-        install_cmd.current_dir(&build_dir);
-        run_command(&mut install_cmd).context("make install failed")?;
-    }
-    println!("install completed");
-
-    Ok(())
-}
-
-fn locate_binutils_dir(base: &Path) -> Result<Option<PathBuf>> {
-    if !base.exists() {
-        return Ok(None);
-    }
-    for entry in fs::read_dir(base).with_context(|| format!("reading directory {:?}", base))? {
-        let entry = entry?;
-        if entry.file_type()?.is_dir() {
-            let name = entry.file_name().to_string_lossy().to_lowercase();
-            if name.contains("binutils") {
-                return Ok(Some(entry.path()));
-            }
-        }
-    }
-    Ok(None)
-}
-
-fn download_and_extract(cfg: &BinutilsConfig, base: &Path) -> Result<Option<PathBuf>> {
-    let url = match cfg.info.download_url.as_deref() {
-        Some(url) => url,
-        None => {
-            eprintln!("No download URL found on the page and no unpacked source present.");
-            return Ok(None);
-        }
-    };
-
-    println!("Downloading {url}");
-    let response = ureq::get(url).call().map_err(|err| match err {
-        ureq::Error::Status(code, _) => anyhow!("Download failed: HTTP {code}"),
-        other => anyhow!("Download failed: {other}"),
-    })?;
-
-    let final_url = response.get_url().to_string();
-    let parsed = url::Url::parse(&final_url)
-        .with_context(|| format!("parsing final download URL {final_url}"))?;
-    let filename = parsed
-        .path_segments()
-        .and_then(|segments| segments.last())
-        .filter(|s| !s.is_empty())
-        .map(|s| s.to_string())
-        .ok_or_else(|| anyhow!("Cannot determine filename from URL"))?;
-
-    let outpath = base.join(&filename);
-    let mut reader = response.into_reader();
-    let mut file =
-        File::create(&outpath).with_context(|| format!("creating archive file {:?}", outpath))?;
-    io::copy(&mut reader, &mut file)
-        .with_context(|| format!("writing archive to {:?}", outpath))?;
-
-    println!("Extracting archive {:?}", outpath);
-    let status = Command::new("tar")
-        .arg("-xf")
-        .arg(&outpath)
-        .arg("-C")
-        .arg(base)
-        .status()
-        .with_context(|| "spawning tar".to_string())?;
-    if !status.success() {
-        return Err(anyhow!("tar extraction failed"));
-    }
-
-    locate_binutils_dir(base)
-}
-
-fn run_command(cmd: &mut Command) -> Result<()> {
-    cmd.stdout(std::process::Stdio::inherit());
-    cmd.stderr(std::process::Stdio::inherit());
-    let status = cmd
-        .status()
-        .with_context(|| "spawning process".to_string())?;
-    if !status.success() {
-        return Err(anyhow!("command exited with status {status}"));
-    }
-    Ok(())
-}
-
-fn run_shell_command(raw: &str, cwd: &Path) -> Result<()> {
-    let mut parts = shell_words::split(raw).unwrap_or_else(|_| vec![raw.to_string()]);
-    if parts.is_empty() {
-        return Ok(());
-    }
-    let prog = parts.remove(0);
-    let mut cmd = Command::new(prog);
-    if !parts.is_empty() {
-        cmd.args(parts);
-    }
-    cmd.current_dir(cwd);
-    run_command(&mut cmd)
-}
+        for b in &cfg.info.build_cmds {
+            // split into program + args
+            let mut parts = shell_words::split(b).unwrap_or_else(|_| vec![b.clone()]);
+            let prog = parts.remove(0);
+            let mut cmd = Command::new(prog);
+            if !parts.is_empty() {
+                cmd.args(parts);
+            }
+            cmd.current_dir(&build_dir);
+            cmd.stdout(std::process::Stdio::inherit());
+            cmd.stderr(std::process::Stdio::inherit());
+            let status = cmd.status().await?;
+            if !status.success() {
+                return Err(format!("build step failed: {:?}", b).into());
+            }
+        }
+    } else {
+        // fallback to running `make`
+        let mut m = Command::new("make");
+        m.current_dir(&build_dir);
+        m.stdout(std::process::Stdio::inherit());
+        m.stderr(std::process::Stdio::inherit());
+        let status = m.status().await?;
+        if !status.success() {
+            return Err("make failed".into());
+        }
+    }
+    info!("build completed");
+
+    // 9) run install commands (make install)
+    if !cfg.info.install_cmds.is_empty() {
+        for inst in &cfg.info.install_cmds {
+            let mut parts = shell_words::split(inst).unwrap_or_else(|_| vec![inst.clone()]);
+            let prog = parts.remove(0);
+            let mut cmd = Command::new(prog);
+            if !parts.is_empty() {
+                cmd.args(parts);
+            }
+            cmd.current_dir(&build_dir);
+            cmd.stdout(std::process::Stdio::inherit());
+            cmd.stderr(std::process::Stdio::inherit());
+            let status = cmd.status().await?;
+            if !status.success() {
+                return Err(format!("install step failed: {:?}", inst).into());
+            }
+        }
+    } else {
+        // fallback `make install`
+        let mut mi = Command::new("make");
+        mi.arg("install");
+        mi.current_dir(&build_dir);
+        mi.stdout(std::process::Stdio::inherit());
+        mi.stderr(std::process::Stdio::inherit());
+        let status = mi.status().await?;
+        if !status.success() {
+            return Err("make install failed".into());
+        }
+    }
+    info!("install completed. Binutils Pass 1 done.");
+
+    Ok(())
 }
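// Editorial sketch (not part of the compare): with the async signature on
// the new side, a synchronous caller drives the build through a Tokio
// runtime, mirroring the main.rs change elsewhere in this compare. The
// "/mnt/lfs" root is a hypothetical example value.
#[allow(dead_code)]
fn run_sync(url: &str) -> Result<(), Box<dyn std::error::Error>> {
    let rt = tokio::runtime::Runtime::new()?;
    rt.block_on(build_binutils_from_page(url, "/mnt/lfs", None))
}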

View file

@ -1,6 +1,7 @@
-// Parser for Binutils Pass 1 page using lightweight HTTP fetching.
-use anyhow::{Context, Result};
+// async parser for Binutils Pass 1 page
+use reqwest::Client;
 use scraper::{Html, Selector};
+use std::error::Error;

 #[derive(Debug, Clone)]
 pub struct BinutilsInfo {
@ -33,16 +34,16 @@ impl Default for BinutilsInfo {
     }
 }

-/// Fetch page content synchronously
-pub fn fetch_page(url: &str) -> Result<String> {
-    ureq::get(url)
-        .call()
-        .map_err(|err| match err {
-            ureq::Error::Status(code, _) => anyhow::anyhow!("Failed to fetch {url}: HTTP {code}"),
-            other => anyhow::anyhow!("Failed to fetch {url}: {other}"),
-        })?
-        .into_string()
-        .with_context(|| format!("reading body from {url}"))
+/// Fetch page content (async)
+pub async fn fetch_page(url: &str) -> Result<String, Box<dyn Error>> {
+    let client = Client::new();
+    let res = client.get(url).send().await?;
+    let status = res.status();
+    if !status.is_success() {
+        return Err(format!("Failed to fetch {}: {}", url, status).into());
+    }
+    let text = res.text().await?;
+    Ok(text)
 }

 /// Parse the LFS Binutils pass1 page; robust to small formatting changes.
@ -50,7 +51,7 @@ pub fn fetch_page(url: &str) -> Result<String> {
 /// - finds a download URL ending with .tar.xz/.tar.gz
 /// - finds configure pre block(s), builds token list
 /// - finds `make` / `make install` pre blocks
-pub fn parse_binutils(html: &str) -> Result<BinutilsInfo> {
+pub fn parse_binutils(html: &str) -> Result<BinutilsInfo, Box<dyn Error>> {
     let document = Html::parse_document(html);
     let mut info = BinutilsInfo::default();

View file

@ -34,7 +34,6 @@ struct HarvestedMetadata {
 #[derive(Debug, Deserialize)]
 struct HarvestedSource {
     #[serde(default)]
-    #[allow(dead_code)]
     archive: Option<String>,
     #[serde(default)]
     urls: Vec<HarvestedUrl>,
@ -68,18 +67,14 @@ struct HarvestedOptimisations {
 #[derive(Debug, Deserialize)]
 struct CommandPhase {
     #[serde(default)]
-    #[allow(dead_code)]
     phase: Option<String>,
     #[serde(default)]
     commands: Vec<String>,
     #[serde(default)]
-    #[allow(dead_code)]
     cwd: Option<String>,
     #[serde(default)]
-    #[allow(dead_code)]
     requires_root: Option<bool>,
     #[serde(default)]
-    #[allow(dead_code)]
     notes: Option<String>,
 }
@ -102,12 +97,6 @@ pub fn generate_module(
     Ok(result.module_path)
 }

-/// Build a scaffolding request directly from a metadata JSON file.
-pub fn request_from_metadata(path: impl AsRef<Path>) -> Result<ScaffoldRequest> {
-    let harvested = parse_metadata(path.as_ref())?;
-    build_request(&harvested)
-}
-
 /// Compute the directory for a module derived from the given metadata.
 pub fn module_directory(
     metadata_path: impl AsRef<Path>,

View file

@ -1,4 +1,4 @@
-use std::{borrow::Cow, fs, path::PathBuf};
+use std::borrow::Cow;

 use anyhow::{Context, Result, anyhow};
 use serde::{Deserialize, Serialize};
@ -83,30 +83,6 @@ impl MlfsPackageRecord {
     }
 }

-#[derive(Debug, Deserialize)]
-struct MetadataIndex {
-    packages: Vec<MetadataPackage>,
-}
-
-#[derive(Debug, Clone, Deserialize)]
-struct MetadataPackage {
-    book: String,
-    id: String,
-    name: String,
-    path: String,
-    stage: Option<String>,
-    variant: Option<String>,
-}
-
-#[derive(Debug, Clone)]
-pub struct MlfsMetadataEntry {
-    pub id: String,
-    pub name: String,
-    pub stage: Option<String>,
-    pub variant: Option<String>,
-    pub path: PathBuf,
-}
-
 pub fn fetch_catalog(base_url: &str) -> Result<Vec<MlfsPackageRecord>> {
     let options = FetchOptions::new(base_url, BookKind::Mlfs);
     let packages = lfs::fetch_book(&options)?;
@ -133,75 +109,8 @@ pub fn load_or_fetch_catalog(base_url: Option<&str>) -> Result<Vec<MlfsPackageRecord>> {
     match fetch_catalog(base) {
         Ok(records) => Ok(records),
         Err(err) => {
-            eprintln!("[mlfs] fetch error ({err}); falling back to cached MLFS package list");
+            tracing::warn!("mlfs_fetch_error" = %err, "Falling back to cached MLFS package list");
             load_cached_catalog()
         }
     }
 }
pub fn load_metadata_index() -> Result<Vec<MlfsMetadataEntry>> {
let raw = fs::read_to_string("ai/metadata/index.json").context("reading AI metadata index")?;
let index: MetadataIndex =
serde_json::from_str(&raw).context("parsing AI metadata index JSON")?;
let entries = index
.packages
.into_iter()
.filter(|pkg| pkg.book.eq_ignore_ascii_case("mlfs"))
.map(|pkg| MlfsMetadataEntry {
id: pkg.id,
name: pkg.name,
stage: pkg.stage,
variant: pkg.variant,
path: PathBuf::from(pkg.path),
})
.collect();
Ok(entries)
}
pub fn match_metadata<'a>(
record: &MlfsPackageRecord,
entries: &'a [MlfsMetadataEntry],
) -> Option<&'a MlfsMetadataEntry> {
let target_name = normalize(&record.name);
let target_variant = normalize_opt(record.variant.as_deref());
let target_stage = normalize_opt(record.stage.as_deref());
entries
.iter()
.filter(|entry| normalize(&entry.name) == target_name)
.max_by_key(|entry| {
let mut score = 0;
if let (Some(tv), Some(ev)) = (&target_variant, normalize_opt(entry.variant.as_deref()))
{
if *tv == ev {
score += 4;
}
}
if let (Some(ts), Some(es)) = (&target_stage, normalize_opt(entry.stage.as_deref())) {
if *ts == es {
score += 2;
}
}
if target_variant.is_none() && entry.variant.is_none() {
score += 1;
}
if target_stage.is_none() && entry.stage.is_none() {
score += 1;
}
score
})
}
fn normalize(value: &str) -> String {
value
.chars()
.filter(|c| c.is_ascii_alphanumeric())
.collect::<String>()
.to_ascii_lowercase()
}
fn normalize_opt(value: Option<&str>) -> Option<String> {
value.map(normalize)
}
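// Editorial sketch: the matcher keys on `normalize`, so case, spacing, and
// punctuation differences between book pages and metadata never break a match.
#[cfg(test)]
mod normalize_tests {
    use super::normalize;

    #[test]
    fn strips_case_spacing_and_punctuation() {
        assert_eq!(normalize("Binutils Pass 1"), "binutilspass1");
        assert_eq!(normalize("GCC-13.2"), "gcc132");
    }
}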

View file

@ -1,375 +0,0 @@
#[derive(Default)]
pub struct Document {
width: u32,
height: u32,
view_box: Option<String>,
role: Option<String>,
aria_label: Option<(String, String)>,
title: Option<String>,
desc: Option<String>,
defs: Vec<String>,
elements: Vec<String>,
}
impl Document {
pub fn new(width: u32, height: u32) -> Self {
Self {
width,
height,
..Default::default()
}
}
pub fn view_box(mut self, value: &str) -> Self {
self.view_box = Some(value.to_string());
self
}
pub fn role(mut self, value: &str) -> Self {
self.role = Some(value.to_string());
self
}
pub fn aria_label(mut self, title_id: &str, desc_id: &str) -> Self {
self.aria_label = Some((title_id.to_string(), desc_id.to_string()));
self
}
pub fn title(mut self, value: &str) -> Self {
self.title = Some(value.to_string());
self
}
pub fn desc(mut self, value: &str) -> Self {
self.desc = Some(value.to_string());
self
}
pub fn add_defs(mut self, defs: Defs) -> Self {
self.defs.push(defs.finish());
self
}
pub fn add_element(mut self, element: impl Into<String>) -> Self {
self.elements.push(element.into());
self
}
pub fn finish(self) -> String {
let Document {
width,
height,
view_box,
role,
aria_label,
title,
desc,
defs,
elements,
} = self;
let mut out = String::new();
out.push_str(&format!(
"<svg xmlns=\"http://www.w3.org/2000/svg\" width=\"{}\" height=\"{}\"",
width, height
));
if let Some(view_box) = view_box {
out.push_str(&format!(" viewBox=\"{}\"", view_box));
}
if let Some(role) = role {
out.push_str(&format!(" role=\"{}\"", role));
}
let (title_id, desc_id) = aria_label
.as_ref()
.map(|ids| (ids.0.as_str(), ids.1.as_str()))
.unwrap_or(("title", "desc"));
if aria_label.is_some() {
out.push_str(&format!(" aria-labelledby=\"{} {}\"", title_id, desc_id));
}
out.push_str(">");
out.push('\n');
if let Some(title) = title {
out.push_str(&format!(" <title id=\"{}\">{}</title>\n", title_id, title));
}
if let Some(desc) = desc {
out.push_str(&format!(" <desc id=\"{}\">{}</desc>\n", desc_id, desc));
}
if !defs.is_empty() {
out.push_str(" <defs>\n");
for block in &defs {
out.push_str(block);
}
out.push_str(" </defs>\n");
}
for element in &elements {
out.push_str(element);
out.push('\n');
}
out.push_str("</svg>\n");
out
}
}
pub struct Defs {
content: Vec<String>,
}
impl Defs {
pub fn new() -> Self {
Self {
content: Vec::new(),
}
}
pub fn linear_gradient(mut self, id: &str, gradient: Gradient) -> Self {
self.content
.push(format!(" {}\n", gradient.render_linear(id)));
self
}
pub fn radial_gradient(mut self, id: &str, gradient: RadialGradient) -> Self {
self.content.push(format!(" {}\n", gradient.render(id)));
self
}
pub fn filter(mut self, id: &str, filter: Filter) -> Self {
self.content.push(format!(" {}\n", filter.render(id)));
self
}
pub fn finish(self) -> String {
self.content.concat()
}
}
pub struct Gradient {
x1: String,
y1: String,
x2: String,
y2: String,
stops: Vec<String>,
}
impl Gradient {
pub fn new(x1: &str, y1: &str, x2: &str, y2: &str) -> Self {
Self {
x1: x1.to_string(),
y1: y1.to_string(),
x2: x2.to_string(),
y2: y2.to_string(),
stops: Vec::new(),
}
}
pub fn stop(mut self, offset: &str, attrs: &[(&str, &str)]) -> Self {
let mut tag = format!("<stop offset=\"{}\"", offset);
for (k, v) in attrs {
tag.push_str(&format!(" {}=\"{}\"", k, v));
}
tag.push_str(" />");
self.stops.push(tag);
self
}
fn render_linear(&self, id: &str) -> String {
let mut out = format!(
"<linearGradient id=\"{}\" x1=\"{}\" y1=\"{}\" x2=\"{}\" y2=\"{}\">\n",
id, self.x1, self.y1, self.x2, self.y2
);
for stop in &self.stops {
out.push_str(" ");
out.push_str(stop);
out.push('\n');
}
out.push_str(" </linearGradient>");
out
}
}
pub struct RadialGradient {
cx: String,
cy: String,
r: String,
stops: Vec<String>,
}
impl RadialGradient {
pub fn new(cx: &str, cy: &str, r: &str) -> Self {
Self {
cx: cx.to_string(),
cy: cy.to_string(),
r: r.to_string(),
stops: Vec::new(),
}
}
pub fn stop(mut self, offset: &str, attrs: &[(&str, &str)]) -> Self {
let mut tag = format!("<stop offset=\"{}\"", offset);
for (k, v) in attrs {
tag.push_str(&format!(" {}=\"{}\"", k, v));
}
tag.push_str(" />");
self.stops.push(tag);
self
}
fn render(&self, id: &str) -> String {
let mut out = format!(
"<radialGradient id=\"{}\" cx=\"{}\" cy=\"{}\" r=\"{}\">\n",
id, self.cx, self.cy, self.r
);
for stop in &self.stops {
out.push_str(" ");
out.push_str(stop);
out.push('\n');
}
out.push_str(" </radialGradient>");
out
}
}
pub struct Filter {
attrs: Vec<(String, String)>,
content: Vec<String>,
}
impl Filter {
pub fn new() -> Self {
Self {
attrs: Vec::new(),
content: Vec::new(),
}
}
pub fn attr(mut self, key: &str, value: &str) -> Self {
self.attrs.push((key.to_string(), value.to_string()));
self
}
pub fn raw(mut self, markup: &str) -> Self {
self.content.push(format!(" {}\n", markup));
self
}
fn render(&self, id: &str) -> String {
let attrs = self
.attrs
.iter()
.map(|(k, v)| format!(" {}=\"{}\"", k, v))
.collect::<String>();
let mut out = format!("<filter id=\"{}\"{}>\n", id, attrs);
for child in &self.content {
out.push_str(child);
}
out.push_str(" </filter>");
out
}
}
pub struct Element {
tag: String,
attrs: Vec<(String, String)>,
content: Option<String>,
}
impl Element {
pub fn new(tag: &str) -> Self {
Self {
tag: tag.to_string(),
attrs: Vec::new(),
content: None,
}
}
pub fn attr(mut self, key: &str, value: &str) -> Self {
self.attrs.push((key.to_string(), value.to_string()));
self
}
pub fn text(mut self, text: &str) -> String {
self.content = Some(text.to_string());
self.render()
}
pub fn empty(mut self) -> String {
self.content = None;
self.render()
}
fn render(&self) -> String {
let attrs = self
.attrs
.iter()
.map(|(k, v)| format!(" {}=\"{}\"", k, v))
.collect::<String>();
if let Some(content) = &self.content {
format!(
" <{tag}{attrs}>{content}</{tag}>",
tag = self.tag,
attrs = attrs,
content = content
)
} else {
format!(" <{tag}{attrs} />", tag = self.tag, attrs = attrs)
}
}
}
pub struct Group {
attrs: Vec<(String, String)>,
children: Vec<String>,
}
impl Group {
pub fn new() -> Self {
Self {
attrs: Vec::new(),
children: Vec::new(),
}
}
pub fn attr(mut self, key: &str, value: &str) -> Self {
self.attrs.push((key.to_string(), value.to_string()));
self
}
pub fn child(mut self, element: impl Into<String>) -> Self {
self.children.push(element.into());
self
}
pub fn render(&self) -> String {
let attrs = self
.attrs
.iter()
.map(|(k, v)| format!(" {}=\"{}\"", k, v))
.collect::<String>();
let mut out = format!(" <g{}>\n", attrs);
for child in &self.children {
out.push_str(child);
out.push('\n');
}
out.push_str(" </g>");
out
}
}
impl From<Group> for String {
fn from(group: Group) -> Self {
group.render()
}
}
impl From<Element> for String {
fn from(element: Element) -> Self {
element.render()
}
}
pub fn path(d: &str) -> String {
Element::new("path").attr("d", d).empty()
}
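
A minimal usage sketch of the builders above (a hypothetical call site, not part of this diff): Document's constructor is not shown here, so the sketch stops at the string fragments that finish() would concatenate. Note that these builders interpolate attribute values and text content verbatim, so callers must escape XML-significant characters themselves.

// Hypothetical usage of the string-based builders defined above.
fn example_fragments() -> (String, String) {
    // One <linearGradient> wrapped for the <defs> block.
    let defs = Defs::new()
        .linear_gradient(
            "bg",
            Gradient::new("0%", "0%", "100%", "0%")
                .stop("0%", &[("stop-color", "#1e1e2e")])
                .stop("100%", &[("stop-color", "#585b70")]),
        )
        .finish();
    // A <g> holding a gradient-filled <rect> and a <path>.
    let group = Group::new()
        .attr("transform", "translate(10,10)")
        .child(
            Element::new("rect")
                .attr("width", "120")
                .attr("height", "40")
                .attr("fill", "url(#bg)")
                .empty(),
        )
        .child(path("M0 0 L120 40"))
        .render();
    (defs, group)
}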

View file

@ -1,80 +0,0 @@
use std::time::Duration;
use rsille::canvas::Canvas;
use super::Animation;
const THETA_SPACING: f64 = 0.07;
const PHI_SPACING: f64 = 0.02;
pub struct DonutAnimation {
a: f64, // rotation around X
b: f64, // rotation around Z
size: (u16, u16),
}
impl DonutAnimation {
pub fn new(width: u16, height: u16) -> Self {
Self {
a: 0.0,
b: 0.0,
size: (width, height),
}
}
}
impl Animation for DonutAnimation {
fn update(&mut self, delta: Duration) {
let delta_secs = delta.as_secs_f64();
self.a += delta_secs;
self.b += delta_secs * 0.5;
}
fn render(&self, canvas: &mut Canvas) {
let (width, height) = self.size;
let (width_f, height_f) = (width as f64, height as f64);
let k2 = 5.0;
let k1 = width_f * k2 * 3.0 / (8.0 * (height_f + width_f));
for theta in 0..((2.0 * std::f64::consts::PI / THETA_SPACING) as i32) {
let theta_f = theta as f64 * THETA_SPACING;
let cos_theta = theta_f.cos();
let sin_theta = theta_f.sin();
for phi in 0..((2.0 * std::f64::consts::PI / PHI_SPACING) as i32) {
let phi_f = phi as f64 * PHI_SPACING;
let cos_phi = phi_f.cos();
let sin_phi = phi_f.sin();
let cos_a = self.a.cos();
let sin_a = self.a.sin();
let cos_b = self.b.cos();
let sin_b = self.b.sin();
let h = cos_theta + 2.0;
let d = 1.0 / (sin_phi * h * sin_a + sin_theta * cos_a + 5.0);
let t = sin_phi * h * cos_a - sin_theta * sin_a;
let x = (width_f / 2.0 + 30.0 * d * (cos_phi * h * cos_b - t * sin_b)) as i32;
let y = (height_f / 2.0 + 15.0 * d * (cos_phi * h * sin_b + t * cos_b)) as i32;
let z = (1.0 / d) as u8;
if x >= 0 && x < width as i32 && y >= 0 && y < height as i32 {
let luminance = if z > 0 { z } else { 1 };
let c = match luminance {
0..=31 => '.',
32..=63 => '*',
64..=95 => 'o',
96..=127 => '&',
128..=159 => '8',
160..=191 => '#',
_ => '@',
};
canvas.put_char(x as u16, y as u16, c);
}
}
}
}
fn is_finished(&self) -> bool {
false // continuous animation
}
}
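
For reference, the nested loops above implement the classic spinning-torus projection: a unit circle offset by 2 from the rotation axis (h = cos(theta) + 2) is swept through phi to form the torus, rotated by a about the x-axis and b about the z-axis, then projected with the perspective factor d = 1/(z + 5). One quirk of this deleted version: luminance was derived from depth (z = 1/d, which stays in the low single digits) rather than from the surface-normal dot product used by the original donut.c, so the match arms above 31 were unreachable in practice and the ramp always rendered '.'.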

View file

@ -1,13 +0,0 @@
use rsille::canvas::Canvas;
use std::time::Duration;
pub trait Animation {
fn update(&mut self, delta: Duration);
fn render(&self, canvas: &mut Canvas);
fn is_finished(&self) -> bool;
}
pub trait ProgressAnimation: Animation {
fn set_progress(&mut self, progress: f64);
fn get_progress(&self) -> f64;
}

View file

@ -1,48 +0,0 @@
use std::time::Duration;
use rsille::canvas::Canvas;
use super::{Animation, ProgressAnimation};
pub struct ProgressBarAnimation {
progress: f64,
width: u16,
height: u16,
animation_offset: f64,
}
impl ProgressBarAnimation {
pub fn new(width: u16, height: u16) -> Self {
Self {
progress: 0.0,
width,
height,
animation_offset: 0.0,
}
}
}
impl Animation for ProgressBarAnimation {
fn update(&mut self, delta: Duration) {
self.animation_offset += delta.as_secs_f64() * 2.0;
if self.animation_offset >= 1.0 {
self.animation_offset -= 1.0;
}
}
fn render(&self, canvas: &mut Canvas) {
// Animated progress bar rendering will be implemented here
}
fn is_finished(&self) -> bool {
self.progress >= 1.0
}
}
impl ProgressAnimation for ProgressBarAnimation {
fn set_progress(&mut self, progress: f64) {
self.progress = progress.clamp(0.0, 1.0);
}
fn get_progress(&self) -> f64 {
self.progress
}
}
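
The render body above was left as a stub when the file was deleted. For illustration only, a sketch of what it might have contained, assuming the same Canvas::put_char(x, y, char) interface the donut animation relies on; the cycling highlight driven by animation_offset is this sketch's own invention, not recovered code.

    // Hypothetical body for the stubbed render(): a one-row bar whose filled
    // portion tracks `progress`, with one highlight cell cycling through the
    // filled region at the rate `update` advances `animation_offset`.
    fn render(&self, canvas: &mut Canvas) {
        let filled = (self.progress * self.width as f64) as u16;
        let highlight = (self.animation_offset * filled.max(1) as f64) as u16;
        let y = self.height / 2;
        for x in 0..self.width {
            let c = if x >= filled {
                '░' // unfilled remainder
            } else if x == highlight {
                '▓' // moving highlight
            } else {
                '█' // filled portion
            };
            canvas.put_char(x, y, c);
        }
    }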

View file

@ -9,6 +9,7 @@ use crossterm::event::{self, Event, KeyCode};
 use crossterm::execute;
 use crossterm::terminal::{EnterAlternateScreen, LeaveAlternateScreen};
 use gptman::{GPT, GPTPartitionEntry, PartitionName};
+use tracing::{info, warn};
 use tui::{
     Terminal,
     backend::CrosstermBackend,
@ -26,6 +27,7 @@ use uuid::Uuid;
 /// crossterm = "0.26"
 /// gptman = "2.0"
 /// uuid = { version = "1", features = ["v4"] }
+/// tracing = "0.1"
 pub struct DiskManager;
 impl DiskManager {
@ -146,11 +148,11 @@ impl DiskManager {
                 let path = devices[selected_idx].clone();
                 match Self::create_partition_tui(&path, &mut term) {
                     Ok(m) => {
-                        println!("[disk-manager] {}", m);
+                        info!(target: "disk_manager", "{}", m);
                         status_msg = m;
                     }
                     Err(e) => {
-                        eprintln!("[disk-manager] create partition error: {e}");
+                        warn!(target: "disk_manager", "create partition error: {:?}", e);
                         status_msg = format!("Create failed: {}", e);
                     }
                 }
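
These events only become visible once a subscriber is installed; that setup is not part of this diff, but assuming tracing-subscriber with its env-filter feature is available, the disk_manager target can be filtered along these lines:

    // Assumed subscriber setup (not shown in this diff): honor RUST_LOG,
    // e.g. RUST_LOG=disk_manager=info, when routing tracing events.
    tracing_subscriber::fmt()
        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
        .init();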

View file

@ -1,25 +1,20 @@
-use crate::tui::animations::{
-    Animation, ProgressAnimation, donut::DonutAnimation, progress::ProgressBarAnimation,
-};
-use rsille::canvas::Canvas;
-use std::{io::Stdout, thread, time::Duration};
+use std::io::Stdout;
+use tracing::instrument;
 use tui::{
     Terminal,
     backend::CrosstermBackend,
     layout::{Constraint, Direction, Layout},
     style::Style,
     text::Spans,
-    widgets::{Block, Borders, List, ListItem},
+    widgets::{Block, Borders, Gauge, List, ListItem},
 };
 use crate::tui::settings::Settings;
 pub struct Downloader;
-const TARGET_FPS: u64 = 30;
-const FRAME_TIME: Duration = Duration::from_micros(1_000_000 / TARGET_FPS);
 impl Downloader {
+    #[instrument(skip(terminal, settings))]
     pub fn show_downloader(
         terminal: &mut Terminal<CrosstermBackend<Stdout>>,
         settings: &Settings,
@ -27,26 +22,14 @@ impl Downloader {
         let files = vec!["file1.tar.gz", "file2.tar.gz", "file3.tar.gz"];
         let progress = vec![0.3, 0.5, 0.9];
-        let mut last_update = std::time::Instant::now();
         loop {
-            let frame_start = std::time::Instant::now();
-            let delta = frame_start - last_update;
-            last_update = frame_start;
             terminal.draw(|f| {
                 let size = f.size();
                 let chunks = Layout::default()
                     .direction(Direction::Vertical)
                     .margin(2)
-                    .constraints(
-                        [
-                            Constraint::Percentage(50),
-                            Constraint::Percentage(25),
-                            Constraint::Percentage(25),
-                        ]
-                        .as_ref(),
-                    )
+                    .constraints([Constraint::Percentage(70), Constraint::Percentage(30)].as_ref())
                     .split(size);
                 let items: Vec<ListItem> = files
@ -61,42 +44,16 @@ impl Downloader {
                 );
                 f.render_widget(list, chunks[0]);
-                // Progress bar
-                let mut progress_canvas = Canvas::new(chunks[1].width, chunks[1].height);
-                let mut progress_bar = ProgressBarAnimation::new(chunks[1].width, chunks[1].height);
                 for (i, prog) in progress.iter().enumerate() {
-                    progress_bar.set_progress(*prog as f64);
-                    progress_bar.render(&mut progress_canvas);
+                    let gauge = Gauge::default()
+                        .block(Block::default().title(files[i]))
+                        .gauge_style(Style::default().fg(settings.theme.primary_color()))
+                        .ratio(*prog as f64);
+                    f.render_widget(gauge, chunks[1]);
                 }
-                // Render progress bar
-                let progress_block = Block::default()
-                    .title(files[0])
-                    .borders(Borders::ALL)
-                    .border_style(Style::default().fg(settings.theme.primary_color()));
-                f.render_widget(progress_block, chunks[1]);
-                // Donut animation
-                let mut donut_canvas = Canvas::new(chunks[2].width, chunks[2].height);
-                let mut donut = DonutAnimation::new(chunks[2].width, chunks[2].height);
-                donut.render(&mut donut_canvas);
-                // Render donut
-                let donut_block = Block::default()
-                    .title("Progress")
-                    .borders(Borders::ALL)
-                    .border_style(Style::default().fg(settings.theme.secondary_color()));
-                f.render_widget(donut_block, chunks[2]);
             })?;
-            // Frame rate limiting
-            let frame_time = frame_start.elapsed();
-            if frame_time < FRAME_TIME {
-                thread::sleep(FRAME_TIME - frame_time);
-            }
+            break; // remove in real async loop
         }
         Ok(())
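
One caveat in the new drawing code: every Gauge in the loop is rendered into the same chunks[1] rect, so each draw overwrites the previous one and only the last file's gauge stays visible; splitting chunks[1] into one sub-rect per file would be needed to show all three bars.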

View file

@ -1,4 +1,3 @@
-pub mod animations;
 pub mod disk_manager;
 pub mod downloader;
 pub mod main_menu;

View file

@ -1,4 +1,5 @@
 use std::io::Stdout;
+use tracing::instrument;
 use tui::{Terminal, backend::CrosstermBackend};
 pub struct Settings {
@ -17,6 +18,7 @@ impl Theme {
 }
 impl Settings {
+    #[instrument(skip(_terminal))]
     pub fn show_settings(
         _terminal: &mut Terminal<CrosstermBackend<Stdout>>,
     ) -> Result<(), Box<dyn std::error::Error>> {
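
The skip(_terminal) in the attribute matters: #[instrument] records function arguments in the span via Debug by default, which would require the terminal handle to implement Debug; skipping it sidesteps that bound.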

View file

@ -1,17 +1,16 @@
-use anyhow::{Context, Result};
+use anyhow::Result;
+use reqwest::blocking::Client;
+use reqwest::redirect::Policy;
 pub fn get_wget_list() -> Result<String> {
-    let url = "https://www.linuxfromscratch.org/~thomas/multilib-m32/wget-list-sysv";
-    let agent = ureq::AgentBuilder::new().redirects(5).build();
-    agent
-        .get(url)
-        .call()
-        .map_err(|err| match err {
-            ureq::Error::Status(code, _) => {
-                anyhow::anyhow!("Failed to fetch wget-list: HTTP {code}")
-            }
-            other => anyhow::anyhow!("Failed to fetch wget-list: {other}"),
-        })?
-        .into_string()
-        .with_context(|| format!("reading body from {url}"))
+    let client = Client::builder().redirect(Policy::limited(5)).build()?;
+    let res = client
+        .get("https://www.linuxfromscratch.org/~thomas/multilib-m32/wget-list-sysv")
+        .send()?;
+    if !res.status().is_success() {
+        anyhow::bail!("Failed to fetch wget-list: HTTP {}", res.status());
+    }
+    Ok(res.text()?)
 }
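
A hypothetical call site for the new reqwest-based fetcher (the iteration below is illustrative, not from the diff). Two practical notes: the blocking client requires reqwest's "blocking" cargo feature, and it panics if invoked from inside an async runtime, so this helper should stay on a plain thread.

// Illustrative only: fetch the LFS wget-list and print one URL per line.
fn main() -> anyhow::Result<()> {
    let list = get_wget_list()?;
    for url in list.lines().filter(|l| !l.trim().is_empty()) {
        println!("{url}");
    }
    Ok(())
}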