Compare commits


No commits in common. "3f74a55897c2cf0597657fc89946f76f81456041" and "7424aba439edee46f48c63d2740598a3ccd1fd07" have entirely different histories.

50 changed files with 240 additions and 7003 deletions

Cargo.lock (generated) – 943 changed lines

File diff suppressed because it is too large.

View file

@@ -14,7 +14,7 @@ console = "0.16.1"
 # Optional Terminal UI
 crossterm = { version = "0.29.0", optional = true }
-tui = { version = "0.19.0", optional = true }
+ratatui = { version = "0.29.0", optional = true }
 # Parsing & scraping
 html_parser = "0.7.0"
@@ -22,10 +22,6 @@ scraper = "0.19.0"
 regex = "1.11.3"
 serde = { version = "1.0.228", features = ["derive"] }
 serde_json = "1.0.145"
-jsonschema = "0.17.0"
-walkdir = "2.5.0"
-chrono = { version = "0.4.38", default-features = false, features = ["clock"] }
-sha2 = "0.10.8"
 # Utilities
 indicatif = "0.18.0"
@@ -38,25 +34,18 @@ md5 = "0.8.0"
 reqwest = { version = "0.12.23", features = ["blocking", "json"] }
 semver = "1.0.27"
 inquire = "0.9.1"
-tui = "0.19.0"
 tracing = "0.1.41"
 tracing-appender = "0.2.3"
 tracing-subscriber = { version = "0.3.20", features = ["env-filter", "fmt"] }
-gptman = "2.0.1"
-dialoguer = "0.12.0"
-tokio = { version = "1.47.1", features = ["full"] }
-shell-words = "1.1.0"
-url = "2.5.7"
-uuid = { version = "1.18.1", features = ["v4"] }
-hex = "0.4.3"
-diesel = { version = "2.1.6", features = ["sqlite", "r2d2", "returning_clauses_for_sqlite_3_35"] }
 [features]
 # TUI feature flag
-tui = ["dep:tui", "dep:crossterm"]
+tui = ["ratatui", "crossterm"]
 # Optional default features
 default = []
-crossterm = ["dep:crossterm"]
 # -----------------------
 # Cargo-make tasks
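
Here the old `tui` crate gives way to `ratatui`, and the `tui` cargo feature now enables both optional dependencies. A minimal sketch of code gated on that feature, assuming a hypothetical `run_tui` entry point (this is not code from the repository):

```rust
use std::io;

// Compiled only with `--features tui`; without it, neither ratatui
// nor crossterm is pulled into the build.
#[cfg(feature = "tui")]
fn run_tui() -> io::Result<()> {
    use ratatui::{backend::CrosstermBackend, Terminal};

    // CrosstermBackend writes to stdout; Terminal drives the draw loop.
    let mut terminal = Terminal::new(CrosstermBackend::new(io::stdout()))?;
    terminal.clear()?;
    // Event loop and draw calls would follow here.
    Ok(())
}

fn main() -> io::Result<()> {
    #[cfg(feature = "tui")]
    run_tui()?;

    #[cfg(not(feature = "tui"))]
    eprintln!("built without the `tui` feature");

    Ok(())
}
```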

View file

@@ -94,65 +94,10 @@ You can also run the project directly in the flake shell:
 nix run
 ```
-### AI metadata tooling
-The AI metadata store under `ai/metadata/` comes with a helper CLI to
-validate package records against the JSON schema and regenerate
-`index.json` after adding new entries:
-```bash
-cargo run --bin metadata_indexer -- --base-dir . validate
-cargo run --bin metadata_indexer -- --base-dir . index
-```
-Use `--compact` with `index` if you prefer single-line JSON output.
-To draft metadata for a specific book page, you can run the harvest mode.
-It fetches the XHTML, scrapes the build commands, and emits a schema-
-compliant JSON skeleton (pass `--dry-run` to inspect the result without
-writing to disk):
-```bash
-cargo run --bin metadata_indexer -- \
-  --base-dir . harvest \
-  --book mlfs \
-  --page chapter05/binutils-pass1 \
-  --dry-run
-```
-Keep the jhalfs manifests current with:
-```bash
-cargo run --bin metadata_indexer -- --base-dir . refresh
-```
-Passing `--books mlfs,blfs` restricts the refresh to specific books, and
-`--force` bypasses the local cache.
-To materialise a Rust module from harvested metadata:
-```bash
-cargo run --bin metadata_indexer -- \
-  --base-dir . generate \
-  --metadata ai/metadata/packages/mlfs/binutils-pass-1.json \
-  --output target/generated/by_name
-```
-Add `--overwrite` to regenerate an existing module directory.
-## 📚 Documentation
-- [Architecture Overview](docs/ARCHITECTURE.md) – high-level tour of the crate
-  layout, binaries, and supporting modules.
-- [Metadata Harvesting Pipeline](docs/METADATA_PIPELINE.md) – how the metadata
-  indexer produces and validates the JSON records under `ai/metadata/`.
-- [Package Module Generation](docs/PACKAGE_GENERATION.md) – end-to-end guide
-  for converting harvested metadata into Rust modules under `src/pkgs/by_name/`.
-- `ai/notes.md` – scratchpad for ongoing research tasks (e.g., deeper jhalfs
-  integration).
 ---
 ## 📄 License
 LPKG is licensed under the [MIT License](LICENSE).

View file

@@ -1,29 +0,0 @@
[
{
"id": "disk-manager-private-fields",
"title": "Disk manager accesses private GPT fields",
"description": "src/tui/disk_manager.rs reaches into gptman::GPT::partitions which is private, breaking compilation.",
"status": "open",
"owner": "default_cli",
"created_at": "2025-03-09T00:00:00Z",
"labels": ["tui", "blocking-build"]
},
{
"id": "tui-feature-build",
"title": "TUI modules require crossterm feature gating",
"description": "When the crate is built without the `tui` feature the main binary still imports crossterm types and fails to compile.",
"status": "open",
"owner": "default_cli",
"created_at": "2025-03-09T00:00:00Z",
"labels": ["tui", "feature-gate"]
},
{
"id": "metadata-harvest-no-source-urls",
"title": "Harvested metadata missing source URLs",
"description": "`metadata_indexer harvest --book mlfs --page chapter05/binutils-pass1 --dry-run` emits a draft record with no source URLs even after wget-list fallback; Binutils tarball discovery logic needs to inspect package download tables or improve slug matching.",
"status": "open",
"owner": "default_cli",
"created_at": "2025-10-01T04:40:00Z",
"labels": ["metadata", "ingest", "mlfs"]
}
]

View file

@@ -1,22 +0,0 @@
# Repository Context Snapshot
- `README.md`, `docs/ARCHITECTURE.md`, and `docs/METADATA_PIPELINE.md` describe
the crate structure, CLI entry points, and metadata workflows. Consult them
first when revisiting the project.
- `metadata_indexer` now supports a `refresh` command that pulls jhalfs
`wget-list`/`md5sums` manifests into `ai/metadata/cache/` and the `harvest`
command automatically draws URLs and checksums from those manifests. A
`generate` subcommand consumes harvested metadata and scaffolds Rust modules
under `src/pkgs/by_name` (or a custom output directory). See
`docs/PACKAGE_GENERATION.md` for the CLI flow.
- AI state lives under `ai/`:
- `ai/personas.json`, `ai/tasks.json`, `ai/bugs.json` track personas,
outstanding work, and known issues.
- `ai/metadata/` stores package records plus the JSON schema.
- `ai/notes.md` captures ongoing research ideas (e.g., deeper BLFS/GLFS
manifest coverage).
- Duplicate MLFS metadata entries were pruned (`binutils-pass1.json` removed in
favour of the `binutils-pass-1.json` slug).
This file is intended as a quick orientation checkpoint alongside the richer
architecture docs.

View file

@@ -1,93 +0,0 @@
590765dee95907dbc3c856f7255bd669 acl-2.3.2.tar.xz
227043ec2f6ca03c0948df5517f9c927 attr-2.5.2.tar.gz
1be79f7106ab6767f18391c5e22be701 autoconf-2.72.tar.xz
4017e96f89fca45ca946f1c5db6be714 automake-1.16.5.tar.xz
ad5b38410e3bf0e9bcc20e2765f5e3f9 bash-5.2.21.tar.gz
e249b1f86f886d6fb71c15f72b65dd3d bc-6.7.5.tar.xz
a075178a9646551379bfb64040487715 binutils-2.42.tar.xz
c28f119f405a2304ff0a7ccdcc629713 bison-3.8.2.tar.xz
67e051268d0c475ea773822f7500d0e5 bzip2-1.0.8.tar.gz
50fcafcecde5a380415b12e9c574e0b2 check-0.15.2.tar.gz
459e9546074db2834eefe5421f250025 coreutils-9.4.tar.xz
68c5208c58236eba447d7d6d1326b821 dejagnu-1.6.3.tar.gz
2745c50f6f4e395e7b7d52f902d075bf diffutils-3.10.tar.xz
6b4f18a33873623041857b4963641ee9 e2fsprogs-1.47.0.tar.gz
79ad698e61a052bea79e77df6a08bc4b elfutils-0.190.tar.bz2
bd169cb11f4b9bdfddadf9e88a5c4d4b expat-2.6.0.tar.xz
00fce8de158422f5ccd2666512329bd2 expect5.45.4.tar.gz
26b2a96d4e3a8938827a1e572afd527a file-5.45.tar.gz
4a4a547e888a944b2f3af31d789a1137 findutils-4.9.0.tar.xz
2882e3179748cc9f9c23ec593d6adc8d flex-2.6.4.tar.gz
3bc52f1952b9a78361114147da63c35b flit_core-3.9.0.tar.gz
97c5a7d83f91a7e1b2035ebbe6ac7abd gawk-5.3.0.tar.xz
e0e48554cc6e4f261d55ddee9ab69075 gcc-13.2.0.tar.xz
8551961e36bf8c70b7500d255d3658ec gdbm-1.23.tar.gz
2d8507d003ef3ddd1c172707ffa97ed8 gettext-0.22.4.tar.xz
be81e87f72b5ea2c0ffe2bedfeb680c6 glibc-2.39.tar.xz
956dc04e864001a9c22429f761f2c283 gmp-6.3.0.tar.xz
9e251c0a618ad0824b51117d5d9db87e gperf-3.1.tar.gz
7c9bbd74492131245f7cdb291fa142c0 grep-3.11.tar.xz
5e4f40315a22bb8a158748e7d5094c7d groff-1.23.0.tar.gz
60c564b1bdc39d8e43b3aab4bc0fb140 grub-2.12.tar.xz
d5c9fc9441288817a4a0be2da0249e29 gzip-1.13.tar.xz
aed66d04de615d76c70890233081e584 iana-etc-20240125.tar.gz
9e5a6dfd2d794dc056a770e8ad4a9263 inetutils-2.5.tar.xz
12e517cac2b57a0121cda351570f1e63 intltool-0.51.0.tar.gz
35d8277d1469596b7edc07a51470a033 iproute2-6.7.0.tar.xz
caf5418c851eac59e70a78d9730d4cea Jinja2-3.1.3.tar.gz
e2fd7adccf6b1e98eb1ae8d5a1ce5762 kbd-2.6.4.tar.xz
6165867e1836d51795a11ea4762ff66a kmod-31.tar.xz
cf05e2546a3729492b944b4874dd43dd less-643.tar.gz
a236eaa9a1f699bc3fb6ab2acd7e7b6c lfs-bootscripts-20230728.tar.xz
4667bacb837f9ac4adb4a1a0266f4b65 libcap-2.69.tar.xz
0da1a5ed7786ac12dcbaf0d499d8a049 libffi-3.4.4.tar.gz
1a48b5771b9f6c790fb4efdb1ac71342 libpipeline-1.5.7.tar.gz
2fc0b6ddcd66a89ed6e45db28fa44232 libtool-2.4.7.tar.xz
b84cd4104e08c975063ec6c4d0372446 libxcrypt-4.4.36.tar.xz
370e1b6155ae63133380e421146619e0 linux-6.7.4.tar.xz
0d90823e1426f1da2fd872df0311298d m4-1.4.19.tar.xz
c8469a3713cbbe04d955d4ae4be23eeb make-4.4.1.tar.gz
67e0052fa200901b314fad7b68c9db27 man-db-2.12.0.tar.xz
26b39e38248144156d437e1e10cb20bf man-pages-6.06.tar.xz
8fe7227653f2fb9b1ffe7f9f2058998a MarkupSafe-2.1.5.tar.gz
2d0ebd3a24249617b1c4d30026380cf8 meson-1.3.2.tar.gz
5c9bc658c9fd0f940e8e3e0f09530c62 mpc-1.3.1.tar.gz
523c50c6318dde6f9dc523bc0244690a mpfr-4.2.1.tar.xz
c5367e829b6d9f3f97b280bb3e6bfbc3 ncurses-6.4-20230520.tar.xz
32151c08211d7ca3c1d832064f6939b0 ninja-1.11.1.tar.gz
c239213887804ba00654884918b37441 openssl-3.2.1.tar.gz
78ad9937e4caadcba1526ef1853730d5 patch-2.7.6.tar.xz
d3957d75042918a23ec0abac4a2b7e0a perl-5.38.2.tar.xz
bc29d74c2483197deb9f1f3b414b7918 pkgconf-2.1.1.tar.xz
2f747fc7df8ccf402d03e375c565cf96 procps-ng-4.0.4.tar.xz
ed3206da1184ce9e82d607dc56c52633 psmisc-23.6.tar.xz
e7c178b97bf8f7ccd677b94d614f7b3c Python-3.12.2.tar.xz
8a6310f6288e7f60c3565277ec3b5279 python-3.12.2-docs-html.tar.bz2
4aa1b31be779e6b84f9a96cb66bc50f6 readline-8.2.tar.gz
6aac9b2dbafcd5b7a67a8a9bcb8036c3 sed-4.9.tar.xz
6f6eb780ce12c90d81ce243747ed7ab0 setuptools-69.1.0.tar.gz
452b0e59f08bf618482228ba3732d0ae shadow-4.14.5.tar.xz
c70599ab0d037fde724f7210c2c8d7f8 sysklogd-1.5.1.tar.gz
521cda27409a9edf0370c128fae3e690 systemd-255.tar.gz
1ebe54d7a80f9abf8f2d14ddfeb2432d systemd-man-pages-255.tar.xz
81a05f28d7b67533cfc778fcadea168c sysvinit-3.08.tar.xz
a2d8042658cfd8ea939e6d911eaf4152 tar-1.35.tar.xz
0e4358aade2f5db8a8b6f2f6d9481ec2 tcl8.6.13-src.tar.gz
4452f2f6d557f5598cca17b786d6eb68 tcl8.6.13-html.tar.gz
edd9928b4a3f82674bcc3551616eef3b texinfo-7.1.tar.xz
2349edd8335245525cc082f2755d5bf4 tzdata2024a.tar.gz
acd4360d8a5c3ef320b9db88d275dae6 udev-lfs-20230818.tar.xz
f3591e6970c017bb4bcd24ae762a98f5 util-linux-2.39.3.tar.xz
79dfe62be5d347b1325cbd5ce2a1f9b3 vim-9.1.0041.tar.gz
802ad6e5f9336fcb1c76b7593f0cd22d wheel-0.42.0.tar.gz
89a8e82cfd2ad948b349c0a69c494463 XML-Parser-2.47.tar.gz
7ade7bd1181a731328f875bec62a9377 xz-5.4.6.tar.xz
9855b6d802d7fe5b7bd5b196a2271655 zlib-1.3.1.tar.gz
63251602329a106220e0a5ad26ba656f zstd-1.5.5.tar.gz
2d1691a629c558e894dbb78ee6bf34ef bash-5.2.21-upstream_fixes-1.patch
6a5ac7e89b791aae556de0f745916f7f bzip2-1.0.8-install_docs-1.patch
cca7dc8c73147444e77bc45d210229bb coreutils-9.4-i18n-1.patch
9a5997c3452909b1769918c759eff8a2 glibc-2.39-fhs-1.patch
f75cca16a38da6caa7d52151f7136895 kbd-2.6.4-backspace-1.patch
9ed497b6cb8adcb8dbda9dee9ebce791 readline-8.2-upstream_fixes-3.patch
17ffccbb8e18c39e8cedc32046f3a475 sysvinit-3.08-consolidated-1.patch

View file

@@ -1,95 +0,0 @@
https://download.savannah.gnu.org/releases/acl/acl-2.3.2.tar.xz
https://download.savannah.gnu.org/releases/attr/attr-2.5.2.tar.gz
https://ftp.gnu.org/gnu/autoconf/autoconf-2.72.tar.xz
https://ftp.gnu.org/gnu/automake/automake-1.16.5.tar.xz
https://ftp.gnu.org/gnu/bash/bash-5.2.21.tar.gz
https://github.com/gavinhoward/bc/releases/download/6.7.5/bc-6.7.5.tar.xz
https://sourceware.org/pub/binutils/releases/binutils-2.42.tar.xz
https://ftp.gnu.org/gnu/bison/bison-3.8.2.tar.xz
https://www.sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz
https://github.com/libcheck/check/releases/download/0.15.2/check-0.15.2.tar.gz
https://ftp.gnu.org/gnu/coreutils/coreutils-9.4.tar.xz
https://dbus.freedesktop.org/releases/dbus/dbus-1.14.10.tar.xz
https://ftp.gnu.org/gnu/dejagnu/dejagnu-1.6.3.tar.gz
https://ftp.gnu.org/gnu/diffutils/diffutils-3.10.tar.xz
https://downloads.sourceforge.net/project/e2fsprogs/e2fsprogs/v1.47.0/e2fsprogs-1.47.0.tar.gz
https://sourceware.org/ftp/elfutils/0.190/elfutils-0.190.tar.bz2
https://prdownloads.sourceforge.net/expat/expat-2.6.0.tar.xz
https://prdownloads.sourceforge.net/expect/expect5.45.4.tar.gz
https://astron.com/pub/file/file-5.45.tar.gz
https://ftp.gnu.org/gnu/findutils/findutils-4.9.0.tar.xz
https://github.com/westes/flex/releases/download/v2.6.4/flex-2.6.4.tar.gz
https://pypi.org/packages/source/f/flit-core/flit_core-3.9.0.tar.gz
https://ftp.gnu.org/gnu/gawk/gawk-5.3.0.tar.xz
https://ftp.gnu.org/gnu/gcc/gcc-13.2.0/gcc-13.2.0.tar.xz
https://ftp.gnu.org/gnu/gdbm/gdbm-1.23.tar.gz
https://ftp.gnu.org/gnu/gettext/gettext-0.22.4.tar.xz
https://ftp.gnu.org/gnu/glibc/glibc-2.39.tar.xz
https://ftp.gnu.org/gnu/gmp/gmp-6.3.0.tar.xz
https://ftp.gnu.org/gnu/gperf/gperf-3.1.tar.gz
https://ftp.gnu.org/gnu/grep/grep-3.11.tar.xz
https://ftp.gnu.org/gnu/groff/groff-1.23.0.tar.gz
https://ftp.gnu.org/gnu/grub/grub-2.12.tar.xz
https://ftp.gnu.org/gnu/gzip/gzip-1.13.tar.xz
https://github.com/Mic92/iana-etc/releases/download/20240125/iana-etc-20240125.tar.gz
https://ftp.gnu.org/gnu/inetutils/inetutils-2.5.tar.xz
https://launchpad.net/intltool/trunk/0.51.0/+download/intltool-0.51.0.tar.gz
https://www.kernel.org/pub/linux/utils/net/iproute2/iproute2-6.7.0.tar.xz
https://pypi.org/packages/source/J/Jinja2/Jinja2-3.1.3.tar.gz
https://www.kernel.org/pub/linux/utils/kbd/kbd-2.6.4.tar.xz
https://www.kernel.org/pub/linux/utils/kernel/kmod/kmod-31.tar.xz
https://www.greenwoodsoftware.com/less/less-643.tar.gz
https://www.linuxfromscratch.org/lfs/downloads/12.1/lfs-bootscripts-20230728.tar.xz
https://www.kernel.org/pub/linux/libs/security/linux-privs/libcap2/libcap-2.69.tar.xz
https://github.com/libffi/libffi/releases/download/v3.4.4/libffi-3.4.4.tar.gz
https://download.savannah.gnu.org/releases/libpipeline/libpipeline-1.5.7.tar.gz
https://ftp.gnu.org/gnu/libtool/libtool-2.4.7.tar.xz
https://github.com/besser82/libxcrypt/releases/download/v4.4.36/libxcrypt-4.4.36.tar.xz
https://www.kernel.org/pub/linux/kernel/v6.x/linux-6.7.4.tar.xz
https://ftp.gnu.org/gnu/m4/m4-1.4.19.tar.xz
https://ftp.gnu.org/gnu/make/make-4.4.1.tar.gz
https://download.savannah.gnu.org/releases/man-db/man-db-2.12.0.tar.xz
https://www.kernel.org/pub/linux/docs/man-pages/man-pages-6.06.tar.xz
https://pypi.org/packages/source/M/MarkupSafe/MarkupSafe-2.1.5.tar.gz
https://github.com/mesonbuild/meson/releases/download/1.3.2/meson-1.3.2.tar.gz
https://ftp.gnu.org/gnu/mpc/mpc-1.3.1.tar.gz
https://ftp.gnu.org/gnu/mpfr/mpfr-4.2.1.tar.xz
https://anduin.linuxfromscratch.org/LFS/ncurses-6.4-20230520.tar.xz
https://github.com/ninja-build/ninja/archive/v1.11.1/ninja-1.11.1.tar.gz
https://www.openssl.org/source/openssl-3.2.1.tar.gz
https://ftp.gnu.org/gnu/patch/patch-2.7.6.tar.xz
https://www.cpan.org/src/5.0/perl-5.38.2.tar.xz
https://distfiles.ariadne.space/pkgconf/pkgconf-2.1.1.tar.xz
https://sourceforge.net/projects/procps-ng/files/Production/procps-ng-4.0.4.tar.xz
https://sourceforge.net/projects/psmisc/files/psmisc/psmisc-23.6.tar.xz
https://www.python.org/ftp/python/3.12.2/Python-3.12.2.tar.xz
https://www.python.org/ftp/python/doc/3.12.2/python-3.12.2-docs-html.tar.bz2
https://ftp.gnu.org/gnu/readline/readline-8.2.tar.gz
https://ftp.gnu.org/gnu/sed/sed-4.9.tar.xz
https://pypi.org/packages/source/s/setuptools/setuptools-69.1.0.tar.gz
https://github.com/shadow-maint/shadow/releases/download/4.14.5/shadow-4.14.5.tar.xz
https://www.infodrom.org/projects/sysklogd/download/sysklogd-1.5.1.tar.gz
https://github.com/systemd/systemd/archive/v255/systemd-255.tar.gz
https://anduin.linuxfromscratch.org/LFS/systemd-man-pages-255.tar.xz
https://github.com/slicer69/sysvinit/releases/download/3.08/sysvinit-3.08.tar.xz
https://ftp.gnu.org/gnu/tar/tar-1.35.tar.xz
https://downloads.sourceforge.net/tcl/tcl8.6.13-src.tar.gz
https://downloads.sourceforge.net/tcl/tcl8.6.13-html.tar.gz
https://ftp.gnu.org/gnu/texinfo/texinfo-7.1.tar.xz
https://www.iana.org/time-zones/repository/releases/tzdata2024a.tar.gz
https://anduin.linuxfromscratch.org/LFS/udev-lfs-20230818.tar.xz
https://www.kernel.org/pub/linux/utils/util-linux/v2.39/util-linux-2.39.3.tar.xz
https://github.com/vim/vim/archive/v9.1.0041/vim-9.1.0041.tar.gz
https://pypi.org/packages/source/w/wheel/wheel-0.42.0.tar.gz
https://cpan.metacpan.org/authors/id/T/TO/TODDR/XML-Parser-2.47.tar.gz
https://github.com/tukaani-project/xz/releases/download/v5.4.6/xz-5.4.6.tar.xz
https://zlib.net/fossils/zlib-1.3.1.tar.gz
https://github.com/facebook/zstd/releases/download/v1.5.5/zstd-1.5.5.tar.gz
https://www.linuxfromscratch.org/patches/lfs/12.1/bash-5.2.21-upstream_fixes-1.patch
https://www.linuxfromscratch.org/patches/lfs/12.1/bzip2-1.0.8-install_docs-1.patch
https://www.linuxfromscratch.org/patches/lfs/12.1/coreutils-9.4-i18n-1.patch
https://www.linuxfromscratch.org/patches/lfs/12.1/glibc-2.39-fhs-1.patch
https://www.linuxfromscratch.org/patches/lfs/12.1/kbd-2.6.4-backspace-1.patch
https://www.linuxfromscratch.org/patches/lfs/12.1/readline-8.2-upstream_fixes-3.patch
https://www.linuxfromscratch.org/patches/lfs/12.1/sysvinit-3.08-consolidated-1.patch
https://www.linuxfromscratch.org/patches/lfs/12.1/systemd-255-upstream_fixes-1.patch

View file

@@ -1,97 +0,0 @@
590765dee95907dbc3c856f7255bd669 acl-2.3.2.tar.xz
227043ec2f6ca03c0948df5517f9c927 attr-2.5.2.tar.gz
1be79f7106ab6767f18391c5e22be701 autoconf-2.72.tar.xz
cea31dbf1120f890cbf2a3032cfb9a68 automake-1.18.1.tar.xz
977c8c0c5ae6309191e7768e28ebc951 bash-5.3.tar.gz
ad4db5a0eb4fdbb3f6813be4b6b3da74 bc-7.0.3.tar.xz
dee5b4267e0305a99a3c9d6131f45759 binutils-2.45.tar.xz
c28f119f405a2304ff0a7ccdcc629713 bison-3.8.2.tar.xz
67e051268d0c475ea773822f7500d0e5 bzip2-1.0.8.tar.gz
b2e687b6e664b9dd76581836c5c3e782 coreutils-9.8.tar.xz
68c5208c58236eba447d7d6d1326b821 dejagnu-1.6.3.tar.gz
d1b18b20868fb561f77861cd90b05de4 diffutils-3.12.tar.xz
113d7a7ee0710d2a670a44692a35fd2e e2fsprogs-1.47.3.tar.gz
ceefa052ded950a4c523688799193a44 elfutils-0.193.tar.bz2
423975a2a775ff32f12c53635b463a91 expat-2.7.3.tar.xz
00fce8de158422f5ccd2666512329bd2 expect5.45.4.tar.gz
459da2d4b534801e2e2861611d823864 file-5.46.tar.gz
870cfd71c07d37ebe56f9f4aaf4ad872 findutils-4.10.0.tar.xz
2882e3179748cc9f9c23ec593d6adc8d flex-2.6.4.tar.gz
c538415c1f27bd69cbbbf3cdd5135d39 flit_core-3.12.0.tar.gz
b7014650c5f45e5d4837c31209dc0037 gawk-5.3.2.tar.xz
b861b092bf1af683c46a8aa2e689a6fd gcc-15.2.0.tar.xz
aaa600665bc89e2febb3c7bd90679115 gdbm-1.26.tar.gz
8e14e926f088e292f5f2bce95b81d10e gettext-0.26.tar.xz
23c6f5a27932b435cae94e087cb8b1f5 glibc-2.42.tar.xz
956dc04e864001a9c22429f761f2c283 gmp-6.3.0.tar.xz
31753b021ea78a21f154bf9eecb8b079 gperf-3.3.tar.gz
5d9301ed9d209c4a88c8d3a6fd08b9ac grep-3.12.tar.xz
5e4f40315a22bb8a158748e7d5094c7d groff-1.23.0.tar.gz
60c564b1bdc39d8e43b3aab4bc0fb140 grub-2.12.tar.xz
4bf5a10f287501ee8e8ebe00ef62b2c2 gzip-1.14.tar.xz
437a3e9f4a420244c90db4ab20e713b6 iana-etc-20250926.tar.gz
401d7d07682a193960bcdecafd03de94 inetutils-2.6.tar.xz
12e517cac2b57a0121cda351570f1e63 intltool-0.51.0.tar.gz
80e1f91bf59d572acc15d5c6eb4f3e7c iproute2-6.16.0.tar.xz
11ee9d335b227ea2e8579c4ba6e56138 isl-0.27.tar.xz
66d4c25ff43d1deaf9637ccda523dec8 jinja2-3.1.6.tar.gz
7be7c6f658f5fb9512e2c490349a8eeb kbd-2.9.0.tar.xz
36f2cc483745e81ede3406fa55e1065a kmod-34.2.tar.xz
0386dc14f6a081a94dfb4c2413864eed less-679.tar.gz
2be34eced7c861fea8894e7195dac636 lfs-bootscripts-20250827.tar.xz
449ade7d620b5c4eeb15a632fbaa4f74 libcap-2.76.tar.xz
92af9efad4ba398995abf44835c5d9e9 libffi-3.5.2.tar.gz
17ac6969b2015386bcb5d278a08a40b5 libpipeline-1.5.8.tar.gz
22e0a29df8af5fdde276ea3a7d351d30 libtool-2.5.4.tar.xz
1796a5d20098e9dd9e3f576803c83000 libxcrypt-4.4.38.tar.xz
feb0a3d5ecf5a4628aed7d9f8f7ab3f6 linux-6.16.9.tar.xz
dead9f5f1966d9ae56e1e32761e4e675 lz4-1.10.0.tar.gz
6eb2ebed5b24e74b6e890919331d2132 m4-1.4.20.tar.xz
c8469a3713cbbe04d955d4ae4be23eeb make-4.4.1.tar.gz
b6335533cbeac3b24cd7be31fdee8c83 man-db-2.13.1.tar.xz
16f68d70139dd2bbcae4102be4705753 man-pages-6.15.tar.xz
13a73126d25afa72a1ff0daed072f5fe markupsafe-3.0.3.tar.gz
19e0a1091cec23d369dd77d852844195 meson-1.9.1.tar.gz
5c9bc658c9fd0f940e8e3e0f09530c62 mpc-1.3.1.tar.gz
7c32c39b8b6e3ae85f25156228156061 mpfr-4.2.2.tar.xz
679987405412f970561cc85e1e6428a2 ncurses-6.5-20250809.tgz
c35f8f55f4cf60f1a916068d8f45a0f8 ninja-1.13.1.tar.gz
0ec20faeb96bbb203c8684cc7fe4432e openssl-3.5.3.tar.gz
ab0ef21ddebe09d1803575120d3f99f8 packaging-25.0.tar.gz
149327a021d41c8f88d034eab41c039f patch-2.8.tar.xz
641f99b635ebb9332a9b6a8ce8e2f3cf pcre2-10.46.tar.bz2
7a6950a9f12d01eb96a9d2ed2f4e0072 perl-5.42.0.tar.xz
3291128c917fdb8fccd8c9e7784b643b pkgconf-2.5.1.tar.xz
90803e64f51f192f3325d25c3335d057 procps-ng-4.0.5.tar.xz
53eae841735189a896d614cba440eb10 psmisc-23.7.tar.xz
256cdb3bbf45cdce7499e52ba6c36ea3 Python-3.13.7.tar.xz
b84c0d81b2758398bb7f5b7411d3d908 python-3.13.7-docs-html.tar.bz2
25a73bfb2a3ad7146c5e9d4408d9f6cd readline-8.3.tar.gz
6aac9b2dbafcd5b7a67a8a9bcb8036c3 sed-4.9.tar.xz
82e1d67883b713f9493659b50d13b436 setuptools-80.9.0.tar.gz
30ef46f54363db1d624587be68794ef2 shadow-4.18.0.tar.xz
d74bbdca4ab1b2bd46d3b3f8dbb0f3db sqlite-autoconf-3500400.tar.gz
63a62af5b35913459954e6e66876f2b8 sqlite-doc-3500400.tar.xz
af60786956a2dc84054fbf46652e515e sysklogd-2.7.2.tar.gz
25fe5d328e22641254761f1baa74cee0 systemd-257.8.tar.gz
a44063e2ec0cf4adfd2ed5c9e9e095c5 systemd-man-pages-257.8.tar.xz
bc6890b975d19dc9db42d0c7364dd092 sysvinit-3.14.tar.xz
a2d8042658cfd8ea939e6d911eaf4152 tar-1.35.tar.xz
1ec3444533f54d0f86cd120058e15e48 tcl8.6.17-src.tar.gz
60c71044e723b0db5f21be82929f3534 tcl8.6.17-html.tar.gz
11939a7624572814912a18e76c8d8972 texinfo-7.2.tar.xz
ad65154c48c74a9b311fe84778c5434f tzdata2025b.tar.gz
acd4360d8a5c3ef320b9db88d275dae6 udev-lfs-20230818.tar.xz
a2a3281ce76821c4bc28794fdf9d3994 util-linux-2.41.2.tar.xz
e72f31be182f1ccf4b66bef46ac1e60e vim-9.1.1806.tar.gz
65e09ee84af36821e3b1e9564aa91bd5 wheel-0.46.1.tar.gz
89a8e82cfd2ad948b349c0a69c494463 XML-Parser-2.47.tar.gz
cf5e1feb023d22c6bdaa30e84ef3abe3 xz-5.8.1.tar.xz
9855b6d802d7fe5b7bd5b196a2271655 zlib-1.3.1.tar.gz
780fc1896922b1bc52a4e90980cdda48 zstd-1.5.7.tar.gz
6a5ac7e89b791aae556de0f745916f7f bzip2-1.0.8-install_docs-1.patch
c800540039fb0707954197486b1bde70 coreutils-9.8-i18n-2.patch
0ca4d6bb8d572fbcdb13cb36cd34833e expect-5.45.4-gcc15-1.patch
9a5997c3452909b1769918c759eff8a2 glibc-2.42-fhs-1.patch
f75cca16a38da6caa7d52151f7136895 kbd-2.9.0-backspace-1.patch
3af8fd8e13cad481eeeaa48be4247445 sysvinit-3.14-consolidated-1.patch

View file

@@ -1,97 +0,0 @@
https://download.savannah.gnu.org/releases/acl/acl-2.3.2.tar.xz
https://download.savannah.gnu.org/releases/attr/attr-2.5.2.tar.gz
https://ftp.gnu.org/gnu/autoconf/autoconf-2.72.tar.xz
https://ftp.gnu.org/gnu/automake/automake-1.18.1.tar.xz
https://ftp.gnu.org/gnu/bash/bash-5.3.tar.gz
https://github.com/gavinhoward/bc/releases/download/7.0.3/bc-7.0.3.tar.xz
https://sourceware.org/pub/binutils/releases/binutils-2.45.tar.xz
https://ftp.gnu.org/gnu/bison/bison-3.8.2.tar.xz
https://www.sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz
https://ftp.gnu.org/gnu/coreutils/coreutils-9.8.tar.xz
https://ftp.gnu.org/gnu/dejagnu/dejagnu-1.6.3.tar.gz
https://ftp.gnu.org/gnu/diffutils/diffutils-3.12.tar.xz
https://downloads.sourceforge.net/project/e2fsprogs/e2fsprogs/v1.47.3/e2fsprogs-1.47.3.tar.gz
https://sourceware.org/ftp/elfutils/0.193/elfutils-0.193.tar.bz2
https://github.com/libexpat/libexpat/releases/download/R_2_7_3/expat-2.7.3.tar.xz
https://prdownloads.sourceforge.net/expect/expect5.45.4.tar.gz
https://astron.com/pub/file/file-5.46.tar.gz
https://ftp.gnu.org/gnu/findutils/findutils-4.10.0.tar.xz
https://github.com/westes/flex/releases/download/v2.6.4/flex-2.6.4.tar.gz
https://pypi.org/packages/source/f/flit-core/flit_core-3.12.0.tar.gz
https://ftp.gnu.org/gnu/gawk/gawk-5.3.2.tar.xz
https://ftp.gnu.org/gnu/gcc/gcc-15.2.0/gcc-15.2.0.tar.xz
https://ftp.gnu.org/gnu/gdbm/gdbm-1.26.tar.gz
https://ftp.gnu.org/gnu/gettext/gettext-0.26.tar.xz
https://ftp.gnu.org/gnu/glibc/glibc-2.42.tar.xz
https://ftp.gnu.org/gnu/gmp/gmp-6.3.0.tar.xz
https://ftp.gnu.org/gnu/gperf/gperf-3.3.tar.gz
https://ftp.gnu.org/gnu/grep/grep-3.12.tar.xz
https://ftp.gnu.org/gnu/groff/groff-1.23.0.tar.gz
https://ftp.gnu.org/gnu/grub/grub-2.12.tar.xz
https://ftp.gnu.org/gnu/gzip/gzip-1.14.tar.xz
https://github.com/Mic92/iana-etc/releases/download/20250926/iana-etc-20250926.tar.gz
https://ftp.gnu.org/gnu/inetutils/inetutils-2.6.tar.xz
https://launchpad.net/intltool/trunk/0.51.0/+download/intltool-0.51.0.tar.gz
https://www.kernel.org/pub/linux/utils/net/iproute2/iproute2-6.16.0.tar.xz
https://libisl.sourceforge.io/isl-0.27.tar.xz
https://pypi.org/packages/source/J/Jinja2/jinja2-3.1.6.tar.gz
https://www.kernel.org/pub/linux/utils/kbd/kbd-2.9.0.tar.xz
https://www.kernel.org/pub/linux/utils/kernel/kmod/kmod-34.2.tar.xz
https://www.greenwoodsoftware.com/less/less-679.tar.gz
https://www.linuxfromscratch.org/lfs/downloads/development/lfs-bootscripts-20250827.tar.xz
https://www.kernel.org/pub/linux/libs/security/linux-privs/libcap2/libcap-2.76.tar.xz
https://github.com/libffi/libffi/releases/download/v3.5.2/libffi-3.5.2.tar.gz
https://download.savannah.gnu.org/releases/libpipeline/libpipeline-1.5.8.tar.gz
https://ftp.gnu.org/gnu/libtool/libtool-2.5.4.tar.xz
https://github.com/besser82/libxcrypt/releases/download/v4.4.38/libxcrypt-4.4.38.tar.xz
https://www.kernel.org/pub/linux/kernel/v6.x/linux-6.16.9.tar.xz
https://github.com/lz4/lz4/releases/download/v1.10.0/lz4-1.10.0.tar.gz
https://ftp.gnu.org/gnu/m4/m4-1.4.20.tar.xz
https://ftp.gnu.org/gnu/make/make-4.4.1.tar.gz
https://download.savannah.gnu.org/releases/man-db/man-db-2.13.1.tar.xz
https://www.kernel.org/pub/linux/docs/man-pages/man-pages-6.15.tar.xz
https://pypi.org/packages/source/M/MarkupSafe/markupsafe-3.0.3.tar.gz
https://github.com/mesonbuild/meson/releases/download/1.9.1/meson-1.9.1.tar.gz
https://ftp.gnu.org/gnu/mpc/mpc-1.3.1.tar.gz
https://ftp.gnu.org/gnu/mpfr/mpfr-4.2.2.tar.xz
https://invisible-mirror.net/archives/ncurses/current/ncurses-6.5-20250809.tgz
https://github.com/ninja-build/ninja/archive/v1.13.1/ninja-1.13.1.tar.gz
https://github.com/openssl/openssl/releases/download/openssl-3.5.3/openssl-3.5.3.tar.gz
https://files.pythonhosted.org/packages/source/p/packaging/packaging-25.0.tar.gz
https://ftp.gnu.org/gnu/patch/patch-2.8.tar.xz
https://github.com/PCRE2Project/pcre2/releases/download/pcre2-10.46/pcre2-10.46.tar.bz2
https://www.cpan.org/src/5.0/perl-5.42.0.tar.xz
https://distfiles.ariadne.space/pkgconf/pkgconf-2.5.1.tar.xz
https://sourceforge.net/projects/procps-ng/files/Production/procps-ng-4.0.5.tar.xz
https://sourceforge.net/projects/psmisc/files/psmisc/psmisc-23.7.tar.xz
https://www.python.org/ftp/python/3.13.7/Python-3.13.7.tar.xz
https://www.python.org/ftp/python/doc/3.13.7/python-3.13.7-docs-html.tar.bz2
https://ftp.gnu.org/gnu/readline/readline-8.3.tar.gz
https://ftp.gnu.org/gnu/sed/sed-4.9.tar.xz
https://pypi.org/packages/source/s/setuptools/setuptools-80.9.0.tar.gz
https://github.com/shadow-maint/shadow/releases/download/4.18.0/shadow-4.18.0.tar.xz
https://sqlite.org/2025/sqlite-autoconf-3500400.tar.gz
https://anduin.linuxfromscratch.org/LFS/sqlite-doc-3500400.tar.xz
https://github.com/troglobit/sysklogd/releases/download/v2.7.2/sysklogd-2.7.2.tar.gz
https://github.com/systemd/systemd/archive/v257.8/systemd-257.8.tar.gz
https://anduin.linuxfromscratch.org/LFS/systemd-man-pages-257.8.tar.xz
https://github.com/slicer69/sysvinit/releases/download/3.14/sysvinit-3.14.tar.xz
https://ftp.gnu.org/gnu/tar/tar-1.35.tar.xz
https://downloads.sourceforge.net/tcl/tcl8.6.17-src.tar.gz
https://downloads.sourceforge.net/tcl/tcl8.6.17-html.tar.gz
https://ftp.gnu.org/gnu/texinfo/texinfo-7.2.tar.xz
https://www.iana.org/time-zones/repository/releases/tzdata2025b.tar.gz
https://anduin.linuxfromscratch.org/LFS/udev-lfs-20230818.tar.xz
https://www.kernel.org/pub/linux/utils/util-linux/v2.41/util-linux-2.41.2.tar.xz
https://github.com/vim/vim/archive/v9.1.1806/vim-9.1.1806.tar.gz
https://pypi.org/packages/source/w/wheel/wheel-0.46.1.tar.gz
https://cpan.metacpan.org/authors/id/T/TO/TODDR/XML-Parser-2.47.tar.gz
https://github.com//tukaani-project/xz/releases/download/v5.8.1/xz-5.8.1.tar.xz
https://zlib.net/fossils/zlib-1.3.1.tar.gz
https://github.com/facebook/zstd/releases/download/v1.5.7/zstd-1.5.7.tar.gz
https://www.linuxfromscratch.org/patches/lfs/development/bzip2-1.0.8-install_docs-1.patch
https://www.linuxfromscratch.org/patches/lfs/development/coreutils-9.8-i18n-2.patch
https://www.linuxfromscratch.org/patches/lfs/development/expect-5.45.4-gcc15-1.patch
https://www.linuxfromscratch.org/patches/lfs/development/glibc-2.42-fhs-1.patch
https://www.linuxfromscratch.org/patches/lfs/development/kbd-2.9.0-backspace-1.patch
https://www.linuxfromscratch.org/patches/lfs/development/sysvinit-3.14-consolidated-1.patch

View file

@@ -1,46 +0,0 @@
{
"generated_at": "2025-10-01T05:54:56.228701+00:00",
"packages": [
{
"book": "mlfs",
"id": "mlfs/linux",
"name": "Linux",
"path": "packages/mlfs/linux-headers.json",
"stage": "cross-toolchain",
"status": "draft",
"variant": null,
"version": "6.16.9 API Headers"
},
{
"book": "mlfs",
"id": "mlfs/glibc",
"name": "Glibc",
"path": "packages/mlfs/glibc.json",
"stage": "cross-toolchain",
"status": "draft",
"variant": null,
"version": "2.42"
},
{
"book": "mlfs",
"id": "mlfs/binutils-pass-1",
"name": "Binutils",
"path": "packages/mlfs/binutils-pass-1.json",
"stage": "cross-toolchain",
"status": "draft",
"variant": "Pass 1",
"version": "2.45"
},
{
"book": "mlfs",
"id": "mlfs/gcc-pass-1",
"name": "GCC",
"path": "packages/mlfs/gcc-pass-1.json",
"stage": "cross-toolchain",
"status": "draft",
"variant": "Pass 1",
"version": "15.2.0"
}
],
"schema_version": "v0.1.0"
}

View file

@@ -1,113 +0,0 @@
{
"artifacts": {
"disk": 678,
"install_prefix": null,
"sbu": 1.0
},
"build": [
{
"commands": [
"mkdir -v build",
"cd build"
],
"cwd": null,
"notes": null,
"phase": "setup",
"requires_root": false
},
{
"commands": [
"../configure --prefix=$LFS/tools \\",
"--with-sysroot=$LFS \\",
"--target=$LFS_TGT \\",
"--disable-nls \\",
"--enable-gprofng=no \\",
"--disable-werror \\",
"--enable-new-dtags \\",
"--enable-default-hash-style=gnu"
],
"cwd": null,
"notes": null,
"phase": "configure",
"requires_root": false
},
{
"commands": [
"make"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"make install"
],
"cwd": null,
"notes": null,
"phase": "install",
"requires_root": false
}
],
"dependencies": {
"build": [],
"runtime": []
},
"environment": {
"users": [],
"variables": []
},
"optimizations": {
"cflags": [
"-O3",
"-flto"
],
"enable_lto": true,
"enable_pgo": true,
"ldflags": [
"-flto"
],
"profdata": null
},
"package": {
"anchors": {
"section": "https://linuxfromscratch.org/~thomas/multilib-m32/chapter05/binutils-pass1.html#ch-tools-binutils-pass1"
},
"book": "mlfs",
"chapter": 5,
"id": "mlfs/binutils-pass-1",
"name": "Binutils",
"section": "5.2",
"stage": "cross-toolchain",
"upstream": null,
"variant": "Pass 1",
"version": "2.45"
},
"provenance": {
"book_release": "lfs-ml-12.4-40-multilib",
"content_hash": "7c580aad04933a2f6ec5e5410a57695dd2d0b76a293212f33fd3edd226490853",
"page_url": "https://linuxfromscratch.org/~thomas/multilib-m32/chapter05/binutils-pass1.html",
"retrieved_at": "2025-10-01T04:57:22.375928+00:00"
},
"schema_version": "v0.1.0",
"source": {
"archive": "binutils-2.45.tar.xz",
"checksums": [
{
"alg": "md5",
"value": "dee5b4267e0305a99a3c9d6131f45759"
}
],
"urls": [
{
"kind": "primary",
"url": "https://sourceware.org/pub/binutils/releases/binutils-2.45.tar.xz"
}
]
},
"status": {
"issues": [],
"state": "draft"
}
}

View file

@@ -1,174 +0,0 @@
{
"artifacts": {
"disk": 5,
"install_prefix": null,
"sbu": 3.0
},
"build": [
{
"commands": [
"tar -xf ../mpfr-4.2.2.tar.xz",
"mv -v mpfr-4.2.2 mpfr",
"tar -xf ../gmp-6.3.0.tar.xz",
"mv -v gmp-6.3.0 gmp",
"tar -xf ../mpc-1.3.1.tar.gz",
"mv -v mpc-1.3.1 mpc"
],
"cwd": null,
"notes": null,
"phase": "setup",
"requires_root": false
},
{
"commands": [
"sed -e '/m64=/s/lib64/lib/' \\",
"-e '/m32=/s/m32=.*/m32=..\\/lib32$(call if_multiarch,:i386-linux-gnu)/' \\",
"-i.orig gcc/config/i386/t-linux64"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"sed '/STACK_REALIGN_DEFAULT/s/0/(!TARGET_64BIT \\&\\& TARGET_SSE)/' \\",
"-i gcc/config/i386/i386.h"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"mkdir -v build",
"cd build"
],
"cwd": null,
"notes": null,
"phase": "setup",
"requires_root": false
},
{
"commands": [
"mlist=m64,m32",
"../configure \\",
"--target=$LFS_TGT \\",
"--prefix=$LFS/tools \\",
"--with-glibc-version=2.42 \\",
"--with-sysroot=$LFS \\",
"--with-newlib \\",
"--without-headers \\",
"--enable-default-pie \\",
"--enable-default-ssp \\",
"--enable-initfini-array \\",
"--disable-nls \\",
"--disable-shared \\",
"--enable-multilib --with-multilib-list=$mlist \\",
"--disable-decimal-float \\",
"--disable-threads \\",
"--disable-libatomic \\",
"--disable-libgomp \\",
"--disable-libquadmath \\",
"--disable-libssp \\",
"--disable-libvtv \\",
"--disable-libstdcxx \\",
"--enable-languages=c,c++"
],
"cwd": null,
"notes": null,
"phase": "configure",
"requires_root": false
},
{
"commands": [
"make"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"make install"
],
"cwd": null,
"notes": null,
"phase": "install",
"requires_root": false
},
{
"commands": [
"cd ..",
"cat gcc/limitx.h gcc/glimits.h gcc/limity.h > \\",
"`dirname $($LFS_TGT-gcc -print-libgcc-file-name)`/include/limits.h"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
}
],
"dependencies": {
"build": [],
"runtime": []
},
"environment": {
"users": [],
"variables": []
},
"optimizations": {
"cflags": [
"-O3",
"-flto"
],
"enable_lto": true,
"enable_pgo": true,
"ldflags": [
"-flto"
],
"profdata": null
},
"package": {
"anchors": {
"section": "https://linuxfromscratch.org/~thomas/multilib-m32/chapter05/gcc-pass1.html#ch-tools-gcc-pass1"
},
"book": "mlfs",
"chapter": 5,
"id": "mlfs/gcc-pass-1",
"name": "GCC",
"section": "5.3",
"stage": "cross-toolchain",
"upstream": null,
"variant": "Pass 1",
"version": "15.2.0"
},
"provenance": {
"book_release": "lfs-ml-12.4-40-multilib",
"content_hash": "439fb0bf6a99414e9ddf1301e603948e073d5df2dcf897ac5581c399bd99f045",
"page_url": "https://linuxfromscratch.org/~thomas/multilib-m32/chapter05/gcc-pass1.html",
"retrieved_at": "2025-10-01T05:30:05.164831+00:00"
},
"schema_version": "v0.1.0",
"source": {
"archive": "mpfr-4.2.2.tar.xz",
"checksums": [
{
"alg": "md5",
"value": "7c32c39b8b6e3ae85f25156228156061"
}
],
"urls": [
{
"kind": "primary",
"url": "https://ftp.gnu.org/gnu/gcc/gcc-15.2.0/gcc-15.2.0.tar.xz"
}
]
},
"status": {
"issues": [],
"state": "draft"
}
}

View file

@@ -1,289 +0,0 @@
{
"artifacts": {
"disk": 870,
"install_prefix": null,
"sbu": 1.0
},
"build": [
{
"commands": [
"ln -sfv ../lib/ld-linux-x86-64.so.2 $LFS/lib64",
"ln -sfv ../lib/ld-linux-x86-64.so.2 $LFS/lib64/ld-lsb-x86-64.so.3"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"patch -Np1 -i ../glibc-2.42-fhs-1.patch"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"mkdir -v build",
"cd build"
],
"cwd": null,
"notes": null,
"phase": "setup",
"requires_root": false
},
{
"commands": [
"echo \"rootsbindir=/usr/sbin\" > configparms"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"../configure \\",
"--prefix=/usr \\",
"--host=$LFS_TGT \\",
"--build=$(../scripts/config.guess) \\",
"--disable-nscd \\",
"libc_cv_slibdir=/usr/lib \\",
"--enable-kernel=5.4"
],
"cwd": null,
"notes": null,
"phase": "configure",
"requires_root": false
},
{
"commands": [
"make"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"make DESTDIR=$LFS install"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"sed '/RTLDLIST=/s@/usr@@g' -i $LFS/usr/bin/ldd"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"echo 'int main(){}' | $LFS_TGT-gcc -x c - -v -Wl,--verbose &> dummy.log",
"readelf -l a.out | grep ': /lib'"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"grep -E -o \"$LFS/lib.*/S?crt[1in].*succeeded\" dummy.log"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"grep -B3 \"^ $LFS/usr/include\" dummy.log"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"grep 'SEARCH.*/usr/lib' dummy.log |sed 's|; |\\n|g'"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"grep \"/lib.*/libc.so.6 \" dummy.log"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"grep found dummy.log"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"rm -v a.out dummy.log"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"make clean",
"find .. -name \"*.a\" -delete"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"CC=\"$LFS_TGT-gcc -m32\" \\",
"CXX=\"$LFS_TGT-g++ -m32\" \\",
"../configure \\",
"--prefix=/usr \\",
"--host=$LFS_TGT32 \\",
"--build=$(../scripts/config.guess) \\",
"--disable-nscd \\",
"--with-headers=$LFS/usr/include \\",
"--libdir=/usr/lib32 \\",
"--libexecdir=/usr/lib32 \\",
"libc_cv_slibdir=/usr/lib32 \\",
"--enable-kernel=5.4"
],
"cwd": null,
"notes": null,
"phase": "configure",
"requires_root": false
},
{
"commands": [
"make"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"make DESTDIR=$PWD/DESTDIR install",
"cp -a DESTDIR/usr/lib32 $LFS/usr/",
"install -vm644 DESTDIR/usr/include/gnu/{lib-names,stubs}-32.h \\",
"$LFS/usr/include/gnu/",
"ln -svf ../lib32/ld-linux.so.2 $LFS/lib/ld-linux.so.2"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"echo 'int main(){}' > dummy.c",
"$LFS_TGT-gcc -m32 dummy.c",
"readelf -l a.out | grep '/ld-linux'"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"rm -v dummy.c a.out"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
}
],
"dependencies": {
"build": [],
"runtime": []
},
"environment": {
"users": [],
"variables": []
},
"optimizations": {
"cflags": [
"-O3",
"-flto"
],
"enable_lto": true,
"enable_pgo": true,
"ldflags": [
"-flto"
],
"profdata": null
},
"package": {
"anchors": {
"section": "https://linuxfromscratch.org/~thomas/multilib-m32/chapter05/glibc.html#ch-tools-glibc"
},
"book": "mlfs",
"chapter": 5,
"id": "mlfs/glibc",
"name": "Glibc",
"section": "5.5",
"stage": "cross-toolchain",
"upstream": null,
"variant": null,
"version": "2.42"
},
"provenance": {
"book_release": "lfs-ml-12.4-40-multilib",
"content_hash": "52b9b51a37f960c3d8694d06cfd1a8ef297a3a2e473e565f402765b24c4e2329",
"page_url": "https://linuxfromscratch.org/~thomas/multilib-m32/chapter05/glibc.html",
"retrieved_at": "2025-10-01T05:30:28.195848+00:00"
},
"schema_version": "v0.1.0",
"source": {
"archive": "glibc-2.42.tar.xz",
"checksums": [
{
"alg": "md5",
"value": "23c6f5a27932b435cae94e087cb8b1f5"
}
],
"urls": [
{
"kind": "primary",
"url": "https://ftp.gnu.org/gnu/glibc/glibc-2.42.tar.xz"
},
{
"kind": "primary",
"url": "https://www.linuxfromscratch.org/patches/lfs/development/glibc-2.42-fhs-1.patch"
}
]
},
"status": {
"issues": [],
"state": "draft"
}
}

View file

@@ -1,81 +0,0 @@
{
"artifacts": {
"disk": 1,
"install_prefix": null,
"sbu": 0.0
},
"build": [
{
"commands": [
"make mrproper"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"make headers",
"find usr/include -type f ! -name '*.h' -delete",
"cp -rv usr/include $LFS/usr"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
}
],
"dependencies": {
"build": [],
"runtime": []
},
"environment": {
"users": [],
"variables": []
},
"optimizations": {
"cflags": [
"-O3",
"-flto"
],
"enable_lto": true,
"enable_pgo": true,
"ldflags": [
"-flto"
],
"profdata": null
},
"package": {
"anchors": {
"section": "https://linuxfromscratch.org/~thomas/multilib-m32/chapter05/linux-headers.html#ch-tools-linux-headers"
},
"book": "mlfs",
"chapter": 5,
"id": "mlfs/linux",
"name": "Linux",
"section": "5.4",
"stage": "cross-toolchain",
"upstream": null,
"variant": null,
"version": "6.16.9 API Headers"
},
"provenance": {
"book_release": "lfs-ml-12.4-40-multilib",
"content_hash": "cd251fbfaaa5da1eb43185331f5beaa07cdd9d50c79f19be266435781195b66d",
"page_url": "https://linuxfromscratch.org/~thomas/multilib-m32/chapter05/linux-headers.html",
"retrieved_at": "2025-10-01T05:30:14.291785+00:00"
},
"schema_version": "v0.1.0",
"source": {
"archive": null,
"checksums": [],
"urls": []
},
"status": {
"issues": [
"No source URLs with archive extensions detected"
],
"state": "draft"
}
}

View file

@@ -1,377 +0,0 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://lpkg.dev/schemas/package-metadata.json",
"title": "LPKG Package Metadata",
"type": "object",
"required": ["schema_version", "package", "source", "build", "provenance", "status"],
"additionalProperties": false,
"properties": {
"schema_version": {
"type": "string",
"pattern": "^v\\d+\\.\\d+\\.\\d+$"
},
"package": {
"type": "object",
"required": ["id", "name", "version", "book"],
"additionalProperties": false,
"properties": {
"id": {
"type": "string",
"pattern": "^[a-z0-9][a-z0-9-/]*$"
},
"name": {
"type": "string",
"minLength": 1
},
"upstream": {
"type": ["string", "null"]
},
"version": {
"type": "string",
"minLength": 1
},
"book": {
"type": "string",
"enum": ["lfs", "mlfs", "blfs", "glfs"]
},
"chapter": {
"type": ["integer", "null"],
"minimum": 0
},
"section": {
"type": ["string", "null"],
"pattern": "^\\d+\\.\\d+$"
},
"stage": {
"type": ["string", "null"],
"enum": [
"cross-toolchain",
"temporary-tools",
"system",
"system-configuration",
"system-finalization",
"desktop",
"server",
"multilib",
"kernel",
"boot",
null
]
},
"variant": {
"type": ["string", "null"],
"minLength": 1
},
"anchors": {
"type": "object",
"additionalProperties": {
"type": "string",
"format": "uri"
}
}
}
},
"source": {
"type": "object",
"required": ["urls"],
"additionalProperties": false,
"properties": {
"urls": {
"type": "array",
"items": {
"type": "object",
"required": ["url"],
"additionalProperties": false,
"properties": {
"url": {
"type": "string",
"format": "uri"
},
"kind": {
"type": "string",
"enum": ["primary", "mirror", "patch", "signature"]
}
}
}
},
"archive": {
"type": ["string", "null"]
},
"checksums": {
"type": "array",
"items": {
"type": "object",
"required": ["alg", "value"],
"additionalProperties": false,
"properties": {
"alg": {
"type": "string",
"enum": ["md5", "sha1", "sha256", "sha512"]
},
"value": {
"type": "string",
"pattern": "^[A-Fa-f0-9]{16,128}$"
}
}
}
}
}
},
"artifacts": {
"type": "object",
"additionalProperties": false,
"properties": {
"sbu": {
"type": ["number", "null"],
"minimum": 0
},
"disk": {
"type": ["integer", "null"],
"minimum": 0,
"description": "Approximate disk usage in MB"
},
"install_prefix": {
"type": ["string", "null"],
"minLength": 1
}
}
},
"dependencies": {
"type": "object",
"additionalProperties": false,
"properties": {
"build": {
"type": "array",
"items": {
"type": "object",
"required": ["name"],
"additionalProperties": false,
"properties": {
"name": {
"type": "string",
"minLength": 1
},
"optional": {
"type": "boolean",
"default": false
}
}
}
},
"runtime": {
"type": "array",
"items": {
"type": "string",
"minLength": 1
}
}
}
},
"environment": {
"type": "object",
"additionalProperties": false,
"properties": {
"variables": {
"type": "array",
"items": {
"type": "object",
"required": ["name"],
"additionalProperties": false,
"properties": {
"name": {
"type": "string",
"pattern": "^[A-Z0-9_]+$"
},
"required": {
"type": "boolean",
"default": true
},
"description": {
"type": "string"
}
}
}
},
"users": {
"type": "array",
"items": {
"type": "object",
"required": ["name"],
"additionalProperties": false,
"properties": {
"name": {
"type": "string",
"minLength": 1
},
"purpose": {
"type": "string"
}
}
}
}
}
},
"build": {
"type": "array",
"minItems": 1,
"items": {
"type": "object",
"required": ["phase", "commands"],
"additionalProperties": false,
"properties": {
"phase": {
"type": "string",
"enum": ["setup", "configure", "build", "test", "install", "post"]
},
"commands": {
"type": "array",
"items": {
"type": "string",
"minLength": 1
}
},
"cwd": {
"type": ["string", "null"],
"minLength": 1
},
"requires_root": {
"type": "boolean",
"default": false
},
"notes": {
"type": ["string", "null"],
"minLength": 1
}
}
}
},
"optimizations": {
"type": "object",
"additionalProperties": false,
"properties": {
"enable_lto": {
"type": "boolean"
},
"enable_pgo": {
"type": "boolean"
},
"cflags": {
"type": "array",
"items": {
"type": "string"
}
},
"ldflags": {
"type": "array",
"items": {
"type": "string"
}
},
"profdata": {
"type": ["string", "null"],
"minLength": 1
}
}
},
"tests": {
"type": "array",
"items": {
"type": "object",
"required": ["commands"],
"additionalProperties": false,
"properties": {
"commands": {
"type": "array",
"items": {
"type": "string"
}
},
"optional": {
"type": "boolean"
},
"expected_failures": {
"type": "array",
"items": {
"type": "string"
}
}
}
}
},
"post_install": {
"type": "array",
"items": {
"type": "object",
"required": ["commands"],
"additionalProperties": false,
"properties": {
"commands": {
"type": "array",
"items": {
"type": "string"
}
},
"description": {
"type": ["string", "null"]
}
}
}
},
"notes": {
"type": "array",
"items": {
"type": "object",
"required": ["text"],
"additionalProperties": false,
"properties": {
"severity": {
"type": "string",
"enum": ["info", "warning", "error"]
},
"text": {
"type": "string"
}
}
}
},
"provenance": {
"type": "object",
"required": ["book_release", "page_url", "retrieved_at"],
"additionalProperties": false,
"properties": {
"book_release": {
"type": "string"
},
"page_url": {
"type": "string",
"format": "uri"
},
"retrieved_at": {
"type": "string",
"format": "date-time"
},
"content_hash": {
"type": "string",
"pattern": "^[A-Fa-f0-9]{64}$"
}
}
},
"status": {
"type": "object",
"required": ["state"],
"additionalProperties": false,
"properties": {
"state": {
"type": "string",
"enum": ["draft", "review", "imported", "stale"]
},
"issues": {
"type": "array",
"items": {
"type": "string"
}
}
}
}
}
}
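
The schema above is what the `validate` subcommand checks records against. A sketch of that core check using the `jsonschema` crate (0.17) pinned in the old Cargo.toml; the function name and error handling are illustrative, not the repository's actual code:

```rust
use jsonschema::JSONSchema;
use serde_json::Value;
use std::{error::Error, fs};

fn validate_record(schema_path: &str, record_path: &str) -> Result<(), Box<dyn Error>> {
    let schema: Value = serde_json::from_str(&fs::read_to_string(schema_path)?)?;
    let record: Value = serde_json::from_str(&fs::read_to_string(record_path)?)?;

    // Compile once; the compiled schema can be reused across many records.
    let compiled = JSONSchema::compile(&schema).map_err(|e| e.to_string())?;

    // Report every violation with its JSON-pointer location.
    if let Err(errors) = compiled.validate(&record) {
        for err in errors {
            eprintln!("{}: {}", err.instance_path, err);
        }
        return Err("schema validation failed".into());
    }
    Ok(())
}

fn main() -> Result<(), Box<dyn Error>> {
    validate_record("ai/metadata/schema.json", "ai/metadata/packages/mlfs/glibc.json")
}
```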

View file

@@ -1,46 +0,0 @@
# Integrating jhalfs Source Metadata
- Goal: reuse jhalfs wget-list and md5sums to populate package `source.urls` and
auto-fill checksums when harvesting metadata for MLFS/BLFS/GLFS packages.
- Data source: `https://anduin.linuxfromscratch.org/` hosts per-release
`wget-list`/`md5sums` files already curated by the jhalfs project.
- Approach:
1. Fetch (and optionally cache under `ai/cache/`) the lists for each book.
2. When harvesting, map `<package>-<version>` against the list to gather all
   relevant URLs (a matching sketch follows this list).
3. Pull matching checksum entries to populate `source.checksums`.
4. Keep the existing HTML scrape for chapter/stage text; jhalfs covers only
sources.
- Benefits: avoids fragile HTML tables, keeps URLs aligned with official build
scripts, and ensures checksums are up-to-date.
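
A rough sketch of steps 2 and 3, assuming the manifests are already cached as plain text and that matching is done on the archive file-name prefix (function names are illustrative):

```rust
/// Scan a cached jhalfs wget-list for URLs whose file name starts with
/// the `<package>-<version>` slug, e.g. "binutils-2.45".
fn urls_for_slug<'a>(wget_list: &'a str, slug: &str) -> Vec<&'a str> {
    wget_list
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty())
        .filter(|url| {
            url.rsplit('/')
                .next()
                .map_or(false, |file| file.starts_with(slug))
        })
        .collect()
}

/// Look up the md5 checksum for an exact archive name in a cached
/// jhalfs md5sums manifest ("<md5>  <archive>" per line).
fn md5_for_archive<'a>(md5sums: &'a str, archive: &str) -> Option<&'a str> {
    md5sums.lines().find_map(|line| {
        let mut parts = line.split_whitespace();
        let sum = parts.next()?;
        let name = parts.next()?;
        (name == archive).then_some(sum)
    })
}
```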
# Metadata → Rust Module Strategy
Goal: emit Rust modules under `src/pkgs/by_name` directly from harvested
metadata once MLFS/BLFS/GLFS records are validated.
Outline:
1. **Schema alignment** – Ensure harvested JSON carries everything the
   `PackageDefinition` constructor expects (source URLs, checksums, build
   commands, dependencies, optimisation flags, notes/stage metadata).
2. **Translation layer** – Implement a converter (likely in a new module,
   e.g. `src/pkgs/generator.rs`) that reads a metadata JSON file and produces a
   `ScaffoldRequest` or directly writes the module source via the existing
   scaffolder (see the deserialization sketch after this outline).
3. **Naming/layout** – Derive module paths from `package.id` (e.g.
   `mlfs/binutils-pass-1` → `src/pkgs/by_name/bi/binutils/pass_1/mod.rs`) while
   preserving the prefix/slug conventions already used by the scaffolder.
4. **CLI integration** – Add a subcommand (`metadata_indexer generate`) that
   accepts a list of package IDs or a glob, feeds each through the translator,
   and optionally stages the resulting Rust files.
5. **Diff safety** – Emit modules to a temporary location first, compare
   against existing files, and only overwrite when changes are detected; keep a
   `--dry-run` mode for review.
6. **Tests/checks** – After generation, run `cargo fmt` and `cargo check` to
   ensure the new modules compile; optionally add schema fixtures covering edge
   cases (variants, multiple URLs, absent checksums).
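
A minimal deserialization sketch for steps 1–2, covering only the subset of schema fields a translator would need; the struct names are illustrative, not existing repository types:

```rust
use serde::Deserialize;

// Subset of ai/metadata/schema.json relevant to module generation.
#[derive(Deserialize)]
struct MetadataRecord {
    schema_version: String,
    package: PackageInfo,
    build: Vec<BuildPhase>,
}

#[derive(Deserialize)]
struct PackageInfo {
    id: String,              // e.g. "mlfs/binutils-pass-1"
    name: String,
    version: String,
    variant: Option<String>, // e.g. Some("Pass 1")
}

#[derive(Deserialize)]
struct BuildPhase {
    phase: String,           // "setup" | "configure" | "build" | "test" | "install" | "post"
    commands: Vec<String>,
    #[serde(default)]
    requires_root: bool,
}

fn load_record(path: &std::path::Path) -> Result<MetadataRecord, Box<dyn std::error::Error>> {
    Ok(serde_json::from_str(&std::fs::read_to_string(path)?)?)
}
```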
Open questions:
- How to represent optional post-install steps or multi-phase builds inside the
generated module (additional helper functions vs. raw command arrays).
- Where to store PGO workload hints once the PGO infrastructure is defined.

View file

@@ -1,24 +0,0 @@
[
{
"id": "default_cli",
"name": "Codex CLI Assistant",
"description": "Default persona for repository automation; focuses on safe refactors and tooling improvements.",
"strengths": [
"Rust and tooling pipelines",
"Workflow automation",
"Incremental migrations"
],
"notes": "Derived from GPT-5 Codex runtime; avoids destructive operations without explicit approval."
},
{
"id": "mlfs_researcher",
"name": "MLFS Researcher",
"description": "Persona dedicated to tracking Multilib Linux From Scratch package metadata and translating it into lpkg modules.",
"strengths": [
"HTML scraping",
"Package manifest synthesis",
"Optimization flag tuning"
],
"notes": "Activated when working with https://linuxfromscratch.org/~thomas/multilib-m32/ resources."
}
]

View file

@@ -1,74 +0,0 @@
{
"generated_at": "2025-03-09T00:00:00Z",
"unfinished": [
{
"id": "mlfs-package-import",
"title": "Import all MLFS packages into lpkg",
"description": "Parse the Multilib LFS book and scaffold package definitions with optimization defaults (LTO/PGO/-O3).",
"blocked_on": [
"Finalize metadata -> Rust module generation pipeline",
"Implement automated parser"
],
"owner": "mlfs_researcher"
},
{
"id": "pgo-integration",
"title": "Integrate profile guided optimization support",
"description": "Add infrastructure for collection and replay of profiling data during package builds.",
"blocked_on": [
"Decide on profiling workload definitions"
],
"owner": "default_cli"
},
{
"id": "lfs-html-parsers",
"title": "Automate LFS/BLFS/GLFS ingest via HTML parsing",
"description": "Avoid hardcoded package data; download the upstream books (LFS, BLFS, GLFS) and parse them to drive scaffolding and metadata updates.",
"blocked_on": [
"Design resilient scraping strategies for each book",
"Implement incremental update workflow"
],
"owner": "mlfs_researcher"
},
{
"id": "rust-module-generator",
"title": "Generate package modules from harvested metadata",
"description": "Transform harvested metadata into Rust files under src/pkgs/by_name, wiring PackageDefinition data directly.",
"blocked_on": [
"Define translation scheme from metadata to PackageDefinition",
"Integrate generator with metadata_indexer output"
],
"owner": "default_cli"
}
],
"solved": [
{
"id": "ai-metadata-store",
"title": "Create AI metadata directory",
"description": "Introduce ai/personas.json, ai/tasks.json, ai/bugs.json for persistent assistant context.",
"resolution": "Initial JSON files checked in with placeholder content.",
"owner": "default_cli"
},
{
"id": "metadata-schema-v0.1",
"title": "Define package metadata schema",
"description": "Specify JSON schema and layout for storing scraped package detail from LFS family books.",
"resolution": "Added ai/metadata/schema.json with v0.1.0 structure and seeded initial package entry/index.",
"owner": "default_cli"
},
{
"id": "metadata-indexer-cli",
"title": "Build metadata validation/indexing tool",
"description": "Provide a standalone CLI to validate package metadata against the schema and regenerate ai/metadata/index.json.",
"resolution": "Added src/bin/metadata_indexer.rs with schema validation, summary extraction, and index writer integration.",
"owner": "default_cli"
},
{
"id": "metadata-jhalfs-refresh",
"title": "Wire jhalfs manifests into metadata harvester",
"description": "Cache wget-list/md5sums from jhalfs and expose a CLI refresh command so harvesting can populate source URLs and checksums reliably.",
"resolution": "Extended metadata_indexer with a `refresh` subcommand, cached manifests under ai/metadata/cache/, and hooked harvest to populate MD5 checksums via jhalfs data.",
"owner": "default_cli"
}
]
}

File diff suppressed because it is too large.

View file

@@ -1,120 +0,0 @@
# Architecture Overview
This project is split into a reusable Rust library crate (`package_management`)
and several binaries that orchestrate day-to-day workflows. The sections below
outline the main entry points and how the supporting modules fit together.
## CLI entry points
| Binary | Location | Purpose |
| ------ | -------- | ------- |
| `lpkg` | `src/main.rs` | Primary command-line interface with workflow automation and optional TUI integration. |
| `metadata_indexer` | `src/bin/metadata_indexer.rs` | Harvests LFS/BLFS/GLFS package metadata, validates it against the JSON schema, keeps `ai/metadata/index.json` up to date, and can scaffold Rust modules from harvested records. |
### `lpkg` workflows
`lpkg` uses [Clap](https://docs.rs/clap) to expose multiple subcommands:
- `EnvCheck` – fetches `<pre>` blocks from an LFS-style HTML page and runs the
  embedded `ver_check` / `ver_kernel` scripts.
- `FetchManifests` – downloads the book's canonical `wget-list` and `md5sums`
  files and writes them to disk.
- `BuildBinutils` – parses the Binutils Pass 1 page, mirrors the documented
  build steps, and executes them in a Tokio runtime.
- `ScaffoldPackage` – generates a new module under `src/pkgs/by_name/` with
  optimisation defaults (LTO/PGO/`-O3`) and persists metadata via the DB
  helpers.
- `ImportMlfs` – walks the MLFS catalogue, scaffolding definitions and storing
  them in the database (with optional `--dry-run`, `--limit`, and `--overwrite`).
When compiled with the `tui` feature flag, the CLI also exposes
`lpkg tui disk-manager`, which drops the user into the terminal UI defined in
`src/tui/`.
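
A condensed sketch of how that subcommand surface might look with Clap's derive API; only the subcommand and flag names come from the list above, everything else is illustrative:

```rust
use clap::{Parser, Subcommand};

#[derive(Parser)]
#[command(name = "lpkg")]
struct Cli {
    /// No subcommand falls through to the TUI when built with `--features tui`.
    #[command(subcommand)]
    command: Option<Command>,
}

#[derive(Subcommand)]
enum Command {
    /// Fetch <pre> blocks from an LFS-style page and run ver_check / ver_kernel.
    EnvCheck { url: String },
    /// Download the book's canonical wget-list and md5sums files.
    FetchManifests,
    /// Parse the Binutils Pass 1 page and replay its build steps.
    BuildBinutils,
    /// Scaffold a module under src/pkgs/by_name/ with LTO/PGO/-O3 defaults.
    ScaffoldPackage { name: String },
    /// Walk the MLFS catalogue and store definitions in the database.
    ImportMlfs {
        #[arg(long)]
        dry_run: bool,
        #[arg(long)]
        limit: Option<usize>,
        #[arg(long)]
        overwrite: bool,
    },
}

fn main() {
    let cli = Cli::parse();
    match cli.command {
        Some(cmd) => { /* dispatch to the workflow handlers */ let _ = cmd; }
        None => { /* fall back to DiskManager::run_tui() under the tui feature */ }
    }
}
```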
### `metadata_indexer`
The `metadata_indexer` binary is a companion tool for maintaining the JSON
artifacts under `ai/metadata/`:
- `validate` – validates every `packages/**.json` file against
  `ai/metadata/schema.json` and reports schema or summary extraction issues.
- `index` – revalidates the metadata and regenerates
  `ai/metadata/index.json` (use `--compact` for single-line JSON).
- `harvest` – fetches a given book page, extracts build metadata, and emits a
  schema-compliant JSON skeleton. When direct HTML parsing does not locate the
  source tarball, it falls back to cached jhalfs manifests to populate
  `source.urls` and MD5 checksums.
- `refresh` – downloads (or re-downloads with `--force`) the jhalfs manifests
  (`wget-list`, `md5sums`) for one or more books and stores them under
  `ai/metadata/cache/`.
## Module layout
```
src/
ai/ // JSON loaders for repository personas, tasks, and bugs
db/ // Diesel database setup and models
html.rs // Lightweight HTML helpers (fetch + parse <pre> blocks)
ingest/ // Parsers for LFS / MLFS / BLFS / GLFS book content
md5_utils.rs // Fetches canonical md5sums from the book mirror
mirrors.rs // Lists official source mirrors for downloads
pkgs/ // Package scaffolding and metadata definition helpers
tui/ // Optional terminal UI (crossterm + tui)
version_check.rs // Executes ver_check / ver_kernel snippets
wget_list.rs // Fetches jhalfs-maintained wget-list manifests
bin/metadata_indexer.rs // AI metadata CLI described above
```
### Notable modules
- **`src/pkgs/scaffolder.rs`**
- Generates filesystem modules and `PackageDefinition` records based on a
`ScaffoldRequest`.
- Normalises directory layout (prefix modules, `mod.rs` entries) and applies
optimisation defaults (LTO, PGO, `-O3`).
- **`src/ingest/`**
- Provides HTML parsers tailored to each book flavour (LFS, MLFS, BLFS,
GLFS). The parsers emit `BookPackage` records consumed by the scaffolder
and metadata importer.
- **`src/db/`**
- Diesel models and schema for persisting package metadata. `lpkg` uses these
helpers when scaffolding or importing packages.
- **`src/tui/`**
- Houses the optional terminal interface (disk manager, main menu, settings,
downloader). The entry points are conditionally compiled behind the `tui`
cargo feature.
## Data & metadata assets
The repository keeps long-lived artifacts under `ai/`:
- `ai/metadata/`: JSON schema (`schema.json`), package records, and a generated
index (`index.json`). The `metadata_indexer` binary maintains these files.
- `ai/personas.json`, `ai/tasks.json`, `ai/bugs.json`: contextual data for
automated assistance.
- `ai/notes.md`: scratchpad for future work (e.g., jhalfs integration).
`data/` currently contains catalogues derived from the MLFS book and can be
extended with additional book snapshots.
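The loaders in `src/ai/` expose typed accessors over these files; a minimal sketch of their use:
```rust
use package_management::ai;

fn main() -> anyhow::Result<()> {
    // The loaders resolve ai/<file>.json beneath the given base directory.
    let personas = ai::load_personas(".")?;
    for persona in &personas {
        println!("{}: {}", persona.id, persona.name);
    }
    Ok(())
}
```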
## Database and persistence
The Diesel setup uses SQLite (via the `diesel` crate with `sqlite` and `r2d2`
features enabled). Connection pooling lives in `src/db/mod.rs` and is consumed
by workflows that scaffold or import packages.
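A minimal sketch of the pooled workflow (the package name and version are illustrative):
```rust
use package_management::{db, pkgs::package::PackageDefinition};

fn main() -> anyhow::Result<()> {
    // Honours LPKG_DATABASE_URL, falling back to ./lpkg.db.
    let pool = db::establish_pool()?;

    // Upserts are keyed on the (name, version) unique index.
    let pkg = PackageDefinition::new("zlib", "1.3.1");
    db::upsert_package_via_pool(&pool, &pkg)?;

    for row in db::load_packages_via_pool(&pool)? {
        println!("{} {}", row.name, row.version);
    }
    Ok(())
}
```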
## Optional terminal UI
The TUI revolves around `DiskManager` (a crossterm + tui-based interface for
GPT partition inspection and creation). Additional stubs (`main_menu.rs`,
`settings.rs`, `downloader.rs`) are present for future expansion. The main CLI
falls back to `DiskManager::run_tui()` whenever `lpkg` was built with
`--features tui` and is invoked without a subcommand.
---
For more operational details around metadata harvesting, refer to
[`docs/METADATA_PIPELINE.md`](./METADATA_PIPELINE.md).

View file

@ -1,80 +0,0 @@
# Metadata Harvesting Pipeline
This repository tracks AI-friendly package metadata under `ai/metadata/`.
The `metadata_indexer` binary orchestrates validation and harvesting tasks.
This document explains the workflow and the supporting assets.
## Directory layout
- `ai/metadata/schema.json`: JSON Schema (Draft 2020-12) describing one
package record.
- `ai/metadata/packages/<book>/<slug>.json`: harvested package metadata.
- `ai/metadata/index.json`: generated summary table linking package IDs to
their JSON files.
- `ai/notes.md`: scratchpad for future improvements (e.g., jhalfs integration).
## `metadata_indexer` commands
| Command | Description |
| ------- | ----------- |
| `validate` | Loads every package JSON file and validates it against `schema.json`. Reports schema violations and summary extraction errors. |
| `index` | Re-runs validation and regenerates `index.json`. Use `--compact` to write a single-line JSON payload. |
| `harvest` | Fetches a book page, scrapes build instructions, and emits a draft metadata record (to stdout with `--dry-run` or into `ai/metadata/packages/`). Falls back to jhalfs manifests when inline source links are absent. |
| `refresh` | Updates cached jhalfs manifests (`wget-list`, `md5sums`) under `ai/metadata/cache/`. Supports `--books` filtering and `--force` to bypass the cache. |
| `generate` | Translates harvested metadata into Rust modules under `src/pkgs/by_name` (or a specified directory), using the scaffolder to create `PackageDefinition` wrappers. |
### Harvesting flow
1. **Fetch HTML**: the requested page is downloaded with `reqwest` and parsed
using `scraper` selectors.
2. **Heading metadata**: the `h1.sect1` title provides the chapter/section,
canonical package name, version, and optional variant hints.
3. **Build steps**: `<pre class="userinput">` blocks become ordered `build`
phases (`setup`, `configure`, `build`, `test`, `install`).
4. **Artifact stats**: `div.segmentedlist` entries supply SBU and disk usage.
5. **Source URLs**: the harvester tries two strategies:
   - Inline HTML links inside the page (common for BLFS articles).
   - Fallback to the cached jhalfs `wget-list` for the selected book to find
     matching `<package>-<version>` entries.
6. **Checksums**: the matching entry from the cached jhalfs `md5sums`
manifest populates `source.checksums` when the archive name is known.
7. **Status**: unresolved items (missing URLs, anchors, etc.) are recorded in
`status.issues` so humans can review or patch the draft before
promoting it.
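Putting the steps together, a freshly harvested draft looks roughly like the abridged sketch below (values taken from the Binutils Pass 1 record elsewhere in this repository; `ai/metadata/schema.json` remains the authoritative shape):
```json
{
  "schema_version": "v0.1.0",
  "package": {
    "id": "mlfs/binutils-pass-1",
    "name": "Binutils",
    "version": "2.45",
    "stage": "cross-toolchain",
    "variant": "Pass 1"
  },
  "source": {
    "archive": "binutils-2.45.tar.xz",
    "urls": [
      { "url": "https://sourceware.org/pub/binutils/releases/binutils-2.45.tar.xz" }
    ],
    "checksums": [
      { "alg": "md5", "value": "dee5b4267e0305a99a3c9d6131f45759" }
    ]
  },
  "build": [
    { "phase": "configure", "commands": ["mkdir -v build", "cd build"] }
  ],
  "optimizations": { "enable_lto": true, "enable_pgo": true },
  "status": { "issues": [] }
}
```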
### Known gaps
- **Source links via tables**: some MLFS chapters list download links inside a
“Package Information” table. The current implementation relies on the
jhalfs `wget-list` fallback instead of parsing that table.
- **Anchor discovery**: if the heading lacks an explicit `id` attribute, the
scraper attempts to locate child anchors or scan the raw HTML. If none are
found, a warning is recorded and `status.issues` contains a reminder.
## Using jhalfs manifests
The maintained `wget-list`/`md5sums` files hosted by jhalfs provide canonical
source URLs and hashes. The `metadata_indexer refresh` command keeps these
manifests cached under `ai/metadata/cache/`. Harvesting consumes the cached
copies to populate URLs and MD5 checksums.
Planned enhancements (see `ai/notes.md` and `ai/bugs.json#metadata-harvest-no-source-urls`):
1. Abstract list fetching so BLFS/GLFS variants can reuse the logic.
2. Normalise the match criteria for package + version (handling pass stages,
suffixes, etc.).
## Manual review checklist
When a new metadata file is generated:
- `schema_version` should match `schema.json` (currently `v0.1.0`).
- `package.id` should be unique (format `<book>/<slug>`).
- `source.urls` must include at least one primary URL; add mirrors/patches as
needed.
- Clear any `status.issues` before promoting the record from `draft`.
- Run `cargo run --bin metadata_indexer -- --base-dir . index` to regenerate
the global index once the draft is finalised.
Refer to `README.md` for usage examples and to `docs/ARCHITECTURE.md` for a
broader overview of the crate layout.

View file

@ -1,61 +0,0 @@
# Package Module Generation
This document explains how harvested metadata is transformed into concrete
Rust modules under `src/pkgs/by_name/`.
## Overview
1. **Harvest metadata**: use `metadata_indexer harvest` to capture package data
from the LFS/BLFS/GLFS books. Each record is written to
`ai/metadata/packages/<book>/<slug>.json`.
2. **Refresh manifests**: run
`metadata_indexer refresh` to ensure the jhalfs `wget-list` and `md5sums`
caches are up to date. Harvesting relies on these caches for canonical
source URLs and checksums.
3. **Generate modules**: use
`metadata_indexer generate --metadata <path> --output <by_name_dir>` to turn a
metadata file into a full Rust module that exposes a `PackageDefinition`.
Generated modules leverage the existing scaffolder logic, so the command will
create any missing prefix directories (e.g. `bi/mod.rs`) and populate the final
`mod.rs` file with the correct code template.
## Command reference
```bash
# Harvest metadata from a book page
cargo run --bin metadata_indexer -- --base-dir . harvest \
--book mlfs \
--page chapter05/binutils-pass1 \
--output ai/metadata/packages/mlfs/binutils-pass-1.json
# Refresh jhalfs manifests (optional but recommended)
cargo run --bin metadata_indexer -- --base-dir . refresh
# Generate a module under the standard src tree
cargo run --bin metadata_indexer -- --base-dir . generate \
--metadata ai/metadata/packages/mlfs/binutils-pass-1.json \
--output src/pkgs/by_name \
--overwrite
```
### Flags
- `--output` defaults to `src/pkgs/by_name`. Point it to another directory if
you want to stage modules elsewhere (e.g. `target/generated/by_name`).
- `--overwrite` deletes the existing module directory before scaffolding a new
one.
After generation, run `cargo fmt` and `cargo check` to ensure the crate compiles
with the new modules.
## Implementation notes
- Metadata fields such as `build`, `dependencies`, and `optimizations` are
mapped directly onto the scaffolder's `ScaffoldRequest` type.
- Source URLs and MD5 checksums are sourced from the harvested metadata
(populated via the jhalfs manifests).
- The module slug is derived from `package.id` (e.g.
`mlfs/binutils-pass-1` → `src/pkgs/by_name/bi/binutils_pass_1/mod.rs`).
See the code in `src/pkgs/generator.rs` for the full translation logic.
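A condensed sketch of that slug-to-path mapping (the real helpers in `src/pkgs/generator.rs` additionally guard against empty slugs and leading digits):
```rust
fn module_path(id: &str) -> String {
    // Drop the "<book>/" prefix, then normalise to a Rust module name.
    let slug = id.split_once('/').map(|(_, s)| s).unwrap_or(id);
    let module: String = slug
        .chars()
        .map(|c| if c.is_ascii_alphanumeric() { c.to_ascii_lowercase() } else { '_' })
        .collect();
    // The two-character prefix buckets modules under by_name/.
    let prefix: String = module.chars().take(2).collect();
    format!("src/pkgs/by_name/{prefix}/{module}/mod.rs")
}

fn main() {
    assert_eq!(
        module_path("mlfs/binutils-pass-1"),
        "src/pkgs/by_name/bi/binutils_pass_1/mod.rs"
    );
}
```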

View file

@ -1,79 +0,0 @@
use std::path::{Path, PathBuf};
use anyhow::Result;
use serde::Deserialize;
/// Loads assistant persona metadata from `ai/personas.json`.
pub fn load_personas(base_dir: impl AsRef<Path>) -> Result<Vec<Persona>> {
let path = resolve(base_dir, "personas.json");
read_json(path)
}
/// Loads the tracked task board from `ai/tasks.json`.
pub fn load_tasks(base_dir: impl AsRef<Path>) -> Result<TaskBoard> {
let path = resolve(base_dir, "tasks.json");
read_json(path)
}
/// Loads the current bug ledger from `ai/bugs.json`.
pub fn load_bugs(base_dir: impl AsRef<Path>) -> Result<Vec<Bug>> {
let path = resolve(base_dir, "bugs.json");
read_json(path)
}
fn resolve(base_dir: impl AsRef<Path>, file: &str) -> PathBuf {
base_dir.as_ref().join("ai").join(file)
}
fn read_json<T>(path: PathBuf) -> Result<T>
where
T: for<'de> Deserialize<'de>,
{
let data = std::fs::read_to_string(&path)?;
Ok(serde_json::from_str(&data)?)
}
#[derive(Debug, Deserialize)]
pub struct Persona {
pub id: String,
pub name: String,
pub description: String,
#[serde(default)]
pub strengths: Vec<String>,
#[serde(default)]
pub notes: String,
}
#[derive(Debug, Deserialize)]
pub struct TaskBoard {
pub generated_at: String,
pub unfinished: Vec<Task>,
pub solved: Vec<Task>,
}
#[derive(Debug, Deserialize)]
pub struct Task {
pub id: String,
pub title: String,
pub description: String,
#[serde(default)]
pub blocked_on: Vec<String>,
#[serde(default)]
pub owner: Option<String>,
#[serde(default)]
pub resolution: Option<String>,
}
#[derive(Debug, Deserialize)]
pub struct Bug {
pub id: String,
pub title: String,
pub description: String,
pub status: String,
#[serde(default)]
pub owner: Option<String>,
#[serde(default)]
pub created_at: Option<String>,
#[serde(default)]
pub labels: Vec<String>,
}

File diff suppressed because it is too large

View file

@ -1,107 +0,0 @@
pub mod models;
pub mod schema;
use std::env;
use anyhow::{Context, Result};
use diesel::prelude::*;
use diesel::r2d2::{self, ConnectionManager};
use diesel::sqlite::SqliteConnection;
use crate::pkgs::package::PackageDefinition;
use self::models::{NewPackage, Package};
use self::schema::packages::dsl as packages_dsl;
pub type Pool = r2d2::Pool<ConnectionManager<SqliteConnection>>;
pub type Connection = r2d2::PooledConnection<ConnectionManager<SqliteConnection>>;
const DEFAULT_DB_URL: &str = "lpkg.db";
/// Resolve the database URL from `LPKG_DATABASE_URL` or fall back to `lpkg.db` in the CWD.
pub fn database_url() -> String {
env::var("LPKG_DATABASE_URL").unwrap_or_else(|_| DEFAULT_DB_URL.to_string())
}
/// Build an r2d2 connection pool and ensure the schema exists.
pub fn establish_pool() -> Result<Pool> {
let manager = ConnectionManager::<SqliteConnection>::new(database_url());
let pool = Pool::builder()
.build(manager)
.context("creating Diesel connection pool")?;
{
let mut conn = pool
.get()
.context("establishing initial database connection")?;
initialize(&mut conn)?;
}
Ok(pool)
}
fn initialize(conn: &mut SqliteConnection) -> Result<()> {
diesel::sql_query(
"CREATE TABLE IF NOT EXISTS packages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
version TEXT NOT NULL,
source TEXT,
md5 TEXT,
configure_args TEXT,
build_commands TEXT,
install_commands TEXT,
dependencies TEXT,
enable_lto BOOLEAN NOT NULL DEFAULT 1,
enable_pgo BOOLEAN NOT NULL DEFAULT 1,
cflags TEXT,
ldflags TEXT,
profdata TEXT
)",
)
.execute(conn)
.context("creating packages table")?;
diesel::sql_query(
"CREATE UNIQUE INDEX IF NOT EXISTS idx_packages_name_version ON packages(name, version)",
)
.execute(conn)
.context("creating packages unique index")?;
Ok(())
}
/// Insert or update a package definition in the database.
pub fn upsert_package(conn: &mut SqliteConnection, definition: &PackageDefinition) -> Result<()> {
let record = NewPackage::try_from(definition)?;
diesel::insert_into(packages_dsl::packages)
.values(&record)
.on_conflict((packages_dsl::name, packages_dsl::version))
.do_update()
.set(&record)
.execute(conn)
.context("upserting package record")?;
Ok(())
}
/// Convenience helper to upsert via pool and return the persisted definition.
pub fn upsert_package_via_pool(pool: &Pool, definition: &PackageDefinition) -> Result<()> {
let mut conn = pool.get().context("acquiring database connection")?;
upsert_package(&mut conn, definition)
}
/// Load all packages from the database.
pub fn load_packages(conn: &mut SqliteConnection) -> Result<Vec<Package>> {
packages_dsl::packages
.order((packages_dsl::name, packages_dsl::version))
.load::<Package>(conn)
.context("loading packages from database")
}
/// Load packages using the shared pool.
pub fn load_packages_via_pool(pool: &Pool) -> Result<Vec<Package>> {
let mut conn = pool.get().context("acquiring database connection")?;
load_packages(&mut conn)
}

View file

@ -1,104 +0,0 @@
use anyhow::{Context, Result};
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
use crate::pkgs::package::PackageDefinition;
use super::schema::packages;
#[derive(Debug, Queryable, Serialize, Deserialize)]
pub struct Package {
pub id: i32,
pub name: String,
pub version: String,
pub source: Option<String>,
pub md5: Option<String>,
pub configure_args: Option<String>,
pub build_commands: Option<String>,
pub install_commands: Option<String>,
pub dependencies: Option<String>,
pub enable_lto: bool,
pub enable_pgo: bool,
pub cflags: Option<String>,
pub ldflags: Option<String>,
pub profdata: Option<String>,
}
impl Package {
pub fn into_definition(self) -> Result<PackageDefinition> {
Ok(PackageDefinition {
name: self.name,
version: self.version,
source: self.source,
md5: self.md5,
configure_args: parse_vec(self.configure_args)?,
build_commands: parse_vec(self.build_commands)?,
install_commands: parse_vec(self.install_commands)?,
dependencies: parse_vec(self.dependencies)?,
optimizations: crate::pkgs::package::OptimizationSettings {
enable_lto: self.enable_lto,
enable_pgo: self.enable_pgo,
cflags: parse_vec(self.cflags)?,
ldflags: parse_vec(self.ldflags)?,
profdata: self.profdata,
},
})
}
}
#[derive(Debug, Insertable, AsChangeset)]
#[diesel(table_name = packages)]
pub struct NewPackage {
pub name: String,
pub version: String,
pub source: Option<String>,
pub md5: Option<String>,
pub configure_args: Option<String>,
pub build_commands: Option<String>,
pub install_commands: Option<String>,
pub dependencies: Option<String>,
pub enable_lto: bool,
pub enable_pgo: bool,
pub cflags: Option<String>,
pub ldflags: Option<String>,
pub profdata: Option<String>,
}
impl TryFrom<&PackageDefinition> for NewPackage {
type Error = anyhow::Error;
fn try_from(value: &PackageDefinition) -> Result<Self> {
Ok(Self {
name: value.name.clone(),
version: value.version.clone(),
source: value.source.clone(),
md5: value.md5.clone(),
configure_args: serialize_vec(&value.configure_args)?,
build_commands: serialize_vec(&value.build_commands)?,
install_commands: serialize_vec(&value.install_commands)?,
dependencies: serialize_vec(&value.dependencies)?,
enable_lto: value.optimizations.enable_lto,
enable_pgo: value.optimizations.enable_pgo,
cflags: serialize_vec(&value.optimizations.cflags)?,
ldflags: serialize_vec(&value.optimizations.ldflags)?,
profdata: value.optimizations.profdata.clone(),
})
}
}
fn serialize_vec(values: &[String]) -> Result<Option<String>> {
if values.is_empty() {
Ok(None)
} else {
serde_json::to_string(values)
.map(Some)
.context("serializing vector to JSON")
}
}
fn parse_vec(raw: Option<String>) -> Result<Vec<String>> {
match raw {
Some(data) => serde_json::from_str(&data).context("parsing JSON vector"),
None => Ok(Vec::new()),
}
}

View file

@ -1,19 +0,0 @@
// Diesel schema for package storage. Maintained manually to avoid build-script dependency.
diesel::table! {
packages (id) {
id -> Integer,
name -> Text,
version -> Text,
source -> Nullable<Text>,
md5 -> Nullable<Text>,
configure_args -> Nullable<Text>,
build_commands -> Nullable<Text>,
install_commands -> Nullable<Text>,
dependencies -> Nullable<Text>,
enable_lto -> Bool,
enable_pgo -> Bool,
cflags -> Nullable<Text>,
ldflags -> Nullable<Text>,
profdata -> Nullable<Text>,
}
}

View file

@ -1,113 +0,0 @@
use anyhow::{Context, Result};
use regex::Regex;
use reqwest::blocking::Client;
use scraper::{Html, Selector};
use super::{BookPackage, FetchOptions};
use crate::ingest::lfs::split_name_version;
pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
let base = options.base_url.trim_end_matches('/');
let url = format!("{base}/book.html");
let client = Client::builder().build().context("building HTTP client")?;
let body = client
.get(&url)
.send()
.with_context(|| format!("fetching {}", url))?
.error_for_status()
.with_context(|| format!("request failed for {}", url))?
.text()
.context("reading response body")?;
parse_book_html(options, &url, &body)
}
pub fn parse_book_html(
options: &FetchOptions,
book_url: &str,
body: &str,
) -> Result<Vec<BookPackage>> {
let document = Html::parse_document(body);
let selector = Selector::parse("h1.sect1").unwrap();
let numbering_re =
Regex::new(r"^(?P<chapter>\d+)\.(?P<section>\d+)\.\s+(?P<title>.+)$").unwrap();
let mut results = Vec::new();
for heading in document.select(&selector) {
let text = heading
.text()
.map(str::trim)
.filter(|s| !s.is_empty())
.collect::<Vec<_>>()
.join(" ")
.replace('\n', " ")
.trim()
.to_string();
if text.is_empty() {
continue;
}
// BLFS headings often look like "33.2. Bzip2" or "33.2. Bzip2-1.0.8"
let caps = match numbering_re.captures(&text) {
Some(caps) => caps,
None => continue,
};
let chapter_num: u32 = caps["chapter"].parse().unwrap_or(0);
let section_num: u32 = caps["section"].parse().unwrap_or(0);
let title = caps["title"].trim();
let (name, version, variant) = match split_name_version(title) {
Some(parts) => parts,
None => continue,
};
let href = heading.value().id().map(|id| {
let mut base = book_url.to_string();
if !base.contains('#') {
base.push('#');
}
format!("{}{}", base, id)
});
let section_label = Some(format!("{}.{}", chapter_num, section_num));
results.push(BookPackage {
book: options.book,
chapter: Some(chapter_num),
section: section_label,
name,
version: Some(version),
href,
md5: None,
stage: None,
variant,
notes: None,
});
}
Ok(results)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::ingest::BookKind;
#[test]
fn parse_blfs_sample() {
let html = r#"
<html><body>
<h1 class=\"sect1\" id=\"ch33-bzip2\">33.2. Bzip2-1.0.8</h1>
<h1 class=\"sect1\" id=\"ch33-about\">33.1. Introduction</h1>
</body></html>
"#;
let opts = FetchOptions::new("https://example.invalid/blfs", BookKind::Blfs);
let items = parse_book_html(&opts, "https://example.invalid/blfs/book.html", html).unwrap();
assert_eq!(items.len(), 1);
assert_eq!(items[0].name, "Bzip2");
assert_eq!(items[0].version.as_deref(), Some("1.0.8"));
}
}

View file

@ -1,109 +0,0 @@
use anyhow::{Context, Result};
use regex::Regex;
use reqwest::blocking::Client;
use scraper::{Html, Selector};
use super::{BookPackage, FetchOptions};
use crate::ingest::lfs::split_name_version;
pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
let base = options.base_url.trim_end_matches('/');
let url = format!("{base}/book.html");
let client = Client::builder().build().context("building HTTP client")?;
let body = client
.get(&url)
.send()
.with_context(|| format!("fetching {}", url))?
.error_for_status()
.with_context(|| format!("request failed for {}", url))?
.text()
.context("reading response body")?;
parse_book_html(options, &url, &body)
}
pub fn parse_book_html(
options: &FetchOptions,
book_url: &str,
body: &str,
) -> Result<Vec<BookPackage>> {
let document = Html::parse_document(body);
let selector = Selector::parse("h1.sect1").unwrap();
let numbering_re =
Regex::new(r"^(?P<chapter>\d+)\.(?P<section>\d+)\.\s+(?P<title>.+)$").unwrap();
let mut results = Vec::new();
for heading in document.select(&selector) {
let text = heading
.text()
.map(str::trim)
.filter(|s| !s.is_empty())
.collect::<Vec<_>>()
.join(" ")
.replace('\n', " ")
.trim()
.to_string();
if text.is_empty() {
continue;
}
let caps = match numbering_re.captures(&text) {
Some(caps) => caps,
None => continue,
};
let chapter_num: u32 = caps["chapter"].parse().unwrap_or(0);
let section_num: u32 = caps["section"].parse().unwrap_or(0);
let title = caps["title"].trim();
let (name, version, variant) = match split_name_version(title) {
Some(parts) => parts,
None => continue,
};
let href = heading.value().id().map(|id| {
let mut base = book_url.to_string();
if !base.contains('#') {
base.push('#');
}
format!("{}{}", base, id)
});
results.push(BookPackage {
book: options.book,
chapter: Some(chapter_num),
section: Some(format!("{}.{}", chapter_num, section_num)),
name,
version: Some(version),
href,
md5: None,
stage: None,
variant,
notes: None,
});
}
Ok(results)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::ingest::BookKind;
#[test]
fn parse_glfs_sample() {
let html = r#"
<html><body>
<h1 class=\"sect1\" id=\"ch12-coreutils\">12.4. Coreutils-9.8</h1>
</body></html>
"#;
let opts = FetchOptions::new("https://example.invalid/glfs", BookKind::Glfs);
let items = parse_book_html(&opts, "https://example.invalid/glfs/book.html", html).unwrap();
assert_eq!(items.len(), 1);
assert_eq!(items[0].name, "Coreutils");
assert_eq!(items[0].version.as_deref(), Some("9.8"));
}
}

View file

@ -1,169 +0,0 @@
use anyhow::{Context, Result};
use regex::Regex;
use reqwest::blocking::Client;
use scraper::{Html, Selector};
use super::{BookPackage, FetchOptions};
pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
let base = options.base_url.trim_end_matches('/');
let url = format!("{base}/book.html");
let client = Client::builder().build().context("building HTTP client")?;
let body = client
.get(&url)
.send()
.with_context(|| format!("fetching {}", url))?
.error_for_status()
.with_context(|| format!("request failed for {}", url))?
.text()
.context("reading response body")?;
parse_book_html(options, &url, &body)
}
pub fn parse_book_html(
options: &FetchOptions,
book_url: &str,
body: &str,
) -> Result<Vec<BookPackage>> {
let document = Html::parse_document(body);
let selector = Selector::parse("h1.sect1").unwrap();
let numbering_re =
Regex::new(r"^(?P<chapter>\d+)\.(?P<section>\d+)\.\s+(?P<title>.+)$").unwrap();
let mut results = Vec::new();
for heading in document.select(&selector) {
let text = heading
.text()
.map(str::trim)
.filter(|s| !s.is_empty())
.collect::<Vec<_>>()
.join(" ")
.replace('\n', " ")
.trim()
.to_string();
if text.is_empty() {
continue;
}
let caps = match numbering_re.captures(&text) {
Some(caps) => caps,
None => continue,
};
let chapter_num: u32 = caps["chapter"].parse().unwrap_or(0);
let section_num: u32 = caps["section"].parse().unwrap_or(0);
let title = caps["title"].trim();
let (name, version, variant) = match split_name_version(title) {
Some(parts) => parts,
None => continue,
};
let stage = stage_for_chapter(chapter_num).map(|s| s.to_string());
let identifier = format!("{chapter_num}.{section_num:02}");
let href = heading.value().id().map(|id| {
let mut base = book_url.to_string();
if !base.contains('#') {
base.push('#');
}
format!("{}{}", base, id)
});
results.push(BookPackage {
book: options.book,
chapter: Some(chapter_num),
section: Some(identifier),
name,
version: Some(version),
href,
md5: None,
stage,
variant,
notes: None,
});
}
Ok(results)
}
pub(crate) fn split_name_version(title: &str) -> Option<(String, String, Option<String>)> {
// Find the last '-' whose next character is a digit (start of version)
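// e.g. "Binutils-2.45 - Pass 1" -> ("Binutils", "2.45", Some("Pass 1"))
//      "XML::Parser-2.47"       -> ("XML::Parser", "2.47", None)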
let bytes = title.as_bytes();
for idx in (0..bytes.len()).rev() {
if bytes[idx] == b'-' {
if let Some(next) = bytes.get(idx + 1) {
if next.is_ascii_digit() {
let name = title[..idx].trim();
let mut remainder = title[idx + 1..].trim();
if name.is_empty() || remainder.is_empty() {
return None;
}
let mut variant = None;
if let Some(pos) = remainder.find(" - ") {
variant = Some(remainder[pos + 3..].trim().to_string());
remainder = remainder[..pos].trim();
} else if let Some(pos) = remainder.find(" (") {
let note = remainder[pos + 1..].trim_end_matches(')').trim();
variant = Some(note.to_string());
remainder = remainder[..pos].trim();
}
return Some((name.to_string(), remainder.to_string(), variant));
}
}
}
}
None
}
fn stage_for_chapter(chapter: u32) -> Option<&'static str> {
match chapter {
5 => Some("cross-toolchain"),
6 | 7 => Some("temporary-tools"),
8 => Some("system"),
9 => Some("system-configuration"),
10 => Some("system-finalization"),
_ => None,
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::ingest::BookKind;
use scraper::{Html, Selector};
#[test]
fn parse_sample_headings() {
let html = r#"
<html><body>
<h1 class=\"sect1\" id=\"ch05-binutils-pass1\">5.5. Binutils-2.45 - Pass 1</h1>
<h1 class=\"sect1\" id=\"ch05-gcc-pass1\">5.6. GCC-15.2.0 - Pass 1</h1>
<h1 class=\"sect1\" id=\"ch09-bootscripts\">9.3. LFS-Bootscripts-20250827</h1>
<h1 class=\"sect1\" id=\"ch08-xml-parser\">8.41. XML::Parser-2.47</h1>
</body></html>
"#;
let opts = FetchOptions::new("https://example.invalid/lfs", BookKind::Mlfs);
let document = Html::parse_document(html);
let selector = Selector::parse("h1.sect1").unwrap();
assert!(
document.select(&selector).next().is_some(),
"sample headings selector returned no nodes"
);
let packages =
parse_book_html(&opts, "https://example.invalid/lfs/book.html", html).unwrap();
assert_eq!(packages.len(), 4);
assert_eq!(packages[0].name, "Binutils");
assert_eq!(packages[0].version.as_deref(), Some("2.45"));
assert_eq!(packages[0].variant.as_deref(), Some("Pass 1"));
assert_eq!(packages[0].stage.as_deref(), Some("cross-toolchain"));
assert_eq!(packages[1].variant.as_deref(), Some("Pass 1"));
assert_eq!(packages[2].variant, None);
assert_eq!(packages[3].name, "XML::Parser");
}
}

View file

@ -1,67 +0,0 @@
pub mod blfs;
pub mod glfs;
pub mod lfs;
use std::fmt;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum BookKind {
Lfs,
Mlfs,
Blfs,
Glfs,
}
impl fmt::Display for BookKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let label = match self {
BookKind::Lfs => "lfs",
BookKind::Mlfs => "mlfs",
BookKind::Blfs => "blfs",
BookKind::Glfs => "glfs",
};
f.write_str(label)
}
}
#[derive(Debug, Clone)]
pub struct BookPackage {
pub book: BookKind,
pub chapter: Option<u32>,
pub section: Option<String>,
pub name: String,
pub version: Option<String>,
pub href: Option<String>,
pub md5: Option<String>,
pub stage: Option<String>,
pub variant: Option<String>,
pub notes: Option<String>,
}
impl BookPackage {
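/// Builds a stable identifier, e.g. MLFS "Binutils" with variant "Pass 1"
/// yields "mlfs-Binutils-pass-1".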
pub fn identifier(&self) -> String {
match &self.variant {
Some(variant) if !variant.is_empty() => {
format!(
"{}-{}-{}",
self.book,
self.name,
variant.replace(' ', "-").to_lowercase()
)
}
_ => format!("{}-{}", self.book, self.name),
}
}
}
#[derive(Debug, Clone)]
pub struct FetchOptions<'a> {
pub base_url: &'a str,
pub book: BookKind,
}
impl<'a> FetchOptions<'a> {
pub fn new(base_url: &'a str, book: BookKind) -> Self {
Self { base_url, book }
}
}

View file

@ -1,12 +1,2 @@
pub mod ai;
pub mod db;
pub mod html;
pub mod ingest;
pub mod md5_utils;
pub mod mirrors;
pub mod pkgs;
pub mod version_check;
pub mod wget_list;
#[cfg(feature = "tui")]
pub mod tui;

View file

@ -1,427 +1,6 @@
use std::{collections::BTreeSet, env, fs, path::PathBuf};
use anyhow::{Context, Result, anyhow};
use clap::{CommandFactory, Parser, Subcommand};
use package_management::{
db, html, md5_utils,
pkgs::{
by_name::bi::binutils::cross_toolchain::build_binutils_from_page,
mlfs,
scaffolder::{self, ScaffoldRequest},
},
version_check, wget_list,
};
#[cfg(feature = "tui")]
use package_management::tui::disk_manager::DiskManager;
#[derive(Parser)]
#[command(name = "lpkg", version, about = "LPKG Lightweight Package Manager", long_about = None)]
struct Cli {
/// Command to run. Defaults to launching the TUI (when available).
#[command(subcommand)]
command: Option<Command>,
}
#[derive(Subcommand)]
enum Command {
/// Run one of the automated workflows.
Workflow {
#[command(subcommand)]
workflow: WorkflowCommand,
},
/// Launch interactive terminal UIs.
#[cfg(feature = "tui")]
#[command(subcommand)]
Tui(TuiCommand),
}
#[derive(Subcommand)]
enum WorkflowCommand {
/// Fetch <pre> blocks from the given URL and run version checks found inside them.
EnvCheck {
/// URL of the Linux From Scratch page containing ver_check/ver_kernel snippets.
url: String,
},
/// Download the LFS wget-list and md5sums, optionally writing them to disk.
FetchManifests {
/// Output directory to store wget-list and md5sums files. Uses current dir if omitted.
#[arg(long)]
output: Option<PathBuf>,
},
/// Parse the Binutils Pass 1 page and build it using the extracted steps.
BuildBinutils {
/// URL of the Binutils Pass 1 instructions to parse.
url: String,
/// Root directory of the LFS workspace (used for $LFS paths).
#[arg(long = "lfs-root")]
lfs_root: PathBuf,
/// Optional explicit cross-compilation target (defaults to $LFS_TGT env or x86_64-lfs-linux-gnu).
#[arg(long)]
target: Option<String>,
},
/// Scaffold a new package module under `src/pkgs/by_name` with tuned optimizations.
ScaffoldPackage {
/// Logical package name (used for module layout and metadata).
#[arg(long)]
name: String,
/// Upstream version string.
#[arg(long)]
version: String,
/// Optional source archive URL.
#[arg(long)]
source: Option<String>,
/// Optional MD5 checksum of the source archive.
#[arg(long)]
md5: Option<String>,
/// Additional configure arguments (repeat flag).
#[arg(long = "configure-arg", value_name = "ARG")]
configure_arg: Vec<String>,
/// Build commands (repeat flag).
#[arg(long = "build-cmd", value_name = "CMD")]
build_cmd: Vec<String>,
/// Install commands (repeat flag).
#[arg(long = "install-cmd", value_name = "CMD")]
install_cmd: Vec<String>,
/// Declared dependencies (repeat flag).
#[arg(long = "dependency", value_name = "PKG")]
dependency: Vec<String>,
/// Whether to enable LTO (defaults to true).
#[arg(long = "enable-lto", default_value_t = true)]
enable_lto: bool,
/// Whether to enable PGO instrumentation/use (defaults to true).
#[arg(long = "enable-pgo", default_value_t = true)]
enable_pgo: bool,
/// Additional CFLAGS (repeat flag).
#[arg(long = "cflag", value_name = "FLAG")]
cflag: Vec<String>,
/// Additional LDFLAGS (repeat flag).
#[arg(long = "ldflag", value_name = "FLAG")]
ldflag: Vec<String>,
/// Optional profile data file name for PGO replay (enables -fprofile-use).
#[arg(long)]
profdata: Option<String>,
/// Base directory for module generation (defaults to src/pkgs/by_name).
#[arg(long, default_value = "src/pkgs/by_name")]
base: PathBuf,
},
/// Import all packages from the MLFS catalogue, scaffolding modules and persisting metadata.
ImportMlfs {
/// Perform a dry run without writing files or touching the database.
#[arg(long, default_value_t = false)]
dry_run: bool,
/// Only process the first N records (after deduplication).
#[arg(long)]
limit: Option<usize>,
/// Base directory for module generation (defaults to src/pkgs/by_name).
#[arg(long, default_value = "src/pkgs/by_name")]
base: PathBuf,
/// Overwrite existing modules by deleting and regenerating them.
#[arg(long, default_value_t = false)]
overwrite: bool,
/// Source URL for the MLFS book (defaults to the canonical mirror).
#[arg(long = "source-url")]
source_url: Option<String>,
},
}
#[cfg(feature = "tui")]
#[derive(Subcommand)]
enum TuiCommand {
/// Launch the disk manager UI.
DiskManager,
}
fn main() -> Result<()> {
let _ = tracing_subscriber::fmt::try_init();
let cli = Cli::parse();
match cli.command {
Some(Command::Workflow { workflow }) => run_workflow(workflow)?,
#[cfg(feature = "tui")]
Some(Command::Tui(cmd)) => run_tui(cmd)?,
None => {
#[cfg(feature = "tui")]
{
println!(
"No command specified. Launching disk manager TUI. Use `lpkg help` for more options."
);
DiskManager::run_tui().map_err(|e| anyhow!(e.to_string()))?;
}
#[cfg(not(feature = "tui"))]
{
Cli::command().print_help()?;
println!();
}
}
}
Ok(())
}
fn run_workflow(cmd: WorkflowCommand) -> Result<()> {
match cmd {
WorkflowCommand::EnvCheck { url } => {
let pre_blocks = html::fetch_pre_blocks(&url)
.with_context(|| format!("Fetching HTML `<pre>` blocks from {url}"))?;
let mut ran_any = false;
let mut failures = Vec::new();
for (idx, block) in pre_blocks.iter().enumerate() {
if !(block.contains("ver_check") || block.contains("ver_kernel")) {
continue;
}
ran_any = true;
println!("Running version checks from block #{idx}...");
if !version_check::run_version_checks_from_block(block) {
failures.push(idx + 1);
}
}
if !ran_any {
return Err(anyhow!(
"No ver_check or ver_kernel snippets found at {url}."
));
}
if !failures.is_empty() {
return Err(anyhow!("Version checks failed in block(s): {:?}", failures));
}
println!("All version checks passed 👍");
}
WorkflowCommand::FetchManifests { output } => {
let wget_list = wget_list::get_wget_list().context("Fetching wget-list")?;
let md5sums = md5_utils::get_md5sums().context("Fetching md5sums")?;
println!("Fetched wget-list ({} bytes)", wget_list.len());
println!("Fetched md5sums ({} bytes)", md5sums.len());
let target_dir = output.unwrap_or(std::env::current_dir()?);
fs::create_dir_all(&target_dir)
.with_context(|| format!("Creating output directory at {:?}", target_dir))?;
let wget_path = target_dir.join("wget-list");
let md5_path = target_dir.join("md5sums");
fs::write(&wget_path, wget_list).with_context(|| format!("Writing {wget_path:?}"))?;
fs::write(&md5_path, md5sums).with_context(|| format!("Writing {md5_path:?}"))?;
println!("Saved artifacts to {:?} and {:?}", wget_path, md5_path);
}
WorkflowCommand::BuildBinutils {
url,
lfs_root,
target,
} => {
let runtime = tokio::runtime::Runtime::new().context("Creating async runtime")?;
runtime
.block_on(build_binutils_from_page(&url, &lfs_root, target))
.map_err(|e| anyhow!("Building Binutils using instructions from {url}: {e}"))?;
println!("Binutils workflow completed successfully");
}
WorkflowCommand::ScaffoldPackage {
name,
version,
source,
md5,
configure_arg,
build_cmd,
install_cmd,
dependency,
enable_lto,
enable_pgo,
cflag,
ldflag,
profdata,
base,
} => {
let base_dir = if base.is_relative() {
env::current_dir()
.context("Resolving scaffold base directory")?
.join(base)
} else {
base
};
let request = ScaffoldRequest {
name: name.clone(),
version: version.clone(),
source,
md5,
configure_args: configure_arg,
build_commands: build_cmd,
install_commands: install_cmd,
dependencies: dependency,
enable_lto,
enable_pgo,
cflags: cflag,
ldflags: ldflag,
profdata,
stage: None,
variant: None,
notes: None,
module_override: None,
};
let scaffold = scaffolder::scaffold_package(&base_dir, request)
.with_context(|| format!("Scaffolding package {name}"))?;
let pool = db::establish_pool().context("Setting up package database")?;
db::upsert_package_via_pool(&pool, &scaffold.definition)
.with_context(|| format!("Persisting package metadata for {name}"))?;
println!("Generated module: {:?}", scaffold.module_path);
println!(
"Remember to stage and commit as `{name}: init at {version}` after reviewing the template"
);
}
WorkflowCommand::ImportMlfs {
dry_run,
limit,
base,
overwrite,
source_url,
} => {
let base_dir = if base.is_relative() {
env::current_dir()
.context("Resolving MLFS scaffold base directory")?
.join(base)
} else {
base
};
let mut records = mlfs::load_or_fetch_catalog(source_url.as_deref())
.context("Loading MLFS catalogue")?;
records.sort_by(|a, b| a.name.cmp(&b.name).then(a.variant.cmp(&b.variant)));
let mut seen = BTreeSet::new();
let mut processed = 0usize;
let mut created = 0usize;
let mut skipped = Vec::new();
let pool = if dry_run {
None
} else {
Some(db::establish_pool().context("Setting up package database")?)
};
for record in records {
let module_alias = record.module_alias();
if !seen.insert(module_alias.clone()) {
continue;
}
if let Some(limit) = limit {
if processed >= limit {
break;
}
}
processed += 1;
if dry_run {
println!(
"Would scaffold {:<18} {:<12} -> {}",
record.name, record.version, module_alias
);
continue;
}
let request = ScaffoldRequest {
name: record.name.clone(),
version: record.version.clone(),
source: None,
md5: None,
configure_args: Vec::new(),
build_commands: Vec::new(),
install_commands: Vec::new(),
dependencies: Vec::new(),
enable_lto: true,
enable_pgo: true,
cflags: Vec::new(),
ldflags: Vec::new(),
profdata: None,
stage: record.stage.clone(),
variant: record.variant.clone(),
notes: record.notes.clone(),
module_override: Some(module_alias.clone()),
};
match scaffolder::scaffold_package(&base_dir, request) {
Ok(result) => {
if let Some(pool) = &pool {
db::upsert_package_via_pool(pool, &result.definition).with_context(
|| {
format!(
"Persisting MLFS package metadata for {} {}",
record.name, record.version
)
},
)?;
}
println!(
"Scaffolded {:<18} {:<12} -> {}",
record.name, record.version, module_alias
);
created += 1;
}
Err(err) => {
let already_exists =
err.to_string().to_lowercase().contains("already exists");
if already_exists && !overwrite {
skipped.push(module_alias);
} else {
return Err(err);
}
}
}
}
if dry_run {
println!(
"Dry run complete. {} package definitions queued.",
processed
);
} else {
println!(
"MLFS import complete. Created {} modules, skipped {} (already existed).",
created,
skipped.len()
);
if !skipped.is_empty() {
println!(
"Skipped modules: {}",
skipped
.iter()
.take(10)
.cloned()
.collect::<Vec<_>>()
.join(", ")
);
if skipped.len() > 10 {
println!("... and {} more", skipped.len() - 10);
}
}
}
}
}
Ok(())
}
#[cfg(feature = "tui")]
fn run_tui(cmd: TuiCommand) -> Result<()> {
match cmd {
TuiCommand::DiskManager => {
DiskManager::run_tui().map_err(|e| anyhow!(e.to_string()))?;
}
}
Ok(())
}
mod tui;
fn main() -> Result<(), Box<dyn std::error::Error>> {
tui::disk_manager::DiskManager::run_tui()?;
Ok(())
}

View file

@ -1,38 +0,0 @@
// MLFS metadata: stage: cross-toolchain, variant: Pass 1
use crate::pkgs::package::{OptimizationSettings, PackageDefinition};
pub fn definition() -> PackageDefinition {
let mut pkg = PackageDefinition::new("Binutils", "2.45");
pkg.source =
Some("https://sourceware.org/pub/binutils/releases/binutils-2.45.tar.xz".to_string());
pkg.md5 = Some("dee5b4267e0305a99a3c9d6131f45759".to_string());
pkg.configure_args = Vec::new();
pkg.build_commands = vec![
"mkdir -v build".to_string(),
"cd build".to_string(),
"../configure --prefix=$LFS/tools \\".to_string(),
"--with-sysroot=$LFS \\".to_string(),
"--target=$LFS_TGT \\".to_string(),
"--disable-nls \\".to_string(),
"--enable-gprofng=no \\".to_string(),
"--disable-werror \\".to_string(),
"--enable-new-dtags \\".to_string(),
"--enable-default-hash-style=gnu".to_string(),
"make".to_string(),
];
pkg.install_commands = vec!["make install".to_string()];
pkg.dependencies = Vec::new();
let profdata = None;
let profdata_clone = profdata.clone();
pkg.optimizations = match profdata_clone {
Some(path) => OptimizationSettings::for_pgo_replay(path),
None => OptimizationSettings::default(),
};
pkg.optimizations.enable_lto = true;
pkg.optimizations.enable_pgo = true;
pkg.optimizations.cflags = vec!["-O3".to_string(), "-flto".to_string()];
pkg.optimizations.ldflags = vec!["-flto".to_string()];
pkg.optimizations.profdata = profdata;
pkg
}

View file

@ -1,2 +1 @@
pub mod binutils;
pub mod binutils_pass_1;

View file

@ -1,66 +0,0 @@
// MLFS metadata: stage: cross-toolchain, variant: Pass 1
use crate::pkgs::package::{OptimizationSettings, PackageDefinition};
pub fn definition() -> PackageDefinition {
let mut pkg = PackageDefinition::new("GCC", "15.2.0");
pkg.source = Some("https://ftp.gnu.org/gnu/gcc/gcc-15.2.0/gcc-15.2.0.tar.xz".to_string());
pkg.md5 = Some("7c32c39b8b6e3ae85f25156228156061".to_string());
pkg.configure_args = Vec::new();
pkg.build_commands = vec![
"tar -xf ../mpfr-4.2.2.tar.xz".to_string(),
"mv -v mpfr-4.2.2 mpfr".to_string(),
"tar -xf ../gmp-6.3.0.tar.xz".to_string(),
"mv -v gmp-6.3.0 gmp".to_string(),
"tar -xf ../mpc-1.3.1.tar.gz".to_string(),
"mv -v mpc-1.3.1 mpc".to_string(),
"sed -e '/m64=/s/lib64/lib/' \\".to_string(),
"-e '/m32=/s/m32=.*/m32=..\\/lib32$(call if_multiarch,:i386-linux-gnu)/' \\".to_string(),
"-i.orig gcc/config/i386/t-linux64".to_string(),
"sed '/STACK_REALIGN_DEFAULT/s/0/(!TARGET_64BIT \\&\\& TARGET_SSE)/' \\".to_string(),
"-i gcc/config/i386/i386.h".to_string(),
"mkdir -v build".to_string(),
"cd build".to_string(),
"mlist=m64,m32".to_string(),
"../configure \\".to_string(),
"--target=$LFS_TGT \\".to_string(),
"--prefix=$LFS/tools \\".to_string(),
"--with-glibc-version=2.42 \\".to_string(),
"--with-sysroot=$LFS \\".to_string(),
"--with-newlib \\".to_string(),
"--without-headers \\".to_string(),
"--enable-default-pie \\".to_string(),
"--enable-default-ssp \\".to_string(),
"--enable-initfini-array \\".to_string(),
"--disable-nls \\".to_string(),
"--disable-shared \\".to_string(),
"--enable-multilib --with-multilib-list=$mlist \\".to_string(),
"--disable-decimal-float \\".to_string(),
"--disable-threads \\".to_string(),
"--disable-libatomic \\".to_string(),
"--disable-libgomp \\".to_string(),
"--disable-libquadmath \\".to_string(),
"--disable-libssp \\".to_string(),
"--disable-libvtv \\".to_string(),
"--disable-libstdcxx \\".to_string(),
"--enable-languages=c,c++".to_string(),
"make".to_string(),
"cd ..".to_string(),
"cat gcc/limitx.h gcc/glimits.h gcc/limity.h > \\".to_string(),
"`dirname $($LFS_TGT-gcc -print-libgcc-file-name)`/include/limits.h".to_string(),
];
pkg.install_commands = vec!["make install".to_string()];
pkg.dependencies = Vec::new();
let profdata = None;
let profdata_clone = profdata.clone();
pkg.optimizations = match profdata_clone {
Some(path) => OptimizationSettings::for_pgo_replay(path),
None => OptimizationSettings::default(),
};
pkg.optimizations.enable_lto = true;
pkg.optimizations.enable_pgo = true;
pkg.optimizations.cflags = vec!["-O3".to_string(), "-flto".to_string()];
pkg.optimizations.ldflags = vec!["-flto".to_string()];
pkg.optimizations.profdata = profdata;
pkg
}

View file

@ -1 +0,0 @@
pub mod gcc_pass_1;

View file

@ -1,74 +0,0 @@
// MLFS metadata: stage: cross-toolchain
use crate::pkgs::package::{OptimizationSettings, PackageDefinition};
pub fn definition() -> PackageDefinition {
let mut pkg = PackageDefinition::new("Glibc", "2.42");
pkg.source = Some("https://ftp.gnu.org/gnu/glibc/glibc-2.42.tar.xz".to_string());
pkg.md5 = Some("23c6f5a27932b435cae94e087cb8b1f5".to_string());
pkg.configure_args = Vec::new();
pkg.build_commands = vec![
"ln -sfv ../lib/ld-linux-x86-64.so.2 $LFS/lib64".to_string(),
"ln -sfv ../lib/ld-linux-x86-64.so.2 $LFS/lib64/ld-lsb-x86-64.so.3".to_string(),
"patch -Np1 -i ../glibc-2.42-fhs-1.patch".to_string(),
"mkdir -v build".to_string(),
"cd build".to_string(),
"echo \"rootsbindir=/usr/sbin\" > configparms".to_string(),
"../configure \\".to_string(),
"--prefix=/usr \\".to_string(),
"--host=$LFS_TGT \\".to_string(),
"--build=$(../scripts/config.guess) \\".to_string(),
"--disable-nscd \\".to_string(),
"libc_cv_slibdir=/usr/lib \\".to_string(),
"--enable-kernel=5.4".to_string(),
"make".to_string(),
"make DESTDIR=$LFS install".to_string(),
"sed '/RTLDLIST=/s@/usr@@g' -i $LFS/usr/bin/ldd".to_string(),
"echo 'int main(){}' | $LFS_TGT-gcc -x c - -v -Wl,--verbose &> dummy.log".to_string(),
"readelf -l a.out | grep ': /lib'".to_string(),
"grep -E -o \"$LFS/lib.*/S?crt[1in].*succeeded\" dummy.log".to_string(),
"grep -B3 \"^ $LFS/usr/include\" dummy.log".to_string(),
"grep 'SEARCH.*/usr/lib' dummy.log |sed 's|; |\\n|g'".to_string(),
"grep \"/lib.*/libc.so.6 \" dummy.log".to_string(),
"grep found dummy.log".to_string(),
"rm -v a.out dummy.log".to_string(),
"make clean".to_string(),
"find .. -name \"*.a\" -delete".to_string(),
"CC=\"$LFS_TGT-gcc -m32\" \\".to_string(),
"CXX=\"$LFS_TGT-g++ -m32\" \\".to_string(),
"../configure \\".to_string(),
"--prefix=/usr \\".to_string(),
"--host=$LFS_TGT32 \\".to_string(),
"--build=$(../scripts/config.guess) \\".to_string(),
"--disable-nscd \\".to_string(),
"--with-headers=$LFS/usr/include \\".to_string(),
"--libdir=/usr/lib32 \\".to_string(),
"--libexecdir=/usr/lib32 \\".to_string(),
"libc_cv_slibdir=/usr/lib32 \\".to_string(),
"--enable-kernel=5.4".to_string(),
"make".to_string(),
"make DESTDIR=$PWD/DESTDIR install".to_string(),
"cp -a DESTDIR/usr/lib32 $LFS/usr/".to_string(),
"install -vm644 DESTDIR/usr/include/gnu/{lib-names,stubs}-32.h \\".to_string(),
"$LFS/usr/include/gnu/".to_string(),
"ln -svf ../lib32/ld-linux.so.2 $LFS/lib/ld-linux.so.2".to_string(),
"echo 'int main(){}' > dummy.c".to_string(),
"$LFS_TGT-gcc -m32 dummy.c".to_string(),
"readelf -l a.out | grep '/ld-linux'".to_string(),
"rm -v dummy.c a.out".to_string(),
];
pkg.install_commands = Vec::new();
pkg.dependencies = Vec::new();
let profdata = None;
let profdata_clone = profdata.clone();
pkg.optimizations = match profdata_clone {
Some(path) => OptimizationSettings::for_pgo_replay(path),
None => OptimizationSettings::default(),
};
pkg.optimizations.enable_lto = true;
pkg.optimizations.enable_pgo = true;
pkg.optimizations.cflags = vec!["-O3".to_string(), "-flto".to_string()];
pkg.optimizations.ldflags = vec!["-flto".to_string()];
pkg.optimizations.profdata = profdata;
pkg
}

View file

@ -1 +0,0 @@
pub mod glibc;

View file

@ -1,30 +0,0 @@
// MLFS metadata: stage: cross-toolchain
use crate::pkgs::package::{OptimizationSettings, PackageDefinition};
pub fn definition() -> PackageDefinition {
let mut pkg = PackageDefinition::new("Linux", "6.16.9 API Headers");
pkg.source = None;
pkg.md5 = None;
pkg.configure_args = Vec::new();
pkg.build_commands = vec![
"make mrproper".to_string(),
"make headers".to_string(),
"find usr/include -type f ! -name '*.h' -delete".to_string(),
"cp -rv usr/include $LFS/usr".to_string(),
];
pkg.install_commands = Vec::new();
pkg.dependencies = Vec::new();
let profdata = None;
let profdata_clone = profdata.clone();
pkg.optimizations = match profdata_clone {
Some(path) => OptimizationSettings::for_pgo_replay(path),
None => OptimizationSettings::default(),
};
pkg.optimizations.enable_lto = true;
pkg.optimizations.enable_pgo = true;
pkg.optimizations.cflags = vec!["-O3".to_string(), "-flto".to_string()];
pkg.optimizations.ldflags = vec!["-flto".to_string()];
pkg.optimizations.profdata = profdata;
pkg
}

View file

@ -1 +0,0 @@
pub mod linux;

View file

@ -1,4 +1 @@
pub mod bi;
pub mod gc;
pub mod gl;
pub mod li;

View file

@ -1,236 +0,0 @@
use std::collections::HashSet;
use std::fs;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result, anyhow};
use serde::Deserialize;
use crate::pkgs::scaffolder::{self, ScaffoldRequest};
#[derive(Debug, Deserialize)]
struct HarvestedPackage {
package: HarvestedMetadata,
source: HarvestedSource,
#[serde(default)]
build: Vec<CommandPhase>,
#[serde(default)]
dependencies: Option<HarvestedDependencies>,
optimizations: HarvestedOptimisations,
}
#[derive(Debug, Deserialize)]
struct HarvestedMetadata {
id: String,
name: String,
version: String,
#[serde(default)]
stage: Option<String>,
#[serde(default)]
variant: Option<String>,
#[serde(default)]
notes: Option<String>,
}
#[derive(Debug, Deserialize)]
struct HarvestedSource {
#[serde(default)]
archive: Option<String>,
#[serde(default)]
urls: Vec<HarvestedUrl>,
#[serde(default)]
checksums: Vec<HarvestedChecksum>,
}
#[derive(Debug, Deserialize)]
struct HarvestedUrl {
url: String,
}
#[derive(Debug, Deserialize)]
struct HarvestedChecksum {
alg: String,
value: String,
}
#[derive(Debug, Deserialize)]
struct HarvestedOptimisations {
enable_lto: bool,
enable_pgo: bool,
#[serde(default)]
cflags: Vec<String>,
#[serde(default)]
ldflags: Vec<String>,
#[serde(default)]
profdata: Option<String>,
}
#[derive(Debug, Deserialize)]
struct CommandPhase {
#[serde(default)]
phase: Option<String>,
#[serde(default)]
commands: Vec<String>,
#[serde(default)]
cwd: Option<String>,
#[serde(default)]
requires_root: Option<bool>,
#[serde(default)]
notes: Option<String>,
}
#[derive(Debug, Deserialize)]
struct HarvestedDependencies {
#[serde(default)]
build: Vec<String>,
#[serde(default)]
runtime: Vec<String>,
}
/// Generate a Rust module from harvested metadata, returning the path to the generated file.
pub fn generate_module(
metadata_path: impl AsRef<Path>,
base_dir: impl AsRef<Path>,
) -> Result<PathBuf> {
let harvested = parse_metadata(metadata_path.as_ref())?;
let request = build_request(&harvested)?;
let result = scaffolder::scaffold_package(base_dir.as_ref(), request)?;
Ok(result.module_path)
}
/// Compute the directory for a module derived from the given metadata.
pub fn module_directory(
metadata_path: impl AsRef<Path>,
base_dir: impl AsRef<Path>,
) -> Result<PathBuf> {
let harvested = parse_metadata(metadata_path.as_ref())?;
let slug = module_override_from_id(&harvested.package.id).ok_or_else(|| {
anyhow!(
"unable to derive module slug from id '{}'",
harvested.package.id
)
})?;
let module = sanitize_module_name(&slug);
let dir = base_dir
.as_ref()
.join(prefix_from_module(&module))
.join(module);
Ok(dir)
}
fn build_request(pkg: &HarvestedPackage) -> Result<ScaffoldRequest> {
let slug = module_override_from_id(&pkg.package.id)
.ok_or_else(|| anyhow!("unable to derive module slug from id '{}'", pkg.package.id))?;
let mut build_commands = Vec::new();
let mut install_commands = Vec::new();
for command in flatten_commands(&pkg.build) {
if command.contains("make install") {
install_commands.push(command);
} else {
build_commands.push(command);
}
}
let mut dependencies = HashSet::new();
if let Some(deps) = &pkg.dependencies {
for dep in &deps.build {
dependencies.insert(dep.clone());
}
for dep in &deps.runtime {
dependencies.insert(dep.clone());
}
}
let mut dependencies: Vec<String> = dependencies.into_iter().collect();
dependencies.sort();
let request = ScaffoldRequest {
name: pkg.package.name.clone(),
version: pkg.package.version.clone(),
source: pkg.source.urls.first().map(|u| u.url.clone()),
md5: pkg
.source
.checksums
.iter()
.find(|c| c.alg.eq_ignore_ascii_case("md5"))
.map(|c| c.value.clone()),
configure_args: Vec::new(),
build_commands,
install_commands,
dependencies,
enable_lto: pkg.optimizations.enable_lto,
enable_pgo: pkg.optimizations.enable_pgo,
cflags: pkg.optimizations.cflags.clone(),
ldflags: pkg.optimizations.ldflags.clone(),
profdata: pkg.optimizations.profdata.clone(),
stage: pkg.package.stage.clone(),
variant: pkg.package.variant.clone(),
notes: pkg.package.notes.clone(),
module_override: Some(slug),
};
Ok(request)
}
fn flatten_commands(phases: &[CommandPhase]) -> Vec<String> {
phases
.iter()
.flat_map(|phase| phase.commands.iter().cloned())
.collect()
}
fn module_override_from_id(id: &str) -> Option<String> {
let slug = match id.split_once('/') {
Some((_, slug)) => slug,
None => id,
};
Some(
slug.replace('.', "_")
.replace('/', "_")
.replace('-', "_")
.replace(' ', "_")
.to_lowercase(),
)
}
fn parse_metadata(path: &Path) -> Result<HarvestedPackage> {
let metadata = fs::read_to_string(path)
.with_context(|| format!("reading metadata file {}", path.display()))?;
let harvested: HarvestedPackage = serde_json::from_str(&metadata)
.with_context(|| format!("parsing harvested metadata from {}", path.display()))?;
Ok(harvested)
}
fn sanitize_module_name(name: &str) -> String {
let mut out = String::new();
for ch in name.chars() {
if ch.is_ascii_alphanumeric() {
out.push(ch.to_ascii_lowercase());
} else if ch == '_' || ch == '+' || ch == '-' {
out.push('_');
} else {
out.push('_');
}
}
if out.is_empty() {
out.push_str("pkg");
}
if out
.chars()
.next()
.map(|c| c.is_ascii_digit())
.unwrap_or(false)
{
out.insert(0, 'p');
}
out
}
fn prefix_from_module(module: &str) -> String {
let mut chars = module.chars();
let first = chars.next().unwrap_or('p');
let second = chars.next().unwrap_or('k');
let mut s = String::new();
s.push(first);
s.push(second);
s
}

View file

@ -1,116 +0,0 @@
use std::borrow::Cow;
use anyhow::{Context, Result, anyhow};
use serde::{Deserialize, Serialize};
use crate::ingest::{BookKind, BookPackage, FetchOptions, lfs};
use crate::pkgs::package::PackageDefinition;
pub const DEFAULT_MLFS_BASE_URL: &str = "https://linuxfromscratch.org/~thomas/multilib-m32";
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MlfsPackageRecord {
pub name: String,
pub version: String,
pub chapter: Option<u32>,
pub section: Option<String>,
#[serde(default)]
pub stage: Option<String>,
#[serde(default)]
pub variant: Option<String>,
#[serde(default)]
pub notes: Option<String>,
}
impl MlfsPackageRecord {
pub fn id(&self) -> String {
let mut id = self.name.replace('+', "plus");
if let Some(variant) = &self.variant {
id.push('_');
id.push_str(&variant.replace('-', "_"));
}
id
}
pub fn module_alias(&self) -> String {
self.id()
.replace('.', "_")
.replace('/', "_")
.replace(' ', "_")
.to_lowercase()
}
pub fn display_label(&self) -> Cow<'_, str> {
match (&self.section, &self.variant) {
(Some(section), Some(variant)) => Cow::from(format!("{} ({})", section, variant)),
(Some(section), None) => Cow::from(section.as_str()),
(None, Some(variant)) => Cow::from(variant.as_str()),
_ => Cow::from(self.name.as_str()),
}
}
pub fn to_package_definition(&self) -> PackageDefinition {
let mut pkg = PackageDefinition::new(&self.name, &self.version);
if let Some(stage) = &self.stage {
pkg.optimizations
.cflags
.push(format!("-DLPKG_STAGE={}", stage.to_uppercase()));
}
if let Some(variant) = &self.variant {
pkg.optimizations
.cflags
.push(format!("-DLPKG_VARIANT={}", variant.to_uppercase()));
}
if let Some(notes) = &self.notes {
pkg.optimizations
.cflags
.push(format!("-DLPKG_NOTES={}", notes.replace(' ', "_")));
}
pkg
}
fn from_book_package(pkg: BookPackage) -> Option<Self> {
let version = pkg.version?;
Some(Self {
name: pkg.name,
version,
chapter: pkg.chapter,
section: pkg.section,
stage: pkg.stage,
variant: pkg.variant,
notes: pkg.notes,
})
}
}
pub fn fetch_catalog(base_url: &str) -> Result<Vec<MlfsPackageRecord>> {
let options = FetchOptions::new(base_url, BookKind::Mlfs);
let packages = lfs::fetch_book(&options)?;
let mut records = packages
.into_iter()
.filter_map(MlfsPackageRecord::from_book_package)
.collect::<Vec<_>>();
if records.is_empty() {
return Err(anyhow!("No packages parsed from MLFS book at {base_url}."));
}
records.sort_by(|a, b| a.name.cmp(&b.name).then(a.variant.cmp(&b.variant)));
Ok(records)
}
pub fn load_cached_catalog() -> Result<Vec<MlfsPackageRecord>> {
let raw = include_str!("../../data/mlfs_ml-12.4-40-multilib.json");
let records: Vec<MlfsPackageRecord> =
serde_json::from_str(raw).context("parsing cached MLFS package manifest")?;
Ok(records)
}
pub fn load_or_fetch_catalog(base_url: Option<&str>) -> Result<Vec<MlfsPackageRecord>> {
let base = base_url.unwrap_or(DEFAULT_MLFS_BASE_URL);
match fetch_catalog(base) {
Ok(records) => Ok(records),
Err(err) => {
tracing::warn!("mlfs_fetch_error" = %err, "Falling back to cached MLFS package list");
load_cached_catalog()
}
}
}

View file

@ -1,5 +1 @@
pub mod by_name;
pub mod generator;
pub mod mlfs;
pub mod package;
pub mod scaffolder;

View file

@ -1,74 +0,0 @@
use serde::{Deserialize, Serialize};
/// High-level description of a package managed by LPKG.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PackageDefinition {
pub name: String,
pub version: String,
pub source: Option<String>,
pub md5: Option<String>,
pub configure_args: Vec<String>,
pub build_commands: Vec<String>,
pub install_commands: Vec<String>,
pub dependencies: Vec<String>,
pub optimizations: OptimizationSettings,
}
impl PackageDefinition {
pub fn new(name: impl Into<String>, version: impl Into<String>) -> Self {
Self {
name: name.into(),
version: version.into(),
source: None,
md5: None,
configure_args: Vec::new(),
build_commands: Vec::new(),
install_commands: Vec::new(),
dependencies: Vec::new(),
optimizations: OptimizationSettings::default(),
}
}
}
/// Tunable compiler and linker flags applied during package builds.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OptimizationSettings {
pub enable_lto: bool,
pub enable_pgo: bool,
pub cflags: Vec<String>,
pub ldflags: Vec<String>,
pub profdata: Option<String>,
}
impl Default for OptimizationSettings {
fn default() -> Self {
Self {
enable_lto: true,
enable_pgo: true,
cflags: vec![
"-O3".to_string(),
"-flto".to_string(),
"-fprofile-generate".to_string(),
],
ldflags: vec!["-flto".to_string(), "-fprofile-generate".to_string()],
profdata: None,
}
}
}
impl OptimizationSettings {
/// Convenience helper for disabling instrumentation once profile data has been gathered.
pub fn for_pgo_replay(profdata: impl Into<String>) -> Self {
Self {
enable_lto: true,
enable_pgo: true,
cflags: vec![
"-O3".to_string(),
"-flto".to_string(),
"-fprofile-use".to_string(),
],
ldflags: vec!["-flto".to_string(), "-fprofile-use".to_string()],
profdata: Some(profdata.into()),
}
}
}

View file

@ -1,293 +0,0 @@
use std::fs::{self, OpenOptions};
use std::io::Write;
use std::path::{Path, PathBuf};

use anyhow::{Context, Result, anyhow};

use crate::pkgs::package::{OptimizationSettings, PackageDefinition};

#[derive(Debug, Clone)]
pub struct ScaffoldRequest {
    pub name: String,
    pub version: String,
    pub source: Option<String>,
    pub md5: Option<String>,
    pub configure_args: Vec<String>,
    pub build_commands: Vec<String>,
    pub install_commands: Vec<String>,
    pub dependencies: Vec<String>,
    pub enable_lto: bool,
    pub enable_pgo: bool,
    pub cflags: Vec<String>,
    pub ldflags: Vec<String>,
    pub profdata: Option<String>,
    pub stage: Option<String>,
    pub variant: Option<String>,
    pub notes: Option<String>,
    pub module_override: Option<String>,
}

#[derive(Debug, Clone)]
pub struct ScaffoldResult {
    pub module_path: PathBuf,
    pub prefix_module: PathBuf,
    pub by_name_module: PathBuf,
    pub definition: PackageDefinition,
}

pub fn scaffold_package(
    base_dir: impl AsRef<Path>,
    request: ScaffoldRequest,
) -> Result<ScaffoldResult> {
    let base_dir = base_dir.as_ref();
    if !base_dir.ends_with("by_name") {
        return Err(anyhow!("expected base directory ending with 'by_name'"));
    }
    let module_source_name = request.module_override.as_deref().unwrap_or(&request.name);
    let module_name = sanitize(module_source_name);
    let prefix = prefix(&module_name);
    let prefix_dir = base_dir.join(&prefix);
    fs::create_dir_all(&prefix_dir)
        .with_context(|| format!("creating prefix directory {:?}", prefix_dir))?;
    let by_name_mod = base_dir.join("mod.rs");
    ensure_mod_entry(&by_name_mod, &prefix)?;
    let prefix_mod = prefix_dir.join("mod.rs");
    ensure_mod_entry(&prefix_mod, &module_name)?;
    let package_dir = prefix_dir.join(&module_name);
    if package_dir.exists() {
        return Err(anyhow!("package module {:?} already exists", package_dir));
    }
    fs::create_dir_all(&package_dir)
        .with_context(|| format!("creating package directory {:?}", package_dir))?;
    let module_path = package_dir.join("mod.rs");
    let definition = build_definition(&request);
    let source = generate_module_source(&request, &definition);
    fs::write(&module_path, source)
        .with_context(|| format!("writing module source to {:?}", module_path))?;
    Ok(ScaffoldResult {
        module_path,
        prefix_module: prefix_mod,
        by_name_module: by_name_mod,
        definition,
    })
}

fn ensure_mod_entry(path: &Path, module: &str) -> Result<()> {
    let entry = format!("pub mod {};", module);
    if path.exists() {
        let contents =
            fs::read_to_string(path).with_context(|| format!("reading module file {:?}", path))?;
        if contents.contains(&entry) {
            return Ok(());
        }
        let mut file = OpenOptions::new()
            .append(true)
            .open(path)
            .with_context(|| format!("opening module file {:?}", path))?;
        writeln!(file, "{}", entry)
            .with_context(|| format!("appending to module file {:?}", path))?;
    } else {
        fs::write(path, format!("{}\n", entry))
            .with_context(|| format!("creating module file {:?}", path))?;
    }
    Ok(())
}

fn build_definition(request: &ScaffoldRequest) -> PackageDefinition {
    let mut pkg = PackageDefinition::new(&request.name, &request.version);
    pkg.source = request.source.clone();
    pkg.md5 = request.md5.clone();
    pkg.configure_args = request.configure_args.clone();
    pkg.build_commands = request.build_commands.clone();
    pkg.install_commands = request.install_commands.clone();
    pkg.dependencies = request.dependencies.clone();
    let mut cflags = if request.cflags.is_empty() {
        default_cflags(request)
    } else {
        request.cflags.clone()
    };
    let mut ldflags = if request.ldflags.is_empty() {
        default_ldflags(request)
    } else {
        request.ldflags.clone()
    };
    dedup(&mut cflags);
    dedup(&mut ldflags);
    let profdata = request.profdata.clone();
    pkg.optimizations = match profdata.clone() {
        Some(path) => OptimizationSettings::for_pgo_replay(path),
        None => OptimizationSettings::default(),
    };
    pkg.optimizations.enable_lto = request.enable_lto;
    pkg.optimizations.enable_pgo = request.enable_pgo;
    pkg.optimizations.cflags = cflags;
    pkg.optimizations.ldflags = ldflags;
    pkg.optimizations.profdata = profdata;
    pkg
}

fn default_cflags(request: &ScaffoldRequest) -> Vec<String> {
    let mut flags = vec!["-O3".to_string(), "-flto".to_string()];
    if request.enable_pgo {
        if request.profdata.is_some() {
            flags.push("-fprofile-use".to_string());
        } else {
            flags.push("-fprofile-generate".to_string());
        }
    }
    flags
}

fn default_ldflags(request: &ScaffoldRequest) -> Vec<String> {
    let mut flags = vec!["-flto".to_string()];
    if request.enable_pgo {
        if request.profdata.is_some() {
            flags.push("-fprofile-use".to_string());
        } else {
            flags.push("-fprofile-generate".to_string());
        }
    }
    flags
}

fn dedup(values: &mut Vec<String>) {
    let mut seen = std::collections::BTreeSet::new();
    values.retain(|value| seen.insert(value.clone()));
}

fn generate_module_source(request: &ScaffoldRequest, definition: &PackageDefinition) -> String {
    let mut metadata = Vec::new();
    if let Some(stage) = &request.stage {
        metadata.push(format!("stage: {}", stage));
    }
    if let Some(variant) = &request.variant {
        metadata.push(format!("variant: {}", variant));
    }
    if let Some(notes) = &request.notes {
        metadata.push(format!("notes: {}", notes));
    }
    let metadata = if metadata.is_empty() {
        String::new()
    } else {
        format!("// MLFS metadata: {}\n\n", metadata.join(", "))
    };
    let configure_args = format_vec(&definition.configure_args);
    let build_commands = format_vec(&definition.build_commands);
    let install_commands = format_vec(&definition.install_commands);
    let dependencies = format_vec(&definition.dependencies);
    let cflags = format_vec(&definition.optimizations.cflags);
    let ldflags = format_vec(&definition.optimizations.ldflags);
    let source = format_option(&definition.source);
    let md5 = format_option(&definition.md5);
    let profdata = format_option(&definition.optimizations.profdata);
    format!(
        "{metadata}use crate::pkgs::package::{{OptimizationSettings, PackageDefinition}};\n\n\
         pub fn definition() -> PackageDefinition {{\n\
         let mut pkg = PackageDefinition::new(\"{name}\", \"{version}\");\n\
         pkg.source = {source};\n\
         pkg.md5 = {md5};\n\
         pkg.configure_args = {configure_args};\n\
         pkg.build_commands = {build_commands};\n\
         pkg.install_commands = {install_commands};\n\
         pkg.dependencies = {dependencies};\n\
         let profdata = {profdata};\n\
         pkg.optimizations = match profdata.clone() {{\n\
         Some(path) => OptimizationSettings::for_pgo_replay(path),\n\
         None => OptimizationSettings::default(),\n\
         }};\n\
         pkg.optimizations.enable_lto = {enable_lto};\n\
         pkg.optimizations.enable_pgo = {enable_pgo};\n\
         pkg.optimizations.cflags = {cflags};\n\
         pkg.optimizations.ldflags = {ldflags};\n\
         pkg.optimizations.profdata = profdata;\n\
         pkg\n\
         }}\n",
        metadata = metadata,
        name = request.name,
        version = request.version,
        source = source,
        md5 = md5,
        configure_args = configure_args,
        build_commands = build_commands,
        install_commands = install_commands,
        dependencies = dependencies,
        profdata = profdata,
        enable_lto = request.enable_lto,
        enable_pgo = request.enable_pgo,
        cflags = cflags,
        ldflags = ldflags,
    )
}

fn format_vec(values: &[String]) -> String {
    if values.is_empty() {
        "Vec::new()".to_string()
    } else {
        let items: Vec<String> = values
            .iter()
            .map(|v| format!("\"{}\".to_string()", escape(v)))
            .collect();
        format!("vec![{}]", items.join(", "))
    }
}

fn format_option(value: &Option<String>) -> String {
    match value {
        Some(v) => format!("Some(\"{}\".to_string())", escape(v)),
        None => "None".to_string(),
    }
}

fn sanitize(name: &str) -> String {
    // Lowercase ASCII alphanumerics pass through; everything else becomes '_'.
    let mut out = String::new();
    for ch in name.chars() {
        if ch.is_ascii_alphanumeric() {
            out.push(ch.to_ascii_lowercase());
        } else {
            out.push('_');
        }
    }
    if out.is_empty() {
        out.push_str("pkg");
    }
    if out
        .chars()
        .next()
        .map(|c| c.is_ascii_digit())
        .unwrap_or(false)
    {
        // Module names cannot start with a digit.
        out.insert(0, 'p');
    }
    out
}

fn prefix(module: &str) -> String {
    // Two-character shard directory, e.g. "binutils" -> "bi".
    let mut chars = module.chars();
    let first = chars.next().unwrap_or('p');
    let second = chars.next().unwrap_or('k');
    let mut s = String::new();
    s.push(first);
    s.push(second);
    s
}

fn escape(input: &str) -> String {
    input.replace('\\', "\\\\").replace('"', "\\\"")
}
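
End to end, the scaffolder takes a fully specified request and materialises the module tree. A sketch with illustrative values — every field shown exists on `ScaffoldRequest`, but the package data and base path are assumptions:

```rust
use crate::pkgs::scaffolder::{ScaffoldRequest, scaffold_package};

fn scaffold_zlib() -> anyhow::Result<()> {
    let request = ScaffoldRequest {
        name: "zlib".into(),
        version: "1.3.1".into(),
        source: Some("https://zlib.net/zlib-1.3.1.tar.xz".into()),
        md5: None,
        configure_args: vec!["--prefix=/usr".into()],
        build_commands: vec!["make".into()],
        install_commands: vec!["make install".into()],
        dependencies: Vec::new(),
        enable_lto: true,
        enable_pgo: false,
        cflags: Vec::new(),  // empty -> default_cflags() fills in -O3 -flto
        ldflags: Vec::new(), // empty -> default_ldflags() fills in -flto
        profdata: None,
        stage: Some("final-system".into()),
        variant: None,
        notes: None,
        module_override: None,
    };
    // Creates <base>/zl/zlib/mod.rs (prefix("zlib") == "zl") and appends
    // `pub mod ...;` entries to both mod.rs chains; errors if the module
    // directory already exists.
    let result = scaffold_package("src/pkgs/by_name", request)?;
    println!("generated {}", result.module_path.display());
    Ok(())
}
```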


@@ -1,7 +1,7 @@
 // src/tui/disk_manager.rs
 use std::{
     fs::{File, read_dir},
-    io::{self, Seek, SeekFrom},
+    io::{self, Seek, SeekFrom, Write},
     path::PathBuf,
 };
@@ -186,12 +186,12 @@ impl DiskManager {
             },
         };
-        // Create list of lines to display using public GPT API:
+        // Create list of lines to display:
         let mut lines: Vec<String> = Vec::new();
         lines.push(format!("Partitions on {}:", disk.display()));
-        for (i, entry) in gpt.iter() {
-            if entry.is_used() {
-                let name = entry.partition_name.as_str();
+        for (i, entry_opt) in gpt.partitions.iter().enumerate() {
+            if let Some(entry) = entry_opt {
+                let name = entry.partition_name.to_string();
                 lines.push(format!(
                     "{}: {} -> {} (type: {})",
                     i,
@@ -388,9 +388,9 @@ impl DiskManager {
         let sectors = (size_mb as u128 * 1024 * 1024 / 512) as u64;
         // choose starting LBA: find max ending_lba among existing partitions; align to 2048
         let last_end = gpt
+            .partitions
             .iter()
-            .filter(|(_, e)| e.is_used())
-            .map(|(_, e)| e.ending_lba)
+            .filter_map(|p| p.as_ref().map(|e| e.ending_lba))
             .max()
             .unwrap_or(2048);
         let start = ((last_end + 2048) / 2048) * 2048 + 1;
@@ -410,15 +410,15 @@ impl DiskManager {
         };
         new_entry.partition_type_guid = type_guid;
-        // find first empty partition slot (indexing is 1-based for gptman::GPT)
-        let idx_opt = gpt.iter().find(|(_, e)| e.is_unused()).map(|(i, _)| i);
+        // find first empty partition slot
+        let idx_opt = gpt.partitions.iter().position(|p| p.is_none());
         let idx = match idx_opt {
             Some(i) => i,
             None => return Err("No free GPT partition entries (maxed out)".into()),
         };
         // assign and write
-        gpt[idx] = new_entry;
+        gpt.partitions[idx] = Some(new_entry);
         // Seek to start (important)
         file.seek(SeekFrom::Start(0))?;
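
The left-hand side of these hunks stays on gptman's public surface — `gpt.iter()` yielding 1-based `(u32, &GPTPartitionEntry)` pairs, `is_used()`/`is_unused()`, and `gpt[idx]` indexing — while the right-hand side reaches into the `partitions` vector of `Option` slots directly. A condensed sketch of the public-API style, assuming gptman 2.x and that `GPT::find_from` is the loading path (this diff does not show how `gpt` is obtained):

```rust
use std::fs::File;

fn first_free_slot(disk: &str) -> Result<u32, Box<dyn std::error::Error>> {
    let mut file = File::open(disk)?;
    let gpt = gptman::GPT::find_from(&mut file)?;
    // Partition numbers are 1-based; iter() yields (number, entry) pairs.
    for (i, entry) in gpt.iter() {
        if entry.is_unused() {
            return Ok(i);
        }
    }
    Err("No free GPT partition entries (maxed out)".into())
}
```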


@@ -1,6 +1,7 @@
 use crate::tui::disk_manager::DiskManager;
 use crossterm::event::{self, Event, KeyCode};
 use std::error::Error;
+use std::io::Stdout;
 use tui::{
     Terminal,
     backend::CrosstermBackend,
@@ -10,7 +11,7 @@ use tui::{
 };
 pub fn show_main_menu() -> Result<(), Box<dyn Error>> {
-    let stdout = std::io::stdout();
+    let mut stdout = std::io::stdout();
     let backend = CrosstermBackend::new(stdout);
     let mut terminal = Terminal::new(backend)?;
@@ -36,7 +37,7 @@ pub fn show_main_menu() -> Result<(), Box<dyn Error>> {
     if event::poll(std::time::Duration::from_millis(100))? {
         if let Event::Key(key) = event::read()? {
             match key.code {
-                KeyCode::Char('1') => DiskManager::run_tui()?,
+                KeyCode::Char('1') => DiskManager::show_disk_manager(&mut terminal)?,
                 KeyCode::Char('0') => break,
                 _ => {}
             }
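
The notable change here is that the disk manager now borrows the already-initialised terminal instead of building its own, so the whole TUI shares one raw-mode session. A minimal sketch of that hand-off shape (the sub-view body is illustrative; the types match the tui 0.19 imports above):

```rust
use std::io::Stdout;
use tui::{Terminal, backend::CrosstermBackend};

fn show_sub_view(
    terminal: &mut Terminal<CrosstermBackend<Stdout>>,
) -> Result<(), Box<dyn std::error::Error>> {
    // Draw one frame using the caller's terminal; no new backend is created.
    terminal.draw(|f| {
        let _area = f.size(); // render widgets into the frame here
    })?;
    Ok(())
}
```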


@@ -18,9 +18,9 @@ impl Theme {
 }
 impl Settings {
-    #[instrument(skip(_terminal))]
+    #[instrument(skip(terminal))]
     pub fn show_settings(
-        _terminal: &mut Terminal<CrosstermBackend<Stdout>>,
+        terminal: &mut Terminal<CrosstermBackend<Stdout>>,
     ) -> Result<(), Box<dyn std::error::Error>> {
         // Render settings UI here
         Ok(())