Integrate metadata documentation and jhalfs manifests

m00d 2025-10-01 06:58:04 +02:00
parent 74bf8a32d6
commit 3ce470e019
34 changed files with 5544 additions and 240 deletions

809
Cargo.lock generated

File diff suppressed because it is too large


@@ -14,7 +14,7 @@ console = "0.16.1"
# Optional Terminal UI
crossterm = { version = "0.29.0", optional = true }
ratatui = { version = "0.29.0", optional = true }
tui = { version = "0.19.0", optional = true }
# Parsing & scraping
html_parser = "0.7.0"
@@ -22,6 +22,10 @@ scraper = "0.19.0"
regex = "1.11.3"
serde = { version = "1.0.228", features = ["derive"] }
serde_json = "1.0.145"
jsonschema = "0.17.0"
walkdir = "2.5.0"
chrono = { version = "0.4.38", default-features = false, features = ["clock"] }
sha2 = "0.10.8"
# Utilities
indicatif = "0.18.0"
@@ -34,7 +38,6 @@ md5 = "0.8.0"
reqwest = { version = "0.12.23", features = ["blocking", "json"] }
semver = "1.0.27"
inquire = "0.9.1"
tui = "0.19.0"
tracing = "0.1.41"
tracing-appender = "0.2.3"
tracing-subscriber = { version = "0.3.20", features = ["env-filter", "fmt"] }
@@ -45,14 +48,15 @@ shell-words = "1.1.0"
url = "2.5.7"
uuid = { version = "1.18.1", features = ["v4"] }
hex = "0.4.3"
diesel = { version = "2.1.6", features = ["sqlite", "r2d2", "returning_clauses_for_sqlite_3_35"] }
[features]
# TUI feature flag
tui = ["ratatui", "crossterm"]
tui = ["dep:tui", "dep:crossterm"]
# Optional default features
default = []
crossterm = ["dep:crossterm"]
# -----------------------
# Cargo-make tasks


@@ -94,10 +94,43 @@ You can also run the project directly in the flake shell:
nix run
```
### AI metadata tooling
The AI metadata store under `ai/metadata/` comes with a helper CLI to
validate package records against the JSON schema and regenerate
`index.json` after adding new entries:
```bash
cargo run --bin metadata_indexer -- --base-dir . validate
cargo run --bin metadata_indexer -- --base-dir . index
```
Use `--compact` with `index` if you prefer single-line JSON output.
To draft metadata for a specific book page, you can run the harvest mode.
It fetches the XHTML, scrapes the build commands, and emits a
schema-compliant JSON skeleton (pass `--dry-run` to inspect the result
without writing to disk):
```bash
cargo run --bin metadata_indexer -- \
  --base-dir . harvest \
  --book mlfs \
  --page chapter05/binutils-pass1 \
  --dry-run
```
## 📚 Documentation
- [Architecture Overview](docs/ARCHITECTURE.md): high-level tour of the crate
layout, binaries, and supporting modules.
- [Metadata Harvesting Pipeline](docs/METADATA_PIPELINE.md): how the metadata
indexer produces and validates the JSON records under `ai/metadata/`.
- `ai/notes.md`: scratchpad for ongoing research tasks (e.g., deeper jhalfs
integration).
---
## 📄 License
LPKG is licensed under the [MIT License](LICENSE).

29
ai/bugs.json Normal file

@@ -0,0 +1,29 @@
[
{
"id": "disk-manager-private-fields",
"title": "Disk manager accesses private GPT fields",
"description": "src/tui/disk_manager.rs reaches into gptman::GPT::partitions which is private, breaking compilation.",
"status": "open",
"owner": "default_cli",
"created_at": "2025-03-09T00:00:00Z",
"labels": ["tui", "blocking-build"]
},
{
"id": "tui-feature-build",
"title": "TUI modules require crossterm feature gating",
"description": "When the crate is built without the `tui` feature the main binary still imports crossterm types and fails to compile.",
"status": "open",
"owner": "default_cli",
"created_at": "2025-03-09T00:00:00Z",
"labels": ["tui", "feature-gate"]
},
{
"id": "metadata-harvest-no-source-urls",
"title": "Harvested metadata missing source URLs",
"description": "`metadata_indexer harvest --book mlfs --page chapter05/binutils-pass1 --dry-run` emits a draft record with no source URLs even after wget-list fallback; Binutils tarball discovery logic needs to inspect package download tables or improve slug matching.",
"status": "open",
"owner": "default_cli",
"created_at": "2025-10-01T04:40:00Z",
"labels": ["metadata", "ingest", "mlfs"]
}
]

97
ai/metadata/cache/mlfs-md5sums.txt vendored Normal file

@@ -0,0 +1,97 @@
590765dee95907dbc3c856f7255bd669 acl-2.3.2.tar.xz
227043ec2f6ca03c0948df5517f9c927 attr-2.5.2.tar.gz
1be79f7106ab6767f18391c5e22be701 autoconf-2.72.tar.xz
cea31dbf1120f890cbf2a3032cfb9a68 automake-1.18.1.tar.xz
977c8c0c5ae6309191e7768e28ebc951 bash-5.3.tar.gz
ad4db5a0eb4fdbb3f6813be4b6b3da74 bc-7.0.3.tar.xz
dee5b4267e0305a99a3c9d6131f45759 binutils-2.45.tar.xz
c28f119f405a2304ff0a7ccdcc629713 bison-3.8.2.tar.xz
67e051268d0c475ea773822f7500d0e5 bzip2-1.0.8.tar.gz
b2e687b6e664b9dd76581836c5c3e782 coreutils-9.8.tar.xz
68c5208c58236eba447d7d6d1326b821 dejagnu-1.6.3.tar.gz
d1b18b20868fb561f77861cd90b05de4 diffutils-3.12.tar.xz
113d7a7ee0710d2a670a44692a35fd2e e2fsprogs-1.47.3.tar.gz
ceefa052ded950a4c523688799193a44 elfutils-0.193.tar.bz2
423975a2a775ff32f12c53635b463a91 expat-2.7.3.tar.xz
00fce8de158422f5ccd2666512329bd2 expect5.45.4.tar.gz
459da2d4b534801e2e2861611d823864 file-5.46.tar.gz
870cfd71c07d37ebe56f9f4aaf4ad872 findutils-4.10.0.tar.xz
2882e3179748cc9f9c23ec593d6adc8d flex-2.6.4.tar.gz
c538415c1f27bd69cbbbf3cdd5135d39 flit_core-3.12.0.tar.gz
b7014650c5f45e5d4837c31209dc0037 gawk-5.3.2.tar.xz
b861b092bf1af683c46a8aa2e689a6fd gcc-15.2.0.tar.xz
aaa600665bc89e2febb3c7bd90679115 gdbm-1.26.tar.gz
8e14e926f088e292f5f2bce95b81d10e gettext-0.26.tar.xz
23c6f5a27932b435cae94e087cb8b1f5 glibc-2.42.tar.xz
956dc04e864001a9c22429f761f2c283 gmp-6.3.0.tar.xz
31753b021ea78a21f154bf9eecb8b079 gperf-3.3.tar.gz
5d9301ed9d209c4a88c8d3a6fd08b9ac grep-3.12.tar.xz
5e4f40315a22bb8a158748e7d5094c7d groff-1.23.0.tar.gz
60c564b1bdc39d8e43b3aab4bc0fb140 grub-2.12.tar.xz
4bf5a10f287501ee8e8ebe00ef62b2c2 gzip-1.14.tar.xz
437a3e9f4a420244c90db4ab20e713b6 iana-etc-20250926.tar.gz
401d7d07682a193960bcdecafd03de94 inetutils-2.6.tar.xz
12e517cac2b57a0121cda351570f1e63 intltool-0.51.0.tar.gz
80e1f91bf59d572acc15d5c6eb4f3e7c iproute2-6.16.0.tar.xz
11ee9d335b227ea2e8579c4ba6e56138 isl-0.27.tar.xz
66d4c25ff43d1deaf9637ccda523dec8 jinja2-3.1.6.tar.gz
7be7c6f658f5fb9512e2c490349a8eeb kbd-2.9.0.tar.xz
36f2cc483745e81ede3406fa55e1065a kmod-34.2.tar.xz
0386dc14f6a081a94dfb4c2413864eed less-679.tar.gz
2be34eced7c861fea8894e7195dac636 lfs-bootscripts-20250827.tar.xz
449ade7d620b5c4eeb15a632fbaa4f74 libcap-2.76.tar.xz
92af9efad4ba398995abf44835c5d9e9 libffi-3.5.2.tar.gz
17ac6969b2015386bcb5d278a08a40b5 libpipeline-1.5.8.tar.gz
22e0a29df8af5fdde276ea3a7d351d30 libtool-2.5.4.tar.xz
1796a5d20098e9dd9e3f576803c83000 libxcrypt-4.4.38.tar.xz
feb0a3d5ecf5a4628aed7d9f8f7ab3f6 linux-6.16.9.tar.xz
dead9f5f1966d9ae56e1e32761e4e675 lz4-1.10.0.tar.gz
6eb2ebed5b24e74b6e890919331d2132 m4-1.4.20.tar.xz
c8469a3713cbbe04d955d4ae4be23eeb make-4.4.1.tar.gz
b6335533cbeac3b24cd7be31fdee8c83 man-db-2.13.1.tar.xz
16f68d70139dd2bbcae4102be4705753 man-pages-6.15.tar.xz
13a73126d25afa72a1ff0daed072f5fe markupsafe-3.0.3.tar.gz
19e0a1091cec23d369dd77d852844195 meson-1.9.1.tar.gz
5c9bc658c9fd0f940e8e3e0f09530c62 mpc-1.3.1.tar.gz
7c32c39b8b6e3ae85f25156228156061 mpfr-4.2.2.tar.xz
679987405412f970561cc85e1e6428a2 ncurses-6.5-20250809.tgz
c35f8f55f4cf60f1a916068d8f45a0f8 ninja-1.13.1.tar.gz
0ec20faeb96bbb203c8684cc7fe4432e openssl-3.5.3.tar.gz
ab0ef21ddebe09d1803575120d3f99f8 packaging-25.0.tar.gz
149327a021d41c8f88d034eab41c039f patch-2.8.tar.xz
641f99b635ebb9332a9b6a8ce8e2f3cf pcre2-10.46.tar.bz2
7a6950a9f12d01eb96a9d2ed2f4e0072 perl-5.42.0.tar.xz
3291128c917fdb8fccd8c9e7784b643b pkgconf-2.5.1.tar.xz
90803e64f51f192f3325d25c3335d057 procps-ng-4.0.5.tar.xz
53eae841735189a896d614cba440eb10 psmisc-23.7.tar.xz
256cdb3bbf45cdce7499e52ba6c36ea3 Python-3.13.7.tar.xz
b84c0d81b2758398bb7f5b7411d3d908 python-3.13.7-docs-html.tar.bz2
25a73bfb2a3ad7146c5e9d4408d9f6cd readline-8.3.tar.gz
6aac9b2dbafcd5b7a67a8a9bcb8036c3 sed-4.9.tar.xz
82e1d67883b713f9493659b50d13b436 setuptools-80.9.0.tar.gz
30ef46f54363db1d624587be68794ef2 shadow-4.18.0.tar.xz
d74bbdca4ab1b2bd46d3b3f8dbb0f3db sqlite-autoconf-3500400.tar.gz
63a62af5b35913459954e6e66876f2b8 sqlite-doc-3500400.tar.xz
af60786956a2dc84054fbf46652e515e sysklogd-2.7.2.tar.gz
25fe5d328e22641254761f1baa74cee0 systemd-257.8.tar.gz
a44063e2ec0cf4adfd2ed5c9e9e095c5 systemd-man-pages-257.8.tar.xz
bc6890b975d19dc9db42d0c7364dd092 sysvinit-3.14.tar.xz
a2d8042658cfd8ea939e6d911eaf4152 tar-1.35.tar.xz
1ec3444533f54d0f86cd120058e15e48 tcl8.6.17-src.tar.gz
60c71044e723b0db5f21be82929f3534 tcl8.6.17-html.tar.gz
11939a7624572814912a18e76c8d8972 texinfo-7.2.tar.xz
ad65154c48c74a9b311fe84778c5434f tzdata2025b.tar.gz
acd4360d8a5c3ef320b9db88d275dae6 udev-lfs-20230818.tar.xz
a2a3281ce76821c4bc28794fdf9d3994 util-linux-2.41.2.tar.xz
e72f31be182f1ccf4b66bef46ac1e60e vim-9.1.1806.tar.gz
65e09ee84af36821e3b1e9564aa91bd5 wheel-0.46.1.tar.gz
89a8e82cfd2ad948b349c0a69c494463 XML-Parser-2.47.tar.gz
cf5e1feb023d22c6bdaa30e84ef3abe3 xz-5.8.1.tar.xz
9855b6d802d7fe5b7bd5b196a2271655 zlib-1.3.1.tar.gz
780fc1896922b1bc52a4e90980cdda48 zstd-1.5.7.tar.gz
6a5ac7e89b791aae556de0f745916f7f bzip2-1.0.8-install_docs-1.patch
c800540039fb0707954197486b1bde70 coreutils-9.8-i18n-2.patch
0ca4d6bb8d572fbcdb13cb36cd34833e expect-5.45.4-gcc15-1.patch
9a5997c3452909b1769918c759eff8a2 glibc-2.42-fhs-1.patch
f75cca16a38da6caa7d52151f7136895 kbd-2.9.0-backspace-1.patch
3af8fd8e13cad481eeeaa48be4247445 sysvinit-3.14-consolidated-1.patch

97
ai/metadata/cache/mlfs-wget-list.txt vendored Normal file

@@ -0,0 +1,97 @@
https://download.savannah.gnu.org/releases/acl/acl-2.3.2.tar.xz
https://download.savannah.gnu.org/releases/attr/attr-2.5.2.tar.gz
https://ftp.gnu.org/gnu/autoconf/autoconf-2.72.tar.xz
https://ftp.gnu.org/gnu/automake/automake-1.18.1.tar.xz
https://ftp.gnu.org/gnu/bash/bash-5.3.tar.gz
https://github.com/gavinhoward/bc/releases/download/7.0.3/bc-7.0.3.tar.xz
https://sourceware.org/pub/binutils/releases/binutils-2.45.tar.xz
https://ftp.gnu.org/gnu/bison/bison-3.8.2.tar.xz
https://www.sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz
https://ftp.gnu.org/gnu/coreutils/coreutils-9.8.tar.xz
https://ftp.gnu.org/gnu/dejagnu/dejagnu-1.6.3.tar.gz
https://ftp.gnu.org/gnu/diffutils/diffutils-3.12.tar.xz
https://downloads.sourceforge.net/project/e2fsprogs/e2fsprogs/v1.47.3/e2fsprogs-1.47.3.tar.gz
https://sourceware.org/ftp/elfutils/0.193/elfutils-0.193.tar.bz2
https://github.com/libexpat/libexpat/releases/download/R_2_7_3/expat-2.7.3.tar.xz
https://prdownloads.sourceforge.net/expect/expect5.45.4.tar.gz
https://astron.com/pub/file/file-5.46.tar.gz
https://ftp.gnu.org/gnu/findutils/findutils-4.10.0.tar.xz
https://github.com/westes/flex/releases/download/v2.6.4/flex-2.6.4.tar.gz
https://pypi.org/packages/source/f/flit-core/flit_core-3.12.0.tar.gz
https://ftp.gnu.org/gnu/gawk/gawk-5.3.2.tar.xz
https://ftp.gnu.org/gnu/gcc/gcc-15.2.0/gcc-15.2.0.tar.xz
https://ftp.gnu.org/gnu/gdbm/gdbm-1.26.tar.gz
https://ftp.gnu.org/gnu/gettext/gettext-0.26.tar.xz
https://ftp.gnu.org/gnu/glibc/glibc-2.42.tar.xz
https://ftp.gnu.org/gnu/gmp/gmp-6.3.0.tar.xz
https://ftp.gnu.org/gnu/gperf/gperf-3.3.tar.gz
https://ftp.gnu.org/gnu/grep/grep-3.12.tar.xz
https://ftp.gnu.org/gnu/groff/groff-1.23.0.tar.gz
https://ftp.gnu.org/gnu/grub/grub-2.12.tar.xz
https://ftp.gnu.org/gnu/gzip/gzip-1.14.tar.xz
https://github.com/Mic92/iana-etc/releases/download/20250926/iana-etc-20250926.tar.gz
https://ftp.gnu.org/gnu/inetutils/inetutils-2.6.tar.xz
https://launchpad.net/intltool/trunk/0.51.0/+download/intltool-0.51.0.tar.gz
https://www.kernel.org/pub/linux/utils/net/iproute2/iproute2-6.16.0.tar.xz
https://libisl.sourceforge.io/isl-0.27.tar.xz
https://pypi.org/packages/source/J/Jinja2/jinja2-3.1.6.tar.gz
https://www.kernel.org/pub/linux/utils/kbd/kbd-2.9.0.tar.xz
https://www.kernel.org/pub/linux/utils/kernel/kmod/kmod-34.2.tar.xz
https://www.greenwoodsoftware.com/less/less-679.tar.gz
https://www.linuxfromscratch.org/lfs/downloads/development/lfs-bootscripts-20250827.tar.xz
https://www.kernel.org/pub/linux/libs/security/linux-privs/libcap2/libcap-2.76.tar.xz
https://github.com/libffi/libffi/releases/download/v3.5.2/libffi-3.5.2.tar.gz
https://download.savannah.gnu.org/releases/libpipeline/libpipeline-1.5.8.tar.gz
https://ftp.gnu.org/gnu/libtool/libtool-2.5.4.tar.xz
https://github.com/besser82/libxcrypt/releases/download/v4.4.38/libxcrypt-4.4.38.tar.xz
https://www.kernel.org/pub/linux/kernel/v6.x/linux-6.16.9.tar.xz
https://github.com/lz4/lz4/releases/download/v1.10.0/lz4-1.10.0.tar.gz
https://ftp.gnu.org/gnu/m4/m4-1.4.20.tar.xz
https://ftp.gnu.org/gnu/make/make-4.4.1.tar.gz
https://download.savannah.gnu.org/releases/man-db/man-db-2.13.1.tar.xz
https://www.kernel.org/pub/linux/docs/man-pages/man-pages-6.15.tar.xz
https://pypi.org/packages/source/M/MarkupSafe/markupsafe-3.0.3.tar.gz
https://github.com/mesonbuild/meson/releases/download/1.9.1/meson-1.9.1.tar.gz
https://ftp.gnu.org/gnu/mpc/mpc-1.3.1.tar.gz
https://ftp.gnu.org/gnu/mpfr/mpfr-4.2.2.tar.xz
https://invisible-mirror.net/archives/ncurses/current/ncurses-6.5-20250809.tgz
https://github.com/ninja-build/ninja/archive/v1.13.1/ninja-1.13.1.tar.gz
https://github.com/openssl/openssl/releases/download/openssl-3.5.3/openssl-3.5.3.tar.gz
https://files.pythonhosted.org/packages/source/p/packaging/packaging-25.0.tar.gz
https://ftp.gnu.org/gnu/patch/patch-2.8.tar.xz
https://github.com/PCRE2Project/pcre2/releases/download/pcre2-10.46/pcre2-10.46.tar.bz2
https://www.cpan.org/src/5.0/perl-5.42.0.tar.xz
https://distfiles.ariadne.space/pkgconf/pkgconf-2.5.1.tar.xz
https://sourceforge.net/projects/procps-ng/files/Production/procps-ng-4.0.5.tar.xz
https://sourceforge.net/projects/psmisc/files/psmisc/psmisc-23.7.tar.xz
https://www.python.org/ftp/python/3.13.7/Python-3.13.7.tar.xz
https://www.python.org/ftp/python/doc/3.13.7/python-3.13.7-docs-html.tar.bz2
https://ftp.gnu.org/gnu/readline/readline-8.3.tar.gz
https://ftp.gnu.org/gnu/sed/sed-4.9.tar.xz
https://pypi.org/packages/source/s/setuptools/setuptools-80.9.0.tar.gz
https://github.com/shadow-maint/shadow/releases/download/4.18.0/shadow-4.18.0.tar.xz
https://sqlite.org/2025/sqlite-autoconf-3500400.tar.gz
https://anduin.linuxfromscratch.org/LFS/sqlite-doc-3500400.tar.xz
https://github.com/troglobit/sysklogd/releases/download/v2.7.2/sysklogd-2.7.2.tar.gz
https://github.com/systemd/systemd/archive/v257.8/systemd-257.8.tar.gz
https://anduin.linuxfromscratch.org/LFS/systemd-man-pages-257.8.tar.xz
https://github.com/slicer69/sysvinit/releases/download/3.14/sysvinit-3.14.tar.xz
https://ftp.gnu.org/gnu/tar/tar-1.35.tar.xz
https://downloads.sourceforge.net/tcl/tcl8.6.17-src.tar.gz
https://downloads.sourceforge.net/tcl/tcl8.6.17-html.tar.gz
https://ftp.gnu.org/gnu/texinfo/texinfo-7.2.tar.xz
https://www.iana.org/time-zones/repository/releases/tzdata2025b.tar.gz
https://anduin.linuxfromscratch.org/LFS/udev-lfs-20230818.tar.xz
https://www.kernel.org/pub/linux/utils/util-linux/v2.41/util-linux-2.41.2.tar.xz
https://github.com/vim/vim/archive/v9.1.1806/vim-9.1.1806.tar.gz
https://pypi.org/packages/source/w/wheel/wheel-0.46.1.tar.gz
https://cpan.metacpan.org/authors/id/T/TO/TODDR/XML-Parser-2.47.tar.gz
https://github.com//tukaani-project/xz/releases/download/v5.8.1/xz-5.8.1.tar.xz
https://zlib.net/fossils/zlib-1.3.1.tar.gz
https://github.com/facebook/zstd/releases/download/v1.5.7/zstd-1.5.7.tar.gz
https://www.linuxfromscratch.org/patches/lfs/development/bzip2-1.0.8-install_docs-1.patch
https://www.linuxfromscratch.org/patches/lfs/development/coreutils-9.8-i18n-2.patch
https://www.linuxfromscratch.org/patches/lfs/development/expect-5.45.4-gcc15-1.patch
https://www.linuxfromscratch.org/patches/lfs/development/glibc-2.42-fhs-1.patch
https://www.linuxfromscratch.org/patches/lfs/development/kbd-2.9.0-backspace-1.patch
https://www.linuxfromscratch.org/patches/lfs/development/sysvinit-3.14-consolidated-1.patch

16
ai/metadata/index.json Normal file

@@ -0,0 +1,16 @@
{
"generated_at": "2025-10-01T04:35:27.106227+00:00",
"packages": [
{
"book": "mlfs",
"id": "mlfs/binutils/pass1",
"name": "Binutils",
"path": "packages/mlfs/binutils-pass1.json",
"stage": "cross-toolchain",
"status": "draft",
"variant": "Pass 1",
"version": "2.45"
}
],
"schema_version": "v0.1.0"
}


@@ -0,0 +1,113 @@
{
"artifacts": {
"disk": 678,
"install_prefix": null,
"sbu": 1.0
},
"build": [
{
"commands": [
"mkdir -v build",
"cd build"
],
"cwd": null,
"notes": null,
"phase": "setup",
"requires_root": false
},
{
"commands": [
"../configure --prefix=$LFS/tools \\",
"--with-sysroot=$LFS \\",
"--target=$LFS_TGT \\",
"--disable-nls \\",
"--enable-gprofng=no \\",
"--disable-werror \\",
"--enable-new-dtags \\",
"--enable-default-hash-style=gnu"
],
"cwd": null,
"notes": null,
"phase": "configure",
"requires_root": false
},
{
"commands": [
"make"
],
"cwd": null,
"notes": null,
"phase": "build",
"requires_root": false
},
{
"commands": [
"make install"
],
"cwd": null,
"notes": null,
"phase": "install",
"requires_root": false
}
],
"dependencies": {
"build": [],
"runtime": []
},
"environment": {
"users": [],
"variables": []
},
"optimizations": {
"cflags": [
"-O3",
"-flto"
],
"enable_lto": true,
"enable_pgo": true,
"ldflags": [
"-flto"
],
"profdata": null
},
"package": {
"anchors": {
"section": "https://linuxfromscratch.org/~thomas/multilib-m32/chapter05/binutils-pass1.html#ch-tools-binutils-pass1"
},
"book": "mlfs",
"chapter": 5,
"id": "mlfs/binutils-pass-1",
"name": "Binutils",
"section": "5.2",
"stage": "cross-toolchain",
"upstream": null,
"variant": "Pass 1",
"version": "2.45"
},
"provenance": {
"book_release": "lfs-ml-12.4-40-multilib",
"content_hash": "7c580aad04933a2f6ec5e5410a57695dd2d0b76a293212f33fd3edd226490853",
"page_url": "https://linuxfromscratch.org/~thomas/multilib-m32/chapter05/binutils-pass1.html",
"retrieved_at": "2025-10-01T04:57:22.375928+00:00"
},
"schema_version": "v0.1.0",
"source": {
"archive": "binutils-2.45.tar.xz",
"checksums": [
{
"alg": "md5",
"value": "dee5b4267e0305a99a3c9d6131f45759"
}
],
"urls": [
{
"kind": "primary",
"url": "https://sourceware.org/pub/binutils/releases/binutils-2.45.tar.xz"
}
]
},
"status": {
"issues": [],
"state": "draft"
}
}


@@ -0,0 +1,147 @@
{
"schema_version": "v0.1.0",
"package": {
"id": "mlfs/binutils/pass1",
"name": "Binutils",
"upstream": "gnu/binutils",
"version": "2.45",
"book": "mlfs",
"chapter": 5,
"section": "5.02",
"stage": "cross-toolchain",
"variant": "Pass 1",
"anchors": {
"section": "https://linuxfromscratch.org/~thomas/multilib-m32/chapter05/binutils-pass1.html#ch-tools-binutils-pass1"
}
},
"source": {
"urls": [
{
"url": "https://ftp.gnu.org/gnu/binutils/binutils-2.45.tar.xz",
"kind": "primary"
},
{
"url": "https://ftpmirror.gnu.org/binutils/binutils-2.45.tar.xz",
"kind": "mirror"
}
],
"archive": "binutils-2.45.tar.xz"
},
"artifacts": {
"sbu": 1,
"disk": 678,
"install_prefix": "$LFS/tools"
},
"dependencies": {
"build": [
{ "name": "bash" },
{ "name": "coreutils" },
{ "name": "gcc", "optional": true }
],
"runtime": []
},
"environment": {
"variables": [
{
"name": "LFS",
"description": "Absolute path to mounted LFS workspace"
},
{
"name": "LFS_TGT",
"description": "Target triple for cross toolchain"
}
],
"users": []
},
"build": [
{
"phase": "setup",
"commands": [
"tar -xf binutils-2.45.tar.xz",
"cd binutils-2.45",
"mkdir -v build",
"cd build"
]
},
{
"phase": "configure",
"commands": [
"../configure --prefix=$LFS/tools \\",
" --with-sysroot=$LFS \\",
" --target=$LFS_TGT \\",
" --disable-nls \\",
" --enable-gprofng=no \\",
" --disable-werror \\",
" --enable-new-dtags \\",
" --enable-default-hash-style=gnu"
],
"cwd": "build"
},
{
"phase": "build",
"commands": [
"make"
],
"cwd": "build"
},
{
"phase": "test",
"commands": [
"make -k check"
],
"cwd": "build",
"notes": "Tests are optional for cross-toolchain; failures can be ignored"
},
{
"phase": "install",
"commands": [
"make install"
],
"cwd": "build"
}
],
"optimizations": {
"enable_lto": true,
"enable_pgo": true,
"cflags": ["-O3", "-flto", "-fprofile-generate"],
"ldflags": ["-flto", "-fprofile-generate"],
"profdata": null
},
"tests": [
{
"commands": [
"make -k check"
],
"optional": true,
"expected_failures": ["gas/run/elf-x86-64-reloc.sh"]
}
],
"post_install": [
{
"commands": [
"rm -v $LFS/tools/lib/libbfd.a",
"rm -v $LFS/tools/lib/libctf-nobfd.a"
],
"description": "Remove static libraries per LFS guidance"
}
],
"notes": [
{
"severity": "warning",
"text": "Ensure the host uses recent flex/bison to avoid configure warnings."
}
],
"provenance": {
"book_release": "ml-12.4-40-multilib",
"page_url": "https://linuxfromscratch.org/~thomas/multilib-m32/chapter05/binutils-pass1.html",
"retrieved_at": "2025-03-09T00:00:00Z",
"content_hash": "0000000000000000000000000000000000000000000000000000000000000000"
},
"status": {
"state": "draft",
"issues": [
"Checksums not yet verified",
"Dependency list requires confirmation"
]
}
}

377
ai/metadata/schema.json Normal file

@@ -0,0 +1,377 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://lpkg.dev/schemas/package-metadata.json",
"title": "LPKG Package Metadata",
"type": "object",
"required": ["schema_version", "package", "source", "build", "provenance", "status"],
"additionalProperties": false,
"properties": {
"schema_version": {
"type": "string",
"pattern": "^v\\d+\\.\\d+\\.\\d+$"
},
"package": {
"type": "object",
"required": ["id", "name", "version", "book"],
"additionalProperties": false,
"properties": {
"id": {
"type": "string",
"pattern": "^[a-z0-9][a-z0-9-/]*$"
},
"name": {
"type": "string",
"minLength": 1
},
"upstream": {
"type": "string"
},
"version": {
"type": "string",
"minLength": 1
},
"book": {
"type": "string",
"enum": ["lfs", "mlfs", "blfs", "glfs"]
},
"chapter": {
"type": ["integer", "null"],
"minimum": 0
},
"section": {
"type": ["string", "null"],
"pattern": "^\\d+\\.\\d+$"
},
"stage": {
"type": ["string", "null"],
"enum": [
"cross-toolchain",
"temporary-tools",
"system",
"system-configuration",
"system-finalization",
"desktop",
"server",
"multilib",
"kernel",
"boot",
null
]
},
"variant": {
"type": ["string", "null"],
"minLength": 1
},
"anchors": {
"type": "object",
"additionalProperties": {
"type": "string",
"format": "uri"
}
}
}
},
"source": {
"type": "object",
"required": ["urls"],
"additionalProperties": false,
"properties": {
"urls": {
"type": "array",
"items": {
"type": "object",
"required": ["url"],
"additionalProperties": false,
"properties": {
"url": {
"type": "string",
"format": "uri"
},
"kind": {
"type": "string",
"enum": ["primary", "mirror", "patch", "signature"]
}
}
}
},
"archive": {
"type": ["string", "null"]
},
"checksums": {
"type": "array",
"items": {
"type": "object",
"required": ["alg", "value"],
"additionalProperties": false,
"properties": {
"alg": {
"type": "string",
"enum": ["md5", "sha1", "sha256", "sha512"]
},
"value": {
"type": "string",
"pattern": "^[A-Fa-f0-9]{16,128}$"
}
}
}
}
}
},
"artifacts": {
"type": "object",
"additionalProperties": false,
"properties": {
"sbu": {
"type": ["number", "null"],
"minimum": 0
},
"disk": {
"type": ["integer", "null"],
"minimum": 0,
"description": "Approximate disk usage in MB"
},
"install_prefix": {
"type": ["string", "null"],
"minLength": 1
}
}
},
"dependencies": {
"type": "object",
"additionalProperties": false,
"properties": {
"build": {
"type": "array",
"items": {
"type": "object",
"required": ["name"],
"additionalProperties": false,
"properties": {
"name": {
"type": "string",
"minLength": 1
},
"optional": {
"type": "boolean",
"default": false
}
}
}
},
"runtime": {
"type": "array",
"items": {
"type": "string",
"minLength": 1
}
}
}
},
"environment": {
"type": "object",
"additionalProperties": false,
"properties": {
"variables": {
"type": "array",
"items": {
"type": "object",
"required": ["name"],
"additionalProperties": false,
"properties": {
"name": {
"type": "string",
"pattern": "^[A-Z0-9_]+$"
},
"required": {
"type": "boolean",
"default": true
},
"description": {
"type": "string"
}
}
}
},
"users": {
"type": "array",
"items": {
"type": "object",
"required": ["name"],
"additionalProperties": false,
"properties": {
"name": {
"type": "string",
"minLength": 1
},
"purpose": {
"type": "string"
}
}
}
}
}
},
"build": {
"type": "array",
"minItems": 1,
"items": {
"type": "object",
"required": ["phase", "commands"],
"additionalProperties": false,
"properties": {
"phase": {
"type": "string",
"enum": ["setup", "configure", "build", "test", "install", "post"]
},
"commands": {
"type": "array",
"items": {
"type": "string",
"minLength": 1
}
},
"cwd": {
"type": ["string", "null"],
"minLength": 1
},
"requires_root": {
"type": "boolean",
"default": false
},
"notes": {
"type": ["string", "null"],
"minLength": 1
}
}
}
},
"optimizations": {
"type": "object",
"additionalProperties": false,
"properties": {
"enable_lto": {
"type": "boolean"
},
"enable_pgo": {
"type": "boolean"
},
"cflags": {
"type": "array",
"items": {
"type": "string"
}
},
"ldflags": {
"type": "array",
"items": {
"type": "string"
}
},
"profdata": {
"type": ["string", "null"],
"minLength": 1
}
}
},
"tests": {
"type": "array",
"items": {
"type": "object",
"required": ["commands"],
"additionalProperties": false,
"properties": {
"commands": {
"type": "array",
"items": {
"type": "string"
}
},
"optional": {
"type": "boolean"
},
"expected_failures": {
"type": "array",
"items": {
"type": "string"
}
}
}
}
},
"post_install": {
"type": "array",
"items": {
"type": "object",
"required": ["commands"],
"additionalProperties": false,
"properties": {
"commands": {
"type": "array",
"items": {
"type": "string"
}
},
"description": {
"type": ["string", "null"]
}
}
}
},
"notes": {
"type": "array",
"items": {
"type": "object",
"required": ["text"],
"additionalProperties": false,
"properties": {
"severity": {
"type": "string",
"enum": ["info", "warning", "error"]
},
"text": {
"type": "string"
}
}
}
},
"provenance": {
"type": "object",
"required": ["book_release", "page_url", "retrieved_at"],
"additionalProperties": false,
"properties": {
"book_release": {
"type": "string"
},
"page_url": {
"type": "string",
"format": "uri"
},
"retrieved_at": {
"type": "string",
"format": "date-time"
},
"content_hash": {
"type": "string",
"pattern": "^[A-Fa-f0-9]{64}$"
}
}
},
"status": {
"type": "object",
"required": ["state"],
"additionalProperties": false,
"properties": {
"state": {
"type": "string",
"enum": ["draft", "review", "imported", "stale"]
},
"issues": {
"type": "array",
"items": {
"type": "string"
}
}
}
}
}
}

15
ai/notes.md Normal file

@@ -0,0 +1,15 @@
# Integrating jhalfs Source Metadata
- Goal: reuse jhalfs wget-list and md5sums to populate package `source.urls` and
auto-fill checksums when harvesting metadata for MLFS/BLFS/GLFS packages.
- Data source: `https://anduin.linuxfromscratch.org/` hosts per-release
`wget-list`/`md5sums` files already curated by the jhalfs project.
- Approach:
1. Fetch (and optionally cache under `ai/cache/`) the lists for each book.
2. When harvesting, map `<package>-<version>` against the list to gather all
relevant URLs.
3. Pull matching checksum entries to populate `source.checksums`.
4. Keep the existing HTML scrape for chapter/stage text; jhalfs covers only
sources.
- Benefits: avoids fragile HTML tables, keeps URLs aligned with official build
scripts, and ensures checksums are up-to-date.
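- Sketch of the matching step (illustrative only; `match_sources` is not an
existing helper, and the real matcher still needs the pass/suffix handling
noted above):

```rust
/// Pick every wget-list entry whose file name starts with
/// `<package>-<version>` (e.g. "binutils-2.45"). Illustrative sketch only.
fn match_sources<'a>(wget_list: &'a str, package: &str, version: &str) -> Vec<&'a str> {
    let needle = format!("{}-{}", package.to_lowercase(), version);
    wget_list
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty())
        .filter(|url| {
            // Compare against the final path segment so mirrors with
            // different directory layouts still match.
            url.rsplit('/')
                .next()
                .map(|file| file.to_lowercase().starts_with(&needle))
                .unwrap_or(false)
        })
        .collect()
}
```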

24
ai/personas.json Normal file

@@ -0,0 +1,24 @@
[
{
"id": "default_cli",
"name": "Codex CLI Assistant",
"description": "Default persona for repository automation; focuses on safe refactors and tooling improvements.",
"strengths": [
"Rust and tooling pipelines",
"Workflow automation",
"Incremental migrations"
],
"notes": "Derived from GPT-5 Codex runtime; avoids destructive operations without explicit approval."
},
{
"id": "mlfs_researcher",
"name": "MLFS Researcher",
"description": "Persona dedicated to tracking Multilib Linux From Scratch package metadata and translating it into lpkg modules.",
"strengths": [
"HTML scraping",
"Package manifest synthesis",
"Optimization flag tuning"
],
"notes": "Activated when working with https://linuxfromscratch.org/~thomas/multilib-m32/ resources."
}
]

56
ai/tasks.json Normal file

@@ -0,0 +1,56 @@
{
"generated_at": "2025-03-09T00:00:00Z",
"unfinished": [
{
"id": "mlfs-package-import",
"title": "Import all MLFS packages into lpkg",
"description": "Parse the Multilib LFS book and scaffold package definitions with optimization defaults (LTO/PGO/-O3).",
"blocked_on": [
"Implement automated parser"
],
"owner": "mlfs_researcher"
},
{
"id": "pgo-integration",
"title": "Integrate profile guided optimization support",
"description": "Add infrastructure for collection and replay of profiling data during package builds.",
"blocked_on": [
"Decide on profiling workload definitions"
],
"owner": "default_cli"
},
{
"id": "lfs-html-parsers",
"title": "Automate LFS/BLFS/GLFS ingest via HTML parsing",
"description": "Avoid hardcoded package data; download the upstream books (LFS, BLFS, GLFS) and parse them to drive scaffolding and metadata updates.",
"blocked_on": [
"Design resilient scraping strategies for each book",
"Implement incremental update workflow"
],
"owner": "mlfs_researcher"
}
],
"solved": [
{
"id": "ai-metadata-store",
"title": "Create AI metadata directory",
"description": "Introduce ai/personas.json, ai/tasks.json, ai/bugs.json for persistent assistant context.",
"resolution": "Initial JSON files checked in with placeholder content.",
"owner": "default_cli"
},
{
"id": "metadata-schema-v0.1",
"title": "Define package metadata schema",
"description": "Specify JSON schema and layout for storing scraped package detail from LFS family books.",
"resolution": "Added ai/metadata/schema.json with v0.1.0 structure and seeded initial package entry/index.",
"owner": "default_cli"
},
{
"id": "metadata-indexer-cli",
"title": "Build metadata validation/indexing tool",
"description": "Provide a standalone CLI to validate package metadata against the schema and regenerate ai/metadata/index.json.",
"resolution": "Added src/bin/metadata_indexer.rs with schema validation, summary extraction, and index writer integration.",
"owner": "default_cli"
}
]
}

File diff suppressed because it is too large

117
docs/ARCHITECTURE.md Normal file

@@ -0,0 +1,117 @@
# Architecture Overview
This project is split into a reusable Rust library crate (`package_management`)
and several binaries that orchestrate day-to-day workflows. The sections below
outline the main entry points and how the supporting modules fit together.
## CLI entry points
| Binary | Location | Purpose |
| ------ | -------- | ------- |
| `lpkg` | `src/main.rs` | Primary command-line interface with workflow automation and optional TUI integration. |
| `metadata_indexer` | `src/bin/metadata_indexer.rs` | Harvests LFS/BLFS/GLFS package metadata, validates it against the JSON schema, and keeps `ai/metadata/index.json` up to date. |
### `lpkg` workflows
`lpkg` uses [Clap](https://docs.rs/clap) to expose multiple subcommands:
- `EnvCheck` fetches `<pre>` blocks from an LFS-style HTML page and runs the
embedded `ver_check` / `ver_kernel` scripts.
- `FetchManifests` downloads the book's canonical `wget-list` and `md5sums`
files and writes them to disk.
- `BuildBinutils` parses the Binutils Pass 1 page, mirrors the documented
build steps, and executes them in a Tokio runtime.
- `ScaffoldPackage` generates a new module under `src/pkgs/by_name/` with
optimisation defaults (LTO/PGO/`-O3`) and persists metadata via the DB
helpers.
- `ImportMlfs` walks the MLFS catalogue, scaffolding definitions and storing
them in the database (with optional `--dry-run`, `--limit`, and `--overwrite`).
When compiled with the `tui` feature flag, the CLI also exposes
`lpkg tui disk-manager`, which drops the user into the terminal UI defined in
`src/tui/`.
### `metadata_indexer`
The `metadata_indexer` binary is a companion tool for maintaining the JSON
artifacts under `ai/metadata/`:
- `validate` validates every `packages/**.json` file against
`ai/metadata/schema.json` and reports schema or summary extraction issues.
- `index` revalidates the metadata and regenerates
`ai/metadata/index.json` (use `--compact` for single-line JSON).
- `harvest` fetches a given book page, extracts build metadata, and emits a
schema-compliant JSON skeleton. When direct HTML parsing does not locate the
source tarball, it falls back to the jhalfs `wget-list` data to populate
`source.urls`.
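The `validate` step can be sketched as follows, assuming the `jsonschema`
0.17 API pinned in `Cargo.toml` (the actual wiring in
`src/bin/metadata_indexer.rs` may differ; `validate_record` is illustrative):

```rust
use anyhow::{Context, Result};
use jsonschema::JSONSchema;

/// Illustrative sketch: validate one package record against the schema.
fn validate_record(schema_path: &str, record_path: &str) -> Result<()> {
    let schema: serde_json::Value =
        serde_json::from_str(&std::fs::read_to_string(schema_path)?)
            .context("parsing schema.json")?;
    let record: serde_json::Value =
        serde_json::from_str(&std::fs::read_to_string(record_path)?)
            .context("parsing package record")?;
    let compiled = JSONSchema::compile(&schema)
        .map_err(|err| anyhow::anyhow!("invalid schema: {err}"))?;
    if let Err(errors) = compiled.validate(&record) {
        for error in errors {
            eprintln!("{record_path}: {error} (at {})", error.instance_path);
        }
        anyhow::bail!("{record_path} failed schema validation");
    }
    Ok(())
}
```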
## Module layout
```
src/
  ai/               // JSON loaders for repository personas, tasks, and bugs
  db/               // Diesel database setup and models
  html.rs           // Lightweight HTML helpers (fetch + parse <pre> blocks)
  ingest/           // Parsers for LFS / MLFS / BLFS / GLFS book content
  md5_utils.rs      // Fetches canonical md5sums from the book mirror
  mirrors.rs        // Lists official source mirrors for downloads
  pkgs/             // Package scaffolding and metadata definition helpers
  tui/              // Optional terminal UI (crossterm + tui)
  version_check.rs  // Executes ver_check / ver_kernel snippets
  wget_list.rs      // Fetches jhalfs-maintained wget-list manifests
  bin/metadata_indexer.rs  // AI metadata CLI described above
```
### Notable modules
- **`src/pkgs/scaffolder.rs`**
  - Generates filesystem modules and `PackageDefinition` records based on a
    `ScaffoldRequest`.
  - Normalises directory layout (prefix modules, `mod.rs` entries) and applies
    optimisation defaults (LTO, PGO, `-O3`).
- **`src/ingest/`**
  - Provides HTML parsers tailored to each book flavour (LFS, MLFS, BLFS,
    GLFS). The parsers emit `BookPackage` records consumed by the scaffolder
    and metadata importer.
- **`src/db/`**
  - Diesel models and schema for persisting package metadata. `lpkg` uses
    these helpers when scaffolding or importing packages.
- **`src/tui/`**
  - Houses the optional terminal interface (disk manager, main menu, settings,
    downloader). The entry points are conditionally compiled behind the `tui`
    cargo feature.
## Data & metadata assets
The repository keeps long-lived artifacts under `ai/`:
- `ai/metadata/`: JSON schema (`schema.json`), package records, and a generated
index (`index.json`). The `metadata_indexer` binary maintains these files.
- `ai/personas.json`, `ai/tasks.json`, `ai/bugs.json`: contextual data for
automated assistance.
- `ai/notes.md`: scratchpad for future work (e.g., jhalfs integration).
`data/` currently contains catalogues derived from the MLFS book and can be
extended with additional book snapshots.
## Database and persistence
The Diesel setup uses SQLite (via the `diesel` crate with `sqlite` and `r2d2`
features enabled). Connection pooling lives in `src/db/mod.rs` and is consumed
by workflows that scaffold or import packages.
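A short consumer sketch of those helpers (hypothetical call site;
`establish_pool` and `load_packages_via_pool` are the entry points added in
`src/db/mod.rs`):

```rust
use anyhow::Result;
use package_management::db;

fn list_known_packages() -> Result<()> {
    // establish_pool honours LPKG_DATABASE_URL (falling back to ./lpkg.db)
    // and creates the packages table plus its name/version unique index.
    let pool = db::establish_pool()?;
    for pkg in db::load_packages_via_pool(&pool)? {
        println!("{} {}", pkg.name, pkg.version);
    }
    Ok(())
}
```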
## Optional terminal UI
The TUI revolves around `DiskManager` (a crossterm + tui-based interface for
GPT partition inspection and creation). Additional stubs (`main_menu.rs`,
`settings.rs`, `downloader.rs`) are present for future expansion. The main CLI
falls back to `DiskManager::run_tui()` whenever `lpkg` is invoked without a
subcommand and is compiled with `--features tui`.
---
For more operational details around metadata harvesting, refer to
[`docs/METADATA_PIPELINE.md`](./METADATA_PIPELINE.md).

83
docs/METADATA_PIPELINE.md Normal file

@@ -0,0 +1,83 @@
# Metadata Harvesting Pipeline
This repository tracks AI-friendly package metadata under `ai/metadata/`.
The `metadata_indexer` binary orchestrates validation and harvesting tasks.
This document explains the workflow and the supporting assets.
## Directory layout
- `ai/metadata/schema.json`: JSON Schema (Draft 2020-12) describing one
package record.
- `ai/metadata/packages/<book>/<slug>.json`: harvested package metadata.
- `ai/metadata/index.json`: generated summary table linking package IDs to
their JSON files.
- `ai/notes.md`: scratchpad for future improvements (e.g., jhalfs integration).
## `metadata_indexer` commands
| Command | Description |
| ------- | ----------- |
| `validate` | Loads every package JSON file and validates it against `schema.json`. Reports schema violations and summary extraction errors. |
| `index` | Re-runs validation and regenerates `index.json`. Use `--compact` to write a single-line JSON payload. |
| `harvest` | Fetches a book page, scrapes build instructions, and emits a draft metadata record (to stdout with `--dry-run` or into `ai/metadata/packages/`). |
### Harvesting flow
1. **Fetch HTML**: the requested page is downloaded with `reqwest` and parsed
using `scraper` selectors.
2. **Heading metadata**: the `h1.sect1` title provides the chapter/section,
canonical package name, version, and optional variant hints.
3. **Build steps**: `<pre class="userinput">` blocks become ordered `build`
phases (`setup`, `configure`, `build`, `test`, `install`).
4. **Artifact stats**: `div.segmentedlist` entries supply SBU and disk usage.
5. **Source URLs**: the harvester tries two strategies:
   - Inline HTML links inside the page (common for BLFS articles).
   - Fallback to the jhalfs `wget-list` for the selected book (currently MLFS)
     using `package_management::wget_list::get_wget_list` to find matching
     `<package>-<version>` entries.
6. **Checksums**: integration with the book's `md5sums` mirror is pending;
placeholder wiring exists (`src/md5_utils.rs`).
7. **Status**: unresolved items (missing URLs, anchors, etc.) are recorded in
`status.issues` so humans can inspect or patch the draft before promoting it.
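The `provenance.content_hash` recorded alongside the fetch is a 64-character
hex digest per the schema; a minimal sketch, assuming it covers the raw page
body (`sha2` and `hex` are already dependencies):

```rust
use sha2::{Digest, Sha256};

/// Sketch: derive a 64-hex content hash for a fetched page.
/// Assumption: the hash covers the raw HTML exactly as downloaded.
fn content_hash(page_body: &str) -> String {
    hex::encode(Sha256::digest(page_body.as_bytes()))
}
```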
### Known gaps
- **Source links via tables**: some MLFS chapters list download links inside a
“Package Information” table. The current implementation relies on the
jhalfs `wget-list` fallback instead of parsing that table.
- **Checksums**: MD5 lookups from jhalfs are planned but not yet wired into
the harvest pipeline.
- **Anchor discovery**: if the heading lacks an explicit `id` attribute, the
scraper attempts to locate child anchors or scan the raw HTML. If none are
found, a warning is recorded and `status.issues` contains a reminder.
## Using jhalfs manifests
The maintained `wget-list`/`md5sums` files hosted by jhalfs provide canonical
source URLs and hashes. The helper modules `src/wget_list.rs` and
`src/md5_utils.rs` download these lists for the multilib LFS book. The
harvester currently consumes the wget-list as a fallback; integrating the
`md5sums` file will let us emit `source.checksums` automatically.
Planned enhancements (see `ai/notes.md` and `ai/bugs.json#metadata-harvest-no-source-urls`):
1. Abstract list fetching so BLFS/GLFS variants can reuse the logic.
2. Normalise the match criteria for package + version (handling pass stages,
suffixes, etc.).
3. Populate checksum entries alongside URLs.
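For the third item, a lookup sketch against a jhalfs `md5sums` manifest (each
line is `<md5> <file>`, as in `ai/metadata/cache/mlfs-md5sums.txt`;
`md5_for_archive` is illustrative, not an existing helper):

```rust
/// Sketch: find the md5 recorded for an archive in a jhalfs md5sums list.
fn md5_for_archive(md5sums: &str, archive: &str) -> Option<String> {
    md5sums.lines().find_map(|line| {
        let mut parts = line.split_whitespace();
        let hash = parts.next()?;
        let file = parts.next()?;
        (file == archive && hash.len() == 32).then(|| hash.to_string())
    })
}
```

Feeding the result into `source.checksums` as `{ "alg": "md5", "value": … }`
would keep the generated records schema-valid.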
## Manual review checklist
When a new metadata file is generated:
- `schema_version` should match `schema.json` (currently `v0.1.0`).
- `package.id` should be unique (format `<book>/<slug>`).
- `source.urls` must include at least one primary URL; add mirrors/patches as
needed.
- Clear any `status.issues` before promoting the record from `draft`.
- Run `cargo run --bin metadata_indexer -- --base-dir . index` to regenerate
the global index once the draft is finalised.
Refer to `README.md` for usage examples and to `docs/ARCHITECTURE.md` for a
broader overview of the crate layout.

79
src/ai/mod.rs Normal file

@@ -0,0 +1,79 @@
use std::path::{Path, PathBuf};
use anyhow::Result;
use serde::Deserialize;
/// Loads assistant persona metadata from `ai/personas.json`.
pub fn load_personas(base_dir: impl AsRef<Path>) -> Result<Vec<Persona>> {
let path = resolve(base_dir, "personas.json");
read_json(path)
}
/// Loads the tracked task board from `ai/tasks.json`.
pub fn load_tasks(base_dir: impl AsRef<Path>) -> Result<TaskBoard> {
let path = resolve(base_dir, "tasks.json");
read_json(path)
}
/// Loads the current bug ledger from `ai/bugs.json`.
pub fn load_bugs(base_dir: impl AsRef<Path>) -> Result<Vec<Bug>> {
let path = resolve(base_dir, "bugs.json");
read_json(path)
}
fn resolve(base_dir: impl AsRef<Path>, file: &str) -> PathBuf {
base_dir.as_ref().join("ai").join(file)
}
fn read_json<T>(path: PathBuf) -> Result<T>
where
T: for<'de> Deserialize<'de>,
{
let data = std::fs::read_to_string(&path)?;
Ok(serde_json::from_str(&data)?)
}
#[derive(Debug, Deserialize)]
pub struct Persona {
pub id: String,
pub name: String,
pub description: String,
#[serde(default)]
pub strengths: Vec<String>,
#[serde(default)]
pub notes: String,
}
#[derive(Debug, Deserialize)]
pub struct TaskBoard {
pub generated_at: String,
pub unfinished: Vec<Task>,
pub solved: Vec<Task>,
}
#[derive(Debug, Deserialize)]
pub struct Task {
pub id: String,
pub title: String,
pub description: String,
#[serde(default)]
pub blocked_on: Vec<String>,
#[serde(default)]
pub owner: Option<String>,
#[serde(default)]
pub resolution: Option<String>,
}
#[derive(Debug, Deserialize)]
pub struct Bug {
pub id: String,
pub title: String,
pub description: String,
pub status: String,
#[serde(default)]
pub owner: Option<String>,
#[serde(default)]
pub created_at: Option<String>,
#[serde(default)]
pub labels: Vec<String>,
}

1017
src/bin/metadata_indexer.rs Normal file

File diff suppressed because it is too large

107
src/db/mod.rs Normal file

@@ -0,0 +1,107 @@
pub mod models;
pub mod schema;
use std::env;
use anyhow::{Context, Result};
use diesel::prelude::*;
use diesel::r2d2::{self, ConnectionManager};
use diesel::sqlite::SqliteConnection;
use crate::pkgs::package::PackageDefinition;
use self::models::{NewPackage, Package};
use self::schema::packages::dsl as packages_dsl;
pub type Pool = r2d2::Pool<ConnectionManager<SqliteConnection>>;
pub type Connection = r2d2::PooledConnection<ConnectionManager<SqliteConnection>>;
const DEFAULT_DB_URL: &str = "lpkg.db";
/// Resolve the database URL from `LPKG_DATABASE_URL` or fall back to `lpkg.db` in the CWD.
pub fn database_url() -> String {
env::var("LPKG_DATABASE_URL").unwrap_or_else(|_| DEFAULT_DB_URL.to_string())
}
/// Build an r2d2 connection pool and ensure the schema exists.
pub fn establish_pool() -> Result<Pool> {
let manager = ConnectionManager::<SqliteConnection>::new(database_url());
let pool = Pool::builder()
.build(manager)
.context("creating Diesel connection pool")?;
{
let mut conn = pool
.get()
.context("establishing initial database connection")?;
initialize(&mut conn)?;
}
Ok(pool)
}
fn initialize(conn: &mut SqliteConnection) -> Result<()> {
diesel::sql_query(
"CREATE TABLE IF NOT EXISTS packages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
version TEXT NOT NULL,
source TEXT,
md5 TEXT,
configure_args TEXT,
build_commands TEXT,
install_commands TEXT,
dependencies TEXT,
enable_lto BOOLEAN NOT NULL DEFAULT 1,
enable_pgo BOOLEAN NOT NULL DEFAULT 1,
cflags TEXT,
ldflags TEXT,
profdata TEXT
)",
)
.execute(conn)
.context("creating packages table")?;
diesel::sql_query(
"CREATE UNIQUE INDEX IF NOT EXISTS idx_packages_name_version ON packages(name, version)",
)
.execute(conn)
.context("creating packages unique index")?;
Ok(())
}
/// Insert or update a package definition in the database.
pub fn upsert_package(conn: &mut SqliteConnection, definition: &PackageDefinition) -> Result<()> {
let record = NewPackage::try_from(definition)?;
diesel::insert_into(packages_dsl::packages)
.values(&record)
.on_conflict((packages_dsl::name, packages_dsl::version))
.do_update()
.set(&record)
.execute(conn)
.context("upserting package record")?;
Ok(())
}
/// Convenience helper to upsert via pool and return the persisted definition.
pub fn upsert_package_via_pool(pool: &Pool, definition: &PackageDefinition) -> Result<()> {
let mut conn = pool.get().context("acquiring database connection")?;
upsert_package(&mut conn, definition)
}
/// Load all packages from the database.
pub fn load_packages(conn: &mut SqliteConnection) -> Result<Vec<Package>> {
packages_dsl::packages
.order((packages_dsl::name, packages_dsl::version))
.load::<Package>(conn)
.context("loading packages from database")
}
/// Load packages using the shared pool.
pub fn load_packages_via_pool(pool: &Pool) -> Result<Vec<Package>> {
let mut conn = pool.get().context("acquiring database connection")?;
load_packages(&mut conn)
}

104
src/db/models.rs Normal file

@@ -0,0 +1,104 @@
use anyhow::{Context, Result};
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
use crate::pkgs::package::PackageDefinition;
use super::schema::packages;
#[derive(Debug, Queryable, Serialize, Deserialize)]
pub struct Package {
pub id: i32,
pub name: String,
pub version: String,
pub source: Option<String>,
pub md5: Option<String>,
pub configure_args: Option<String>,
pub build_commands: Option<String>,
pub install_commands: Option<String>,
pub dependencies: Option<String>,
pub enable_lto: bool,
pub enable_pgo: bool,
pub cflags: Option<String>,
pub ldflags: Option<String>,
pub profdata: Option<String>,
}
impl Package {
pub fn into_definition(self) -> Result<PackageDefinition> {
Ok(PackageDefinition {
name: self.name,
version: self.version,
source: self.source,
md5: self.md5,
configure_args: parse_vec(self.configure_args)?,
build_commands: parse_vec(self.build_commands)?,
install_commands: parse_vec(self.install_commands)?,
dependencies: parse_vec(self.dependencies)?,
optimizations: crate::pkgs::package::OptimizationSettings {
enable_lto: self.enable_lto,
enable_pgo: self.enable_pgo,
cflags: parse_vec(self.cflags)?,
ldflags: parse_vec(self.ldflags)?,
profdata: self.profdata,
},
})
}
}
#[derive(Debug, Insertable, AsChangeset)]
#[diesel(table_name = packages)]
pub struct NewPackage {
pub name: String,
pub version: String,
pub source: Option<String>,
pub md5: Option<String>,
pub configure_args: Option<String>,
pub build_commands: Option<String>,
pub install_commands: Option<String>,
pub dependencies: Option<String>,
pub enable_lto: bool,
pub enable_pgo: bool,
pub cflags: Option<String>,
pub ldflags: Option<String>,
pub profdata: Option<String>,
}
impl TryFrom<&PackageDefinition> for NewPackage {
type Error = anyhow::Error;
fn try_from(value: &PackageDefinition) -> Result<Self> {
Ok(Self {
name: value.name.clone(),
version: value.version.clone(),
source: value.source.clone(),
md5: value.md5.clone(),
configure_args: serialize_vec(&value.configure_args)?,
build_commands: serialize_vec(&value.build_commands)?,
install_commands: serialize_vec(&value.install_commands)?,
dependencies: serialize_vec(&value.dependencies)?,
enable_lto: value.optimizations.enable_lto,
enable_pgo: value.optimizations.enable_pgo,
cflags: serialize_vec(&value.optimizations.cflags)?,
ldflags: serialize_vec(&value.optimizations.ldflags)?,
profdata: value.optimizations.profdata.clone(),
})
}
}
fn serialize_vec(values: &[String]) -> Result<Option<String>> {
if values.is_empty() {
Ok(None)
} else {
serde_json::to_string(values)
.map(Some)
.context("serializing vector to JSON")
}
}
fn parse_vec(raw: Option<String>) -> Result<Vec<String>> {
match raw {
Some(data) => serde_json::from_str(&data).context("parsing JSON vector"),
None => Ok(Vec::new()),
}
}

19
src/db/schema.rs Normal file

@@ -0,0 +1,19 @@
// Diesel schema for package storage. Maintained manually to avoid build-script dependency.
diesel::table! {
packages (id) {
id -> Integer,
name -> Text,
version -> Text,
source -> Nullable<Text>,
md5 -> Nullable<Text>,
configure_args -> Nullable<Text>,
build_commands -> Nullable<Text>,
install_commands -> Nullable<Text>,
dependencies -> Nullable<Text>,
enable_lto -> Bool,
enable_pgo -> Bool,
cflags -> Nullable<Text>,
ldflags -> Nullable<Text>,
profdata -> Nullable<Text>,
}
}

113
src/ingest/blfs.rs Normal file

@@ -0,0 +1,113 @@
use anyhow::{Context, Result};
use regex::Regex;
use reqwest::blocking::Client;
use scraper::{Html, Selector};
use super::{BookPackage, FetchOptions};
use crate::ingest::lfs::split_name_version;
pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
let base = options.base_url.trim_end_matches('/');
let url = format!("{base}/book.html");
let client = Client::builder().build().context("building HTTP client")?;
let body = client
.get(&url)
.send()
.with_context(|| format!("fetching {}", url))?
.error_for_status()
.with_context(|| format!("request failed for {}", url))?
.text()
.context("reading response body")?;
parse_book_html(options, &url, &body)
}
pub fn parse_book_html(
options: &FetchOptions,
book_url: &str,
body: &str,
) -> Result<Vec<BookPackage>> {
let document = Html::parse_document(body);
let selector = Selector::parse("h1.sect1").unwrap();
let numbering_re =
Regex::new(r"^(?P<chapter>\d+)\.(?P<section>\d+)\.\s+(?P<title>.+)$").unwrap();
let mut results = Vec::new();
for heading in document.select(&selector) {
let text = heading
.text()
.map(str::trim)
.filter(|s| !s.is_empty())
.collect::<Vec<_>>()
.join(" ")
.replace('\n', " ")
.trim()
.to_string();
if text.is_empty() {
continue;
}
// BLFS headings often look like "33.2. Bzip2" or "33.2. Bzip2-1.0.8"
let caps = match numbering_re.captures(&text) {
Some(caps) => caps,
None => continue,
};
let chapter_num: u32 = caps["chapter"].parse().unwrap_or(0);
let section_num: u32 = caps["section"].parse().unwrap_or(0);
let title = caps["title"].trim();
let (name, version, variant) = match split_name_version(title) {
Some(parts) => parts,
None => continue,
};
let href = heading.value().id().map(|id| {
let mut base = book_url.to_string();
if !base.contains('#') {
base.push('#');
}
format!("{}{}", base, id)
});
let section_label = Some(format!("{}.{}", chapter_num, section_num));
results.push(BookPackage {
book: options.book,
chapter: Some(chapter_num),
section: section_label,
name,
version: Some(version),
href,
md5: None,
stage: None,
variant,
notes: None,
});
}
Ok(results)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::ingest::BookKind;
#[test]
fn parse_blfs_sample() {
let html = r#"
<html><body>
<h1 class=\"sect1\" id=\"ch33-bzip2\">33.2. Bzip2-1.0.8</h1>
<h1 class=\"sect1\" id=\"ch33-about\">33.1. Introduction</h1>
</body></html>
"#;
let opts = FetchOptions::new("https://example.invalid/blfs", BookKind::Blfs);
let items = parse_book_html(&opts, "https://example.invalid/blfs/book.html", html).unwrap();
assert_eq!(items.len(), 1);
assert_eq!(items[0].name, "Bzip2");
assert_eq!(items[0].version.as_deref(), Some("1.0.8"));
}
}

109
src/ingest/glfs.rs Normal file

@@ -0,0 +1,109 @@
use anyhow::{Context, Result};
use regex::Regex;
use reqwest::blocking::Client;
use scraper::{Html, Selector};
use super::{BookPackage, FetchOptions};
use crate::ingest::lfs::split_name_version;
pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
let base = options.base_url.trim_end_matches('/');
let url = format!("{base}/book.html");
let client = Client::builder().build().context("building HTTP client")?;
let body = client
.get(&url)
.send()
.with_context(|| format!("fetching {}", url))?
.error_for_status()
.with_context(|| format!("request failed for {}", url))?
.text()
.context("reading response body")?;
parse_book_html(options, &url, &body)
}
pub fn parse_book_html(
options: &FetchOptions,
book_url: &str,
body: &str,
) -> Result<Vec<BookPackage>> {
let document = Html::parse_document(body);
let selector = Selector::parse("h1.sect1").unwrap();
let numbering_re =
Regex::new(r"^(?P<chapter>\d+)\.(?P<section>\d+)\.\s+(?P<title>.+)$").unwrap();
let mut results = Vec::new();
for heading in document.select(&selector) {
let text = heading
.text()
.map(str::trim)
.filter(|s| !s.is_empty())
.collect::<Vec<_>>()
.join(" ")
.replace('\n', " ")
.trim()
.to_string();
if text.is_empty() {
continue;
}
let caps = match numbering_re.captures(&text) {
Some(caps) => caps,
None => continue,
};
let chapter_num: u32 = caps["chapter"].parse().unwrap_or(0);
let section_num: u32 = caps["section"].parse().unwrap_or(0);
let title = caps["title"].trim();
let (name, version, variant) = match split_name_version(title) {
Some(parts) => parts,
None => continue,
};
let href = heading.value().id().map(|id| {
let mut base = book_url.to_string();
if !base.contains('#') {
base.push('#');
}
format!("{}{}", base, id)
});
results.push(BookPackage {
book: options.book,
chapter: Some(chapter_num),
section: Some(format!("{}.{}", chapter_num, section_num)),
name,
version: Some(version),
href,
md5: None,
stage: None,
variant,
notes: None,
});
}
Ok(results)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::ingest::BookKind;
#[test]
fn parse_glfs_sample() {
let html = r#"
<html><body>
<h1 class=\"sect1\" id=\"ch12-coreutils\">12.4. Coreutils-9.8</h1>
</body></html>
"#;
let opts = FetchOptions::new("https://example.invalid/glfs", BookKind::Glfs);
let items = parse_book_html(&opts, "https://example.invalid/glfs/book.html", html).unwrap();
assert_eq!(items.len(), 1);
assert_eq!(items[0].name, "Coreutils");
assert_eq!(items[0].version.as_deref(), Some("9.8"));
}
}

169
src/ingest/lfs.rs Normal file

@@ -0,0 +1,169 @@
use anyhow::{Context, Result};
use regex::Regex;
use reqwest::blocking::Client;
use scraper::{Html, Selector};
use super::{BookPackage, FetchOptions};
pub fn fetch_book(options: &FetchOptions) -> Result<Vec<BookPackage>> {
let base = options.base_url.trim_end_matches('/');
let url = format!("{base}/book.html");
let client = Client::builder().build().context("building HTTP client")?;
let body = client
.get(&url)
.send()
.with_context(|| format!("fetching {}", url))?
.error_for_status()
.with_context(|| format!("request failed for {}", url))?
.text()
.context("reading response body")?;
parse_book_html(options, &url, &body)
}
pub fn parse_book_html(
options: &FetchOptions,
book_url: &str,
body: &str,
) -> Result<Vec<BookPackage>> {
let document = Html::parse_document(body);
let selector = Selector::parse("h1.sect1").unwrap();
let numbering_re =
Regex::new(r"^(?P<chapter>\d+)\.(?P<section>\d+)\.\s+(?P<title>.+)$").unwrap();
let mut results = Vec::new();
for heading in document.select(&selector) {
let text = heading
.text()
.map(str::trim)
.filter(|s| !s.is_empty())
.collect::<Vec<_>>()
.join(" ")
.replace('\n', " ")
.trim()
.to_string();
if text.is_empty() {
continue;
}
let caps = match numbering_re.captures(&text) {
Some(caps) => caps,
None => continue,
};
let chapter_num: u32 = caps["chapter"].parse().unwrap_or(0);
let section_num: u32 = caps["section"].parse().unwrap_or(0);
let title = caps["title"].trim();
let (name, version, variant) = match split_name_version(title) {
Some(parts) => parts,
None => continue,
};
let stage = stage_for_chapter(chapter_num).map(|s| s.to_string());
let identifier = format!("{chapter_num}.{section_num:02}");
let href = heading.value().id().map(|id| {
let mut base = book_url.to_string();
if !base.contains('#') {
base.push('#');
}
format!("{}{}", base, id)
});
results.push(BookPackage {
book: options.book,
chapter: Some(chapter_num),
section: Some(identifier),
name,
version: Some(version),
href,
md5: None,
stage,
variant,
notes: None,
});
}
Ok(results)
}
pub(crate) fn split_name_version(title: &str) -> Option<(String, String, Option<String>)> {
// Find the last '-' whose next character is a digit (start of version)
let bytes = title.as_bytes();
for idx in (0..bytes.len()).rev() {
if bytes[idx] == b'-' {
if let Some(next) = bytes.get(idx + 1) {
if next.is_ascii_digit() {
let name = title[..idx].trim();
let mut remainder = title[idx + 1..].trim();
if name.is_empty() || remainder.is_empty() {
return None;
}
let mut variant = None;
if let Some(pos) = remainder.find(" - ") {
variant = Some(remainder[pos + 3..].trim().to_string());
remainder = remainder[..pos].trim();
} else if let Some(pos) = remainder.find(" (") {
let note = remainder[pos + 1..].trim_end_matches(')').trim();
variant = Some(note.to_string());
remainder = remainder[..pos].trim();
}
return Some((name.to_string(), remainder.to_string(), variant));
}
}
}
}
None
}
fn stage_for_chapter(chapter: u32) -> Option<&'static str> {
match chapter {
5 => Some("cross-toolchain"),
6 | 7 => Some("temporary-tools"),
8 => Some("system"),
9 => Some("system-configuration"),
10 => Some("system-finalization"),
_ => None,
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::ingest::BookKind;
use scraper::{Html, Selector};
#[test]
fn parse_sample_headings() {
let html = r#"
<html><body>
<h1 class=\"sect1\" id=\"ch05-binutils-pass1\">5.5. Binutils-2.45 - Pass 1</h1>
<h1 class=\"sect1\" id=\"ch05-gcc-pass1\">5.6. GCC-15.2.0 - Pass 1</h1>
<h1 class=\"sect1\" id=\"ch09-bootscripts\">9.3. LFS-Bootscripts-20250827</h1>
<h1 class=\"sect1\" id=\"ch08-xml-parser\">8.41. XML::Parser-2.47</h1>
</body></html>
"#;
let opts = FetchOptions::new("https://example.invalid/lfs", BookKind::Mlfs);
let document = Html::parse_document(html);
let selector = Selector::parse("h1.sect1").unwrap();
assert!(
document.select(&selector).next().is_some(),
"sample headings selector returned no nodes"
);
let packages =
parse_book_html(&opts, "https://example.invalid/lfs/book.html", html).unwrap();
assert_eq!(packages.len(), 4);
assert_eq!(packages[0].name, "Binutils");
assert_eq!(packages[0].version.as_deref(), Some("2.45"));
assert_eq!(packages[0].variant.as_deref(), Some("Pass 1"));
assert_eq!(packages[0].stage.as_deref(), Some("cross-toolchain"));
assert_eq!(packages[1].variant.as_deref(), Some("Pass 1"));
assert_eq!(packages[2].variant, None);
assert_eq!(packages[3].name, "XML::Parser");
}
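    // Added sketch: exercises `split_name_version` directly on the heading
    // formats used above.
    #[test]
    fn split_name_version_handles_variants() {
        assert_eq!(
            split_name_version("Binutils-2.45 - Pass 1"),
            Some((
                "Binutils".to_string(),
                "2.45".to_string(),
                Some("Pass 1".to_string())
            ))
        );
        assert_eq!(
            split_name_version("LFS-Bootscripts-20250827"),
            Some(("LFS-Bootscripts".to_string(), "20250827".to_string(), None))
        );
        assert_eq!(split_name_version("No version heading"), None);
    }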
}

src/ingest/mod.rs Normal file

@@ -0,0 +1,67 @@
pub mod blfs;
pub mod glfs;
pub mod lfs;
use std::fmt;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum BookKind {
Lfs,
Mlfs,
Blfs,
Glfs,
}
impl fmt::Display for BookKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let label = match self {
BookKind::Lfs => "lfs",
BookKind::Mlfs => "mlfs",
BookKind::Blfs => "blfs",
BookKind::Glfs => "glfs",
};
f.write_str(label)
}
}
#[derive(Debug, Clone)]
pub struct BookPackage {
pub book: BookKind,
pub chapter: Option<u32>,
pub section: Option<String>,
pub name: String,
pub version: Option<String>,
pub href: Option<String>,
pub md5: Option<String>,
pub stage: Option<String>,
pub variant: Option<String>,
pub notes: Option<String>,
}
impl BookPackage {
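    /// Stable identifier used for module and database keys, e.g. Binutils
    /// "Pass 1" from the MLFS book becomes `mlfs-Binutils-pass-1`.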
pub fn identifier(&self) -> String {
match &self.variant {
Some(variant) if !variant.is_empty() => {
format!(
"{}-{}-{}",
self.book,
self.name,
variant.replace(' ', "-").to_lowercase()
)
}
_ => format!("{}-{}", self.book, self.name),
}
}
}
#[derive(Debug, Clone)]
pub struct FetchOptions<'a> {
pub base_url: &'a str,
pub book: BookKind,
}
impl<'a> FetchOptions<'a> {
pub fn new(base_url: &'a str, book: BookKind) -> Self {
Self { base_url, book }
}
}

src/lib.rs

@@ -1,2 +1,12 @@
pub mod ai;
pub mod db;
pub mod html;
pub mod ingest;
pub mod md5_utils;
pub mod mirrors;
pub mod pkgs;
pub mod version_check;
pub mod wget_list;
#[cfg(feature = "tui")]
pub mod tui;

src/main.rs

@@ -1,6 +1,427 @@
use std::{collections::BTreeSet, env, fs, path::PathBuf};
use anyhow::{Context, Result, anyhow};
use clap::{CommandFactory, Parser, Subcommand};
use package_management::{
db, html, md5_utils,
pkgs::{
by_name::bi::binutils::cross_toolchain::build_binutils_from_page,
mlfs,
scaffolder::{self, ScaffoldRequest},
},
version_check, wget_list,
};
#[cfg(feature = "tui")]
use package_management::tui::disk_manager::DiskManager;
#[derive(Parser)]
#[command(name = "lpkg", version, about = "LPKG Lightweight Package Manager", long_about = None)]
struct Cli {
/// Command to run. Defaults to launching the TUI (when available).
#[command(subcommand)]
command: Option<Command>,
}
#[derive(Subcommand)]
enum Command {
/// Run one of the automated workflows.
Workflow {
#[command(subcommand)]
workflow: WorkflowCommand,
},
/// Launch interactive terminal UIs.
#[cfg(feature = "tui")]
#[command(subcommand)]
Tui(TuiCommand),
}
#[derive(Subcommand)]
enum WorkflowCommand {
/// Fetch <pre> blocks from the given URL and run version checks found inside them.
EnvCheck {
/// URL of the Linux From Scratch page containing ver_check/ver_kernel snippets.
url: String,
},
/// Download the LFS wget-list and md5sums, optionally writing them to disk.
FetchManifests {
/// Output directory to store wget-list and md5sums files. Uses current dir if omitted.
#[arg(long)]
output: Option<PathBuf>,
},
/// Parse the Binutils Pass 1 page and build it using the extracted steps.
BuildBinutils {
/// URL of the Binutils Pass 1 instructions to parse.
url: String,
/// Root directory of the LFS workspace (used for $LFS paths).
#[arg(long = "lfs-root")]
lfs_root: PathBuf,
/// Optional explicit cross-compilation target (defaults to $LFS_TGT env or x86_64-lfs-linux-gnu).
#[arg(long)]
target: Option<String>,
},
/// Scaffold a new package module under `src/pkgs/by_name` with tuned optimizations.
ScaffoldPackage {
/// Logical package name (used for module layout and metadata).
#[arg(long)]
name: String,
/// Upstream version string.
#[arg(long)]
version: String,
/// Optional source archive URL.
#[arg(long)]
source: Option<String>,
/// Optional MD5 checksum of the source archive.
#[arg(long)]
md5: Option<String>,
/// Additional configure arguments (repeat flag).
#[arg(long = "configure-arg", value_name = "ARG")]
configure_arg: Vec<String>,
/// Build commands (repeat flag).
#[arg(long = "build-cmd", value_name = "CMD")]
build_cmd: Vec<String>,
/// Install commands (repeat flag).
#[arg(long = "install-cmd", value_name = "CMD")]
install_cmd: Vec<String>,
/// Declared dependencies (repeat flag).
#[arg(long = "dependency", value_name = "PKG")]
dependency: Vec<String>,
/// Whether to enable LTO (defaults to true).
#[arg(long = "enable-lto", default_value_t = true)]
enable_lto: bool,
/// Whether to enable PGO instrumentation/use (defaults to true).
#[arg(long = "enable-pgo", default_value_t = true)]
enable_pgo: bool,
/// Additional CFLAGS (repeat flag).
#[arg(long = "cflag", value_name = "FLAG")]
cflag: Vec<String>,
/// Additional LDFLAGS (repeat flag).
#[arg(long = "ldflag", value_name = "FLAG")]
ldflag: Vec<String>,
/// Optional profile data file name for PGO replay (enables -fprofile-use).
#[arg(long)]
profdata: Option<String>,
/// Base directory for module generation (defaults to src/pkgs/by_name).
#[arg(long, default_value = "src/pkgs/by_name")]
base: PathBuf,
},
/// Import all packages from the MLFS catalogue, scaffolding modules and persisting metadata.
ImportMlfs {
/// Perform a dry run without writing files or touching the database.
#[arg(long, default_value_t = false)]
dry_run: bool,
/// Only process the first N records (after deduplication).
#[arg(long)]
limit: Option<usize>,
/// Base directory for module generation (defaults to src/pkgs/by_name).
#[arg(long, default_value = "src/pkgs/by_name")]
base: PathBuf,
/// Overwrite existing modules by deleting and regenerating them.
#[arg(long, default_value_t = false)]
overwrite: bool,
/// Source URL for the MLFS book (defaults to the canonical mirror).
#[arg(long = "source-url")]
source_url: Option<String>,
},
}
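// Example invocations (sketch; subcommand names follow clap's default
// kebab-casing of the variants above):
//   lpkg workflow env-check <url>
//   lpkg workflow fetch-manifests --output ./manifests
//   lpkg workflow import-mlfs --dry-run --limit 10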
#[cfg(feature = "tui")]
#[derive(Subcommand)]
enum TuiCommand {
/// Launch the disk manager UI.
DiskManager,
}
fn main() -> Result<()> {
let _ = tracing_subscriber::fmt::try_init();
let cli = Cli::parse();
match cli.command {
Some(Command::Workflow { workflow }) => run_workflow(workflow)?,
#[cfg(feature = "tui")]
Some(Command::Tui(cmd)) => run_tui(cmd)?,
None => {
#[cfg(feature = "tui")]
{
println!(
"No command specified. Launching disk manager TUI. Use `lpkg help` for more options."
);
DiskManager::run_tui().map_err(|e| anyhow!(e.to_string()))?;
}
#[cfg(not(feature = "tui"))]
{
Cli::command().print_help()?;
println!();
}
}
}
Ok(())
}
fn run_workflow(cmd: WorkflowCommand) -> Result<()> {
match cmd {
WorkflowCommand::EnvCheck { url } => {
let pre_blocks = html::fetch_pre_blocks(&url)
.with_context(|| format!("Fetching HTML `<pre>` blocks from {url}"))?;
let mut ran_any = false;
let mut failures = Vec::new();
for (idx, block) in pre_blocks.iter().enumerate() {
if !(block.contains("ver_check") || block.contains("ver_kernel")) {
continue;
}
ran_any = true;
println!("Running version checks from block #{idx}...");
if !version_check::run_version_checks_from_block(block) {
failures.push(idx + 1);
}
}
if !ran_any {
return Err(anyhow!(
"No ver_check or ver_kernel snippets found at {url}."
));
}
if !failures.is_empty() {
return Err(anyhow!("Version checks failed in block(s): {:?}", failures));
}
println!("All version checks passed 👍");
}
WorkflowCommand::FetchManifests { output } => {
let wget_list = wget_list::get_wget_list().context("Fetching wget-list")?;
let md5sums = md5_utils::get_md5sums().context("Fetching md5sums")?;
println!("Fetched wget-list ({} bytes)", wget_list.len());
println!("Fetched md5sums ({} bytes)", md5sums.len());
let target_dir = output.unwrap_or(std::env::current_dir()?);
fs::create_dir_all(&target_dir)
.with_context(|| format!("Creating output directory at {:?}", target_dir))?;
let wget_path = target_dir.join("wget-list");
let md5_path = target_dir.join("md5sums");
fs::write(&wget_path, wget_list).with_context(|| format!("Writing {wget_path:?}"))?;
fs::write(&md5_path, md5sums).with_context(|| format!("Writing {md5_path:?}"))?;
println!("Saved artifacts to {:?} and {:?}", wget_path, md5_path);
}
WorkflowCommand::BuildBinutils {
url,
lfs_root,
target,
} => {
let runtime = tokio::runtime::Runtime::new().context("Creating async runtime")?;
runtime
.block_on(build_binutils_from_page(&url, &lfs_root, target))
.map_err(|e| anyhow!("Building Binutils using instructions from {url}: {e}"))?;
println!("Binutils workflow completed successfully");
}
WorkflowCommand::ScaffoldPackage {
name,
version,
source,
md5,
configure_arg,
build_cmd,
install_cmd,
dependency,
enable_lto,
enable_pgo,
cflag,
ldflag,
profdata,
base,
} => {
let base_dir = if base.is_relative() {
env::current_dir()
.context("Resolving scaffold base directory")?
.join(base)
} else {
base
};
let request = ScaffoldRequest {
name: name.clone(),
version: version.clone(),
source,
md5,
configure_args: configure_arg,
build_commands: build_cmd,
install_commands: install_cmd,
dependencies: dependency,
enable_lto,
enable_pgo,
cflags: cflag,
ldflags: ldflag,
profdata,
stage: None,
variant: None,
notes: None,
module_override: None,
};
let scaffold = scaffolder::scaffold_package(&base_dir, request)
.with_context(|| format!("Scaffolding package {name}"))?;
let pool = db::establish_pool().context("Setting up package database")?;
db::upsert_package_via_pool(&pool, &scaffold.definition)
.with_context(|| format!("Persisting package metadata for {name}"))?;
println!("Generated module: {:?}", scaffold.module_path);
println!(
"Remember to stage and commit as `{name}: init at {version}` after reviewing the template"
);
}
WorkflowCommand::ImportMlfs {
dry_run,
limit,
base,
overwrite,
source_url,
} => {
let base_dir = if base.is_relative() {
env::current_dir()
.context("Resolving MLFS scaffold base directory")?
.join(base)
} else {
base
};
let mut records = mlfs::load_or_fetch_catalog(source_url.as_deref())
.context("Loading MLFS catalogue")?;
records.sort_by(|a, b| a.name.cmp(&b.name).then(a.variant.cmp(&b.variant)));
let mut seen = BTreeSet::new();
let mut processed = 0usize;
let mut created = 0usize;
let mut skipped = Vec::new();
let pool = if dry_run {
None
} else {
Some(db::establish_pool().context("Setting up package database")?)
};
for record in records {
let module_alias = record.module_alias();
if !seen.insert(module_alias.clone()) {
continue;
}
if let Some(limit) = limit {
if processed >= limit {
break;
}
}
processed += 1;
if dry_run {
println!(
"Would scaffold {:<18} {:<12} -> {}",
record.name, record.version, module_alias
);
continue;
}
let request = ScaffoldRequest {
name: record.name.clone(),
version: record.version.clone(),
source: None,
md5: None,
configure_args: Vec::new(),
build_commands: Vec::new(),
install_commands: Vec::new(),
dependencies: Vec::new(),
enable_lto: true,
enable_pgo: true,
cflags: Vec::new(),
ldflags: Vec::new(),
profdata: None,
stage: record.stage.clone(),
variant: record.variant.clone(),
notes: record.notes.clone(),
module_override: Some(module_alias.clone()),
};
match scaffolder::scaffold_package(&base_dir, request) {
Ok(result) => {
if let Some(pool) = &pool {
db::upsert_package_via_pool(pool, &result.definition).with_context(
|| {
format!(
"Persisting MLFS package metadata for {} {}",
record.name, record.version
)
},
)?;
}
println!(
"Scaffolded {:<18} {:<12} -> {}",
record.name, record.version, module_alias
);
created += 1;
}
Err(err) => {
let already_exists =
err.to_string().to_lowercase().contains("already exists");
if already_exists && !overwrite {
skipped.push(module_alias);
} else {
return Err(err);
}
}
}
}
if dry_run {
println!(
"Dry run complete. {} package definitions queued.",
processed
);
} else {
println!(
"MLFS import complete. Created {} modules, skipped {} (already existed).",
created,
skipped.len()
);
if !skipped.is_empty() {
println!(
"Skipped modules: {}",
skipped
.iter()
.take(10)
.cloned()
.collect::<Vec<_>>()
.join(", ")
);
if skipped.len() > 10 {
println!("... and {} more", skipped.len() - 10);
}
}
}
}
}
Ok(())
}
#[cfg(feature = "tui")]
fn run_tui(cmd: TuiCommand) -> Result<()> {
match cmd {
TuiCommand::DiskManager => {
DiskManager::run_tui().map_err(|e| anyhow!(e.to_string()))?;
}
}
    Ok(())
}

src/pkgs/mlfs.rs Normal file

@@ -0,0 +1,116 @@
use std::borrow::Cow;
use anyhow::{Context, Result, anyhow};
use serde::{Deserialize, Serialize};
use crate::ingest::{BookKind, BookPackage, FetchOptions, lfs};
use crate::pkgs::package::PackageDefinition;
pub const DEFAULT_MLFS_BASE_URL: &str = "https://linuxfromscratch.org/~thomas/multilib-m32";
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MlfsPackageRecord {
pub name: String,
pub version: String,
pub chapter: Option<u32>,
pub section: Option<String>,
#[serde(default)]
pub stage: Option<String>,
#[serde(default)]
pub variant: Option<String>,
#[serde(default)]
pub notes: Option<String>,
}
impl MlfsPackageRecord {
pub fn id(&self) -> String {
let mut id = self.name.replace('+', "plus");
if let Some(variant) = &self.variant {
id.push('_');
id.push_str(&variant.replace('-', "_"));
}
id
}
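    /// Filesystem- and module-safe form of `id()`: dots, slashes, and spaces
    /// become underscores and the result is lowercased, so `GCC` with variant
    /// `Pass 1` yields `gcc_pass_1`.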
pub fn module_alias(&self) -> String {
self.id()
.replace('.', "_")
.replace('/', "_")
.replace(' ', "_")
.to_lowercase()
}
pub fn display_label(&self) -> Cow<'_, str> {
match (&self.section, &self.variant) {
(Some(section), Some(variant)) => Cow::from(format!("{} ({})", section, variant)),
(Some(section), None) => Cow::from(section.as_str()),
(None, Some(variant)) => Cow::from(variant.as_str()),
_ => Cow::from(self.name.as_str()),
}
}
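    /// Builds a `PackageDefinition`, encoding stage, variant, and notes as
    /// `-DLPKG_*` defines so the catalogue metadata travels with the build flags.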
pub fn to_package_definition(&self) -> PackageDefinition {
let mut pkg = PackageDefinition::new(&self.name, &self.version);
if let Some(stage) = &self.stage {
pkg.optimizations
.cflags
.push(format!("-DLPKG_STAGE={}", stage.to_uppercase()));
}
if let Some(variant) = &self.variant {
pkg.optimizations
.cflags
.push(format!("-DLPKG_VARIANT={}", variant.to_uppercase()));
}
if let Some(notes) = &self.notes {
pkg.optimizations
.cflags
.push(format!("-DLPKG_NOTES={}", notes.replace(' ', "_")));
}
pkg
}
fn from_book_package(pkg: BookPackage) -> Option<Self> {
let version = pkg.version?;
Some(Self {
name: pkg.name,
version,
chapter: pkg.chapter,
section: pkg.section,
stage: pkg.stage,
variant: pkg.variant,
notes: pkg.notes,
})
}
}
pub fn fetch_catalog(base_url: &str) -> Result<Vec<MlfsPackageRecord>> {
let options = FetchOptions::new(base_url, BookKind::Mlfs);
let packages = lfs::fetch_book(&options)?;
let mut records = packages
.into_iter()
.filter_map(MlfsPackageRecord::from_book_package)
.collect::<Vec<_>>();
if records.is_empty() {
return Err(anyhow!("No packages parsed from MLFS book at {base_url}."));
}
records.sort_by(|a, b| a.name.cmp(&b.name).then(a.variant.cmp(&b.variant)));
Ok(records)
}
pub fn load_cached_catalog() -> Result<Vec<MlfsPackageRecord>> {
let raw = include_str!("../../data/mlfs_ml-12.4-40-multilib.json");
let records: Vec<MlfsPackageRecord> =
serde_json::from_str(raw).context("parsing cached MLFS package manifest")?;
Ok(records)
}
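/// Prefers a live fetch of the MLFS book, falling back to the bundled JSON
/// manifest when the fetch or parse fails.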
pub fn load_or_fetch_catalog(base_url: Option<&str>) -> Result<Vec<MlfsPackageRecord>> {
let base = base_url.unwrap_or(DEFAULT_MLFS_BASE_URL);
match fetch_catalog(base) {
Ok(records) => Ok(records),
Err(err) => {
tracing::warn!("mlfs_fetch_error" = %err, "Falling back to cached MLFS package list");
load_cached_catalog()
}
}
}

src/pkgs/mod.rs

@@ -1 +1,4 @@
pub mod by_name;
pub mod mlfs;
pub mod package;
pub mod scaffolder;

src/pkgs/package.rs Normal file

@@ -0,0 +1,74 @@
use serde::{Deserialize, Serialize};
/// High-level description of a package managed by LPKG.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PackageDefinition {
pub name: String,
pub version: String,
pub source: Option<String>,
pub md5: Option<String>,
pub configure_args: Vec<String>,
pub build_commands: Vec<String>,
pub install_commands: Vec<String>,
pub dependencies: Vec<String>,
pub optimizations: OptimizationSettings,
}
impl PackageDefinition {
pub fn new(name: impl Into<String>, version: impl Into<String>) -> Self {
Self {
name: name.into(),
version: version.into(),
source: None,
md5: None,
configure_args: Vec::new(),
build_commands: Vec::new(),
install_commands: Vec::new(),
dependencies: Vec::new(),
optimizations: OptimizationSettings::default(),
}
}
}
/// Tunable compiler and linker flags applied during package builds.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OptimizationSettings {
pub enable_lto: bool,
pub enable_pgo: bool,
pub cflags: Vec<String>,
pub ldflags: Vec<String>,
pub profdata: Option<String>,
}
impl Default for OptimizationSettings {
fn default() -> Self {
Self {
enable_lto: true,
enable_pgo: true,
cflags: vec![
"-O3".to_string(),
"-flto".to_string(),
"-fprofile-generate".to_string(),
],
ldflags: vec!["-flto".to_string(), "-fprofile-generate".to_string()],
profdata: None,
}
}
}
impl OptimizationSettings {
/// Convenience helper for disabling instrumentation once profile data has been gathered.
pub fn for_pgo_replay(profdata: impl Into<String>) -> Self {
Self {
enable_lto: true,
enable_pgo: true,
cflags: vec![
"-O3".to_string(),
"-flto".to_string(),
"-fprofile-use".to_string(),
],
ldflags: vec!["-flto".to_string(), "-fprofile-use".to_string()],
profdata: Some(profdata.into()),
}
}
}
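#[cfg(test)]
mod tests {
    use super::*;

    // Added sketch: pins down the flag swap between the instrumented default
    // profile and the PGO replay profile ("merged.profdata" is a hypothetical
    // file name).
    #[test]
    fn pgo_replay_switches_profile_flags() {
        let instrumented = OptimizationSettings::default();
        assert!(instrumented.cflags.contains(&"-fprofile-generate".to_string()));
        let replay = OptimizationSettings::for_pgo_replay("merged.profdata");
        assert!(replay.cflags.contains(&"-fprofile-use".to_string()));
        assert_eq!(replay.profdata.as_deref(), Some("merged.profdata"));
    }
}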

src/pkgs/scaffolder.rs Normal file

@@ -0,0 +1,293 @@
use std::fs::{self, OpenOptions};
use std::io::Write;
use std::path::{Path, PathBuf};
use anyhow::{Context, Result, anyhow};
use crate::pkgs::package::{OptimizationSettings, PackageDefinition};
#[derive(Debug, Clone)]
pub struct ScaffoldRequest {
pub name: String,
pub version: String,
pub source: Option<String>,
pub md5: Option<String>,
pub configure_args: Vec<String>,
pub build_commands: Vec<String>,
pub install_commands: Vec<String>,
pub dependencies: Vec<String>,
pub enable_lto: bool,
pub enable_pgo: bool,
pub cflags: Vec<String>,
pub ldflags: Vec<String>,
pub profdata: Option<String>,
pub stage: Option<String>,
pub variant: Option<String>,
pub notes: Option<String>,
pub module_override: Option<String>,
}
#[derive(Debug, Clone)]
pub struct ScaffoldResult {
pub module_path: PathBuf,
pub prefix_module: PathBuf,
pub by_name_module: PathBuf,
pub definition: PackageDefinition,
}
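/// Generates `by_name/<two-letter prefix>/<module>/mod.rs` for the requested
/// package (e.g. binutils lands in `by_name/bi/binutils/mod.rs`) and keeps the
/// intermediate `mod.rs` declarations in sync.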
pub fn scaffold_package(
base_dir: impl AsRef<Path>,
request: ScaffoldRequest,
) -> Result<ScaffoldResult> {
let base_dir = base_dir.as_ref();
if !base_dir.ends_with("by_name") {
return Err(anyhow!("expected base directory ending with 'by_name'"));
}
let module_source_name = request.module_override.as_deref().unwrap_or(&request.name);
let module_name = sanitize(module_source_name);
let prefix = prefix(&module_name);
let prefix_dir = base_dir.join(&prefix);
fs::create_dir_all(&prefix_dir)
.with_context(|| format!("creating prefix directory {:?}", prefix_dir))?;
let by_name_mod = base_dir.join("mod.rs");
ensure_mod_entry(&by_name_mod, &prefix)?;
let prefix_mod = prefix_dir.join("mod.rs");
ensure_mod_entry(&prefix_mod, &module_name)?;
let package_dir = prefix_dir.join(&module_name);
if package_dir.exists() {
return Err(anyhow!("package module {:?} already exists", package_dir));
}
fs::create_dir_all(&package_dir)
.with_context(|| format!("creating package directory {:?}", package_dir))?;
let module_path = package_dir.join("mod.rs");
let definition = build_definition(&request);
let source = generate_module_source(&request, &definition);
fs::write(&module_path, source)
.with_context(|| format!("writing module source to {:?}", module_path))?;
Ok(ScaffoldResult {
module_path,
prefix_module: prefix_mod,
by_name_module: by_name_mod,
definition,
})
}
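/// Ensures `pub mod <module>;` exists in the given `mod.rs`, creating the file
/// or appending the declaration only when it is missing.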
fn ensure_mod_entry(path: &Path, module: &str) -> Result<()> {
let entry = format!("pub mod {};", module);
if path.exists() {
let contents =
fs::read_to_string(path).with_context(|| format!("reading module file {:?}", path))?;
        if contents.contains(&entry) {
return Ok(());
}
let mut file = OpenOptions::new()
.append(true)
.open(path)
.with_context(|| format!("opening module file {:?}", path))?;
writeln!(file, "pub mod {};", module)
.with_context(|| format!("appending to module file {:?}", path))?;
} else {
fs::write(path, format!("pub mod {};\n", module))
.with_context(|| format!("creating module file {:?}", path))?;
}
Ok(())
}
fn build_definition(request: &ScaffoldRequest) -> PackageDefinition {
let mut pkg = PackageDefinition::new(&request.name, &request.version);
pkg.source = request.source.clone();
pkg.md5 = request.md5.clone();
pkg.configure_args = request.configure_args.clone();
pkg.build_commands = request.build_commands.clone();
pkg.install_commands = request.install_commands.clone();
pkg.dependencies = request.dependencies.clone();
let mut cflags = if request.cflags.is_empty() {
default_cflags(request)
} else {
request.cflags.clone()
};
let mut ldflags = if request.ldflags.is_empty() {
default_ldflags(request)
} else {
request.ldflags.clone()
};
dedup(&mut cflags);
dedup(&mut ldflags);
let profdata = request.profdata.clone();
let profdata_clone = profdata.clone();
pkg.optimizations = match profdata_clone {
Some(path) => OptimizationSettings::for_pgo_replay(path),
None => OptimizationSettings::default(),
};
pkg.optimizations.enable_lto = request.enable_lto;
pkg.optimizations.enable_pgo = request.enable_pgo;
pkg.optimizations.cflags = cflags;
pkg.optimizations.ldflags = ldflags;
pkg.optimizations.profdata = profdata;
pkg
}
fn default_cflags(request: &ScaffoldRequest) -> Vec<String> {
let mut flags = vec!["-O3".to_string(), "-flto".to_string()];
if request.enable_pgo {
if request.profdata.is_some() {
flags.push("-fprofile-use".to_string());
} else {
flags.push("-fprofile-generate".to_string());
}
}
flags
}
fn default_ldflags(request: &ScaffoldRequest) -> Vec<String> {
let mut flags = vec!["-flto".to_string()];
if request.enable_pgo {
if request.profdata.is_some() {
flags.push("-fprofile-use".to_string());
} else {
flags.push("-fprofile-generate".to_string());
}
}
flags
}
fn dedup(values: &mut Vec<String>) {
let mut seen = std::collections::BTreeSet::new();
values.retain(|value| seen.insert(value.clone()));
}
fn generate_module_source(request: &ScaffoldRequest, definition: &PackageDefinition) -> String {
let mut metadata = Vec::new();
if let Some(stage) = &request.stage {
metadata.push(format!("stage: {}", stage));
}
if let Some(variant) = &request.variant {
metadata.push(format!("variant: {}", variant));
}
if let Some(notes) = &request.notes {
metadata.push(format!("notes: {}", notes));
}
let metadata = if metadata.is_empty() {
String::new()
} else {
format!("// MLFS metadata: {}\n\n", metadata.join(", "))
};
let configure_args = format_vec(&definition.configure_args);
let build_commands = format_vec(&definition.build_commands);
let install_commands = format_vec(&definition.install_commands);
let dependencies = format_vec(&definition.dependencies);
let cflags = format_vec(&definition.optimizations.cflags);
let ldflags = format_vec(&definition.optimizations.ldflags);
let source = format_option(&definition.source);
let md5 = format_option(&definition.md5);
let profdata = format_option(&definition.optimizations.profdata);
format!(
"{metadata}use crate::pkgs::package::{{OptimizationSettings, PackageDefinition}};\n\n\
pub fn definition() -> PackageDefinition {{\n\
let mut pkg = PackageDefinition::new(\"{name}\", \"{version}\");\n\
pkg.source = {source};\n\
pkg.md5 = {md5};\n\
pkg.configure_args = {configure_args};\n\
pkg.build_commands = {build_commands};\n\
pkg.install_commands = {install_commands};\n\
pkg.dependencies = {dependencies};\n\
let profdata = {profdata};\n\
let profdata_clone = profdata.clone();\n\
pkg.optimizations = match profdata_clone {{\n\
Some(path) => OptimizationSettings::for_pgo_replay(path),\n\
None => OptimizationSettings::default(),\n\
}};\n\
pkg.optimizations.enable_lto = {enable_lto};\n\
pkg.optimizations.enable_pgo = {enable_pgo};\n\
pkg.optimizations.cflags = {cflags};\n\
pkg.optimizations.ldflags = {ldflags};\n\
pkg.optimizations.profdata = profdata;\n\
pkg\n\
}}\n",
metadata = metadata,
name = request.name,
version = request.version,
source = source,
md5 = md5,
configure_args = configure_args,
build_commands = build_commands,
install_commands = install_commands,
dependencies = dependencies,
profdata = profdata,
enable_lto = request.enable_lto,
enable_pgo = request.enable_pgo,
cflags = cflags,
ldflags = ldflags,
)
}
fn format_vec(values: &[String]) -> String {
if values.is_empty() {
"Vec::new()".to_string()
} else {
let items: Vec<String> = values
.iter()
.map(|v| format!("\"{}\".to_string()", escape(v)))
.collect();
format!("vec![{}]", items.join(", "))
}
}
fn format_option(value: &Option<String>) -> String {
match value {
Some(v) => format!("Some(\"{}\".to_string())", escape(v)),
None => "None".to_string(),
}
}
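/// Lowercases ASCII alphanumerics and maps every other character to `_`
/// (e.g. `XML::Parser` becomes `xml__parser`); a leading digit gains a `p`
/// prefix so the result stays a valid module name.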
fn sanitize(name: &str) -> String {
    let mut out = String::new();
    for ch in name.chars() {
        if ch.is_ascii_alphanumeric() {
            out.push(ch.to_ascii_lowercase());
        } else {
            out.push('_');
        }
    }
if out.is_empty() {
out.push_str("pkg");
}
if out
.chars()
.next()
.map(|c| c.is_ascii_digit())
.unwrap_or(false)
{
out.insert(0, 'p');
}
out
}
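/// Two-character shard directory under `by_name`, e.g. `bi` for `binutils`.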
fn prefix(module: &str) -> String {
let mut chars = module.chars();
let first = chars.next().unwrap_or('p');
let second = chars.next().unwrap_or('k');
let mut s = String::new();
s.push(first);
s.push(second);
s
}
fn escape(input: &str) -> String {
input.replace('\\', "\\\\").replace('"', "\\\"")
}

src/tui/disk_manager.rs

@@ -1,7 +1,7 @@
// src/tui/disk_manager.rs
use std::{
fs::{File, read_dir},
-    io::{self, Seek, SeekFrom, Write},
+    io::{self, Seek, SeekFrom},
path::PathBuf,
};
@@ -186,12 +186,12 @@ impl DiskManager {
},
};
-        // Create list of lines to display:
+        // Create list of lines to display using public GPT API:
let mut lines: Vec<String> = Vec::new();
lines.push(format!("Partitions on {}:", disk.display()));
-        for (i, entry_opt) in gpt.partitions.iter().enumerate() {
-            if let Some(entry) = entry_opt {
-                let name = entry.partition_name.to_string();
+        for (i, entry) in gpt.iter() {
+            if entry.is_used() {
+                let name = entry.partition_name.as_str();
lines.push(format!(
"{}: {} -> {} (type: {})",
i,
@@ -388,9 +388,9 @@ impl DiskManager {
let sectors = (size_mb as u128 * 1024 * 1024 / 512) as u64;
// choose starting LBA: find max ending_lba among existing partitions; align to 2048
let last_end = gpt
-            .partitions
            .iter()
-            .filter_map(|p| p.as_ref().map(|e| e.ending_lba))
+            .filter(|(_, e)| e.is_used())
+            .map(|(_, e)| e.ending_lba)
.max()
.unwrap_or(2048);
let start = ((last_end + 2048) / 2048) * 2048 + 1;
@@ -410,15 +410,15 @@
};
new_entry.partition_type_guid = type_guid;
-        // find first empty partition slot
-        let idx_opt = gpt.partitions.iter().position(|p| p.is_none());
+        // find first empty partition slot (indexing is 1-based for gptman::GPT)
+        let idx_opt = gpt.iter().find(|(_, e)| e.is_unused()).map(|(i, _)| i);
let idx = match idx_opt {
Some(i) => i,
None => return Err("No free GPT partition entries (maxed out)".into()),
};
// assign and write
-        gpt.partitions[idx] = Some(new_entry);
+        gpt[idx] = new_entry;
// Seek to start (important)
file.seek(SeekFrom::Start(0))?;


@@ -1,7 +1,6 @@
use crate::tui::disk_manager::DiskManager;
use crossterm::event::{self, Event, KeyCode};
use std::error::Error;
-use std::io::Stdout;
use tui::{
Terminal,
backend::CrosstermBackend,
@@ -11,7 +10,7 @@ use tui::{
};
pub fn show_main_menu() -> Result<(), Box<dyn Error>> {
-    let mut stdout = std::io::stdout();
+    let stdout = std::io::stdout();
let backend = CrosstermBackend::new(stdout);
let mut terminal = Terminal::new(backend)?;
@@ -37,7 +36,7 @@ pub fn show_main_menu() -> Result<(), Box<dyn Error>> {
if event::poll(std::time::Duration::from_millis(100))? {
if let Event::Key(key) = event::read()? {
match key.code {
-                    KeyCode::Char('1') => DiskManager::show_disk_manager(&mut terminal)?,
+                    KeyCode::Char('1') => DiskManager::run_tui()?,
KeyCode::Char('0') => break,
_ => {}
}


@@ -18,9 +18,9 @@ impl Theme {
}
impl Settings {
-    #[instrument(skip(terminal))]
+    #[instrument(skip(_terminal))]
pub fn show_settings(
-        terminal: &mut Terminal<CrosstermBackend<Stdout>>,
+        _terminal: &mut Terminal<CrosstermBackend<Stdout>>,
) -> Result<(), Box<dyn std::error::Error>> {
// Render settings UI here
Ok(())